& so it begins
This commit is contained in:
13
packages/db/drizzle.config.ts
Normal file
13
packages/db/drizzle.config.ts
Normal file
@@ -0,0 +1,13 @@
|
||||
import { defineConfig } from "drizzle-kit";
|
||||
import { settings } from "@pkg/settings";
|
||||
|
||||
export default defineConfig({
|
||||
schema: "./schema",
|
||||
out: "./migrations",
|
||||
dialect: "postgresql",
|
||||
verbose: true,
|
||||
strict: false,
|
||||
dbCredentials: {
|
||||
url: settings.databaseUrl,
|
||||
},
|
||||
});
|
||||
11
packages/db/index.ts
Normal file
11
packages/db/index.ts
Normal file
@@ -0,0 +1,11 @@
|
||||
import { drizzle } from "drizzle-orm/postgres-js";
|
||||
import { settings } from "@pkg/settings";
|
||||
import * as schema from "./schema";
|
||||
|
||||
const db = drizzle(settings.databaseUrl, { schema });
|
||||
|
||||
export type Database = typeof db;
|
||||
|
||||
export * from "drizzle-orm";
|
||||
|
||||
export { db, schema };
|
||||
119
packages/db/migrations/0000_woozy_mother_askani.sql
Normal file
119
packages/db/migrations/0000_woozy_mother_askani.sql
Normal file
@@ -0,0 +1,119 @@
|
||||
CREATE TABLE IF NOT EXISTS "two_factor" (
	"id" text PRIMARY KEY NOT NULL,
	"secret" text NOT NULL,
	"backup_codes" json,
	"user_id" text NOT NULL,
	"created_at" timestamp NOT NULL,
	"updated_at" timestamp NOT NULL
);
--> statement-breakpoint
CREATE TABLE IF NOT EXISTS "twofa_sessions" (
	"id" text PRIMARY KEY NOT NULL,
	"user_id" text NOT NULL,
	"session_id" text NOT NULL,
	"verification_token" text NOT NULL,
	"code_used" text,
	"status" varchar(16) NOT NULL,
	"attempts" integer DEFAULT 0 NOT NULL,
	"max_attempts" integer DEFAULT 5 NOT NULL,
	"verified_at" timestamp,
	"expires_at" timestamp NOT NULL,
	"created_at" timestamp NOT NULL,
	"ip_address" text DEFAULT '',
	"user_agent" text DEFAULT '',
	CONSTRAINT "twofa_sessions_verification_token_unique" UNIQUE("verification_token")
);
--> statement-breakpoint
CREATE TABLE IF NOT EXISTS "account" (
	"id" text PRIMARY KEY NOT NULL,
	"account_id" text NOT NULL,
	"provider_id" text NOT NULL,
	"user_id" text NOT NULL,
	"access_token" text,
	"refresh_token" text,
	"id_token" text,
	"access_token_expires_at" timestamp,
	"refresh_token_expires_at" timestamp,
	"scope" text,
	"password" text,
	"created_at" timestamp DEFAULT now() NOT NULL,
	"updated_at" timestamp NOT NULL
);
--> statement-breakpoint
CREATE TABLE IF NOT EXISTS "user" (
	"id" text PRIMARY KEY NOT NULL,
	"name" text NOT NULL,
	"email" text NOT NULL,
	"email_verified" boolean DEFAULT false NOT NULL,
	"image" text,
	"created_at" timestamp DEFAULT now() NOT NULL,
	"updated_at" timestamp DEFAULT now() NOT NULL,
	"username" text,
	"display_username" text,
	"role" text,
	"banned" boolean DEFAULT false,
	"ban_reason" text,
	"ban_expires" timestamp,
	"onboarding_done" boolean DEFAULT false,
	"last2_fa_verified_at" timestamp,
	"parent_id" text,
	CONSTRAINT "user_email_unique" UNIQUE("email"),
	CONSTRAINT "user_username_unique" UNIQUE("username")
);
--> statement-breakpoint
CREATE TABLE IF NOT EXISTS "verification" (
	"id" text PRIMARY KEY NOT NULL,
	"identifier" text NOT NULL,
	"value" text NOT NULL,
	"expires_at" timestamp NOT NULL,
	"created_at" timestamp DEFAULT now() NOT NULL,
	"updated_at" timestamp DEFAULT now() NOT NULL
);
--> statement-breakpoint
CREATE TABLE IF NOT EXISTS "notifications" (
	"id" serial PRIMARY KEY NOT NULL,
	"title" text NOT NULL,
	"body" text NOT NULL,
	"priority" varchar(12) DEFAULT 'normal' NOT NULL,
	"type" varchar(12) NOT NULL,
	"category" varchar(64),
	"is_read" boolean DEFAULT false NOT NULL,
	"is_archived" boolean DEFAULT false NOT NULL,
	"action_url" text,
	"action_type" varchar(16),
	"action_data" json,
	"icon" varchar(64),
	"user_id" text NOT NULL,
	"sent_at" timestamp NOT NULL,
	"read_at" timestamp,
	"expires_at" timestamp,
	"created_at" timestamp NOT NULL,
	"updated_at" timestamp NOT NULL
);
--> statement-breakpoint
DO $$ BEGIN
 ALTER TABLE "two_factor" ADD CONSTRAINT "two_factor_user_id_user_id_fk" FOREIGN KEY ("user_id") REFERENCES "public"."user"("id") ON DELETE cascade ON UPDATE no action;
EXCEPTION
 WHEN duplicate_object THEN null;
END $$;
--> statement-breakpoint
DO $$ BEGIN
 ALTER TABLE "twofa_sessions" ADD CONSTRAINT "twofa_sessions_user_id_user_id_fk" FOREIGN KEY ("user_id") REFERENCES "public"."user"("id") ON DELETE cascade ON UPDATE no action;
EXCEPTION
 WHEN duplicate_object THEN null;
END $$;
--> statement-breakpoint
DO $$ BEGIN
 ALTER TABLE "account" ADD CONSTRAINT "account_user_id_user_id_fk" FOREIGN KEY ("user_id") REFERENCES "public"."user"("id") ON DELETE cascade ON UPDATE no action;
EXCEPTION
 WHEN duplicate_object THEN null;
END $$;
--> statement-breakpoint
DO $$ BEGIN
 ALTER TABLE "notifications" ADD CONSTRAINT "notifications_user_id_user_id_fk" FOREIGN KEY ("user_id") REFERENCES "public"."user"("id") ON DELETE cascade ON UPDATE no action;
EXCEPTION
 WHEN duplicate_object THEN null;
END $$;
--> statement-breakpoint
CREATE INDEX IF NOT EXISTS "account_userId_idx" ON "account" USING btree ("user_id");--> statement-breakpoint
CREATE INDEX IF NOT EXISTS "verification_identifier_idx" ON "verification" USING btree ("identifier");
|
||||
655
packages/db/migrations/meta/0000_snapshot.json
Normal file
655
packages/db/migrations/meta/0000_snapshot.json
Normal file
@@ -0,0 +1,655 @@
|
||||
{
|
||||
"id": "1bb75845-f9cf-41a0-96ec-10c66fdc34d6",
|
||||
"prevId": "00000000-0000-0000-0000-000000000000",
|
||||
"version": "7",
|
||||
"dialect": "postgresql",
|
||||
"tables": {
|
||||
"public.two_factor": {
|
||||
"name": "two_factor",
|
||||
"schema": "",
|
||||
"columns": {
|
||||
"id": {
|
||||
"name": "id",
|
||||
"type": "text",
|
||||
"primaryKey": true,
|
||||
"notNull": true
|
||||
},
|
||||
"secret": {
|
||||
"name": "secret",
|
||||
"type": "text",
|
||||
"primaryKey": false,
|
||||
"notNull": true
|
||||
},
|
||||
"backup_codes": {
|
||||
"name": "backup_codes",
|
||||
"type": "json",
|
||||
"primaryKey": false,
|
||||
"notNull": false
|
||||
},
|
||||
"user_id": {
|
||||
"name": "user_id",
|
||||
"type": "text",
|
||||
"primaryKey": false,
|
||||
"notNull": true
|
||||
},
|
||||
"created_at": {
|
||||
"name": "created_at",
|
||||
"type": "timestamp",
|
||||
"primaryKey": false,
|
||||
"notNull": true
|
||||
},
|
||||
"updated_at": {
|
||||
"name": "updated_at",
|
||||
"type": "timestamp",
|
||||
"primaryKey": false,
|
||||
"notNull": true
|
||||
}
|
||||
},
|
||||
"indexes": {},
|
||||
"foreignKeys": {
|
||||
"two_factor_user_id_user_id_fk": {
|
||||
"name": "two_factor_user_id_user_id_fk",
|
||||
"tableFrom": "two_factor",
|
||||
"tableTo": "user",
|
||||
"columnsFrom": [
|
||||
"user_id"
|
||||
],
|
||||
"columnsTo": [
|
||||
"id"
|
||||
],
|
||||
"onDelete": "cascade",
|
||||
"onUpdate": "no action"
|
||||
}
|
||||
},
|
||||
"compositePrimaryKeys": {},
|
||||
"uniqueConstraints": {},
|
||||
"policies": {},
|
||||
"checkConstraints": {},
|
||||
"isRLSEnabled": false
|
||||
},
|
||||
"public.twofa_sessions": {
|
||||
"name": "twofa_sessions",
|
||||
"schema": "",
|
||||
"columns": {
|
||||
"id": {
|
||||
"name": "id",
|
||||
"type": "text",
|
||||
"primaryKey": true,
|
||||
"notNull": true
|
||||
},
|
||||
"user_id": {
|
||||
"name": "user_id",
|
||||
"type": "text",
|
||||
"primaryKey": false,
|
||||
"notNull": true
|
||||
},
|
||||
"session_id": {
|
||||
"name": "session_id",
|
||||
"type": "text",
|
||||
"primaryKey": false,
|
||||
"notNull": true
|
||||
},
|
||||
"verification_token": {
|
||||
"name": "verification_token",
|
||||
"type": "text",
|
||||
"primaryKey": false,
|
||||
"notNull": true
|
||||
},
|
||||
"code_used": {
|
||||
"name": "code_used",
|
||||
"type": "text",
|
||||
"primaryKey": false,
|
||||
"notNull": false
|
||||
},
|
||||
"status": {
|
||||
"name": "status",
|
||||
"type": "varchar(16)",
|
||||
"primaryKey": false,
|
||||
"notNull": true
|
||||
},
|
||||
"attempts": {
|
||||
"name": "attempts",
|
||||
"type": "integer",
|
||||
"primaryKey": false,
|
||||
"notNull": true,
|
||||
"default": 0
|
||||
},
|
||||
"max_attempts": {
|
||||
"name": "max_attempts",
|
||||
"type": "integer",
|
||||
"primaryKey": false,
|
||||
"notNull": true,
|
||||
"default": 5
|
||||
},
|
||||
"verified_at": {
|
||||
"name": "verified_at",
|
||||
"type": "timestamp",
|
||||
"primaryKey": false,
|
||||
"notNull": false
|
||||
},
|
||||
"expires_at": {
|
||||
"name": "expires_at",
|
||||
"type": "timestamp",
|
||||
"primaryKey": false,
|
||||
"notNull": true
|
||||
},
|
||||
"created_at": {
|
||||
"name": "created_at",
|
||||
"type": "timestamp",
|
||||
"primaryKey": false,
|
||||
"notNull": true
|
||||
},
|
||||
"ip_address": {
|
||||
"name": "ip_address",
|
||||
"type": "text",
|
||||
"primaryKey": false,
|
||||
"notNull": false,
|
||||
"default": "''"
|
||||
},
|
||||
"user_agent": {
|
||||
"name": "user_agent",
|
||||
"type": "text",
|
||||
"primaryKey": false,
|
||||
"notNull": false,
|
||||
"default": "''"
|
||||
}
|
||||
},
|
||||
"indexes": {},
|
||||
"foreignKeys": {
|
||||
"twofa_sessions_user_id_user_id_fk": {
|
||||
"name": "twofa_sessions_user_id_user_id_fk",
|
||||
"tableFrom": "twofa_sessions",
|
||||
"tableTo": "user",
|
||||
"columnsFrom": [
|
||||
"user_id"
|
||||
],
|
||||
"columnsTo": [
|
||||
"id"
|
||||
],
|
||||
"onDelete": "cascade",
|
||||
"onUpdate": "no action"
|
||||
}
|
||||
},
|
||||
"compositePrimaryKeys": {},
|
||||
"uniqueConstraints": {
|
||||
"twofa_sessions_verification_token_unique": {
|
||||
"name": "twofa_sessions_verification_token_unique",
|
||||
"nullsNotDistinct": false,
|
||||
"columns": [
|
||||
"verification_token"
|
||||
]
|
||||
}
|
||||
},
|
||||
"policies": {},
|
||||
"checkConstraints": {},
|
||||
"isRLSEnabled": false
|
||||
},
|
||||
"public.account": {
|
||||
"name": "account",
|
||||
"schema": "",
|
||||
"columns": {
|
||||
"id": {
|
||||
"name": "id",
|
||||
"type": "text",
|
||||
"primaryKey": true,
|
||||
"notNull": true
|
||||
},
|
||||
"account_id": {
|
||||
"name": "account_id",
|
||||
"type": "text",
|
||||
"primaryKey": false,
|
||||
"notNull": true
|
||||
},
|
||||
"provider_id": {
|
||||
"name": "provider_id",
|
||||
"type": "text",
|
||||
"primaryKey": false,
|
||||
"notNull": true
|
||||
},
|
||||
"user_id": {
|
||||
"name": "user_id",
|
||||
"type": "text",
|
||||
"primaryKey": false,
|
||||
"notNull": true
|
||||
},
|
||||
"access_token": {
|
||||
"name": "access_token",
|
||||
"type": "text",
|
||||
"primaryKey": false,
|
||||
"notNull": false
|
||||
},
|
||||
"refresh_token": {
|
||||
"name": "refresh_token",
|
||||
"type": "text",
|
||||
"primaryKey": false,
|
||||
"notNull": false
|
||||
},
|
||||
"id_token": {
|
||||
"name": "id_token",
|
||||
"type": "text",
|
||||
"primaryKey": false,
|
||||
"notNull": false
|
||||
},
|
||||
"access_token_expires_at": {
|
||||
"name": "access_token_expires_at",
|
||||
"type": "timestamp",
|
||||
"primaryKey": false,
|
||||
"notNull": false
|
||||
},
|
||||
"refresh_token_expires_at": {
|
||||
"name": "refresh_token_expires_at",
|
||||
"type": "timestamp",
|
||||
"primaryKey": false,
|
||||
"notNull": false
|
||||
},
|
||||
"scope": {
|
||||
"name": "scope",
|
||||
"type": "text",
|
||||
"primaryKey": false,
|
||||
"notNull": false
|
||||
},
|
||||
"password": {
|
||||
"name": "password",
|
||||
"type": "text",
|
||||
"primaryKey": false,
|
||||
"notNull": false
|
||||
},
|
||||
"created_at": {
|
||||
"name": "created_at",
|
||||
"type": "timestamp",
|
||||
"primaryKey": false,
|
||||
"notNull": true,
|
||||
"default": "now()"
|
||||
},
|
||||
"updated_at": {
|
||||
"name": "updated_at",
|
||||
"type": "timestamp",
|
||||
"primaryKey": false,
|
||||
"notNull": true
|
||||
}
|
||||
},
|
||||
"indexes": {
|
||||
"account_userId_idx": {
|
||||
"name": "account_userId_idx",
|
||||
"columns": [
|
||||
{
|
||||
"expression": "user_id",
|
||||
"isExpression": false,
|
||||
"asc": true,
|
||||
"nulls": "last"
|
||||
}
|
||||
],
|
||||
"isUnique": false,
|
||||
"concurrently": false,
|
||||
"method": "btree",
|
||||
"with": {}
|
||||
}
|
||||
},
|
||||
"foreignKeys": {
|
||||
"account_user_id_user_id_fk": {
|
||||
"name": "account_user_id_user_id_fk",
|
||||
"tableFrom": "account",
|
||||
"tableTo": "user",
|
||||
"columnsFrom": [
|
||||
"user_id"
|
||||
],
|
||||
"columnsTo": [
|
||||
"id"
|
||||
],
|
||||
"onDelete": "cascade",
|
||||
"onUpdate": "no action"
|
||||
}
|
||||
},
|
||||
"compositePrimaryKeys": {},
|
||||
"uniqueConstraints": {},
|
||||
"policies": {},
|
||||
"checkConstraints": {},
|
||||
"isRLSEnabled": false
|
||||
},
|
||||
"public.user": {
|
||||
"name": "user",
|
||||
"schema": "",
|
||||
"columns": {
|
||||
"id": {
|
||||
"name": "id",
|
||||
"type": "text",
|
||||
"primaryKey": true,
|
||||
"notNull": true
|
||||
},
|
||||
"name": {
|
||||
"name": "name",
|
||||
"type": "text",
|
||||
"primaryKey": false,
|
||||
"notNull": true
|
||||
},
|
||||
"email": {
|
||||
"name": "email",
|
||||
"type": "text",
|
||||
"primaryKey": false,
|
||||
"notNull": true
|
||||
},
|
||||
"email_verified": {
|
||||
"name": "email_verified",
|
||||
"type": "boolean",
|
||||
"primaryKey": false,
|
||||
"notNull": true,
|
||||
"default": false
|
||||
},
|
||||
"image": {
|
||||
"name": "image",
|
||||
"type": "text",
|
||||
"primaryKey": false,
|
||||
"notNull": false
|
||||
},
|
||||
"created_at": {
|
||||
"name": "created_at",
|
||||
"type": "timestamp",
|
||||
"primaryKey": false,
|
||||
"notNull": true,
|
||||
"default": "now()"
|
||||
},
|
||||
"updated_at": {
|
||||
"name": "updated_at",
|
||||
"type": "timestamp",
|
||||
"primaryKey": false,
|
||||
"notNull": true,
|
||||
"default": "now()"
|
||||
},
|
||||
"username": {
|
||||
"name": "username",
|
||||
"type": "text",
|
||||
"primaryKey": false,
|
||||
"notNull": false
|
||||
},
|
||||
"display_username": {
|
||||
"name": "display_username",
|
||||
"type": "text",
|
||||
"primaryKey": false,
|
||||
"notNull": false
|
||||
},
|
||||
"role": {
|
||||
"name": "role",
|
||||
"type": "text",
|
||||
"primaryKey": false,
|
||||
"notNull": false
|
||||
},
|
||||
"banned": {
|
||||
"name": "banned",
|
||||
"type": "boolean",
|
||||
"primaryKey": false,
|
||||
"notNull": false,
|
||||
"default": false
|
||||
},
|
||||
"ban_reason": {
|
||||
"name": "ban_reason",
|
||||
"type": "text",
|
||||
"primaryKey": false,
|
||||
"notNull": false
|
||||
},
|
||||
"ban_expires": {
|
||||
"name": "ban_expires",
|
||||
"type": "timestamp",
|
||||
"primaryKey": false,
|
||||
"notNull": false
|
||||
},
|
||||
"onboarding_done": {
|
||||
"name": "onboarding_done",
|
||||
"type": "boolean",
|
||||
"primaryKey": false,
|
||||
"notNull": false,
|
||||
"default": false
|
||||
},
|
||||
"last2_fa_verified_at": {
|
||||
"name": "last2_fa_verified_at",
|
||||
"type": "timestamp",
|
||||
"primaryKey": false,
|
||||
"notNull": false
|
||||
},
|
||||
"parent_id": {
|
||||
"name": "parent_id",
|
||||
"type": "text",
|
||||
"primaryKey": false,
|
||||
"notNull": false
|
||||
}
|
||||
},
|
||||
"indexes": {},
|
||||
"foreignKeys": {},
|
||||
"compositePrimaryKeys": {},
|
||||
"uniqueConstraints": {
|
||||
"user_email_unique": {
|
||||
"name": "user_email_unique",
|
||||
"nullsNotDistinct": false,
|
||||
"columns": [
|
||||
"email"
|
||||
]
|
||||
},
|
||||
"user_username_unique": {
|
||||
"name": "user_username_unique",
|
||||
"nullsNotDistinct": false,
|
||||
"columns": [
|
||||
"username"
|
||||
]
|
||||
}
|
||||
},
|
||||
"policies": {},
|
||||
"checkConstraints": {},
|
||||
"isRLSEnabled": false
|
||||
},
|
||||
"public.verification": {
|
||||
"name": "verification",
|
||||
"schema": "",
|
||||
"columns": {
|
||||
"id": {
|
||||
"name": "id",
|
||||
"type": "text",
|
||||
"primaryKey": true,
|
||||
"notNull": true
|
||||
},
|
||||
"identifier": {
|
||||
"name": "identifier",
|
||||
"type": "text",
|
||||
"primaryKey": false,
|
||||
"notNull": true
|
||||
},
|
||||
"value": {
|
||||
"name": "value",
|
||||
"type": "text",
|
||||
"primaryKey": false,
|
||||
"notNull": true
|
||||
},
|
||||
"expires_at": {
|
||||
"name": "expires_at",
|
||||
"type": "timestamp",
|
||||
"primaryKey": false,
|
||||
"notNull": true
|
||||
},
|
||||
"created_at": {
|
||||
"name": "created_at",
|
||||
"type": "timestamp",
|
||||
"primaryKey": false,
|
||||
"notNull": true,
|
||||
"default": "now()"
|
||||
},
|
||||
"updated_at": {
|
||||
"name": "updated_at",
|
||||
"type": "timestamp",
|
||||
"primaryKey": false,
|
||||
"notNull": true,
|
||||
"default": "now()"
|
||||
}
|
||||
},
|
||||
"indexes": {
|
||||
"verification_identifier_idx": {
|
||||
"name": "verification_identifier_idx",
|
||||
"columns": [
|
||||
{
|
||||
"expression": "identifier",
|
||||
"isExpression": false,
|
||||
"asc": true,
|
||||
"nulls": "last"
|
||||
}
|
||||
],
|
||||
"isUnique": false,
|
||||
"concurrently": false,
|
||||
"method": "btree",
|
||||
"with": {}
|
||||
}
|
||||
},
|
||||
"foreignKeys": {},
|
||||
"compositePrimaryKeys": {},
|
||||
"uniqueConstraints": {},
|
||||
"policies": {},
|
||||
"checkConstraints": {},
|
||||
"isRLSEnabled": false
|
||||
},
|
||||
"public.notifications": {
|
||||
"name": "notifications",
|
||||
"schema": "",
|
||||
"columns": {
|
||||
"id": {
|
||||
"name": "id",
|
||||
"type": "serial",
|
||||
"primaryKey": true,
|
||||
"notNull": true
|
||||
},
|
||||
"title": {
|
||||
"name": "title",
|
||||
"type": "text",
|
||||
"primaryKey": false,
|
||||
"notNull": true
|
||||
},
|
||||
"body": {
|
||||
"name": "body",
|
||||
"type": "text",
|
||||
"primaryKey": false,
|
||||
"notNull": true
|
||||
},
|
||||
"priority": {
|
||||
"name": "priority",
|
||||
"type": "varchar(12)",
|
||||
"primaryKey": false,
|
||||
"notNull": true,
|
||||
"default": "'normal'"
|
||||
},
|
||||
"type": {
|
||||
"name": "type",
|
||||
"type": "varchar(12)",
|
||||
"primaryKey": false,
|
||||
"notNull": true
|
||||
},
|
||||
"category": {
|
||||
"name": "category",
|
||||
"type": "varchar(64)",
|
||||
"primaryKey": false,
|
||||
"notNull": false
|
||||
},
|
||||
"is_read": {
|
||||
"name": "is_read",
|
||||
"type": "boolean",
|
||||
"primaryKey": false,
|
||||
"notNull": true,
|
||||
"default": false
|
||||
},
|
||||
"is_archived": {
|
||||
"name": "is_archived",
|
||||
"type": "boolean",
|
||||
"primaryKey": false,
|
||||
"notNull": true,
|
||||
"default": false
|
||||
},
|
||||
"action_url": {
|
||||
"name": "action_url",
|
||||
"type": "text",
|
||||
"primaryKey": false,
|
||||
"notNull": false
|
||||
},
|
||||
"action_type": {
|
||||
"name": "action_type",
|
||||
"type": "varchar(16)",
|
||||
"primaryKey": false,
|
||||
"notNull": false
|
||||
},
|
||||
"action_data": {
|
||||
"name": "action_data",
|
||||
"type": "json",
|
||||
"primaryKey": false,
|
||||
"notNull": false
|
||||
},
|
||||
"icon": {
|
||||
"name": "icon",
|
||||
"type": "varchar(64)",
|
||||
"primaryKey": false,
|
||||
"notNull": false
|
||||
},
|
||||
"user_id": {
|
||||
"name": "user_id",
|
||||
"type": "text",
|
||||
"primaryKey": false,
|
||||
"notNull": true
|
||||
},
|
||||
"sent_at": {
|
||||
"name": "sent_at",
|
||||
"type": "timestamp",
|
||||
"primaryKey": false,
|
||||
"notNull": true
|
||||
},
|
||||
"read_at": {
|
||||
"name": "read_at",
|
||||
"type": "timestamp",
|
||||
"primaryKey": false,
|
||||
"notNull": false
|
||||
},
|
||||
"expires_at": {
|
||||
"name": "expires_at",
|
||||
"type": "timestamp",
|
||||
"primaryKey": false,
|
||||
"notNull": false
|
||||
},
|
||||
"created_at": {
|
||||
"name": "created_at",
|
||||
"type": "timestamp",
|
||||
"primaryKey": false,
|
||||
"notNull": true
|
||||
},
|
||||
"updated_at": {
|
||||
"name": "updated_at",
|
||||
"type": "timestamp",
|
||||
"primaryKey": false,
|
||||
"notNull": true
|
||||
}
|
||||
},
|
||||
"indexes": {},
|
||||
"foreignKeys": {
|
||||
"notifications_user_id_user_id_fk": {
|
||||
"name": "notifications_user_id_user_id_fk",
|
||||
"tableFrom": "notifications",
|
||||
"tableTo": "user",
|
||||
"columnsFrom": [
|
||||
"user_id"
|
||||
],
|
||||
"columnsTo": [
|
||||
"id"
|
||||
],
|
||||
"onDelete": "cascade",
|
||||
"onUpdate": "no action"
|
||||
}
|
||||
},
|
||||
"compositePrimaryKeys": {},
|
||||
"uniqueConstraints": {},
|
||||
"policies": {},
|
||||
"checkConstraints": {},
|
||||
"isRLSEnabled": false
|
||||
}
|
||||
},
|
||||
"enums": {},
|
||||
"schemas": {},
|
||||
"sequences": {},
|
||||
"roles": {},
|
||||
"policies": {},
|
||||
"views": {},
|
||||
"_meta": {
|
||||
"columns": {},
|
||||
"schemas": {},
|
||||
"tables": {}
|
||||
}
|
||||
}
|
||||
13
packages/db/migrations/meta/_journal.json
Normal file
13
packages/db/migrations/meta/_journal.json
Normal file
@@ -0,0 +1,13 @@
|
||||
{
|
||||
"version": "7",
|
||||
"dialect": "postgresql",
|
||||
"entries": [
|
||||
{
|
||||
"idx": 0,
|
||||
"version": "7",
|
||||
"when": 1769954723767,
|
||||
"tag": "0000_woozy_mother_askani",
|
||||
"breakpoints": true
|
||||
}
|
||||
]
|
||||
}
|
||||
27
packages/db/package.json
Normal file
27
packages/db/package.json
Normal file
@@ -0,0 +1,27 @@
|
||||
{
|
||||
"name": "@pkg/db",
|
||||
"module": "index.ts",
|
||||
"type": "module",
|
||||
"scripts": {
|
||||
"db:gen": "drizzle-kit generate --config=drizzle.config.ts",
|
||||
"db:drop": "drizzle-kit drop --config=drizzle.config.ts",
|
||||
"db:push": "drizzle-kit push --config=drizzle.config.ts",
|
||||
"db:migrate": "drizzle-kit generate --config=drizzle.config.ts && drizzle-kit push --config=drizzle.config.ts",
|
||||
"db:forcemigrate": "drizzle-kit generate --config=drizzle.config.ts && drizzle-kit push --config=drizzle.config.ts --force",
|
||||
"dev": "drizzle-kit studio --host=0.0.0.0 --port=5420 --config=drizzle.config.ts --verbose"
|
||||
},
|
||||
"dependencies": {
|
||||
"@pkg/settings": "workspace:*",
|
||||
"dotenv": "^16.4.7",
|
||||
"drizzle-orm": "^0.36.1",
|
||||
"postgres": "^3.4.8"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@types/bun": "latest",
|
||||
"@types/pg": "^8.11.10",
|
||||
"drizzle-kit": "^0.28.0"
|
||||
},
|
||||
"peerDependencies": {
|
||||
"typescript": "^5.0.0"
|
||||
}
|
||||
}
|
||||
52
packages/db/schema/auth.schema.ts
Normal file
52
packages/db/schema/auth.schema.ts
Normal file
@@ -0,0 +1,52 @@
|
||||
import {
|
||||
integer,
|
||||
json,
|
||||
pgTable,
|
||||
text,
|
||||
timestamp,
|
||||
varchar,
|
||||
} from "drizzle-orm/pg-core";
|
||||
import { user } from "./better.auth.schema";
|
||||
import { relations } from "drizzle-orm";
|
||||
|
||||
export const twoFactor = pgTable("two_factor", {
|
||||
id: text("id").primaryKey(),
|
||||
secret: text("secret").notNull(),
|
||||
backupCodes: json("backup_codes").$type<string[]>(),
|
||||
userId: text("user_id")
|
||||
.notNull()
|
||||
.references(() => user.id, { onDelete: "cascade" }),
|
||||
createdAt: timestamp("created_at").notNull(),
|
||||
updatedAt: timestamp("updated_at").notNull(),
|
||||
});
|
||||
|
||||
export const twofaSessions = pgTable("twofa_sessions", {
|
||||
id: text("id").primaryKey(),
|
||||
userId: text("user_id")
|
||||
.notNull()
|
||||
.references(() => user.id, { onDelete: "cascade" }),
|
||||
sessionId: text("session_id").notNull(), // Better Auth session ID
|
||||
|
||||
// Verification Tracking
|
||||
verificationToken: text("verification_token").notNull().unique(), // Unique nonce for this attempt
|
||||
codeUsed: text("code_used"), // The TOTP code submitted (prevent replay)
|
||||
status: varchar("status", { length: 16 }).notNull(), // "pending" | "verified" | "failed" | "expired"
|
||||
|
||||
attempts: integer("attempts").default(0).notNull(),
|
||||
maxAttempts: integer("max_attempts").default(5).notNull(),
|
||||
|
||||
verifiedAt: timestamp("verified_at"),
|
||||
expiresAt: timestamp("expires_at").notNull(),
|
||||
createdAt: timestamp("created_at").notNull(),
|
||||
|
||||
// Security Audit
|
||||
ipAddress: text("ip_address").default(""),
|
||||
userAgent: text("user_agent").default(""),
|
||||
});
|
||||
|
||||
export const twofaSessionsRelations = relations(twofaSessions, ({ one }) => ({
|
||||
userAccount: one(user, {
|
||||
fields: [twofaSessions.userId],
|
||||
references: [user.id],
|
||||
}),
|
||||
}));
|
||||
75
packages/db/schema/better.auth.schema.ts
Normal file
75
packages/db/schema/better.auth.schema.ts
Normal file
@@ -0,0 +1,75 @@
|
||||
import { boolean, index, pgTable, text, timestamp } from "drizzle-orm/pg-core";
|
||||
import { relations } from "drizzle-orm";
|
||||
|
||||
export const user = pgTable("user", {
|
||||
id: text("id").primaryKey(),
|
||||
name: text("name").notNull(),
|
||||
email: text("email").notNull().unique(),
|
||||
emailVerified: boolean("email_verified").default(false).notNull(),
|
||||
image: text("image"),
|
||||
createdAt: timestamp("created_at").defaultNow().notNull(),
|
||||
updatedAt: timestamp("updated_at")
|
||||
.defaultNow()
|
||||
.$onUpdate(() => /* @__PURE__ */ new Date())
|
||||
.notNull(),
|
||||
username: text("username").unique(),
|
||||
displayUsername: text("display_username"),
|
||||
role: text("role"),
|
||||
banned: boolean("banned").default(false),
|
||||
banReason: text("ban_reason"),
|
||||
banExpires: timestamp("ban_expires"),
|
||||
onboardingDone: boolean("onboarding_done").default(false),
|
||||
last2FAVerifiedAt: timestamp("last2_fa_verified_at"),
|
||||
parentId: text("parent_id"),
|
||||
});
|
||||
|
||||
export const account = pgTable(
|
||||
"account",
|
||||
{
|
||||
id: text("id").primaryKey(),
|
||||
accountId: text("account_id").notNull(),
|
||||
providerId: text("provider_id").notNull(),
|
||||
userId: text("user_id")
|
||||
.notNull()
|
||||
.references(() => user.id, { onDelete: "cascade" }),
|
||||
accessToken: text("access_token"),
|
||||
refreshToken: text("refresh_token"),
|
||||
idToken: text("id_token"),
|
||||
accessTokenExpiresAt: timestamp("access_token_expires_at"),
|
||||
refreshTokenExpiresAt: timestamp("refresh_token_expires_at"),
|
||||
scope: text("scope"),
|
||||
password: text("password"),
|
||||
createdAt: timestamp("created_at").defaultNow().notNull(),
|
||||
updatedAt: timestamp("updated_at")
|
||||
.$onUpdate(() => /* @__PURE__ */ new Date())
|
||||
.notNull(),
|
||||
},
|
||||
(table) => [index("account_userId_idx").on(table.userId)],
|
||||
);
|
||||
|
||||
export const verification = pgTable(
|
||||
"verification",
|
||||
{
|
||||
id: text("id").primaryKey(),
|
||||
identifier: text("identifier").notNull(),
|
||||
value: text("value").notNull(),
|
||||
expiresAt: timestamp("expires_at").notNull(),
|
||||
createdAt: timestamp("created_at").defaultNow().notNull(),
|
||||
updatedAt: timestamp("updated_at")
|
||||
.defaultNow()
|
||||
.$onUpdate(() => /* @__PURE__ */ new Date())
|
||||
.notNull(),
|
||||
},
|
||||
(table) => [index("verification_identifier_idx").on(table.identifier)],
|
||||
);
|
||||
|
||||
export const userRelations = relations(user, ({ many }) => ({
|
||||
accounts: many(account),
|
||||
}));
|
||||
|
||||
export const accountRelations = relations(account, ({ one }) => ({
|
||||
user: one(user, {
|
||||
fields: [account.userId],
|
||||
references: [user.id],
|
||||
}),
|
||||
}));
|
||||
49
packages/db/schema/general.schema.ts
Normal file
49
packages/db/schema/general.schema.ts
Normal file
@@ -0,0 +1,49 @@
|
||||
import {
|
||||
boolean,
|
||||
json,
|
||||
pgTable,
|
||||
serial,
|
||||
text,
|
||||
timestamp,
|
||||
varchar,
|
||||
} from "drizzle-orm/pg-core";
|
||||
import { user } from "./better.auth.schema";
|
||||
import { relations } from "drizzle-orm";
|
||||
|
||||
export const notifications = pgTable("notifications", {
|
||||
id: serial("id").primaryKey(),
|
||||
|
||||
title: text("title").notNull(),
|
||||
body: text("body").notNull(),
|
||||
priority: varchar("priority", { length: 12 }).default("normal").notNull(), // "low", "normal", "high", "urgent"
|
||||
|
||||
type: varchar("type", { length: 12 }).notNull(),
|
||||
category: varchar("category", { length: 64 }),
|
||||
|
||||
isRead: boolean("is_read").default(false).notNull(),
|
||||
isArchived: boolean("is_archived").default(false).notNull(),
|
||||
|
||||
actionUrl: text("action_url"), // URL to navigate to when clicked
|
||||
actionType: varchar("action_type", { length: 16 }), // Type of action ("link", "function", etc.)
|
||||
actionData: json("action_data"), // Any additional data for the action
|
||||
|
||||
icon: varchar("icon", { length: 64 }), // Optional icon identifier
|
||||
|
||||
userId: text("user_id")
|
||||
.notNull()
|
||||
.references(() => user.id, { onDelete: "cascade" }),
|
||||
|
||||
// Lifecycle management
|
||||
sentAt: timestamp("sent_at").notNull(),
|
||||
readAt: timestamp("read_at"),
|
||||
expiresAt: timestamp("expires_at"),
|
||||
createdAt: timestamp("created_at").notNull(),
|
||||
updatedAt: timestamp("updated_at").notNull(),
|
||||
});
|
||||
|
||||
export const notificationsRelations = relations(notifications, ({ one }) => ({
|
||||
userAccount: one(user, {
|
||||
fields: [notifications.userId],
|
||||
references: [user.id],
|
||||
}),
|
||||
}));
|
||||
3
packages/db/schema/index.ts
Normal file
3
packages/db/schema/index.ts
Normal file
@@ -0,0 +1,3 @@
|
||||
// Barrel module: flattens every table and relation definition into the single
// `schema` namespace imported by packages/db/index.ts and drizzle.config.ts.
export * from "./auth.schema";
export * from "./better.auth.schema";
export * from "./general.schema";
|
||||
222
packages/logger/client.ts
Normal file
222
packages/logger/client.ts
Normal file
@@ -0,0 +1,222 @@
|
||||
// Severity levels accepted by the browser logger.
type LogLevel = "error" | "warn" | "info" | "http" | "debug";

/** A single structured log record, queued client-side and uploaded in batches. */
export interface LogEntry {
  level: LogLevel;
  // ISO-8601 string (produced via new Date().toISOString()).
  timestamp: string;
  // Log payload; Error instances are serialized to plain objects before queuing.
  message: any;
  // Optional structured context; defaults to {} when the entry is created.
  metadata?: any;
}
|
||||
|
||||
interface Error {
|
||||
code: string;
|
||||
message: string;
|
||||
description?: string;
|
||||
detail?: string;
|
||||
error?: any;
|
||||
actionable?: boolean;
|
||||
}
|
||||
|
||||
/**
 * Batching browser-side logger.
 *
 * In development every entry is written to the console only. In production,
 * entries are queued and shipped to the server's /api/logs endpoint in
 * batches: on a 5-second timer, when the queue reaches BATCH_SIZE_LIMIT,
 * and (best-effort, via sendBeacon) on page unload.
 */
class BrowserLogger {
  // Entries waiting to be shipped to the server (production only).
  private queue: LogEntry[] = [];
  // Handle of the periodic flush timer; null until started / after destroy().
  private timer: ReturnType<typeof setInterval> | null = null;
  private readonly BATCH_INTERVAL = 5000; // 5 seconds
  private readonly BATCH_SIZE_LIMIT = 50;
  private readonly isDev: boolean;

  constructor(isDev: boolean) {
    this.isDev = isDev;
    // Batching machinery is only needed in production; dev logs go straight
    // to the console in sendLog().
    if (!this.isDev) {
      this.startBatchTimer();
      this.setupBeforeUnloadHandler();
    }
  }

  // Periodically ship whatever has accumulated in the queue.
  private startBatchTimer() {
    this.timer = setInterval(() => this.flush(), this.BATCH_INTERVAL);
  }

  private setupBeforeUnloadHandler() {
    // Flush logs before page unload to avoid losing them.
    // Guarded so this class is safe to construct during SSR (no window).
    if (typeof window !== "undefined") {
      window.addEventListener("beforeunload", () => {
        this.flushSync();
      });
    }
  }

  /**
   * Ship the current queue to the server as one POST. On failure the batch
   * is re-queued (bounded) so a transient network error does not lose logs.
   */
  private async flush() {
    if (this.queue.length === 0) return;

    // Snapshot and clear before awaiting, so entries logged during the
    // fetch go into a fresh queue rather than this batch.
    const batch = [...this.queue];
    this.queue = [];

    try {
      // Forward batch to Hono route
      await fetch("/api/logs", {
        method: "POST",
        headers: { "Content-Type": "application/json" },
        body: JSON.stringify({ logs: batch }),
      });
    } catch (err) {
      console.error("Axiom batch upload failed", err);
      // Re-add failed logs back to queue (up to a limit to avoid memory issues)
      if (this.queue.length < this.BATCH_SIZE_LIMIT * 2) {
        this.queue.push(...batch);
      }
    }
  }

  private flushSync() {
    // Synchronous flush for beforeunload using sendBeacon
    // (fetch would be aborted when the page goes away).
    if (this.queue.length === 0) return;

    const batch = [...this.queue];
    this.queue = [];

    try {
      const blob = new Blob([JSON.stringify({ logs: batch })], {
        type: "application/json",
      });
      // Fire-and-forget; failed beacons are not re-queued (page is closing).
      navigator.sendBeacon("/api/logs", blob);
    } catch (err) {
      console.error("Failed to send logs via sendBeacon", err);
    }
  }

  /**
   * Convert an Error (and, recursively, its `cause` chain) into a plain,
   * JSON-serializable object; non-Errors are passed through unchanged.
   */
  private serializeError(error: unknown): any {
    if (error instanceof Error) {
      return {
        name: error.name,
        message: error.message,
        stack: error.stack,
        ...(error.cause && { cause: this.serializeError(error.cause) }),
      };
    }
    return error;
  }

  // Build a timestamped LogEntry, serializing Error values in both slots.
  private createLogEntry(
    level: LogLevel,
    message: any,
    metadata?: any,
  ): LogEntry {
    // Handle Error serialization for message
    const cleanMessage =
      message instanceof Error ? this.serializeError(message) : message;

    // Handle Error serialization for metadata
    const cleanMetadata =
      metadata instanceof Error
        ? this.serializeError(metadata)
        : metadata;

    return {
      level,
      timestamp: new Date().toISOString(),
      message: cleanMessage,
      metadata: cleanMetadata || {},
    };
  }

  /**
   * Route one entry: console in dev (and always for errors), and into the
   * batching queue in production — flushing early if the queue is full.
   */
  private async sendLog(entry: LogEntry) {
    // Always log errors to console, even in production (for user debugging)
    // In dev, log everything to console
    const shouldConsoleLog = this.isDev || entry.level === "error";

    if (shouldConsoleLog) {
      // console has no .http method; downgrade to .debug for that level.
      const consoleMethod =
        entry.level === "http" ? "debug" : entry.level;
      console[consoleMethod](
        `[client-${entry.level}] ${entry.timestamp}:`,
        entry.message,
        entry.metadata && Object.keys(entry.metadata).length > 0
          ? entry.metadata
          : "",
      );
    }

    // In production, add to queue for batching
    if (!this.isDev) {
      this.queue.push(entry);

      // Safety flush if queue gets too large
      if (this.queue.length >= this.BATCH_SIZE_LIMIT) {
        await this.flush();
      }
    }
  }

  // Public level methods — fire-and-forget (sendLog's promise is not awaited).
  error(message: any, metadata?: any) {
    this.sendLog(this.createLogEntry("error", message, metadata));
  }

  warn(message: any, metadata?: any) {
    this.sendLog(this.createLogEntry("warn", message, metadata));
  }

  info(message: any, metadata?: any) {
    this.sendLog(this.createLogEntry("info", message, metadata));
  }

  http(message: any, metadata?: any) {
    this.sendLog(this.createLogEntry("http", message, metadata));
  }

  debug(message: any, metadata?: any) {
    this.sendLog(this.createLogEntry("debug", message, metadata));
  }

  // Manual flush method for advanced use cases
  async forceFlush() {
    await this.flush();
  }

  // Cleanup method: stop the timer and ship any remaining entries.
  destroy() {
    if (this.timer) {
      clearInterval(this.timer);
      this.timer = null;
    }
    this.flushSync();
  }
}
|
||||
|
||||
/**
 * Factory function to create a BrowserLogger instance
 *
 * Note: despite the "Instance" name this is NOT a singleton — every call
 * constructs a fresh BrowserLogger (with its own queue and timer).
 *
 * @param isDev - Whether the app is running in development mode
 * @returns A new BrowserLogger instance
 *
 * @example
 * // SvelteKit
 * import { dev } from '$app/environment';
 * const logger = getLoggerInstance(dev);
 *
 * @example
 * // Next.js
 * const logger = getLoggerInstance(process.env.NODE_ENV === 'development');
 *
 * @example
 * // Vite
 * const logger = getLoggerInstance(import.meta.env.DEV);
 */
function getLoggerInstance(isDev: boolean): BrowserLogger {
  return new BrowserLogger(isDev);
}
|
||||
|
||||
function getError(logger: BrowserLogger, payload: Error, error?: any) {
|
||||
logger.error(payload);
|
||||
if (error) {
|
||||
logger.error(error);
|
||||
}
|
||||
return {
|
||||
code: payload.code,
|
||||
message: payload.message,
|
||||
description: payload.description,
|
||||
detail: payload.detail,
|
||||
error: error,
|
||||
actionable: payload.actionable,
|
||||
} as Error;
|
||||
}
|
||||
|
||||
export { getError, getLoggerInstance };
|
||||
143
packages/logger/index.ts
Normal file
143
packages/logger/index.ts
Normal file
@@ -0,0 +1,143 @@
|
||||
import DailyRotateFile from "winston-daily-rotate-file";
|
||||
import { settings } from "@pkg/settings";
|
||||
import type { Err } from "@pkg/result";
|
||||
import winston from "winston";
|
||||
import util from "util";
|
||||
import path from "path";
|
||||
|
||||
process.on("warning", (warning) => {
|
||||
const msg = String(warning?.message || "");
|
||||
const name = String((warning as any)?.name || "");
|
||||
|
||||
// Ignore the noisy timer warning from Node/kafkajs interplay
|
||||
if (
|
||||
name === "TimeoutNegativeWarning" ||
|
||||
msg.includes("TimeoutNegativeWarning") ||
|
||||
msg.includes("Timeout duration was set to 1")
|
||||
) {
|
||||
return;
|
||||
}
|
||||
|
||||
// Keep other warnings visible
|
||||
console.warn(warning);
|
||||
});
|
||||
|
||||
const levels = {
|
||||
error: 0,
|
||||
warn: 1,
|
||||
info: 2,
|
||||
http: 3,
|
||||
debug: 4,
|
||||
};
|
||||
|
||||
const colors = {
|
||||
error: "red",
|
||||
warn: "yellow",
|
||||
info: "green",
|
||||
http: "magenta",
|
||||
debug: "white",
|
||||
};
|
||||
|
||||
const level = () => {
|
||||
const envLevel = process.env.LOG_LEVEL?.toLowerCase();
|
||||
if (envLevel && envLevel in levels) {
|
||||
return envLevel;
|
||||
}
|
||||
return settings.isDevelopment ? "debug" : "warn";
|
||||
};
|
||||
|
||||
// Console format with colors
|
||||
// Console format with colors: `[level] timestamp: message` — Error stacks
// and plain objects are pretty-printed via util.inspect. Combine order
// matters: errors/timestamp run before colorize so the printf step sees
// fully-populated, colorized info.
const consoleFormat = winston.format.combine(
  winston.format.errors({ stack: true }),
  winston.format.timestamp({ format: "YYYY-MM-DD HH:mm:ss:ms" }),
  winston.format.colorize({ all: true }),
  winston.format.printf((info) => {
    const { level, message, timestamp, ...extra } = info;

    // Render the message: stack for Errors, inspect for objects, raw otherwise.
    let formattedMessage = "";
    if (message instanceof Error) {
      formattedMessage = message.stack || message.message;
    } else if (typeof message === "object") {
      formattedMessage = util.inspect(message, {
        depth: null,
        colors: true,
      });
    } else {
      formattedMessage = message as any as string;
    }

    // Handle extra fields (if any)
    const formattedExtra =
      Object.keys(extra).length > 0
        ? `\n${util.inspect(extra, { depth: null, colors: true })}`
        : "";

    return `[${level}] ${timestamp}: ${formattedMessage}${formattedExtra}`;
  }),
);
|
||||
|
||||
// JSON format for file logging
|
||||
// JSON format for file logging (structured, machine-parseable lines).
const fileFormat = winston.format.combine(
  winston.format.errors({ stack: true }),
  winston.format.timestamp(),
  winston.format.json(),
);

// File transport with daily rotation: logs/app-YYYY-MM-DD.log,
// rotated at 20 MB, retained for 14 days.
const fileTransport = new DailyRotateFile({
  filename: path.join("logs", "app-%DATE%.log"),
  datePattern: "YYYY-MM-DD",
  maxSize: "20m",
  maxFiles: "14d",
  format: fileFormat,
});

// Error file transport with daily rotation — error-level entries only,
// same rotation/retention policy as the main file.
const errorFileTransport = new DailyRotateFile({
  filename: path.join("logs", "error-%DATE%.log"),
  datePattern: "YYYY-MM-DD",
  maxSize: "20m",
  maxFiles: "14d",
  level: "error",
  format: fileFormat,
});

// Every log line goes to the console plus both rotating files.
const transports: winston.transport[] = [
  new winston.transports.Console({ format: consoleFormat }),
  fileTransport,
  errorFileTransport,
];

winston.addColors(colors);

// Root logger. Uncaught exceptions and unhandled promise rejections are
// also captured, to console and the main rotating file.
const logger = winston.createLogger({
  level: level(),
  levels,
  transports,
  format: fileFormat,
  exceptionHandlers: [
    new winston.transports.Console({ format: consoleFormat }),
    fileTransport,
  ],
  rejectionHandlers: [
    new winston.transports.Console({ format: consoleFormat }),
    fileTransport,
  ],
});

// Write-stream adapter for HTTP middleware (e.g. morgan-style loggers);
// routes each line through the "http" level.
const stream = { write: (message: string) => logger.http(message.trim()) };
|
||||
|
||||
function getError(payload: Err, error?: any) {
|
||||
logger.error(JSON.stringify({ payload, error }, null, 2));
|
||||
console.error(error);
|
||||
return {
|
||||
code: payload.code,
|
||||
message: payload.message,
|
||||
description: payload.description,
|
||||
detail: payload.detail,
|
||||
error: error instanceof Error ? error.message : error,
|
||||
actionable: payload.actionable,
|
||||
} as Err;
|
||||
}
|
||||
|
||||
export { getError, logger, stream };
|
||||
18
packages/logger/package.json
Normal file
18
packages/logger/package.json
Normal file
@@ -0,0 +1,18 @@
|
||||
{
|
||||
"name": "@pkg/logger",
|
||||
"module": "index.ts",
|
||||
"type": "module",
|
||||
"devDependencies": {
|
||||
"@types/bun": "latest"
|
||||
},
|
||||
"peerDependencies": {
|
||||
"typescript": "^5.0.0"
|
||||
},
|
||||
"dependencies": {
|
||||
"@axiomhq/winston": "^1.3.1",
|
||||
"@pkg/result": "workspace:*",
|
||||
"@pkg/settings": "workspace:*",
|
||||
"winston": "^3.17.0",
|
||||
"winston-daily-rotate-file": "^5.0.0"
|
||||
}
|
||||
}
|
||||
5
packages/logger/tsconfig.json
Normal file
5
packages/logger/tsconfig.json
Normal file
@@ -0,0 +1,5 @@
|
||||
{
|
||||
"compilerOptions": {
|
||||
"esModuleInterop": true,
|
||||
},
|
||||
}
|
||||
7
packages/logic/core/array.utils.ts
Normal file
7
packages/logic/core/array.utils.ts
Normal file
@@ -0,0 +1,7 @@
|
||||
export function chunk<T>(arr: T[], size: number): T[][] {
|
||||
const result = [];
|
||||
for (let i = 0; i < arr.length; i += size) {
|
||||
result.push(arr.slice(i, i + size));
|
||||
}
|
||||
return result;
|
||||
}
|
||||
264
packages/logic/core/data/countries.ts
Normal file
264
packages/logic/core/data/countries.ts
Normal file
@@ -0,0 +1,264 @@
|
||||
// Country reference list: sequential string `id`, English display `name`,
// and two-letter alpha-2 `code` (ISO 3166-1 style, plus the user-assigned
// "XK" for Kosovo). Ordered alphabetically by name (accented names sort last).
export const COUNTRIES = [
  { id: "1", name: "Afghanistan", code: "AF" },
  { id: "2", name: "Albania", code: "AL" },
  { id: "3", name: "Algeria", code: "DZ" },
  { id: "4", name: "American Samoa", code: "AS" },
  { id: "5", name: "Andorra", code: "AD" },
  { id: "6", name: "Angola", code: "AO" },
  { id: "7", name: "Anguilla", code: "AI" },
  { id: "8", name: "Antarctica", code: "AQ" },
  { id: "9", name: "Antigua and Barbuda", code: "AG" },
  { id: "10", name: "Argentina", code: "AR" },
  { id: "11", name: "Armenia", code: "AM" },
  { id: "12", name: "Aruba", code: "AW" },
  { id: "13", name: "Australia", code: "AU" },
  { id: "14", name: "Austria", code: "AT" },
  { id: "15", name: "Azerbaijan", code: "AZ" },
  { id: "16", name: "Bahamas", code: "BS" },
  { id: "17", name: "Bahrain", code: "BH" },
  { id: "18", name: "Bangladesh", code: "BD" },
  { id: "19", name: "Barbados", code: "BB" },
  { id: "20", name: "Belarus", code: "BY" },
  { id: "21", name: "Belgium", code: "BE" },
  { id: "22", name: "Belize", code: "BZ" },
  { id: "23", name: "Benin", code: "BJ" },
  { id: "24", name: "Bermuda", code: "BM" },
  { id: "25", name: "Bhutan", code: "BT" },
  { id: "26", name: "Bolivia", code: "BO" },
  { id: "27", name: "Bosnia and Herzegovina", code: "BA" },
  { id: "28", name: "Botswana", code: "BW" },
  { id: "29", name: "Bouvet Island", code: "BV" },
  { id: "30", name: "Brazil", code: "BR" },
  { id: "31", name: "British Indian Ocean Territory", code: "IO" },
  { id: "32", name: "British Virgin Islands", code: "VG" },
  { id: "33", name: "Brunei", code: "BN" },
  { id: "34", name: "Bulgaria", code: "BG" },
  { id: "35", name: "Burkina Faso", code: "BF" },
  { id: "36", name: "Burundi", code: "BI" },
  { id: "37", name: "Cambodia", code: "KH" },
  { id: "38", name: "Cameroon", code: "CM" },
  { id: "39", name: "Canada", code: "CA" },
  { id: "40", name: "Cape Verde", code: "CV" },
  { id: "41", name: "Caribbean Netherlands", code: "BQ" },
  { id: "42", name: "Cayman Islands", code: "KY" },
  { id: "43", name: "Central African Republic", code: "CF" },
  { id: "44", name: "Chad", code: "TD" },
  { id: "45", name: "Chile", code: "CL" },
  { id: "46", name: "China", code: "CN" },
  { id: "47", name: "Christmas Island", code: "CX" },
  { id: "48", name: "Cocos (Keeling) Islands", code: "CC" },
  { id: "49", name: "Colombia", code: "CO" },
  { id: "50", name: "Comoros", code: "KM" },
  { id: "51", name: "Cook Islands", code: "CK" },
  { id: "52", name: "Costa Rica", code: "CR" },
  { id: "53", name: "Croatia", code: "HR" },
  { id: "54", name: "Cuba", code: "CU" },
  { id: "55", name: "Curaçao", code: "CW" },
  { id: "56", name: "Cyprus", code: "CY" },
  { id: "57", name: "Czechia", code: "CZ" },
  { id: "58", name: "DR Congo", code: "CD" },
  { id: "59", name: "Denmark", code: "DK" },
  { id: "60", name: "Djibouti", code: "DJ" },
  { id: "61", name: "Dominica", code: "DM" },
  { id: "62", name: "Dominican Republic", code: "DO" },
  { id: "63", name: "Ecuador", code: "EC" },
  { id: "64", name: "Egypt", code: "EG" },
  { id: "65", name: "El Salvador", code: "SV" },
  { id: "66", name: "Equatorial Guinea", code: "GQ" },
  { id: "67", name: "Eritrea", code: "ER" },
  { id: "68", name: "Estonia", code: "EE" },
  { id: "69", name: "Eswatini", code: "SZ" },
  { id: "70", name: "Ethiopia", code: "ET" },
  { id: "71", name: "Falkland Islands", code: "FK" },
  { id: "72", name: "Faroe Islands", code: "FO" },
  { id: "73", name: "Fiji", code: "FJ" },
  { id: "74", name: "Finland", code: "FI" },
  { id: "75", name: "France", code: "FR" },
  { id: "76", name: "French Guiana", code: "GF" },
  { id: "77", name: "French Polynesia", code: "PF" },
  { id: "78", name: "French Southern and Antarctic Lands", code: "TF" },
  { id: "79", name: "Gabon", code: "GA" },
  { id: "80", name: "Gambia", code: "GM" },
  { id: "81", name: "Georgia", code: "GE" },
  { id: "82", name: "Germany", code: "DE" },
  { id: "83", name: "Ghana", code: "GH" },
  { id: "84", name: "Gibraltar", code: "GI" },
  { id: "85", name: "Greece", code: "GR" },
  { id: "86", name: "Greenland", code: "GL" },
  { id: "87", name: "Grenada", code: "GD" },
  { id: "88", name: "Guadeloupe", code: "GP" },
  { id: "89", name: "Guam", code: "GU" },
  { id: "90", name: "Guatemala", code: "GT" },
  { id: "91", name: "Guernsey", code: "GG" },
  { id: "92", name: "Guinea", code: "GN" },
  { id: "93", name: "Guinea-Bissau", code: "GW" },
  { id: "94", name: "Guyana", code: "GY" },
  { id: "95", name: "Haiti", code: "HT" },
  { id: "96", name: "Heard Island and McDonald Islands", code: "HM" },
  { id: "97", name: "Honduras", code: "HN" },
  { id: "98", name: "Hong Kong", code: "HK" },
  { id: "99", name: "Hungary", code: "HU" },
  { id: "100", name: "Iceland", code: "IS" },
  { id: "101", name: "India", code: "IN" },
  { id: "102", name: "Indonesia", code: "ID" },
  { id: "103", name: "Iran", code: "IR" },
  { id: "104", name: "Iraq", code: "IQ" },
  { id: "105", name: "Ireland", code: "IE" },
  { id: "106", name: "Isle of Man", code: "IM" },
  { id: "107", name: "Israel", code: "IL" },
  { id: "108", name: "Italy", code: "IT" },
  { id: "109", name: "Ivory Coast", code: "CI" },
  { id: "110", name: "Jamaica", code: "JM" },
  { id: "111", name: "Japan", code: "JP" },
  { id: "112", name: "Jersey", code: "JE" },
  { id: "113", name: "Jordan", code: "JO" },
  { id: "114", name: "Kazakhstan", code: "KZ" },
  { id: "115", name: "Kenya", code: "KE" },
  { id: "116", name: "Kiribati", code: "KI" },
  { id: "117", name: "Kosovo", code: "XK" },
  { id: "118", name: "Kuwait", code: "KW" },
  { id: "119", name: "Kyrgyzstan", code: "KG" },
  { id: "120", name: "Laos", code: "LA" },
  { id: "121", name: "Latvia", code: "LV" },
  { id: "122", name: "Lebanon", code: "LB" },
  { id: "123", name: "Lesotho", code: "LS" },
  { id: "124", name: "Liberia", code: "LR" },
  { id: "125", name: "Libya", code: "LY" },
  { id: "126", name: "Liechtenstein", code: "LI" },
  { id: "127", name: "Lithuania", code: "LT" },
  { id: "128", name: "Luxembourg", code: "LU" },
  { id: "129", name: "Macau", code: "MO" },
  { id: "130", name: "Madagascar", code: "MG" },
  { id: "131", name: "Malawi", code: "MW" },
  { id: "132", name: "Malaysia", code: "MY" },
  { id: "133", name: "Maldives", code: "MV" },
  { id: "134", name: "Mali", code: "ML" },
  { id: "135", name: "Malta", code: "MT" },
  { id: "136", name: "Marshall Islands", code: "MH" },
  { id: "137", name: "Martinique", code: "MQ" },
  { id: "138", name: "Mauritania", code: "MR" },
  { id: "139", name: "Mauritius", code: "MU" },
  { id: "140", name: "Mayotte", code: "YT" },
  { id: "141", name: "Mexico", code: "MX" },
  { id: "142", name: "Micronesia", code: "FM" },
  { id: "143", name: "Moldova", code: "MD" },
  { id: "144", name: "Monaco", code: "MC" },
  { id: "145", name: "Mongolia", code: "MN" },
  { id: "146", name: "Montenegro", code: "ME" },
  { id: "147", name: "Montserrat", code: "MS" },
  { id: "148", name: "Morocco", code: "MA" },
  { id: "149", name: "Mozambique", code: "MZ" },
  { id: "150", name: "Myanmar", code: "MM" },
  { id: "151", name: "Namibia", code: "NA" },
  { id: "152", name: "Nauru", code: "NR" },
  { id: "153", name: "Nepal", code: "NP" },
  { id: "154", name: "Netherlands", code: "NL" },
  { id: "155", name: "New Caledonia", code: "NC" },
  { id: "156", name: "New Zealand", code: "NZ" },
  { id: "157", name: "Nicaragua", code: "NI" },
  { id: "158", name: "Niger", code: "NE" },
  { id: "159", name: "Nigeria", code: "NG" },
  { id: "160", name: "Niue", code: "NU" },
  { id: "161", name: "Norfolk Island", code: "NF" },
  { id: "162", name: "North Korea", code: "KP" },
  { id: "163", name: "North Macedonia", code: "MK" },
  { id: "164", name: "Northern Mariana Islands", code: "MP" },
  { id: "165", name: "Norway", code: "NO" },
  { id: "166", name: "Oman", code: "OM" },
  { id: "167", name: "Pakistan", code: "PK" },
  { id: "168", name: "Palau", code: "PW" },
  { id: "169", name: "Palestine", code: "PS" },
  { id: "170", name: "Panama", code: "PA" },
  { id: "171", name: "Papua New Guinea", code: "PG" },
  { id: "172", name: "Paraguay", code: "PY" },
  { id: "173", name: "Peru", code: "PE" },
  { id: "174", name: "Philippines", code: "PH" },
  { id: "175", name: "Pitcairn Islands", code: "PN" },
  { id: "176", name: "Poland", code: "PL" },
  { id: "177", name: "Portugal", code: "PT" },
  { id: "178", name: "Puerto Rico", code: "PR" },
  { id: "179", name: "Qatar", code: "QA" },
  { id: "180", name: "Republic of the Congo", code: "CG" },
  { id: "181", name: "Romania", code: "RO" },
  { id: "182", name: "Russia", code: "RU" },
  { id: "183", name: "Rwanda", code: "RW" },
  { id: "184", name: "Réunion", code: "RE" },
  { id: "185", name: "Saint Barthélemy", code: "BL" },
  { id: "186", name: "Saint Helena, Ascension and Tristan da Cunha", code: "SH" },
  { id: "187", name: "Saint Kitts and Nevis", code: "KN" },
  { id: "188", name: "Saint Lucia", code: "LC" },
  { id: "189", name: "Saint Martin", code: "MF" },
  { id: "190", name: "Saint Pierre and Miquelon", code: "PM" },
  { id: "191", name: "Saint Vincent and the Grenadines", code: "VC" },
  { id: "192", name: "Samoa", code: "WS" },
  { id: "193", name: "San Marino", code: "SM" },
  { id: "194", name: "Saudi Arabia", code: "SA" },
  { id: "195", name: "Senegal", code: "SN" },
  { id: "196", name: "Serbia", code: "RS" },
  { id: "197", name: "Seychelles", code: "SC" },
  { id: "198", name: "Sierra Leone", code: "SL" },
  { id: "199", name: "Singapore", code: "SG" },
  { id: "200", name: "Sint Maarten", code: "SX" },
  { id: "201", name: "Slovakia", code: "SK" },
  { id: "202", name: "Slovenia", code: "SI" },
  { id: "203", name: "Solomon Islands", code: "SB" },
  { id: "204", name: "Somalia", code: "SO" },
  { id: "205", name: "South Africa", code: "ZA" },
  { id: "206", name: "South Georgia", code: "GS" },
  { id: "207", name: "South Korea", code: "KR" },
  { id: "208", name: "South Sudan", code: "SS" },
  { id: "209", name: "Spain", code: "ES" },
  { id: "210", name: "Sri Lanka", code: "LK" },
  { id: "211", name: "Sudan", code: "SD" },
  { id: "212", name: "Suriname", code: "SR" },
  { id: "213", name: "Svalbard and Jan Mayen", code: "SJ" },
  { id: "214", name: "Sweden", code: "SE" },
  { id: "215", name: "Switzerland", code: "CH" },
  { id: "216", name: "Syria", code: "SY" },
  { id: "217", name: "São Tomé and Príncipe", code: "ST" },
  { id: "218", name: "Taiwan", code: "TW" },
  { id: "219", name: "Tajikistan", code: "TJ" },
  { id: "220", name: "Tanzania", code: "TZ" },
  { id: "221", name: "Thailand", code: "TH" },
  { id: "222", name: "Timor-Leste", code: "TL" },
  { id: "223", name: "Togo", code: "TG" },
  { id: "224", name: "Tokelau", code: "TK" },
  { id: "225", name: "Tonga", code: "TO" },
  { id: "226", name: "Trinidad and Tobago", code: "TT" },
  { id: "227", name: "Tunisia", code: "TN" },
  { id: "228", name: "Turkey", code: "TR" },
  { id: "229", name: "Turkmenistan", code: "TM" },
  { id: "230", name: "Turks and Caicos Islands", code: "TC" },
  { id: "231", name: "Tuvalu", code: "TV" },
  { id: "232", name: "Uganda", code: "UG" },
  { id: "233", name: "Ukraine", code: "UA" },
  { id: "234", name: "United Arab Emirates", code: "AE" },
  { id: "235", name: "United Kingdom", code: "GB" },
  { id: "236", name: "United States", code: "US" },
  { id: "237", name: "United States Minor Outlying Islands", code: "UM" },
  { id: "238", name: "United States Virgin Islands", code: "VI" },
  { id: "239", name: "Uruguay", code: "UY" },
  { id: "240", name: "Uzbekistan", code: "UZ" },
  { id: "241", name: "Vanuatu", code: "VU" },
  { id: "242", name: "Vatican City", code: "VA" },
  { id: "243", name: "Venezuela", code: "VE" },
  { id: "244", name: "Vietnam", code: "VN" },
  { id: "245", name: "Wallis and Futuna", code: "WF" },
  { id: "246", name: "Western Sahara", code: "EH" },
  { id: "247", name: "Yemen", code: "YE" },
  { id: "248", name: "Zambia", code: "ZM" },
  { id: "249", name: "Zimbabwe", code: "ZW" },
  { id: "250", name: "Åland Islands", code: "AX" },
];
|
||||
|
||||
export const COUNTRIES_SELECT = COUNTRIES.map((c) => {
|
||||
return {
|
||||
id: c.id,
|
||||
label: `${c.code} (${c.name})`,
|
||||
value: c.name.toLowerCase(),
|
||||
};
|
||||
});
|
||||
1227
packages/logic/core/data/phonecc.ts
Normal file
1227
packages/logic/core/data/phonecc.ts
Normal file
File diff suppressed because it is too large
Load Diff
83
packages/logic/core/date.utils.ts
Normal file
83
packages/logic/core/date.utils.ts
Normal file
@@ -0,0 +1,83 @@
|
||||
import type { CalendarDate } from "@internationalized/date";
|
||||
|
||||
export function formatDuration(ms: number): string {
|
||||
const seconds = Math.floor(ms / 1000);
|
||||
if (seconds < 60) return `${seconds}s`;
|
||||
|
||||
const minutes = Math.floor(seconds / 60);
|
||||
const remainingSeconds = seconds % 60;
|
||||
return `${minutes}m ${remainingSeconds}s`;
|
||||
}
|
||||
|
||||
export function formatDateTimeFromIsoString(isoString: string): string {
|
||||
try {
|
||||
const date = new Date(isoString);
|
||||
return new Intl.DateTimeFormat("en-US", {
|
||||
dateStyle: "medium",
|
||||
timeStyle: "short",
|
||||
}).format(date);
|
||||
} catch (e) {
|
||||
return "Invalid date";
|
||||
}
|
||||
}
|
||||
|
||||
export function getJustDateString(d: Date): string {
|
||||
return d.toISOString().split("T")[0];
|
||||
}
|
||||
|
||||
export function formatDateTime(dateTimeStr: string) {
|
||||
const date = new Date(dateTimeStr);
|
||||
return {
|
||||
time: date.toLocaleTimeString("en-US", {
|
||||
hour: "2-digit",
|
||||
minute: "2-digit",
|
||||
hour12: false,
|
||||
}),
|
||||
date: date.toLocaleDateString("en-US", {
|
||||
weekday: "short",
|
||||
day: "2-digit",
|
||||
month: "short",
|
||||
}),
|
||||
};
|
||||
}
|
||||
|
||||
export function formatDate(dateStr: string) {
|
||||
return new Date(dateStr).toLocaleDateString("en-US", {
|
||||
weekday: "short",
|
||||
day: "2-digit",
|
||||
month: "short",
|
||||
});
|
||||
}
|
||||
|
||||
export function isTimestampMoreThan1MinAgo(ts: string): boolean {
|
||||
const lastPingedDate = new Date(ts);
|
||||
const now = new Date();
|
||||
const diff = now.getTime() - lastPingedDate.getTime();
|
||||
return diff > 60000;
|
||||
}
|
||||
|
||||
export function isTimestampOlderThan(ts: string, seconds: number): boolean {
|
||||
const lastPingedDate = new Date(ts);
|
||||
const now = new Date();
|
||||
const diff = now.getTime() - lastPingedDate.getTime();
|
||||
return diff > seconds * 1000;
|
||||
}
|
||||
|
||||
export function makeDateStringISO(ds: string): string {
|
||||
if (ds.includes("T")) {
|
||||
return `${ds.split("T")[0]}T00:00:00.000Z`;
|
||||
}
|
||||
return `${ds}T00:00:00.000Z`;
|
||||
}
|
||||
|
||||
export function parseCalDateToDateString(v: CalendarDate) {
|
||||
let month: string | number = v.month;
|
||||
if (month < 10) {
|
||||
month = `0${month}`;
|
||||
}
|
||||
let day: string | number = v.day;
|
||||
if (day < 10) {
|
||||
day = `0${day}`;
|
||||
}
|
||||
return `${v.year}-${month}-${day}`;
|
||||
}
|
||||
8
packages/logic/core/error.ts
Normal file
8
packages/logic/core/error.ts
Normal file
@@ -0,0 +1,8 @@
|
||||
// Shared structured-error shape returned by the getError helpers.
export type Err = {
  // Machine-readable error code.
  code: string;
  // Short human-readable summary.
  message: string;
  // Longer explanation of the error.
  description: string;
  // Additional technical detail.
  detail: string;
  // Presumably: whether the user/caller can act on this error — TODO confirm.
  actionable?: boolean;
  // Underlying raw error, when available.
  error?: any;
};
|
||||
5
packages/logic/core/flow.execution.context.ts
Normal file
5
packages/logic/core/flow.execution.context.ts
Normal file
@@ -0,0 +1,5 @@
|
||||
// Identifiers describing one flow execution; carried in request locals
// (see HonoContext in hono.helpers).
export type FlowExecCtx = {
  flowId: string;
  // Optional: the user the flow runs under, if any.
  userId?: string;
  // Optional: the session the flow runs under, if any.
  sessionId?: string;
};
|
||||
31
packages/logic/core/hash.utils.ts
Normal file
31
packages/logic/core/hash.utils.ts
Normal file
@@ -0,0 +1,31 @@
|
||||
import { argon2id, hash as argonHash, verify as argonVerify } from "argon2";
|
||||
|
||||
export async function hashString(target: string): Promise<string> {
|
||||
const salt = Buffer.from(crypto.getRandomValues(new Uint8Array(16))).toString(
|
||||
"hex",
|
||||
);
|
||||
const hash = await argonHash(target, {
|
||||
type: argon2id,
|
||||
salt: Buffer.from(salt, "hex"),
|
||||
hashLength: 32,
|
||||
timeCost: 3,
|
||||
memoryCost: 65536,
|
||||
parallelism: 1,
|
||||
});
|
||||
return hash;
|
||||
}
|
||||
|
||||
export async function verifyHash({
|
||||
hash,
|
||||
target,
|
||||
}: {
|
||||
hash: string;
|
||||
target: string;
|
||||
}): Promise<boolean> {
|
||||
try {
|
||||
const isValid = await argonVerify(hash, `${target}`);
|
||||
return isValid;
|
||||
} catch (err) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
9
packages/logic/core/hono.helpers.ts
Normal file
9
packages/logic/core/hono.helpers.ts
Normal file
@@ -0,0 +1,9 @@
|
||||
import type { Session, User } from "@/domains/user/data";
|
||||
import { FlowExecCtx } from "./flow.execution.context";
|
||||
import { Env } from "hono";
|
||||
|
||||
/**
 * Hono environment carrying request-scoped locals: the authenticated user,
 * their session, and the flow-execution context.
 * NOTE(review): assumed to be populated by upstream middleware — confirm
 * which middleware assigns `locals` before relying on non-null values.
 */
export interface HonoContext extends Env {
  Bindings: {
    locals: { user: User; session: Session; fCtx: FlowExecCtx };
  };
}
|
||||
12
packages/logic/core/pagination.utils.ts
Normal file
12
packages/logic/core/pagination.utils.ts
Normal file
@@ -0,0 +1,12 @@
|
||||
import * as v from "valibot";
|
||||
|
||||
// Valibot schema for cursor-based pagination parameters/metadata.
export const paginationModel = v.object({
  // Opaque pagination cursor.
  cursor: v.optional(v.string()),
  // Page size; must be an integer, capped at 100.
  limit: v.pipe(v.number(), v.integer(), v.maxValue(100)),
  // Sort direction; defaults to ascending.
  asc: v.optional(v.boolean(), true),
  // Total items across all pages; defaults to 0.
  totalItemCount: v.optional(v.pipe(v.number(), v.integer()), 0),
  totalPages: v.pipe(v.number(), v.integer()),
  page: v.pipe(v.number(), v.integer()),
});

// Parsed/validated shape of the schema above.
export type PaginationModel = v.InferOutput<typeof paginationModel>;
|
||||
40
packages/logic/core/rate.limiter.ts
Normal file
40
packages/logic/core/rate.limiter.ts
Normal file
@@ -0,0 +1,40 @@
|
||||
import { logger } from "@pkg/logger";
|
||||
|
||||
/**
 * Sliding-window rate limiter: tracks the timestamps of requests made in
 * the last 60 seconds and, once the per-minute budget is reached, sleeps
 * until the oldest tracked request ages out of the window.
 *
 * NOTE(review): requestTimestamps is mutated without synchronization, so
 * concurrent checkRateLimit() calls can race — assumed single logical
 * consumer; confirm before sharing an instance across parallel workers.
 */
export class RateLimiter {
  // Epoch-ms timestamps of requests made within the last minute.
  private requestTimestamps: number[] = [];
  private readonly callsPerMinute: number;

  // The budget is silently hard-capped at 60 calls/minute regardless of
  // the requested value.
  constructor(callsPerMinute: number = 60) {
    this.callsPerMinute = Math.min(callsPerMinute, 60);
  }

  /**
   * Call before each outbound request. Resolves immediately while under
   * budget; otherwise awaits until capacity frees up, then records the
   * current request in the window.
   */
  async checkRateLimit(): Promise<void> {
    const currentTime = Date.now();
    const oneMinuteAgo = currentTime - 60000; // 60 seconds in milliseconds

    // Remove timestamps older than 1 minute
    this.requestTimestamps = this.requestTimestamps.filter(
      (timestamp) => timestamp > oneMinuteAgo,
    );

    // If we're approaching the limit, wait until we have capacity
    if (this.requestTimestamps.length >= this.callsPerMinute) {
      // Oldest entry determines when one slot frees up.
      const oldestRequest = this.requestTimestamps[0];
      const waitTime = oldestRequest + 60000 - currentTime;

      if (waitTime > 0) {
        logger.warn(
          `Rate limit approaching (${this.requestTimestamps.length} requests in last minute). Sleeping for ${waitTime}ms`,
        );
        await new Promise((resolve) => setTimeout(resolve, waitTime));
        // After waiting, some timestamps may have expired
        this.requestTimestamps = this.requestTimestamps.filter(
          (timestamp) => timestamp > Date.now() - 60000,
        );
      }
    }

    // Add current request to timestamps
    this.requestTimestamps.push(Date.now());
  }
}
|
||||
1
packages/logic/core/settings.ts
Normal file
1
packages/logic/core/settings.ts
Normal file
@@ -0,0 +1 @@
|
||||
export { getSetting, settings } from "@pkg/settings";
|
||||
106
packages/logic/core/string.utils/index.ts
Normal file
106
packages/logic/core/string.utils/index.ts
Normal file
@@ -0,0 +1,106 @@
|
||||
import * as v from "valibot";
|
||||
|
||||
export function capitalize(input: string, firstOfAllWords?: boolean): string {
|
||||
// capitalize first letter of input
|
||||
if (!firstOfAllWords) {
|
||||
return input.charAt(0).toUpperCase() + input.slice(1);
|
||||
}
|
||||
let out = "";
|
||||
for (const word of input.split(" ")) {
|
||||
out += word.charAt(0).toUpperCase() + word.slice(1) + " ";
|
||||
}
|
||||
return out.slice(0, -1);
|
||||
}
|
||||
|
||||
export function camelToSpacedPascal(input: string): string {
|
||||
let result = "";
|
||||
let previousChar = "";
|
||||
for (const char of input) {
|
||||
if (char === char.toUpperCase() && previousChar !== " ") {
|
||||
result += " ";
|
||||
}
|
||||
result += char;
|
||||
previousChar = char;
|
||||
}
|
||||
return result.charAt(0).toUpperCase() + result.slice(1);
|
||||
}
|
||||
|
||||
export function snakeToCamel(input: string): string {
|
||||
if (!input) {
|
||||
return input;
|
||||
}
|
||||
// also account for numbers and kebab-case
|
||||
const splits = input.split(/[-_]/);
|
||||
let result = splits[0];
|
||||
for (const split of splits.slice(1)) {
|
||||
result += capitalize(split, true);
|
||||
}
|
||||
return result ?? "";
|
||||
}
|
||||
|
||||
export function snakeToSpacedPascal(input: string): string {
|
||||
return camelToSpacedPascal(snakeToCamel(input));
|
||||
}
|
||||
|
||||
export function spacedPascalToSnake(input: string): string {
|
||||
return input.split(" ").join("_").toLowerCase();
|
||||
}
|
||||
|
||||
export function convertDashedLowerToTitleCase(input: string): string {
|
||||
return input
|
||||
.split("-")
|
||||
.map(
|
||||
(word) =>
|
||||
word.charAt(0).toUpperCase() + word.slice(1).toLowerCase(),
|
||||
)
|
||||
.join(" "); // Join the words with a space
|
||||
}
|
||||
|
||||
export function encodeCursor<T>(cursor: T): string {
|
||||
try {
|
||||
// Convert the object to a JSON string
|
||||
const jsonString = JSON.stringify(cursor);
|
||||
// Convert to UTF-8 bytes, then base64
|
||||
return btoa(
|
||||
encodeURIComponent(jsonString).replace(/%([0-9A-F]{2})/g, (_, p1) =>
|
||||
String.fromCharCode(parseInt(p1, 16)),
|
||||
),
|
||||
);
|
||||
} catch (error) {
|
||||
console.error("Error encoding cursor:", error);
|
||||
throw new Error("Failed to encode cursor");
|
||||
}
|
||||
}
|
||||
|
||||
export function decodeCursor<T>(
|
||||
cursor: string,
|
||||
parser: v.BaseSchema<any, T, any>,
|
||||
) {
|
||||
try {
|
||||
// Decode base64 back to UTF-8 string
|
||||
const decoded = decodeURIComponent(
|
||||
Array.prototype.map
|
||||
.call(atob(cursor), (c) => {
|
||||
return (
|
||||
"%" + ("00" + c.charCodeAt(0).toString(16)).slice(-2)
|
||||
);
|
||||
})
|
||||
.join(""),
|
||||
);
|
||||
// Parse back to object
|
||||
const parsedData = JSON.parse(decoded);
|
||||
const result = v.safeParse(parser, parsedData);
|
||||
return result.success
|
||||
? { success: true, data: result.output as T }
|
||||
: {
|
||||
success: false,
|
||||
error: new Error(
|
||||
result.issues.map((i) => i.message).join(", "),
|
||||
),
|
||||
data: undefined,
|
||||
};
|
||||
} catch (error) {
|
||||
console.error("Error decoding cursor:", error);
|
||||
return { error: new Error("Failed to decode cursor"), data: undefined };
|
||||
}
|
||||
}
|
||||
555
packages/logic/core/string.utils/sequence.matcher.ts
Normal file
555
packages/logic/core/string.utils/sequence.matcher.ts
Normal file
@@ -0,0 +1,555 @@
|
||||
/**
|
||||
* Similar to Python's difflib.SequenceMatcher
|
||||
*
|
||||
* A flexible class for comparing pairs of sequences of any type.
|
||||
* Uses the Ratcliff-Obershelp algorithm with "gestalt pattern matching"
|
||||
* to find the longest contiguous matching subsequences.
|
||||
*/
|
||||
|
||||
/** A contiguous matching block between the two compared sequences. */
export interface Match {
    /** Starting position in sequence a */
    a: number;
    /** Starting position in sequence b */
    b: number;
    /** Length of the matching block */
    size: number;
}

/** Edit-operation tag, mirroring Python difflib's opcode tags. */
export type OpCode = "replace" | "delete" | "insert" | "equal";

/** One edit step: apply `tag` to the slices a[i1:i2] and b[j1:j2]. */
export interface OpCodeTuple {
    /** Operation type */
    tag: OpCode;
    /** Start index in sequence a */
    i1: number;
    /** End index in sequence a */
    i2: number;
    /** Start index in sequence b */
    j1: number;
    /** End index in sequence b */
    j2: number;
}

/** Predicate marking elements to be treated as junk (not used as match anchors). */
export type JunkFunction<T> = (element: T) => boolean;
|
||||
|
||||
export class SequenceMatcher<T> {
    // Optional caller-supplied junk predicate applied to elements of b.
    private isjunk: JunkFunction<T> | null;
    // Private copies of the two sequences (constructor/setters copy inputs).
    private a: T[];
    private b: T[];
    // When true, elements occurring in >1% of a long b are auto-junked.
    private autojunk: boolean;

    // Cached data structures for sequence b
    private bjunk: Set<T>;
    private bpopular: Set<T>;
    private b2j: Map<T, number[]>;

    // Cached results
    private fullbcount: Map<T, number> | null = null;
    private matchingBlocks: Match[] | null = null;
    private opcodes: OpCodeTuple[] | null = null;

    constructor(
        isjunk: JunkFunction<T> | null = null,
        a: T[] = [],
        b: T[] = [],
        autojunk: boolean = true,
    ) {
        this.isjunk = isjunk;
        this.a = [];
        this.b = [];
        this.autojunk = autojunk;
        this.bjunk = new Set();
        this.bpopular = new Set();
        this.b2j = new Map();

        // Delegates copying and index building to the setters below.
        this.setSeqs(a, b);
    }

    /**
     * Set both sequences to be compared
     */
    setSeqs(a: T[], b: T[]): void {
        this.setSeq1(a);
        this.setSeq2(b);
    }

    /**
     * Set the first sequence to be compared
     */
    setSeq1(a: T[]): void {
        // NOTE(review): identity check — since this.a stores a copy, passing
        // an equal-but-different array never short-circuits here.
        if (a === this.a) return;
        this.a = [...a];
        // Invalidate caches that depend on sequence a.
        this.matchingBlocks = null;
        this.opcodes = null;
    }

    /**
     * Set the second sequence to be compared
     */
    setSeq2(b: T[]): void {
        if (b === this.b) return;
        this.b = [...b];
        this.matchingBlocks = null;
        this.opcodes = null;
        this.fullbcount = null;
        // Rebuild element->positions index for the new b.
        this.chainB();
    }

    /**
     * Analyze sequence b and build lookup structures
     */
    private chainB(): void {
        const b = this.b;
        this.bjunk = new Set();
        this.bpopular = new Set();
        this.b2j = new Map();

        // Count occurrences of each element
        const elementCounts = new Map<T, number>();
        for (const element of b) {
            elementCounts.set(element, (elementCounts.get(element) || 0) + 1);
        }

        // Determine junk and popular elements
        const n = b.length;
        const popularThreshold = Math.floor(n / 100) + 1; // > 1% of sequence

        for (const [element, count] of elementCounts) {
            if (this.isjunk && this.isjunk(element)) {
                this.bjunk.add(element);
            } else if (this.autojunk && n >= 200 && count > popularThreshold) {
                // Auto-junk only kicks in for sequences of 200+ elements.
                this.bpopular.add(element);
            }
        }

        // Build position mapping for non-junk, non-popular elements
        for (let i = 0; i < b.length; i++) {
            const element = b[i];
            if (!this.bjunk.has(element) && !this.bpopular.has(element)) {
                if (!this.b2j.has(element)) {
                    this.b2j.set(element, []);
                }
                // Positions are appended in index order, so each list is
                // sorted ascending — findLongestMatch relies on this.
                this.b2j.get(element)!.push(i);
            }
        }
    }

    /**
     * Find the longest matching block in a[alo:ahi] and b[blo:bhi]
     */
    findLongestMatch(
        alo: number = 0,
        ahi: number | null = null,
        blo: number = 0,
        bhi: number | null = null,
    ): Match {
        if (ahi === null) ahi = this.a.length;
        if (bhi === null) bhi = this.b.length;

        let besti = alo;
        let bestj = blo;
        let bestsize = 0;

        // Find all positions where a[i] appears in b
        // j2len[j] = length of the longest run of matches ending at a[i-1], b[j].
        const j2len = new Map<number, number>();

        for (let i = alo; i < ahi; i++) {
            const element = this.a[i];
            const positions = this.b2j.get(element) || [];
            const newj2len = new Map<number, number>();

            for (const j of positions) {
                if (j < blo) continue;
                // Positions are ascending (see chainB), so we can stop early.
                if (j >= bhi) break;

                // Extend the run ending at (i-1, j-1) by one element.
                const prevLen = j2len.get(j - 1) || 0;
                const k = prevLen + 1;
                newj2len.set(j, k);

                if (k > bestsize) {
                    besti = i - k + 1;
                    bestj = j - k + 1;
                    bestsize = k;
                }
            }

            // Roll the DP row forward (runs ending at row i replace row i-1).
            j2len.clear();
            for (const [key, value] of newj2len) {
                j2len.set(key, value);
            }
        }

        // Extend the match with equal NON-junk elements on the left...
        while (
            besti > alo &&
            bestj > blo &&
            !this.isBJunk(this.b[bestj - 1]) &&
            this.elementsEqual(this.a[besti - 1], this.b[bestj - 1])
        ) {
            besti--;
            bestj--;
            bestsize++;
        }

        // ...and on the right.
        while (
            besti + bestsize < ahi &&
            bestj + bestsize < bhi &&
            !this.isBJunk(this.b[bestj + bestsize]) &&
            this.elementsEqual(this.a[besti + bestsize], this.b[bestj + bestsize])
        ) {
            bestsize++;
        }

        // Extend match with junk elements at the beginning
        while (besti > alo && bestj > blo && this.isBJunk(this.b[bestj - 1])) {
            // NOTE(review): unlike difflib, the junk extension here does not
            // re-check a[besti-1] === b[bestj-1] — confirm this is intended.
            besti--;
            bestj--;
            bestsize++;
        }

        // Extend match with junk elements at the end
        while (
            besti + bestsize < ahi &&
            bestj + bestsize < bhi &&
            this.isBJunk(this.b[bestj + bestsize])
        ) {
            bestsize++;
        }

        return { a: besti, b: bestj, size: bestsize };
    }

    /**
     * Return list of non-overlapping matching blocks
     */
    getMatchingBlocks(): Match[] {
        if (this.matchingBlocks !== null) {
            return this.matchingBlocks;
        }

        const matches: Match[] = [];
        this.getMatchingBlocksRecursive(
            0,
            this.a.length,
            0,
            this.b.length,
            matches,
        );

        // Add sentinel
        matches.push({ a: this.a.length, b: this.b.length, size: 0 });

        this.matchingBlocks = matches;
        return matches;
    }

    /**
     * Recursively find matching blocks
     */
    private getMatchingBlocksRecursive(
        alo: number,
        ahi: number,
        blo: number,
        bhi: number,
        matches: Match[],
    ): void {
        const match = this.findLongestMatch(alo, ahi, blo, bhi);

        if (match.size > 0) {
            // Recurse on the pieces before and after the match
            if (alo < match.a && blo < match.b) {
                this.getMatchingBlocksRecursive(
                    alo,
                    match.a,
                    blo,
                    match.b,
                    matches,
                );
            }

            // In-order recursion keeps `matches` sorted by position.
            matches.push(match);

            if (match.a + match.size < ahi && match.b + match.size < bhi) {
                this.getMatchingBlocksRecursive(
                    match.a + match.size,
                    ahi,
                    match.b + match.size,
                    bhi,
                    matches,
                );
            }
        }
    }

    /**
     * Return list of 5-tuples describing how to turn a into b
     */
    getOpcodes(): OpCodeTuple[] {
        if (this.opcodes !== null) {
            return this.opcodes;
        }

        // (i, j) track how far into a and b we have already emitted opcodes.
        let i = 0;
        let j = 0;
        const opcodes: OpCodeTuple[] = [];

        for (const match of this.getMatchingBlocks()) {
            let tag: OpCode = "equal";

            // The gap between the previous block and this one determines
            // whether material was replaced, deleted, or inserted.
            if (i < match.a && j < match.b) {
                tag = "replace";
            } else if (i < match.a) {
                tag = "delete";
            } else if (j < match.b) {
                tag = "insert";
            }

            if (tag !== "equal") {
                opcodes.push({
                    tag,
                    i1: i,
                    i2: match.a,
                    j1: j,
                    j2: match.b,
                });
            }

            i = match.a + match.size;
            j = match.b + match.size;

            // Don't add the sentinel match
            if (match.size > 0) {
                opcodes.push({
                    tag: "equal",
                    i1: match.a,
                    i2: i,
                    j1: match.b,
                    j2: j,
                });
            }
        }

        this.opcodes = opcodes;
        return opcodes;
    }

    /**
     * Return a measure of sequences' similarity (0.0-1.0)
     */
    ratio(): number {
        const matches = this.getMatchingBlocks()
            .slice(0, -1) // Exclude sentinel
            .reduce((sum, match) => sum + match.size, 0);

        // 2*M / T, as in difflib; two empty sequences count as identical.
        const total = this.a.length + this.b.length;
        return total === 0 ? 1.0 : (2.0 * matches) / total;
    }

    /**
     * Return an upper bound on ratio() relatively quickly
     */
    quickRatio(): number {
        // Lazily build (and cache) a multiset count of b's elements.
        if (this.fullbcount === null) {
            this.fullbcount = new Map();
            for (const element of this.b) {
                this.fullbcount.set(
                    element,
                    (this.fullbcount.get(element) || 0) + 1,
                );
            }
        }

        // Count multiset-intersection size of a and b (ignores ordering).
        let matches = 0;
        const tempCounts = new Map(this.fullbcount);

        for (const element of this.a) {
            const count = tempCounts.get(element);
            if (count && count > 0) {
                matches++;
                tempCounts.set(element, count - 1);
            }
        }

        const total = this.a.length + this.b.length;
        return total === 0 ? 1.0 : (2.0 * matches) / total;
    }

    /**
     * Return an upper bound on ratio() very quickly
     */
    realQuickRatio(): number {
        // Only uses lengths: matches can never exceed min(|a|, |b|).
        const total = this.a.length + this.b.length;
        return total === 0
            ? 1.0
            : (2.0 * Math.min(this.a.length, this.b.length)) / total;
    }

    /**
     * Check if element is junk in sequence b
     */
    private isBJunk(element: T): boolean {
        return this.bjunk.has(element);
    }

    /**
     * Check if two elements are equal
     */
    private elementsEqual(a: T, b: T): boolean {
        // Strict equality: objects compare by identity, not structure.
        return a === b;
    }
}
|
||||
|
||||
/**
|
||||
* Utility function to get close matches similar to Python's get_close_matches
|
||||
*/
|
||||
export function getCloseMatches<T>(
|
||||
word: T[],
|
||||
possibilities: T[][],
|
||||
n: number = 3,
|
||||
cutoff: number = 0.6,
|
||||
): T[][] {
|
||||
if (n <= 0) {
|
||||
throw new Error("n must be greater than 0");
|
||||
}
|
||||
|
||||
const matches: Array<{ sequence: T[]; ratio: number }> = [];
|
||||
|
||||
for (const possibility of possibilities) {
|
||||
const matcher = new SequenceMatcher(null, word, possibility);
|
||||
const ratio = matcher.ratio();
|
||||
|
||||
if (ratio >= cutoff) {
|
||||
matches.push({ sequence: possibility, ratio });
|
||||
}
|
||||
}
|
||||
|
||||
// Sort by ratio (descending) and take top n
|
||||
matches.sort((a, b) => b.ratio - a.ratio);
|
||||
return matches.slice(0, n).map((match) => match.sequence);
|
||||
}
|
||||
|
||||
/**
|
||||
* String-specific version of SequenceMatcher for character-by-character comparison.
|
||||
* This class treats strings as sequences of characters while providing a string-friendly API.
|
||||
*/
|
||||
export class StringSequenceMatcher {
|
||||
private matcher: SequenceMatcher<string>;
|
||||
|
||||
constructor(
|
||||
isjunk: JunkFunction<string> | null = null,
|
||||
a: string = "",
|
||||
b: string = "",
|
||||
autojunk: boolean = true,
|
||||
) {
|
||||
this.matcher = new SequenceMatcher(
|
||||
isjunk,
|
||||
Array.from(a),
|
||||
Array.from(b),
|
||||
autojunk,
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
* Set both sequences to be compared
|
||||
*/
|
||||
setSeqs(a: string, b: string): void {
|
||||
this.matcher.setSeqs(Array.from(a), Array.from(b));
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the first sequence to be compared
|
||||
*/
|
||||
setSeq1(a: string): void {
|
||||
this.matcher.setSeq1(Array.from(a));
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the second sequence to be compared
|
||||
*/
|
||||
setSeq2(b: string): void {
|
||||
this.matcher.setSeq2(Array.from(b));
|
||||
}
|
||||
|
||||
/**
|
||||
* Find the longest matching block in a[alo:ahi] and b[blo:bhi]
|
||||
*/
|
||||
findLongestMatch(
|
||||
alo: number = 0,
|
||||
ahi: number | null = null,
|
||||
blo: number = 0,
|
||||
bhi: number | null = null,
|
||||
): Match {
|
||||
return this.matcher.findLongestMatch(alo, ahi, blo, bhi);
|
||||
}
|
||||
|
||||
/**
|
||||
* Return list of non-overlapping matching blocks
|
||||
*/
|
||||
getMatchingBlocks(): Match[] {
|
||||
return this.matcher.getMatchingBlocks();
|
||||
}
|
||||
|
||||
/**
|
||||
* Return list of 5-tuples describing how to turn a into b
|
||||
*/
|
||||
getOpcodes(): OpCodeTuple[] {
|
||||
return this.matcher.getOpcodes();
|
||||
}
|
||||
|
||||
/**
|
||||
* Return a measure of sequences' similarity (0.0-1.0)
|
||||
*/
|
||||
ratio(): number {
|
||||
return this.matcher.ratio();
|
||||
}
|
||||
|
||||
/**
|
||||
* Return an upper bound on ratio() relatively quickly
|
||||
*/
|
||||
quickRatio(): number {
|
||||
return this.matcher.quickRatio();
|
||||
}
|
||||
|
||||
/**
|
||||
* Return an upper bound on ratio() very quickly
|
||||
*/
|
||||
realQuickRatio(): number {
|
||||
return this.matcher.realQuickRatio();
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Utility function for string similarity
|
||||
*/
|
||||
export function getStringSimilarity(a: string, b: string): number {
|
||||
const matcher = new StringSequenceMatcher(null, a, b);
|
||||
return matcher.ratio();
|
||||
}
|
||||
|
||||
/**
|
||||
* Get close string matches
|
||||
*/
|
||||
export function getCloseStringMatches(
|
||||
word: string,
|
||||
possibilities: string[],
|
||||
n: number = 3,
|
||||
cutoff: number = 0.6,
|
||||
): string[] {
|
||||
if (n <= 0) {
|
||||
throw new Error("n must be greater than 0");
|
||||
}
|
||||
|
||||
const matches: Array<{ string: string; ratio: number }> = [];
|
||||
|
||||
for (const possibility of possibilities) {
|
||||
const ratio = getStringSimilarity(word, possibility);
|
||||
|
||||
if (ratio >= cutoff) {
|
||||
matches.push({ string: possibility, ratio });
|
||||
}
|
||||
}
|
||||
|
||||
// Sort by ratio (descending) and take top n
|
||||
matches.sort((a, b) => b.ratio - a.ratio);
|
||||
return matches.slice(0, n).map((match) => match.string);
|
||||
}
|
||||
349
packages/logic/domains/2fa/controller.ts
Normal file
349
packages/logic/domains/2fa/controller.ts
Normal file
@@ -0,0 +1,349 @@
|
||||
import { errAsync, okAsync, ResultAsync } from "neverthrow";
|
||||
import { FlowExecCtx } from "@core/flow.execution.context";
|
||||
import { UserRepository } from "@domains/user/repository";
|
||||
import { getRedisInstance, Redis } from "@pkg/redis";
|
||||
import { TwofaRepository } from "./repository";
|
||||
import { auth } from "../auth/config.base";
|
||||
import type { TwoFaSession } from "./data";
|
||||
import { User } from "@domains/user/data";
|
||||
import { settings } from "@core/settings";
|
||||
import { type Err } from "@pkg/result";
|
||||
import { twofaErrors } from "./errors";
|
||||
import { logger } from "@pkg/logger";
|
||||
import { db } from "@pkg/db";
|
||||
|
||||
/**
 * Orchestrates TOTP-based two-factor authentication: setup/enable/disable,
 * backup codes, and the challenge-session verification flow. All methods
 * return neverthrow Result/ResultAsync values; errors come from
 * `twofaErrors`.
 */
export class TwofaController {
    constructor(
        // Persistence for 2FA secrets, backup codes, and challenge sessions.
        private twofaRepo: TwofaRepository,
        // User lookups, bans, and last-verified timestamps.
        private userRepo: UserRepository,
        // Redis, used for the `initial_2fa_completed:<sessionId>` flags.
        private store: Redis,
    ) {}

    // Delegate TOTP validation to the repository.
    // NOTE(review): used in boolean contexts below (`if (!this.checkTotp(...))`);
    // confirm the repo returns a plain boolean and not a Result wrapper, which
    // would always be truthy.
    checkTotp(secret: string, code: string) {
        return this.twofaRepo.checkTotp(secret, code);
    }

    // True when a (verified) 2FA record exists for the user; any repo error
    // is treated as "not enabled".
    is2faEnabled(fctx: FlowExecCtx, userId: string) {
        return this.twofaRepo
            .getUsers2FAInfo(fctx, userId, true)
            .map((data) => !!data)
            .orElse(() => okAsync(false));
    }

    // Ban check; lookup failures are logged and treated as "not banned".
    isUserBanned(fctx: FlowExecCtx, userId: string) {
        return this.userRepo.isUserBanned(fctx, userId).orElse((error) => {
            logger.error("Error checking user ban status:", error);
            return okAsync(false);
        });
    }

    // Create a pending 2FA secret for the user and return the otpauth:// URI
    // for the authenticator app. Fails if 2FA is already enabled.
    setup2FA(fctx: FlowExecCtx, user: User) {
        return this.is2faEnabled(fctx, user.id)
            .andThen((enabled) =>
                enabled
                    ? errAsync(twofaErrors.alreadyEnabled(fctx))
                    : this.twofaRepo.setup(fctx, user.id),
            )
            .map((secret) => {
                const appName = settings.appName;
                const totpUri = `otpauth://totp/${appName}:${user.email}?secret=${secret}&issuer=${appName}`;
                return { totpURI: totpUri, secret };
            });
    }

    // Confirm the first TOTP code and activate 2FA. On success, all other
    // sessions are revoked and the last-verified timestamp is bumped.
    verifyAndEnable2FA(
        fctx: FlowExecCtx,
        user: User,
        code: string,
        headers: Headers,
    ) {
        return this.is2faEnabled(fctx, user.id)
            .andThen((enabled) => {
                if (enabled) {
                    return errAsync(twofaErrors.alreadyEnabled(fctx));
                }
                return okAsync(undefined);
            })
            .andThen(() => {
                // NOTE(review): this logs the raw TOTP code — consider
                // whether that belongs in logs.
                logger.info(`Verifying 2fa for ${user.id} : ${code}`, {
                    flowId: fctx.flowId,
                });
                return this.twofaRepo.verifyAndEnable2FA(fctx, user.id, code);
            })
            .andThen((verified) => {
                if (verified) {
                    return ResultAsync.combine([
                        ResultAsync.fromPromise(
                            auth.api.revokeOtherSessions({ headers }),
                            () => twofaErrors.revokeSessionsFailed(fctx),
                        ),
                        this.userRepo.updateLastVerified2FaAtToNow(
                            fctx,
                            user.id,
                        ),
                    ]).map(() => true);
                }
                return okAsync(verified);
            });
    }

    // Disable 2FA after re-validating the current TOTP code against the
    // stored secret.
    disable(fctx: FlowExecCtx, user: User, code: string) {
        return this.is2faEnabled(fctx, user.id)
            .andThen((enabled) => {
                if (!enabled) {
                    return errAsync(twofaErrors.notEnabled(fctx));
                }
                return okAsync(undefined);
            })
            .andThen(() => this.twofaRepo.get2FASecret(fctx, user.id))
            .andThen((secret) => {
                if (!secret) {
                    return errAsync(twofaErrors.invalidSetup(fctx));
                }
                if (!this.checkTotp(secret, code)) {
                    return errAsync(twofaErrors.invalidCode(fctx));
                }
                return okAsync(undefined);
            })
            .andThen(() => this.twofaRepo.disable(fctx, user.id));
    }

    // Generate backup codes; only allowed once 2FA is enabled.
    generateBackupCodes(fctx: FlowExecCtx, user: User) {
        return this.is2faEnabled(fctx, user.id)
            .andThen((enabled) => {
                if (!enabled) {
                    return errAsync(twofaErrors.notEnabled(fctx));
                }
                return okAsync(undefined);
            })
            .andThen(() => this.twofaRepo.generateBackupCodes(fctx, user.id));
    }

    // Whether this auth session still needs its initial 2FA challenge.
    // Fails open to "verification required" on Redis errors.
    requiresInitialVerification(
        fctx: FlowExecCtx,
        user: User,
        sessionId: string,
    ) {
        return this.is2faEnabled(fctx, user.id).andThen((enabled) => {
            if (!enabled) {
                return okAsync(false);
            }

            return ResultAsync.fromPromise(
                this.store.get(`initial_2fa_completed:${sessionId}`),
                () => null,
            )
                // NOTE(review): `completed !== "0"` is dead — "0" is truthy,
                // so `!completed` is already false for it. Verification is
                // required only when the key is absent/empty.
                .map((completed) => !completed && completed !== "0")
                .orElse(() => okAsync(true));
        });
    }

    // Whether a fresh 2FA check is needed before a sensitive action,
    // based on the age of the last successful verification.
    requiresSensitiveActionVerification(fctx: FlowExecCtx, user: User) {
        return this.is2faEnabled(fctx, user.id).andThen((enabled) => {
            if (!enabled) {
                return okAsync(false);
            }

            if (!user.last2FAVerifiedAt) {
                return okAsync(true);
            }

            // Re-verification window (hours); falls back to 24 when the
            // setting is unset/zero.
            const requiredHours = settings.twofaRequiredHours || 24;
            const verificationAge =
                Date.now() - user.last2FAVerifiedAt.getTime();
            const maxAge = requiredHours * 60 * 60 * 1000;

            return okAsync(verificationAge > maxAge);
        });
    }

    // Flag the session as having completed its initial 2FA challenge.
    // Key expires after 7 days; Redis failures are logged and swallowed
    // (best-effort).
    markInitialVerificationComplete(sessionId: string) {
        return ResultAsync.fromPromise(
            this.store.setex(
                `initial_2fa_completed:${sessionId}`,
                60 * 60 * 24 * 7,
                "true",
            ),
            () => null,
        )
            .map(() => undefined)
            .orElse((error) => {
                logger.error("Error marking initial 2FA as complete:", error);
                return okAsync(undefined);
            });
    }

    // Open a 2FA challenge session; returns only the token the client must
    // echo back in verifyCode.
    startVerification(
        fctx: FlowExecCtx,
        params: {
            userId: string;
            sessionId: string;
            ipAddress?: string;
            userAgent?: string;
        },
    ) {
        return this.twofaRepo.createSession(fctx, params).map((session) => ({
            verificationToken: session.verificationToken,
        }));
    }

    // Reject sessions that are no longer pending; lazily mark expired ones.
    private validateSession(fctx: FlowExecCtx, session: TwoFaSession) {
        if (session.status !== "pending") {
            return errAsync(twofaErrors.sessionNotActive(fctx));
        }

        if (session.expiresAt < new Date()) {
            return this.twofaRepo
                .updateSession(fctx, session.id, { status: "expired" })
                .andThen(() => errAsync(twofaErrors.sessionExpired(fctx)));
        }

        return okAsync(session);
    }

    // Fail the session and temporarily ban the user (1 hour) after too many
    // bad codes.
    private handleMaxAttempts(
        fctx: FlowExecCtx,
        session: TwoFaSession,
        userId: string,
    ) {
        const banExpiresAt = new Date();
        banExpiresAt.setHours(banExpiresAt.getHours() + 1);

        return this.twofaRepo
            .updateSession(fctx, session.id, { status: "failed" })
            .andThen(() =>
                this.userRepo.banUser(
                    fctx,
                    userId,
                    "Too many failed 2FA verification attempts",
                    banExpiresAt,
                ),
            )
            .andThen(() => errAsync(twofaErrors.tooManyAttempts(fctx)));
    }

    // Gate: escalate to handleMaxAttempts once the attempt budget is spent.
    private checkAttemptsLimit(
        fctx: FlowExecCtx,
        session: TwoFaSession,
        userId: string,
    ) {
        if (session.attempts >= session.maxAttempts) {
            return this.handleMaxAttempts(fctx, session, userId);
        }
        return okAsync(session);
    }

    // Reject re-use of the code that already succeeded for this session;
    // a replay still burns an attempt.
    private checkCodeReplay(
        fctx: FlowExecCtx,
        session: TwoFaSession,
        code: string,
    ): ResultAsync<TwoFaSession, Err> {
        if (session.codeUsed === code) {
            return this.twofaRepo
                .incrementAttempts(fctx, session.id)
                .andThen(() => errAsync(twofaErrors.codeReplay(fctx)));
        }
        return okAsync(session);
    }

    // Validate the TOTP code against the stored secret; a wrong code burns
    // an attempt.
    private verifyTotpCode(
        fctx: FlowExecCtx,
        session: TwoFaSession,
        userId: string,
        code: string,
    ) {
        return this.twofaRepo.get2FASecret(fctx, userId).andThen((secret) => {
            if (!secret) {
                return errAsync(twofaErrors.invalidSetup(fctx));
            }

            if (!this.checkTotp(secret, code)) {
                return this.twofaRepo
                    .incrementAttempts(fctx, session.id)
                    .andThen(() => errAsync(twofaErrors.invalidCode(fctx)));
            }

            return okAsync(session);
        });
    }

    // Record success: mark the session verified (storing the code for replay
    // detection), bump the user's last-verified time, and set the
    // initial-verification flag in Redis.
    private completeVerification(
        fctx: FlowExecCtx,
        session: TwoFaSession,
        userId: string,
        code: string,
    ) {
        return this.twofaRepo
            .updateSession(fctx, session.id, {
                status: "verified",
                verifiedAt: new Date(),
                codeUsed: code,
            })
            .andThen(() =>
                ResultAsync.combine([
                    this.userRepo.updateLastVerified2FaAtToNow(fctx, userId),
                    this.markInitialVerificationComplete(session.sessionId),
                ]),
            )
            .map(() => undefined);
    }

    // Full verification pipeline: enabled-check -> session lookup ->
    // validity -> attempts limit -> replay check -> TOTP check -> completion.
    verifyCode(
        fctx: FlowExecCtx,
        params: { verificationSessToken: string; code: string },
        user?: User,
    ) {
        if (!user) {
            return errAsync(twofaErrors.userNotFound(fctx));
        }

        return this.is2faEnabled(fctx, user.id)
            .andThen((enabled) => {
                if (!enabled) {
                    return errAsync(
                        twofaErrors.notEnabledForVerification(fctx),
                    );
                }
                return okAsync(undefined);
            })
            .andThen(() =>
                this.twofaRepo.getSessionByToken(
                    fctx,
                    params.verificationSessToken,
                ),
            )
            .andThen((session) => {
                if (!session) {
                    return errAsync(twofaErrors.sessionNotFound(fctx));
                }
                return okAsync(session);
            })
            .andThen((session) => this.validateSession(fctx, session))
            .andThen((session) =>
                this.checkAttemptsLimit(fctx, session, user.id),
            )
            .andThen((session) =>
                this.checkCodeReplay(fctx, session, params.code),
            )
            .andThen((session) =>
                this.verifyTotpCode(fctx, session, user.id, params.code),
            )
            .andThen((session) =>
                this.completeVerification(fctx, session, user.id, params.code),
            )
            .map(() => ({ success: true }));
    }

    // Maintenance hook: expire stale challenge sessions.
    cleanupExpiredSessions(fctx: FlowExecCtx) {
        return this.twofaRepo.cleanupExpiredSessions(fctx);
    }
}
|
||||
|
||||
export function getTwofaController() {
|
||||
const _redis = getRedisInstance();
|
||||
return new TwofaController(
|
||||
new TwofaRepository(db, _redis),
|
||||
new UserRepository(db),
|
||||
_redis,
|
||||
);
|
||||
}
|
||||
48
packages/logic/domains/2fa/data.ts
Normal file
48
packages/logic/domains/2fa/data.ts
Normal file
@@ -0,0 +1,48 @@
|
||||
import * as v from "valibot";
|
||||
|
||||
// Request payload for opening a 2FA challenge session.
export const startVerificationSchema = v.object({
    userId: v.string(),
    sessionId: v.string(),
});

// Request payload for submitting a TOTP code against an issued token.
export const verifyCodeSchema = v.object({
    verificationToken: v.string(),
    code: v.string(),
});

// Request payload for confirming the first code during 2FA enablement.
export const enable2FACodeSchema = v.object({
    code: v.string(),
});

// Request payload for disabling 2FA (current code required).
export const disable2FASchema = v.object({
    code: v.string(),
});

// Row shape of the "two_factor" table: per-user TOTP secret + backup codes.
// NOTE(review): the DB column "backup_codes" is a nullable json column —
// confirm rows always carry codes before parsing with this schema, which
// requires a string array.
export const twoFactorSchema = v.object({
    id: v.string(),
    secret: v.string(),
    backupCodes: v.array(v.string()),
    userId: v.string(),
    createdAt: v.date(),
    updatedAt: v.date(),
});
export type TwoFactor = v.InferOutput<typeof twoFactorSchema>;

// Lifecycle states of a 2FA challenge session (mirrors the picklist below).
export type TwoFaSessionStatus = "pending" | "verified" | "failed" | "expired";

// Row shape of the "twofa_sessions" table: one challenge attempt window,
// including attempt accounting, replay tracking (codeUsed), and audit info.
export const twoFaSessionSchema = v.object({
    id: v.string(),
    userId: v.string(),
    sessionId: v.string(),
    verificationToken: v.string(),
    codeUsed: v.optional(v.string()),
    status: v.picklist(["pending", "verified", "failed", "expired"]),
    attempts: v.number(),
    maxAttempts: v.number(),
    verifiedAt: v.optional(v.date()),
    expiresAt: v.date(),
    createdAt: v.date(),
    ipAddress: v.string(),
    userAgent: v.string(),
});
export type TwoFaSession = v.InferOutput<typeof twoFaSessionSchema>;
|
||||
180
packages/logic/domains/2fa/errors.ts
Normal file
180
packages/logic/domains/2fa/errors.ts
Normal file
@@ -0,0 +1,180 @@
|
||||
import { FlowExecCtx } from "@/core/flow.execution.context";
|
||||
import { ERROR_CODES, type Err } from "@pkg/result";
|
||||
import { getError } from "@pkg/logger";
|
||||
|
||||
/**
 * Error factories for the 2FA domain. Each factory stamps the current
 * flow id and returns a structured `Err` via `getError`:
 * `message`/`description` are user-facing, `detail` is for logs.
 */
export const twofaErrors = {
    dbError: (fctx: FlowExecCtx, detail: string): Err =>
        getError({
            flowId: fctx.flowId,
            code: ERROR_CODES.DATABASE_ERROR,
            message: "Database operation failed",
            description: "Please try again later",
            detail,
        }),

    alreadyEnabled: (fctx: FlowExecCtx): Err =>
        getError({
            flowId: fctx.flowId,
            code: ERROR_CODES.AUTH_ERROR,
            message: "2FA already enabled",
            description: "Disable it first if you want to re-enable it",
            detail: "2FA already enabled",
        }),

    notEnabled: (fctx: FlowExecCtx): Err =>
        getError({
            flowId: fctx.flowId,
            code: ERROR_CODES.AUTH_ERROR,
            message: "2FA not enabled for this user",
            description: "Enable 2FA to perform this action",
            detail: "2FA not enabled for this user",
        }),

    userNotFound: (fctx: FlowExecCtx): Err =>
        getError({
            flowId: fctx.flowId,
            code: ERROR_CODES.AUTH_ERROR,
            message: "User not found",
            description: "Session is invalid or expired",
            detail: "User ID not found in database",
        }),

    // Challenge session exists but its status is no longer "pending".
    sessionNotActive: (fctx: FlowExecCtx): Err =>
        getError({
            flowId: fctx.flowId,
            code: ERROR_CODES.AUTH_ERROR,
            message: "Verification session is no longer active",
            description: "Please request a new verification code",
            detail: "Session status is not 'pending'",
        }),

    sessionExpired: (fctx: FlowExecCtx): Err =>
        getError({
            flowId: fctx.flowId,
            code: ERROR_CODES.AUTH_ERROR,
            message: "Verification session has expired",
            description: "Please request a new verification code",
            detail: "Session expired timestamp passed",
        }),

    sessionNotFound: (fctx: FlowExecCtx): Err =>
        getError({
            flowId: fctx.flowId,
            code: ERROR_CODES.NOT_FOUND,
            message: "Invalid or expired verification session",
            description: "Your verification session has expired or is invalid",
            detail: "Session not found by verification token",
        }),

    // Attempt budget exhausted — the controller also bans the user;
    // hence the BANNED code (contrast with maxAttemptsReached below).
    tooManyAttempts: (fctx: FlowExecCtx): Err =>
        getError({
            flowId: fctx.flowId,
            code: ERROR_CODES.BANNED,
            message: "Too many failed attempts",
            description:
                "Your account has been banned, contact us to resolve this issue",
            detail: "Max attempts reached for 2FA verification",
        }),

    // Same code re-submitted for a session that already accepted it.
    codeReplay: (fctx: FlowExecCtx): Err =>
        getError({
            flowId: fctx.flowId,
            code: ERROR_CODES.AUTH_ERROR,
            message: "This code has already been used",
            description: "Please request a new verification code",
            detail: "Code replay attempt detected",
        }),

    // 2FA is marked enabled but no usable secret was found.
    invalidSetup: (fctx: FlowExecCtx): Err =>
        getError({
            flowId: fctx.flowId,
            code: ERROR_CODES.AUTH_ERROR,
            message: "Invalid 2FA setup found",
            description: "Please contact us to resolve this issue",
            detail: "Invalid 2FA data found",
        }),

    invalidCode: (fctx: FlowExecCtx): Err =>
        getError({
            flowId: fctx.flowId,
            code: ERROR_CODES.AUTH_ERROR,
            message: "Invalid verification code",
            description: "Please try again with the correct code",
            detail: "Code is invalid",
        }),

    notEnabledForVerification: (fctx: FlowExecCtx): Err =>
        getError({
            flowId: fctx.flowId,
            code: ERROR_CODES.AUTH_ERROR,
            message: "2FA not enabled for this user",
            description:
                "Two-factor authentication is not enabled on your account",
            detail: "User has 2FA disabled but verification attempted",
        }),

    revokeSessionsFailed: (fctx: FlowExecCtx): Err =>
        getError({
            flowId: fctx.flowId,
            code: ERROR_CODES.AUTH_ERROR,
            message: "Failed to revoke sessions",
            description: "Please try again later",
            detail: "Failed to revoke other sessions",
        }),

    // Repository errors
    notFound: (fctx: FlowExecCtx): Err =>
        getError({
            flowId: fctx.flowId,
            code: ERROR_CODES.NOT_FOUND,
            message: "2FA not found",
            description: "Likely not enabled, otherwise please contact us :)",
            detail: "2FA not found",
        }),

    setupNotFound: (fctx: FlowExecCtx): Err =>
        getError({
            flowId: fctx.flowId,
            code: ERROR_CODES.VALIDATION_ERROR,
            message: "Cannot perform action",
            description: "If 2FA is not enabled, please refresh and try again",
            detail: "2FA setup not found",
        }),

    // NOTE(review): overlaps with tooManyAttempts above but uses AUTH_ERROR
    // and a softer description — confirm both variants are intentional.
    maxAttemptsReached: (fctx: FlowExecCtx): Err =>
        getError({
            flowId: fctx.flowId,
            code: ERROR_CODES.AUTH_ERROR,
            message: "Too many failed attempts",
            description: "Please refresh and try again",
            detail: "Max attempts reached for session",
        }),

    backupCodesNotFound: (fctx: FlowExecCtx): Err =>
        getError({
            flowId: fctx.flowId,
            code: ERROR_CODES.NOT_FOUND,
            message: "2FA info not found",
            description: "Please setup 2FA or contact us if this is unexpected",
            detail: "2FA info not found for user",
        }),

    backupCodesAlreadyGenerated: (fctx: FlowExecCtx): Err =>
        getError({
            flowId: fctx.flowId,
            code: ERROR_CODES.AUTH_ERROR,
            message: "Backup codes already generated",
            description:
                "Can only generate if not already present, or all are used up",
            detail: "Backup codes already generated",
        }),

    sessionNotFoundById: (fctx: FlowExecCtx): Err =>
        getError({
            flowId: fctx.flowId,
            code: ERROR_CODES.NOT_FOUND,
            message: "2FA session not found",
            description: "The verification session may have expired",
            detail: "Session ID not found in database",
        }),
};
|
||||
554
packages/logic/domains/2fa/repository.ts
Normal file
554
packages/logic/domains/2fa/repository.ts
Normal file
@@ -0,0 +1,554 @@
|
||||
import { errAsync, okAsync, ResultAsync } from "neverthrow";
|
||||
import { FlowExecCtx } from "@core/flow.execution.context";
|
||||
import { hashString, verifyHash } from "@/core/hash.utils";
|
||||
import { twoFactor, twofaSessions } from "@pkg/db/schema";
|
||||
import { TwoFactor, type TwoFaSession } from "./data";
|
||||
import { and, Database, eq, gt, lt } from "@pkg/db";
|
||||
import { settings } from "@core/settings";
|
||||
import type { Err } from "@pkg/result";
|
||||
import { twofaErrors } from "./errors";
|
||||
import { authenticator } from "otplib";
|
||||
import { logger } from "@pkg/logger";
|
||||
import { Redis } from "@pkg/redis";
|
||||
import { nanoid } from "nanoid";
|
||||
|
||||
// Shape of the transient setup payload kept in Redis while a user is
// confirming their authenticator app (before 2FA is persisted to Postgres).
type TwoFaSetup = {
    secret: string; // TOTP secret generated for this setup attempt
    lastUsedCode: string; // last code submitted; used to reject immediate replays
    tries: number; // failed confirmation attempts so far
};

/**
 * Persistence layer for two-factor authentication.
 *
 * Two backing stores are used:
 *  - Redis (`store`): short-lived "pending setup" payloads (TwoFaSetup),
 *  - Postgres via drizzle (`db`): enabled-2FA records (`twoFactor`) and
 *    verification sessions (`twofaSessions`).
 *
 * Every fallible operation returns a neverthrow `ResultAsync`; store/database
 * failures are mapped to `twofaErrors.dbError` with a short detail string.
 */
export class TwofaRepository {
    // Redis key namespace for pending (not yet confirmed) 2FA setups.
    private PENDING_KEY_PREFIX = "pending_enabling_2fa:";
    private EXPIRY_TIME = 60 * 20; // 20 mins
    private DEFAULT_BACKUP_CODES_AMT = 8;
    private MAX_SETUP_ATTEMPTS = 3;

    constructor(
        private db: Database,
        private store: Redis,
    ) {}

    /** Verify a TOTP code against a secret (thin wrapper over otplib). */
    checkTotp(secret: string, code: string) {
        const checked = authenticator.verify({ secret, token: code });
        logger.debug("TOTP check result", { checked });
        return checked;
    }

    /** Compare a plaintext backup code against its stored hash. */
    async checkBackupCode(hash: string, code: string) {
        return verifyHash({ hash, target: code });
    }

    /**
     * Build the Redis key for a user's pending setup. Idempotent: a value
     * that already carries the prefix is returned unchanged.
     */
    private getKey(userId: string) {
        if (userId.includes(this.PENDING_KEY_PREFIX)) {
            return userId;
        }
        return `${this.PENDING_KEY_PREFIX}${userId}`;
    }

    /**
     * Fetch the user's persisted 2FA record.
     *
     * @param returnUndefined - when true, a missing record resolves to
     * `undefined` instead of the `notFound` error.
     */
    getUsers2FAInfo(
        fctx: FlowExecCtx,
        userId: string,
        returnUndefined?: boolean,
    ): ResultAsync<TwoFactor | undefined, Err> {
        logger.info("Getting user 2FA info", { ...fctx, userId });

        return ResultAsync.fromPromise(
            this.db.query.twoFactor.findFirst({
                where: eq(twoFactor.userId, userId),
            }),
            () => twofaErrors.dbError(fctx, "Failed to query 2FA info"),
        ).andThen((found) => {
            if (!found) {
                logger.debug("2FA info not found for user", {
                    ...fctx,
                    userId,
                });
                if (returnUndefined) {
                    return okAsync(undefined);
                }
                return errAsync(twofaErrors.notFound(fctx));
            }
            logger.info("2FA info retrieved successfully", { ...fctx, userId });
            return okAsync(found as TwoFactor);
        });
    }

    /** True when a pending-setup payload exists in Redis for this user. */
    isSetupPending(
        fctx: FlowExecCtx,
        userId: string,
    ): ResultAsync<boolean, Err> {
        logger.debug("Checking if 2FA setup is pending", { ...fctx, userId });

        return ResultAsync.fromPromise(
            this.store.get(this.getKey(userId)),
            () =>
                twofaErrors.dbError(
                    fctx,
                    "Failed to check setup pending status",
                ),
        ).map((found) => {
            const isPending = !!found;
            logger.debug("Setup pending status checked", {
                ...fctx,
                userId,
                isPending,
            });
            return isPending;
        });
    }

    /**
     * Begin 2FA enrollment: generate a fresh TOTP secret and stash it in
     * Redis (EXPIRY_TIME TTL) until the user confirms with a valid code.
     *
     * @returns the generated secret (for QR / manual entry on the client).
     */
    setup(fctx: FlowExecCtx, userId: string): ResultAsync<string, Err> {
        logger.info("Starting 2FA setup", { ...fctx, userId });

        return ResultAsync.fromSafePromise(
            (async () => {
                const secret = authenticator.generateSecret();
                const payload = {
                    secret,
                    lastUsedCode: "",
                    tries: 0,
                } as TwoFaSetup;
                await this.store.setex(
                    this.getKey(userId),
                    this.EXPIRY_TIME,
                    JSON.stringify(payload),
                );
                logger.info("Created temp 2FA session", {
                    ...fctx,
                    userId,
                    expiresIn: this.EXPIRY_TIME,
                });
                return secret;
            })(),
        ).mapErr(() =>
            twofaErrors.dbError(fctx, "Setting to data store failed"),
        );
    }

    /**
     * Confirm a pending setup with a TOTP code and, on success, persist the
     * 2FA record to Postgres.
     *
     * Outcomes:
     *  - ok(true): code valid — Redis payload deleted, record inserted.
     *  - ok(false): code invalid or replayed — attempt counter bumped and
     *    TTL refreshed in Redis.
     *  - err: no pending setup, max attempts reached (payload deleted), or
     *    a store/database failure.
     */
    verifyAndEnable2FA(
        fctx: FlowExecCtx,
        userId: string,
        code: string,
    ): ResultAsync<boolean, Err> {
        logger.info("Verifying and enabling 2FA", { ...fctx, userId });

        return ResultAsync.fromPromise(
            this.store.get(this.getKey(userId)),
            () => twofaErrors.dbError(fctx, "Failed to get setup session"),
        )
            .andThen((payload) => {
                if (!payload) {
                    logger.error("Setup session not found", {
                        ...fctx,
                        userId,
                    });
                    return errAsync(twofaErrors.setupNotFound(fctx));
                }
                return okAsync(JSON.parse(payload) as TwoFaSetup);
            })
            .andThen((payloadObj) => {
                const key = this.getKey(userId);

                // Too many failures: discard the pending setup entirely.
                if (payloadObj.tries >= this.MAX_SETUP_ATTEMPTS) {
                    logger.warn("Max setup attempts reached", {
                        ...fctx,
                        userId,
                        tries: payloadObj.tries,
                    });
                    return ResultAsync.fromPromise(this.store.del(key), () =>
                        twofaErrors.dbError(
                            fctx,
                            "Failed to delete setup session",
                        ),
                    ).andThen(() =>
                        errAsync(twofaErrors.maxAttemptsReached(fctx)),
                    );
                }

                // Reject a wrong code, and also a correct-but-reused code
                // (simple replay protection via lastUsedCode).
                if (
                    !this.checkTotp(payloadObj.secret, code) ||
                    code === payloadObj.lastUsedCode
                ) {
                    logger.warn("Invalid 2FA code during setup", {
                        ...fctx,
                        userId,
                        tries: payloadObj.tries + 1,
                        codeReused: code === payloadObj.lastUsedCode,
                    });
                    // Persist the bumped attempt counter; note the TTL is
                    // reset to the full EXPIRY_TIME on every failed try.
                    return ResultAsync.fromPromise(
                        this.store.setex(
                            key,
                            this.EXPIRY_TIME,
                            JSON.stringify({
                                secret: payloadObj.secret,
                                lastUsedCode: code,
                                tries: payloadObj.tries + 1,
                            }),
                        ),
                        () =>
                            twofaErrors.dbError(
                                fctx,
                                "Failed to update setup session",
                            ),
                    ).map(() => false);
                }

                logger.info("2FA code verified successfully, enabling 2FA", {
                    ...fctx,
                    userId,
                });

                // Success: drop the Redis payload, then write the permanent
                // record. NOTE(review): if the insert fails after the delete,
                // the pending setup is lost — confirm this is acceptable.
                return ResultAsync.fromPromise(this.store.del(key), () =>
                    twofaErrors.dbError(fctx, "Failed to delete setup session"),
                )
                    .andThen(() =>
                        ResultAsync.fromPromise(
                            this.db
                                .insert(twoFactor)
                                .values({
                                    id: nanoid(),
                                    secret: payloadObj.secret,
                                    userId: userId,
                                    createdAt: new Date(),
                                    updatedAt: new Date(),
                                })
                                .execute(),
                            () =>
                                twofaErrors.dbError(
                                    fctx,
                                    "Failed to insert 2FA record",
                                ),
                        ),
                    )
                    .map(() => {
                        logger.info("2FA enabled successfully", {
                            ...fctx,
                            userId,
                        });
                        return true;
                    });
            });
    }

    /**
     * Delete the user's 2FA record.
     * NOTE(review): resolves to true even when no row matched (the delete
     * result is ignored) — confirm callers don't rely on a "was enabled"
     * signal here.
     */
    disable(fctx: FlowExecCtx, userId: string): ResultAsync<boolean, Err> {
        logger.info("Disabling 2FA", { ...fctx, userId });

        return ResultAsync.fromPromise(
            this.db
                .delete(twoFactor)
                .where(eq(twoFactor.userId, userId))
                .execute(),
            () => twofaErrors.dbError(fctx, "Failed to delete 2FA record"),
        ).map((result) => {
            logger.info("2FA disabled successfully", { ...fctx, userId });
            return true;
        });
    }

    /**
     * Generate DEFAULT_BACKUP_CODES_AMT one-time backup codes for a user
     * whose 2FA is enabled and who has no codes yet. Hashes are stored in
     * the database; the plaintext codes are returned once to the caller.
     */
    generateBackupCodes(
        fctx: FlowExecCtx,
        userId: string,
    ): ResultAsync<string[], Err> {
        logger.info("Generating backup codes", { ...fctx, userId });

        return ResultAsync.fromPromise(
            this.db.query.twoFactor.findFirst({
                where: eq(twoFactor.userId, userId),
            }),
            () => twofaErrors.dbError(fctx, "Failed to query 2FA info"),
        )
            .andThen((found) => {
                if (!found) {
                    logger.error("2FA not enabled for user", {
                        ...fctx,
                        userId,
                    });
                    return errAsync(twofaErrors.backupCodesNotFound(fctx));
                }
                // Refuse to overwrite an existing (non-empty) code set.
                if (found.backupCodes && found.backupCodes.length) {
                    logger.warn("Backup codes already generated", {
                        ...fctx,
                        userId,
                    });
                    return errAsync(
                        twofaErrors.backupCodesAlreadyGenerated(fctx),
                    );
                }
                return okAsync(found);
            })
            .andThen(() => {
                const codes = Array.from(
                    { length: this.DEFAULT_BACKUP_CODES_AMT },
                    () => nanoid(12),
                );

                logger.debug("Backup codes generated, hashing", {
                    ...fctx,
                    userId,
                    count: codes.length,
                });

                return ResultAsync.fromPromise(
                    (async () => {
                        const hashed = [];
                        for (const code of codes) {
                            const hash = await hashString(code);
                            hashed.push(hash);
                        }
                        return { codes, hashed };
                    })(),
                    () =>
                        twofaErrors.dbError(
                            fctx,
                            "Failed to hash backup codes",
                        ),
                ).andThen(({ codes, hashed }) =>
                    ResultAsync.fromPromise(
                        this.db
                            .update(twoFactor)
                            .set({ backupCodes: hashed })
                            .where(eq(twoFactor.userId, userId))
                            .returning(),
                        () =>
                            twofaErrors.dbError(
                                fctx,
                                "Failed to update backup codes",
                            ),
                    ).map(() => {
                        logger.info("Backup codes generated successfully", {
                            ...fctx,
                            userId,
                        });
                        // Return the plaintext codes (shown to the user once).
                        return codes;
                    }),
                );
            });
    }

    /** Fetch the user's TOTP secret, or null when 2FA is not enabled. */
    get2FASecret(
        fctx: FlowExecCtx,
        userId: string,
    ): ResultAsync<string | null, Err> {
        logger.debug("Getting 2FA secret", { ...fctx, userId });

        return ResultAsync.fromPromise(
            this.db
                .select()
                .from(twoFactor)
                .where(eq(twoFactor.userId, userId))
                .limit(1),
            () => twofaErrors.dbError(fctx, "Failed to query 2FA secret"),
        ).map((result) => {
            if (!result.length) {
                logger.debug("No 2FA secret found", { ...fctx, userId });
                return null;
            }
            logger.debug("2FA secret retrieved", { ...fctx, userId });
            return result[0].secret;
        });
    }

    /**
     * Create a pending 2FA verification session with a fresh random
     * verification token, expiring after `settings.twofaSessionExpiryMinutes`
     * (default 10 minutes). Attempts start at 0 with a cap of 5.
     */
    createSession(
        fctx: FlowExecCtx,
        params: {
            userId: string;
            sessionId: string;
            ipAddress?: string;
            userAgent?: string;
        },
    ): ResultAsync<TwoFaSession, Err> {
        logger.info("Creating 2FA verification session", {
            ...fctx,
            userId: params.userId,
            sessionId: params.sessionId,
        });

        return ResultAsync.fromSafePromise(
            (async () => {
                const expiryMinutes = settings.twofaSessionExpiryMinutes || 10;
                const now = new Date();
                const expiresAt = new Date(
                    now.getTime() + expiryMinutes * 60 * 1000,
                );

                return { expiresAt, now, params };
            })(),
        ).andThen(({ expiresAt, now, params }) =>
            ResultAsync.fromPromise(
                this.db
                    .insert(twofaSessions)
                    .values({
                        id: nanoid(),
                        userId: params.userId,
                        sessionId: params.sessionId,
                        verificationToken: nanoid(32),
                        status: "pending",
                        attempts: 0,
                        maxAttempts: 5,
                        expiresAt,
                        createdAt: now,
                        ipAddress: params.ipAddress,
                        userAgent: params.userAgent,
                    })
                    .returning(),
                () => twofaErrors.dbError(fctx, "Failed to create 2FA session"),
            ).map(([session]) => {
                logger.info("2FA verification session created", {
                    ...fctx,
                    sessionId: session.id,
                    userId: params.userId,
                });
                return session as TwoFaSession;
            }),
        );
    }

    /**
     * Look up a non-expired verification session by its token.
     * Resolves to null when no live session matches.
     */
    getSessionByToken(
        fctx: FlowExecCtx,
        token: string,
    ): ResultAsync<TwoFaSession | null, Err> {
        logger.debug("Getting 2FA session by token", { ...fctx });

        return ResultAsync.fromPromise(
            this.db
                .select()
                .from(twofaSessions)
                .where(
                    and(
                        eq(twofaSessions.verificationToken, token),
                        // Only sessions that have not yet expired.
                        gt(twofaSessions.expiresAt, new Date()),
                    ),
                )
                .limit(1),
            () => twofaErrors.dbError(fctx, "Failed to query 2FA session"),
        ).map((result) => {
            if (!result.length) {
                logger.warn("2FA session not found or expired", { ...fctx });
                return null;
            }
            logger.debug("2FA session found", {
                ...fctx,
                sessionId: result[0].id,
            });
            return result[0] as TwoFaSession;
        });
    }

    /**
     * Patch mutable session fields (status/attempts/verifiedAt/codeUsed)
     * by session id; errs with `sessionNotFoundById` when no row matched.
     */
    updateSession(
        fctx: FlowExecCtx,
        id: string,
        updates: Partial<
            Pick<
                TwoFaSession,
                "status" | "attempts" | "verifiedAt" | "codeUsed"
            >
        >,
    ): ResultAsync<TwoFaSession, Err> {
        logger.debug("Updating 2FA session", {
            ...fctx,
            sessionId: id,
            updates,
        });

        return ResultAsync.fromPromise(
            this.db
                .update(twofaSessions)
                .set(updates)
                .where(eq(twofaSessions.id, id))
                .returning(),
            () => twofaErrors.dbError(fctx, "Failed to update 2FA session"),
        ).andThen(([session]) => {
            if (!session) {
                logger.error("2FA session not found for update", {
                    ...fctx,
                    sessionId: id,
                });
                return errAsync(twofaErrors.sessionNotFoundById(fctx));
            }
            logger.debug("2FA session updated successfully", {
                ...fctx,
                sessionId: id,
            });
            return okAsync(session as TwoFaSession);
        });
    }

    /**
     * Bump the failed-attempt counter on a verification session.
     * NOTE(review): this is a read-then-write (not an atomic SQL increment),
     * so concurrent failures can under-count — confirm whether that matters.
     */
    incrementAttempts(
        fctx: FlowExecCtx,
        id: string,
    ): ResultAsync<TwoFaSession, Err> {
        logger.debug("Incrementing session attempts", {
            ...fctx,
            sessionId: id,
        });

        return ResultAsync.fromPromise(
            this.db.query.twofaSessions.findFirst({
                where: eq(twofaSessions.id, id),
                columns: { id: true, attempts: true },
            }),
            () =>
                twofaErrors.dbError(
                    fctx,
                    "Failed to query session for increment",
                ),
        )
            .andThen((s) => {
                if (!s) {
                    logger.error("Session not found for increment", {
                        ...fctx,
                        sessionId: id,
                    });
                    return errAsync(twofaErrors.sessionNotFoundById(fctx));
                }
                return okAsync(s);
            })
            .andThen((s) =>
                ResultAsync.fromPromise(
                    this.db
                        .update(twofaSessions)
                        .set({ attempts: s.attempts + 1 })
                        .where(eq(twofaSessions.id, id))
                        .returning(),
                    () =>
                        twofaErrors.dbError(
                            fctx,
                            "Failed to increment attempts",
                        ),
                ).andThen(([session]) => {
                    if (!session) {
                        logger.error("Session not found after increment", {
                            ...fctx,
                            sessionId: id,
                        });
                        return errAsync(twofaErrors.sessionNotFoundById(fctx));
                    }

                    logger.warn("Failed verification attempt", {
                        ...fctx,
                        sessionId: session.id,
                        attempts: session.attempts,
                    });

                    return okAsync(session as TwoFaSession);
                }),
            );
    }

    /**
     * Delete all expired verification sessions.
     * @returns the number of rows removed (0 when rowCount is unavailable).
     */
    cleanupExpiredSessions(fctx: FlowExecCtx): ResultAsync<number, Err> {
        logger.info("Cleaning up expired 2FA sessions", { ...fctx });

        return ResultAsync.fromPromise(
            this.db
                .delete(twofaSessions)
                .where(lt(twofaSessions.expiresAt, new Date())),
            () =>
                twofaErrors.dbError(fctx, "Failed to cleanup expired sessions"),
        ).map((result) => {
            const count = result.rowCount || 0;
            logger.info("Expired sessions cleaned up", { ...fctx, count });
            return count;
        });
    }
}
|
||||
170
packages/logic/domains/2fa/router.ts
Normal file
170
packages/logic/domains/2fa/router.ts
Normal file
@@ -0,0 +1,170 @@
|
||||
import {
|
||||
disable2FASchema,
|
||||
enable2FACodeSchema,
|
||||
startVerificationSchema,
|
||||
verifyCodeSchema,
|
||||
} from "./data";
|
||||
import { sValidator } from "@hono/standard-validator";
|
||||
import { HonoContext } from "@/core/hono.helpers";
|
||||
import { getTwofaController } from "./controller";
|
||||
import { auth } from "@domains/auth/config.base";
|
||||
import { Hono } from "hono";
|
||||
|
||||
const twofaController = getTwofaController();
|
||||
|
||||
export const twofaRouter = new Hono<HonoContext>()
|
||||
.post("/setup", async (c) => {
|
||||
const res = await twofaController.setup2FA(
|
||||
c.env.locals.fCtx,
|
||||
c.env.locals.user,
|
||||
);
|
||||
return c.json(
|
||||
res.isOk()
|
||||
? { data: res.value, error: null }
|
||||
: { data: null, error: res.error },
|
||||
res.isOk() ? 200 : 400,
|
||||
);
|
||||
})
|
||||
.post(
|
||||
"/verify-and-enable",
|
||||
sValidator("json", enable2FACodeSchema),
|
||||
async (c) => {
|
||||
const data = c.req.valid("json");
|
||||
const res = await twofaController.verifyAndEnable2FA(
|
||||
c.env.locals.fCtx,
|
||||
c.env.locals.user,
|
||||
data.code,
|
||||
c.req.raw.headers,
|
||||
);
|
||||
return c.json(
|
||||
res.isOk()
|
||||
? { data: res.value, error: null }
|
||||
: { data: null, error: res.error },
|
||||
res.isOk() ? 200 : 400,
|
||||
);
|
||||
},
|
||||
)
|
||||
.get("/generate-backup-codes", async (c) => {
|
||||
const res = await twofaController.generateBackupCodes(
|
||||
c.env.locals.fCtx,
|
||||
c.env.locals.user,
|
||||
);
|
||||
return c.json(
|
||||
res.isOk()
|
||||
? { data: res.value, error: null }
|
||||
: { data: null, error: res.error },
|
||||
res.isOk() ? 200 : 400,
|
||||
);
|
||||
})
|
||||
.delete("/disable", sValidator("json", disable2FASchema), async (c) => {
|
||||
const data = c.req.valid("json");
|
||||
const res = await twofaController.disable(
|
||||
c.env.locals.fCtx,
|
||||
c.env.locals.user,
|
||||
data.code,
|
||||
);
|
||||
return c.json(
|
||||
res.isOk()
|
||||
? { data: res.value, error: null }
|
||||
: { data: null, error: res.error },
|
||||
res.isOk() ? 200 : 400,
|
||||
);
|
||||
})
|
||||
.get("/requires-verification", async (c) => {
|
||||
const user = c.env.locals.user;
|
||||
const sessionId = c.req.query("sessionId")?.toString() ?? "";
|
||||
const res = await twofaController.requiresInitialVerification(
|
||||
c.env.locals.fCtx,
|
||||
user,
|
||||
sessionId,
|
||||
);
|
||||
return c.json(
|
||||
res.isOk()
|
||||
? { data: res.value, error: null }
|
||||
: { data: null, error: res.error },
|
||||
res.isOk() ? 200 : 400,
|
||||
);
|
||||
})
|
||||
.get("/requires-sensitive-action", async (c) => {
|
||||
const res = await twofaController.requiresSensitiveActionVerification(
|
||||
c.env.locals.fCtx,
|
||||
c.env.locals.user,
|
||||
);
|
||||
return c.json(
|
||||
res.isOk()
|
||||
? { data: res.value, error: null }
|
||||
: { data: null, error: res.error },
|
||||
res.isOk() ? 200 : 400,
|
||||
);
|
||||
})
|
||||
.post(
|
||||
"/start-verification-session",
|
||||
sValidator("json", startVerificationSchema),
|
||||
async (c) => {
|
||||
const data = c.req.valid("json");
|
||||
|
||||
const ipAddress =
|
||||
c.req.header("x-forwarded-for") ||
|
||||
c.req.header("x-real-ip") ||
|
||||
"unknown";
|
||||
const userAgent = c.req.header("user-agent") || "unknown";
|
||||
|
||||
const res = await twofaController.startVerification(
|
||||
c.env.locals.fCtx,
|
||||
{
|
||||
userId: data.userId,
|
||||
sessionId: data.sessionId,
|
||||
ipAddress,
|
||||
userAgent,
|
||||
},
|
||||
);
|
||||
|
||||
return c.json(
|
||||
res.isOk()
|
||||
? { data: res.value, error: null }
|
||||
: { data: null, error: res.error },
|
||||
);
|
||||
},
|
||||
)
|
||||
.post(
|
||||
"/verify-session-code",
|
||||
sValidator("json", verifyCodeSchema),
|
||||
async (c) => {
|
||||
const data = c.req.valid("json");
|
||||
|
||||
let user = c.env.locals.user;
|
||||
if (!user) {
|
||||
const out = await auth.api.getSession({
|
||||
headers: c.req.raw.headers,
|
||||
});
|
||||
user = out?.user as any;
|
||||
}
|
||||
|
||||
const res = await twofaController.verifyCode(
|
||||
c.env.locals.fCtx,
|
||||
{
|
||||
verificationSessToken: data.verificationToken,
|
||||
code: data.code,
|
||||
},
|
||||
user,
|
||||
);
|
||||
|
||||
return c.json(
|
||||
res.isOk()
|
||||
? { data: res.value, error: null }
|
||||
: { data: null, error: res.error },
|
||||
res.isOk() ? 200 : 400,
|
||||
);
|
||||
},
|
||||
)
|
||||
.post("/cleanup-expired-sessions", async (c) => {
|
||||
const res = await twofaController.cleanupExpiredSessions(
|
||||
c.env.locals.fCtx,
|
||||
);
|
||||
return c.json(
|
||||
res.isOk()
|
||||
? { data: res.value, error: null }
|
||||
: { data: null, error: res.error },
|
||||
res.isOk() ? 200 : 400,
|
||||
);
|
||||
});
|
||||
43
packages/logic/domains/2fa/sensitive-actions.ts
Normal file
43
packages/logic/domains/2fa/sensitive-actions.ts
Normal file
@@ -0,0 +1,43 @@
|
||||
import { FlowExecCtx } from "@core/flow.execution.context";
|
||||
import { getTwofaController } from "./controller";
|
||||
import type { User } from "@/domains/user/data";
|
||||
|
||||
const twofaController = getTwofaController();
|
||||
|
||||
/**
|
||||
* Check if user needs 2FA verification for sensitive actions
|
||||
* Call this before executing sensitive operations like:
|
||||
* - Changing password
|
||||
* - Viewing billing info
|
||||
* - Deleting account
|
||||
* - etc.
|
||||
*/
|
||||
export async function requiresSensitiveAction2FA(
|
||||
fctx: FlowExecCtx,
|
||||
user: User,
|
||||
): Promise<boolean> {
|
||||
const result = await twofaController.requiresSensitiveActionVerification(
|
||||
fctx,
|
||||
user,
|
||||
);
|
||||
return result.match(
|
||||
(data) => data,
|
||||
() => true, // On error, require verification for security
|
||||
);
|
||||
}
|
||||
|
||||
export async function checkInitial2FaRequired(
|
||||
fctx: FlowExecCtx,
|
||||
user: User,
|
||||
sessionId: string,
|
||||
): Promise<boolean> {
|
||||
const result = await twofaController.requiresInitialVerification(
|
||||
fctx,
|
||||
user,
|
||||
sessionId,
|
||||
);
|
||||
return result.match(
|
||||
(data) => data,
|
||||
() => true,
|
||||
);
|
||||
}
|
||||
205
packages/logic/domains/auth/config.base.ts
Normal file
205
packages/logic/domains/auth/config.base.ts
Normal file
@@ -0,0 +1,205 @@
|
||||
import {
|
||||
admin,
|
||||
customSession,
|
||||
magicLink,
|
||||
multiSession,
|
||||
username,
|
||||
} from "better-auth/plugins";
|
||||
import { getUserController, UserController } from "../user/controller";
|
||||
import { AuthController, getAuthController } from "./controller";
|
||||
import { drizzleAdapter } from "better-auth/adapters/drizzle";
|
||||
import { FlowExecCtx } from "@/core/flow.execution.context";
|
||||
import { UserRoleMap } from "@domains/user/data";
|
||||
import { getRedisInstance } from "@pkg/redis";
|
||||
import { APIError } from "better-auth/api";
|
||||
import { settings } from "@core/settings";
|
||||
import { betterAuth } from "better-auth";
|
||||
import { logger } from "@pkg/logger";
|
||||
import { db, schema } from "@pkg/db";
|
||||
import { nanoid } from "nanoid";
|
||||
|
||||
// Constants
const EMAIL_EXPIRES_IN_MINS = 10;
// Magic-link / email verification TTL, in seconds.
const EMAIL_EXPIRES_IN_SECONDS = 60 * EMAIL_EXPIRES_IN_MINS;
// How long the session cookie cache stays valid (5 minutes).
const COOKIE_CACHE_MAX_AGE = 60 * 5;
||||
|
||||
// Helper to create flow context for better-auth callbacks
|
||||
function createAuthFlowContext(contextLabel: string): FlowExecCtx {
|
||||
return {
|
||||
flowId: `auth:${contextLabel}:${nanoid(10)}`,
|
||||
};
|
||||
}
|
||||
|
||||
// Singleton controller instances
// Lazily initialized by the getter functions below; null until first use.
let authControllerInstance: AuthController | null = null;
let userControllerInstance: UserController | null = null;
|
||||
|
||||
function getAuthControllerInstance(): AuthController {
|
||||
if (!authControllerInstance) {
|
||||
authControllerInstance = getAuthController();
|
||||
}
|
||||
return authControllerInstance;
|
||||
}
|
||||
|
||||
function getUserControllerInstance(): UserController {
|
||||
if (!userControllerInstance) {
|
||||
userControllerInstance = getUserController();
|
||||
}
|
||||
return userControllerInstance;
|
||||
}
|
||||
|
||||
/**
 * better-auth instance for the application.
 *
 * Storage: drizzle/Postgres as the primary adapter, Redis as secondary
 * storage (session/cache values). Plugins: custom session shaping,
 * username rules, magic-link email login, admin moderation, and
 * multi-session support.
 */
export const auth = betterAuth({
    trustedOrigins: ["http://localhost:5173", settings.betterAuthUrl],
    // Secure cookies only in production (local dev runs over http).
    advanced: { useSecureCookies: settings.nodeEnv === "production" },
    appName: settings.appName,
    emailAndPassword: {
        enabled: true,
        // Password sign-UP is closed; sign-in remains available.
        disableSignUp: true,
        requireEmailVerification: false,
    },
    plugins: [
        // Expose the session token as the session id on the client payload.
        customSession(async ({ user, session }) => {
            session.id = session.token;
            return { user, session };
        }),
        username({
            minUsernameLength: 5,
            maxUsernameLength: 20,
            // Availability check delegated to the user controller; on error
            // the username is reported unavailable (fail closed).
            usernameValidator: async (username) => {
                const fctx = createAuthFlowContext("username-check");
                const uc = getUserControllerInstance();

                const result = await uc
                    .isUsernameAvailable(fctx, username)
                    .match(
                        (isAvailable) => ({ success: true, isAvailable }),
                        (error) => {
                            logger.error(
                                `[${fctx.flowId}] Failed to check username availability`,
                                error,
                            );
                            return { success: false, isAvailable: false };
                        },
                    );

                return result.isAvailable;
            },
        }),
        magicLink({
            expiresIn: EMAIL_EXPIRES_IN_SECONDS,
            // At most 4 magic-link requests per 60-second window.
            rateLimit: { window: 60, max: 4 },
            sendMagicLink: async ({ email, token, url }, request) => {
                const fctx = createAuthFlowContext("magic-link");
                const ac = getAuthControllerInstance();

                const result = await ac
                    .sendMagicLink(fctx, email, token, url)
                    .match(
                        () => ({ success: true, error: undefined }),
                        (error) => ({ success: false, error }),
                    );

                // Surface send failures to better-auth as a 500 so the
                // client is not told the mail went out.
                if (!result.success || result?.error) {
                    logger.error(
                        `[${fctx.flowId}] Failed to send magic link`,
                        result.error,
                    );
                    throw new APIError("INTERNAL_SERVER_ERROR", {
                        message: result.error?.message,
                    });
                }
            },
        }),
        admin({
            defaultRole: UserRoleMap.admin,
            defaultBanReason:
                "Stop fanum taxing the server bub, losing aura points fr",
            // Default ban duration: 24 hours.
            defaultBanExpiresIn: 60 * 60 * 24,
        }),
        multiSession({ maximumSessions: 5 }),
    ],
    logger: {
        // Route better-auth logs through the app logger.
        log: (level, message, metadata) => {
            logger.log(level, message, metadata);
        },
        level: settings.isDevelopment ? "debug" : "info",
    },
    database: drizzleAdapter(db, { provider: "pg", schema: { ...schema } }),
    // Redis-backed key/value storage used by better-auth for cached values.
    secondaryStorage: {
        get: async (key) => {
            const redis = getRedisInstance();
            return await redis.get(key);
        },
        set: async (key, value, ttl) => {
            const redis = getRedisInstance();
            if (ttl) {
                await redis.setex(key, ttl, value);
            } else {
                await redis.set(key, value);
            }
        },
        delete: async (key) => {
            const redis = getRedisInstance();
            const out = await redis.del(key);
            // redis.del returns the number of removed keys; 0 is a valid
            // result and must not be collapsed to null.
            if (!out && out !== 0) {
                return null;
            }
            return out.toString() as any;
        },
    },
    session: {
        modelName: "session",
        expiresIn: 60 * 60 * 24 * 7, // 7 days
        updateAge: 60 * 60 * 24, // refresh at most once per day
        cookieCache: {
            enabled: true,
            maxAge: COOKIE_CACHE_MAX_AGE,
        },
    },
    user: {
        changeEmail: {
            enabled: true,
            // Mirrors the magic-link flow: delegate to the auth controller
            // and convert failures into a 500 APIError.
            sendChangeEmailVerification: async (
                { user, newEmail, url, token },
                request,
            ) => {
                const fctx = createAuthFlowContext("email-change");
                const ac = getAuthControllerInstance();

                const result = await ac
                    .sendEmailChangeVerificationEmail(
                        fctx,
                        newEmail,
                        token,
                        url,
                    )
                    .match(
                        () => ({ success: true, error: undefined }),
                        (error) => ({ success: false, error }),
                    );

                if (!result.success || result?.error) {
                    logger.error(
                        `[${fctx.flowId}] Failed to send email change verification`,
                        result.error,
                    );
                    throw new APIError("INTERNAL_SERVER_ERROR", {
                        message: result.error?.message,
                    });
                }
            },
        },
        modelName: "user",
        // Extra columns persisted on the user model beyond better-auth's
        // defaults.
        additionalFields: {
            onboardingDone: {
                type: "boolean",
                defaultValue: false,
                required: false,
            },
            last2FAVerifiedAt: { type: "date", required: false },
            parentId: { required: false, type: "string" },
        },
    },
});
|
||||
|
||||
// - - -
|
||||
148
packages/logic/domains/auth/controller.ts
Normal file
148
packages/logic/domains/auth/controller.ts
Normal file
@@ -0,0 +1,148 @@
|
||||
import { AuthContext, MiddlewareContext, MiddlewareOptions } from "better-auth";
|
||||
import { AccountRepository } from "../user/account.repository";
|
||||
import { FlowExecCtx } from "@/core/flow.execution.context";
|
||||
import { ResultAsync } from "neverthrow";
|
||||
import type { Err } from "@pkg/result";
|
||||
import { authErrors } from "./errors";
|
||||
import { logger } from "@pkg/logger";
|
||||
import { nanoid } from "nanoid";
|
||||
import { db } from "@pkg/db";
|
||||
|
||||
export class AuthController {
|
||||
private readonly mins = 10;
|
||||
|
||||
constructor(private accountRepo: AccountRepository) {}
|
||||
|
||||
sendEmailChangeVerificationEmail(
|
||||
fctx: FlowExecCtx,
|
||||
newEmail: string,
|
||||
token: string,
|
||||
url: string,
|
||||
): ResultAsync<void, Err> {
|
||||
logger.info("Sending email change verification link", {
|
||||
...fctx,
|
||||
newEmail,
|
||||
});
|
||||
logger.debug("Original URL", { ...fctx, url });
|
||||
|
||||
const transformedUrl = url
|
||||
.replace("/api/auth/verify-email", "/account/verify-email")
|
||||
.replace("/api/", "/");
|
||||
|
||||
logger.debug("Transformed URL", { ...fctx, transformedUrl });
|
||||
|
||||
// Simulate email sending with 90/10 success/failure
|
||||
const success = Math.random() > 0.1;
|
||||
|
||||
if (!success) {
|
||||
logger.error("Failed to send email change verification link", {
|
||||
...fctx,
|
||||
error: "Simulated email service failure",
|
||||
});
|
||||
return ResultAsync.fromPromise(
|
||||
Promise.reject(
|
||||
authErrors.emailChangeVerificationFailed(
|
||||
fctx,
|
||||
"Simulated email service failure",
|
||||
),
|
||||
),
|
||||
(error) => error as Err,
|
||||
);
|
||||
}
|
||||
|
||||
logger.info("Email change verification sent successfully", {
|
||||
...fctx,
|
||||
newEmail,
|
||||
});
|
||||
return ResultAsync.fromSafePromise(Promise.resolve(undefined));
|
||||
}
|
||||
|
||||
swapAccountPasswordForTwoFactor(
|
||||
fctx: FlowExecCtx,
|
||||
ctx: MiddlewareContext<
|
||||
MiddlewareOptions,
|
||||
AuthContext & { returned?: unknown; responseHeaders?: Headers }
|
||||
>,
|
||||
) {
|
||||
logger.info("Swapping account password for 2FA", {
|
||||
...fctx,
|
||||
});
|
||||
|
||||
if (!ctx.path.includes("two-factor")) {
|
||||
return ResultAsync.fromSafePromise(Promise.resolve(ctx));
|
||||
}
|
||||
|
||||
if (!ctx.body.password || ctx.body.password.length === 0) {
|
||||
return ResultAsync.fromSafePromise(Promise.resolve(ctx));
|
||||
}
|
||||
|
||||
logger.info("Rotating password for 2FA setup for user", {
|
||||
...fctx,
|
||||
userId: ctx.body.userId,
|
||||
});
|
||||
|
||||
return this.accountRepo
|
||||
.rotatePassword(fctx, ctx.body.userId, nanoid())
|
||||
.mapErr((err) => {
|
||||
logger.error("Failed to rotate password for 2FA", {
|
||||
...fctx,
|
||||
error: err,
|
||||
});
|
||||
return authErrors.passwordRotationFailed(fctx, err.detail);
|
||||
})
|
||||
.map((newPassword) => {
|
||||
logger.info("Password rotated successfully for 2FA setup", {
|
||||
...fctx,
|
||||
});
|
||||
return {
|
||||
...ctx,
|
||||
body: { ...ctx.body, password: newPassword },
|
||||
};
|
||||
});
|
||||
}
|
||||
|
||||
sendMagicLink(
|
||||
fctx: FlowExecCtx,
|
||||
email: string,
|
||||
token: string,
|
||||
url: string,
|
||||
): ResultAsync<void, Err> {
|
||||
logger.info("Sending magic link", { ...fctx, email });
|
||||
logger.debug("Original URL", { ...fctx, url });
|
||||
|
||||
const transformedUrl = url
|
||||
.replace("/api/auth/magic-link/verify", "/auth/magic-link")
|
||||
.replace("/api/", "/");
|
||||
|
||||
logger.debug("Transformed URL", { ...fctx, transformedUrl });
|
||||
|
||||
// Simulate email sending with 90/10 success/failure
|
||||
const success = Math.random() > 0.1;
|
||||
|
||||
if (!success) {
|
||||
logger.error("Failed to send magic link email", {
|
||||
...fctx,
|
||||
error: "Simulated email service failure",
|
||||
});
|
||||
return ResultAsync.fromPromise(
|
||||
Promise.reject(
|
||||
authErrors.magicLinkEmailFailed(
|
||||
fctx,
|
||||
"Simulated email service failure",
|
||||
),
|
||||
),
|
||||
(error) => error as Err,
|
||||
);
|
||||
}
|
||||
|
||||
logger.info("Magic link email sent successfully", {
|
||||
...fctx,
|
||||
email,
|
||||
});
|
||||
return ResultAsync.fromSafePromise(Promise.resolve(undefined));
|
||||
}
|
||||
}
|
||||
|
||||
export function getAuthController(): AuthController {
|
||||
return new AuthController(new AccountRepository(db));
|
||||
}
|
||||
59
packages/logic/domains/auth/errors.ts
Normal file
59
packages/logic/domains/auth/errors.ts
Normal file
@@ -0,0 +1,59 @@
|
||||
import { FlowExecCtx } from "@/core/flow.execution.context";
|
||||
import { getError } from "@pkg/logger";
|
||||
import { ERROR_CODES, type Err } from "@pkg/result";
|
||||
|
||||
export const authErrors = {
|
||||
emailSendFailed: (fctx: FlowExecCtx, detail: string): Err =>
|
||||
getError({
|
||||
flowId: fctx.flowId,
|
||||
code: ERROR_CODES.INTERNAL_SERVER_ERROR,
|
||||
message: "Failed to send email",
|
||||
description: "An error occurred while sending the email",
|
||||
detail,
|
||||
}),
|
||||
|
||||
magicLinkEmailFailed: (fctx: FlowExecCtx, detail: string): Err =>
|
||||
getError({
|
||||
flowId: fctx.flowId,
|
||||
code: ERROR_CODES.INTERNAL_SERVER_ERROR,
|
||||
message: "Failed to send magic link email",
|
||||
description: "An error occurred while sending the magic link",
|
||||
detail,
|
||||
}),
|
||||
|
||||
emailChangeVerificationFailed: (fctx: FlowExecCtx, detail: string): Err =>
|
||||
getError({
|
||||
flowId: fctx.flowId,
|
||||
code: ERROR_CODES.INTERNAL_SERVER_ERROR,
|
||||
message: "Failed to send email change verification link",
|
||||
description: "An error occurred while sending the verification email",
|
||||
detail,
|
||||
}),
|
||||
|
||||
passwordRotationFailed: (fctx: FlowExecCtx, detail: string): Err =>
|
||||
getError({
|
||||
flowId: fctx.flowId,
|
||||
code: ERROR_CODES.INTERNAL_SERVER_ERROR,
|
||||
message: "Failed to begin 2FA setup",
|
||||
description: "An error occurred while rotating the password for 2FA",
|
||||
detail,
|
||||
}),
|
||||
|
||||
dbError: (fctx: FlowExecCtx, detail: string): Err =>
|
||||
getError({
|
||||
flowId: fctx.flowId,
|
||||
code: ERROR_CODES.DATABASE_ERROR,
|
||||
message: "Database operation failed",
|
||||
description: "Please try again later",
|
||||
detail,
|
||||
}),
|
||||
|
||||
accountNotFound: (fctx: FlowExecCtx): Err =>
|
||||
getError({
|
||||
flowId: fctx.flowId,
|
||||
code: ERROR_CODES.NOT_FOUND,
|
||||
message: "Account not found",
|
||||
description: "Please try again later",
|
||||
detail: "Account not found for user",
|
||||
}),
|
||||
};
|
||||
96
packages/logic/domains/notifications/controller.ts
Normal file
96
packages/logic/domains/notifications/controller.ts
Normal file
@@ -0,0 +1,96 @@
|
||||
import { FlowExecCtx } from "@/core/flow.execution.context";
|
||||
import { okAsync } from "neverthrow";
|
||||
import {
|
||||
NotificationFilters,
|
||||
PaginationOptions,
|
||||
} from "./data";
|
||||
import { NotificationRepository } from "./repository";
|
||||
import { db } from "@pkg/db";
|
||||
|
||||
export class NotificationController {
|
||||
constructor(private notifsRepo: NotificationRepository) {}
|
||||
|
||||
getNotifications(
|
||||
fctx: FlowExecCtx,
|
||||
filters: NotificationFilters,
|
||||
pagination: PaginationOptions,
|
||||
) {
|
||||
return this.notifsRepo.getNotifications(fctx, filters, pagination);
|
||||
}
|
||||
|
||||
markAsRead(
|
||||
fctx: FlowExecCtx,
|
||||
notificationIds: number[],
|
||||
userId: string,
|
||||
) {
|
||||
return this.notifsRepo.markAsRead(fctx, notificationIds, userId);
|
||||
}
|
||||
|
||||
markAsUnread(
|
||||
fctx: FlowExecCtx,
|
||||
notificationIds: number[],
|
||||
userId: string,
|
||||
) {
|
||||
return this.notifsRepo.markAsUnread(fctx, notificationIds, userId);
|
||||
}
|
||||
|
||||
archive(
|
||||
fctx: FlowExecCtx,
|
||||
notificationIds: number[],
|
||||
userId: string,
|
||||
) {
|
||||
return this.notifsRepo.archive(fctx, notificationIds, userId);
|
||||
}
|
||||
|
||||
unarchive(
|
||||
fctx: FlowExecCtx,
|
||||
notificationIds: number[],
|
||||
userId: string,
|
||||
) {
|
||||
return this.notifsRepo.unarchive(fctx, notificationIds, userId);
|
||||
}
|
||||
|
||||
deleteNotifications(
|
||||
fctx: FlowExecCtx,
|
||||
notificationIds: number[],
|
||||
userId: string,
|
||||
) {
|
||||
return this.notifsRepo.deleteNotifications(fctx, notificationIds, userId);
|
||||
}
|
||||
|
||||
getUnreadCount(
|
||||
fctx: FlowExecCtx,
|
||||
userId: string,
|
||||
) {
|
||||
return this.notifsRepo.getUnreadCount(fctx, userId);
|
||||
}
|
||||
|
||||
markAllAsRead(
|
||||
fctx: FlowExecCtx,
|
||||
userId: string,
|
||||
) {
|
||||
// Get all unread notification IDs for this user
|
||||
const filters: NotificationFilters = {
|
||||
userId,
|
||||
isRead: false,
|
||||
isArchived: false,
|
||||
};
|
||||
|
||||
// Get a large number to handle bulk operations
|
||||
const pagination: PaginationOptions = { page: 1, pageSize: 1000 };
|
||||
|
||||
return this.notifsRepo
|
||||
.getNotifications(fctx, filters, pagination)
|
||||
.map((paginated) => paginated.data.map((n) => n.id))
|
||||
.andThen((notificationIds) => {
|
||||
if (notificationIds.length === 0) {
|
||||
return okAsync(true);
|
||||
}
|
||||
return this.notifsRepo.markAsRead(fctx, notificationIds, userId);
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
export function getNotificationController(): NotificationController {
|
||||
return new NotificationController(new NotificationRepository(db));
|
||||
}
|
||||
102
packages/logic/domains/notifications/data.ts
Normal file
102
packages/logic/domains/notifications/data.ts
Normal file
@@ -0,0 +1,102 @@
|
||||
import * as v from "valibot";
|
||||
|
||||
// Notification schema — one row of the notifications table with date
// columns hydrated into Date objects.
export const notificationSchema = v.object({
  id: v.pipe(v.number(), v.integer()),
  title: v.string(),
  body: v.string(),
  priority: v.string(),
  type: v.string(),
  category: v.string(),
  isRead: v.boolean(),
  isArchived: v.boolean(),
  actionUrl: v.string(),
  actionType: v.string(),
  actionData: v.string(),
  icon: v.string(),
  userId: v.string(),
  sentAt: v.date(),
  // null until the user reads the notification.
  readAt: v.nullable(v.date()),
  // null presumably means "never expires" — TODO confirm against writers.
  expiresAt: v.nullable(v.date()),
  createdAt: v.date(),
  updatedAt: v.date(),
});

export type Notification = v.InferOutput<typeof notificationSchema>;
export type Notifications = Notification[];

// Filters accepted when listing notifications; userId is the only
// mandatory field, everything else narrows the result set.
export const notificationFiltersSchema = v.object({
  userId: v.string(),
  isRead: v.optional(v.boolean()),
  isArchived: v.optional(v.boolean()),
  type: v.optional(v.string()),
  category: v.optional(v.string()),
  priority: v.optional(v.string()),
  search: v.optional(v.string()),
});
export type NotificationFilters = v.InferOutput<
  typeof notificationFiltersSchema
>;

// Pagination options schema.
// NOTE(review): sortBy/sortOrder are unconstrained strings here while
// clientPaginationStateSchema below uses picklists — the repository falls
// back to createdAt for unknown sortBy values; confirm this asymmetry is
// intentional.
export const paginationOptionsSchema = v.object({
  page: v.pipe(v.number(), v.integer()),
  pageSize: v.pipe(v.number(), v.integer()),
  sortBy: v.optional(v.string()),
  sortOrder: v.optional(v.string()),
});
export type PaginationOptions = v.InferOutput<typeof paginationOptionsSchema>;

// One page of results plus the metadata needed to render pagination UI.
export const paginatedNotificationsSchema = v.object({
  data: v.array(notificationSchema),
  total: v.pipe(v.number(), v.integer()),
  page: v.pipe(v.number(), v.integer()),
  pageSize: v.pipe(v.number(), v.integer()),
  totalPages: v.pipe(v.number(), v.integer()),
});
export type PaginatedNotifications = v.InferOutput<
  typeof paginatedNotificationsSchema
>;

// Request body for the POST /get-notifications endpoint.
export const getNotificationsSchema = v.object({
  filters: notificationFiltersSchema,
  pagination: paginationOptionsSchema,
});
export type GetNotifications = v.InferOutput<typeof getNotificationsSchema>;

// Request body shared by the bulk mutation endpoints (mark-read, archive, …).
export const bulkNotificationIdsSchema = v.object({
  notificationIds: v.array(v.pipe(v.number(), v.integer())),
});
export type BulkNotificationIds = v.InferOutput<
  typeof bulkNotificationIdsSchema
>;

// View Model specific types.
// NOTE(review): structurally identical to notificationFiltersSchema —
// presumably duplicated so the client contract can diverge later; confirm,
// otherwise consider aliasing to avoid drift.
export const clientNotificationFiltersSchema = v.object({
  userId: v.string(),
  isRead: v.optional(v.boolean()),
  isArchived: v.optional(v.boolean()),
  type: v.optional(v.string()),
  category: v.optional(v.string()),
  priority: v.optional(v.string()),
  search: v.optional(v.string()),
});
export type ClientNotificationFilters = v.InferOutput<
  typeof clientNotificationFiltersSchema
>;

// Client-side pagination state; unlike paginationOptionsSchema the sort
// fields are constrained to the values the repository actually supports.
export const clientPaginationStateSchema = v.object({
  page: v.pipe(v.number(), v.integer()),
  pageSize: v.pipe(v.number(), v.integer()),
  total: v.pipe(v.number(), v.integer()),
  totalPages: v.pipe(v.number(), v.integer()),
  sortBy: v.picklist(["createdAt", "sentAt", "readAt", "priority"]),
  sortOrder: v.picklist(["asc", "desc"]),
});
export type ClientPaginationState = v.InferOutput<
  typeof clientPaginationStateSchema
>;
|
||||
78
packages/logic/domains/notifications/errors.ts
Normal file
78
packages/logic/domains/notifications/errors.ts
Normal file
@@ -0,0 +1,78 @@
|
||||
import { FlowExecCtx } from "@/core/flow.execution.context";
|
||||
import { ERROR_CODES, type Err } from "@pkg/result";
|
||||
import { getError } from "@pkg/logger";
|
||||
|
||||
export const notificationErrors = {
|
||||
dbError: (fctx: FlowExecCtx, detail: string): Err =>
|
||||
getError({
|
||||
flowId: fctx.flowId,
|
||||
code: ERROR_CODES.DATABASE_ERROR,
|
||||
message: "Database operation failed",
|
||||
description: "Please try again later",
|
||||
detail,
|
||||
}),
|
||||
|
||||
getNotificationsFailed: (fctx: FlowExecCtx, detail: string): Err =>
|
||||
getError({
|
||||
flowId: fctx.flowId,
|
||||
code: ERROR_CODES.DATABASE_ERROR,
|
||||
message: "Failed to fetch notifications",
|
||||
description: "Please try again later",
|
||||
detail,
|
||||
}),
|
||||
|
||||
markAsReadFailed: (fctx: FlowExecCtx, detail: string): Err =>
|
||||
getError({
|
||||
flowId: fctx.flowId,
|
||||
code: ERROR_CODES.DATABASE_ERROR,
|
||||
message: "Failed to mark notifications as read",
|
||||
description: "Please try again later",
|
||||
detail,
|
||||
}),
|
||||
|
||||
markAsUnreadFailed: (fctx: FlowExecCtx, detail: string): Err =>
|
||||
getError({
|
||||
flowId: fctx.flowId,
|
||||
code: ERROR_CODES.DATABASE_ERROR,
|
||||
message: "Failed to mark notifications as unread",
|
||||
description: "Please try again later",
|
||||
detail,
|
||||
}),
|
||||
|
||||
archiveFailed: (fctx: FlowExecCtx, detail: string): Err =>
|
||||
getError({
|
||||
flowId: fctx.flowId,
|
||||
code: ERROR_CODES.DATABASE_ERROR,
|
||||
message: "Failed to archive notifications",
|
||||
description: "Please try again later",
|
||||
detail,
|
||||
}),
|
||||
|
||||
unarchiveFailed: (fctx: FlowExecCtx, detail: string): Err =>
|
||||
getError({
|
||||
flowId: fctx.flowId,
|
||||
code: ERROR_CODES.DATABASE_ERROR,
|
||||
message: "Failed to unarchive notifications",
|
||||
description: "Please try again later",
|
||||
detail,
|
||||
}),
|
||||
|
||||
deleteNotificationsFailed: (fctx: FlowExecCtx, detail: string): Err =>
|
||||
getError({
|
||||
flowId: fctx.flowId,
|
||||
code: ERROR_CODES.DATABASE_ERROR,
|
||||
message: "Failed to delete notifications",
|
||||
description: "Please try again later",
|
||||
detail,
|
||||
}),
|
||||
|
||||
getUnreadCountFailed: (fctx: FlowExecCtx, detail: string): Err =>
|
||||
getError({
|
||||
flowId: fctx.flowId,
|
||||
code: ERROR_CODES.DATABASE_ERROR,
|
||||
message: "Failed to get unread count",
|
||||
description: "Please try again later",
|
||||
detail,
|
||||
}),
|
||||
};
|
||||
|
||||
384
packages/logic/domains/notifications/repository.ts
Normal file
384
packages/logic/domains/notifications/repository.ts
Normal file
@@ -0,0 +1,384 @@
|
||||
import { and, asc, count, Database, desc, eq, like, or, sql } from "@pkg/db";
|
||||
import { notifications } from "@pkg/db/schema";
|
||||
import { ResultAsync } from "neverthrow";
|
||||
import { FlowExecCtx } from "@core/flow.execution.context";
|
||||
import type {
|
||||
Notification,
|
||||
NotificationFilters,
|
||||
PaginatedNotifications,
|
||||
PaginationOptions,
|
||||
} from "./data";
|
||||
import { type Err } from "@pkg/result";
|
||||
import { notificationErrors } from "./errors";
|
||||
import { logger } from "@pkg/logger";
|
||||
|
||||
export class NotificationRepository {
|
||||
constructor(private db: Database) {}
|
||||
|
||||
getNotifications(
|
||||
fctx: FlowExecCtx,
|
||||
filters: NotificationFilters,
|
||||
pagination: PaginationOptions,
|
||||
): ResultAsync<PaginatedNotifications, Err> {
|
||||
logger.info("Getting notifications with filters", { ...fctx, filters });
|
||||
|
||||
const { userId, isRead, isArchived, type, category, priority, search } =
|
||||
filters;
|
||||
const {
|
||||
page,
|
||||
pageSize,
|
||||
sortBy = "createdAt",
|
||||
sortOrder = "desc",
|
||||
} = pagination;
|
||||
|
||||
// Build WHERE conditions
|
||||
const conditions = [eq(notifications.userId, userId)];
|
||||
|
||||
if (isRead !== undefined) {
|
||||
conditions.push(eq(notifications.isRead, isRead));
|
||||
}
|
||||
|
||||
if (isArchived !== undefined) {
|
||||
conditions.push(eq(notifications.isArchived, isArchived));
|
||||
}
|
||||
|
||||
if (type) {
|
||||
conditions.push(eq(notifications.type, type));
|
||||
}
|
||||
|
||||
if (category) {
|
||||
conditions.push(eq(notifications.category, category));
|
||||
}
|
||||
|
||||
if (priority) {
|
||||
conditions.push(eq(notifications.priority, priority));
|
||||
}
|
||||
|
||||
if (search) {
|
||||
conditions.push(
|
||||
or(
|
||||
like(notifications.title, `%${search}%`),
|
||||
like(notifications.body, `%${search}%`),
|
||||
)!,
|
||||
);
|
||||
}
|
||||
|
||||
const whereClause = and(...conditions);
|
||||
|
||||
return ResultAsync.fromPromise(
|
||||
this.db.select({ count: count() }).from(notifications).where(whereClause),
|
||||
(error) => {
|
||||
logger.error("Failed to get notifications count", {
|
||||
...fctx,
|
||||
error,
|
||||
});
|
||||
return notificationErrors.getNotificationsFailed(
|
||||
fctx,
|
||||
error instanceof Error ? error.message : String(error),
|
||||
);
|
||||
},
|
||||
).andThen((totalResult) => {
|
||||
const total = totalResult[0]?.count || 0;
|
||||
const offset = (page - 1) * pageSize;
|
||||
|
||||
// Map sortBy to proper column
|
||||
const getOrderColumn = (sortBy: string) => {
|
||||
switch (sortBy) {
|
||||
case "createdAt":
|
||||
return notifications.createdAt;
|
||||
case "sentAt":
|
||||
return notifications.sentAt;
|
||||
case "readAt":
|
||||
return notifications.readAt;
|
||||
case "priority":
|
||||
return notifications.priority;
|
||||
default:
|
||||
return notifications.createdAt;
|
||||
}
|
||||
};
|
||||
|
||||
const orderColumn = getOrderColumn(sortBy);
|
||||
const orderFunc = sortOrder === "asc" ? asc : desc;
|
||||
|
||||
return ResultAsync.fromPromise(
|
||||
this.db
|
||||
.select()
|
||||
.from(notifications)
|
||||
.where(whereClause)
|
||||
.orderBy(orderFunc(orderColumn))
|
||||
.limit(pageSize)
|
||||
.offset(offset),
|
||||
(error) => {
|
||||
logger.error("Failed to get notifications data", {
|
||||
...fctx,
|
||||
error,
|
||||
});
|
||||
return notificationErrors.getNotificationsFailed(
|
||||
fctx,
|
||||
error instanceof Error ? error.message : String(error),
|
||||
);
|
||||
},
|
||||
).map((data) => {
|
||||
const totalPages = Math.ceil(total / pageSize);
|
||||
logger.info("Retrieved notifications", {
|
||||
...fctx,
|
||||
count: data.length,
|
||||
page,
|
||||
totalPages,
|
||||
});
|
||||
|
||||
return {
|
||||
data: data as Notification[],
|
||||
total,
|
||||
page,
|
||||
pageSize,
|
||||
totalPages,
|
||||
};
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
markAsRead(
|
||||
fctx: FlowExecCtx,
|
||||
notificationIds: number[],
|
||||
userId: string,
|
||||
): ResultAsync<boolean, Err> {
|
||||
logger.info("Marking notifications as read", {
|
||||
...fctx,
|
||||
notificationIds,
|
||||
userId,
|
||||
});
|
||||
|
||||
return ResultAsync.fromPromise(
|
||||
this.db
|
||||
.update(notifications)
|
||||
.set({
|
||||
isRead: true,
|
||||
readAt: new Date(),
|
||||
updatedAt: new Date(),
|
||||
})
|
||||
.where(
|
||||
and(
|
||||
eq(notifications.userId, userId),
|
||||
sql`${notifications.id} = ANY(${notificationIds})`,
|
||||
),
|
||||
),
|
||||
(error) => {
|
||||
logger.error("Failed to mark notifications as read", {
|
||||
...fctx,
|
||||
error,
|
||||
});
|
||||
return notificationErrors.markAsReadFailed(
|
||||
fctx,
|
||||
error instanceof Error ? error.message : String(error),
|
||||
);
|
||||
},
|
||||
).map(() => {
|
||||
logger.info("Notifications marked as read successfully", {
|
||||
...fctx,
|
||||
notificationIds,
|
||||
});
|
||||
return true;
|
||||
});
|
||||
}
|
||||
|
||||
markAsUnread(
|
||||
fctx: FlowExecCtx,
|
||||
notificationIds: number[],
|
||||
userId: string,
|
||||
): ResultAsync<boolean, Err> {
|
||||
logger.info("Marking notifications as unread", {
|
||||
...fctx,
|
||||
notificationIds,
|
||||
userId,
|
||||
});
|
||||
|
||||
return ResultAsync.fromPromise(
|
||||
this.db
|
||||
.update(notifications)
|
||||
.set({
|
||||
isRead: false,
|
||||
readAt: null,
|
||||
updatedAt: new Date(),
|
||||
})
|
||||
.where(
|
||||
and(
|
||||
eq(notifications.userId, userId),
|
||||
sql`${notifications.id} = ANY(${notificationIds})`,
|
||||
),
|
||||
),
|
||||
(error) => {
|
||||
logger.error("Failed to mark notifications as unread", {
|
||||
...fctx,
|
||||
error,
|
||||
});
|
||||
return notificationErrors.markAsUnreadFailed(
|
||||
fctx,
|
||||
error instanceof Error ? error.message : String(error),
|
||||
);
|
||||
},
|
||||
).map(() => {
|
||||
logger.info("Notifications marked as unread successfully", {
|
||||
...fctx,
|
||||
notificationIds,
|
||||
});
|
||||
return true;
|
||||
});
|
||||
}
|
||||
|
||||
archive(
|
||||
fctx: FlowExecCtx,
|
||||
notificationIds: number[],
|
||||
userId: string,
|
||||
): ResultAsync<boolean, Err> {
|
||||
logger.info("Archiving notifications", {
|
||||
...fctx,
|
||||
notificationIds,
|
||||
userId,
|
||||
});
|
||||
|
||||
return ResultAsync.fromPromise(
|
||||
this.db
|
||||
.update(notifications)
|
||||
.set({
|
||||
isArchived: true,
|
||||
updatedAt: new Date(),
|
||||
})
|
||||
.where(
|
||||
and(
|
||||
eq(notifications.userId, userId),
|
||||
sql`${notifications.id} = ANY(${notificationIds})`,
|
||||
),
|
||||
),
|
||||
(error) => {
|
||||
logger.error("Failed to archive notifications", {
|
||||
...fctx,
|
||||
error,
|
||||
});
|
||||
return notificationErrors.archiveFailed(
|
||||
fctx,
|
||||
error instanceof Error ? error.message : String(error),
|
||||
);
|
||||
},
|
||||
).map(() => {
|
||||
logger.info("Notifications archived successfully", {
|
||||
...fctx,
|
||||
notificationIds,
|
||||
});
|
||||
return true;
|
||||
});
|
||||
}
|
||||
|
||||
unarchive(
|
||||
fctx: FlowExecCtx,
|
||||
notificationIds: number[],
|
||||
userId: string,
|
||||
): ResultAsync<boolean, Err> {
|
||||
logger.info("Unarchiving notifications", {
|
||||
...fctx,
|
||||
notificationIds,
|
||||
userId,
|
||||
});
|
||||
|
||||
return ResultAsync.fromPromise(
|
||||
this.db
|
||||
.update(notifications)
|
||||
.set({
|
||||
isArchived: false,
|
||||
updatedAt: new Date(),
|
||||
})
|
||||
.where(
|
||||
and(
|
||||
eq(notifications.userId, userId),
|
||||
sql`${notifications.id} = ANY(${notificationIds})`,
|
||||
),
|
||||
),
|
||||
(error) => {
|
||||
logger.error("Failed to unarchive notifications", {
|
||||
...fctx,
|
||||
error,
|
||||
});
|
||||
return notificationErrors.unarchiveFailed(
|
||||
fctx,
|
||||
error instanceof Error ? error.message : String(error),
|
||||
);
|
||||
},
|
||||
).map(() => {
|
||||
logger.info("Notifications unarchived successfully", {
|
||||
...fctx,
|
||||
notificationIds,
|
||||
});
|
||||
return true;
|
||||
});
|
||||
}
|
||||
|
||||
deleteNotifications(
|
||||
fctx: FlowExecCtx,
|
||||
notificationIds: number[],
|
||||
userId: string,
|
||||
): ResultAsync<boolean, Err> {
|
||||
logger.info("Deleting notifications", {
|
||||
...fctx,
|
||||
notificationIds,
|
||||
userId,
|
||||
});
|
||||
|
||||
return ResultAsync.fromPromise(
|
||||
this.db
|
||||
.delete(notifications)
|
||||
.where(
|
||||
and(
|
||||
eq(notifications.userId, userId),
|
||||
sql`${notifications.id} = ANY(${notificationIds})`,
|
||||
),
|
||||
),
|
||||
(error) => {
|
||||
logger.error("Failed to delete notifications", {
|
||||
...fctx,
|
||||
error,
|
||||
});
|
||||
return notificationErrors.deleteNotificationsFailed(
|
||||
fctx,
|
||||
error instanceof Error ? error.message : String(error),
|
||||
);
|
||||
},
|
||||
).map(() => {
|
||||
logger.info("Notifications deleted successfully", {
|
||||
...fctx,
|
||||
notificationIds,
|
||||
});
|
||||
return true;
|
||||
});
|
||||
}
|
||||
|
||||
getUnreadCount(
|
||||
fctx: FlowExecCtx,
|
||||
userId: string,
|
||||
): ResultAsync<number, Err> {
|
||||
logger.info("Getting unread count", { ...fctx, userId });
|
||||
|
||||
return ResultAsync.fromPromise(
|
||||
this.db
|
||||
.select({ count: count() })
|
||||
.from(notifications)
|
||||
.where(
|
||||
and(
|
||||
eq(notifications.userId, userId),
|
||||
eq(notifications.isRead, false),
|
||||
eq(notifications.isArchived, false),
|
||||
),
|
||||
),
|
||||
(error) => {
|
||||
logger.error("Failed to get unread count", { ...fctx, error });
|
||||
return notificationErrors.getUnreadCountFailed(
|
||||
fctx,
|
||||
error instanceof Error ? error.message : String(error),
|
||||
);
|
||||
},
|
||||
).map((result) => {
|
||||
const count = result[0]?.count || 0;
|
||||
logger.info("Retrieved unread count", { ...fctx, count });
|
||||
return count;
|
||||
});
|
||||
}
|
||||
}
|
||||
160
packages/logic/domains/notifications/router.ts
Normal file
160
packages/logic/domains/notifications/router.ts
Normal file
@@ -0,0 +1,160 @@
|
||||
import { bulkNotificationIdsSchema, getNotificationsSchema } from "./data";
|
||||
import { getNotificationController } from "./controller";
|
||||
import { sValidator } from "@hono/standard-validator";
|
||||
import { HonoContext } from "@core/hono.helpers";
|
||||
import { Hono } from "hono";
|
||||
|
||||
// Single controller instance shared by every route in this module.
const nc = getNotificationController();

// HTTP surface for notifications. Every handler returns the same envelope:
// { data, error: null } with 200 on success, { data: null, error } with 400
// on failure, mirroring the neverthrow Result from the controller.
export const notificationsRouter = new Hono<HonoContext>()
  // GET / — list notifications; filters and pagination come from the query
  // string. (The POST /get-notifications route below accepts the same
  // parameters as a validated JSON body instead.)
  .get("/", async (c) => {
    const fctx = c.env.locals.fCtx;
    const userId = c.env.locals.user.id;
    const url = new URL(c.req.url);

    // Tri-state booleans: absent/empty param → undefined (no filter).
    // NOTE(review): each param is read twice from searchParams — harmless
    // but could be hoisted into a local.
    const filters = {
      userId,
      isRead: url.searchParams.get("isRead")
        ? url.searchParams.get("isRead") === "true"
        : undefined,
      isArchived: url.searchParams.get("isArchived")
        ? url.searchParams.get("isArchived") === "true"
        : undefined,
      type: url.searchParams.get("type") || undefined,
      category: url.searchParams.get("category") || undefined,
      priority: url.searchParams.get("priority") || undefined,
      search: url.searchParams.get("search") || undefined,
    };

    // NOTE(review): parseInt on a malformed value yields NaN, which flows
    // into LIMIT/OFFSET and fails at the database — consider a guard.
    const pagination = {
      page: parseInt(url.searchParams.get("page") || "1"),
      pageSize: parseInt(url.searchParams.get("pageSize") || "20"),
      sortBy: url.searchParams.get("sortBy") || "createdAt",
      sortOrder: url.searchParams.get("sortOrder") || "desc",
    };

    const res = await nc.getNotifications(fctx, filters, pagination);
    return c.json(
      res.isOk()
        ? { data: res.value, error: null }
        : { data: null, error: res.error },
      res.isOk() ? 200 : 400,
    );
  })
  // POST /get-notifications — same listing, but filters/pagination arrive as
  // a validated JSON body.
  .post(
    "/get-notifications",
    sValidator("json", getNotificationsSchema),
    async (c) => {
      const fctx = c.env.locals.fCtx;
      const data = c.req.valid("json");
      const res = await nc.getNotifications(fctx, data.filters, data.pagination);
      return c.json(
        res.isOk()
          ? { data: res.value, error: null }
          : { data: null, error: res.error },
        res.isOk() ? 200 : 400,
      );
    },
  )
  // PUT /mark-read — bulk mark-as-read for the authenticated user's rows.
  .put(
    "/mark-read",
    sValidator("json", bulkNotificationIdsSchema),
    async (c) => {
      const fctx = c.env.locals.fCtx;
      const data = c.req.valid("json");
      const userId = c.env.locals.user.id;
      // Spread copies the validated array before handing it to the controller.
      const res = await nc.markAsRead(fctx, [...data.notificationIds], userId);
      return c.json(
        res.isOk()
          ? { data: res.value, error: null }
          : { data: null, error: res.error },
        res.isOk() ? 200 : 400,
      );
    },
  )
  // PUT /mark-unread — bulk mark-as-unread.
  .put(
    "/mark-unread",
    sValidator("json", bulkNotificationIdsSchema),
    async (c) => {
      const fctx = c.env.locals.fCtx;
      const data = c.req.valid("json");
      const userId = c.env.locals.user.id;
      const res = await nc.markAsUnread(fctx, [...data.notificationIds], userId);
      return c.json(
        res.isOk()
          ? { data: res.value, error: null }
          : { data: null, error: res.error },
        res.isOk() ? 200 : 400,
      );
    },
  )
  // PUT /archive — bulk archive.
  .put(
    "/archive",
    sValidator("json", bulkNotificationIdsSchema),
    async (c) => {
      const fctx = c.env.locals.fCtx;
      const data = c.req.valid("json");
      const userId = c.env.locals.user.id;
      const res = await nc.archive(fctx, [...data.notificationIds], userId);
      return c.json(
        res.isOk()
          ? { data: res.value, error: null }
          : { data: null, error: res.error },
        res.isOk() ? 200 : 400,
      );
    },
  )
  // PUT /unarchive — bulk unarchive.
  .put(
    "/unarchive",
    sValidator("json", bulkNotificationIdsSchema),
    async (c) => {
      const fctx = c.env.locals.fCtx;
      const data = c.req.valid("json");
      const userId = c.env.locals.user.id;
      const res = await nc.unarchive(fctx, [...data.notificationIds], userId);
      return c.json(
        res.isOk()
          ? { data: res.value, error: null }
          : { data: null, error: res.error },
        res.isOk() ? 200 : 400,
      );
    },
  )
  // DELETE /delete — bulk delete.
  // NOTE(review): DELETE with a JSON body is unusual; some proxies drop
  // request bodies on DELETE — confirm the deployment path supports it.
  .delete(
    "/delete",
    sValidator("json", bulkNotificationIdsSchema),
    async (c) => {
      const fctx = c.env.locals.fCtx;
      const data = c.req.valid("json");
      const userId = c.env.locals.user.id;
      const res = await nc.deleteNotifications(fctx, [...data.notificationIds], userId);
      return c.json(
        res.isOk()
          ? { data: res.value, error: null }
          : { data: null, error: res.error },
        res.isOk() ? 200 : 400,
      );
    },
  )
  // PUT /mark-all-read — marks every unread notification of the user.
  .put("/mark-all-read", async (c) => {
    const fctx = c.env.locals.fCtx;
    const userId = c.env.locals.user.id;
    const res = await nc.markAllAsRead(fctx, userId);
    return c.json(
      res.isOk()
        ? { data: res.value, error: null }
        : { data: null, error: res.error },
      res.isOk() ? 200 : 400,
    );
  })
  // GET /unread-count — unread, non-archived badge count for the user.
  .get("/unread-count", async (c) => {
    const fctx = c.env.locals.fCtx;
    const userId = c.env.locals.user.id;
    const res = await nc.getUnreadCount(fctx, userId);
    return c.json(
      res.isOk()
        ? { data: res.value, error: null }
        : { data: null, error: res.error },
      res.isOk() ? 200 : 400,
    );
  });
|
||||
213
packages/logic/domains/user/account.repository.ts
Normal file
213
packages/logic/domains/user/account.repository.ts
Normal file
@@ -0,0 +1,213 @@
|
||||
import { FlowExecCtx } from "@/core/flow.execution.context";
|
||||
import { ERROR_CODES, type Err } from "@pkg/result";
|
||||
import { getError, logger } from "@pkg/logger";
|
||||
import { auth } from "../auth/config.base";
|
||||
import { account } from "@pkg/db/schema";
|
||||
import { ResultAsync } from "neverthrow";
|
||||
import { Database, eq } from "@pkg/db";
|
||||
import { nanoid } from "nanoid";
|
||||
|
||||
export class AccountRepository {
|
||||
constructor(private db: Database) {}
|
||||
|
||||
private dbError(fctx: FlowExecCtx, detail: string): Err {
|
||||
return getError({
|
||||
flowId: fctx.flowId,
|
||||
code: ERROR_CODES.DATABASE_ERROR,
|
||||
message: "Database operation failed",
|
||||
description: "Please try again later",
|
||||
detail,
|
||||
});
|
||||
}
|
||||
|
||||
private accountNotFound(fctx: FlowExecCtx): Err {
|
||||
return getError({
|
||||
flowId: fctx.flowId,
|
||||
code: ERROR_CODES.NOT_FOUND,
|
||||
message: "Account not found",
|
||||
description: "Please try again later",
|
||||
detail: "Account not found for user",
|
||||
});
|
||||
}
|
||||
|
||||
ensureAccountExists(
|
||||
fctx: FlowExecCtx,
|
||||
userId: string,
|
||||
): ResultAsync<boolean, Err> {
|
||||
logger.info("Checking if account exists for user", {
|
||||
...fctx,
|
||||
userId,
|
||||
});
|
||||
|
||||
return ResultAsync.fromPromise(
|
||||
this.db.query.account.findFirst({
|
||||
where: eq(account.userId, userId),
|
||||
}),
|
||||
(error) => {
|
||||
logger.error("Failed to check account existence", {
|
||||
...fctx,
|
||||
error,
|
||||
});
|
||||
return this.dbError(
|
||||
fctx,
|
||||
error instanceof Error ? error.message : String(error),
|
||||
);
|
||||
},
|
||||
).andThen((existingAccount) => {
|
||||
if (existingAccount) {
|
||||
logger.info("Account already exists for user", {
|
||||
...fctx,
|
||||
userId,
|
||||
});
|
||||
return ResultAsync.fromSafePromise(Promise.resolve(true));
|
||||
}
|
||||
|
||||
logger.info(
|
||||
"Account does not exist, creating new account for user",
|
||||
{
|
||||
...fctx,
|
||||
userId,
|
||||
},
|
||||
);
|
||||
|
||||
return ResultAsync.fromPromise(
|
||||
auth.$context.then((ctx) => ctx.password.hash(nanoid())),
|
||||
(error) => {
|
||||
logger.error("Failed to hash password", {
|
||||
...fctx,
|
||||
error,
|
||||
});
|
||||
return this.dbError(
|
||||
fctx,
|
||||
error instanceof Error ? error.message : String(error),
|
||||
);
|
||||
},
|
||||
).andThen((password) => {
|
||||
const aid = nanoid();
|
||||
|
||||
return ResultAsync.fromPromise(
|
||||
this.db
|
||||
.insert(account)
|
||||
.values({
|
||||
id: aid,
|
||||
accountId: userId,
|
||||
providerId: "credential",
|
||||
userId: userId,
|
||||
password,
|
||||
createdAt: new Date(),
|
||||
updatedAt: new Date(),
|
||||
})
|
||||
.execute(),
|
||||
(error) => {
|
||||
logger.error("Failed to create account", {
|
||||
...fctx,
|
||||
error,
|
||||
});
|
||||
return this.dbError(
|
||||
fctx,
|
||||
error instanceof Error
|
||||
? error.message
|
||||
: String(error),
|
||||
);
|
||||
},
|
||||
).map(() => {
|
||||
logger.info("Account created successfully for user", {
|
||||
...fctx,
|
||||
userId,
|
||||
});
|
||||
return false;
|
||||
});
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
rotatePassword(
|
||||
fctx: FlowExecCtx,
|
||||
userId: string,
|
||||
password: string,
|
||||
): ResultAsync<string, Err> {
|
||||
logger.info("Starting password rotation for user", {
|
||||
...fctx,
|
||||
userId,
|
||||
});
|
||||
|
||||
return ResultAsync.fromPromise(
|
||||
this.db.query.account.findFirst({
|
||||
where: eq(account.userId, userId),
|
||||
}),
|
||||
(error) => {
|
||||
logger.error(
|
||||
"Failed to check account existence for password rotation",
|
||||
{
|
||||
...fctx,
|
||||
error,
|
||||
},
|
||||
);
|
||||
return this.dbError(
|
||||
fctx,
|
||||
error instanceof Error ? error.message : String(error),
|
||||
);
|
||||
},
|
||||
).andThen((existingAccount) => {
|
||||
if (!existingAccount) {
|
||||
logger.error("Account not found for user", {
|
||||
...fctx,
|
||||
userId,
|
||||
});
|
||||
return ResultAsync.fromSafePromise(
|
||||
Promise.resolve(this.accountNotFound(fctx)),
|
||||
).andThen((err) =>
|
||||
ResultAsync.fromSafePromise(Promise.reject(err)),
|
||||
);
|
||||
}
|
||||
|
||||
return ResultAsync.fromPromise(
|
||||
auth.$context.then((ctx) => ctx.password.hash(password)),
|
||||
(error) => {
|
||||
logger.error("Failed to hash password for rotation", {
|
||||
...fctx,
|
||||
error,
|
||||
});
|
||||
return this.dbError(
|
||||
fctx,
|
||||
error instanceof Error ? error.message : String(error),
|
||||
);
|
||||
},
|
||||
).andThen((hashed) => {
|
||||
logger.info("Updating user's password in database", {
|
||||
...fctx,
|
||||
});
|
||||
|
||||
return ResultAsync.fromPromise(
|
||||
this.db
|
||||
.update(account)
|
||||
.set({ password: hashed })
|
||||
.where(eq(account.userId, userId))
|
||||
.returning()
|
||||
.execute(),
|
||||
(error) => {
|
||||
logger.error("Failed to update password", {
|
||||
...fctx,
|
||||
error,
|
||||
});
|
||||
return this.dbError(
|
||||
fctx,
|
||||
error instanceof Error
|
||||
? error.message
|
||||
: String(error),
|
||||
);
|
||||
},
|
||||
).map((result) => {
|
||||
logger.info("User's password updated successfully", {
|
||||
...fctx,
|
||||
});
|
||||
logger.debug("Password rotation result", {
|
||||
...fctx,
|
||||
result,
|
||||
});
|
||||
return password;
|
||||
});
|
||||
});
|
||||
});
|
||||
}
|
||||
}
|
||||
55
packages/logic/domains/user/controller.ts
Normal file
55
packages/logic/domains/user/controller.ts
Normal file
@@ -0,0 +1,55 @@
|
||||
import { FlowExecCtx } from "@/core/flow.execution.context";
|
||||
import { AccountRepository } from "./account.repository";
|
||||
import { UserRepository } from "./repository";
|
||||
import { db } from "@pkg/db";
|
||||
|
||||
export class UserController {
|
||||
constructor(
|
||||
private userRepository: UserRepository,
|
||||
private accountRepo: AccountRepository,
|
||||
) {}
|
||||
|
||||
getUserInfo(fctx: FlowExecCtx, userId: string) {
|
||||
return this.userRepository.getUserInfo(fctx, userId);
|
||||
}
|
||||
|
||||
ensureAccountExists(fctx: FlowExecCtx, userId: string) {
|
||||
return this.accountRepo.ensureAccountExists(fctx, userId);
|
||||
}
|
||||
|
||||
isUsernameAvailable(fctx: FlowExecCtx, username: string) {
|
||||
return this.userRepository.isUsernameAvailable(fctx, username);
|
||||
}
|
||||
|
||||
updateLastVerified2FaAtToNow(fctx: FlowExecCtx, userId: string) {
|
||||
return this.userRepository.updateLastVerified2FaAtToNow(fctx, userId);
|
||||
}
|
||||
|
||||
banUser(
|
||||
fctx: FlowExecCtx,
|
||||
userId: string,
|
||||
reason: string,
|
||||
banExpiresAt: Date,
|
||||
) {
|
||||
return this.userRepository.banUser(fctx, userId, reason, banExpiresAt);
|
||||
}
|
||||
|
||||
isUserBanned(fctx: FlowExecCtx, userId: string) {
|
||||
return this.userRepository.isUserBanned(fctx, userId);
|
||||
}
|
||||
|
||||
getBanInfo(fctx: FlowExecCtx, userId: string) {
|
||||
return this.userRepository.getBanInfo(fctx, userId);
|
||||
}
|
||||
|
||||
rotatePassword(fctx: FlowExecCtx, userId: string, password: string) {
|
||||
return this.accountRepo.rotatePassword(fctx, userId, password);
|
||||
}
|
||||
}
|
||||
|
||||
export function getUserController(): UserController {
|
||||
return new UserController(
|
||||
new UserRepository(db),
|
||||
new AccountRepository(db),
|
||||
);
|
||||
}
|
||||
159
packages/logic/domains/user/data.ts
Normal file
159
packages/logic/domains/user/data.ts
Normal file
@@ -0,0 +1,159 @@
|
||||
import { Session } from "better-auth";
|
||||
import * as v from "valibot";
|
||||
|
||||
export type { Session } from "better-auth";

// Session extended with a flag marking the caller's own (current) session.
export type ModifiedSession = Session & { isCurrent?: boolean };

// User role enum (kept alongside the picklist below; values must stay in sync)
export enum UserRoleMap {
  user = "user",
  admin = "admin",
}

// User role schema
export const userRoleSchema = v.picklist(["user", "admin"]);
export type UserRole = v.InferOutput<typeof userRoleSchema>;

// User schema — mirrors the auth `user` table row shape
export const userSchema = v.object({
  id: v.string(),
  name: v.string(),
  email: v.string(),
  emailVerified: v.boolean(),
  image: v.optional(v.string()),
  createdAt: v.date(),
  updatedAt: v.date(),
  username: v.optional(v.string()),
  displayUsername: v.optional(v.string()),
  // NOTE(review): role is a bare string here while createUserFormSchema
  // constrains it to userRoleSchema — confirm this looseness is intended.
  role: v.optional(v.string()),
  banned: v.optional(v.boolean()),
  banReason: v.optional(v.string()),
  banExpires: v.optional(v.date()),
  onboardingDone: v.optional(v.boolean()),
  last2FAVerifiedAt: v.optional(v.date()),
  parentId: v.optional(v.string()),
});
export type User = v.InferOutput<typeof userSchema>;

// Account schema — mirrors the auth `account` table row shape
export const accountSchema = v.object({
  id: v.string(),
  accountId: v.string(),
  providerId: v.string(),
  userId: v.string(),
  accessToken: v.string(),
  refreshToken: v.string(),
  idToken: v.string(),
  accessTokenExpiresAt: v.date(),
  refreshTokenExpiresAt: v.date(),
  scope: v.string(),
  password: v.string(),
  createdAt: v.date(),
  updatedAt: v.date(),
});
export type Account = v.InferOutput<typeof accountSchema>;

// Request body for PUT /ensure-account-exists
export const ensureAccountExistsSchema = v.object({
  userId: v.string(),
});
export type EnsureAccountExists = v.InferOutput<
  typeof ensureAccountExistsSchema
>;

// Ban info schema (response shape of getBanInfo)
export const banInfoSchema = v.object({
  banned: v.boolean(),
  reason: v.optional(v.string()),
  expires: v.optional(v.date()),
});
export type BanInfo = v.InferOutput<typeof banInfoSchema>;

// Request body for POST /ban
// NOTE(review): JSON bodies deliver strings, but v.date() only accepts
// Date instances — an ISO timestamp sent over HTTP will fail validation.
// Confirm how /ban callers serialize banExpiresAt.
export const banUserSchema = v.object({
  userId: v.string(),
  reason: v.string(),
  banExpiresAt: v.date(),
});
export type BanUser = v.InferOutput<typeof banUserSchema>;

// Request body for POST /check-username
export const checkUsernameSchema = v.object({
  username: v.string(),
});
export type CheckUsername = v.InferOutput<typeof checkUsernameSchema>;

// Request body for PUT /rotate-password
export const rotatePasswordSchema = v.object({
  userId: v.string(),
  password: v.string(),
});
export type RotatePassword = v.InferOutput<typeof rotatePasswordSchema>;

// View Model specific types

// Search and filter types
export const searchFieldSchema = v.picklist(["email", "name", "username"]);
export type SearchField = v.InferOutput<typeof searchFieldSchema>;

export const searchOperatorSchema = v.picklist([
  "contains",
  "starts_with",
  "ends_with",
]);
export type SearchOperator = v.InferOutput<typeof searchOperatorSchema>;

export const filterOperatorSchema = v.picklist([
  "eq",
  "ne",
  "lt",
  "lte",
  "gt",
  "gte",
]);
export type FilterOperator = v.InferOutput<typeof filterOperatorSchema>;

export const sortDirectionSchema = v.picklist(["asc", "desc"]);
export type SortDirection = v.InferOutput<typeof sortDirectionSchema>;

// Users query state — combined search/pagination/sort/filter parameters
// for admin user listings
export const usersQueryStateSchema = v.object({
  // searching
  searchValue: v.optional(v.string()),
  searchField: v.optional(searchFieldSchema),
  searchOperator: v.optional(searchOperatorSchema),

  // pagination (both required; integers)
  limit: v.pipe(v.number(), v.integer()),
  offset: v.pipe(v.number(), v.integer()),

  // sorting
  sortBy: v.optional(v.string()),
  sortDirection: v.optional(sortDirectionSchema),

  // filtering (single field/value/operator triple)
  filterField: v.optional(v.string()),
  filterValue: v.optional(v.union([v.string(), v.number(), v.boolean()])),
  filterOperator: v.optional(filterOperatorSchema),
});
export type UsersQueryState = v.InferOutput<typeof usersQueryStateSchema>;

// UI View Model types

// Preset choices for the ban-expiry selector in the admin UI
export const banExpiryModeSchema = v.picklist([
  "never",
  "1d",
  "7d",
  "30d",
  "custom",
]);
export type BanExpiryMode = v.InferOutput<typeof banExpiryModeSchema>;

// Admin "create user" form payload; role may be a single role or a list
export const createUserFormSchema = v.object({
  email: v.string(),
  password: v.string(),
  name: v.string(),
  role: v.union([userRoleSchema, v.array(userRoleSchema)]),
});
export type CreateUserForm = v.InferOutput<typeof createUserFormSchema>;
|
||||
77
packages/logic/domains/user/errors.ts
Normal file
77
packages/logic/domains/user/errors.ts
Normal file
@@ -0,0 +1,77 @@
|
||||
import { FlowExecCtx } from "@/core/flow.execution.context";
|
||||
import { ERROR_CODES, type Err } from "@pkg/result";
|
||||
import { getError } from "@pkg/logger";
|
||||
|
||||
export const userErrors = {
|
||||
dbError: (fctx: FlowExecCtx, detail: string): Err =>
|
||||
getError({
|
||||
flowId: fctx.flowId,
|
||||
code: ERROR_CODES.DATABASE_ERROR,
|
||||
message: "Database operation failed",
|
||||
description: "Please try again later",
|
||||
detail,
|
||||
}),
|
||||
|
||||
userNotFound: (fctx: FlowExecCtx): Err =>
|
||||
getError({
|
||||
flowId: fctx.flowId,
|
||||
code: ERROR_CODES.NOT_FOUND,
|
||||
message: "User not found",
|
||||
description: "Try with a different user id",
|
||||
detail: "User not found in database",
|
||||
}),
|
||||
|
||||
usernameCheckFailed: (fctx: FlowExecCtx, detail: string): Err =>
|
||||
getError({
|
||||
flowId: fctx.flowId,
|
||||
code: ERROR_CODES.DATABASE_ERROR,
|
||||
message: "An error occurred while checking username availability",
|
||||
description: "Try again later",
|
||||
detail,
|
||||
}),
|
||||
|
||||
banOperationFailed: (fctx: FlowExecCtx, detail: string): Err =>
|
||||
getError({
|
||||
flowId: fctx.flowId,
|
||||
code: ERROR_CODES.DATABASE_ERROR,
|
||||
message: "Failed to perform ban operation",
|
||||
description: "Please try again later",
|
||||
detail,
|
||||
}),
|
||||
|
||||
unbanFailed: (fctx: FlowExecCtx, detail: string): Err =>
|
||||
getError({
|
||||
flowId: fctx.flowId,
|
||||
code: ERROR_CODES.DATABASE_ERROR,
|
||||
message: "Failed to unban user",
|
||||
description: "Please try again later",
|
||||
detail,
|
||||
}),
|
||||
|
||||
updateFailed: (fctx: FlowExecCtx, detail: string): Err =>
|
||||
getError({
|
||||
flowId: fctx.flowId,
|
||||
code: ERROR_CODES.DATABASE_ERROR,
|
||||
message: "Failed to update user",
|
||||
description: "Please try again later",
|
||||
detail,
|
||||
}),
|
||||
|
||||
getUserInfoFailed: (fctx: FlowExecCtx, detail: string): Err =>
|
||||
getError({
|
||||
flowId: fctx.flowId,
|
||||
code: ERROR_CODES.DATABASE_ERROR,
|
||||
message: "An error occurred while getting user info",
|
||||
description: "Try again later",
|
||||
detail,
|
||||
}),
|
||||
|
||||
getBanInfoFailed: (fctx: FlowExecCtx, detail: string): Err =>
|
||||
getError({
|
||||
flowId: fctx.flowId,
|
||||
code: ERROR_CODES.DATABASE_ERROR,
|
||||
message: "An error occurred while getting ban info",
|
||||
description: "Try again later",
|
||||
detail,
|
||||
}),
|
||||
};
|
||||
289
packages/logic/domains/user/repository.ts
Normal file
289
packages/logic/domains/user/repository.ts
Normal file
@@ -0,0 +1,289 @@
|
||||
import { ResultAsync, errAsync, okAsync } from "neverthrow";
|
||||
import { FlowExecCtx } from "@core/flow.execution.context";
|
||||
import { type Err } from "@pkg/result";
|
||||
import { Database, eq } from "@pkg/db";
|
||||
import { BanInfo, User } from "./data";
|
||||
import { user } from "@pkg/db/schema";
|
||||
import { userErrors } from "./errors";
|
||||
import { logger } from "@pkg/logger";
|
||||
|
||||
export class UserRepository {
|
||||
constructor(private db: Database) {}
|
||||
|
||||
getUserInfo(fctx: FlowExecCtx, userId: string): ResultAsync<User, Err> {
|
||||
logger.info("Getting user info for user", {
|
||||
flowId: fctx.flowId,
|
||||
userId,
|
||||
});
|
||||
|
||||
return ResultAsync.fromPromise(
|
||||
this.db.query.user.findFirst({
|
||||
where: eq(user.id, userId),
|
||||
}),
|
||||
(error) => {
|
||||
logger.error("Failed to get user info", {
|
||||
flowId: fctx.flowId,
|
||||
error,
|
||||
});
|
||||
return userErrors.getUserInfoFailed(
|
||||
fctx,
|
||||
error instanceof Error ? error.message : String(error),
|
||||
);
|
||||
},
|
||||
).andThen((userData) => {
|
||||
if (!userData) {
|
||||
logger.error("User not found with id", {
|
||||
flowId: fctx.flowId,
|
||||
userId,
|
||||
});
|
||||
return errAsync(userErrors.userNotFound(fctx));
|
||||
}
|
||||
|
||||
logger.info("User info retrieved successfully for user", {
|
||||
flowId: fctx.flowId,
|
||||
userId,
|
||||
});
|
||||
return okAsync(userData as User);
|
||||
});
|
||||
}
|
||||
|
||||
updateLastVerified2FaAtToNow(
|
||||
fctx: FlowExecCtx,
|
||||
userId: string,
|
||||
): ResultAsync<boolean, Err> {
|
||||
logger.info("Updating last 2FA verified timestamp for user", {
|
||||
flowId: fctx.flowId,
|
||||
userId,
|
||||
});
|
||||
|
||||
return ResultAsync.fromPromise(
|
||||
this.db
|
||||
.update(user)
|
||||
.set({ last2FAVerifiedAt: new Date() })
|
||||
.where(eq(user.id, userId))
|
||||
.execute(),
|
||||
(error) => {
|
||||
logger.error("Failed to update last 2FA verified timestamp", {
|
||||
...fctx,
|
||||
error,
|
||||
});
|
||||
return userErrors.updateFailed(
|
||||
fctx,
|
||||
error instanceof Error ? error.message : String(error),
|
||||
);
|
||||
},
|
||||
).map(() => {
|
||||
logger.info("Last 2FA verified timestamp updated successfully", {
|
||||
...fctx,
|
||||
});
|
||||
return true;
|
||||
});
|
||||
}
|
||||
|
||||
isUsernameAvailable(
|
||||
fctx: FlowExecCtx,
|
||||
username: string,
|
||||
): ResultAsync<boolean, Err> {
|
||||
logger.info("Checking username availability", {
|
||||
...fctx,
|
||||
username,
|
||||
});
|
||||
|
||||
return ResultAsync.fromPromise(
|
||||
this.db.query.user.findFirst({
|
||||
where: eq(user.username, username),
|
||||
}),
|
||||
(error) => {
|
||||
logger.error("Failed to check username availability", {
|
||||
...fctx,
|
||||
error,
|
||||
});
|
||||
return userErrors.usernameCheckFailed(
|
||||
fctx,
|
||||
error instanceof Error ? error.message : String(error),
|
||||
);
|
||||
},
|
||||
).map((existingUser) => {
|
||||
const isAvailable = !existingUser?.id;
|
||||
logger.info("Username availability checked", {
|
||||
...fctx,
|
||||
username,
|
||||
isAvailable,
|
||||
});
|
||||
return isAvailable;
|
||||
});
|
||||
}
|
||||
|
||||
banUser(
|
||||
fctx: FlowExecCtx,
|
||||
userId: string,
|
||||
reason: string,
|
||||
banExpiresAt: Date,
|
||||
): ResultAsync<boolean, Err> {
|
||||
logger.info("Banning user", {
|
||||
...fctx,
|
||||
userId,
|
||||
banExpiresAt: banExpiresAt.toISOString(),
|
||||
reason,
|
||||
});
|
||||
|
||||
return ResultAsync.fromPromise(
|
||||
this.db
|
||||
.update(user)
|
||||
.set({
|
||||
banned: true,
|
||||
banReason: reason,
|
||||
banExpires: banExpiresAt,
|
||||
})
|
||||
.where(eq(user.id, userId))
|
||||
.execute(),
|
||||
(error) => {
|
||||
logger.error("Failed to ban user", { ...fctx, error });
|
||||
return userErrors.banOperationFailed(
|
||||
fctx,
|
||||
error instanceof Error ? error.message : String(error),
|
||||
);
|
||||
},
|
||||
).map(() => {
|
||||
logger.info("User has been banned", {
|
||||
...fctx,
|
||||
userId,
|
||||
banExpiresAt: banExpiresAt.toISOString(),
|
||||
});
|
||||
return true;
|
||||
});
|
||||
}
|
||||
|
||||
isUserBanned(fctx: FlowExecCtx, userId: string): ResultAsync<boolean, Err> {
|
||||
logger.info("Checking ban status for user", { ...fctx, userId });
|
||||
|
||||
return ResultAsync.fromPromise(
|
||||
this.db.query.user.findFirst({
|
||||
where: eq(user.id, userId),
|
||||
columns: {
|
||||
banned: true,
|
||||
banExpires: true,
|
||||
},
|
||||
}),
|
||||
(error) => {
|
||||
logger.error("Failed to check ban status", {
|
||||
...fctx,
|
||||
error,
|
||||
});
|
||||
return userErrors.dbError(
|
||||
fctx,
|
||||
error instanceof Error ? error.message : String(error),
|
||||
);
|
||||
},
|
||||
).andThen((userData) => {
|
||||
if (!userData) {
|
||||
logger.error("User not found when checking ban status", {
|
||||
...fctx,
|
||||
});
|
||||
return errAsync(userErrors.userNotFound(fctx));
|
||||
}
|
||||
|
||||
// If not banned, return false
|
||||
if (!userData.banned) {
|
||||
logger.info("User is not banned", { ...fctx, userId });
|
||||
return okAsync(false);
|
||||
}
|
||||
|
||||
// If banned but no expiry date, consider permanently banned
|
||||
if (!userData.banExpires) {
|
||||
logger.info("User is permanently banned", { ...fctx, userId });
|
||||
return okAsync(true);
|
||||
}
|
||||
|
||||
const now = new Date();
|
||||
if (userData.banExpires <= now) {
|
||||
logger.info("User ban has expired, removing ban status", {
|
||||
...fctx,
|
||||
userId,
|
||||
});
|
||||
|
||||
return ResultAsync.fromPromise(
|
||||
this.db
|
||||
.update(user)
|
||||
.set({
|
||||
banned: false,
|
||||
banReason: null,
|
||||
banExpires: null,
|
||||
})
|
||||
.where(eq(user.id, userId))
|
||||
.execute(),
|
||||
(error) => {
|
||||
logger.error("Failed to unban user after expiry", {
|
||||
...fctx,
|
||||
error,
|
||||
});
|
||||
return userErrors.unbanFailed(
|
||||
fctx,
|
||||
error instanceof Error
|
||||
? error.message
|
||||
: String(error),
|
||||
);
|
||||
},
|
||||
)
|
||||
.map(() => {
|
||||
logger.info("User has been unbanned after expiry", {
|
||||
...fctx,
|
||||
userId,
|
||||
});
|
||||
return false;
|
||||
})
|
||||
.orElse((error) => {
|
||||
logger.error(
|
||||
"Failed to unban user after expiry, still returning banned status",
|
||||
{ ...fctx, userId, error },
|
||||
);
|
||||
// Still return banned status since we couldn't update
|
||||
return okAsync(true);
|
||||
});
|
||||
}
|
||||
|
||||
logger.info("User is banned", {
|
||||
...fctx,
|
||||
userId,
|
||||
banExpires: userData.banExpires.toISOString(),
|
||||
});
|
||||
return okAsync(true);
|
||||
});
|
||||
}
|
||||
|
||||
getBanInfo(fctx: FlowExecCtx, userId: string): ResultAsync<BanInfo, Err> {
|
||||
logger.info("Getting ban info for user", { ...fctx, userId });
|
||||
|
||||
return ResultAsync.fromPromise(
|
||||
this.db.query.user.findFirst({
|
||||
where: eq(user.id, userId),
|
||||
columns: { banned: true, banReason: true, banExpires: true },
|
||||
}),
|
||||
(error) => {
|
||||
logger.error("Failed to get ban info", { ...fctx, error });
|
||||
return userErrors.getBanInfoFailed(
|
||||
fctx,
|
||||
error instanceof Error ? error.message : String(error),
|
||||
);
|
||||
},
|
||||
).andThen((userData) => {
|
||||
if (!userData) {
|
||||
logger.error("User not found when getting ban info", {
|
||||
...fctx,
|
||||
});
|
||||
return errAsync(userErrors.userNotFound(fctx));
|
||||
}
|
||||
|
||||
logger.info("Ban info retrieved successfully for user", {
|
||||
...fctx,
|
||||
userId,
|
||||
});
|
||||
|
||||
return okAsync({
|
||||
banned: userData.banned || false,
|
||||
reason: userData.banReason || undefined,
|
||||
expires: userData.banExpires || undefined,
|
||||
});
|
||||
});
|
||||
}
|
||||
}
|
||||
165
packages/logic/domains/user/router.ts
Normal file
165
packages/logic/domains/user/router.ts
Normal file
@@ -0,0 +1,165 @@
|
||||
import {
|
||||
banUserSchema,
|
||||
checkUsernameSchema,
|
||||
ensureAccountExistsSchema,
|
||||
rotatePasswordSchema,
|
||||
} from "./data";
|
||||
import { HonoContext } from "@core/hono.helpers";
|
||||
import { sValidator } from "@hono/standard-validator";
|
||||
import { getUserController } from "./controller";
|
||||
import { Hono } from "hono";
|
||||
|
||||
const uc = getUserController();
|
||||
|
||||
export const usersRouter = new Hono<HonoContext>()
|
||||
// Get current user info
|
||||
.get("/me", async (c) => {
|
||||
const fctx = c.env.locals.fCtx;
|
||||
const userId = c.env.locals.user?.id;
|
||||
|
||||
if (!userId) {
|
||||
return c.json(
|
||||
{
|
||||
error: {
|
||||
code: "UNAUTHORIZED",
|
||||
message: "User not authenticated",
|
||||
description: "Please log in",
|
||||
detail: "No user ID found in session",
|
||||
},
|
||||
},
|
||||
401,
|
||||
);
|
||||
}
|
||||
|
||||
const res = await uc.getUserInfo(fctx, userId);
|
||||
return c.json(
|
||||
res.isOk()
|
||||
? { data: res.value, error: null }
|
||||
: { data: null, error: res.error },
|
||||
res.isOk() ? 200 : 400,
|
||||
);
|
||||
})
|
||||
|
||||
// Get user info by ID
|
||||
.get("/:userId", async (c) => {
|
||||
const fctx = c.env.locals.fCtx;
|
||||
const userId = c.req.param("userId");
|
||||
|
||||
const res = await uc.getUserInfo(fctx, userId);
|
||||
return c.json(
|
||||
res.isOk()
|
||||
? { data: res.value, error: null }
|
||||
: { data: null, error: res.error },
|
||||
res.isOk() ? 200 : 400,
|
||||
);
|
||||
})
|
||||
|
||||
// Ensure account exists
|
||||
.put(
|
||||
"/ensure-account-exists",
|
||||
sValidator("json", ensureAccountExistsSchema),
|
||||
async (c) => {
|
||||
const fctx = c.env.locals.fCtx;
|
||||
const data = c.req.valid("json");
|
||||
|
||||
const res = await uc.ensureAccountExists(fctx, data.userId);
|
||||
return c.json(
|
||||
res.isOk()
|
||||
? { data: res.value, error: null }
|
||||
: { data: null, error: res.error },
|
||||
res.isOk() ? 200 : 400,
|
||||
);
|
||||
},
|
||||
)
|
||||
|
||||
// Check username availability
|
||||
.post(
|
||||
"/check-username",
|
||||
sValidator("json", checkUsernameSchema),
|
||||
async (c) => {
|
||||
const fctx = c.env.locals.fCtx;
|
||||
const data = c.req.valid("json");
|
||||
|
||||
const res = await uc.isUsernameAvailable(fctx, data.username);
|
||||
return c.json(
|
||||
res.isOk()
|
||||
? { data: res.value, error: null }
|
||||
: { data: null, error: res.error },
|
||||
res.isOk() ? 200 : 400,
|
||||
);
|
||||
},
|
||||
)
|
||||
|
||||
// Update last 2FA verification time
|
||||
.put("/update-2fa-verified/:userId", async (c) => {
|
||||
const fctx = c.env.locals.fCtx;
|
||||
const userId = c.req.param("userId");
|
||||
|
||||
const res = await uc.updateLastVerified2FaAtToNow(fctx, userId);
|
||||
return c.json(
|
||||
res.isOk()
|
||||
? { data: res.value, error: null }
|
||||
: { data: null, error: res.error },
|
||||
res.isOk() ? 200 : 400,
|
||||
);
|
||||
})
|
||||
|
||||
// Ban user
|
||||
.post("/ban", sValidator("json", banUserSchema), async (c) => {
|
||||
const fctx = c.env.locals.fCtx;
|
||||
const data = c.req.valid("json");
|
||||
|
||||
const res = await uc.banUser(fctx, data.userId, data.reason, data.banExpiresAt);
|
||||
return c.json(
|
||||
res.isOk()
|
||||
? { data: res.value, error: null }
|
||||
: { data: null, error: res.error },
|
||||
res.isOk() ? 200 : 400,
|
||||
);
|
||||
})
|
||||
|
||||
// Check if user is banned
|
||||
.get("/:userId/is-banned", async (c) => {
|
||||
const fctx = c.env.locals.fCtx;
|
||||
const userId = c.req.param("userId");
|
||||
|
||||
const res = await uc.isUserBanned(fctx, userId);
|
||||
return c.json(
|
||||
res.isOk()
|
||||
? { data: res.value, error: null }
|
||||
: { data: null, error: res.error },
|
||||
res.isOk() ? 200 : 400,
|
||||
);
|
||||
})
|
||||
|
||||
// Get ban info
|
||||
.get("/:userId/ban-info", async (c) => {
|
||||
const fctx = c.env.locals.fCtx;
|
||||
const userId = c.req.param("userId");
|
||||
|
||||
const res = await uc.getBanInfo(fctx, userId);
|
||||
return c.json(
|
||||
res.isOk()
|
||||
? { data: res.value, error: null }
|
||||
: { data: null, error: res.error },
|
||||
res.isOk() ? 200 : 400,
|
||||
);
|
||||
})
|
||||
|
||||
// Rotate password
|
||||
.put(
|
||||
"/rotate-password",
|
||||
sValidator("json", rotatePasswordSchema),
|
||||
async (c) => {
|
||||
const fctx = c.env.locals.fCtx;
|
||||
const data = c.req.valid("json");
|
||||
|
||||
const res = await uc.rotatePassword(fctx, data.userId, data.password);
|
||||
return c.json(
|
||||
res.isOk()
|
||||
? { data: res.value, error: null }
|
||||
: { data: null, error: res.error },
|
||||
res.isOk() ? 200 : 400,
|
||||
);
|
||||
},
|
||||
);
|
||||
40
packages/logic/package.json
Normal file
40
packages/logic/package.json
Normal file
@@ -0,0 +1,40 @@
|
||||
{
|
||||
"name": "@pkg/logic",
|
||||
"scripts": {
|
||||
"auth:schemagen": "bun x @better-auth/cli generate --config ./domains/auth/config.base.ts --output ../../packages/db/schema/better.auth.schema.ts"
|
||||
},
|
||||
"dependencies": {
|
||||
"@hono/standard-validator": "^0.2.1",
|
||||
"@pkg/db": "workspace:*",
|
||||
"@pkg/logger": "workspace:*",
|
||||
"@pkg/redis": "workspace:*",
|
||||
"@pkg/result": "workspace:*",
|
||||
"@pkg/settings": "workspace:*",
|
||||
"@types/pdfkit": "^0.14.0",
|
||||
"argon2": "^0.43.0",
|
||||
"better-auth": "^1.4.7",
|
||||
"date-fns-tz": "^3.2.0",
|
||||
"dotenv": "^16.5.0",
|
||||
"hono": "^4.11.1",
|
||||
"imapflow": "^1.0.188",
|
||||
"mailparser": "^3.7.3",
|
||||
"nanoid": "^5.1.5",
|
||||
"neverthrow": "^8.2.0",
|
||||
"otplib": "^12.0.1",
|
||||
"pdfkit": "^0.17.1",
|
||||
"tmp": "^0.2.3",
|
||||
"uuid": "^11.1.0",
|
||||
"valibot": "^1.2.0",
|
||||
"xlsx": "^0.18.5"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@types/bun": "latest",
|
||||
"@types/imapflow": "^1.0.22",
|
||||
"@types/mailparser": "^3.4.6",
|
||||
"@types/tmp": "^0.2.6",
|
||||
"@types/uuid": "^10.0.0"
|
||||
},
|
||||
"peerDependencies": {
|
||||
"typescript": "^5.0.0"
|
||||
}
|
||||
}
|
||||
16
packages/logic/tsconfig.json
Normal file
16
packages/logic/tsconfig.json
Normal file
@@ -0,0 +1,16 @@
|
||||
{
|
||||
"compilerOptions": {
|
||||
"strict": true,
|
||||
"jsx": "react-jsx",
|
||||
"jsxImportSource": "hono/jsx",
|
||||
"baseUrl": ".",
|
||||
"paths": {
|
||||
"@/*": ["./*"],
|
||||
"@domains/*": ["./domains/*"],
|
||||
"@core/*": ["./core/*"]
|
||||
},
|
||||
"moduleResolution": "bundler",
|
||||
"module": "esnext",
|
||||
"target": "esnext"
|
||||
}
|
||||
}
|
||||
18
packages/redis/index.ts
Normal file
18
packages/redis/index.ts
Normal file
@@ -0,0 +1,18 @@
|
||||
import { Redis } from "ioredis";
|
||||
export * from "ioredis";
|
||||
|
||||
let redis: Redis | undefined;
|
||||
|
||||
let defaultRedisUrl = process.env.REDIS_URL ?? "";
|
||||
|
||||
export function getRedisInstance(url: string = defaultRedisUrl) {
|
||||
if (redis) {
|
||||
return redis;
|
||||
}
|
||||
redis = new Redis(url, {
|
||||
lazyConnect: true,
|
||||
connectTimeout: 5000,
|
||||
commandTimeout: 5000,
|
||||
});
|
||||
return redis;
|
||||
}
|
||||
15
packages/redis/package.json
Normal file
15
packages/redis/package.json
Normal file
@@ -0,0 +1,15 @@
|
||||
{
|
||||
"name": "@pkg/redis",
|
||||
"module": "index.ts",
|
||||
"type": "module",
|
||||
"private": true,
|
||||
"devDependencies": {
|
||||
"@types/bun": "latest"
|
||||
},
|
||||
"peerDependencies": {
|
||||
"typescript": "^5"
|
||||
},
|
||||
"dependencies": {
|
||||
"ioredis": "^5.6.1"
|
||||
}
|
||||
}
|
||||
81
packages/result/index.ts
Normal file
81
packages/result/index.ts
Normal file
@@ -0,0 +1,81 @@
|
||||
export const ERROR_CODES = {
|
||||
API_ERROR: "API_ERROR",
|
||||
EXTERNAL_API_ERROR: "EXTERNAL_API_ERROR",
|
||||
RATE_LIMIT_ERROR: "RATE_LIMIT_ERROR",
|
||||
DATABASE_ERROR: "DATABASE_ERROR",
|
||||
NETWORK_ERROR: "NETWORK_ERROR",
|
||||
BANNED: "BANNED",
|
||||
AUTH_ERROR: "AUTH_ERROR",
|
||||
PERMISSION_ERROR: "PERMISSION_ERROR",
|
||||
VALIDATION_ERROR: "VALIDATION_ERROR",
|
||||
UNKNOWN_ERROR: "UNKNOWN_ERROR",
|
||||
NOT_FOUND_ERROR: "NOT_FOUND_ERROR",
|
||||
NOT_FOUND: "NOT_FOUND",
|
||||
INPUT_ERROR: "INPUT_ERROR",
|
||||
INTERNAL_SERVER_ERROR: "INTERNAL_SERVER_ERROR",
|
||||
EXTERNAL_SERVICE_ERROR: "EXTERNAL_SERVICE_ERROR",
|
||||
FILE_SYSTEM_ERROR: "FILE_SYSTEM_ERROR",
|
||||
STORAGE_ERROR: "STORAGE_ERROR",
|
||||
NOT_ALLOWED: "NOT_ALLOWED",
|
||||
NOT_IMPLEMENTED: "NOT_IMPLEMENTED",
|
||||
PROCESSING_ERROR: "PROCESSING_ERROR",
|
||||
PARSING_ERROR: "PARSING_ERROR",
|
||||
} as const;
|
||||
|
||||
export const errorStatusMap = {
|
||||
[ERROR_CODES.VALIDATION_ERROR]: 400,
|
||||
[ERROR_CODES.AUTH_ERROR]: 403,
|
||||
[ERROR_CODES.BANNED]: 403,
|
||||
[ERROR_CODES.NOT_FOUND]: 404,
|
||||
[ERROR_CODES.NOT_ALLOWED]: 405,
|
||||
[ERROR_CODES.RATE_LIMIT_ERROR]: 429,
|
||||
[ERROR_CODES.DATABASE_ERROR]: 500,
|
||||
[ERROR_CODES.NETWORK_ERROR]: 500,
|
||||
[ERROR_CODES.EXTERNAL_API_ERROR]: 500,
|
||||
[ERROR_CODES.API_ERROR]: 500,
|
||||
[ERROR_CODES.INTERNAL_SERVER_ERROR]: 500,
|
||||
[ERROR_CODES.EXTERNAL_SERVICE_ERROR]: 500,
|
||||
[ERROR_CODES.FILE_SYSTEM_ERROR]: 500,
|
||||
[ERROR_CODES.STORAGE_ERROR]: 500,
|
||||
[ERROR_CODES.PROCESSING_ERROR]: 500,
|
||||
[ERROR_CODES.PARSING_ERROR]: 500,
|
||||
[ERROR_CODES.NOT_IMPLEMENTED]: 501,
|
||||
} as Record<string, number>;
|
||||
|
||||
export type Err = {
|
||||
flowId?: string;
|
||||
code: string;
|
||||
message: string;
|
||||
description: string;
|
||||
detail: string;
|
||||
actionable?: boolean;
|
||||
error?: any;
|
||||
};
|
||||
|
||||
type Success<T> = { data: T; error?: undefined | null };
|
||||
type Failure<E> = { data?: undefined | null; error: E };
|
||||
|
||||
// Legacy now, making use of Effect throughout the project
|
||||
export type Result<T, E = Err> = Success<T> | Failure<E>;
|
||||
|
||||
export async function tryCatch<T, E = Err>(
|
||||
promise: Promise<T>,
|
||||
err?: E,
|
||||
): Promise<Result<T, E>> {
|
||||
try {
|
||||
const data = await promise;
|
||||
return { data };
|
||||
} catch (e) {
|
||||
return {
|
||||
// @ts-ignore
|
||||
error: !!err
|
||||
? err
|
||||
: {
|
||||
code: "UNKNOWN_ERROR",
|
||||
message: "An unknown error occurred",
|
||||
description: "An unknown error occurred",
|
||||
detail: "An unknown error occurred",
|
||||
},
|
||||
};
|
||||
}
|
||||
}
|
||||
9
packages/result/package.json
Normal file
9
packages/result/package.json
Normal file
@@ -0,0 +1,9 @@
|
||||
{
|
||||
"name": "@pkg/result",
|
||||
"devDependencies": {
|
||||
"@types/bun": "latest"
|
||||
},
|
||||
"peerDependencies": {
|
||||
"typescript": "^5.0.0"
|
||||
}
|
||||
}
|
||||
178
packages/settings/index.ts
Normal file
178
packages/settings/index.ts
Normal file
@@ -0,0 +1,178 @@
|
||||
import * as v from "valibot";
|
||||
|
||||
import "dotenv/config";
|
||||
|
||||
/**
 * Settings schema using Valibot for validation.
 *
 * Every field is required (`v.string()` etc.) unless wrapped in
 * `v.optional(...)`. Note: `v.string()` accepts the empty string, so a
 * "required" setting left unset in the environment (defaulting to "")
 * still passes validation — see `getEnv`'s default.
 */
export const settingsSchema = v.object({
  appName: v.string(),
  nodeEnv: v.string(),
  logLevel: v.string(),

  // Derived from nodeEnv at load time, not read from the environment.
  isDevelopment: v.optional(v.boolean()),

  redisUrl: v.string(),
  databaseUrl: v.string(),

  // Shared secret for service-to-service calls.
  internalApiKey: v.string(),

  processorApiUrl: v.string(),

  // better-auth configuration.
  betterAuthUrl: v.string(),
  betterAuthSecret: v.string(),

  // Two-factor auth windows (minutes / hours respectively).
  twofaSessionExpiryMinutes: v.optional(v.number()),
  twofaRequiredHours: v.optional(v.number()),

  defaultAdminEmail: v.string(),

  // Google OAuth credentials.
  googleClientId: v.string(),
  googleClientSecret: v.string(),

  // Resend (transactional email).
  resendApiKey: v.string(),
  fromEmail: v.string(),

  // Upstash QStash (message queue / scheduled jobs).
  qstashUrl: v.string(),
  qstashToken: v.string(),
  qstashCurrentSigningKey: v.string(),
  qstashNextSigningKey: v.string(),

  // Axiom (log/event ingestion).
  axiomDatasetName: v.string(),
  axiomApiToken: v.string(),

  // R2/Object Storage settings
  r2BucketName: v.string(),
  r2Region: v.string(),
  r2Endpoint: v.string(),
  r2AccessKey: v.string(),
  r2SecretKey: v.string(),
  r2PublicUrl: v.optional(v.string()),

  // File upload settings
  maxFileSize: v.number(),
  allowedMimeTypes: v.array(v.string()),
  allowedExtensions: v.array(v.string()),
});
|
||||
|
||||
export type Settings = v.InferOutput<typeof settingsSchema>;
|
||||
|
||||
/**
|
||||
* Helper to get environment variable with default value
|
||||
*/
|
||||
function getEnv(key: string, defaultValue: string = ""): string {
|
||||
return process.env[key] ?? defaultValue;
|
||||
}
|
||||
|
||||
/**
|
||||
* Helper to get environment variable as number with default value
|
||||
*/
|
||||
function getEnvNumber(key: string, defaultValue: number): number {
|
||||
const value = process.env[key];
|
||||
if (!value) return defaultValue;
|
||||
const parsed = Number(value);
|
||||
return Number.isNaN(parsed) ? defaultValue : parsed;
|
||||
}
|
||||
|
||||
/**
|
||||
* Parse comma-separated string into array
|
||||
*/
|
||||
function parseCommaSeparated(value: string): string[] {
|
||||
return value
|
||||
.split(",")
|
||||
.map((item) => item.trim())
|
||||
.filter((item) => item.length > 0);
|
||||
}
|
||||
|
||||
/**
|
||||
* Load and validate settings from environment variables
|
||||
*/
|
||||
function loadSettings(): Settings {
|
||||
const nodeEnv = getEnv("NODE_ENV", "development");
|
||||
|
||||
const rawSettings = {
|
||||
appName: getEnv("APP_NAME", "App"),
|
||||
nodeEnv,
|
||||
logLevel: getEnv("LOG_LEVEL", "info"),
|
||||
|
||||
isDevelopment: nodeEnv === "development",
|
||||
|
||||
redisUrl: getEnv("REDIS_URL", "redis://localhost:6379"),
|
||||
databaseUrl: getEnv("DATABASE_URL"),
|
||||
|
||||
internalApiKey: getEnv("INTERNAL_API_KEY"),
|
||||
|
||||
processorApiUrl: getEnv("PROCESSOR_API_URL", "http://localhost:3000"),
|
||||
|
||||
betterAuthUrl: getEnv("BETTER_AUTH_URL"),
|
||||
betterAuthSecret: getEnv("BETTER_AUTH_SECRET"),
|
||||
|
||||
twofaSessionExpiryMinutes: getEnvNumber(
|
||||
"TWOFA_SESSION_EXPIRY_MINUTES",
|
||||
10,
|
||||
),
|
||||
twofaRequiredHours: getEnvNumber("TWOFA_REQUIRED_HOURS", 24),
|
||||
|
||||
defaultAdminEmail: getEnv("DEFAULT_ADMIN_EMAIL"),
|
||||
|
||||
googleClientId: getEnv("GOOGLE_CLIENT_ID"),
|
||||
googleClientSecret: getEnv("GOOGLE_CLIENT_SECRET"),
|
||||
|
||||
resendApiKey: getEnv("RESEND_API_KEY"),
|
||||
fromEmail: getEnv("FROM_EMAIL"),
|
||||
|
||||
qstashUrl: getEnv("QSTASH_URL"),
|
||||
qstashToken: getEnv("QSTASH_TOKEN"),
|
||||
qstashCurrentSigningKey: getEnv("QSTASH_CURRENT_SIGNING_KEY"),
|
||||
qstashNextSigningKey: getEnv("QSTASH_NEXT_SIGNING_KEY"),
|
||||
|
||||
axiomDatasetName: getEnv("AXIOM_DATASET_NAME"),
|
||||
axiomApiToken: getEnv("AXIOM_API_TOKEN"),
|
||||
|
||||
// R2/Object Storage settings
|
||||
r2BucketName: getEnv("R2_BUCKET_NAME"),
|
||||
r2Region: getEnv("R2_REGION", "auto"),
|
||||
r2Endpoint: getEnv("R2_ENDPOINT"),
|
||||
r2AccessKey: getEnv("R2_ACCESS_KEY"),
|
||||
r2SecretKey: getEnv("R2_SECRET_KEY"),
|
||||
r2PublicUrl: getEnv("R2_PUBLIC_URL") || undefined,
|
||||
|
||||
// File upload settings
|
||||
maxFileSize: getEnvNumber("MAX_FILE_SIZE", 10485760), // 10MB default
|
||||
allowedMimeTypes: parseCommaSeparated(
|
||||
getEnv(
|
||||
"ALLOWED_MIME_TYPES",
|
||||
"image/jpeg,image/png,image/webp,image/gif,application/pdf,text/plain",
|
||||
),
|
||||
),
|
||||
allowedExtensions: parseCommaSeparated(
|
||||
getEnv("ALLOWED_EXTENSIONS", "jpg,jpeg,png,webp,gif,pdf,txt"),
|
||||
),
|
||||
};
|
||||
|
||||
try {
|
||||
return v.parse(settingsSchema, rawSettings);
|
||||
} catch (error) {
|
||||
console.error("❌ Settings validation failed:");
|
||||
if (error instanceof v.ValiError) {
|
||||
for (const issue of error.issues) {
|
||||
console.error(
|
||||
` - ${issue.path?.map((p: any) => p.key).join(".")}: ${issue.message}`,
|
||||
);
|
||||
}
|
||||
} else {
|
||||
console.error(error);
|
||||
}
|
||||
throw new Error(
|
||||
"Failed to load settings. Check environment variables.",
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
// Eagerly load and validate settings at module import time so any
// misconfiguration aborts startup immediately (loadSettings throws).
export const settings = loadSettings();

// Type-safe accessor for a single settings value; the return type is
// narrowed to the exact field type via the keyof lookup.
export const getSetting = <K extends keyof Settings>(key: K): Settings[K] => {
  return settings[key];
};

// Startup breadcrumb confirming which app/environment configuration loaded.
console.log(`✅ Settings loaded | ${settings.appName} (${settings.nodeEnv})`);
|
||||
15
packages/settings/package.json
Normal file
15
packages/settings/package.json
Normal file
@@ -0,0 +1,15 @@
|
||||
{
|
||||
"name": "@pkg/settings",
|
||||
"module": "index.ts",
|
||||
"type": "module",
|
||||
"devDependencies": {
|
||||
"@types/bun": "latest"
|
||||
},
|
||||
"peerDependencies": {
|
||||
"typescript": "^5.0.0"
|
||||
},
|
||||
"dependencies": {
|
||||
"dotenv": "^17.2.3",
|
||||
"valibot": "^1.2.0"
|
||||
}
|
||||
}
|
||||
Reference in New Issue
Block a user