initial commit ??
This commit is contained in:
9
src/lib/server/array.chunk.ts
Executable file
9
src/lib/server/array.chunk.ts
Executable file
@@ -0,0 +1,9 @@
|
||||
export function chunkArray<T>(array: T[], size: number): T[][] {
|
||||
const chunked_arr = [];
|
||||
let index = 0;
|
||||
while (index < array.length) {
|
||||
chunked_arr.push(array.slice(index, size + index));
|
||||
index += size;
|
||||
}
|
||||
return chunked_arr;
|
||||
}
|
||||
10
src/lib/server/connectors/redis.ts
Executable file
10
src/lib/server/connectors/redis.ts
Executable file
@@ -0,0 +1,10 @@
|
||||
import Redis from "ioredis";

// Connection string, e.g. redis://user:pass@host:6379; empty when unset.
const redisUrl = process.env.REDIS_URL ?? "";

// NOTE(review): this logs the full URL, which may embed credentials —
// consider masking it before this reaches a shared log sink.
console.log(`\n\n[//] Redis URL: ${redisUrl}`);

// Only construct a client when a URL was provided; otherwise stay undefined.
const _redis =
  redisUrl && redisUrl.length > 0 ? new Redis(redisUrl) : undefined;

// The cast hides the undefined case: with REDIS_URL unset, consumers crash
// on first use of `redis` rather than at import time.
export const redis = _redis as Redis;
|
||||
36
src/lib/server/connectors/surreal.db.ts
Executable file
36
src/lib/server/connectors/surreal.db.ts
Executable file
@@ -0,0 +1,36 @@
|
||||
import Surreal from "surrealdb.js";
|
||||
export type { QueryResult } from "surrealdb.js/script/types";
|
||||
|
||||
try {
|
||||
if (document || window) {
|
||||
throw new Error("SurrealDB needs a NodeJS environment to run.");
|
||||
}
|
||||
} catch (err) {}
|
||||
|
||||
// SurrealDB connection settings, all sourced from the environment.
// Empty strings mean "not configured"; an empty `url` disables the client.
const CONFIG = {
  url: process.env.SURREAL_URL ?? "",
  user: process.env.SURREAL_USER ?? "",
  pass: process.env.SURREAL_PASS ?? "",
  ns: process.env.SURREAL_NS ?? "",
  db: process.env.SURREAL_DB ?? "",
} as const;

// Disabled strict validation: re-enabling it would throw at import time
// whenever any setting is missing. Kept for reference.
// for (let key in CONFIG) {
//   if (
//     !CONFIG[key as keyof typeof CONFIG] ||
//     CONFIG[key as keyof typeof CONFIG] === ""
//   ) {
//     throw new Error(`Missing configuration for ${key}`);
//   }
// }
|
||||
|
||||
// Client is only constructed when SURREAL_URL is set; connects over HTTP
// RPC with basic auth plus namespace/database scoping.
let _surreal =
  CONFIG.url.length > 0
    ? new Surreal(`http://${CONFIG.url}/rpc`, {
        auth: { user: CONFIG.user, pass: CONFIG.pass },
        ns: CONFIG.ns,
        db: CONFIG.db,
      })
    : undefined;

// The cast hides the undefined case: callers crash on first use when the
// URL is not configured, rather than at import time.
export const surreal = _surreal as Surreal;
|
||||
50
src/lib/server/cookie.functions.ts
Executable file
50
src/lib/server/cookie.functions.ts
Executable file
@@ -0,0 +1,50 @@
|
||||
import { redis } from "$lib/server/connectors/redis";
|
||||
|
||||
// Minimal identity extracted from a serialized session record.
export type SessionData = {
  username: string;
  userType: string;
};
|
||||
|
||||
export const parseCookieString = (cookieString: string) => {
|
||||
const cookies: Record<string, string> = cookieString
|
||||
.split(";")
|
||||
.reduce((acc, cookie) => {
|
||||
const [key, value] = cookie.split("=");
|
||||
if (!key || !value) {
|
||||
return acc;
|
||||
}
|
||||
return { ...acc, [key.trim()]: decodeURIComponent(value) };
|
||||
}, {});
|
||||
for (const key in cookies) {
|
||||
if (!key.length || !cookies[key] || cookies[key] === "undefined") {
|
||||
delete cookies[key];
|
||||
}
|
||||
}
|
||||
return cookies;
|
||||
};
|
||||
|
||||
export const parseSessionData = (sessionData: string) => {
|
||||
const splits = sessionData.split("|");
|
||||
if (splits.length < 5) return false;
|
||||
return { username: splits[2], userType: splits[3] } as SessionData;
|
||||
};
|
||||
|
||||
export const getParsedSession = async (sId: string) => {
|
||||
const session = await redis.get(sId);
|
||||
if (!session) {
|
||||
return false;
|
||||
}
|
||||
const parsed = parseSessionData(session);
|
||||
if (!parsed) {
|
||||
return false;
|
||||
}
|
||||
return parsed;
|
||||
};
|
||||
|
||||
export const isSessionValid = async (sId: string) => {
|
||||
if (!sId || sId.length === 0 || sId.length < 20) {
|
||||
return false;
|
||||
}
|
||||
const session = await redis.get(sId);
|
||||
return session && session.length > 1;
|
||||
};
|
||||
212
src/lib/server/db/apidata.db.ts
Executable file
212
src/lib/server/db/apidata.db.ts
Executable file
@@ -0,0 +1,212 @@
|
||||
import type { BookingEntry } from "$lib/utils/data.types";
|
||||
import { chunkArray } from "../array.chunk";
|
||||
import { surreal } from "../connectors/surreal.db";
|
||||
|
||||
const getTableName = (date: string) => {
|
||||
return `apidata${date.replaceAll("-", "")}`;
|
||||
};
|
||||
|
||||
const upsertData = async (
|
||||
data: BookingEntry[],
|
||||
date: string,
|
||||
tries: number = 0,
|
||||
): Promise<void> => {
|
||||
const tableName = getTableName(date);
|
||||
console.log(`[...] Upserting ${data.length} entries into ${tableName}`);
|
||||
const alreadyPresentIds = new Set();
|
||||
try {
|
||||
const [alreadyPresent] = await surreal.query<[string[]]>(
|
||||
`select value id from type::table($tableName) where bookDate = $bookDate`,
|
||||
{ tableName, bookDate: date },
|
||||
);
|
||||
for (let eId of alreadyPresent.result ?? []) {
|
||||
alreadyPresentIds.add(eId);
|
||||
}
|
||||
} catch (err) {
|
||||
console.log("Failed to fetch, seeing if can try again");
|
||||
if (tries >= 3) {
|
||||
console.log("Max tries exceeded for initial fetch for upserting data");
|
||||
return;
|
||||
}
|
||||
return await upsertData(data, date, tries++);
|
||||
}
|
||||
const oldEntries = [] as any[];
|
||||
const newEntries = [] as BookingEntry[];
|
||||
for (let entry of data) {
|
||||
if (!alreadyPresentIds.has(entry.id)) {
|
||||
newEntries.push({
|
||||
...entry,
|
||||
id: `${tableName}:${entry.id}`,
|
||||
createdAt: new Date().toISOString(),
|
||||
updatedAt: new Date().toISOString(),
|
||||
bookDate: entry.bookDate.split(" ")[0],
|
||||
requestId: entry.requestId ?? "",
|
||||
});
|
||||
} else {
|
||||
oldEntries.push({
|
||||
distributorId: entry.distributorId,
|
||||
dealerId: entry.dealerId,
|
||||
drawId: entry.drawId,
|
||||
bookDate: entry.bookDate.split(" ")[0],
|
||||
number: entry.number,
|
||||
first: entry.first,
|
||||
second: entry.second,
|
||||
changedBalance: entry.changedBalance,
|
||||
sheetName: entry.sheetName,
|
||||
sheetId: entry.sheetId,
|
||||
requestId: entry.requestId,
|
||||
updatedAt: new Date().toISOString(),
|
||||
});
|
||||
}
|
||||
}
|
||||
console.log(
|
||||
`[+] Inserting ${newEntries.length} new entries into ${tableName}`,
|
||||
);
|
||||
|
||||
// 5 to 25% of the total data length
|
||||
let chunkSize = Math.floor(
|
||||
Math.random() * (data.length * 0.25 - data.length * 0.05) +
|
||||
data.length * 0.05,
|
||||
);
|
||||
if (chunkSize > 10_000) {
|
||||
chunkSize = 10_000;
|
||||
}
|
||||
|
||||
console.log(`Chunk Size : ${chunkSize}`);
|
||||
|
||||
console.log(`[+] Inserting new entries`);
|
||||
console.time("insertion time");
|
||||
const chunks = chunkArray(newEntries, chunkSize).map(async (chunk) => {
|
||||
await surreal.insert<BookingEntry>(tableName, chunk);
|
||||
});
|
||||
for (let i = 0; i < chunks.length; i += 2) {
|
||||
await Promise.all(chunks.slice(i, i + 2));
|
||||
}
|
||||
console.timeEnd("insertion time");
|
||||
|
||||
console.log(
|
||||
`[+] Updating ${oldEntries.length} old entries into ${tableName}`,
|
||||
);
|
||||
|
||||
const chunks2 = chunkArray(oldEntries, chunkSize).map(async (chunk) => {
|
||||
await Promise.all(
|
||||
chunk.map(async (entry) => {
|
||||
// @ts-ignore
|
||||
await surreal.update<BookingEntry>(`${tableName}:${entry.id}`, {
|
||||
distributorId: entry.distributorId,
|
||||
dealerId: entry.dealerId,
|
||||
drawId: entry.drawId,
|
||||
bookDate: entry.bookDate.split(" ")[0],
|
||||
number: entry.number,
|
||||
first: entry.first,
|
||||
second: entry.second,
|
||||
changedBalance: entry.changedBalance,
|
||||
sheetName: entry.sheetName,
|
||||
sheetId: entry.sheetId,
|
||||
requestId: entry.requestId,
|
||||
updatedAt: new Date().toISOString(),
|
||||
});
|
||||
}),
|
||||
);
|
||||
});
|
||||
|
||||
console.time("update time");
|
||||
for (let i = 0; i < chunks2.length; i += 10) {
|
||||
await Promise.all(chunks2.slice(i, i + 10));
|
||||
}
|
||||
console.timeEnd("update time");
|
||||
|
||||
console.log(
|
||||
`[+] Successfully upserted ${data.length} entries into ${tableName}`,
|
||||
);
|
||||
};
|
||||
|
||||
const getBookingEntriesForDealer = async (
|
||||
date: string,
|
||||
drawId: string,
|
||||
userId: string,
|
||||
sorted?: boolean,
|
||||
) => {
|
||||
const tableName = getTableName(date);
|
||||
let query = `select * from type::table($tableName) where bookDate = $date and dealerId = $userId and drawId = $drawId`;
|
||||
if (sorted) {
|
||||
query += " order by requestId desc";
|
||||
}
|
||||
const [data] = await surreal.query<[BookingEntry[]]>(query, {
|
||||
tableName,
|
||||
date: `${date}`,
|
||||
userId: parseInt(userId),
|
||||
drawId: parseInt(drawId),
|
||||
});
|
||||
console.log(
|
||||
`Found ${JSON.stringify(
|
||||
data,
|
||||
)} entries for ${userId}, filters are ${date}, ${drawId} for ${tableName}`,
|
||||
);
|
||||
if (data.status === "OK") {
|
||||
return data.result ?? [];
|
||||
}
|
||||
return [];
|
||||
};
|
||||
|
||||
const getBookingEntriesByDraw = async (date: string, drawId: string) => {
|
||||
const tableName = getTableName(date);
|
||||
const [data] = await surreal.query<[BookingEntry[]]>(
|
||||
`select * from type::table($tableName) where bookDate = $date and drawId = $drawId`,
|
||||
{
|
||||
tableName,
|
||||
date: date,
|
||||
drawId: parseInt(drawId.includes(":") ? drawId.split(":")[1] : drawId),
|
||||
},
|
||||
);
|
||||
if (data.status === "OK") {
|
||||
return data.result ?? [];
|
||||
}
|
||||
return [];
|
||||
};
|
||||
|
||||
const deleteDataOlderThan2Weeks = async () => {
|
||||
const [out] = await surreal.query("info for db");
|
||||
// @ts-ignore
|
||||
const tableNames = Object.keys(out.result.tables);
|
||||
|
||||
const twoWeeksAgo = new Date();
|
||||
twoWeeksAgo.setDate(twoWeeksAgo.getDate() - 14);
|
||||
|
||||
for (const tableName of tableNames) {
|
||||
if (tableName.startsWith("apidata")) {
|
||||
const datePart = tableName.slice(7);
|
||||
const d = new Date(
|
||||
parseInt(datePart.slice(0, 4), 10),
|
||||
parseInt(datePart.slice(4, 6), 10) - 1, // Month is 0-based in JavaScript Date
|
||||
parseInt(datePart.slice(6, 8), 10),
|
||||
);
|
||||
if (d < twoWeeksAgo) {
|
||||
console.log(`[...] Deleting ${tableName}`);
|
||||
await surreal.query("remove table if exists " + tableName);
|
||||
console.log(`[+] Deleted ${tableName}`);
|
||||
}
|
||||
} else if (tableName.startsWith("apipostdata_")) {
|
||||
const datePart = tableName.slice(12);
|
||||
const d = new Date(
|
||||
parseInt(datePart.slice(0, 4), 10),
|
||||
parseInt(datePart.slice(4, 6), 10) - 1, // Month is 0-based in JavaScript Date
|
||||
parseInt(datePart.slice(6, 8), 10),
|
||||
);
|
||||
if (d < twoWeeksAgo) {
|
||||
console.log(`[...] Deleting ${tableName}`);
|
||||
await surreal.query("remove table if exists " + tableName);
|
||||
console.log(`[+] Deleted ${tableName}`);
|
||||
}
|
||||
} else {
|
||||
console.log(`Skipping ${tableName}`);
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
// Public API for the per-date booking-entry tables (apidataYYYYMMDD).
export const dbApiData = {
  upsertData,
  getBookingEntriesForDealer,
  getBookingEntriesByDraw,
  deleteDataOlderThan2Weeks,
};
|
||||
100
src/lib/server/db/apidraw.db.ts
Executable file
100
src/lib/server/db/apidraw.db.ts
Executable file
@@ -0,0 +1,100 @@
|
||||
import { constants } from "$lib/utils/constants";
|
||||
import type { Draw } from "$lib/utils/data.types";
|
||||
import { getDraws } from "../external/api.scraping.helpers";
|
||||
import { surreal } from "../connectors/surreal.db";
|
||||
import { getSessionFromStore } from "../utils/session.service";
|
||||
|
||||
// Draws live in one fixed table (not per-date like the apidata* tables).
const tableName = "apidraw";
|
||||
|
||||
const _populateDrawsTable = async () => {
|
||||
const session = await getSessionFromStore(constants.SCRAP_API_SESSION_KEY);
|
||||
if (!session) {
|
||||
return;
|
||||
}
|
||||
const draws = await getDraws(session?.sessionToken);
|
||||
if (draws.data.length === 0 || !draws.ok) {
|
||||
return;
|
||||
}
|
||||
await surreal.insert<Draw>(
|
||||
tableName,
|
||||
draws.data.map((e) => {
|
||||
return {
|
||||
id: e.id,
|
||||
drawType: e.drawType,
|
||||
adminId: e.adminId,
|
||||
title: e.title,
|
||||
closeTime: e.closeTime,
|
||||
filterDuplicatesWhilePosting: false,
|
||||
abRateF: 0,
|
||||
abcRateF: 0,
|
||||
abRateS: 0,
|
||||
abcRateS: 0,
|
||||
createdAt: new Date().toISOString(),
|
||||
updatedAt: new Date().toISOString(),
|
||||
};
|
||||
}),
|
||||
);
|
||||
};
|
||||
|
||||
const getAllDraws = async (
|
||||
skipOptional?: boolean,
|
||||
retry: number = 0,
|
||||
): Promise<Draw[]> => {
|
||||
let query = `select * from apidraw order by closeTime`;
|
||||
const [out] = await surreal.query<[Draw[]]>(query);
|
||||
if (out.status === "OK") {
|
||||
const draws = out.result ?? [];
|
||||
if (draws.length > 0) {
|
||||
return draws;
|
||||
}
|
||||
await _populateDrawsTable();
|
||||
if (retry < 3) {
|
||||
return getAllDraws(skipOptional, retry + 1);
|
||||
}
|
||||
}
|
||||
return [];
|
||||
};
|
||||
|
||||
async function setFilterDuplicatesFlag(drawId: string, flag: boolean) {
|
||||
const [d] = await surreal.select<Draw>(drawId);
|
||||
if (!d || !d.id) {
|
||||
return;
|
||||
}
|
||||
console.log("setFilterDuplicatesFlag :: ", drawId, flag);
|
||||
await surreal.update(drawId, {
|
||||
...d,
|
||||
filterDuplicatesWhilePosting: flag,
|
||||
updatedAt: new Date().toISOString(),
|
||||
} as Draw);
|
||||
}
|
||||
|
||||
async function updateDrawPresetInfo(draw: Draw) {
|
||||
const drawId = draw.id;
|
||||
const [d] = await surreal.select<Draw>(drawId);
|
||||
if (!d || !d.id) {
|
||||
return;
|
||||
}
|
||||
await surreal.update(drawId, {
|
||||
...d,
|
||||
filterDuplicatesWhilePosting: draw.filterDuplicatesWhilePosting,
|
||||
abRateF: draw.abRateF,
|
||||
abcRateF: draw.abcRateF,
|
||||
abRateS: draw.abRateS,
|
||||
abcRateS: draw.abcRateS,
|
||||
updatedAt: new Date().toISOString(),
|
||||
} as Draw);
|
||||
}
|
||||
|
||||
const getDraw = async (drawId: string): Promise<Draw | undefined> => {
|
||||
const draws = await surreal.select<Draw>(
|
||||
drawId.includes("apidraw") ? drawId : `apidraw:${drawId}`,
|
||||
);
|
||||
return draws[0];
|
||||
};
|
||||
|
||||
// Public API for the `apidraw` table.
export const dbDraw = {
  getAllDraws,
  getDraw,
  setFilterDuplicatesFlag,
  updateDrawPresetInfo,
};
|
||||
96
src/lib/server/db/apipostdata.db.ts
Executable file
96
src/lib/server/db/apipostdata.db.ts
Executable file
@@ -0,0 +1,96 @@
|
||||
import type { PostDataEntry, PostDataHistory } from "$lib/utils/data.types";
|
||||
import { surreal } from "../connectors/surreal.db";
|
||||
|
||||
const getTableName = (date: string) => {
|
||||
return `apipostdata_${date.replaceAll("-", "")}`;
|
||||
};
|
||||
|
||||
const upsertData = async (data: PostDataHistory) => {
|
||||
const tableName = getTableName(data.bookDate);
|
||||
const [check] = await surreal.query<[PostDataHistory[]]>(
|
||||
`select * from type::table($tableName) where bookDate = $bookDate and drawId = $drawId`,
|
||||
{ tableName, bookDate: data.bookDate, drawId: data.drawId },
|
||||
);
|
||||
console.log(check);
|
||||
const firstOut = check.result ? check.result[0] : undefined;
|
||||
if (check.status === "OK" && !!firstOut && !!firstOut.id) {
|
||||
console.log(
|
||||
`Adding ${data.data.length} entries to ${firstOut.data.length} existing array`,
|
||||
);
|
||||
firstOut.data.push(...data.data);
|
||||
console.log(`Now have ${firstOut.data.length} entries in data list`);
|
||||
console.log(`[...] Updating data row in db now`);
|
||||
await surreal.update<PostDataHistory>(firstOut.id, {
|
||||
id: firstOut.id,
|
||||
data: firstOut.data,
|
||||
drawId: firstOut.drawId,
|
||||
bookDate: firstOut.bookDate,
|
||||
updatedAt: new Date().toISOString(),
|
||||
});
|
||||
|
||||
return;
|
||||
}
|
||||
await surreal.insert<PostDataHistory>(tableName, data);
|
||||
console.log(
|
||||
`[+] Inserted post data in ${tableName} for ${data.bookDate} - ${data.drawId}`,
|
||||
);
|
||||
};
|
||||
|
||||
const getPostDataByDraw = async (date: string, drawId: string) => {
|
||||
const tableName = getTableName(date);
|
||||
const [data] = await surreal.query<[PostDataHistory[]]>(
|
||||
`select * from type::table($tableName) where bookDate = $date and drawId = $drawId`,
|
||||
{
|
||||
tableName,
|
||||
date: date,
|
||||
drawId: parseInt(drawId.includes(":") ? drawId.split(":")[1] : drawId),
|
||||
},
|
||||
);
|
||||
let out = [] as PostDataEntry[];
|
||||
if (data.status === "OK" && data.result.length > 0) {
|
||||
out = data.result[0].data;
|
||||
}
|
||||
return out;
|
||||
};
|
||||
|
||||
async function doesPostHistoryDataExist(date: string, drawId: string) {
|
||||
const tableName = getTableName(date);
|
||||
const [data] = await surreal.query<[PostDataHistory[]]>(
|
||||
`select id from type::table($tableName) where bookDate = $date and drawId = $drawId`,
|
||||
{
|
||||
tableName,
|
||||
date: date,
|
||||
drawId: parseInt(drawId.includes(":") ? drawId.split(":")[1] : drawId),
|
||||
},
|
||||
);
|
||||
|
||||
if (data.status === "OK") {
|
||||
return data.result[0]?.id.length > 0;
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
async function deletePostDataByDraw(date: string, drawId: string) {
|
||||
const tableName = getTableName(date);
|
||||
const [data] = await surreal.query<[PostDataHistory[]]>(
|
||||
`select id from type::table($tableName) where bookDate = $date and drawId = $drawId`,
|
||||
{
|
||||
tableName,
|
||||
date: date,
|
||||
drawId: parseInt(drawId.includes(":") ? drawId.split(":")[1] : drawId),
|
||||
},
|
||||
);
|
||||
|
||||
if (data.status === "OK") {
|
||||
await surreal.delete(tableName);
|
||||
return true;
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
// Public API for the per-date post-data history tables (apipostdata_YYYYMMDD).
export const dbApiPostData = {
  upsertData,
  getPostDataByDraw,
  deletePostDataByDraw,
  doesPostHistoryDataExist,
};
|
||||
279
src/lib/server/db/apiuser.db.ts
Executable file
279
src/lib/server/db/apiuser.db.ts
Executable file
@@ -0,0 +1,279 @@
|
||||
import {
|
||||
type ApiUser,
|
||||
type LooseApiUser,
|
||||
type ApiPostUser,
|
||||
type ApiPostUserWithParent,
|
||||
ApiUserTypes,
|
||||
DEFAULT_RANDOM_DISTRIBUTOR,
|
||||
} from "$lib/utils/data.types";
|
||||
import { surreal } from "../connectors/surreal.db";
|
||||
|
||||
const getUserById = async (userId: string) => {
|
||||
const query = `select * from apiuser where id = $id`;
|
||||
const [rizzult] = await surreal.query<[ApiUser[]]>(query, { id: userId });
|
||||
return rizzult.result?.[0];
|
||||
};
|
||||
|
||||
const getAllIdsByUserType = async (userType: number) => {
|
||||
const query = `select value id from apiuser where userType = $userType`;
|
||||
const rizzult = (await surreal.query<[string[]]>(query, { userType }))[0];
|
||||
return (rizzult.result ?? []).map((e) => {
|
||||
return e.split(":")[1];
|
||||
});
|
||||
};
|
||||
|
||||
async function allUsersOfTypeLimitedInfo(userType: number) {
|
||||
const rizzult = (
|
||||
await surreal.query<[ApiPostUser[]]>(
|
||||
`select id,userName,userId,postData from apiuser where userType = $userType`,
|
||||
{ userType: userType },
|
||||
)
|
||||
)[0];
|
||||
if (rizzult.status == "OK") {
|
||||
return rizzult.result ?? [];
|
||||
}
|
||||
return [];
|
||||
}
|
||||
|
||||
async function setPostDataFlagForUsers(users: ApiPostUser[]) {
|
||||
for (const user of users) {
|
||||
const [u] = await surreal.select<ApiUser>(user.id);
|
||||
if (!u || !u.id) {
|
||||
continue;
|
||||
}
|
||||
await surreal.update<LooseApiUser>(user.id, {
|
||||
...u,
|
||||
postData: user.postData ?? false,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
const getUserTypeCount = async (userType: number) => {
|
||||
const queryBase = `select count() from apiuser where userType = $userType`;
|
||||
let query = `${queryBase} and disable = 0 group all`;
|
||||
let disabledQuery = `${queryBase} and disable = 1 group all`;
|
||||
const enabledRizzult = (
|
||||
await surreal.query<[{ count: number }[]]>(query, { userType: userType })
|
||||
)[0];
|
||||
const count = { enabled: 0, disabled: 0 };
|
||||
if (enabledRizzult.status == "OK") {
|
||||
count.enabled = enabledRizzult.result[0]?.count ?? 0;
|
||||
}
|
||||
const disabledRizzult = (
|
||||
await surreal.query<[{ count: number }[]]>(disabledQuery, {
|
||||
userType: userType,
|
||||
})
|
||||
)[0];
|
||||
if (disabledRizzult.status == "OK") {
|
||||
count.disabled = disabledRizzult.result[0]?.count ?? 0;
|
||||
}
|
||||
return count;
|
||||
};
|
||||
|
||||
const allUsersOfType = async (userType: number) => {
|
||||
const rizzult = (
|
||||
await surreal.query<[ApiUser[]]>(
|
||||
`select * from apiuser where userType = $userType`,
|
||||
{ userType: userType },
|
||||
)
|
||||
)[0];
|
||||
if (rizzult.status == "OK") {
|
||||
return rizzult.result ?? [];
|
||||
}
|
||||
return [];
|
||||
};
|
||||
|
||||
async function updatePostUsersBalances(
|
||||
payload: { balance: number; id: string }[],
|
||||
) {
|
||||
console.log("Updating users balances");
|
||||
console.log(payload);
|
||||
for (const each of payload) {
|
||||
const [rizzult] = await surreal.query<[ApiUser[]]>(
|
||||
`update $userId set balance = $balance`,
|
||||
{ userId: each.id, balance: each.balance },
|
||||
);
|
||||
if (rizzult.status !== "OK") {
|
||||
console.error("updatePostUsersBalance :: ", rizzult);
|
||||
}
|
||||
}
|
||||
console.log("Users balances updated");
|
||||
}
|
||||
|
||||
async function getAllPostUsers() {
|
||||
const [rizzult] = await surreal.query<[ApiPostUser[]]>(
|
||||
`select id,userName,userId,postData from apiuser where postData = true`,
|
||||
);
|
||||
if (rizzult.status === "OK") {
|
||||
return rizzult.result ?? [];
|
||||
}
|
||||
return [];
|
||||
}
|
||||
|
||||
async function getAllPostUsersWithParentUsers() {
|
||||
const [rizzult] = await surreal.query<[ApiPostUserWithParent[]]>(
|
||||
`select id,userName,userId,postData,parentDistributor,parentAdmin from apiuser where postData = true`,
|
||||
);
|
||||
if (rizzult.status === "OK") {
|
||||
return rizzult.result ?? [];
|
||||
}
|
||||
return [];
|
||||
}
|
||||
|
||||
const getAllDistributorsWithTheirChildren = async () => {
|
||||
const distributorIds = await getAllIdsByUserType(ApiUserTypes.DISTRIBUTOR);
|
||||
const out = distributorIds.map(async (id) => {
|
||||
const [rizzult] = await surreal.query<[ApiUser[]]>(
|
||||
`select *, (select * from apiuser where parentDistributor = $id) as children from apiuser where id = $prefixedId`,
|
||||
{ id, prefixedId: `apiuser:${id}` },
|
||||
);
|
||||
if (rizzult.status == "OK") {
|
||||
return rizzult.result[0];
|
||||
}
|
||||
return undefined;
|
||||
});
|
||||
const responses = await Promise.all(out);
|
||||
return responses;
|
||||
};
|
||||
|
||||
const getRandomDistributor = async (): Promise<ApiUser> => {
|
||||
const ignoreList = ["001OP9"];
|
||||
const randomUser = await _getRandomUser(ApiUserTypes.DISTRIBUTOR, ignoreList);
|
||||
if (!randomUser) {
|
||||
return DEFAULT_RANDOM_DISTRIBUTOR;
|
||||
}
|
||||
return randomUser as any as ApiUser;
|
||||
};
|
||||
|
||||
const getRandomDealer = async (): Promise<ApiUser | undefined> => {
|
||||
const ignoreList = ["rizgnore"];
|
||||
return _getRandomUser(ApiUserTypes.DEALER, ignoreList);
|
||||
};
|
||||
|
||||
const _getRandomUser = async (
|
||||
userType: number,
|
||||
ignoreList: string[],
|
||||
): Promise<ApiUser | undefined> => {
|
||||
console.log("_getRandomUser :: ", userType);
|
||||
const rizzult = (
|
||||
await surreal.query<[ApiUser[]]>(
|
||||
`select * from apiuser where disable = 0 and userType = $userType and userId notinside $ignoreList order by rand() limit 1`,
|
||||
{ userType: userType, ignoreList: ignoreList },
|
||||
)
|
||||
)[0];
|
||||
if (rizzult.status == "OK") {
|
||||
return rizzult.result[0];
|
||||
}
|
||||
};
|
||||
|
||||
const doesExist = async (userId?: string) => {
|
||||
console.log("doesExist :: ", userId);
|
||||
if (userId) {
|
||||
const [rizzult] = await surreal.query<{ count: number }[]>(
|
||||
"select count() from apiuser where userId = $userId group all",
|
||||
{ userId: userId },
|
||||
);
|
||||
if (rizzult.status == "OK") {
|
||||
return rizzult.result?.count > 0;
|
||||
}
|
||||
}
|
||||
return false;
|
||||
};
|
||||
|
||||
const insertMany = async (data: LooseApiUser[], postUsers: ApiPostUser[]) => {
|
||||
console.log("insertMany :: ", data.length);
|
||||
await surreal.insert<LooseApiUser>(
|
||||
"apiuser",
|
||||
data.map((e) => {
|
||||
return {
|
||||
...e,
|
||||
postData: !!postUsers.find((u) => u.userId === e.userId),
|
||||
createdAt: new Date().toISOString(),
|
||||
updatedAt: new Date().toISOString(),
|
||||
};
|
||||
}),
|
||||
);
|
||||
};
|
||||
|
||||
// Upserts the scraped user list into `apiuser`: optionally wipes all rows
// of `deleteUserType` first, then concurrently updates existing records
// and batch-inserts the rest. Disabled users (disable === 1) are skipped.
async function upsertMany(
  data: LooseApiUser[],
  wipeTable: boolean,
  deleteUserType: typeof ApiUserTypes.DISTRIBUTOR | typeof ApiUserTypes.DEALER,
) {
  // Snapshot of post-flagged users, used to seed postData on new rows.
  const postUsers = await getAllPostUsers();
  console.log(postUsers);
  if (wipeTable) {
    console.log("[wipeTable] :: deleting all previous users");
    await surreal.query("delete from apiuser where userType = $userType", {
      userType: deleteUserType,
    });
  }
  console.log("upsertMany :: ", data.length);
  // Users with no existing record; inserted in one batch at the end.
  const toCreate = [] as LooseApiUser[];
  // All per-user updates run concurrently; results awaited via allSettled.
  const out = data.map(async (apiUser) => {
    // INFO: if you do want to keep disabled users, remove this check
    if (apiUser.disable === 1) {
      return;
    }
    const [u] = await surreal.select<ApiUser>(`apiuser:${apiUser.id}`);
    if (!u || !u.id) {
      toCreate.push(apiUser);
      return;
    }
    // NOTE(review): the trailing `?? false` is unreachable — the `!!find`
    // operand is already a boolean and never nullish.
    let postData =
      u.postData ?? !!postUsers.find((pu) => pu.userId === u.userId) ?? false;
    const qId = u.id;
    // Overwrite all scraped fields; keep original createdAt, bump updatedAt.
    await surreal.update<LooseApiUser>(qId, {
      id: u.id,
      userId: apiUser.userId,
      userType: apiUser.userType,
      disableBooking: apiUser.disableBooking,
      sendVoucher: apiUser.sendVoucher,
      voucherGenerated: apiUser.voucherGenerated,
      parentAdmin: apiUser.parentAdmin,
      parentDistributor: apiUser.parentDistributor,
      userName: apiUser.userName,
      userCity: apiUser.userCity,
      password: apiUser.password,
      accessDenied: apiUser.accessDenied,
      phoneNumber: apiUser.phoneNumber,
      emailAddress: apiUser.emailAddress,
      disable: apiUser.disable,
      commission: apiUser.commission,
      commissionPangora: apiUser.commissionPangora,
      allowTitles: apiUser.allowTitles,
      specialDealer: apiUser.specialDealer,
      allowBalance: apiUser.allowBalance,
      balance: apiUser.balance,
      profitlossShare: apiUser.profitlossShare,
      shareProfitonly: apiUser.shareProfitonly,
      allowRemoveold: apiUser.allowRemoveold,
      removeDays: apiUser.removeDays,
      language: apiUser.language,
      postData,
      createdAt: u.createdAt,
      updatedAt: new Date().toISOString(),
    });
  });
  await Promise.allSettled(out);
  if (toCreate.length > 0) {
    await insertMany(toCreate, postUsers);
  }
}
|
||||
|
||||
// Public API surface for the `apiuser` table.
export const dbApiUser = {
  allUsersOfType,
  allUsersOfTypeLimitedInfo,
  getUserById,
  getAllDistributorsWithTheirChildren,
  getUserTypeCount,
  getAllIdsByUserType,
  getAllPostUsers,
  getAllPostUsersWithParentUsers,
  getRandomDistributor,
  getRandomDealer,
  doesExist,
  upsertMany,
  setPostDataFlagForUsers,
  updatePostUsersBalances,
};
|
||||
4
src/lib/server/db/booking.db.ts
Executable file
4
src/lib/server/db/booking.db.ts
Executable file
@@ -0,0 +1,4 @@
|
||||
|
||||
|
||||
// Placeholder: no booking-specific queries implemented yet.
export const dbBooking = {
};
|
||||
40
src/lib/server/db/finalsheet.db.ts
Executable file
40
src/lib/server/db/finalsheet.db.ts
Executable file
@@ -0,0 +1,40 @@
|
||||
import type { FinalSheetData } from "$lib/utils/data.types";
|
||||
import { surreal } from "../connectors/surreal.db";
|
||||
|
||||
const getTableName = (date: string) => {
|
||||
return `finalsheet${date.replaceAll("-", "")}`;
|
||||
};
|
||||
|
||||
const upsertData = async (data: FinalSheetData, date: string) => {
|
||||
const tableName = getTableName(date);
|
||||
const [present] = await surreal.query<[FinalSheetData[]]>(
|
||||
`select id from type::table($tableName) where date = $date and drawId = $drawId`,
|
||||
{ tableName, date: `${date}`, drawId: data.drawId }
|
||||
);
|
||||
if (present) {
|
||||
// @ts-ignore
|
||||
await surreal.update<FinalSheetData>(`${tableName}:${data.id}`, {
|
||||
date: data.date,
|
||||
drawId: data.drawId,
|
||||
data: data.data,
|
||||
totals: data.totals,
|
||||
// @ts-ignore
|
||||
createdAt: present?.result[0]?.createdAt ?? new Date().toISOString(),
|
||||
updatedAt: new Date().toISOString(),
|
||||
});
|
||||
} else {
|
||||
// @ts-ignore
|
||||
await surreal.create<FinalSheetData>(`${tableName}:${data.id}`, {
|
||||
date: data.date,
|
||||
drawId: data.drawId,
|
||||
data: data.data,
|
||||
totals: data.totals,
|
||||
createdAt: new Date().toISOString(),
|
||||
updatedAt: new Date().toISOString(),
|
||||
});
|
||||
}
|
||||
};
|
||||
|
||||
// Public API for the per-date final-sheet tables (finalsheetYYYYMMDD).
export const dbFinalSheet = {
  upsertData,
};
|
||||
65
src/lib/server/db/presetdata.db.ts
Executable file
65
src/lib/server/db/presetdata.db.ts
Executable file
@@ -0,0 +1,65 @@
|
||||
import type {
|
||||
ApiPostUser,
|
||||
PostDataEntry,
|
||||
PresetDataEntry,
|
||||
} from "$lib/utils/data.types";
|
||||
import { surreal } from "../connectors/surreal.db";
|
||||
|
||||
const getTableName = (date: string) => {
|
||||
return `presetdata_${date.replaceAll("-", "")}`;
|
||||
};
|
||||
|
||||
const insertData = async (data: PresetDataEntry[]) => {
|
||||
if (data.length < 1) return;
|
||||
const tableName = getTableName(data[0].bookDate);
|
||||
const out = await surreal.insert<PresetDataEntry>(tableName, data);
|
||||
console.log(
|
||||
`[+] Inserted post data in ${tableName} for ${data[0].bookDate} - ${data[0].drawId}`,
|
||||
);
|
||||
return out;
|
||||
};
|
||||
|
||||
const getDataByDraw = async (date: string, drawId: number) => {
|
||||
const tableName = getTableName(date);
|
||||
const [data] = await surreal.query<[PresetDataEntry[]]>(
|
||||
`select * from type::table($tableName) where bookDate = $date and drawId = $drawId`,
|
||||
{ tableName, date, drawId },
|
||||
);
|
||||
return data.result || ([] as PresetDataEntry[]);
|
||||
};
|
||||
|
||||
const getDataGroupedBySheetByDraw = async (date: string, drawId: number) => {
|
||||
const tableName = getTableName(date);
|
||||
const [data] = await surreal.query<[PresetDataEntry[]]>(
|
||||
`select * from type::table($tableName) where bookDate = $date and drawId = $drawId`,
|
||||
{ tableName, date, drawId },
|
||||
);
|
||||
const out = {
|
||||
abData: [] as PresetDataEntry[],
|
||||
abcData: [] as PresetDataEntry[],
|
||||
all: data.result || ([] as PresetDataEntry[]),
|
||||
};
|
||||
for (const row of data.result ?? []) {
|
||||
if (row.number.length === 2) {
|
||||
out.abData.push(row);
|
||||
} else if (row.number.length === 3) {
|
||||
out.abcData.push(row);
|
||||
}
|
||||
}
|
||||
return out;
|
||||
};
|
||||
|
||||
async function deleteDataByIds(date: string, ids: string[]) {
|
||||
const tableName = getTableName(date);
|
||||
await surreal.query<[PresetDataEntry[]]>(
|
||||
`delete from type::table($tableName) where id in $ids`,
|
||||
{ tableName, ids },
|
||||
);
|
||||
}
|
||||
|
||||
// Public API for the per-date preset tables (presetdata_YYYYMMDD).
export const dbPresetData = {
  insertData,
  getDataGroupedBySheetByDraw,
  getDataByDraw,
  deleteDataByIds,
};
|
||||
90
src/lib/server/db/user.db.ts
Executable file
90
src/lib/server/db/user.db.ts
Executable file
@@ -0,0 +1,90 @@
|
||||
import type { User } from "$lib/utils/data.types";
|
||||
import { surreal, type QueryResult } from "../connectors/surreal.db";
|
||||
|
||||
// CRUD helpers for the `user` table in SurrealDB.
// NOTE(review): `create` persists `password` exactly as received — presumably
// the caller hashes it first (see hashing.ts); verify before relying on this.
export const dbUser = {
  // True iff a user record with this username exists.
  doesExist: async (username?: string) => {
    if (username) {
      const [rizzult] = await surreal.query<{ count: number }[]>(
        "select count() from user where username = $username group all",
        { username: username }
      );
      if (rizzult.status == "OK") {
        return rizzult.result?.count > 0;
      }
    }
    // No username given, or the query did not succeed.
    return false;
  },
  // Create a new user; returns the created records, or a one-element
  // `[{ message }]` array when the username is already taken.
  create: async (data: {
    username: string;
    password: string;
    userType: string;
    association: string;
  }) => {
    const doesUserAlreadyExist = await dbUser.doesExist(data.username);
    console.log("doesUserAlreadyExist :: ", doesUserAlreadyExist);
    if (doesUserAlreadyExist) {
      return [{ message: "User already exists." }];
    }
    const { username, password, association, userType } = data;
    // Record id is generated server-side via SurrealDB's ulid().
    // Timestamps are stored as stringified epoch milliseconds.
    const out = await surreal.create<any>(`user:ulid()`, {
      createdAt: Date.now().toString(),
      updatedAt: Date.now().toString(),
      username,
      password,
      userType,
      association,
    });
    return out as User[];
  },
  // All user records.
  all: async () => {
    return await surreal.select<User>("user");
  },
  // Look up a single user by record id (fast path) or by username.
  get: async (d: {
    username?: string;
    id?: string;
  }): Promise<User | undefined> => {
    if (d.id) {
      return (await surreal.select<User>(`user:${d.id}`))[0];
    }
    if (d.username) {
      const rizzult = (
        await surreal.query<[User[]]>(
          `select * from user where username = $username`,
          { username: d.username }
        )
      )[0];
      if (rizzult.status == "OK") {
        return rizzult.result[0];
      }
    }
    return undefined;
  },
  // Users whose `association` points at the given username.
  getChildren: async (username?: string) => {
    const rizzult = await surreal.query<User[]>(
      `select * from user where association = $username`,
      { username: username }
    );
    return getParsedUsers(rizzult);
  },
  // Re-parent a user; only `association` (and updatedAt) are written.
  update: async (id: string, data: { association: string }) => {
    const [rizzult] = await surreal.update<User>(`user:${id}`, {
      updatedAt: Date.now().toString(),
      association: data.association,
    } as User);
    return rizzult;
  },
  // Delete a user and return the removed record's id.
  // NOTE(review): `out[0].id` will throw if nothing was deleted — confirm
  // callers only pass ids of existing users.
  delete: async (id: string) => {
    const out = await surreal.delete(`user:${id}`);
    return out[0].id;
  },
};
|
||||
|
||||
// Flatten successful query statements into a User list, skipping failures.
// NOTE(review): with `surreal.query<User[]>` each `result` is typed as a
// single User, yet a SELECT normally yields an array — verify the element
// shape against the surrealdb.js client before trusting this typing.
const getParsedUsers = (data: QueryResult<User>[]) => {
  const users = [] as User[];
  for (const each of data) {
    if (each.status == "OK") {
      users.push(each.result);
    }
  }
  return users;
};
|
||||
291
src/lib/server/deprecated.fs.hlprz.ts
Executable file
291
src/lib/server/deprecated.fs.hlprz.ts
Executable file
@@ -0,0 +1,291 @@
|
||||
import {
|
||||
COMMISSION_PERCENTAGE,
|
||||
LEXICODE_MATHCER_PATTERNS,
|
||||
LEXICODE_PRIZE_PERCENTAGES,
|
||||
NUMBERS_IN_FIRST_DRAW,
|
||||
NUMBERS_IN_SECOND_DRAW,
|
||||
} from "$lib/utils/constants";
|
||||
import type {
|
||||
BookingEntry,
|
||||
FinalSheetData,
|
||||
FinalSheetRow,
|
||||
LexiCodeCacheObject,
|
||||
ReducedFinalSheetData,
|
||||
ServerError,
|
||||
} from "$lib/utils/data.types";
|
||||
import { dbApiData } from "./db/apidata.db";
|
||||
import { redis } from "./connectors/redis";
|
||||
import { getULID, getUUID } from "$lib/utils";
|
||||
|
||||
/**
 * Build the full final sheet for one draw: for every 4-digit parent number,
 * aggregate rate/prize/frequency from all of its matching child bets
 * (per-lexiCode cells are attached to the row), then derive profit.
 * Entries whose number matches no known lexiCode are counted as failures.
 */
export const getCompiledFinalSheet = async (date: string, drawId: string) => {
  const bookingEntries = await dbApiData.getBookingEntriesByDraw(date, drawId);
  const finalSheet = {
    id: getULID(),
    date,
    drawId,
    data: [],
    totals: getDefaultTotals(),
  } as FinalSheetData;
  // parent number -> list of { number, lexiCode } child shapes (Redis-backed).
  const lexiCodeCache = await getLexiCodeCache();
  // per-number accumulated rate/prize/frequency from the booking entries.
  const { sheetCache, failed } = await getSheetCache(bookingEntries);
  if (failed.length > 0) {
    console.log(`[-] Failed to find lexicodes for ${failed.length} entries`);
  }
  console.log("[...] Now compiling the final sheet");
  for (const number of get4DigitGenerator()) {
    const fsRow = {
      rate: { first: 0, second: 0 },
      prize: { first: 0, second: 0 },
      profit: { first: 0, second: 0 },
    } as FinalSheetRow;
    const childNumbers = lexiCodeCache[number];
    // Running totals across all children of this parent number.
    let vals = getLexiCodeCacheObject(number);
    const sheetCacheKeys = Object.keys(sheetCache);
    for (const child of childNumbers) {
      const lco = getLexiCodeCacheObject(child.number);
      if (sheetCacheKeys.includes(child.number)) {
        lco.number = child.number;
        // TODO: make this into a loop for first and second
        vals.frequency.first += sheetCache[child.number].frequency.first;
        vals.rate.first += sheetCache[child.number].rate.first;
        vals.prize.first += sheetCache[child.number].prize.first;
        lco.frequency.first += sheetCache[child.number].frequency.first;
        lco.rate.first += sheetCache[child.number].rate.first;
        lco.prize.first += sheetCache[child.number].prize.first;

        vals.frequency.second += sheetCache[child.number].frequency.second;
        vals.rate.second += sheetCache[child.number].rate.second;
        vals.prize.second += sheetCache[child.number].prize.second;
        lco.frequency.second += sheetCache[child.number].frequency.second;
        lco.rate.second += sheetCache[child.number].rate.second;
        lco.prize.second += sheetCache[child.number].prize.second;
      }
      // Attach the per-lexiCode cell (zeroed when the child had no bookings).
      // @ts-ignore
      fsRow[child.lexiCode] = lco;
    }
    fsRow.id = getUUID();
    fsRow.number = number;
    fsRow.frequency = vals.frequency;
    fsRow.rate = { first: vals.rate.first, second: vals.rate.second };
    // TODO: MAYBE: recalculate these
    fsRow.prize = { first: vals.prize.first, second: vals.prize.second };
    fsRow.profit = {
      first: calculateProfit(vals.rate.first, vals.prize.first),
      second: calculateProfit(vals.rate.second, vals.prize.second),
    };
    finalSheet.data.push(fsRow);
  }
  console.log("[+] Final sheet compilation complete");
  return { finalSheet, ok: true, errors: [] as ServerError };
};
|
||||
|
||||
/**
 * Accumulate per-number frequency, rate and prize from raw booking entries.
 * Entries whose number matches no lexiCode pattern are returned in `failed`.
 * NOTE(review): prize uses `+=` of a value computed from the *cumulative*
 * rate, so repeated numbers compound the prize; the non-deprecated helper in
 * finalsheet.helpers.ts assigns (`=`) instead. This file is marked deprecated,
 * so the behavior is preserved here — confirm before reusing this logic.
 */
const getSheetCache = async (bookingEntries: BookingEntry[]) => {
  const sheetCache = {} as Record<string, LexiCodeCacheObject>;
  const failed = [] as BookingEntry[];
  console.log("[...] Preparing the FS final sheet cache");
  // INFO: first loop we calculate the vals for each indiv. number of the first and second
  for (const entry of bookingEntries) {
    const no = entry.number;
    const lexiCode = getLexiCode(no);
    if (!lexiCode) {
      failed.push(entry);
      continue;
    }
    if (sheetCache[no] === undefined) {
      sheetCache[no] = getLexiCodeCacheObject(no);
    }
    sheetCache[no].frequency.first++;
    sheetCache[no].rate.first += entry.first;
    sheetCache[no].prize.first += calculatePrize(
      getRateAfterCommission(sheetCache[no].rate.first),
      lexiCode,
      "first",
      getNoOfDigits(lexiCode),
      NUMBERS_IN_FIRST_DRAW
    );
    sheetCache[no].frequency.second++;
    sheetCache[no].rate.second += entry.second;
    sheetCache[no].prize.second += calculatePrize(
      getRateAfterCommission(sheetCache[no].rate.second),
      lexiCode,
      "second",
      getNoOfDigits(lexiCode),
      NUMBERS_IN_SECOND_DRAW
    );
  }
  return { sheetCache, failed };
};
|
||||
|
||||
const getDefaultTotals = () => {
|
||||
return {
|
||||
commission: { first: 0, second: 0 },
|
||||
netRate: { first: 0, second: 0 },
|
||||
rate: { first: 0, second: 0 },
|
||||
prize: { first: 0, second: 0 },
|
||||
profit: { first: 0, second: 0 },
|
||||
frequency: { first: 0, second: 0 },
|
||||
};
|
||||
};
|
||||
|
||||
const getNoOfDigits = (lexiCode: string) => {
|
||||
const lens = { 1: 10, 2: 100, 3: 1000, 4: 10000 };
|
||||
return lens[lexiCode.replaceAll("+", "").length as keyof typeof lens];
|
||||
};
|
||||
|
||||
const getRateAfterCommission = (rate: number) => {
|
||||
return rate - rate * COMMISSION_PERCENTAGE;
|
||||
};
|
||||
|
||||
const getLexiCodeCache = async () => {
|
||||
type CacheType = Record<string, { number: string; lexiCode: string }[]>;
|
||||
const rKey = "lexicodecache";
|
||||
const found = await redis.get(rKey);
|
||||
if (found) {
|
||||
return JSON.parse(found) as CacheType;
|
||||
}
|
||||
const cache = {} as CacheType;
|
||||
for (const number of get4DigitGenerator()) {
|
||||
cache[number] = getAllMatchingChildNumbers(number);
|
||||
}
|
||||
await redis.set(rKey, JSON.stringify(cache));
|
||||
return cache;
|
||||
};
|
||||
|
||||
const getAllMatchingChildNumbers = (parent: string) => {
|
||||
const out = [] as { number: string; lexiCode: string }[];
|
||||
out.push({ number: `${parent[0]}`, lexiCode: "a" });
|
||||
out.push({ number: `+${parent[0]}`, lexiCode: "+a" });
|
||||
out.push({ number: `++${parent[0]}`, lexiCode: "++a" });
|
||||
out.push({ number: `+++${parent[0]}`, lexiCode: "+++a" });
|
||||
out.push({ number: `${parent[0]}${parent[1]}`, lexiCode: "ab" });
|
||||
out.push({ number: `+${parent[0]}${parent[1]}`, lexiCode: "+ab" });
|
||||
out.push({ number: `${parent[0]}+${parent[1]}`, lexiCode: "a+b" });
|
||||
out.push({ number: `+${parent[0]}+${parent[1]}`, lexiCode: "+a+b" });
|
||||
out.push({ number: `++${parent[0]}${parent[1]}`, lexiCode: "++ab" });
|
||||
out.push({ number: `${parent[0]}++${parent[1]}`, lexiCode: "a++b" });
|
||||
out.push({ number: `${parent[0]}${parent[1]}${parent[2]}`, lexiCode: "abc" });
|
||||
out.push({
|
||||
number: `+${parent[0]}${parent[1]}${parent[2]}`,
|
||||
lexiCode: "+abc",
|
||||
});
|
||||
out.push({
|
||||
number: `${parent[0]}+${parent[1]}${parent[2]}`,
|
||||
lexiCode: "a+bc",
|
||||
});
|
||||
out.push({
|
||||
number: `${parent[0]}${parent[1]}+${parent[2]}`,
|
||||
lexiCode: "ab+c",
|
||||
});
|
||||
out.push({ number: parent, lexiCode: "abcd" });
|
||||
return out;
|
||||
};
|
||||
|
||||
function* get4DigitGenerator(): Generator<string> {
|
||||
for (let i = 0; i < 10; i++) {
|
||||
for (let j = 0; j < 10; j++) {
|
||||
for (let k = 0; k < 10; k++) {
|
||||
for (let l = 0; l < 10; l++) {
|
||||
yield `${i}${j}${k}${l}`;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const calculatePrize = (
|
||||
amount: number,
|
||||
lexiCode: string,
|
||||
type: "first" | "second",
|
||||
noOfDigits: number,
|
||||
noOfDrawNumbers: number
|
||||
) => {
|
||||
const lexiCodePercentage =
|
||||
LEXICODE_PRIZE_PERCENTAGES[type][
|
||||
lexiCode as keyof typeof LEXICODE_PRIZE_PERCENTAGES.first
|
||||
];
|
||||
if (amount && lexiCodePercentage > 0 && noOfDrawNumbers > 0) {
|
||||
return Number(
|
||||
(
|
||||
(amount * noOfDigits * (lexiCodePercentage / 100)) /
|
||||
noOfDrawNumbers
|
||||
).toFixed(2)
|
||||
);
|
||||
}
|
||||
return 0;
|
||||
};
|
||||
|
||||
const calculateProfit = (rate: number, prize: number) => {
|
||||
return getRateAfterCommission(rate) - prize;
|
||||
};
|
||||
|
||||
const getLexiCode = (no: string) => {
|
||||
for (const [lexicode, pattern] of Object.entries(LEXICODE_MATHCER_PATTERNS)) {
|
||||
if (pattern.test(no)) {
|
||||
return lexicode;
|
||||
}
|
||||
}
|
||||
return false;
|
||||
};
|
||||
|
||||
const getLexiCodeCacheObject = (no: string) => {
|
||||
return {
|
||||
number: no,
|
||||
rate: { first: 0, second: 0 },
|
||||
prize: { first: 0, second: 0 },
|
||||
frequency: { first: 0, second: 0 },
|
||||
} as LexiCodeCacheObject;
|
||||
};
|
||||
|
||||
// const getMatchingParents = (no: string) => {
|
||||
// const matching = [] as string[];
|
||||
// for (const parent of get4DigitGenerator()) {
|
||||
// if (doesNumberMatch(parent, no)) {
|
||||
// matching.push(parent);
|
||||
// }
|
||||
// }
|
||||
// return matching;
|
||||
// };
|
||||
|
||||
// const doesNumberMatch = (parent: string, child: string) => {
|
||||
// let allMatch = true;
|
||||
// for (let i = 0; i < child.length; i++) {
|
||||
// if (parent[i] !== child[i] && child[i] !== "+") {
|
||||
// allMatch = false;
|
||||
// }
|
||||
// }
|
||||
// return allMatch;
|
||||
// };
|
||||
|
||||
// const getLexiCodeCache = async () => {
|
||||
// const rKey = "lexicodecache";
|
||||
// const keyCount = await redis.keys(rKey);
|
||||
// if (keyCount.length === 1) {
|
||||
// return JSON.parse((await redis.get(rKey)) ?? "");
|
||||
// }
|
||||
// const lexicodeCache: Record<string, string[]> = {};
|
||||
// for (let i = 0; i < 10; i++) {
|
||||
// lexicodeCache[i.toString()] = getMatchingParents(i.toString());
|
||||
// lexicodeCache[`+${i}`] = getMatchingParents(`+${i}`);
|
||||
// lexicodeCache[`++${i}`] = getMatchingParents(`++${i}`);
|
||||
// lexicodeCache[`+++${i}`] = getMatchingParents(`+++${i}`);
|
||||
// for (let j = 0; j < 10; j++) {
|
||||
// lexicodeCache[`${i}${j}`] = getMatchingParents(`${i}${j}`);
|
||||
// lexicodeCache[`+${i}${j}`] = getMatchingParents(`+${i}${j}`);
|
||||
// lexicodeCache[`${i}+${j}`] = getMatchingParents(`${i}+${j}`);
|
||||
// lexicodeCache[`+${i}+${j}`] = getMatchingParents(`+${i}+${j}`);
|
||||
// lexicodeCache[`++${i}${j}`] = getMatchingParents(`++${i}${j}`);
|
||||
// lexicodeCache[`${i}++${j}`] = getMatchingParents(`${i}++${j}`);
|
||||
// for (let k = 0; k < 10; k++) {
|
||||
// lexicodeCache[`${i}${j}${k}`] = getMatchingParents(`${i}${j}${k}`);
|
||||
// lexicodeCache[`+${i}${j}${k}`] = getMatchingParents(`+${i}${j}${k}`);
|
||||
// lexicodeCache[`${i}+${j}${k}`] = getMatchingParents(`${i}+${j}${k}`);
|
||||
// lexicodeCache[`${i}${j}+${k}`] = getMatchingParents(`${i}${j}+${k}`);
|
||||
// for (let l = 0; l < 10; l++) {
|
||||
// lexicodeCache[`${i}${j}${k}${l}`] = [`${i}${j}${k}${l}`];
|
||||
// }
|
||||
// }
|
||||
// }
|
||||
// }
|
||||
// await redis.set(rKey, JSON.stringify(lexicodeCache));
|
||||
// return lexicodeCache;
|
||||
// };
|
||||
316
src/lib/server/external/api.scraping.helpers.ts
vendored
Executable file
316
src/lib/server/external/api.scraping.helpers.ts
vendored
Executable file
@@ -0,0 +1,316 @@
|
||||
import { getRandomUserAgent, getULID, sleep } from "$lib/utils";
|
||||
import { constants } from "$lib/utils/constants";
|
||||
import type { BookingEntry, Draw, LooseApiUser } from "$lib/utils/data.types";
|
||||
import { rng } from "$lib/utils/rng";
|
||||
|
||||
/**
 * Probe the upstream API with a cheap authenticated call to check whether
 * the given JWT session is still accepted. Never throws; any network/parse
 * failure is reported as an invalid session.
 * NOTE(review): probes against hardcoded userId=6339 — presumably a known
 * account on the remote system; confirm it stays valid.
 */
export const testIfSessionIsValid = async (jwt: string) => {
  try {
    const res = await fetch(
      `${constants.SCRAP_API_URL}/v1/user/get-balance?userId=6339`,
      {
        headers: {
          ...constants.SCRAP_API_BASE_HEADERS,
          Authorization: jwt,
          // Rotate agents to look like ordinary browser traffic.
          "User-Agent": getRandomUserAgent(),
        },
      },
    );
    if (res.status !== 200) {
      return false;
    }
    const rj = (await res.json()) as {
      code: number;
      success: boolean;
      message: string;
      data: any;
      time: string;
    };
    return rj.code == 200 && rj.success;
  } catch (err) {
    console.log(err);
    return false;
  }
};
|
||||
|
||||
export const getSessionToken = async (payload: {
|
||||
userId: string;
|
||||
password: string;
|
||||
verifyToken: string;
|
||||
code: string;
|
||||
userType: number;
|
||||
}): Promise<{ ok: boolean; message: string }> => {
|
||||
console.log("Requesting...");
|
||||
const res = await fetch(`${constants.SCRAP_API_URL}/v1/auth/login`, {
|
||||
method: "POST",
|
||||
headers: {
|
||||
...constants.SCRAP_API_BASE_HEADERS,
|
||||
"User-Agent": getRandomUserAgent(),
|
||||
"Content-Type": "application/json",
|
||||
},
|
||||
body: JSON.stringify(payload),
|
||||
});
|
||||
const out = await res.json();
|
||||
if (out.code !== 200) {
|
||||
return { ok: false, message: out.message };
|
||||
}
|
||||
return { ok: true, message: out.data.token };
|
||||
};
|
||||
|
||||
/**
 * Fetch a user's current balance from the upstream API.
 * Returns the numeric balance, or `false` on any HTTP/API/network failure.
 * NOTE(review): a legitimate balance of 0 is falsy — callers must compare
 * against `false` explicitly, not rely on truthiness.
 */
export async function getUsersBalance(userId: number, jwt: string) {
  try {
    const res = await fetch(
      `${constants.SCRAP_API_URL}/v1/user/get-balance?userId=${userId}`,
      {
        headers: {
          ...constants.SCRAP_API_BASE_HEADERS,
          Authorization: jwt,
          "User-Agent": getRandomUserAgent(),
        },
      },
    );
    const rj = (await res.json()) as {
      code: number;
      success: boolean;
      message: string;
      data: { allowedBalance: number; balance: number };
      time: string;
    };
    if (res.status !== 200 || rj.code !== 200 || !rj.success) {
      console.log(
        `[!] Error getting balance for ${userId} :: ${JSON.stringify(rj)}`,
      );
      return false;
    }
    return rj.data.balance;
  } catch (err) {
    console.log(err);
    return false;
  }
}
|
||||
|
||||
/**
 * Fetch the dealer list for each distributor id in parallel and flatten the
 * results. Per-distributor failures become entries in `errors` rather than
 * aborting the whole batch; only an unexpected exception yields the generic
 * error shape in the catch block.
 * NOTE(review): each request is delayed by a random 100ms-10s jitter —
 * presumably to avoid rate limiting on the remote API; confirm.
 */
export const getDealers = async (jwt: string, distributor_ids: string[]) => {
  try {
    // // Create an array of promises for each fetch request
    const requests = distributor_ids.map(async (did) => {
      await sleep(rng(100, 10000));
      const res = await fetch(
        `${constants.SCRAP_API_URL}/v1/user/dealer-list`,
        {
          method: "POST",
          headers: {
            ...constants.SCRAP_API_BASE_HEADERS,
            "Content-Type": "application/json",
            "User-Agent": getRandomUserAgent(),
            Authorization: jwt,
          },
          body: JSON.stringify({
            page: 1,
            // Effectively "all dealers" — remote API pages otherwise.
            pageSize: 999999,
            parentDistributor: parseInt(did),
          }),
        },
      );
      const data = (await res.json()) as {
        code: number;
        success: boolean;
        message: string;
        data: {
          items: any[];
          total: number;
        };
      };
      if (data.code !== 200 || !data.success) {
        return {
          dealers: [],
          ok: false,
          code: data.code,
          message: data.message,
        };
      }
      const dealers = data.data.items.map((item) => item.dealer);
      return {
        dealers,
        ok: res.status === 200 && data.success,
        code: data.code,
        message: data.message,
      };
    });
    // // Wait for all promises to resolve
    const responses = await Promise.all(requests);
    const dealers: LooseApiUser[] = [];
    const errors: { message: string }[] = [];
    for (const res of responses) {
      if (res.code !== 200 || !res.ok) {
        errors.push({ message: res.message });
        continue;
      }
      for (const dealer of res.dealers) {
        dealers.push(dealer);
      }
    }

    // fs.writeFileSync("dealers.json", JSON.stringify(dealers, null, 2));

    return { dealers, errors };
  } catch (err) {
    console.error(err);
    return {
      dealers: [],
      errors: [{ message: "An error occured during fetching dealers" }],
    };
  }
};
|
||||
|
||||
export const getDistributors = async (jwt: string) => {
|
||||
const res = await fetch(
|
||||
`${constants.SCRAP_API_URL}/v1/user/distributor-list`,
|
||||
{
|
||||
method: "POST",
|
||||
headers: {
|
||||
...constants.SCRAP_API_BASE_HEADERS,
|
||||
Authorization: jwt,
|
||||
"Content-Type": "application/json",
|
||||
"User-Agent": getRandomUserAgent(),
|
||||
},
|
||||
body: JSON.stringify({
|
||||
page: 1,
|
||||
pageSize: 999999,
|
||||
parentDistributor: 15,
|
||||
}),
|
||||
},
|
||||
);
|
||||
const json = (await res.json()) as {
|
||||
code: number;
|
||||
success: boolean;
|
||||
message: string;
|
||||
data: { total: number; items: any[] };
|
||||
};
|
||||
|
||||
if (!json.data.items || json.code !== 200 || !json.success) {
|
||||
return { ok: false, message: json.message, data: [] };
|
||||
}
|
||||
|
||||
// fs.writeFileSync(
|
||||
// "distributors.json",
|
||||
// JSON.stringify(json.data.items, null, 2),
|
||||
// );
|
||||
|
||||
return {
|
||||
ok: true,
|
||||
message: "",
|
||||
data: json.data.items.map((item) => item.distributor),
|
||||
};
|
||||
};
|
||||
|
||||
export const getDraws = async (jwt: string) => {
|
||||
const res = await fetch(
|
||||
`${constants.SCRAP_API_URL}/v1/draw/list-my?userId=15`,
|
||||
{
|
||||
method: "GET",
|
||||
headers: {
|
||||
...constants.SCRAP_API_BASE_HEADERS,
|
||||
Authorization: jwt,
|
||||
"Content-Type": "application/json",
|
||||
"User-Agent": getRandomUserAgent(),
|
||||
},
|
||||
},
|
||||
);
|
||||
type J = {
|
||||
code: number;
|
||||
success: boolean;
|
||||
message: string;
|
||||
data: { draw: Draw }[];
|
||||
};
|
||||
let decoded = (await res.json()) as { data: J };
|
||||
const json = (decoded.data.success ? decoded.data : decoded) as any as J;
|
||||
if (json.code !== 200 || !json.success || !json.data) {
|
||||
return { ok: false, message: json.message, data: [] };
|
||||
}
|
||||
return {
|
||||
ok: true,
|
||||
message: "",
|
||||
data: json.data.map((item) => item.draw),
|
||||
};
|
||||
};
|
||||
|
||||
export const getData = async (
|
||||
jwt: string,
|
||||
userIds: number[],
|
||||
drawId: number,
|
||||
chosenDate: string,
|
||||
) => {
|
||||
const res = await fetch(`${constants.SCRAP_API_URL}/v1/book/list2`, {
|
||||
method: "POST",
|
||||
headers: {
|
||||
...constants.SCRAP_API_BASE_HEADERS,
|
||||
Authorization: jwt,
|
||||
"Content-Type": "application/json",
|
||||
"User-Agent": getRandomUserAgent(),
|
||||
},
|
||||
body: JSON.stringify({
|
||||
userType: 3,
|
||||
userIds,
|
||||
drawId: drawId,
|
||||
startDate: chosenDate,
|
||||
endDate: chosenDate,
|
||||
beAdmin: false,
|
||||
containImported: false,
|
||||
keyword: "",
|
||||
}),
|
||||
});
|
||||
type J = {
|
||||
code: number;
|
||||
success: boolean;
|
||||
message: string;
|
||||
data: { book: BookingEntry; user: any }[];
|
||||
};
|
||||
let decoded = (await res.json()) as { data: J };
|
||||
const json = (decoded.data.success ? decoded.data : decoded) as any as J;
|
||||
if (json.code !== 200 || !json.success || !json.data) {
|
||||
return { ok: false, message: json.message, data: [] };
|
||||
}
|
||||
return { ok: true, message: "", data: json.data.map((e) => e.book) };
|
||||
};
|
||||
|
||||
export const mockGetUserData = async (
|
||||
jwt: string,
|
||||
userIds: number[],
|
||||
drawId: number,
|
||||
chosenDate: string,
|
||||
) => {
|
||||
console.log("Rizzzzard of Mogwards!");
|
||||
const entries = [] as BookingEntry[];
|
||||
|
||||
const rng = (min: number, max: number) => {
|
||||
return Math.floor(Math.random() * (max - min + 1)) + min;
|
||||
};
|
||||
const randomCeil = rng(10_000, 200_000);
|
||||
|
||||
await sleep(rng(100, 1000));
|
||||
|
||||
for (let i = 0; i < randomCeil; i++) {
|
||||
const _f = rng(5, 50);
|
||||
const _s = rng(5, 50);
|
||||
const f = _f - (_f % 5);
|
||||
const s = _s - (_s % 5);
|
||||
|
||||
entries.push({
|
||||
id: getULID(),
|
||||
bookDate: chosenDate,
|
||||
changedBalance: f + s,
|
||||
first: f,
|
||||
second: s,
|
||||
dealerId: userIds[rng(0, userIds.length - 1)],
|
||||
distributorId: 6339,
|
||||
drawId: drawId,
|
||||
number: rng(0, 9999).toString(),
|
||||
requestId: new Date().getTime().toString(),
|
||||
createdAt: new Date().toISOString(),
|
||||
updatedAt: new Date().toISOString(),
|
||||
sheetId: "0",
|
||||
sheetName: "",
|
||||
});
|
||||
}
|
||||
|
||||
return { ok: true, message: "", data: entries };
|
||||
};
|
||||
270
src/lib/server/finalsheet.helpers.ts
Executable file
270
src/lib/server/finalsheet.helpers.ts
Executable file
@@ -0,0 +1,270 @@
|
||||
import type {
|
||||
BookingEntry,
|
||||
FSTotals,
|
||||
FinalSheetRow,
|
||||
LexiCodeCacheObject,
|
||||
ReducedFinalSheetData,
|
||||
ReducedFinalSheetRow,
|
||||
ServerError,
|
||||
} from "$lib/utils/data.types";
|
||||
import { dbApiData } from "./db/apidata.db";
|
||||
import { redis } from "./connectors/redis";
|
||||
import { getDefaultTotals, getULID } from "$lib/utils";
|
||||
import {
|
||||
calculatePrize,
|
||||
calculateProfit,
|
||||
get4DigitGenerator,
|
||||
getAllMatchingChildNumbers,
|
||||
getCommisionAmt,
|
||||
getLexiCode,
|
||||
getLexiCodeCacheObject,
|
||||
getNoOfDigits,
|
||||
getNetRate,
|
||||
} from "$lib/utils/finalsheet.utils";
|
||||
|
||||
/**
 * Compile the reduced final sheet for the draw described by `fsData`,
 * mutating `fsData` in place: fills `fsData.totals` and appends one row per
 * 4-digit parent number with aggregated rate/prize/frequency/profit.
 * Returns only an ok/errors envelope — callers read results off `fsData`.
 */
export const getReducedFinalSheet = async (fsData: ReducedFinalSheetData) => {
  const bookingEntries = await dbApiData.getBookingEntriesByDraw(
    fsData.date,
    fsData.drawId
  );
  console.log(
    `[...] Got ${bookingEntries.length} booking entries for ${fsData.date}, draw ${fsData.drawId}`
  );
  // parent number -> list of { number, lexiCode } child shapes (Redis-backed).
  const lexiCodeCache = await getLexiCodeCache();
  // Per-number accumulators plus sheet-wide totals.
  const { sheetCache, totals, failed } = await getSheetCache(bookingEntries);
  // Set membership is O(1) vs Object.keys().includes in the hot loop below.
  const sheetCacheKeys = new Set();
  for (const each of Object.keys(sheetCache)) {
    sheetCacheKeys.add(each);
  }
  if (failed.length > 0) {
    console.log(`[-] Failed to find lexicodes for ${failed.length} entries`);
    console.log(failed.map((e) => e.number));
  }
  fsData.totals = totals;
  console.log("[...] Compiling the final sheet");
  // fs.writeFileSync("test.json", JSON.stringify(sheetCache, null, 2));
  // NOTE(review): highestRate is tracked but never returned or stored —
  // looks like dead code or an unfinished feature; confirm.
  let highestRate = { first: 0, second: 0 };
  for (const number of get4DigitGenerator()) {
    const fsRow = {
      id: getULID(),
      number: number,
      rate: { first: 0, second: 0 },
      prize: { first: 0, second: 0 },
      profit: { first: 0, second: 0 },
      frequency: { first: 0, second: 0 },
      frequencies: { abcd: { first: 0, second: 0 } },
    } as ReducedFinalSheetRow;
    const childNumbers = lexiCodeCache[number];
    // Running totals across all booked children of this parent number.
    let vals = getLexiCodeCacheObject(number);

    for (const child of childNumbers) {
      if (!sheetCacheKeys.has(child.number)) {
        continue;
      }
      for (const each of ["first", "second"] as const) {
        vals.frequency[each] += sheetCache[child.number].frequency[each];
        vals.rate[each] += sheetCache[child.number].rate[each];
        vals.prize[each] += sheetCache[child.number].prize[each];
        // Only the exact-match (abcd) frequency is surfaced per row.
        if (child.lexiCode !== "abcd") {
          continue;
        }
        fsRow.frequencies[child.lexiCode][each] =
          sheetCache[child.number].frequency[each];
      }
    }

    for (const each of ["first", "second"] as const) {
      fsRow.frequency[each] = vals.frequency[each];
      fsRow.rate[each] = vals.rate[each];
      fsRow.prize[each] = vals.prize[each];
      // Profit uses the sheet-wide net rate, not this row's own rate.
      fsRow.profit[each] = calculateProfit(
        totals.netRate[each],
        vals.prize[each]
      );
    }
    if (fsRow.rate.first > highestRate.first) {
      highestRate.first = fsRow.rate.first;
    }
    if (fsRow.rate.second > highestRate.second) {
      highestRate.second = fsRow.rate.second;
    }
    // console.log(fsRow);
    // throw new Error("test");
    fsData.data.push(fsRow);
  }
  console.log("[+] Reduced Final sheet compilation complete");
  return { ok: true, errors: [] as ServerError };
};
|
||||
|
||||
/**
 * Compute a single final-sheet row for one 4-digit `number` of a draw,
 * aggregating the booked child shapes exactly like `getReducedFinalSheet`
 * does per row, without building the whole sheet.
 */
export const getTargetFSRow = async (
  drawId: string,
  date: string,
  number: string
) => {
  const bookingEntries = await dbApiData.getBookingEntriesByDraw(date, drawId);
  const lexiCodeCache = await getLexiCodeCache();
  const { sheetCache, totals, failed } = await getSheetCache(bookingEntries);
  if (failed.length > 0) {
    console.log(`[-] Failed to find lexicodes for ${failed.length} entries`);
    console.log(failed.map((e) => e.number));
  }
  console.log("[...] Preparing the FS row");
  // fs.writeFileSync("test.json", JSON.stringify(sheetCache, null, 2));
  // NOTE(review): highestRate is tracked but never used past this function —
  // appears to be dead code copied from the full-sheet path; confirm.
  let highestRate = { first: 0, second: 0 };
  const fsRow = {
    id: getULID(),
    number: number,
    rate: { first: 0, second: 0 },
    prize: { first: 0, second: 0 },
    profit: { first: 0, second: 0 },
    frequency: { first: 0, second: 0 },
  } as FinalSheetRow;
  const childNumbers = lexiCodeCache[number];
  // Running totals across all booked children of this parent number.
  let vals = getLexiCodeCacheObject(number);
  const sheetCacheKeys = Object.keys(sheetCache);
  for (const child of childNumbers) {
    if (sheetCacheKeys.includes(child.number)) {
      for (const each of ["first", "second"] as const) {
        vals.frequency[each] += sheetCache[child.number].frequency[each];
        vals.rate[each] += sheetCache[child.number].rate[each];
        vals.prize[each] += sheetCache[child.number].prize[each];
      }
    }
  }
  for (const each of ["first", "second"] as const) {
    fsRow.frequency[each] = vals.frequency[each];
    fsRow.rate[each] = vals.rate[each];
    fsRow.prize[each] = vals.prize[each];
    // Profit uses the sheet-wide net rate, not this row's own rate.
    fsRow.profit[each] = calculateProfit(
      totals.netRate[each],
      vals.prize[each]
    );
  }
  if (fsRow.rate.first > highestRate.first) {
    highestRate.first = fsRow.rate.first;
  }
  if (fsRow.rate.second > highestRate.second) {
    highestRate.second = fsRow.rate.second;
  }
  console.log("[+] FS Row prepared");
  return { ok: true, errors: [] as ServerError, data: fsRow };
};
|
||||
|
||||
/**
 * Accumulate per-number frequency, rate and prize from raw booking entries,
 * plus sheet-wide totals (rate, prize, frequency, commission, net rate).
 * Entries whose number matches no lexiCode pattern are returned in `failed`.
 * Prize per number is *recomputed* (assigned, not accumulated) from the
 * cumulative rate on each entry, so the final value reflects the total rate.
 */
const getSheetCache = async (bookingEntries: BookingEntry[]) => {
  const sheetCache = {} as Record<string, LexiCodeCacheObject>;
  const totals = getDefaultTotals() as FSTotals;
  const failed = [] as BookingEntry[];
  console.log("[...] Preparing the pre-FS cache");
  // INFO: first loop we calculate the vals for each indiv. number of the first and second
  for (const entry of bookingEntries) {
    const no = entry.number;
    const lexiCode = getLexiCode(no);
    if (!lexiCode) {
      failed.push(entry);
      continue;
    }
    if (sheetCache[no] === undefined) {
      sheetCache[no] = getLexiCodeCacheObject(no);
    }
    for (const each of ["first", "second"] as const) {
      // Only entries that actually bet on this side count toward frequency.
      if (entry[each] > 0) {
        sheetCache[no].frequency[each]++;
      }
      sheetCache[no].rate[each] += entry[each];
      sheetCache[no].prize[each] = calculatePrize(
        sheetCache[no].rate[each],
        lexiCode,
        each,
        getNoOfDigits(lexiCode)
      );
    }
  }
  if (bookingEntries.length > 0) {
    // Roll the per-number accumulators up into sheet-wide totals.
    for (const value of Object.values(sheetCache)) {
      for (const each of ["first", "second"] as const) {
        totals.frequency[each] += value.frequency[each];
        totals.prize[each] += value.prize[each];
        totals.rate[each] += value.rate[each];
      }
    }
    for (const each of ["first", "second"] as const) {
      totals.commission[each] = getCommisionAmt(totals.rate[each]);
      totals.netRate[each] = getNetRate(totals.rate[each]);
    }
  }
  return { sheetCache, failed, totals };
};
|
||||
|
||||
const getLexiCodeCache = async () => {
|
||||
type CacheType = Record<string, { number: string; lexiCode: string }[]>;
|
||||
const rKey = "lexicodecache";
|
||||
const found = await redis.get(rKey);
|
||||
if (found) {
|
||||
return JSON.parse(found) as CacheType;
|
||||
}
|
||||
const cache = {} as CacheType;
|
||||
for (const number of get4DigitGenerator()) {
|
||||
cache[number] = getAllMatchingChildNumbers(number);
|
||||
}
|
||||
await redis.setex(rKey, 3600 * 24, JSON.stringify(cache));
|
||||
return cache;
|
||||
};
|
||||
|
||||
const getCachedReducedFinalSheet = async (date: string, drawId: string) => {
|
||||
const key = `cfinalsheet:${date}:${drawId}`;
|
||||
const cached = await redis.get(key);
|
||||
if (cached) {
|
||||
return JSON.parse(cached);
|
||||
}
|
||||
return null;
|
||||
};
|
||||
|
||||
const setCachedReducedFinalSheet = async (
|
||||
date: string,
|
||||
drawId: string,
|
||||
data: ReducedFinalSheetData
|
||||
) => {
|
||||
const key = `cfinalsheet:${date}:${drawId}`;
|
||||
await redis.setex(key, 3600 * 24, JSON.stringify(data));
|
||||
};
|
||||
|
||||
export const deleteCachedReducedFinalSheet = async (
|
||||
date: string,
|
||||
drawId: string
|
||||
) => {
|
||||
await redis.del(`cfinalsheet:${date}:${drawId}`);
|
||||
};
|
||||
|
||||
/**
 * Build a deterministic test dataset: exactly one booking entry (first=10,
 * second=10) for every distinct child number of every lexiCode shape, so a
 * compiled sheet exercises every cell once.
 * NOTE(review): `drawId.split(":")[1]` assumes a SurrealDB record id of the
 * form "table:id"; a bare id would yield Number(undefined) = NaN — confirm
 * callers always pass the record form.
 */
export const getTestBookingData = async (drawId: string, date: string) => {
  // lexiCode -> set of distinct child numbers (dedupes across parents).
  const numbers = {} as Record<string, Set<string>>;
  for (const each of get4DigitGenerator()) {
    const childNumbers = getAllMatchingChildNumbers(each);
    for (const child of childNumbers) {
      if (numbers[child.lexiCode] === undefined) {
        numbers[child.lexiCode] = new Set();
      }
      numbers[child.lexiCode].add(child.number);
    }
  }
  const out = [] as BookingEntry[];
  for (const v of Object.values(numbers)) {
    for (const child of v) {
      const entry = {
        id: getULID(),
        drawId: Number(drawId.split(":")[1]),
        bookDate: date,
        date,
        number: child,
        first: 10,
        second: 10,
        // Fixed dealer/distributor ids for reproducible test data.
        dealerId: 4677,
        sheetId: "test",
        requestId: getULID(),
        distributorId: 6339,
        changedBalance: 0,
      } as BookingEntry;
      out.push(entry);
    }
  }
  return out;
};
|
||||
11
src/lib/server/hashing.ts
Executable file
11
src/lib/server/hashing.ts
Executable file
@@ -0,0 +1,11 @@
|
||||
import bcrypt from "bcryptjs";
|
||||
|
||||
const rounds = 10;
|
||||
|
||||
export const hashData = (data: string): string => {
|
||||
return bcrypt.hashSync(data, rounds);
|
||||
};
|
||||
|
||||
export const compareData = (data: string, hash: string): boolean => {
|
||||
return bcrypt.compareSync(data, hash);
|
||||
};
|
||||
445
src/lib/server/postdata/post.handler.ts
Normal file
445
src/lib/server/postdata/post.handler.ts
Normal file
@@ -0,0 +1,445 @@
|
||||
import { getRandomUserAgent, getULID, sleep } from "$lib/utils";
|
||||
import { constants } from "$lib/utils/constants";
|
||||
import type {
|
||||
ApiPostUserWithParent,
|
||||
APISession,
|
||||
Draw,
|
||||
PostDataEntry,
|
||||
ServerError,
|
||||
} from "$lib/utils/data.types";
|
||||
import Fetch from "node-fetch";
|
||||
import { HttpsProxyAgent } from "https-proxy-agent";
|
||||
|
||||
// Envelope returned by the upstream scrap API.
// NOTE(review): "APIRespnose" is a typo for "APIResponse"; the alias is
// exported, so renaming needs a coordinated refactor of all call sites.
export type APIRespnose<T> = {
  code: number; // application-level status (200 = success; checked separately from the HTTP status)
  success: boolean;
  message: string;
  data: T; // endpoint-specific payload
  time: string; // server-side timestamp — format not verified here, TODO confirm
};
|
||||
|
||||
export function buildMessageString(
|
||||
i: number,
|
||||
rows: PostDataEntry[],
|
||||
distributorId: number,
|
||||
dealerId: number,
|
||||
drawId: number,
|
||||
date: string,
|
||||
) {
|
||||
let message = "";
|
||||
let jumpSize = Math.floor(Math.random() * 490) + 10;
|
||||
let total = 0;
|
||||
let startReqId = new Date().getTime();
|
||||
let x = 0;
|
||||
for (let j = i; j < i + jumpSize; j++) {
|
||||
if (j >= rows.length) {
|
||||
break;
|
||||
}
|
||||
const row = rows[j];
|
||||
const reqId = startReqId + x++;
|
||||
const no = row.number.trim();
|
||||
const f = row.first;
|
||||
const s = row.second;
|
||||
const mTotal = f + s;
|
||||
if (mTotal <= 0) {
|
||||
continue;
|
||||
}
|
||||
total += mTotal;
|
||||
message += `${reqId},${distributorId},${dealerId},${drawId},${date},${no},${f},${s},${mTotal};`;
|
||||
}
|
||||
message = message.slice(0, -1);
|
||||
return { message, total, jumped: i + jumpSize };
|
||||
}
|
||||
|
||||
export async function postDataToApi(payload: {
|
||||
sessions: Record<string, APISession>;
|
||||
draw: Draw;
|
||||
data: PostDataEntry[];
|
||||
users: ApiPostUserWithParent[];
|
||||
}) {
|
||||
const responses = [] as APIRespnose<[]>[];
|
||||
const responsesIds = [] as { requestId: number; bookId: string }[];
|
||||
let failedResponses = 0;
|
||||
let successResponses = 0;
|
||||
|
||||
console.log(`[+] Sending ${payload.data.length} requests...`);
|
||||
|
||||
const dataByUser = {} as Record<string, PostDataEntry[]>;
|
||||
for (const row of payload.data) {
|
||||
const userId = row.userId ?? "";
|
||||
if (userId.length < 1) {
|
||||
console.log(`[!] User not found for request ${row.userId}`);
|
||||
return {
|
||||
ok: false,
|
||||
detail: "User not found to post data with",
|
||||
errors: [{ message: "User not found for request" }] as ServerError,
|
||||
};
|
||||
}
|
||||
if (!dataByUser[userId]) {
|
||||
dataByUser[userId] = [];
|
||||
}
|
||||
dataByUser[userId].push(row);
|
||||
}
|
||||
|
||||
try {
|
||||
for (const userId in dataByUser) {
|
||||
const session = payload.sessions[userId];
|
||||
const usr = payload.users.find((u) => u.userId === userId);
|
||||
if (!usr) {
|
||||
console.log(`[!] User ${userId} not found for posting to api`);
|
||||
return {
|
||||
ok: false,
|
||||
detail: "User not found to post data with",
|
||||
errors: [{ message: "User not found for request" }] as ServerError,
|
||||
};
|
||||
}
|
||||
const distId = usr.parentDistributor ?? 0;
|
||||
const dealerId = Number(session.userId.split(":")[1]);
|
||||
const drawId = Number(payload.draw.id.split(":")[1]);
|
||||
const date = new Date().toISOString().split("T")[0];
|
||||
|
||||
let i = 0;
|
||||
while (i < dataByUser[userId].length) {
|
||||
let tries = 0;
|
||||
while (tries < 3) {
|
||||
let { message, total, jumped } = buildMessageString(
|
||||
i,
|
||||
dataByUser[userId],
|
||||
distId,
|
||||
dealerId,
|
||||
drawId,
|
||||
date,
|
||||
);
|
||||
const res = await sendBatchRequest(
|
||||
session,
|
||||
dealerId,
|
||||
payload.draw,
|
||||
total,
|
||||
message,
|
||||
);
|
||||
const rj = (await res.json()) as APIRespnose<{
|
||||
bookDtos: { bookId: string; requestId: number }[];
|
||||
}>;
|
||||
if (rj.code === 200 && res.status === 200) {
|
||||
i = jumped;
|
||||
responsesIds.push(
|
||||
...rj.data.bookDtos.map((b) => ({
|
||||
requestId: b.requestId as number,
|
||||
bookId: b.bookId as string,
|
||||
})),
|
||||
);
|
||||
successResponses++;
|
||||
break;
|
||||
}
|
||||
failedResponses++;
|
||||
tries++;
|
||||
}
|
||||
|
||||
if (tries >= 3) {
|
||||
console.log(
|
||||
`[!] Failed to send data to api for user ${userId}, deleting all booked entries...`,
|
||||
);
|
||||
console.log(responsesIds);
|
||||
if (responsesIds.length > 0) {
|
||||
const out = await deleteAllBookedEntries({
|
||||
data: responsesIds,
|
||||
closeTime: payload.draw.closeTime,
|
||||
dealerId,
|
||||
drawId,
|
||||
session,
|
||||
});
|
||||
console.log(await out.text());
|
||||
}
|
||||
return {
|
||||
ok: false,
|
||||
detail: "Failed to post data to API halfway through",
|
||||
errors: [
|
||||
{ message: "Failed to post data to API halfway through" },
|
||||
] as ServerError,
|
||||
};
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
console.log(`[+] Finished sending ${payload.data.length} requests`);
|
||||
console.log(`[?] Failed responses: ${failedResponses}`);
|
||||
console.log(`[?] Success responses: ${successResponses}`);
|
||||
return {
|
||||
ok: true,
|
||||
detail: "Successfully sent data to api",
|
||||
data: responses,
|
||||
};
|
||||
} catch (err) {
|
||||
console.log(err);
|
||||
return {
|
||||
ok: false,
|
||||
detail: "Failed to send data to api",
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
async function sendBatchRequest(
|
||||
session: APISession,
|
||||
dealerId: number,
|
||||
draw: Draw,
|
||||
changedBalance: number,
|
||||
body: string,
|
||||
) {
|
||||
return Fetch(`${constants.SCRAP_API_URL}/v1/book/add-multiple`, {
|
||||
agent: new HttpsProxyAgent(`http://${session.ip}`),
|
||||
method: "POST",
|
||||
headers: {
|
||||
...constants.SCRAP_API_BASE_HEADERS,
|
||||
"Content-Type": "application/json;charset=UTF-8",
|
||||
Authorization: session.sessionToken,
|
||||
"User-Agent": getRandomUserAgent(),
|
||||
},
|
||||
body: JSON.stringify({
|
||||
changedBalance,
|
||||
closeTime: draw.closeTime,
|
||||
date: new Date().toISOString().split("T")[0],
|
||||
dealerId,
|
||||
drawId: Number(draw.id.split(":")[1]),
|
||||
insertData: body,
|
||||
}),
|
||||
});
|
||||
}
|
||||
|
||||
async function mockSendBatchRequest(
|
||||
session: APISession,
|
||||
dealerId: number,
|
||||
draw: Draw,
|
||||
changedBalance: number,
|
||||
body: string,
|
||||
) {
|
||||
// between 5 to 20 ms
|
||||
await sleep(Math.floor(Math.random() * 1000) + 50);
|
||||
if (Math.random() < 0.005) {
|
||||
return new Response(
|
||||
JSON.stringify({
|
||||
code: 500,
|
||||
success: false,
|
||||
message: "Failed",
|
||||
data: {},
|
||||
time: new Date().toISOString(),
|
||||
}),
|
||||
{
|
||||
status: 500,
|
||||
headers: { "Content-Type": "application/json" },
|
||||
statusText: "Failed",
|
||||
},
|
||||
);
|
||||
}
|
||||
return new Response(
|
||||
JSON.stringify({
|
||||
code: 200,
|
||||
success: true,
|
||||
message: "Success",
|
||||
data: {
|
||||
bookDtos: body.split(";").map((e) => {
|
||||
return {
|
||||
bookId: getULID(),
|
||||
requestId: +e.split(",")[0],
|
||||
};
|
||||
}),
|
||||
},
|
||||
time: new Date().toISOString(),
|
||||
}),
|
||||
{
|
||||
status: 200,
|
||||
headers: { "Content-Type": "application/json" },
|
||||
statusText: "OK",
|
||||
},
|
||||
);
|
||||
}
|
||||
|
||||
async function sendRequest(
|
||||
requestId: number,
|
||||
session: APISession,
|
||||
body: PostDataEntry,
|
||||
dealerId: number,
|
||||
distributorId: number,
|
||||
draw: Draw,
|
||||
) {
|
||||
return Fetch(`${constants.SCRAP_API_URL}/v1/book/add`, {
|
||||
agent: new HttpsProxyAgent(`http://${session.ip}`),
|
||||
method: "POST",
|
||||
headers: {
|
||||
...constants.SCRAP_API_BASE_HEADERS,
|
||||
"Content-Type": "application/json;charset=UTF-8",
|
||||
Authorization: session.sessionToken,
|
||||
"User-Agent": getRandomUserAgent(),
|
||||
},
|
||||
body: JSON.stringify({
|
||||
retryIndex: 0,
|
||||
requestId: requestId,
|
||||
date: new Date().toISOString().split("T")[0],
|
||||
drawId: Number(draw.id.split(":")[1]),
|
||||
closeTime: draw.closeTime,
|
||||
dealerId: dealerId,
|
||||
distributorId: distributorId,
|
||||
number: body.number,
|
||||
first: body.first,
|
||||
second: body.second,
|
||||
changedBalance: body.first + body.second,
|
||||
}),
|
||||
});
|
||||
}
|
||||
|
||||
async function mockSendRequest(
|
||||
requestId: number,
|
||||
session: APISession,
|
||||
body: PostDataEntry,
|
||||
dealerId: number,
|
||||
distributorId: number,
|
||||
draw: Draw,
|
||||
) {
|
||||
// between 5 to 15 ms
|
||||
await sleep(Math.floor(Math.random() * 10 + 5));
|
||||
// // simulate a failed response, 20% of the time
|
||||
if (Math.random() < 0.05) {
|
||||
// return a failed response
|
||||
return new Response(
|
||||
JSON.stringify({
|
||||
code: 500,
|
||||
success: false,
|
||||
message: "Failed",
|
||||
data: {},
|
||||
time: new Date().toISOString(),
|
||||
}),
|
||||
{
|
||||
status: 500,
|
||||
headers: { "Content-Type": "application/json" },
|
||||
statusText: "Failed",
|
||||
},
|
||||
);
|
||||
}
|
||||
return new Response(
|
||||
JSON.stringify({
|
||||
code: 200,
|
||||
success: true,
|
||||
message: "Success",
|
||||
data: {},
|
||||
time: new Date().toISOString(),
|
||||
}),
|
||||
{
|
||||
status: 200,
|
||||
headers: { "Content-Type": "application/json" },
|
||||
statusText: "OK",
|
||||
},
|
||||
);
|
||||
}
|
||||
|
||||
async function deleteAllBookedEntries({
|
||||
session,
|
||||
data,
|
||||
dealerId,
|
||||
drawId,
|
||||
closeTime,
|
||||
}: {
|
||||
session: APISession;
|
||||
data: { bookId: string; requestId: number }[];
|
||||
dealerId: number;
|
||||
drawId: number;
|
||||
closeTime: string;
|
||||
}) {
|
||||
return Fetch(`${constants.SCRAP_API_URL}/v1/book/delete-multiple`, {
|
||||
agent: new HttpsProxyAgent(`http://${session.ip}`),
|
||||
method: "POST",
|
||||
headers: {
|
||||
...constants.SCRAP_API_BASE_HEADERS,
|
||||
"Content-Type": "application/json;charset=UTF-8",
|
||||
Authorization: session.sessionToken,
|
||||
"User-Agent": getRandomUserAgent(),
|
||||
},
|
||||
body: JSON.stringify({
|
||||
dealerId,
|
||||
drawId,
|
||||
closeTime,
|
||||
bookIds: data.map((e) => e.bookId),
|
||||
}),
|
||||
});
|
||||
}
|
||||
|
||||
// export async function postDataToApi(payload: {
|
||||
// sessions: Record<string, APISession>;
|
||||
// draw: Draw;
|
||||
// data: PostDataEntry[];
|
||||
// users: ApiPostUserWithParent[];
|
||||
// }) {
|
||||
// const MAX_CONCURRENT_REQUESTS = 20;
|
||||
// const responses = [] as APIRespnose<{}>[];
|
||||
// const baseReqId = new Date().getTime();
|
||||
// const reqIds = Array.from(
|
||||
// { length: payload.data.length },
|
||||
// (_, i) => baseReqId + i,
|
||||
// );
|
||||
// let failedResponses = 0;
|
||||
// let successResponses = 0;
|
||||
// console.log(`[+] Sending ${payload.data.length} requests...`);
|
||||
// // "1723223505822,6339,6352,16,2024-08-09,123,10,10,20;1723223505823,6339,6352,16,2024-08-09,12,10,10,20",
|
||||
// async function processBatch(batch: PostDataEntry[], indexes: number[]) {
|
||||
// const promises = batch.map(async (row, idx) => {
|
||||
// await sleep(Math.floor(Math.random() * 10 + 10));
|
||||
// const session = payload.sessions[row.userId ?? ""];
|
||||
// const usr = payload.users.find((u) => u.userId === row.userId);
|
||||
// if (!usr) {
|
||||
// return null;
|
||||
// }
|
||||
// let ok = false,
|
||||
// tries = 0;
|
||||
// while (!ok && tries < 3) {
|
||||
// await sleep(Math.floor(Math.random() * 10 + 10));
|
||||
// const res = await mockSendRequest(
|
||||
// reqIds[indexes[idx]],
|
||||
// session,
|
||||
// row,
|
||||
// Number(session.userId.split(":")[1]),
|
||||
// usr?.parentDistributor,
|
||||
// payload.draw,
|
||||
// );
|
||||
// let rj: APIRespnose<{}>;
|
||||
// try {
|
||||
// rj = (await res.json()) as APIRespnose<{}>;
|
||||
// } catch (err) {
|
||||
// console.log(err);
|
||||
// tries++;
|
||||
// continue;
|
||||
// }
|
||||
// if (res.status !== 200 || rj.code !== 200) {
|
||||
// console.log(
|
||||
// `Failed to send request ${reqIds[indexes[idx]]}, try ${tries}`,
|
||||
// );
|
||||
// tries++;
|
||||
// continue;
|
||||
// }
|
||||
// ok = true;
|
||||
// successResponses++;
|
||||
// return rj;
|
||||
// }
|
||||
// failedResponses++;
|
||||
// return null;
|
||||
// });
|
||||
// const results = await Promise.all(promises);
|
||||
// for (const result of results) {
|
||||
// if (!result) continue;
|
||||
// responses.push(result);
|
||||
// }
|
||||
// }
|
||||
// try {
|
||||
// for (let i = 0; i < payload.data.length; i += MAX_CONCURRENT_REQUESTS) {
|
||||
// const batch = payload.data.slice(i, i + MAX_CONCURRENT_REQUESTS);
|
||||
// await processBatch(
|
||||
// batch,
|
||||
// batch.map((_, x) => i + x),
|
||||
// );
|
||||
// }
|
||||
// console.log(`[+] Finished sending ${payload.data.length} requests`);
|
||||
// console.log(`[?] Failed responses: ${failedResponses}`);
|
||||
// console.log(`[?] Success responses: ${successResponses}`);
|
||||
// return responses;
|
||||
// } catch (err) {
|
||||
// console.log(err);
|
||||
// return false;
|
||||
// }
|
||||
// }
|
||||
447
src/lib/server/postdata/postdata.gen.controller.ts
Normal file
447
src/lib/server/postdata/postdata.gen.controller.ts
Normal file
@@ -0,0 +1,447 @@
|
||||
import { dbApiPostData } from "$lib/server/db/apipostdata.db";
|
||||
import { dbApiUser } from "$lib/server/db/apiuser.db";
|
||||
import { getReducedFinalSheet } from "$lib/server/finalsheet.helpers";
|
||||
import {
|
||||
adjustRatesIfDuplicatesFound,
|
||||
pairRatesWithNumbers,
|
||||
removeNumbersWithRepeatingDigits,
|
||||
splitRatesIntoSmallerForRowsWithLargerRates,
|
||||
spreadRatesForNumbersBetweenUsers,
|
||||
} from "$lib/server/postdata/postdata.gen.helpers";
|
||||
import { getAllSessions } from "$lib/server/utils/session.service";
|
||||
import { getDefaultTotals, getULID } from "$lib/utils";
|
||||
import type {
|
||||
ApiPostUser,
|
||||
ApiPostUserWithParent,
|
||||
PresetDataEntry,
|
||||
PostDataEntry,
|
||||
PostDataFilters,
|
||||
PostDataHistoryFilters,
|
||||
ReducedFinalSheetData,
|
||||
ReducedFinalSheetRow,
|
||||
ServerError,
|
||||
} from "$lib/utils/data.types";
|
||||
import { dbPresetData } from "$lib/server/db/presetdata.db";
|
||||
import { getUsersBalance } from "$lib/server/external/api.scraping.helpers";
|
||||
|
||||
function filterMatching(
|
||||
data: ReducedFinalSheetRow[],
|
||||
min: number,
|
||||
max: number,
|
||||
sheetType: "first" | "second",
|
||||
) {
|
||||
let abNums = new Set<string>();
|
||||
let abcNums = new Set<string>();
|
||||
|
||||
for (const row of data) {
|
||||
if (row.prize[sheetType] >= min && row.prize[sheetType] <= max) {
|
||||
abNums.add(`${row.number[0]}${row.number[1]}`);
|
||||
abcNums.add(`${row.number[0]}${row.number[1]}${row.number[2]}`);
|
||||
}
|
||||
}
|
||||
return { abNums, abcNums };
|
||||
}
|
||||
|
||||
export async function updateBalanceOfPostUsers(users: ApiPostUserWithParent[]) {
|
||||
const sessions = await getAllSessions();
|
||||
const balances = [] as { id: string; balance: number }[];
|
||||
for (const user of users) {
|
||||
const session = sessions.find((e) => e.value.userId === user.id);
|
||||
const jwt = session?.value.sessionToken;
|
||||
if (!jwt) {
|
||||
return {
|
||||
ok: false,
|
||||
detail: `Session not found for user ${user.userId}`,
|
||||
};
|
||||
}
|
||||
const out = await getUsersBalance(+user.id.split(":")[1], jwt);
|
||||
if (!out) {
|
||||
return {
|
||||
ok: false,
|
||||
detail: `Error fetching balance for user ${user.userName}`,
|
||||
};
|
||||
}
|
||||
balances.push({ id: user.id, balance: out });
|
||||
}
|
||||
await dbApiUser.updatePostUsersBalances(balances);
|
||||
return {
|
||||
ok: true,
|
||||
detail: "",
|
||||
data: users.map((u) => {
|
||||
const bal = balances.find((b) => b.id === u.id);
|
||||
if (!bal) {
|
||||
console.log(`ERROR: Balance not found for user ${u.userName}`);
|
||||
}
|
||||
return { ...u, balance: bal?.balance ?? 0 };
|
||||
}),
|
||||
};
|
||||
}
|
||||
|
||||
export async function fetchDataForPosting(
|
||||
date: string,
|
||||
input: PostDataFilters,
|
||||
users: ApiPostUser[],
|
||||
) {
|
||||
console.log(`The input ${JSON.stringify(input, null, 2)}`);
|
||||
const { minPrize, maxPrize } = input;
|
||||
const draw = input.draw!;
|
||||
const fsData = {
|
||||
id: getULID(),
|
||||
date,
|
||||
drawId: draw.id,
|
||||
data: [],
|
||||
totals: getDefaultTotals(),
|
||||
} as ReducedFinalSheetData;
|
||||
if (!draw) {
|
||||
return {
|
||||
ok: false,
|
||||
detail: `Draw for the passed draw ID not found`,
|
||||
data: [],
|
||||
users: [],
|
||||
errors: [
|
||||
{ message: `Draw for the passed draw ID not found` },
|
||||
] as ServerError,
|
||||
};
|
||||
}
|
||||
const data = await getReducedFinalSheet(fsData);
|
||||
if (!data.ok) {
|
||||
return {
|
||||
ok: false,
|
||||
detail: `Error compiling final sheet`,
|
||||
data: [],
|
||||
users: [],
|
||||
errors: data.errors,
|
||||
};
|
||||
}
|
||||
|
||||
console.log("[+] Filtering the fs data to get the numbers");
|
||||
const filteredF = filterMatching(fsData.data, minPrize, maxPrize, "first");
|
||||
console.log(
|
||||
`Filtered data: ${filteredF.abNums.size}; ${filteredF.abcNums.size}`,
|
||||
);
|
||||
|
||||
// ------------------------------------------
|
||||
|
||||
let _abNums = new Set<string>(),
|
||||
_abcNums = new Set<string>();
|
||||
for (const each of filteredF.abNums) _abNums.add(each);
|
||||
for (const each of filteredF.abcNums) _abcNums.add(each);
|
||||
let abNums = Array.from(_abNums),
|
||||
abcNums = Array.from(_abcNums);
|
||||
|
||||
if (draw.filterDuplicatesWhilePosting === true) {
|
||||
console.log(`[+] Removing numbers that have repeating digits`);
|
||||
console.log(`[=] Original : AB: ${abNums.length}, ABC: ${abcNums.length}`);
|
||||
abNums = removeNumbersWithRepeatingDigits(abNums);
|
||||
abcNums = removeNumbersWithRepeatingDigits(abcNums);
|
||||
}
|
||||
console.log(`[=] AB: ${abNums.length}, ABC: ${abcNums.length}`);
|
||||
|
||||
console.log(`Fetching preset data`);
|
||||
const presetData = await dbPresetData.getDataGroupedBySheetByDraw(
|
||||
date,
|
||||
+draw.id.split(":")[1],
|
||||
);
|
||||
console.log(`${presetData.all.length} preset entries found`);
|
||||
|
||||
for (let tries = 0; tries < 3; tries++) {
|
||||
console.log(`[✍️] Try ${tries + 1} of generating the result`);
|
||||
const out = await generatePostDataArrayFromBaseInfo(
|
||||
input,
|
||||
users,
|
||||
abNums,
|
||||
abcNums,
|
||||
presetData,
|
||||
);
|
||||
if (out.ok) {
|
||||
return out;
|
||||
}
|
||||
if (out.detail.includes("Not enough balance")) {
|
||||
return {
|
||||
ok: false,
|
||||
detail: `Users don't have enough balance to post the data, try reducing the rates`,
|
||||
data: [],
|
||||
users: [],
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
ok: false,
|
||||
detail: `Could not generate data, please try adjusting the filters`,
|
||||
data: [],
|
||||
};
|
||||
}
|
||||
|
||||
/**
 * Turns AB/ABC number lists plus preset entries into the final shuffled
 * PostDataEntry list, assigned across `users` so that (after rebalancing)
 * no user is committed beyond their balance.
 *
 * Pipeline per list: pair numbers with base rates -> merge duplicate preset
 * rates -> spread rates across users -> split large rates into smaller rows.
 * Non-AB/ABC preset entries are appended to a random user. Fails when the
 * users' combined balance can't cover the total, or when per-user balance
 * can't be achieved after 5 rebalance passes.
 */
export async function generatePostDataArrayFromBaseInfo(
  input: PostDataFilters,
  users: ApiPostUser[],
  abNums: string[],
  abcNums: string[],
  presetData: {
    all: PostDataEntry[];
    abData: PresetDataEntry[];
    abcData: PresetDataEntry[];
  },
) {
  console.log("[+] Spreading the rates for the numbers for all post user");
  // 2-digit numbers: base rates from input.twoDigitRates, adjusted by presets.
  const abData = splitRatesIntoSmallerForRowsWithLargerRates(
    spreadRatesForNumbersBetweenUsers(
      adjustRatesIfDuplicatesFound(
        pairRatesWithNumbers(abNums, input.twoDigitRates),
        presetData.abData,
      ),
      users.map((u) => u.userId),
    ),
  );
  // 3-digit numbers: same pipeline with input.threeDigitRates.
  const abcData = splitRatesIntoSmallerForRowsWithLargerRates(
    spreadRatesForNumbersBetweenUsers(
      adjustRatesIfDuplicatesFound(
        pairRatesWithNumbers(abcNums, input.threeDigitRates),
        presetData.abcData,
      ),
      users.map((u) => u.userId),
    ),
  );

  // ------------------------------------------

  console.log(`[+] Adding ${abData.length} ab entries to final list`);
  console.log(`[+] Adding ${abcData.length} abc entries to final list`);

  // Collect all generated rows; remember which numbers are covered so the
  // preset pass below doesn't duplicate them.
  const result = [] as PostDataEntry[];
  const alreadyPresent = new Set<string>();
  for (const each of abData) {
    alreadyPresent.add(each.number);
    result.push(each);
  }
  for (const each of abcData) {
    alreadyPresent.add(each.number);
    result.push(each);
  }

  // ------------------------------------------

  // Total stake (first+second) committed per user id.
  const balanceCounts = {} as Record<string, number>;
  for (const each of result) {
    const uid = each.userId ?? "";
    if (balanceCounts[uid] === undefined) {
      balanceCounts[uid] = 0;
    }
    balanceCounts[uid] += each.first + each.second;
  }

  // ------------------------------------------

  console.log(
    `[+] Appending up to ${presetData.all.length} entries that are not ab, abc`,
  );
  // Preset entries not already covered (and with at least one rate >= 5)
  // are assigned to a uniformly random user.
  for (const entry of presetData.all) {
    if (
      alreadyPresent.has(entry.number) ||
      (entry.first < 5 && entry.second < 5)
    ) {
      continue;
    }
    const randomUserId = users[Math.floor(Math.random() * users.length)].userId;
    if (balanceCounts[randomUserId] === undefined) {
      balanceCounts[randomUserId] = 0;
    }
    balanceCounts[randomUserId] += entry.first + entry.second;
    result.push({ ...entry, userId: randomUserId });
  }

  // ------------------------------------------

  // Hard stop when even the combined balance can't cover the total stake.
  const usersTotalbalance = users.reduce((a, b) => a + (b.balance ?? 0), 0);
  let totalAmtForPostingData = Object.values(balanceCounts).reduce(
    (acc, curr) => acc + curr,
    0,
  );
  if (usersTotalbalance < totalAmtForPostingData) {
    return {
      ok: false,
      detail: `Not enough balance to book overall with ${usersTotalbalance} < ${totalAmtForPostingData}`,
      data: [],
      users: [],
      errors: [
        { message: `Not enough balance to book overall` },
      ] as ServerError,
    };
  }

  // True when any single user is committed beyond their own balance.
  function isDistributionUnbalanced() {
    let out = false;
    for (const key in balanceCounts) {
      if (
        balanceCounts[key] > (users.find((u) => u.userId === key)?.balance ?? 0)
      ) {
        out = true;
        break;
      }
    }
    return out;
  }

  // Up to 5 rebalance passes moving entries between users.
  // NOTE(review): rebalancePostDataListByBalanceOfUsers can return an error
  // object ("no user with enough balance") which is ignored here — the loop
  // relies solely on isDistributionUnbalanced() to detect failure.
  for (let tries = 0; tries < 5; tries++) {
    console.log(
      `Balance counts start : ${JSON.stringify(balanceCounts, null, 2)}`,
    );

    rebalancePostDataListByBalanceOfUsers(balanceCounts, users, result);

    console.log(`Balance counts final : ${JSON.stringify(balanceCounts)}`);

    let totalAmtForPostingDataAfterRebalance = Object.values(
      balanceCounts,
    ).reduce((acc, curr) => acc + curr, 0);

    console.log(
      `Total amount for posting data after rebalance: ${totalAmtForPostingDataAfterRebalance}`,
      `Total balance of users: ${JSON.stringify(users.map((u) => ({ un: u.userName, b: u.balance })))}`,
    );

    if (!isDistributionUnbalanced()) {
      console.log(`[+] Distribution is balanced`);
      break;
    }
    console.log(`[!] Rebalancing again`);
  }

  if (isDistributionUnbalanced()) {
    return {
      ok: false,
      detail: `Please regenerate dataset as the some users have not enough balance to book their entries`,
      data: [],
      users: [],
    };
  }

  // ------------------------------------------

  console.log(`[+] Shuffling ${result.length} entries for posting`);
  shuffleArray(result);

  return {
    ok: true,
    detail: `Fetched the data successfully`,
    data: result,
    users,
    errors: undefined,
  };
}
|
||||
|
||||
function shuffleArray<T>(array: T[]): T[] {
|
||||
for (let i = array.length - 1; i > 0; i--) {
|
||||
const j = Math.floor(Math.random() * (i + 1));
|
||||
[array[i], array[j]] = [array[j], array[i]];
|
||||
}
|
||||
return array;
|
||||
}
|
||||
|
||||
export function rebalancePostDataListByBalanceOfUsers(
|
||||
balanceCounts: Record<string, number>,
|
||||
users: ApiPostUser[],
|
||||
result: PostDataEntry[],
|
||||
) {
|
||||
console.log(
|
||||
`[+] Checking if the users have enough balance to book their assigned data`,
|
||||
);
|
||||
|
||||
for (const user of users) {
|
||||
const usersBalance = user.balance ?? 0;
|
||||
const dueForUser = balanceCounts[user.userId] ?? 0;
|
||||
if (usersBalance === 0) {
|
||||
console.log(`\n[!] ${user.userName} has no balance\n`);
|
||||
continue;
|
||||
}
|
||||
if (usersBalance >= dueForUser) {
|
||||
console.log(
|
||||
`[✅] ${user.userName} can book the data of ${usersBalance} > ${dueForUser} `,
|
||||
);
|
||||
continue;
|
||||
}
|
||||
console.log(
|
||||
`[!❎!] ${user.userName} can't book it all ${usersBalance} < ${dueForUser}`,
|
||||
);
|
||||
|
||||
const difference = dueForUser - usersBalance;
|
||||
let differenceLeft = Number(difference); // make a copy
|
||||
const entriesToMove = result
|
||||
.filter((r) => {
|
||||
if (r.userId === user.userId && differenceLeft > 0) {
|
||||
differenceLeft -= r.first + r.second;
|
||||
return true;
|
||||
}
|
||||
})
|
||||
.map((r) => r.id);
|
||||
console.log(`Have to move ${entriesToMove.length} entries to other users`);
|
||||
|
||||
// find a user who has enough balance
|
||||
|
||||
const userWithEnoughBalance = users.find((u) => {
|
||||
return (
|
||||
(u.balance ?? 0) - balanceCounts[u.userId] >= difference &&
|
||||
u.userId !== user.userId
|
||||
);
|
||||
});
|
||||
if (!userWithEnoughBalance) {
|
||||
return {
|
||||
ok: false,
|
||||
detail: `No user found with enough balance to cover balance shortage of ${difference} for ${user.userName}`,
|
||||
data: [],
|
||||
};
|
||||
}
|
||||
console.log(
|
||||
`Dude has enough balance to take on this other user's expenses ': ${JSON.stringify(userWithEnoughBalance)}`,
|
||||
);
|
||||
|
||||
for (let i = 0; i < result.length; i++) {
|
||||
if (!entriesToMove.includes(result[i].id)) {
|
||||
continue;
|
||||
}
|
||||
const entry = result[i];
|
||||
let amountMoved = 0;
|
||||
if (entry.userId !== user.userId) {
|
||||
continue;
|
||||
}
|
||||
entry.userId = userWithEnoughBalance.userId;
|
||||
balanceCounts[userWithEnoughBalance.userId] += entry.first + entry.second;
|
||||
balanceCounts[user.userId] -= entry.first + entry.second;
|
||||
amountMoved += entry.first + entry.second;
|
||||
if (amountMoved >= difference) {
|
||||
// don't move more than the difference'
|
||||
break;
|
||||
}
|
||||
}
|
||||
console.log(
|
||||
`[+] Moved ${entriesToMove.length} entries to ${userWithEnoughBalance.userName}`,
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
export async function fetchPostDataHistory(input: PostDataHistoryFilters) {
|
||||
const { draw, date } = input;
|
||||
console.log(`Fetching post data from HISTORY for draw: ${date} - ${draw.id}`);
|
||||
const found = await dbApiPostData.getPostDataByDraw(date, draw.id);
|
||||
if (!found) {
|
||||
return { data: [], users: [], ok: false, detail: "Data not found" };
|
||||
}
|
||||
console.log(
|
||||
`Data found for the passed draw: ${date} - ${draw.id}, returning that`,
|
||||
);
|
||||
const users = await dbApiUser.getAllPostUsers();
|
||||
const uniqueUserIds = [] as string[];
|
||||
for (const each of found) {
|
||||
if (!each.userId || uniqueUserIds.includes(each.userId)) {
|
||||
continue;
|
||||
}
|
||||
uniqueUserIds.push(each.userId);
|
||||
}
|
||||
return {
|
||||
data: found,
|
||||
users: users.filter((u) => uniqueUserIds.includes(u.userId)),
|
||||
ok: true,
|
||||
detail: "Data found",
|
||||
};
|
||||
}
|
||||
213
src/lib/server/postdata/postdata.gen.helpers.ts
Normal file
213
src/lib/server/postdata/postdata.gen.helpers.ts
Normal file
@@ -0,0 +1,213 @@
|
||||
import { getULID } from "$lib/utils";
|
||||
|
||||
import type {
|
||||
PostDataEntry,
|
||||
FSPair,
|
||||
PresetDataEntry,
|
||||
} from "$lib/utils/data.types";
|
||||
|
||||
function splitRatesRelativelyEvenly(
|
||||
totalRate: number,
|
||||
parts: number,
|
||||
): number[] {
|
||||
if (totalRate < 5) {
|
||||
return Array.from({ length: parts }, () => 0);
|
||||
}
|
||||
if (totalRate === 5) {
|
||||
return Array.from({ length: parts }, (_, i) => (i === 0 ? 5 : 0));
|
||||
}
|
||||
if (totalRate % 5 !== 0) {
|
||||
throw new Error("Total rate must be a multiple of 5");
|
||||
}
|
||||
const splits: number[] = [];
|
||||
let remainingRate = totalRate;
|
||||
// Distribute the rate using a weighted random approach
|
||||
for (let i = 0; i < parts; i++) {
|
||||
if (i === parts - 1) {
|
||||
splits.push(remainingRate);
|
||||
} else {
|
||||
const minRate = 5; // Minimum rate for each part
|
||||
const maxRate = Math.min(
|
||||
remainingRate - (parts - i - 1) * minRate,
|
||||
remainingRate * 0.6,
|
||||
);
|
||||
const rate =
|
||||
Math.floor((Math.random() * (maxRate - minRate + 1)) / 5) * 5 + minRate;
|
||||
splits.push(rate);
|
||||
remainingRate -= rate;
|
||||
}
|
||||
}
|
||||
// Shuffle the array
|
||||
for (let i = splits.length - 1; i > 0; i--) {
|
||||
const j = Math.floor(Math.random() * (i + 1));
|
||||
[splits[i], splits[j]] = [splits[j], splits[i]];
|
||||
}
|
||||
// Occasionally allow for extreme distributions, but ensure at least one non-zero value
|
||||
if (Math.random() < 0.05) {
|
||||
// 5% chance
|
||||
const extremeIndex = Math.floor(Math.random() * parts);
|
||||
splits.fill(0);
|
||||
splits[extremeIndex] = totalRate;
|
||||
}
|
||||
if (splits.reduce((a, b) => a + b, 0) !== totalRate) {
|
||||
throw new Error("Splitting error");
|
||||
}
|
||||
return splits;
|
||||
}
|
||||
|
||||
export function pairRatesWithNumbers(numbers: string[], rates: FSPair) {
|
||||
const out = [];
|
||||
for (let i = 0; i < numbers.length; i++) {
|
||||
out.push({
|
||||
id: getULID(),
|
||||
number: numbers[i].toString(),
|
||||
first: rates.first,
|
||||
second: rates.second,
|
||||
createdAt: new Date().toISOString(),
|
||||
});
|
||||
}
|
||||
return out;
|
||||
}
|
||||
|
||||
export function adjustRatesIfDuplicatesFound(
|
||||
original: PostDataEntry[],
|
||||
newRows: PresetDataEntry[],
|
||||
) {
|
||||
const originalNumbers = original.map((e) => e.number);
|
||||
const newNumbers = newRows.map((e) => e.number);
|
||||
const duplicates = originalNumbers.filter((n) => newNumbers.includes(n));
|
||||
if (duplicates.length === 0) {
|
||||
return original;
|
||||
}
|
||||
const out = [] as PostDataEntry[];
|
||||
for (const entry of original) {
|
||||
if (!duplicates.includes(entry.number)) {
|
||||
out.push(entry);
|
||||
continue;
|
||||
}
|
||||
let first = entry.first;
|
||||
let second = entry.second;
|
||||
// now add rates from newRows
|
||||
const newRow = newRows.find((e) => e.number === entry.number);
|
||||
if (newRow) {
|
||||
first += newRow.first;
|
||||
second += newRow.second;
|
||||
}
|
||||
out.push({ ...entry, first, second });
|
||||
}
|
||||
return out;
|
||||
}
|
||||
|
||||
export function spreadRatesForNumbersBetweenUsers(
|
||||
originalData: PostDataEntry[],
|
||||
userIds: string[],
|
||||
) {
|
||||
const dataWithSplitRates = [] as { first: number[]; second: number[] }[];
|
||||
for (const entry of originalData) {
|
||||
const { first, second } = entry;
|
||||
if (first % 5 !== 0 || second % 5 !== 0) {
|
||||
throw new Error("Rates must be multiples of 5");
|
||||
}
|
||||
const fRates = splitRatesRelativelyEvenly(first, userIds.length);
|
||||
const sRates = splitRatesRelativelyEvenly(second, userIds.length);
|
||||
dataWithSplitRates.push({ first: fRates, second: sRates });
|
||||
}
|
||||
const out = [] as PostDataEntry[];
|
||||
for (let uIdx = 0; uIdx < userIds.length; uIdx++) {
|
||||
let rIdx = 0;
|
||||
for (const entry of originalData) {
|
||||
let f = dataWithSplitRates[rIdx].first[uIdx] ?? 0;
|
||||
let s = dataWithSplitRates[rIdx].second[uIdx] ?? 0;
|
||||
if (f > 0 || s > 0) {
|
||||
out.push({
|
||||
...entry,
|
||||
first: f,
|
||||
second: s,
|
||||
userId: userIds[Math.floor(Math.random() * userIds.length)],
|
||||
});
|
||||
}
|
||||
rIdx++;
|
||||
}
|
||||
}
|
||||
return out;
|
||||
}
|
||||
|
||||
/**
 * Breaks rows carrying larger rates into several smaller rows whose rates
 * sum back to the originals. For each row, the first and second rates are
 * independently split into a random number of parts, the two part lists are
 * zero-padded to equal length and shuffled, then re-paired column-wise into
 * new rows. A final checksum guards that no rate value was lost.
 *
 * NOTE(review): rows with both rates < 5 are silently dropped; the closing
 * sum check only stays consistent if such rates are 0 (rates appear to be
 * validated as multiples of 5 elsewhere) — TODO confirm.
 *
 * @param data - the rows to break up
 * @returns the re-split rows
 * @throws Error when the output's summed rates differ from the input's
 */
export function splitRatesIntoSmallerForRowsWithLargerRates(
  data: PostDataEntry[],
) {
  const out = [] as PostDataEntry[];

  // Picks how many parts to split `n` into; yields a value in roughly 1..6,
  // influenced by n/5 (smaller rates tend toward fewer parts).
  function getRNBtw1And6(n: number) {
    const quotient = n / 5;
    return Math.floor(((Math.random() * quotient) % 6) + 1);
  }

  for (const entry of data) {
    if (entry.first < 5 && entry.second < 5) {
      // Both rates below the minimum unit: drop the row entirely.
      continue;
    } else if (entry.first === 5 && entry.second === 5) {
      // Already at the minimum unit on both sides: keep unchanged.
      out.push(entry);
      continue;
    }

    // Split each rate into a random number of parts.
    const firstSplit = splitRatesRelativelyEvenly(
      entry.first,
      getRNBtw1And6(entry.first),
    );
    const secondSplit = splitRatesRelativelyEvenly(
      entry.second,
      getRNBtw1And6(entry.second),
    );

    const maxLength = Math.max(firstSplit.length, secondSplit.length);

    // Zero-pad the shorter list so the two can be paired index-wise.
    const firstPadded = firstSplit.concat(
      Array(maxLength - firstSplit.length).fill(0),
    );
    const secondPadded = secondSplit.concat(
      Array(maxLength - secondSplit.length).fill(0),
    );

    // Shuffle both padded lists in place, each with its own random pick so
    // the resulting first/second pairings are independent.
    for (let i = firstPadded.length - 1; i > 0; i--) {
      const j = Math.floor(Math.random() * (i + 1));
      [firstPadded[i], firstPadded[j]] = [firstPadded[j], firstPadded[i]];
      const k = Math.floor(Math.random() * (i + 1));
      [secondPadded[i], secondPadded[k]] = [secondPadded[k], secondPadded[i]];
    }

    // Re-pair column-wise; pairs with no usable rate on either side are
    // skipped (with multiples of 5, that means a 0/0 pair).
    for (let i = 0; i < maxLength; i++) {
      const f = firstPadded[i];
      const s = secondPadded[i];
      if (f < 5 && s < 5) {
        continue;
      }
      out.push({ ...entry, first: f, second: s });
    }
  }

  // Verify the split preserved the grand total: sum of all first+second
  // rates in the output must equal the input's.
  const originalSum = data.reduce(
    (acc, curr) => acc + curr.first + curr.second,
    0,
  );
  const newSum = out.reduce((acc, curr) => acc + curr.first + curr.second, 0);
  if (originalSum !== newSum) {
    console.log(
      `[---] Original and new sums are not matching at all (${originalSum} !== ${newSum})`,
    );
    throw new Error("Sum of rates is not equal");
  }

  return out;
}
|
||||
|
||||
export function removeNumbersWithRepeatingDigits(nos: string[]): string[] {
|
||||
const out = new Set<string>();
|
||||
for (const no of nos) {
|
||||
if (new Set(no).size === no.length) {
|
||||
out.add(no);
|
||||
}
|
||||
}
|
||||
return Array.from(out);
|
||||
}
|
||||
103
src/lib/server/session.helpers.ts
Executable file
103
src/lib/server/session.helpers.ts
Executable file
@@ -0,0 +1,103 @@
|
||||
import { redis } from "./connectors/redis";
|
||||
import { getUUID } from "$lib/utils";
|
||||
import type { Session } from "$lib/utils/data.types";
|
||||
|
||||
// Default session time-to-live in seconds (6 hours).
export const defaultTTL = 60 * 60 * 6;
|
||||
|
||||
export const generateSession = async (
|
||||
username: string,
|
||||
userType: string,
|
||||
ip: string,
|
||||
userAgent: string,
|
||||
ttl?: number,
|
||||
) => {
|
||||
ttl = ttl || defaultTTL;
|
||||
const sId = getUUID();
|
||||
await deleteMatchingSessions(username, ip, userAgent);
|
||||
await redis.setex(
|
||||
sId,
|
||||
ttl,
|
||||
ip + "|" + userAgent + "|" + username + "|" + userType + "|" + sId,
|
||||
);
|
||||
await redis.sadd("session:" + username, sId);
|
||||
return sId;
|
||||
};
|
||||
|
||||
export const getSession = async (sId: string) => {
|
||||
const session = await redis.get(sId);
|
||||
if (!session) return null;
|
||||
return parseSession(session);
|
||||
};
|
||||
|
||||
export const getSessions = async (username: string) => {
|
||||
const sIds = await redis.smembers("session:" + username);
|
||||
if (!sIds) return null;
|
||||
const _sessions = await redis.mget(...sIds);
|
||||
const sessions = [];
|
||||
for (const session of _sessions) {
|
||||
if (!session) continue;
|
||||
sessions.push(parseSession(session));
|
||||
}
|
||||
};
|
||||
|
||||
export const isSessionValid = async (
|
||||
sId: string,
|
||||
ip: string,
|
||||
userAgent: string,
|
||||
) => {
|
||||
const s = await getSession(sId);
|
||||
if (!s) return false;
|
||||
if (s.ip !== ip && s.userAgent === userAgent) return false;
|
||||
if (s.userAgent !== userAgent && s.ip === ip) return false;
|
||||
if (s.userAgent !== userAgent && s.ip !== ip) return false;
|
||||
return true;
|
||||
};
|
||||
|
||||
export const isAlreadyLoggedIn = async (
|
||||
username: string,
|
||||
ip: string,
|
||||
userAgent: string,
|
||||
) => {
|
||||
const sIds = await redis.smembers("session:" + username);
|
||||
if (!sIds) return false;
|
||||
for (const sId of sIds) {
|
||||
if (await isSessionValid(sId, ip, userAgent)) return true;
|
||||
}
|
||||
return false;
|
||||
};
|
||||
|
||||
export const deleteSession = async (sId: string) => {
|
||||
const session = await getSession(sId);
|
||||
if (!session) return false;
|
||||
await redis.del(sId);
|
||||
await redis.srem("session:" + session.username, sId);
|
||||
return true;
|
||||
};
|
||||
|
||||
export const deleteMatchingSessions = async (
|
||||
username: string,
|
||||
ip: string,
|
||||
userAgent: string,
|
||||
) => {
|
||||
const sIds = await redis.smembers("session:" + username);
|
||||
if (!sIds) return false;
|
||||
for (const sId of sIds) {
|
||||
if (await isSessionValid(sId, ip, userAgent)) {
|
||||
await deleteSession(sId);
|
||||
}
|
||||
}
|
||||
return true;
|
||||
};
|
||||
|
||||
export const deleteAllSessions = async (username: string) => {
|
||||
const sIds = await redis.smembers("session:" + username);
|
||||
if (!sIds) return false;
|
||||
await redis.del(...sIds);
|
||||
await redis.del("session:" + username);
|
||||
return true;
|
||||
};
|
||||
|
||||
const parseSession = (session: string) => {
|
||||
const [ip, userAgent, username, userType, sId] = session.split("|");
|
||||
return { sId, ip, userAgent, username, userType } as Session;
|
||||
};
|
||||
0
src/lib/server/test.booking.helpers.ts
Executable file
0
src/lib/server/test.booking.helpers.ts
Executable file
63
src/lib/server/utils/session.service.ts
Normal file
63
src/lib/server/utils/session.service.ts
Normal file
@@ -0,0 +1,63 @@
|
||||
import { pickRandomIP } from "$lib/utils";
|
||||
import { constants } from "$lib/utils/constants";
|
||||
import type { APISession } from "$lib/utils/data.types";
|
||||
import { redis } from "../connectors/redis";
|
||||
import { testIfSessionIsValid } from "../external/api.scraping.helpers";
|
||||
|
||||
export async function getSessionFromStore(sid: string) {
|
||||
const out = await redis.get(sid);
|
||||
if (out === null) {
|
||||
return;
|
||||
}
|
||||
return JSON.parse(out) as APISession;
|
||||
}
|
||||
|
||||
export async function setSessionToRedis(sessionKey: string, userId: string) {
|
||||
let key = constants.SCRAP_API_SESSION_KEY;
|
||||
if (userId) {
|
||||
key = `apisession:${userId}`;
|
||||
}
|
||||
console.log("Setting session to redis", key, sessionKey);
|
||||
const session: APISession = {
|
||||
ip: pickRandomIP(),
|
||||
sessionToken: sessionKey,
|
||||
userId,
|
||||
};
|
||||
await redis.setex(key, 86400, JSON.stringify(session));
|
||||
}
|
||||
|
||||
export async function isSessionValidInStore(userId?: string) {
|
||||
let key = constants.SCRAP_API_SESSION_KEY;
|
||||
if (userId) {
|
||||
key = `apisession:${userId}`;
|
||||
}
|
||||
try {
|
||||
const value = JSON.parse((await redis.get(key)) ?? "") as APISession | null;
|
||||
if (value === null) {
|
||||
return { valid: false };
|
||||
}
|
||||
return await testIfSessionIsValid(value.sessionToken);
|
||||
} catch (err) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
export async function removeSessionFromStore(userId?: string) {
|
||||
try {
|
||||
let key = constants.SCRAP_API_SESSION_KEY;
|
||||
if (userId) {
|
||||
key = `apisession:${userId}`;
|
||||
}
|
||||
await redis.del(key);
|
||||
} catch (err) {}
|
||||
}
|
||||
|
||||
export async function getAllSessions() {
|
||||
const keys = await redis.keys("apisession:*");
|
||||
const sessions = [];
|
||||
for (const key of keys) {
|
||||
const value = JSON.parse((await redis.get(key)) ?? "{}") as APISession;
|
||||
sessions.push({ key, value });
|
||||
}
|
||||
return sessions;
|
||||
}
|
||||
Reference in New Issue
Block a user