done with upgrades..... I believe

bootunloader
2025-01-25 23:41:57 +02:00
parent 15f2b238b0
commit aa73254a87
23 changed files with 2442 additions and 2368 deletions

View File

@@ -32,7 +32,7 @@
"ioredis": "^5.3.2",
"lucide-svelte": "^0.424.0",
"node-fetch": "^3.3.2",
"surrealdb.js": "^0.8.2",
"surrealdb": "^1.1.0",
"svelte-french-toast": "^1.1.0",
"svelte-headlessui": "^0.0.20",
"tailwind-merge": "^2.4.0",

50 pnpm-lock.yaml generated
View File

@@ -65,9 +65,9 @@ importers:
node-fetch:
specifier: ^3.3.2
version: 3.3.2
surrealdb.js:
specifier: ^0.8.2
version: 0.8.4
surrealdb:
specifier: ^1.1.0
version: 1.1.0(tslib@2.6.2)(typescript@5.2.2)(ws@8.14.2)
svelte-french-toast:
specifier: ^1.1.0
version: 1.2.0(svelte@4.2.1)
@@ -1141,6 +1141,11 @@ packages:
isexe@2.0.0:
resolution: {integrity: sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==}
isows@1.0.6:
resolution: {integrity: sha512-lPHCayd40oW98/I0uvgaHKWCSvkzY27LjWLbtzOm64yQ+G3Q5npjjbdppU65iZXkK1Zt+kH9pfegli0AYfwYYw==}
peerDependencies:
ws: '*'
jiti@1.21.6:
resolution: {integrity: sha512-2yTgeWTWzMWkHu6Jp9NKgePDaYHbntiwvYuuJLbbN9vl7DC9DvXKOB2BC3ZZ92D3cvV/aflH0osDfwpHepQ53w==}
hasBin: true
@@ -1561,8 +1566,12 @@ packages:
resolution: {integrity: sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w==}
engines: {node: '>= 0.4'}
surrealdb.js@0.8.4:
resolution: {integrity: sha512-ToCyBHxpVPGXth31ZktQvv+s7fvZG6+sR3mXHNAlhq0/43yYiYx3+3cYvCDGZQNBNUI42KENv8/aBQ5mGQZEEA==}
surrealdb@1.1.0:
resolution: {integrity: sha512-EAopFKeIo1lVCR+XEE/oMuh8LCPAIa2xC9MyVs5nw+BJyHc0Fr6F/UWVF9399LRtTBCoqe5G1W0hgxqLqkWaYA==}
engines: {node: '>=18.0.0'}
peerDependencies:
tslib: ^2.6.3
typescript: ^5.0.0
svelte-check@3.5.2:
resolution: {integrity: sha512-5a/YWbiH4c+AqAUP+0VneiV5bP8YOk9JL3jwvN+k2PEPLgpu85bjQc5eE67+eIZBBwUEJzmO3I92OqKcqbp3fw==}
@@ -1760,12 +1769,6 @@ packages:
unplugin@1.5.0:
resolution: {integrity: sha512-9ZdRwbh/4gcm1JTOkp9lAkIDrtOyOxgHmY7cjuwI8L/2RTikMcVG25GsZwNAgRuap3iDw2jeq7eoqtAsz5rW3A==}
unws@0.2.4:
resolution: {integrity: sha512-/N1ajiqrSp0A/26/LBg7r10fOcPtGXCqJRJ61sijUFoGZMr6ESWGYn7i0cwr7fR7eEECY5HsitqtjGHDZLAu2w==}
engines: {node: '>=16.14.0'}
peerDependencies:
ws: '*'
update-browserslist-db@1.1.0:
resolution: {integrity: sha512-EdRAaAyk2cUE1wOf2DkEhzxqOQvFOoRJFNS6NeyJ01Gp2beMRpBAINjM2iDXE3KCuKhwnvHIQCJm6ThL2Z+HzQ==}
hasBin: true
@@ -1779,6 +1782,10 @@ packages:
resolution: {integrity: sha512-b+1eJOlsR9K8HJpow9Ok3fiWOWSIcIzXodvv0rQjVoOVNpWMpxf1wZNpt4y9h10odCNrqnYp1OBzRktckBe3sA==}
hasBin: true
uuidv7@1.0.2:
resolution: {integrity: sha512-8JQkH4ooXnm1JCIhqTMbtmdnYEn6oKukBxHn1Ic9878jMkL7daTI7anTExfY18VRCX7tcdn5quzvCb6EWrR8PA==}
hasBin: true
vite-node@0.33.0:
resolution: {integrity: sha512-19FpHYbwWWxDr73ruNahC+vtEdza52kA90Qb3La98yZ0xULqV8A5JLNPUff0f5zID4984tW7l3DH2przTJUZSw==}
engines: {node: '>=v14.18.0'}
@@ -2830,6 +2837,10 @@ snapshots:
isexe@2.0.0: {}
isows@1.0.6(ws@8.14.2):
dependencies:
ws: 8.14.2
jiti@1.21.6: {}
jsonc-parser@3.2.0: {}
@@ -3202,13 +3213,14 @@ snapshots:
supports-preserve-symlinks-flag@1.0.0: {}
surrealdb.js@0.8.4:
surrealdb@1.1.0(tslib@2.6.2)(typescript@5.2.2)(ws@8.14.2):
dependencies:
unws: 0.2.4(ws@8.14.2)
ws: 8.14.2
isows: 1.0.6(ws@8.14.2)
tslib: 2.6.2
typescript: 5.2.2
uuidv7: 1.0.2
transitivePeerDependencies:
- bufferutil
- utf-8-validate
- ws
svelte-check@3.5.2(postcss-load-config@4.0.1(postcss@8.4.40))(postcss@8.4.40)(svelte@4.2.1):
dependencies:
@@ -3426,10 +3438,6 @@ snapshots:
webpack-sources: 3.2.3
webpack-virtual-modules: 0.5.0
unws@0.2.4(ws@8.14.2):
dependencies:
ws: 8.14.2
update-browserslist-db@1.1.0(browserslist@4.23.3):
dependencies:
browserslist: 4.23.3
@@ -3440,6 +3448,8 @@ snapshots:
uuid@9.0.1: {}
uuidv7@1.0.2: {}
vite-node@0.33.0(@types/node@20.6.4):
dependencies:
cac: 6.7.14

View File

@@ -1,5 +1,5 @@
import Surreal from "surrealdb.js";
export type { QueryResult } from "surrealdb.js/script/types";
import Surreal, { RecordId, StringRecordId } from "surrealdb";
export type { QueryResult } from "surrealdb";
try {
if (document || window) {
@@ -15,22 +15,18 @@ const CONFIG = {
db: process.env.SURREAL_DB ?? "",
} as const;
// for (let key in CONFIG) {
// if (
// !CONFIG[key as keyof typeof CONFIG] ||
// CONFIG[key as keyof typeof CONFIG] === ""
// ) {
// throw new Error(`Missing configuration for ${key}`);
// }
// }
const db = new Surreal();
let _surreal =
CONFIG.url.length > 0
? new Surreal(`http://${CONFIG.url}/rpc`, {
auth: { user: CONFIG.user, pass: CONFIG.pass },
ns: CONFIG.ns,
db: CONFIG.db,
})
: undefined;
export function parseToRID(idStr: string) {
return new StringRecordId(idStr);
// const [a, b] = idStr.split(":");
// return new RecordId(a, b);
}
export const surreal = _surreal as Surreal;
if (CONFIG.url.length > 0) {
await db.connect(`http://${CONFIG.url}/rpc`);
await db.use({ namespace: CONFIG.ns, database: CONFIG.db });
await db.signin({ username: CONFIG.user, password: CONFIG.pass });
}
export const surreal = db as Surreal;
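Note: a minimal sketch of the surrealdb@1.x usage this connector moves to, assuming the SDK's documented connect/use/signin flow; the URL, namespace, credentials, and record id below are placeholders, not values from this repo's config.

import Surreal, { RecordId, StringRecordId } from "surrealdb";

const db = new Surreal();
await db.connect("http://127.0.0.1:8000/rpc");            // placeholder URL
await db.use({ namespace: "app", database: "app" });      // placeholder ns/db
await db.signin({ username: "root", password: "root" });  // placeholder creds

// query() now resolves to one unwrapped result per statement and throws on
// errors, instead of returning { status, result } objects per statement.
const [draws] = await db.query<[{ id: RecordId }[]]>(
  "select * from apidraw order by closeTime",
);

// select() takes a RecordId/StringRecordId and resolves to a single record,
// not a one-element array as in surrealdb.js 0.8.
const one = await db.select(new StringRecordId("apidraw:some_id"));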

View File

@@ -10,6 +10,77 @@ const upsertData = async (
data: BookingEntry[],
date: string,
tries: number = 0,
): Promise<void> => {
if (tries >= 3) {
console.log("Max tries exceeded for processing data");
return;
}
const tableName = getTableName(date);
const drawId = data[0].drawId;
console.log(`[...] Processing ${data.length} entries for ${tableName}`);
try {
// Delete existing entries for this date
console.log(
`[...] Deleting existing entries for ${date} in ${tableName}`,
);
console.time("deletion time");
await surreal.query(
`DELETE type::table($tableName) WHERE bookDate = $bookDate AND drawId = $drawId`,
{ tableName, bookDate: date, drawId },
);
console.timeEnd("deletion time");
// Prepare new entries
const entries = data.map((entry) => ({
...entry,
id: `${tableName}:${entry.id}`,
createdAt: new Date().toISOString(),
updatedAt: new Date().toISOString(),
bookDate: entry.bookDate.split(" ")[0],
requestId: entry.requestId ?? "",
}));
// Calculate chunk size (5 to 20% of total data length, capped at 1,000)
let chunkSize = Math.floor(
Math.random() * (data.length * 0.2 - data.length * 0.05) +
data.length * 0.05,
);
if (chunkSize > 1_000) {
chunkSize = 1_000;
}
console.log(`Chunk Size: ${chunkSize}`);
console.log(
`[+] Inserting ${entries.length} entries into ${tableName}`,
);
// Insert new entries in chunks
console.time("insertion time");
const chunks = chunkArray(entries, chunkSize).map(async (chunk) => {
await surreal.insert<BookingEntry>(tableName, chunk);
});
for (let i = 0; i < chunks.length; i += 2) {
await Promise.all(chunks.slice(i, i + 2));
}
console.timeEnd("insertion time");
console.log(
`[+] Successfully processed ${data.length} entries into ${tableName}`,
);
} catch (err) {
console.log("Failed to process data, attempting retry");
return await upsertData(data, date, tries + 1);
}
};
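Note on the chunked insert above: .map(async ...) starts every surreal.insert call as soon as the array is built, so the stride-2 loop only awaits the already-running promises in pairs rather than capping concurrency at two. If two-at-a-time insertion is the intent, a sketch that defers the calls (reusing the chunkArray/entries/chunkSize/tableName names from the function above):

const chunked = chunkArray(entries, chunkSize);
for (let i = 0; i < chunked.length; i += 2) {
  // the insert promises are only created here, so at most two run concurrently
  await Promise.all(
    chunked
      .slice(i, i + 2)
      .map((chunk) => surreal.insert<BookingEntry>(tableName, chunk)),
  );
}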
const upsertDataDED = async (
data: BookingEntry[],
date: string,
tries: number = 0,
): Promise<void> => {
const tableName = getTableName(date);
console.log(`[...] Upserting ${data.length} entries into ${tableName}`);
@@ -19,13 +90,15 @@ const upsertData = async (
`select value id from type::table($tableName) where bookDate = $bookDate`,
{ tableName, bookDate: date },
);
for (let eId of alreadyPresent.result ?? []) {
for (let eId of alreadyPresent ?? []) {
alreadyPresentIds.add(eId);
}
} catch (err) {
console.log("Failed to fetch, seeing if can try again");
if (tries >= 3) {
console.log("Max tries exceeded for initial fetch for upserting data");
console.log(
"Max tries exceeded for initial fetch for upserting data",
);
return;
}
return await upsertData(data, date, tries + 1);
@@ -143,10 +216,7 @@ const getBookingEntriesForDealer = async (
data,
)} entries for ${userId}, filters are ${date}, ${drawId} for ${tableName}`,
);
if (data.status === "OK") {
return data.result ?? [];
}
return [];
return data ?? [];
};
const getBookingEntriesByDraw = async (date: string, drawId: string) => {
@@ -156,13 +226,12 @@ const getBookingEntriesByDraw = async (date: string, drawId: string) => {
{
tableName,
date: date,
drawId: parseInt(drawId.includes(":") ? drawId.split(":")[1] : drawId),
drawId: parseInt(
drawId.includes(":") ? drawId.split(":")[1] : drawId,
),
},
);
if (data.status === "OK") {
return data.result ?? [];
}
return [];
return data ?? [];
};
const deleteDataOlderThan2Weeks = async () => {

View File

@@ -1,7 +1,7 @@
import { constants } from "$lib/utils/constants";
import type { Draw } from "$lib/utils/data.types";
import { getDraws } from "../external/api.scraping.helpers";
import { surreal } from "../connectors/surreal.db";
import { parseToRID, surreal } from "../connectors/surreal.db";
import { getSessionFromStore } from "../utils/session.service";
const tableName = "apidraw";
@@ -40,28 +40,27 @@ const getAllDraws = async (
skipOptional?: boolean,
retry: number = 0,
): Promise<Draw[]> => {
let query = `select * from apidraw order by closeTime`;
const [out] = await surreal.query<[Draw[]]>(query);
if (out.status === "OK") {
const draws = out.result ?? [];
if (draws.length > 0) {
return draws;
const [out] = await surreal.query<[Draw[]]>(
`select * from apidraw order by closeTime`,
);
if (out.length > 0) {
return out;
}
await _populateDrawsTable();
if (retry < 3) {
return getAllDraws(skipOptional, retry + 1);
}
}
return [];
};
async function setFilterDuplicatesFlag(drawId: string, flag: boolean) {
const [d] = await surreal.select<Draw>(drawId);
const rid = parseToRID(drawId);
const d = await surreal.select<Draw>(rid);
if (!d || !d.id) {
return;
}
console.log("setFilterDuplicatesFlag :: ", drawId, flag);
await surreal.update(drawId, {
console.log("setFilterDuplicatesFlag :: ", rid, flag);
await surreal.update(rid, {
...d,
filterDuplicatesWhilePosting: flag,
updatedAt: new Date().toISOString(),
@@ -69,12 +68,14 @@ async function setFilterDuplicatesFlag(drawId: string, flag: boolean) {
}
async function updateDrawPresetInfo(draw: Draw) {
const drawId = draw.id;
const [d] = await surreal.select<Draw>(drawId);
const drawId = parseToRID(draw.id);
const d = await surreal.select<Draw>(drawId);
if (!d || !d.id) {
console.log(`Draw not present for ${drawId}`);
return;
}
await surreal.update(drawId, {
console.log(`Updating draw info ${drawId}`);
const out = await surreal.update(drawId, {
...d,
filterDuplicatesWhilePosting: draw.filterDuplicatesWhilePosting,
abRateF: draw.abRateF,
@@ -87,9 +88,9 @@ async function updateDrawPresetInfo(draw: Draw) {
const getDraw = async (drawId: string): Promise<Draw | undefined> => {
const draws = await surreal.select<Draw>(
drawId.includes("apidraw") ? drawId : `apidraw:${drawId}`,
parseToRID(drawId.includes("apidraw") ? drawId : `apidraw:${drawId}`),
);
return draws[0];
return draws;
};
export const dbDraw = {
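A short sketch of the record-id handling this file switches to, assuming the surrealdb@1.x id classes already imported in the connector; the "01ABC" id is illustrative only.

import { RecordId, StringRecordId } from "surrealdb";

const byString = new StringRecordId("apidraw:01ABC"); // wraps an existing "table:id" string
const byParts = new RecordId("apidraw", "01ABC");     // builds the id from table + id parts

// select()/update() now take these id objects instead of raw strings,
// and select() with a record id resolves to a single record.
const draw = await surreal.select<Draw>(byString);
await surreal.update(byString, { ...draw, updatedAt: new Date().toISOString() });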

View File

@@ -12,8 +12,8 @@ const upsertData = async (data: PostDataHistory) => {
{ tableName, bookDate: data.bookDate, drawId: data.drawId },
);
console.log(check);
const firstOut = check.result ? check.result[0] : undefined;
if (check.status === "OK" && !!firstOut && !!firstOut.id) {
const firstOut = check && check.length > 0 ? check[0] : undefined;
if (check.length > 0 && !!firstOut && !!firstOut.id) {
console.log(
`Adding ${data.data.length} entries to ${firstOut.data.length} existing array`,
);
@@ -42,13 +42,15 @@ const getPostDataByDraw = async (date: string, drawId: string) => {
`select * from type::table($tableName) where bookDate = $date and drawId = $drawId`,
{
tableName,
date: date,
drawId: parseInt(drawId.includes(":") ? drawId.split(":")[1] : drawId),
date,
drawId: parseInt(
drawId.includes(":") ? drawId.split(":")[1] : drawId,
),
},
);
let out = [] as PostDataEntry[];
if (data.status === "OK" && data.result.length > 0) {
out = data.result[0].data;
if (data.length > 0) {
out = data[0].data;
}
return out;
};
@@ -60,14 +62,13 @@ async function doesPostHistoryDataExist(date: string, drawId: string) {
{
tableName,
date: date,
drawId: parseInt(drawId.includes(":") ? drawId.split(":")[1] : drawId),
drawId: parseInt(
drawId.includes(":") ? drawId.split(":")[1] : drawId,
),
},
);
if (data.status === "OK") {
return data.result[0]?.id.length > 0;
}
return false;
return data[0]?.id.length > 0;
}
async function deletePostDataByDraw(date: string, drawId: string) {
@@ -77,11 +78,13 @@ async function deletePostDataByDraw(date: string, drawId: string) {
{
tableName,
date: date,
drawId: parseInt(drawId.includes(":") ? drawId.split(":")[1] : drawId),
drawId: parseInt(
drawId.includes(":") ? drawId.split(":")[1] : drawId,
),
},
);
if (data.status === "OK") {
if (data.length > 0) {
await surreal.delete(tableName);
return true;
}

View File

@@ -5,43 +5,41 @@ import {
type ApiPostUserWithParent,
ApiUserTypes,
DEFAULT_RANDOM_DISTRIBUTOR,
zApiPostUser,
} from "$lib/utils/data.types";
import { surreal } from "../connectors/surreal.db";
import { parseToRID, surreal } from "../connectors/surreal.db";
const getUserById = async (userId: string) => {
const query = `select * from apiuser where id = $id`;
const [rizzult] = await surreal.query<[ApiUser[]]>(query, { id: userId });
return rizzult.result?.[0];
return await surreal.select<ApiUser>(parseToRID(userId));
// const [rizzult] = await surreal.query<[ApiUser[]]>(query, { id: userId });
// return rizzult[0];
};
const getAllIdsByUserType = async (userType: number) => {
const query = `select value id from apiuser where userType = $userType`;
const rizzult = (await surreal.query<[string[]]>(query, { userType }))[0];
return (rizzult.result ?? []).map((e) => {
return e.split(":")[1];
const [rizzult] = await surreal.query<[any[]]>(query, { userType });
return (rizzult ?? []).map((e) => {
return e.id;
});
};
async function allUsersOfTypeLimitedInfo(userType: number) {
const rizzult = (
await surreal.query<[ApiPostUser[]]>(
const [rizzult] = await surreal.query<[ApiPostUser[]]>(
`select id,userName,userId,postData from apiuser where userType = $userType`,
{ userType: userType },
)
)[0];
if (rizzult.status == "OK") {
return rizzult.result ?? [];
}
return [];
);
return rizzult ?? [];
}
async function setPostDataFlagForUsers(users: ApiPostUser[]) {
for (const user of users) {
const [u] = await surreal.select<ApiUser>(user.id);
const uid = parseToRID(user.id);
const u = await surreal.select<ApiUser>(uid);
if (!u || !u.id) {
continue;
}
await surreal.update<LooseApiUser>(user.id, {
console.log("Updating user :: ", uid);
await surreal.update<LooseApiUser>(uid, {
...u,
postData: user.postData ?? false,
});
@@ -53,19 +51,21 @@ const getUserTypeCount = async (userType: number) => {
let query = `${queryBase} and disable = 0 group all`;
let disabledQuery = `${queryBase} and disable = 1 group all`;
const enabledRizzult = (
await surreal.query<[{ count: number }[]]>(query, { userType: userType })
await surreal.query<[{ count: number }[]]>(query, {
userType: userType,
})
)[0];
const count = { enabled: 0, disabled: 0 };
if (enabledRizzult.status == "OK") {
count.enabled = enabledRizzult.result[0]?.count ?? 0;
if (enabledRizzult.length > 0) {
count.enabled = enabledRizzult[0]?.count ?? 0;
}
const disabledRizzult = (
await surreal.query<[{ count: number }[]]>(disabledQuery, {
userType: userType,
})
)[0];
if (disabledRizzult.status == "OK") {
count.disabled = disabledRizzult.result[0]?.count ?? 0;
if (disabledRizzult.length > 0) {
count.disabled = disabledRizzult[0]?.count ?? 0;
}
return count;
};
@@ -77,10 +77,7 @@ const allUsersOfType = async (userType: number) => {
{ userType: userType },
)
)[0];
if (rizzult.status == "OK") {
return rizzult.result ?? [];
}
return [];
return rizzult ?? [];
};
async function updatePostUsersBalances(
@@ -90,10 +87,10 @@ async function updatePostUsersBalances(
console.log(payload);
for (const each of payload) {
const [rizzult] = await surreal.query<[ApiUser[]]>(
`update $userId set balance = $balance`,
{ userId: each.id, balance: each.balance },
`update type::table($tableName) set balance = $balance`,
{ tableName: each.id, balance: each.balance },
);
if (rizzult.status !== "OK") {
if (rizzult.length < 1) {
console.error("updatePostUsersBalance :: ", rizzult);
}
}
@@ -104,41 +101,27 @@ async function getAllPostUsers() {
const [rizzult] = await surreal.query<[ApiPostUser[]]>(
`select id,userName,userId,postData from apiuser where postData = true`,
);
if (rizzult.status === "OK") {
return rizzult.result ?? [];
}
return [];
return rizzult ?? [];
}
async function getAllPostUsersWithParentUsers() {
const [rizzult] = await surreal.query<[ApiPostUserWithParent[]]>(
`select id,userName,userId,postData,parentDistributor,parentAdmin from apiuser where postData = true`,
);
if (rizzult.status === "OK") {
return rizzult.result ?? [];
const out = [];
for (const each of rizzult) {
out.push({ ...each, id: each.id.toString() });
}
return [];
return out;
// return rizzult ?? [];
}
const getAllDistributorsWithTheirChildren = async () => {
const distributorIds = await getAllIdsByUserType(ApiUserTypes.DISTRIBUTOR);
const out = distributorIds.map(async (id) => {
const [rizzult] = await surreal.query<[ApiUser[]]>(
`select *, (select * from apiuser where parentDistributor = $id) as children from apiuser where id = $prefixedId`,
{ id, prefixedId: `apiuser:${id}` },
);
if (rizzult.status == "OK") {
return rizzult.result[0];
}
return undefined;
});
const responses = await Promise.all(out);
return responses;
};
const getRandomDistributor = async (): Promise<ApiUser> => {
const ignoreList = ["001OP9"];
const randomUser = await _getRandomUser(ApiUserTypes.DISTRIBUTOR, ignoreList);
const randomUser = await _getRandomUser(
ApiUserTypes.DISTRIBUTOR,
ignoreList,
);
if (!randomUser) {
console.log("getting random distributor....");
return DEFAULT_RANDOM_DISTRIBUTOR;
@@ -156,16 +139,12 @@ const _getRandomUser = async (
ignoreList: string[],
): Promise<ApiUser | undefined> => {
console.log("_getRandomUser :: ", userType);
const rizzult = (
await surreal.query<[ApiUser[]]>(
const [rizzult] = await surreal.query<[ApiUser[]]>(
`select * from apiuser where disable = 0 and userType = $userType and userId notinside $ignoreList order by rand() limit 1`,
{ userType: userType, ignoreList: ignoreList },
)
)[0];
if (rizzult.status == "OK") {
);
console.log("found random user");
return rizzult.result[0];
}
return rizzult[0];
};
const doesExist = async (userId?: string) => {
@@ -175,9 +154,7 @@ const doesExist = async (userId?: string) => {
"select count() from apiuser where userId = $userId group all",
{ userId: userId },
);
if (rizzult.status == "OK") {
return rizzult.result?.count > 0;
}
return rizzult?.count > 0;
}
return false;
};
@@ -200,7 +177,9 @@ const insertMany = async (data: LooseApiUser[], postUsers: ApiPostUser[]) => {
async function upsertMany(
data: LooseApiUser[],
wipeTable: boolean,
deleteUserType: typeof ApiUserTypes.DISTRIBUTOR | typeof ApiUserTypes.DEALER,
deleteUserType:
| typeof ApiUserTypes.DISTRIBUTOR
| typeof ApiUserTypes.DEALER,
) {
const postUsers = await getAllPostUsers();
console.log(postUsers);
@@ -217,14 +196,17 @@ async function upsertMany(
if (apiUser.disable === 1) {
return;
}
const [u] = await surreal.select<ApiUser>(`apiuser:${apiUser.id}`);
const uid = parseToRID(`apiuser:${apiUser.id}`);
const u = await surreal.select<ApiUser>(uid);
if (!u || !u.id) {
toCreate.push(apiUser);
return;
}
let postData =
u.postData ?? !!postUsers.find((pu) => pu.userId === u.userId) ?? false;
const qId = u.id;
u.postData ??
!!postUsers.find((pu) => pu.userId === u.userId) ??
false;
const qId = parseToRID(u.id);
await surreal.update<LooseApiUser>(qId, {
id: u.id,
userId: apiUser.userId,
@@ -267,7 +249,6 @@ export const dbApiUser = {
allUsersOfType,
allUsersOfTypeLimitedInfo,
getUserById,
getAllDistributorsWithTheirChildren,
getUserTypeCount,
getAllIdsByUserType,
getAllPostUsers,

View File

@@ -1,4 +0,0 @@
export const dbBooking = {
};

View File

@@ -1,5 +1,5 @@
import type { FinalSheetData } from "$lib/utils/data.types";
import { surreal } from "../connectors/surreal.db";
import { parseToRID, surreal } from "../connectors/surreal.db";
const getTableName = (date: string) => {
return `finalsheet${date.replaceAll("-", "")}`;
@@ -9,22 +9,23 @@ const upsertData = async (data: FinalSheetData, date: string) => {
const tableName = getTableName(date);
const [present] = await surreal.query<[FinalSheetData[]]>(
`select id from type::table($tableName) where date = $date and drawId = $drawId`,
{ tableName, date: `${date}`, drawId: data.drawId }
{ tableName, date: `${date}`, drawId: data.drawId },
);
const id = parseToRID(`${tableName}:${data.id}`);
if (present) {
// @ts-ignore
await surreal.update<FinalSheetData>(`${tableName}:${data.id}`, {
await surreal.update<FinalSheetData>(id, {
date: data.date,
drawId: data.drawId,
data: data.data,
totals: data.totals,
// @ts-ignore
createdAt: present?.result[0]?.createdAt ?? new Date().toISOString(),
createdAt: present[0]?.createdAt ?? new Date().toISOString(),
updatedAt: new Date().toISOString(),
});
} else {
// @ts-ignore
await surreal.create<FinalSheetData>(`${tableName}:${data.id}`, {
await surreal.create<FinalSheetData>(id, {
date: data.date,
drawId: data.drawId,
data: data.data,
@@ -35,6 +36,4 @@ const upsertData = async (data: FinalSheetData, date: string) => {
}
};
export const dbFinalSheet = {
upsertData,
};
export const dbFinalSheet = { upsertData };

View File

@@ -1,8 +1,4 @@
import type {
ApiPostUser,
PostDataEntry,
PresetDataEntry,
} from "$lib/utils/data.types";
import type { PresetDataEntry } from "$lib/utils/data.types";
import { surreal } from "../connectors/surreal.db";
const getTableName = (date: string) => {
@@ -12,6 +8,7 @@ const getTableName = (date: string) => {
const insertData = async (data: PresetDataEntry[]) => {
if (data.length < 1) return;
const tableName = getTableName(data[0].bookDate);
console.log(`Inserting ${data.length} rows in ${tableName}`);
const out = await surreal.insert<PresetDataEntry>(tableName, data);
console.log(
`[+] Inserted post data in ${tableName} for ${data[0].bookDate} - ${data[0].drawId}`,
@@ -25,7 +22,7 @@ const getDataByDraw = async (date: string, drawId: number) => {
`select * from type::table($tableName) where bookDate = $date and drawId = $drawId`,
{ tableName, date, drawId },
);
return data.result || ([] as PresetDataEntry[]);
return data || ([] as PresetDataEntry[]);
};
const getDataGroupedBySheetByDraw = async (date: string, drawId: number) => {
@@ -37,9 +34,9 @@ const getDataGroupedBySheetByDraw = async (date: string, drawId: number) => {
const out = {
abData: [] as PresetDataEntry[],
abcData: [] as PresetDataEntry[],
all: data.result || ([] as PresetDataEntry[]),
all: data || ([] as PresetDataEntry[]),
};
for (const row of data.result ?? []) {
for (const row of data ?? []) {
if (row.number.length === 2) {
out.abData.push(row);
} else if (row.number.length === 3) {

View File

@@ -1,16 +1,14 @@
import type { User } from "$lib/utils/data.types";
import { surreal, type QueryResult } from "../connectors/surreal.db";
import { surreal } from "../connectors/surreal.db";
export const dbUser = {
doesExist: async (username?: string) => {
if (username) {
const [rizzult] = await surreal.query<{ count: number }[]>(
"select count() from user where username = $username group all",
{ username: username }
{ username },
);
if (rizzult.status == "OK") {
return rizzult.result?.count > 0;
}
return rizzult?.count > 0;
}
return false;
},
@@ -26,7 +24,7 @@ export const dbUser = {
return [{ message: "User already exists." }];
}
const { username, password, association, userType } = data;
const out = await surreal.create<any>(`user:ulid()`, {
const out = await surreal.create<any>(`user`, {
createdAt: Date.now().toString(),
updatedAt: Date.now().toString(),
username,
@@ -50,21 +48,19 @@ export const dbUser = {
const rizzult = (
await surreal.query<[User[]]>(
`select * from user where username = $username`,
{ username: d.username }
{ username: d.username },
)
)[0];
if (rizzult.status == "OK") {
return rizzult.result[0];
}
return rizzult[0];
}
return undefined;
},
getChildren: async (username?: string) => {
const rizzult = await surreal.query<User[]>(
const [rizzult] = await surreal.query<User[]>(
`select * from user where association = $username`,
{ username: username }
{ username: username },
);
return getParsedUsers(rizzult);
return rizzult;
},
update: async (id: string, data: { association: string }) => {
const [rizzult] = await surreal.update<User>(`user:${id}`, {
@@ -78,13 +74,3 @@ export const dbUser = {
return out[0].id;
},
};
const getParsedUsers = (data: QueryResult<User>[]) => {
const users = [] as User[];
for (const each of data) {
if (each.status == "OK") {
users.push(each.result);
}
}
return users;
};
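On the create change above (user:ulid() → user): a sketch of record creation under surrealdb@1.x, assuming the SDK's create semantics. The old string asked SurrealQL for a ulid()-generated id, while a bare table name falls back to SurrealDB's default generated id; a fixed id would now go through RecordId (imported from "surrealdb"). Field values here are illustrative.

// bare table name: SurrealDB assigns the record id
const created = await surreal.create("user", {
  username: "alice",                // hypothetical value
  createdAt: Date.now().toString(),
});

// explicit id: pass a RecordId instead of a "user:..." string
const pinned = await surreal.create(new RecordId("user", "alice"), {
  username: "alice",
});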

View File

@@ -72,7 +72,9 @@ export async function postDataToApi(payload: {
return {
ok: false,
detail: "User not found to post data with",
errors: [{ message: "User not found for request" }] as ServerError,
errors: [
{ message: "User not found for request" },
] as ServerError,
};
}
if (!dataByUser[userId]) {
@@ -87,7 +89,9 @@ export async function postDataToApi(payload: {
const session = payload.sessions[userId];
const usr = payload.users.find((u) => u.userId === userId);
if (!usr) {
throw new Error(`User ${userId} not found for posting to api`);
throw new Error(
`User ${userId} not found for posting to api`,
);
}
const distId = usr.parentDistributor ?? 0;
@@ -96,7 +100,10 @@ export async function postDataToApi(payload: {
const date = new Date().toISOString().split("T")[0];
let ptr = 0;
const userResponseIds = [] as { requestId: number; bookId: string }[];
const userResponseIds = [] as {
requestId: number;
bookId: string;
}[];
while (ptr < userData.length) {
let tries = 0;
@@ -109,7 +116,7 @@ export async function postDataToApi(payload: {
drawId,
date,
);
const res = await sendBatchRequest(
const res = await mockSendBatchRequest(
session,
dealerId,
payload.draw,
@@ -118,13 +125,18 @@ export async function postDataToApi(payload: {
);
let rj:
| APIRespnose<{
bookDtos: { bookId: string; requestId: number }[];
bookDtos: {
bookId: string;
requestId: number;
}[];
}>
| undefined = undefined;
try {
rj = (await res.json()) as any;
} catch (err) {
console.log("Encountered error while parsing post response");
console.log(
"Encountered error while parsing post response",
);
console.log(res.status, err);
}
if (rj && rj.code === 200 && res.status === 200) {
@@ -155,7 +167,9 @@ export async function postDataToApi(payload: {
});
console.log(await out.text());
}
throw new Error(`Failed to send data to api for user ${userId}`);
throw new Error(
`Failed to send data to api for user ${userId}`,
);
}
}
return userResponseIds;

View File

@@ -44,9 +44,12 @@ function filterMatching(
export async function updateBalanceOfPostUsers(users: ApiPostUserWithParent[]) {
const sessions = await getAllSessions();
console.log(sessions);
console.log(`Fetching balances`);
const balances = [] as { id: string; balance: number }[];
for (const user of users) {
const session = sessions.find((e) => e.value.userId === user.id);
console.log(`Finding ${user.id} in sessions`);
const session = sessions.find((e) => e.value.userId.includes(user.id));
const jwt = session?.value.sessionToken;
if (!jwt) {
return {
@@ -54,15 +57,19 @@ export async function updateBalanceOfPostUsers(users: ApiPostUserWithParent[]) {
detail: `Session not found for user ${user.userId}`,
};
}
const out = await getUsersBalance(+user.id.split(":")[1], jwt);
const uid = session.value.userId.toString();
console.log(`Found ${uid} in session`);
const out = await getUsersBalance(+uid.split(":")[1], jwt);
if (!out) {
return {
ok: false,
detail: `Error fetching balance for user ${user.userName}`,
};
}
balances.push({ id: user.id, balance: out });
balances.push({ id: uid, balance: out });
}
console.log(`Updating balances`);
console.log(balances);
await dbApiUser.updatePostUsersBalances(balances);
return {
ok: true,
@@ -131,7 +138,9 @@ export async function fetchDataForPosting(
if (draw.filterDuplicatesWhilePosting === true) {
console.log(`[+] Removing numbers that have repeating digits`);
console.log(`[=] Original : AB: ${abNums.length}, ABC: ${abcNums.length}`);
console.log(
`[=] Original : AB: ${abNums.length}, ABC: ${abcNums.length}`,
);
abNums = removeNumbersWithRepeatingDigits(abNums);
abcNums = removeNumbersWithRepeatingDigits(abcNums);
}
@@ -243,7 +252,8 @@ export async function generatePostDataArrayFromBaseInfo(
) {
continue;
}
const randomUserId = users[Math.floor(Math.random() * users.length)].userId;
const randomUserId =
users[Math.floor(Math.random() * users.length)].userId;
if (balanceCounts[randomUserId] === undefined) {
balanceCounts[randomUserId] = 0;
}
@@ -274,7 +284,8 @@ export async function generatePostDataArrayFromBaseInfo(
let out = false;
for (const key in balanceCounts) {
if (
balanceCounts[key] > (users.find((u) => u.userId === key)?.balance ?? 0)
balanceCounts[key] >
(users.find((u) => u.userId === key)?.balance ?? 0)
) {
out = true;
break;
@@ -375,7 +386,9 @@ export function rebalancePostDataListByBalanceOfUsers(
}
})
.map((r) => r.id);
console.log(`Have to move ${entriesToMove.length} entries to other users`);
console.log(
`Have to move ${entriesToMove.length} entries to other users`,
);
// find a user who has enough balance
@@ -406,7 +419,8 @@ export function rebalancePostDataListByBalanceOfUsers(
continue;
}
entry.userId = userWithEnoughBalance.userId;
balanceCounts[userWithEnoughBalance.userId] += entry.first + entry.second;
balanceCounts[userWithEnoughBalance.userId] +=
entry.first + entry.second;
balanceCounts[user.userId] -= entry.first + entry.second;
amountMoved += entry.first + entry.second;
if (amountMoved >= difference) {
@@ -422,7 +436,9 @@ export function rebalancePostDataListByBalanceOfUsers(
export async function fetchPostDataHistory(input: PostDataHistoryFilters) {
const { draw, date } = input;
console.log(`Fetching post data from HISTORY for draw: ${date} - ${draw.id}`);
console.log(
`Fetching post data from HISTORY for draw: ${date} - ${draw.id}`,
);
const found = await dbApiPostData.getPostDataByDraw(date, draw.id);
if (!found) {
return { data: [], users: [], ok: false, detail: "Data not found" };

View File

@@ -21,8 +21,7 @@ export const apiAuthRouter = createTRPCRouter({
{
headers: {
...constants.SCRAP_API_BASE_HEADERS,
Accept:
"image/avif,image/webp,image/apng,image/svg+xml,image/*,*/*;q=0.8",
Accept: "image/avif,image/webp,image/apng,image/svg+xml,image/*,*/*;q=0.8",
},
},
);
@@ -75,7 +74,10 @@ export const apiAuthRouter = createTRPCRouter({
userType: userType,
password: password,
});
console.log("[=] Token Response :: ", JSON.stringify(token, null, 2));
console.log(
"[=] Token Response :: ",
JSON.stringify(token, null, 2),
);
if (!token.ok) {
return {
success: false,

View File

@@ -70,6 +70,9 @@ export const apiDataRouter = createTRPCRouter({
] as ServerError,
};
}
console.log(
`Fetching data for ${userIds.length} users for draw ${drawId}`,
);
const userIdsInt = userIds.map((x) => parseInt(x.split(":")[1]));
const out = await getData(
sess.sessionToken,
@@ -95,7 +98,11 @@ export const apiDataRouter = createTRPCRouter({
getDataByFilters: protectedProcedure
.input(
z.object({ date: z.string(), drawId: z.string(), userId: z.string() }),
z.object({
date: z.string(),
drawId: z.string(),
userId: z.string(),
}),
)
.mutation(async ({ input }) => {
const { date, drawId, userId } = input;
@@ -151,7 +158,11 @@ export const apiDataRouter = createTRPCRouter({
getFinalSheetRow: protectedProcedure
.input(
z.object({ date: z.string(), drawId: z.string(), number: z.string() }),
z.object({
date: z.string(),
drawId: z.string(),
number: z.string(),
}),
)
.mutation(async ({ input }) => {
return {

View File

@@ -16,10 +16,6 @@ export const apiUserRouter = createTRPCRouter({
getAllDealersCount: protectedProcedure.query(async () => {
return await dbApiUser.getUserTypeCount(ApiUserTypes.DEALER);
}),
getDistributorsWithTheirChildren: protectedProcedure.query(async () => {
const users = await dbApiUser.getAllDistributorsWithTheirChildren();
return { users };
}),
getAllDealersPostUserFormat: protectedProcedure.query(async () => {
return await dbApiUser.allUsersOfTypeLimitedInfo(ApiUserTypes.DEALER);

View File

@@ -33,9 +33,9 @@ export const bookingRouter = createTRPCRouter({
const did = parseInt(drawId.split(":")[1]);
const [out] = await surreal.query<[BookingEntry[]]>(
`select * from type::table($table) where drawId = $drawId and bookDate = $bookDate order by requestId desc`,
{ table: tn, drawId: did, bookDate: date }
{ table: tn, drawId: did, bookDate: date },
);
return { data: out.result ?? [], errors: [] as ServerError };
return { data: out ?? [], errors: [] as ServerError };
}),
syncBooking: protectedProcedure
@@ -53,7 +53,7 @@ export const bookingRouter = createTRPCRouter({
createdAt: new Date().toISOString(),
updatedAt: new Date().toISOString(),
};
})
}),
);
}
return { detail: "Add Booking api donezo", syncedEntriesIds };
@@ -65,7 +65,7 @@ export const bookingRouter = createTRPCRouter({
await Promise.all(
input.bookingIds.map(async (id) => {
await surreal.delete(id);
})
}),
);
return { detail: `Deleted ${input.bookingIds.length} Entries` };
}),

View File

@@ -21,10 +21,11 @@ export const drawRouter = createTRPCRouter({
savePresetInfoForDraws: protectedProcedure
.input(z.object({ draws: z.array(zDraw) }))
.mutation(async ({ input }) => {
// console.log("savePresetInfoForDraws", input);
console.log("Saving preset info for draws");
for (const draw of input.draws) {
await dbDraw.updateDrawPresetInfo(draw);
}
console.log("Done saving preset info for draws");
return { success: true };
}),
});

View File

@@ -28,6 +28,7 @@ export const zUser = z.object({
userType: z.string().min(4).max(5),
association: z.string(),
});
export type User = z.infer<typeof zUser>;
export const zLooseUser = z.object({
id: z.string().length(16).optional(),
@@ -38,6 +39,7 @@ export const zLooseUser = z.object({
userType: z.string().min(4).max(5),
association: z.string(),
});
export type LooseUser = z.infer<typeof zLooseUser>;
export const zApiUser = z.object({
id: z.string().length(16),
@@ -70,6 +72,7 @@ export const zApiUser = z.object({
createdAt: z.string().nullable(),
updatedAt: z.string().nullable(),
});
export type ApiUser = z.infer<typeof zApiUser>;
export const zApiPostUser = z.object({
id: z.string(),
@@ -78,6 +81,7 @@ export const zApiPostUser = z.object({
postData: z.boolean(),
balance: z.number().optional(),
});
export type ApiPostUser = z.infer<typeof zApiPostUser>;
export const zLooseApiUser = z.object({
id: z.string().length(16).optional(),
@@ -110,6 +114,7 @@ export const zLooseApiUser = z.object({
createdAt: z.string().nullable().optional(),
updatedAt: z.string().nullable().optional(),
});
export type LooseApiUser = z.infer<typeof zLooseApiUser>;
export const zDraw = z.object({
id: z.string(),
@@ -125,7 +130,6 @@ export const zDraw = z.object({
createdAt: z.string().nullable().optional(),
updatedAt: z.string().nullable().optional(),
});
export type Draw = z.infer<typeof zDraw>;
export const zBookingEntry = z.object({
@@ -245,21 +249,11 @@ export const UserTypes = { ADMIN: "ADMIN", USER: "USER" };
export const ApiUserTypes = { ADMIN: 1, DISTRIBUTOR: 2, DEALER: 3 };
export type User = z.infer<typeof zUser>;
export type LooseUser = z.infer<typeof zLooseUser>;
export type ApiUser = z.infer<typeof zApiUser>;
export type ApiPostUser = z.infer<typeof zApiPostUser>;
export type ApiPostUserWithParent = ApiPostUser & {
parentAdmin: number;
parentDistributor: number;
};
export type LooseApiUser = z.infer<typeof zLooseApiUser>;
export type LexiCodeCacheObject = z.infer<typeof zLexiCodeCacheObject>;
export type SimpleLexiCodeObject = { number: string; lexiCode: string };

View File

@@ -7,7 +7,6 @@ import {
} from "$lib/server/external/api.scraping.helpers";
import { dbApiUser } from "$lib/server/db/apiuser.db";
import { constants } from "$lib/utils/constants";
import fs from "fs";
import { getSessionFromStore } from "$lib/server/utils/session.service";
export const actions = {
@@ -43,6 +42,7 @@ export const actions = {
const distributor_ids = await dbApiUser.getAllIdsByUserType(
ApiUserTypes.DISTRIBUTOR,
);
console.log(distributor_ids);
const done = await getDealers(sess.sessionToken, distributor_ids);
console.log(`[+] ${done.dealers.length} dealers found`);
// fs.writeFileSync("dealers.json", JSON.stringify(done.dealers, null, 2));

View File

@@ -40,6 +40,7 @@
let refetchDataM = api.apiData.refetchData.createMutation({
onSuccess: (o) => {
console.log(o);
if (!o.success) {
for (const each of o.errors) {
toast.error(each.message);
@@ -49,6 +50,7 @@
}
},
onError: (e) => {
console.log(e);
toast.error(e.message);
},
});

View File

@@ -19,12 +19,12 @@
let updateDrawFilterM = api.draw.savePresetInfoForDraws.createMutation({
onSuccess: (d) => {
console.log(d);
toast("Saved successfully.");
toast("Saved successfully");
},
onError: (e) => {
console.error(e);
toast(
"An error occurred while fetching data. Try again after a page refresh.",
"An error occurred while fetching data. Try again after a page refresh",
);
},
});