// rdv/src/lib/server/db/apidata.db.ts
import type { BookingEntry } from "$lib/utils/data.types";
import { chunkArray } from "../array.chunk";
import { surreal } from "../connectors/surreal.db";
/** Derives the per-day table name, e.g. "2024-01-02" -> "apidata20240102". */
const getTableName = (date: string): string => `apidata${date.split("-").join("")}`;
/**
 * Replaces all entries for a date + draw in that date's table: deletes the
 * existing rows matching `bookDate` and `drawId`, then bulk-inserts `data`
 * in randomly sized chunks. Retries the whole operation up to 3 times.
 *
 * @param data  Entries to write; all are assumed to share one drawId
 *              (only `data[0].drawId` is used for the delete filter).
 * @param date  ISO date (YYYY-MM-DD); selects the per-day table.
 * @param tries Internal retry counter; callers omit it.
 */
const upsertData = async (
  data: BookingEntry[],
  date: string,
  tries: number = 0,
): Promise<void> => {
  if (tries >= 3) {
    console.log("Max tries exceeded for processing data");
    return;
  }
  // Guard: empty batch would crash on data[0].drawId below.
  if (data.length === 0) {
    console.log("No entries to process, skipping");
    return;
  }
  const tableName = getTableName(date);
  const drawId = data[0].drawId;
  console.log(`[...] Processing ${data.length} entries for ${tableName}`);
  try {
    // Delete existing entries for this date + draw
    console.log(
      `[...] Deleting existing entries for ${date} in ${tableName}`,
    );
    console.time("deletion time");
    await surreal.query(
      `DELETE type::table($tableName) WHERE bookDate = $bookDate AND drawId = $drawId`,
      { tableName, bookDate: date, drawId },
    );
    console.timeEnd("deletion time");
    // Prepare new entries; one shared timestamp instead of a Date per entry.
    const now = new Date().toISOString();
    const entries = data.map((entry) => ({
      ...entry,
      id: `${tableName}:${entry.id}`,
      createdAt: now,
      updatedAt: now,
      bookDate: entry.bookDate.split(" ")[0],
      requestId: entry.requestId ?? "",
    }));
    // Chunk size: random 5–20% of the batch, clamped to [1, 4000].
    // The lower clamp matters: for small batches the random value can
    // floor to 0, which would break chunkArray.
    let chunkSize = Math.floor(
      Math.random() * (data.length * 0.2 - data.length * 0.05) +
        data.length * 0.05,
    );
    chunkSize = Math.min(Math.max(chunkSize, 1), 4_000);
    console.log(`Chunk Size: ${chunkSize}`);
    console.log(
      `[+] Inserting ${entries.length} entries into ${tableName}`,
    );
    // Insert new entries in chunks, sequentially.
    console.time("insertion time");
    const chunks = chunkArray(entries, chunkSize);
    for (const chunk of chunks) {
      await surreal.insert<BookingEntry>(tableName, chunk);
    }
    console.timeEnd("insertion time");
    console.log(
      `[+] Successfully processed ${data.length} entries into ${tableName}`,
    );
  } catch (err) {
    // Log the actual error so failed attempts are diagnosable.
    console.log("Failed to process data, attempting retry", err);
    return await upsertData(data, date, tries + 1);
  }
};
/**
 * Upserts entries into the per-day table: rows whose id already exists are
 * updated in place, all others are inserted. Only the initial id fetch is
 * retried (up to 3 times).
 *
 * @param data  Entries to upsert.
 * @param date  ISO date (YYYY-MM-DD); selects the per-day table.
 * @param tries Internal retry counter for the id fetch; callers omit it.
 */
const upsertDataDED = async (
  data: BookingEntry[],
  date: string,
  tries: number = 0,
): Promise<void> => {
  const tableName = getTableName(date);
  console.log(`[...] Upserting ${data.length} entries into ${tableName}`);
  const alreadyPresentIds = new Set<string>();
  try {
    const [alreadyPresent] = await surreal.query<[string[]]>(
      `select value id from type::table($tableName) where bookDate = $bookDate`,
      { tableName, bookDate: date },
    );
    for (const eId of alreadyPresent ?? []) {
      alreadyPresentIds.add(eId);
    }
  } catch (err) {
    console.log("Failed to fetch, seeing if can try again", err);
    if (tries >= 3) {
      console.log(
        "Max tries exceeded for initial fetch for upserting data",
      );
      return;
    }
    // Bug fix: previously retried the WRONG function (upsertData) and used
    // `tries++`, which passes the old value so the counter never advanced.
    return await upsertDataDED(data, date, tries + 1);
  }
  const now = new Date().toISOString();
  // Update payloads keep `id` alongside the patch so the update loop can
  // address the record (previously `id` was omitted, producing
  // `${tableName}:undefined` targets).
  const oldEntries = [] as any[];
  const newEntries = [] as BookingEntry[];
  for (const entry of data) {
    if (alreadyPresentIds.has(`${tableName}:${entry.id}`)) {
      oldEntries.push({
        id: entry.id,
        distributorId: entry.distributorId,
        dealerId: entry.dealerId,
        drawId: entry.drawId,
        bookDate: entry.bookDate.split(" ")[0],
        number: entry.number,
        first: entry.first,
        second: entry.second,
        changedBalance: entry.changedBalance,
        sheetName: entry.sheetName,
        sheetId: entry.sheetId,
        requestId: entry.requestId,
        updatedAt: now,
      });
      continue;
    }
    newEntries.push({
      ...entry,
      id: `${tableName}:${entry.id}`,
      createdAt: now,
      updatedAt: now,
      bookDate: entry.bookDate.split(" ")[0],
      requestId: entry.requestId ?? "",
    });
  }
  console.log(
    `[+] Inserting ${newEntries.length} new entries into ${tableName}`,
  );
  // Chunk size: random 5–20% of the batch, clamped to [1, 10000]
  // (lower clamp prevents a 0-size chunk on small batches).
  let chunkSize = Math.floor(
    Math.random() * (data.length * 0.2 - data.length * 0.05) +
      data.length * 0.05,
  );
  chunkSize = Math.min(Math.max(chunkSize, 1), 10_000);
  console.log(`Chunk Size : ${chunkSize}`);
  console.log(`[+] Inserting new entries`);
  console.time("insertion time");
  // Launch inserts lazily inside each window so at most 2 run concurrently.
  // (Previously the async .map started EVERY chunk immediately, so the
  // batched Promise.all provided no real throttling.)
  const insertChunks = chunkArray(newEntries, chunkSize);
  for (let i = 0; i < insertChunks.length; i += 2) {
    await Promise.all(
      insertChunks
        .slice(i, i + 2)
        .map((chunk) => surreal.insert<BookingEntry>(tableName, chunk)),
    );
  }
  console.timeEnd("insertion time");
  console.log(
    `[+] Updating ${oldEntries.length} old entries into ${tableName}`,
  );
  console.time("update time");
  const updateChunks = chunkArray(oldEntries, chunkSize);
  // At most 10 chunks in flight; within a chunk, updates run in parallel.
  for (let i = 0; i < updateChunks.length; i += 10) {
    await Promise.all(
      updateChunks.slice(i, i + 10).map((chunk) =>
        Promise.all(
          chunk.map(({ id, ...patch }: any) =>
            // @ts-ignore — surreal.update patch typing doesn't cover this shape
            surreal.update<BookingEntry>(`${tableName}:${id}`, patch),
          ),
        ),
      ),
    );
  }
  console.timeEnd("update time");
  console.log(
    `[+] Successfully upserted ${data.length} entries into ${tableName}`,
  );
};
/**
 * Fetches a dealer's booking entries for a date + draw from the per-day
 * table, optionally sorted by requestId descending.
 *
 * @param date   ISO date (YYYY-MM-DD); selects the per-day table.
 * @param drawId Draw id as a string; parsed to a number for the query.
 * @param userId Dealer id as a string; parsed to a number for the query.
 * @param sorted When true, orders results by requestId desc.
 * @returns Matching entries, or [] when the query yields nothing.
 */
const getBookingEntriesForDealer = async (
  date: string,
  drawId: string,
  userId: string,
  sorted?: boolean,
) => {
  const tableName = getTableName(date);
  let query = `select * from type::table($tableName) where bookDate = $date and dealerId = $userId and drawId = $drawId`;
  if (sorted) {
    query += " order by requestId desc";
  }
  const [data] = await surreal.query<[BookingEntry[]]>(query, {
    tableName,
    date: `${date}`,
    userId: parseInt(userId),
    drawId: parseInt(drawId),
  });
  // Log the count, not the whole payload — the previous JSON.stringify(data)
  // dumped every row into the log while claiming to print a count.
  console.log(
    `Found ${
      data?.length ?? 0
    } entries for ${userId}, filters are ${date}, ${drawId} for ${tableName}`,
  );
  return data ?? [];
};
/**
 * Fetches every booking entry for a date + draw from the per-day table.
 * `drawId` may be either a bare numeric string or a "table:id" record
 * reference; only the id portion is used.
 */
const getBookingEntriesByDraw = async (date: string, drawId: string) => {
  const tableName = getTableName(date);
  // "table:id" -> "id"; bare values pass through unchanged.
  const [, afterColon] = drawId.split(":");
  const numericDrawId = parseInt(afterColon ?? drawId);
  const [data] = await surreal.query<[BookingEntry[]]>(
    `select * from type::table($tableName) where bookDate = $date and drawId = $drawId`,
    {
      tableName,
      date: date,
      drawId: numericDrawId,
    },
  );
  return data ?? [];
};
/**
 * Drops per-day tables ("apidata<YYYYMMDD>" and "apipostdata_<YYYYMMDD>")
 * whose encoded date is more than two weeks old. Tables with other names
 * are skipped with a log line.
 */
const deleteDataOlderThan2Weeks = async (): Promise<void> => {
  const [out] = await surreal.query("info for db");
  // @ts-ignore — the "info for db" result shape isn't covered by the client types
  const tableNames = Object.keys(out.result.tables);
  const twoWeeksAgo = new Date();
  twoWeeksAgo.setDate(twoWeeksAgo.getDate() - 14);
  // Both families encode the date as a YYYYMMDD suffix after their prefix;
  // one loop replaces the previous copy-pasted branches.
  const prefixes = ["apidata", "apipostdata_"];
  for (const tableName of tableNames) {
    const prefix = prefixes.find((p) => tableName.startsWith(p));
    if (!prefix) {
      console.log(`Skipping ${tableName}`);
      continue;
    }
    const datePart = tableName.slice(prefix.length);
    const d = new Date(
      parseInt(datePart.slice(0, 4), 10),
      parseInt(datePart.slice(4, 6), 10) - 1, // Month is 0-based in JavaScript Date
      parseInt(datePart.slice(6, 8), 10),
    );
    if (d < twoWeeksAgo) {
      console.log(`[...] Deleting ${tableName}`);
      await surreal.query("remove table if exists " + tableName);
      console.log(`[+] Deleted ${tableName}`);
    }
  }
};
// Public facade for the per-day API-data tables.
// NOTE(review): upsertDataDED is defined above but not exported here —
// confirm whether it is still used elsewhere or is dead code.
export const dbApiData = {
upsertData,
getBookingEntriesForDealer,
getBookingEntriesByDraw,
deleteDataOlderThan2Weeks,
};