Almost done for the fetchers

bootunloader
2026-01-03 22:40:04 +02:00
parent 81264ddb00
commit 17cd8b3c19
7 changed files with 8749 additions and 221 deletions
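Most hunks below follow the same refactor: requests that previously hit constants.PROXY_API_URL directly are now routed through ScrapingBee, with the real endpoint passed in the proxy's url parameter, header forwarding enabled, and every upstream header prefixed with Spb- so the proxy passes it through. A minimal sketch of that shared pattern, assuming ScrapingBee's documented header-forwarding behavior; the helper below is illustrative only and not part of the commit:

// Sketch of the request-building pattern repeated across this commit (not part of the diff).
// With forward_headers enabled, ScrapingBee forwards any header prefixed with "Spb-"
// to the target, stripping the prefix on the way through.
function buildScrapingBeeRequest(
  apiKey: string,
  targetUrl: string,
  upstreamHeaders: Record<string, string>,
) {
  const url = new URL("https://app.scrapingbee.com/api/v1");
  url.searchParams.set("api_key", apiKey);
  url.searchParams.set("url", targetUrl);
  url.searchParams.set("forward_headers", "true");
  // Map each upstream header to its Spb- prefixed form for forwarding.
  const headers = Object.fromEntries(
    Object.entries(upstreamHeaders).map(([key, value]) => [`Spb-${key}`, value]),
  );
  return { url: url.toString(), headers };
}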

distributors_raw.json (8412 added lines): diff suppressed because it is too large


@@ -1,10 +1,10 @@
 import Redis from "ioredis";
+import { logger } from "../logger";
 const redisUrl = process.env.REDIS_URL ?? "";
-console.log(`\n\n[//] Redis URL: ${redisUrl}`);
+logger.info(`\n\n[//] Redis URL: ${redisUrl}`);
-const _redis =
-  redisUrl && redisUrl.length > 0 ? new Redis(redisUrl) : undefined;
+const _redis = redisUrl && redisUrl.length > 0 ? new Redis(redisUrl) : undefined;
 export const redis = _redis as Redis;


@@ -1,67 +1,69 @@
 import Surreal, { StringRecordId } from "surrealdb";
+import { logger } from "../logger";
 export type { QueryResult } from "surrealdb";
 try {
   if (document || window) {
     throw new Error("SurrealDB needs a NodeJS environment to run.");
   }
-} catch (err) { }
+} catch (err) {}
 const CONFIG = {
   url: process.env.SURREAL_URL ?? "",
   user: process.env.SURREAL_USER ?? "",
   pass: process.env.SURREAL_PASS ?? "",
   ns: process.env.SURREAL_NS ?? "",
   db: process.env.SURREAL_DB ?? "",
 } as const;
 const db = new Surreal();
 async function connectDB() {
   try {
     await db.connect(`http://${CONFIG.url}/rpc`);
     await db.use({ namespace: CONFIG.ns, database: CONFIG.db });
     await authenticateDB();
     return true;
   } catch (error) {
-    console.error("Error connecting to SurrealDB:", error);
+    logger.error("Error connecting to SurrealDB:", error);
     return false;
   }
 }
 async function authenticateDB() {
   try {
     await db.signin({ username: CONFIG.user, password: CONFIG.pass });
-    console.log("🔑 Successfully authenticated with SurrealDB");
+    logger.info("🔑 Successfully authenticated with SurrealDB");
     return true;
   } catch (error) {
-    console.error("❌ Authentication failed:", error);
+    logger.warn("❌ Authentication failed:");
+    logger.error(error);
     return false;
   }
 }
 async function ensureAuthenticated() {
   try {
     await db.query("RETURN 1");
   } catch (error: any) {
     if (error.status === 401) {
-      console.warn("⚠️ SurrealDB Auth token expired. Attempting reconnection...");
+      logger.warn("⚠️ SurrealDB Auth token expired. Attempting reconnection...");
       try {
         // Full reconnection instead of just re-authentication
         await db.close();
         const success = await connectDB();
         if (success) {
-          console.log("✅ Successfully reconnected to database");
+          logger.info("✅ Successfully reconnected to database");
         } else {
-          console.error("❌ Failed to reconnect to database");
+          logger.error("❌ Failed to reconnect to database");
         }
       } catch (reconnectError) {
-        console.error("❌ Reconnection failed:", reconnectError);
+        logger.error("❌ Reconnection failed:", reconnectError);
       }
     } else {
-      console.error("Unexpected database error:", error);
+      logger.error("Unexpected database error:", error);
     }
   }
 }
 // 1 minute
@@ -69,21 +71,21 @@ const CHECK_INTERVAL = 60 * 1000;
 let intervalId: NodeJS.Timeout;
 async function initializeDB() {
   const success = await connectDB();
   if (success) {
     // Only start the interval if initial connection was successful
     intervalId = setInterval(ensureAuthenticated, CHECK_INTERVAL);
   } else {
-    console.error("Failed to initialize database connection");
+    logger.error("Failed to initialize database connection");
     // Optionally implement retry logic here
   }
 }
 export function cleanup() {
   if (intervalId) {
     clearInterval(intervalId);
   }
   return db.close();
 }
 await initializeDB();
@@ -91,7 +93,7 @@ await initializeDB();
 export const surreal = db as Surreal;
 export function parseToRID(idStr: string) {
   return new StringRecordId(idStr);
 }
 process.on("SIGTERM", cleanup);


@@ -1,11 +1,11 @@
+import { env } from "$env/dynamic/private";
+import { logger } from "$lib/server/logger";
 import { getULID, sleep } from "$lib/utils";
 import { baseDistributorId, constants } from "$lib/utils/constants";
 import type { BookingEntry, Draw, LooseApiUser } from "$lib/utils/data.types";
 import { rng } from "$lib/utils/rng";
-import { env } from "$env/dynamic/private";
-import { logger } from "$lib/server/logger";
+import fs from "fs";
 import fetch from "node-fetch";
-// import fs from "fs";
 // function dumpDistributors(distributors: LooseApiUser[]) {
 //   fs.writeFileSync("distributors.json", JSON.stringify(distributors, null, 2));
@@ -15,19 +15,38 @@ import fetch from "node-fetch";
 //   fs.writeFileSync("dealers.json", JSON.stringify(dealers, null, 2));
 // }
+function dumpDistributorsRaw(distributors: any) {
+  fs.writeFileSync("distributors_raw.json", JSON.stringify(distributors, null, 2));
+}
+function dumpDealersRaw(dealers: any) {
+  fs.writeFileSync("dealers_raw.json", JSON.stringify(dealers, null, 2));
+}
 export const testIfSessionIsValid = async (jwt: string) => {
+  const scrapingbeeApiKey = env.SCRAPINGBEE_API_KEY;
+  if (!scrapingbeeApiKey) {
+    logger.error("[testIfSessionIsValid] ScrapingBee API key not configured");
+    return false;
+  }
   try {
-    const res = await fetch(
-      `${constants.PROXY_API_URL}/v1/user/get-balance?userId=${baseDistributorId}&authorization=${encodeURIComponent(jwt)}`,
-      {
-        headers: {
-          "Content-Type": "application/json",
-        },
-      },
-    );
-    if (res.status !== 200) {
+    const targetUrl = `${constants.SCRAP_API_URL}/v1/user/get-balance?userId=${baseDistributorId}`;
+    const scrapingbeeUrl = new URL("https://app.scrapingbee.com/api/v1");
+    scrapingbeeUrl.searchParams.set("api_key", scrapingbeeApiKey);
+    scrapingbeeUrl.searchParams.set("url", targetUrl);
+    scrapingbeeUrl.searchParams.set("forward_headers", "true");
+    scrapingbeeUrl.searchParams.set("forward_headers_pure", "true");
+    const res = await fetch(scrapingbeeUrl.toString(), {
+      headers: { "Spb-Authorization": jwt },
+    });
+    if (res.status !== 200 || !res.ok) {
       return false;
     }
     const rj = (await res.json()) as {
       code: number;
       success: boolean;
@@ -35,9 +54,10 @@ export const testIfSessionIsValid = async (jwt: string) => {
       data: any;
       time: string;
     };
+    logger.debug(`[testIfSessionIsValid] response from api : ${JSON.stringify(rj)}`);
     return rj.code == 200 && rj.success;
   } catch (err) {
-    console.log(err);
+    logger.error("[testIfSessionIsValid] Error testing session validity", err);
     return false;
   }
 };
@@ -56,38 +76,42 @@ export const getSessionToken = async (payload: {
   }
   const targetUrl = `${constants.SCRAP_API_URL}/v1/auth/login`;
   logger.info(`[getSessionToken] Requesting session token for user ${payload.userId}`);
   const scrapingbeeUrl = new URL("https://app.scrapingbee.com/api/v1");
   scrapingbeeUrl.searchParams.set("api_key", scrapingbeeApiKey);
   scrapingbeeUrl.searchParams.set("url", targetUrl);
   scrapingbeeUrl.searchParams.set("forward_headers", "true");
-  scrapingbeeUrl.searchParams.set("render_js", "false");
-  scrapingbeeUrl.searchParams.set("block_resources", "false");
+  scrapingbeeUrl.searchParams.set("forward_headers_pure", "true");
+  // Prefix headers with Spb- for ScrapingBee to forward them
+  const forwardHeaders = Object.fromEntries(
+    Object.entries(constants.SCRAP_API_BASE_HEADERS).map(([key, value]) => [`Spb-${key}`, value]),
+  );
   try {
     const res = await fetch(scrapingbeeUrl.toString(), {
       method: "POST",
       body: JSON.stringify(payload),
-      headers: {
-        "Spb-Content-Type": "application/json",
-      }
+      headers: { "Spb-Content-Type": "application/json", ...forwardHeaders },
     });
     if (!res.ok) {
       const errorText = await res.text().catch(() => "Unknown error");
-      logger.error(`[getSessionToken] ScrapingBee error ${res.status}: ${errorText.substring(0, 200)}`);
+      logger.error(
+        `[getSessionToken] ScrapingBee error ${res.status}: ${errorText.substring(0, 200)}`,
+      );
       return { ok: false, message: `Failed to get session token: ${res.status}` };
     }
-    const out = await res.json() as any;
-    if (out.code !== 200 || !out.ok) {
+    const out = (await res.json()) as any;
+    if (out.code !== 200) {
       logger.warn(`[getSessionToken] API returned error: ${out.message}`);
       return { ok: false, message: out.message };
     }
     logger.info(`[getSessionToken] Successfully obtained session token for user ${payload.userId}`);
     return { ok: true, message: out.data.token };
   } catch (err) {
@@ -97,15 +121,30 @@ export const getSessionToken = async (payload: {
 };
 export async function getUsersBalance(userId: number, jwt: string) {
+  const scrapingbeeApiKey = env.SCRAPINGBEE_API_KEY;
+  if (!scrapingbeeApiKey) {
+    logger.error("[getUsersBalance] ScrapingBee API key not configured");
+    return false;
+  }
+  // Prefix headers with Spb- for ScrapingBee to forward them
+  const forwardHeaders = Object.fromEntries(
+    Object.entries(constants.SCRAP_API_BASE_HEADERS).map(([key, value]) => [`Spb-${key}`, value]),
+  );
   try {
-    const res = await fetch(
-      `${constants.PROXY_API_URL}/v1/user/get-balance?userId=${userId}&authorization=${encodeURIComponent(jwt)}`,
-      {
-        headers: {
-          "Content-Type": "application/json",
-        },
-      },
-    );
+    const targetUrl = `${constants.SCRAP_API_URL}/v1/user/get-balance?userId=${userId}`;
+    const scrapingbeeUrl = new URL("https://app.scrapingbee.com/api/v1");
+    scrapingbeeUrl.searchParams.set("api_key", scrapingbeeApiKey);
+    scrapingbeeUrl.searchParams.set("url", targetUrl);
+    scrapingbeeUrl.searchParams.set("forward_headers", "true");
+    scrapingbeeUrl.searchParams.set("forward_headers_pure", "true");
+    const res = await fetch(scrapingbeeUrl.toString(), {
+      headers: { "Spb-Authorization": jwt, ...forwardHeaders },
+    });
    const rj = (await res.json()) as {
      code: number;
      success: boolean;
@@ -114,35 +153,47 @@ export async function getUsersBalance(userId: number, jwt: string) {
      time: string;
    };
    if (res.status !== 200 || rj.code !== 200 || !rj.success) {
-      console.log(`[!] Error getting balance for ${userId} :: ${JSON.stringify(rj)}`);
+      logger.warn(`[getUsersBalance] Error getting balance for ${userId}: ${rj.message}`);
      return false;
    }
    return rj.data.balance;
  } catch (err) {
-    console.log(err);
+    logger.error(`[getUsersBalance] Error getting balance for ${userId}`, err);
    return false;
  }
 }
 export const getDealers = async (jwt: string, distributor_ids: string[]) => {
+  const scrapingbeeApiKey = env.SCRAPINGBEE_API_KEY;
+  if (!scrapingbeeApiKey) {
+    logger.error("[getDealers] ScrapingBee API key not configured");
+    return {
+      dealers: [],
+      errors: [{ message: "ScrapingBee API key not configured" }],
+    };
+  }
   try {
-    // // Create an array of promises for each fetch request
     const requests = distributor_ids.map(async (did) => {
       await sleep(rng(100, 10000));
-      const res = await fetch(
-        `${constants.PROXY_API_URL}/v1/user/dealer-list?authorization=${encodeURIComponent(jwt)}`,
-        {
-          method: "POST",
-          headers: {
-            "Content-Type": "application/json",
-          },
-          body: JSON.stringify({
-            page: 1,
-            pageSize: 999999,
-            parentDistributor: parseInt(did),
-          }),
-        },
-      );
+      const targetUrl = `${constants.SCRAP_API_URL}/v1/user/dealer-list`;
+      const scrapingbeeUrl = new URL("https://app.scrapingbee.com/api/v1");
+      scrapingbeeUrl.searchParams.set("api_key", scrapingbeeApiKey);
+      scrapingbeeUrl.searchParams.set("url", targetUrl);
+      scrapingbeeUrl.searchParams.set("forward_headers", "true");
+      scrapingbeeUrl.searchParams.set("forward_headers_pure", "true");
+      const res = await fetch(scrapingbeeUrl.toString(), {
+        method: "POST",
+        headers: { "Spb-Authorization": jwt, "Spb-Content-Type": "application/json" },
+        body: JSON.stringify({
+          page: 1,
+          pageSize: 999999,
+          parentDistributor: parseInt(did),
+        }),
+      });
      const data = (await res.json()) as {
        code: number;
        success: boolean;
@@ -152,6 +203,9 @@ export const getDealers = async (jwt: string, distributor_ids: string[]) => {
          total: number;
        };
      };
+      dumpDealersRaw(data);
      if (data.code !== 200 || !data.success) {
        return {
          dealers: [],
@@ -162,8 +216,6 @@ export const getDealers = async (jwt: string, distributor_ids: string[]) => {
      }
      const dealers = data.data.items.map((item) => item.dealer);
-      // dumpDealers(dealers);
      return {
        dealers,
        ok: res.status === 200 && data.success,
@@ -171,7 +223,7 @@ export const getDealers = async (jwt: string, distributor_ids: string[]) => {
        message: data.message,
      };
    });
-    // // Wait for all promises to resolve
    const responses = await Promise.all(requests);
    const dealers: LooseApiUser[] = [];
    const errors: { message: string }[] = [];
@@ -185,11 +237,12 @@ export const getDealers = async (jwt: string, distributor_ids: string[]) => {
      }
    }
-    // fs.writeFileSync("dealers.json", JSON.stringify(dealers, null, 2));
+    logger.info(
+      `[getDealers] Fetched ${dealers.length} dealers from ${distributor_ids.length} distributors`,
+    );
    return { dealers, errors };
  } catch (err) {
-    console.error(err);
+    logger.error("[getDealers] Error fetching dealers", err);
    return {
      dealers: [],
      errors: [{ message: "An error occured during fetching dealers" }],
@@ -198,55 +251,67 @@ export const getDealers = async (jwt: string, distributor_ids: string[]) => {
 };
 export const getDistributors = async (jwt: string) => {
-  const res = await fetch(
-    `${constants.PROXY_API_URL}/v1/user/distributor-list?authorization=${encodeURIComponent(jwt)}`,
-    {
-      method: "POST",
-      headers: {
-        "Content-Type": "application/json",
-      },
-      body: JSON.stringify({
-        page: 1,
-        pageSize: 999999,
-        parentDistributor: 15,
-      }),
-    },
-  );
-  const json = (await res.json()) as {
+  const scrapingbeeApiKey = env.SCRAPINGBEE_API_KEY;
+  if (!scrapingbeeApiKey) {
+    logger.error("[getDistributors] ScrapingBee API key not configured");
+    return { ok: false, message: "ScrapingBee API key not configured", data: [] };
+  }
+  const targetUrl = `${constants.SCRAP_API_URL}/v1/user/distributor-list`;
+  const scrapingbeeUrl = new URL("https://app.scrapingbee.com/api/v1");
+  scrapingbeeUrl.searchParams.set("api_key", scrapingbeeApiKey);
+  scrapingbeeUrl.searchParams.set("url", targetUrl);
+  scrapingbeeUrl.searchParams.set("forward_headers", "true");
+  scrapingbeeUrl.searchParams.set("forward_headers_pure", "true");
+  const res = await fetch(scrapingbeeUrl.toString(), {
+    method: "POST",
+    headers: { "Spb-Authorization": jwt, "Spb-Content-Type": "application/json" },
+    body: JSON.stringify({
+      page: 1,
+      pageSize: 999999,
+      parentDistributor: 15,
+    }),
+  });
+  const data = (await res.json()) as {
    code: number;
    success: boolean;
    message: string;
    data: { total: number; items: any[] };
  };
-  if (!json.data.items || json.code !== 200 || !json.success) {
-    return { ok: false, message: json.message, data: [] };
+  dumpDistributorsRaw(data);
+  if (!data.data.items || data.code !== 200 || !data.success) {
+    logger.warn(`[getDistributors] Error: ${data.message}`);
+    return { ok: false, message: data.message, data: [] };
  }
-  // fs.writeFileSync(
-  //   "distributors.json",
-  //   JSON.stringify(json.data.items, null, 2),
-  // );
-  // dumpDistributors(json.data.items.map((item) => item.distributor));
+  logger.info(`[getDistributors] Fetched ${data.data.items.length} distributors`);
  return {
    ok: true,
    message: "",
-    data: json.data.items.map((item) => item.distributor),
+    data: data.data.items.map((item) => item.distributor),
  };
 };
 export const getDraws = async (jwt: string) => {
-  const res = await fetch(
-    `${constants.PROXY_API_URL}/v1/draw/list-my?userId=15&authorization=${encodeURIComponent(jwt)}`,
-    {
-      method: "GET",
-      headers: {
-        "Content-Type": "application/json",
-      },
-    },
-  );
+  const scrapingbeeApiKey = env.SCRAPINGBEE_API_KEY;
+  logger.info(`[getDraws] Fetching draws from the API`);
+  const targetUrl = `${constants.SCRAP_API_URL}/v1/draw/list-my?userId=15`;
+  const scrapingbeeUrl = new URL("https://app.scrapingbee.com/api/v1");
+  scrapingbeeUrl.searchParams.set("api_key", scrapingbeeApiKey);
+  scrapingbeeUrl.searchParams.set("url", targetUrl);
+  scrapingbeeUrl.searchParams.set("forward_headers", "true");
+  scrapingbeeUrl.searchParams.set("forward_headers_pure", "true");
+  const res = await fetch(scrapingbeeUrl.toString(), {
+    headers: { "Spb-Authorization": jwt },
+  });
  type J = {
    code: number;
    success: boolean;
@@ -256,8 +321,11 @@ export const getDraws = async (jwt: string) => {
  let decoded = (await res.json()) as { data: J };
  const json = (decoded.data.success ? decoded.data : decoded) as any as J;
  if (json.code !== 200 || !json.success || !json.data) {
+    logger.warn(`[getDraws] Error: ${json.message}`);
    return { ok: false, message: json.message, data: [] };
  }
+  logger.info(`[getDraws] Fetched ${json.data.length} draws`);
  return {
    ok: true,
    message: "",
@@ -271,25 +339,34 @@ export const getData = async (
   drawId: number,
   chosenDate: string,
 ) => {
-  const res = await fetch(
-    `${constants.PROXY_API_URL}/v1/book/list2?authorization=${encodeURIComponent(jwt)}`,
-    {
-      method: "POST",
-      headers: {
-        "Content-Type": "application/json",
-      },
-      body: JSON.stringify({
-        userType: 3,
-        userIds,
-        drawId: drawId,
-        startDate: chosenDate,
-        endDate: chosenDate,
-        beAdmin: false,
-        containImported: false,
-        keyword: "",
-      }),
-    },
-  );
+  const scrapingbeeApiKey = env.SCRAPINGBEE_API_KEY;
+  if (!scrapingbeeApiKey) {
+    logger.error("[getData] ScrapingBee API key not configured");
+    return { ok: false, message: "ScrapingBee API key not configured", data: [] };
+  }
+  const targetUrl = `${constants.SCRAP_API_URL}/v1/book/list2`;
+  const scrapingbeeUrl = new URL("https://app.scrapingbee.com/api/v1");
+  scrapingbeeUrl.searchParams.set("api_key", scrapingbeeApiKey);
+  scrapingbeeUrl.searchParams.set("url", targetUrl);
+  scrapingbeeUrl.searchParams.set("forward_headers", "true");
+  scrapingbeeUrl.searchParams.set("forward_headers_pure", "true");
+  const res = await fetch(scrapingbeeUrl.toString(), {
+    method: "POST",
+    headers: { "Spb-Authorization": jwt },
+    body: JSON.stringify({
+      userType: 3,
+      userIds,
+      drawId: drawId,
+      startDate: chosenDate,
+      endDate: chosenDate,
+      beAdmin: false,
+      containImported: false,
+      keyword: "",
+    }),
+  });
  type J = {
    code: number;
    success: boolean;
@@ -299,8 +376,11 @@ export const getData = async (
  let decoded = (await res.json()) as { data: J };
  const json = (decoded.data.success ? decoded.data : decoded) as any as J;
  if (json.code !== 200 || !json.success || !json.data) {
+    logger.warn(`[getData] Error: ${json.message}`);
    return { ok: false, message: json.message, data: [] };
  }
+  logger.info(`[getData] Fetched ${json.data.length} booking entries for ${userIds.length} users`);
  return { ok: true, message: "", data: json.data.map((e) => e.book) };
 };


@@ -7,6 +7,8 @@ import type {
   PostDataEntry,
   ServerError,
 } from "$lib/utils/data.types";
+import { env } from "$env/dynamic/private";
+import { logger } from "$lib/server/logger";
 export type APIResponse<T> = {
   code: number;
@@ -60,13 +62,13 @@ export async function postDataToApi(payload: {
    let failedResponses = 0;
    let successResponses = 0;
-    console.log(`[+] Sending ${payload.data.length} requests...`);
+    logger.info(`[postDataToApi] Sending ${payload.data.length} requests...`);
    const dataByUser = {} as Record<string, PostDataEntry[]>;
    for (const row of payload.data) {
      const userId = row.userId ?? "";
      if (userId.length < 1) {
-        console.log(`[!] User not found for request ${row.userId}`);
+        logger.warn(`[postDataToApi] User not found for request ${row.userId}`);
        return {
          ok: false,
          detail: "User not found to post data with",
@@ -121,8 +123,8 @@ export async function postDataToApi(payload: {
        try {
          rj = (await res.json()) as any;
        } catch (err) {
-          console.log("Encountered error while parsing post response");
-          console.log(res.status, err);
+          logger.error("[postDataToApi] Encountered error while parsing post response", err);
+          logger.error(`[postDataToApi] Response status: ${res.status}`);
        }
        if (rj && rj.code === 200 && res.status === 200) {
          ptr = jumped;
@@ -135,8 +137,7 @@ export async function postDataToApi(payload: {
          successResponses++;
          break;
        }
-        console.log("Failed to send send post request");
-        console.log(res.status, rj);
+        logger.warn(`[postDataToApi] Failed to send post request, status: ${res.status}`, rj);
        failedResponses++;
        tries++;
      }
@@ -150,7 +151,8 @@ export async function postDataToApi(payload: {
          drawId,
          session,
        });
-        console.log(await out.text());
+        const deleteResponse = await out.text();
+        logger.warn(`[postDataToApi] Deleted entries after failure: ${deleteResponse.substring(0, 200)}`);
      }
      throw new Error(`Failed to send data to api for user ${userId}`);
    }
@@ -168,8 +170,7 @@ export async function postDataToApi(payload: {
        responsesIds.push(...result.value);
      } else {
        hasErrors = true;
-        console.log(`[!] Error processing user`);
-        console.log(result.reason);
+        logger.error("[postDataToApi] Error processing user", result.reason);
      }
    });
@@ -181,16 +182,15 @@ export async function postDataToApi(payload: {
      };
    }
-    console.log(`[+] Finished sending ${payload.data.length} requests`);
-    console.log(`[?] Failed responses: ${failedResponses}`);
-    console.log(`[?] Success responses: ${successResponses}`);
+    logger.info(`[postDataToApi] Finished sending ${payload.data.length} requests`);
+    logger.info(`[postDataToApi] Failed responses: ${failedResponses}, Success responses: ${successResponses}`);
    return {
      ok: true,
      detail: "Successfully sent data to api",
      data: responses,
    };
  } catch (err) {
-    console.log(err);
+    logger.error("[postDataToApi] Error sending data to API", err);
    return {
      ok: false,
      detail: "Failed to send data to api",
@@ -205,23 +205,41 @@ async function sendBatchRequest(
   changedBalance: number,
   body: string,
 ) {
-  return fetch(
-    `${constants.PROXY_API_URL}/v1/book/add-multiple?authorization=${encodeURIComponent(session.sessionToken)}`,
-    {
-      method: "POST",
-      headers: {
-        "Content-Type": "application/json",
-      },
-      body: JSON.stringify({
-        dealerId,
-        drawId: Number(draw.id.split(":")[1]),
-        closeTime: draw.closeTime,
-        date: new Date().toISOString().split("T")[0],
-        changedBalance,
-        insertData: body,
-      }),
-    },
-  );
+  const scrapingbeeApiKey = env.SCRAPINGBEE_API_KEY;
+  if (!scrapingbeeApiKey) {
+    logger.error("[sendBatchRequest] ScrapingBee API key not configured");
+    throw new Error("ScrapingBee API key not configured");
+  }
+  const targetUrl = `${constants.SCRAP_API_URL}/v1/book/add-multiple`;
+  const scrapingbeeUrl = new URL("https://app.scrapingbee.com/api/v1");
+  scrapingbeeUrl.searchParams.set("api_key", scrapingbeeApiKey);
+  scrapingbeeUrl.searchParams.set("url", targetUrl);
+  scrapingbeeUrl.searchParams.set("forward_headers", "true");
+  // Prefix headers with Spb- for ScrapingBee to forward them
+  const forwardHeaders = Object.fromEntries(
+    Object.entries(constants.SCRAP_API_BASE_HEADERS).map(([key, value]) => [`Spb-${key}`, value]),
+  );
+  logger.debug(`[sendBatchRequest] Sending batch request for dealer ${dealerId}, draw ${draw.id}`);
+  return fetch(scrapingbeeUrl.toString(), {
+    method: "POST",
+    headers: {
+      "Spb-Authorization": session.sessionToken,
+      "Spb-Content-Type": "application/json",
+      ...forwardHeaders,
+    },
+    body: JSON.stringify({
+      dealerId,
+      drawId: Number(draw.id.split(":")[1]),
+      closeTime: draw.closeTime,
+      date: new Date().toISOString().split("T")[0],
+      changedBalance,
+      insertData: body,
+    }),
+  });
 }
 async function mockSendBatchRequest(
@@ -292,21 +310,39 @@ async function deleteAllBookedEntries({
   drawId: number;
   closeTime: string;
 }) {
-  return fetch(
-    `${constants.PROXY_API_URL}/v1/book/delete-multiple?authorization=${encodeURIComponent(session.sessionToken)}`,
-    {
-      method: "POST",
-      headers: {
-        "Content-Type": "application/json",
-      },
-      body: JSON.stringify({
-        bookIds: data.map((e) => e.bookId),
-        closeTime,
-        dealerId,
-        drawId,
-      }),
-    },
-  );
+  const scrapingbeeApiKey = env.SCRAPINGBEE_API_KEY;
+  if (!scrapingbeeApiKey) {
+    logger.error("[deleteAllBookedEntries] ScrapingBee API key not configured");
+    throw new Error("ScrapingBee API key not configured");
+  }
+  const targetUrl = `${constants.SCRAP_API_URL}/v1/book/delete-multiple`;
+  const scrapingbeeUrl = new URL("https://app.scrapingbee.com/api/v1");
+  scrapingbeeUrl.searchParams.set("api_key", scrapingbeeApiKey);
+  scrapingbeeUrl.searchParams.set("url", targetUrl);
+  scrapingbeeUrl.searchParams.set("forward_headers", "true");
+  // Prefix headers with Spb- for ScrapingBee to forward them
+  const forwardHeaders = Object.fromEntries(
+    Object.entries(constants.SCRAP_API_BASE_HEADERS).map(([key, value]) => [`Spb-${key}`, value]),
+  );
+  logger.debug(`[deleteAllBookedEntries] Deleting ${data.length} entries for dealer ${dealerId}, draw ${drawId}`);
+  return fetch(scrapingbeeUrl.toString(), {
+    method: "POST",
+    headers: {
+      "Spb-Authorization": session.sessionToken,
+      "Spb-Content-Type": "application/json",
+      ...forwardHeaders,
+    },
+    body: JSON.stringify({
+      bookIds: data.map((e) => e.bookId),
+      closeTime,
+      dealerId,
+      drawId,
+    }),
+  });
 }
 // export async function postDataToApi(payload: {


@@ -19,12 +19,12 @@ export const apiUserRouter = createTRPCRouter({
   }),
   getAllDistributorsCount: protectedProcedure.query(async () => {
     const count = await dbApiUser.getUserTypeCount(ApiUserTypes.DISTRIBUTOR);
-    logger.debug(`[getAllDistributorsCount] Count: ${count}`);
+    logger.debug(`[getAllDistributorsCount] Count: ${JSON.stringify(count)}`);
     return count;
   }),
   getAllDealersCount: protectedProcedure.query(async () => {
     const count = await dbApiUser.getUserTypeCount(ApiUserTypes.DEALER);
-    logger.debug(`[getAllDealersCount] Count: ${count}`);
+    logger.debug(`[getAllDealersCount] Count: ${JSON.stringify(count)}`);
     return count;
   }),


@@ -10,19 +10,17 @@ export const constants = {
   SCRAP_API_SESSION_KEY: "SRAJWT",
   SCRAP_API_BASE_HEADERS: {
     Host: "gamebooking24.com",
-    "Sec-Ch-Ua": '"Not/A)Brand";v="8", "Chromium";v="126"',
-    "Sec-Ch-Ua-Mobile": "?0",
-    "Sec-Ch-Ua-Platform": '"Windows"',
+    "Sec-GPC": "1",
     "Sec-Fetch-Site": "cross-site",
-    "Sec-Fetch-Mode": "no-cors",
-    "Sec-Fetch-Dest": "image",
+    "Sec-Fetch-Mode": "cors",
+    "Sec-Fetch-Dest": "empty",
     "Accept-Encoding": "gzip, deflate",
-    "Accept-Language": "en-US,en;q=0.9",
+    "Accept-Language": "en-US,en;q=0.5",
     "Access-Control-Allow-Origin": "*",
     Accept: "application/json, text/plain, */*",
     Origin: "https://gamebooking24.com",
     Referer: "https://gamebooking24.com/",
-    Priority: "u=1, i",
+    Priority: "u=0",
   },
 };