Almost done for the fetchers
8412 distributors_raw.json (Normal file)
File diff suppressed because it is too large
@@ -1,10 +1,10 @@
 import Redis from "ioredis";
+import { logger } from "../logger";

 const redisUrl = process.env.REDIS_URL ?? "";

-console.log(`\n\n[//] Redis URL: ${redisUrl}`);
+logger.info(`\n\n[//] Redis URL: ${redisUrl}`);

-const _redis =
-  redisUrl && redisUrl.length > 0 ? new Redis(redisUrl) : undefined;
+const _redis = redisUrl && redisUrl.length > 0 ? new Redis(redisUrl) : undefined;

 export const redis = _redis as Redis;
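A side note on the module's last line: because `_redis` is `undefined` whenever `REDIS_URL` is empty, the `as Redis` cast hides a possible runtime crash from consumers. A minimal consumer-side guard (a sketch only; the import path `../redis` is hypothetical) could look like:

    import { redis } from "../redis"; // hypothetical path to the module above

    // The module casts `undefined` to `Redis` when REDIS_URL is unset,
    // so check for a live client before calling into ioredis.
    export async function cacheGet(key: string): Promise<string | null> {
      if (!redis) {
        return null; // no Redis configured; treat every lookup as a miss
      }
      return redis.get(key); // ioredis resolves to the stored value or null
    }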
@@ -1,67 +1,69 @@
 import Surreal, { StringRecordId } from "surrealdb";
+import { logger } from "../logger";
 export type { QueryResult } from "surrealdb";

 try {
-  if (document || window) {
-    throw new Error("SurrealDB needs a NodeJS environment to run.");
-  }
-} catch (err) { }
+  if (document || window) {
+    throw new Error("SurrealDB needs a NodeJS environment to run.");
+  }
+} catch (err) {}

 const CONFIG = {
-  url: process.env.SURREAL_URL ?? "",
-  user: process.env.SURREAL_USER ?? "",
-  pass: process.env.SURREAL_PASS ?? "",
-  ns: process.env.SURREAL_NS ?? "",
-  db: process.env.SURREAL_DB ?? "",
+  url: process.env.SURREAL_URL ?? "",
+  user: process.env.SURREAL_USER ?? "",
+  pass: process.env.SURREAL_PASS ?? "",
+  ns: process.env.SURREAL_NS ?? "",
+  db: process.env.SURREAL_DB ?? "",
 } as const;

 const db = new Surreal();

 async function connectDB() {
-  try {
-    await db.connect(`http://${CONFIG.url}/rpc`);
-    await db.use({ namespace: CONFIG.ns, database: CONFIG.db });
-    await authenticateDB();
-    return true;
-  } catch (error) {
-    console.error("Error connecting to SurrealDB:", error);
-    return false;
-  }
+  try {
+    await db.connect(`http://${CONFIG.url}/rpc`);
+    await db.use({ namespace: CONFIG.ns, database: CONFIG.db });
+    await authenticateDB();
+    return true;
+  } catch (error) {
+    logger.error("Error connecting to SurrealDB:", error);
+    return false;
+  }
 }

 async function authenticateDB() {
-  try {
-    await db.signin({ username: CONFIG.user, password: CONFIG.pass });
-    console.log("🔑 Successfully authenticated with SurrealDB");
-    return true;
-  } catch (error) {
-    console.error("❌ Authentication failed:", error);
-    return false;
-  }
+  try {
+    await db.signin({ username: CONFIG.user, password: CONFIG.pass });
+    logger.info("🔑 Successfully authenticated with SurrealDB");
+    return true;
+  } catch (error) {
+    logger.warn("❌ Authentication failed:");
+    logger.error(error);
+    return false;
+  }
 }

 async function ensureAuthenticated() {
-  try {
-    await db.query("RETURN 1");
-  } catch (error: any) {
-    if (error.status === 401) {
-      console.warn("⚠️ SurrealDB Auth token expired. Attempting reconnection...");
-      try {
-        // Full reconnection instead of just re-authentication
-        await db.close();
-        const success = await connectDB();
-        if (success) {
-          console.log("✅ Successfully reconnected to database");
-        } else {
-          console.error("❌ Failed to reconnect to database");
-        }
-      } catch (reconnectError) {
-        console.error("❌ Reconnection failed:", reconnectError);
-      }
-    } else {
-      console.error("Unexpected database error:", error);
-    }
-  }
+  try {
+    await db.query("RETURN 1");
+  } catch (error: any) {
+    if (error.status === 401) {
+      logger.warn("⚠️ SurrealDB Auth token expired. Attempting reconnection...");
+      try {
+        // Full reconnection instead of just re-authentication
+        await db.close();
+        const success = await connectDB();
+        if (success) {
+          logger.info("✅ Successfully reconnected to database");
+        } else {
+          logger.error("❌ Failed to reconnect to database");
+        }
+      } catch (reconnectError) {
+        logger.error("❌ Reconnection failed:", reconnectError);
+      }
+    } else {
+      logger.error("Unexpected database error:", error);
+    }
+  }
 }

 // 1 minute
@@ -69,21 +71,21 @@ const CHECK_INTERVAL = 60 * 1000;
 let intervalId: NodeJS.Timeout;

 async function initializeDB() {
-  const success = await connectDB();
-  if (success) {
-    // Only start the interval if initial connection was successful
-    intervalId = setInterval(ensureAuthenticated, CHECK_INTERVAL);
-  } else {
-    console.error("Failed to initialize database connection");
-    // Optionally implement retry logic here
-  }
+  const success = await connectDB();
+  if (success) {
+    // Only start the interval if initial connection was successful
+    intervalId = setInterval(ensureAuthenticated, CHECK_INTERVAL);
+  } else {
+    logger.error("Failed to initialize database connection");
+    // Optionally implement retry logic here
+  }
 }

 export function cleanup() {
-  if (intervalId) {
-    clearInterval(intervalId);
-  }
-  return db.close();
+  if (intervalId) {
+    clearInterval(intervalId);
+  }
+  return db.close();
 }

 await initializeDB();
@@ -91,7 +93,7 @@ await initializeDB();
 export const surreal = db as Surreal;

 export function parseToRID(idStr: string) {
-  return new StringRecordId(idStr);
+  return new StringRecordId(idStr);
 }

 process.on("SIGTERM", cleanup);
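For orientation, this is roughly how the exported handle and `parseToRID` would be consumed (a sketch; the module path and the record id are illustrative, not from this commit):

    import { surreal, parseToRID } from "./surreal"; // illustrative path

    // Select one record by its string record id, e.g. "draw:15".
    // StringRecordId makes the SDK treat the string as a full record id
    // rather than escaping it as a plain string value.
    async function getRecord(idStr: string) {
      return surreal.select(parseToRID(idStr));
    }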
298 src/lib/server/external/api.scraping.helpers.ts (vendored)
@@ -1,11 +1,11 @@
 import { env } from "$env/dynamic/private";
 import { logger } from "$lib/server/logger";
 import { getULID, sleep } from "$lib/utils";
 import { baseDistributorId, constants } from "$lib/utils/constants";
 import type { BookingEntry, Draw, LooseApiUser } from "$lib/utils/data.types";
 import { rng } from "$lib/utils/rng";
+import fs from "fs";
 import fetch from "node-fetch";
-// import fs from "fs";

 // function dumpDistributors(distributors: LooseApiUser[]) {
 //   fs.writeFileSync("distributors.json", JSON.stringify(distributors, null, 2));
@@ -15,19 +15,38 @@ import fetch from "node-fetch";
 //   fs.writeFileSync("dealers.json", JSON.stringify(dealers, null, 2));
 // }

+function dumpDistributorsRaw(distributors: any) {
+  fs.writeFileSync("distributors_raw.json", JSON.stringify(distributors, null, 2));
+}
+
+function dumpDealersRaw(dealers: any) {
+  fs.writeFileSync("dealers_raw.json", JSON.stringify(dealers, null, 2));
+}
+
 export const testIfSessionIsValid = async (jwt: string) => {
+  const scrapingbeeApiKey = env.SCRAPINGBEE_API_KEY;
+  if (!scrapingbeeApiKey) {
+    logger.error("[testIfSessionIsValid] ScrapingBee API key not configured");
+    return false;
+  }
+
   try {
-    const res = await fetch(
-      `${constants.PROXY_API_URL}/v1/user/get-balance?userId=${baseDistributorId}&authorization=${encodeURIComponent(jwt)}`,
-      {
-        headers: {
-          "Content-Type": "application/json",
-        },
-      },
-    );
-    if (res.status !== 200) {
+    const targetUrl = `${constants.SCRAP_API_URL}/v1/user/get-balance?userId=${baseDistributorId}`;
+
+    const scrapingbeeUrl = new URL("https://app.scrapingbee.com/api/v1");
+    scrapingbeeUrl.searchParams.set("api_key", scrapingbeeApiKey);
+    scrapingbeeUrl.searchParams.set("url", targetUrl);
+    scrapingbeeUrl.searchParams.set("forward_headers", "true");
+    scrapingbeeUrl.searchParams.set("forward_headers_pure", "true");
+
+    const res = await fetch(scrapingbeeUrl.toString(), {
+      headers: { "Spb-Authorization": jwt },
+    });
+
+    if (res.status !== 200 || !res.ok) {
       return false;
     }

     const rj = (await res.json()) as {
       code: number;
       success: boolean;
@@ -35,9 +54,10 @@ export const testIfSessionIsValid = async (jwt: string) => {
       data: any;
       time: string;
     };
+    logger.debug(`[testIfSessionIsValid] response from api : ${JSON.stringify(rj)}`);
     return rj.code == 200 && rj.success;
   } catch (err) {
-    console.log(err);
+    logger.error("[testIfSessionIsValid] Error testing session validity", err);
     return false;
   }
 };
@@ -56,38 +76,42 @@ export const getSessionToken = async (payload: {
 }

  const targetUrl = `${constants.SCRAP_API_URL}/v1/auth/login`;

  logger.info(`[getSessionToken] Requesting session token for user ${payload.userId}`);

  const scrapingbeeUrl = new URL("https://app.scrapingbee.com/api/v1");
  scrapingbeeUrl.searchParams.set("api_key", scrapingbeeApiKey);
  scrapingbeeUrl.searchParams.set("url", targetUrl);
  scrapingbeeUrl.searchParams.set("forward_headers", "true");
  scrapingbeeUrl.searchParams.set("render_js", "false");
  scrapingbeeUrl.searchParams.set("block_resources", "false");
+ scrapingbeeUrl.searchParams.set("forward_headers_pure", "true");

+ // Prefix headers with Spb- for ScrapingBee to forward them
+ const forwardHeaders = Object.fromEntries(
+   Object.entries(constants.SCRAP_API_BASE_HEADERS).map(([key, value]) => [`Spb-${key}`, value]),
+ );
+
  try {
    const res = await fetch(scrapingbeeUrl.toString(), {
      method: "POST",
      body: JSON.stringify(payload),
-     headers: {
-       "Spb-Content-Type": "application/json",
-     }
+     headers: { "Spb-Content-Type": "application/json", ...forwardHeaders },
    });

    if (!res.ok) {
      const errorText = await res.text().catch(() => "Unknown error");
-     logger.error(`[getSessionToken] ScrapingBee error ${res.status}: ${errorText.substring(0, 200)}`);
+     logger.error(
+       `[getSessionToken] ScrapingBee error ${res.status}: ${errorText.substring(0, 200)}`,
+     );
      return { ok: false, message: `Failed to get session token: ${res.status}` };
    }

-   const out = await res.json() as any;
+   const out = (await res.json()) as any;

-   if (out.code !== 200 || !out.ok) {
+   if (out.code !== 200) {
      logger.warn(`[getSessionToken] API returned error: ${out.message}`);
      return { ok: false, message: out.message };
    }

    logger.info(`[getSessionToken] Successfully obtained session token for user ${payload.userId}`);
    return { ok: true, message: out.data.token };
  } catch (err) {
@@ -97,15 +121,30 @@ export const getSessionToken = async (payload: {
 };

 export async function getUsersBalance(userId: number, jwt: string) {
+  const scrapingbeeApiKey = env.SCRAPINGBEE_API_KEY;
+  if (!scrapingbeeApiKey) {
+    logger.error("[getUsersBalance] ScrapingBee API key not configured");
+    return false;
+  }
+
+  // Prefix headers with Spb- for ScrapingBee to forward them
+  const forwardHeaders = Object.fromEntries(
+    Object.entries(constants.SCRAP_API_BASE_HEADERS).map(([key, value]) => [`Spb-${key}`, value]),
+  );
+
   try {
-    const res = await fetch(
-      `${constants.PROXY_API_URL}/v1/user/get-balance?userId=${userId}&authorization=${encodeURIComponent(jwt)}`,
-      {
-        headers: {
-          "Content-Type": "application/json",
-        },
-      },
-    );
+    const targetUrl = `${constants.SCRAP_API_URL}/v1/user/get-balance?userId=${userId}`;
+
+    const scrapingbeeUrl = new URL("https://app.scrapingbee.com/api/v1");
+    scrapingbeeUrl.searchParams.set("api_key", scrapingbeeApiKey);
+    scrapingbeeUrl.searchParams.set("url", targetUrl);
+    scrapingbeeUrl.searchParams.set("forward_headers", "true");
+    scrapingbeeUrl.searchParams.set("forward_headers_pure", "true");
+
+    const res = await fetch(scrapingbeeUrl.toString(), {
+      headers: { "Spb-Authorization": jwt, ...forwardHeaders },
+    });
+
     const rj = (await res.json()) as {
       code: number;
       success: boolean;
@@ -114,35 +153,47 @@ export async function getUsersBalance(userId: number, jwt: string) {
       time: string;
     };
     if (res.status !== 200 || rj.code !== 200 || !rj.success) {
-      console.log(`[!] Error getting balance for ${userId} :: ${JSON.stringify(rj)}`);
+      logger.warn(`[getUsersBalance] Error getting balance for ${userId}: ${rj.message}`);
       return false;
     }
     return rj.data.balance;
   } catch (err) {
-    console.log(err);
+    logger.error(`[getUsersBalance] Error getting balance for ${userId}`, err);
     return false;
   }
 }

 export const getDealers = async (jwt: string, distributor_ids: string[]) => {
+  const scrapingbeeApiKey = env.SCRAPINGBEE_API_KEY;
+  if (!scrapingbeeApiKey) {
+    logger.error("[getDealers] ScrapingBee API key not configured");
+    return {
+      dealers: [],
+      errors: [{ message: "ScrapingBee API key not configured" }],
+    };
+  }
+
   try {
     // // Create an array of promises for each fetch request
     const requests = distributor_ids.map(async (did) => {
       await sleep(rng(100, 10000));
-      const res = await fetch(
-        `${constants.PROXY_API_URL}/v1/user/dealer-list?authorization=${encodeURIComponent(jwt)}`,
-        {
-          method: "POST",
-          headers: {
-            "Content-Type": "application/json",
-          },
-          body: JSON.stringify({
-            page: 1,
-            pageSize: 999999,
-            parentDistributor: parseInt(did),
-          }),
-        },
-      );
+      const targetUrl = `${constants.SCRAP_API_URL}/v1/user/dealer-list`;
+      const scrapingbeeUrl = new URL("https://app.scrapingbee.com/api/v1");
+      scrapingbeeUrl.searchParams.set("api_key", scrapingbeeApiKey);
+      scrapingbeeUrl.searchParams.set("url", targetUrl);
+      scrapingbeeUrl.searchParams.set("forward_headers", "true");
+      scrapingbeeUrl.searchParams.set("forward_headers_pure", "true");
+
+      const res = await fetch(scrapingbeeUrl.toString(), {
+        method: "POST",
+        headers: { "Spb-Authorization": jwt, "Spb-Content-Type": "application/json" },
+        body: JSON.stringify({
+          page: 1,
+          pageSize: 999999,
+          parentDistributor: parseInt(did),
+        }),
+      });

       const data = (await res.json()) as {
         code: number;
         success: boolean;
@@ -152,6 +203,9 @@ export const getDealers = async (jwt: string, distributor_ids: string[]) => {
           total: number;
         };
       };
+
+      dumpDealersRaw(data);
+
       if (data.code !== 200 || !data.success) {
         return {
           dealers: [],
@@ -162,8 +216,6 @@ export const getDealers = async (jwt: string, distributor_ids: string[]) => {
       }
       const dealers = data.data.items.map((item) => item.dealer);

-      // dumpDealers(dealers);
-
       return {
         dealers,
         ok: res.status === 200 && data.success,
@@ -171,7 +223,7 @@ export const getDealers = async (jwt: string, distributor_ids: string[]) => {
         message: data.message,
       };
     });
-    // // Wait for all promises to resolve
+
     const responses = await Promise.all(requests);
     const dealers: LooseApiUser[] = [];
     const errors: { message: string }[] = [];
@@ -185,11 +237,12 @@ export const getDealers = async (jwt: string, distributor_ids: string[]) => {
       }
     }

-    // fs.writeFileSync("dealers.json", JSON.stringify(dealers, null, 2));
-
+    logger.info(
+      `[getDealers] Fetched ${dealers.length} dealers from ${distributor_ids.length} distributors`,
+    );
     return { dealers, errors };
   } catch (err) {
-    console.error(err);
+    logger.error("[getDealers] Error fetching dealers", err);
     return {
       dealers: [],
       errors: [{ message: "An error occured during fetching dealers" }],
@@ -198,55 +251,67 @@ export const getDealers = async (jwt: string, distributor_ids: string[]) => {
 };

 export const getDistributors = async (jwt: string) => {
-  const res = await fetch(
-    `${constants.PROXY_API_URL}/v1/user/distributor-list?authorization=${encodeURIComponent(jwt)}`,
-    {
-      method: "POST",
-      headers: {
-        "Content-Type": "application/json",
-      },
-      body: JSON.stringify({
-        page: 1,
-        pageSize: 999999,
-        parentDistributor: 15,
-      }),
-    },
-  );
-  const json = (await res.json()) as {
+  const scrapingbeeApiKey = env.SCRAPINGBEE_API_KEY;
+  if (!scrapingbeeApiKey) {
+    logger.error("[getDistributors] ScrapingBee API key not configured");
+    return { ok: false, message: "ScrapingBee API key not configured", data: [] };
+  }
+
+  const targetUrl = `${constants.SCRAP_API_URL}/v1/user/distributor-list`;
+  const scrapingbeeUrl = new URL("https://app.scrapingbee.com/api/v1");
+  scrapingbeeUrl.searchParams.set("api_key", scrapingbeeApiKey);
+  scrapingbeeUrl.searchParams.set("url", targetUrl);
+  scrapingbeeUrl.searchParams.set("forward_headers", "true");
+  scrapingbeeUrl.searchParams.set("forward_headers_pure", "true");
+
+  const res = await fetch(scrapingbeeUrl.toString(), {
+    method: "POST",
+    headers: { "Spb-Authorization": jwt, "Spb-Content-Type": "application/json" },
+    body: JSON.stringify({
+      page: 1,
+      pageSize: 999999,
+      parentDistributor: 15,
+    }),
+  });
+
+  const data = (await res.json()) as {
     code: number;
     success: boolean;
     message: string;
     data: { total: number; items: any[] };
   };

-  if (!json.data.items || json.code !== 200 || !json.success) {
-    return { ok: false, message: json.message, data: [] };
+  dumpDistributorsRaw(data);
+
+  if (!data.data.items || data.code !== 200 || !data.success) {
+    logger.warn(`[getDistributors] Error: ${data.message}`);
+    return { ok: false, message: data.message, data: [] };
   }

-  // fs.writeFileSync(
-  //   "distributors.json",
-  //   JSON.stringify(json.data.items, null, 2),
-  // );
-
-  // dumpDistributors(json.data.items.map((item) => item.distributor));
-
+  logger.info(`[getDistributors] Fetched ${data.data.items.length} distributors`);
   return {
     ok: true,
     message: "",
-    data: json.data.items.map((item) => item.distributor),
+    data: data.data.items.map((item) => item.distributor),
   };
 };

 export const getDraws = async (jwt: string) => {
-  const res = await fetch(
-    `${constants.PROXY_API_URL}/v1/draw/list-my?userId=15&authorization=${encodeURIComponent(jwt)}`,
-    {
-      method: "GET",
-      headers: {
-        "Content-Type": "application/json",
-      },
-    },
-  );
+  const scrapingbeeApiKey = env.SCRAPINGBEE_API_KEY;
+
+  logger.info(`[getDraws] Fetching draws from the API`);
+
+  const targetUrl = `${constants.SCRAP_API_URL}/v1/draw/list-my?userId=15`;
+  const scrapingbeeUrl = new URL("https://app.scrapingbee.com/api/v1");
+  scrapingbeeUrl.searchParams.set("api_key", scrapingbeeApiKey);
+  scrapingbeeUrl.searchParams.set("url", targetUrl);
+  scrapingbeeUrl.searchParams.set("forward_headers", "true");
+  scrapingbeeUrl.searchParams.set("forward_headers_pure", "true");
+
+  const res = await fetch(scrapingbeeUrl.toString(), {
+    headers: { "Spb-Authorization": jwt },
+  });
+
   type J = {
     code: number;
     success: boolean;
@@ -256,8 +321,11 @@ export const getDraws = async (jwt: string) => {
   let decoded = (await res.json()) as { data: J };
   const json = (decoded.data.success ? decoded.data : decoded) as any as J;
   if (json.code !== 200 || !json.success || !json.data) {
+    logger.warn(`[getDraws] Error: ${json.message}`);
     return { ok: false, message: json.message, data: [] };
   }
+
+  logger.info(`[getDraws] Fetched ${json.data.length} draws`);
   return {
     ok: true,
     message: "",
@@ -271,25 +339,34 @@ export const getData = async (
   drawId: number,
   chosenDate: string,
 ) => {
-  const res = await fetch(
-    `${constants.PROXY_API_URL}/v1/book/list2?authorization=${encodeURIComponent(jwt)}`,
-    {
-      method: "POST",
-      headers: {
-        "Content-Type": "application/json",
-      },
-      body: JSON.stringify({
-        userType: 3,
-        userIds,
-        drawId: drawId,
-        startDate: chosenDate,
-        endDate: chosenDate,
-        beAdmin: false,
-        containImported: false,
-        keyword: "",
-      }),
-    },
-  );
+  const scrapingbeeApiKey = env.SCRAPINGBEE_API_KEY;
+  if (!scrapingbeeApiKey) {
+    logger.error("[getData] ScrapingBee API key not configured");
+    return { ok: false, message: "ScrapingBee API key not configured", data: [] };
+  }
+
+  const targetUrl = `${constants.SCRAP_API_URL}/v1/book/list2`;
+  const scrapingbeeUrl = new URL("https://app.scrapingbee.com/api/v1");
+  scrapingbeeUrl.searchParams.set("api_key", scrapingbeeApiKey);
+  scrapingbeeUrl.searchParams.set("url", targetUrl);
+  scrapingbeeUrl.searchParams.set("forward_headers", "true");
+  scrapingbeeUrl.searchParams.set("forward_headers_pure", "true");
+
+  const res = await fetch(scrapingbeeUrl.toString(), {
+    method: "POST",
+    headers: { "Spb-Authorization": jwt },
+    body: JSON.stringify({
+      userType: 3,
+      userIds,
+      drawId: drawId,
+      startDate: chosenDate,
+      endDate: chosenDate,
+      beAdmin: false,
+      containImported: false,
+      keyword: "",
+    }),
+  });

   type J = {
     code: number;
     success: boolean;
@@ -299,8 +376,11 @@ export const getData = async (
   let decoded = (await res.json()) as { data: J };
   const json = (decoded.data.success ? decoded.data : decoded) as any as J;
   if (json.code !== 200 || !json.success || !json.data) {
+    logger.warn(`[getData] Error: ${json.message}`);
     return { ok: false, message: json.message, data: [] };
   }
+
+  logger.info(`[getData] Fetched ${json.data.length} booking entries for ${userIds.length} users`);
   return { ok: true, message: "", data: json.data.map((e) => e.book) };
 };
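Each fetcher above rebuilds the same ScrapingBee proxy URL inline. A shared builder would capture that pattern in one place (a sketch only, not part of this commit; the helper name is hypothetical):

    // Builds the ScrapingBee proxy URL used by the fetchers above.
    // `extraParams` covers per-call flags such as render_js / block_resources.
    function buildScrapingBeeUrl(
      apiKey: string,
      targetUrl: string,
      extraParams: Record<string, string> = {},
    ): string {
      const url = new URL("https://app.scrapingbee.com/api/v1");
      url.searchParams.set("api_key", apiKey);
      url.searchParams.set("url", targetUrl);
      url.searchParams.set("forward_headers", "true");
      url.searchParams.set("forward_headers_pure", "true");
      for (const [key, value] of Object.entries(extraParams)) {
        url.searchParams.set(key, value);
      }
      return url.toString();
    }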
@@ -7,6 +7,8 @@ import type {
   PostDataEntry,
   ServerError,
 } from "$lib/utils/data.types";
+import { env } from "$env/dynamic/private";
+import { logger } from "$lib/server/logger";

 export type APIResponse<T> = {
   code: number;
@@ -60,13 +62,13 @@ export async function postDataToApi(payload: {
   let failedResponses = 0;
   let successResponses = 0;

-  console.log(`[+] Sending ${payload.data.length} requests...`);
+  logger.info(`[postDataToApi] Sending ${payload.data.length} requests...`);

   const dataByUser = {} as Record<string, PostDataEntry[]>;
   for (const row of payload.data) {
     const userId = row.userId ?? "";
     if (userId.length < 1) {
-      console.log(`[!] User not found for request ${row.userId}`);
+      logger.warn(`[postDataToApi] User not found for request ${row.userId}`);
       return {
         ok: false,
         detail: "User not found to post data with",
@@ -121,8 +123,8 @@ export async function postDataToApi(payload: {
       try {
         rj = (await res.json()) as any;
       } catch (err) {
-        console.log("Encountered error while parsing post response");
-        console.log(res.status, err);
+        logger.error("[postDataToApi] Encountered error while parsing post response", err);
+        logger.error(`[postDataToApi] Response status: ${res.status}`);
       }
       if (rj && rj.code === 200 && res.status === 200) {
         ptr = jumped;
@@ -135,8 +137,7 @@ export async function postDataToApi(payload: {
         successResponses++;
         break;
       }
-      console.log("Failed to send send post request");
-      console.log(res.status, rj);
+      logger.warn(`[postDataToApi] Failed to send post request, status: ${res.status}`, rj);
       failedResponses++;
       tries++;
     }
@@ -150,7 +151,8 @@ export async function postDataToApi(payload: {
         drawId,
         session,
       });
-      console.log(await out.text());
+      const deleteResponse = await out.text();
+      logger.warn(`[postDataToApi] Deleted entries after failure: ${deleteResponse.substring(0, 200)}`);
     }
     throw new Error(`Failed to send data to api for user ${userId}`);
   }
@@ -168,8 +170,7 @@ export async function postDataToApi(payload: {
         responsesIds.push(...result.value);
       } else {
         hasErrors = true;
-        console.log(`[!] Error processing user`);
-        console.log(result.reason);
+        logger.error("[postDataToApi] Error processing user", result.reason);
       }
     });

@@ -181,16 +182,15 @@ export async function postDataToApi(payload: {
       };
     }

-    console.log(`[+] Finished sending ${payload.data.length} requests`);
-    console.log(`[?] Failed responses: ${failedResponses}`);
-    console.log(`[?] Success responses: ${successResponses}`);
+    logger.info(`[postDataToApi] Finished sending ${payload.data.length} requests`);
+    logger.info(`[postDataToApi] Failed responses: ${failedResponses}, Success responses: ${successResponses}`);
     return {
       ok: true,
       detail: "Successfully sent data to api",
       data: responses,
     };
   } catch (err) {
-    console.log(err);
+    logger.error("[postDataToApi] Error sending data to API", err);
     return {
       ok: false,
       detail: "Failed to send data to api",
@@ -205,23 +205,41 @@ async function sendBatchRequest(
   changedBalance: number,
   body: string,
 ) {
-  return fetch(
-    `${constants.PROXY_API_URL}/v1/book/add-multiple?authorization=${encodeURIComponent(session.sessionToken)}`,
-    {
-      method: "POST",
-      headers: {
-        "Content-Type": "application/json",
-      },
-      body: JSON.stringify({
-        dealerId,
-        drawId: Number(draw.id.split(":")[1]),
-        closeTime: draw.closeTime,
-        date: new Date().toISOString().split("T")[0],
-        changedBalance,
-        insertData: body,
-      }),
-    },
-  );
+  const scrapingbeeApiKey = env.SCRAPINGBEE_API_KEY;
+  if (!scrapingbeeApiKey) {
+    logger.error("[sendBatchRequest] ScrapingBee API key not configured");
+    throw new Error("ScrapingBee API key not configured");
+  }
+
+  const targetUrl = `${constants.SCRAP_API_URL}/v1/book/add-multiple`;
+  const scrapingbeeUrl = new URL("https://app.scrapingbee.com/api/v1");
+  scrapingbeeUrl.searchParams.set("api_key", scrapingbeeApiKey);
+  scrapingbeeUrl.searchParams.set("url", targetUrl);
+  scrapingbeeUrl.searchParams.set("forward_headers", "true");
+
+  // Prefix headers with Spb- for ScrapingBee to forward them
+  const forwardHeaders = Object.fromEntries(
+    Object.entries(constants.SCRAP_API_BASE_HEADERS).map(([key, value]) => [`Spb-${key}`, value]),
+  );
+
+  logger.debug(`[sendBatchRequest] Sending batch request for dealer ${dealerId}, draw ${draw.id}`);
+
+  return fetch(scrapingbeeUrl.toString(), {
+    method: "POST",
+    headers: {
+      "Spb-Authorization": session.sessionToken,
+      "Spb-Content-Type": "application/json",
+      ...forwardHeaders,
+    },
+    body: JSON.stringify({
+      dealerId,
+      drawId: Number(draw.id.split(":")[1]),
+      closeTime: draw.closeTime,
+      date: new Date().toISOString().split("T")[0],
+      changedBalance,
+      insertData: body,
+    }),
+  });
 }

 async function mockSendBatchRequest(
@@ -292,21 +310,39 @@ async function deleteAllBookedEntries({
   drawId: number;
   closeTime: string;
 }) {
-  return fetch(
-    `${constants.PROXY_API_URL}/v1/book/delete-multiple?authorization=${encodeURIComponent(session.sessionToken)}`,
-    {
-      method: "POST",
-      headers: {
-        "Content-Type": "application/json",
-      },
-      body: JSON.stringify({
-        bookIds: data.map((e) => e.bookId),
-        closeTime,
-        dealerId,
-        drawId,
-      }),
-    },
-  );
+  const scrapingbeeApiKey = env.SCRAPINGBEE_API_KEY;
+  if (!scrapingbeeApiKey) {
+    logger.error("[deleteAllBookedEntries] ScrapingBee API key not configured");
+    throw new Error("ScrapingBee API key not configured");
+  }
+
+  const targetUrl = `${constants.SCRAP_API_URL}/v1/book/delete-multiple`;
+  const scrapingbeeUrl = new URL("https://app.scrapingbee.com/api/v1");
+  scrapingbeeUrl.searchParams.set("api_key", scrapingbeeApiKey);
+  scrapingbeeUrl.searchParams.set("url", targetUrl);
+  scrapingbeeUrl.searchParams.set("forward_headers", "true");
+
+  // Prefix headers with Spb- for ScrapingBee to forward them
+  const forwardHeaders = Object.fromEntries(
+    Object.entries(constants.SCRAP_API_BASE_HEADERS).map(([key, value]) => [`Spb-${key}`, value]),
+  );
+
+  logger.debug(`[deleteAllBookedEntries] Deleting ${data.length} entries for dealer ${dealerId}, draw ${drawId}`);
+
+  return fetch(scrapingbeeUrl.toString(), {
+    method: "POST",
+    headers: {
+      "Spb-Authorization": session.sessionToken,
+      "Spb-Content-Type": "application/json",
+      ...forwardHeaders,
+    },
+    body: JSON.stringify({
+      bookIds: data.map((e) => e.bookId),
+      closeTime,
+      dealerId,
+      drawId,
+    }),
+  });
 }

 // export async function postDataToApi(payload: {
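The Spb- prefixing comment and the `Object.fromEntries` mapping repeat verbatim in both functions above; factored out, the idea is simply this (a hypothetical helper, shown for clarity):

    // ScrapingBee forwards only headers prefixed with "Spb-" when
    // forward_headers is enabled, so prefix every outgoing header.
    function toForwardHeaders(headers: Record<string, string>): Record<string, string> {
      return Object.fromEntries(
        Object.entries(headers).map(([key, value]) => [`Spb-${key}`, value]),
      );
    }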
@@ -19,12 +19,12 @@ export const apiUserRouter = createTRPCRouter({
   }),
   getAllDistributorsCount: protectedProcedure.query(async () => {
     const count = await dbApiUser.getUserTypeCount(ApiUserTypes.DISTRIBUTOR);
-    logger.debug(`[getAllDistributorsCount] Count: ${count}`);
+    logger.debug(`[getAllDistributorsCount] Count: ${JSON.stringify(count)}`);
     return count;
   }),
   getAllDealersCount: protectedProcedure.query(async () => {
     const count = await dbApiUser.getUserTypeCount(ApiUserTypes.DEALER);
-    logger.debug(`[getAllDealersCount] Count: ${count}`);
+    logger.debug(`[getAllDealersCount] Count: ${JSON.stringify(count)}`);
     return count;
   }),
@@ -10,19 +10,17 @@ export const constants = {
   SCRAP_API_SESSION_KEY: "SRAJWT",
   SCRAP_API_BASE_HEADERS: {
     Host: "gamebooking24.com",
     "Sec-Ch-Ua": '"Not/A)Brand";v="8", "Chromium";v="126"',
     "Sec-Ch-Ua-Mobile": "?0",
     "Sec-Ch-Ua-Platform": '"Windows"',
     "Sec-GPC": "1",
     "Sec-Fetch-Site": "cross-site",
-    "Sec-Fetch-Mode": "no-cors",
-    "Sec-Fetch-Dest": "image",
+    "Sec-Fetch-Mode": "cors",
+    "Sec-Fetch-Dest": "empty",
     "Accept-Encoding": "gzip, deflate",
-    "Accept-Language": "en-US,en;q=0.9",
+    "Accept-Language": "en-US,en;q=0.5",
     "Access-Control-Allow-Origin": "*",
     Accept: "application/json, text/plain, */*",
     Origin: "https://gamebooking24.com",
     Referer: "https://gamebooking24.com/",
-    Priority: "u=1, i",
+    Priority: "u=0",
   },
 };