IT WORKSSSSSS... anyways migrating over
src/lib/server/external/api.scraping.helpers.ts (vendored, 59 lines changed)
@@ -2,6 +2,9 @@ import { getULID, sleep } from "$lib/utils";
 import { baseDistributorId, constants } from "$lib/utils/constants";
 import type { BookingEntry, Draw, LooseApiUser } from "$lib/utils/data.types";
 import { rng } from "$lib/utils/rng";
+import { env } from "$env/dynamic/private";
+import { logger } from "$lib/server/logger";
+import fetch from "node-fetch";
 // import fs from "fs";

 // function dumpDistributors(distributors: LooseApiUser[]) {
@@ -46,19 +49,51 @@ export const getSessionToken = async (payload: {
   code: string;
   userType: number;
 }): Promise<{ ok: boolean; message: string }> => {
-  console.log("Requesting...");
-  const res = await fetch(`${constants.PROXY_API_URL}/v1/auth/login`, {
-    method: "POST",
-    headers: {
-      "Content-Type": "application/json",
-    },
-    body: JSON.stringify(payload),
-  });
-  const out = await res.json();
-  if (out.code !== 200) {
-    return { ok: false, message: out.message };
+  const scrapingbeeApiKey = env.SCRAPINGBEE_API_KEY;
+  if (!scrapingbeeApiKey) {
+    logger.error("[getSessionToken] ScrapingBee API key not configured");
+    return { ok: false, message: "ScrapingBee API key not configured" };
+  }
+
+  const targetUrl = `${constants.SCRAP_API_URL}/v1/auth/login`;
+
+  logger.info(`[getSessionToken] Requesting session token for user ${payload.userId}`);
+
+  const scrapingbeeUrl = new URL("https://app.scrapingbee.com/api/v1");
+  scrapingbeeUrl.searchParams.set("api_key", scrapingbeeApiKey);
+  scrapingbeeUrl.searchParams.set("url", targetUrl);
+  scrapingbeeUrl.searchParams.set("forward_headers", "true");
+  scrapingbeeUrl.searchParams.set("render_js", "false");
+  scrapingbeeUrl.searchParams.set("block_resources", "false");
+
+  try {
+    const res = await fetch(scrapingbeeUrl.toString(), {
+      method: "POST",
+      body: JSON.stringify(payload),
+      headers: {
+        "Spb-Content-Type": "application/json",
+      }
+    });
+
+    if (!res.ok) {
+      const errorText = await res.text().catch(() => "Unknown error");
+      logger.error(`[getSessionToken] ScrapingBee error ${res.status}: ${errorText.substring(0, 200)}`);
+      return { ok: false, message: `Failed to get session token: ${res.status}` };
+    }
+
+    const out = await res.json() as any;
+
+    if (out.code !== 200 || !out.ok) {
+      logger.warn(`[getSessionToken] API returned error: ${out.message}`);
+      return { ok: false, message: out.message };
+    }
+
+    logger.info(`[getSessionToken] Successfully obtained session token for user ${payload.userId}`);
+    return { ok: true, message: out.data.token };
+  } catch (err) {
+    logger.error("[getSessionToken] Error getting session token", err);
+    return { ok: false, message: "Error getting session token" };
+  }
   }
-  return { ok: true, message: out.data.token };
 };

 export async function getUsersBalance(userId: number, jwt: string) {
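Note on the ScrapingBee call above: with forward_headers=true, ScrapingBee strips the Spb- prefix from request headers and passes the remainder through to the target URL, which is why the JSON content type is sent as Spb-Content-Type. The other helpers in this file (getUsersBalance and the rest) will presumably need the same plumbing once they migrate off the local proxy; a wrapper along the lines of the sketch below could keep it in one place. This is a sketch only, not part of the commit, and the scrapingbeeFetch name and options shape are hypothetical.

import fetch, { type Response } from "node-fetch";
import { env } from "$env/dynamic/private";

// Hypothetical wrapper: proxy a request to targetUrl through ScrapingBee.
// Caller-supplied headers are prefixed with "Spb-" so ScrapingBee strips the
// prefix and forwards them to the target.
async function scrapingbeeFetch(
  targetUrl: string,
  init: { method?: string; body?: string; headers?: Record<string, string> } = {},
): Promise<Response> {
  const apiKey = env.SCRAPINGBEE_API_KEY;
  if (!apiKey) throw new Error("SCRAPINGBEE_API_KEY is not configured");

  const url = new URL("https://app.scrapingbee.com/api/v1");
  url.searchParams.set("api_key", apiKey);
  url.searchParams.set("url", targetUrl);
  url.searchParams.set("forward_headers", "true");
  url.searchParams.set("render_js", "false");
  url.searchParams.set("block_resources", "false");

  // Prefix every caller-supplied header so ScrapingBee forwards it.
  const headers: Record<string, string> = {};
  for (const [name, value] of Object.entries(init.headers ?? {})) {
    headers[`Spb-${name}`] = value;
  }

  return fetch(url.toString(), {
    method: init.method ?? "GET",
    body: init.body,
    headers,
  });
}

With something like that in place, getSessionToken would reduce to building the payload and unpacking the { code, message, data } envelope, and the same path could serve getUsersBalance.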
@@ -6,7 +6,6 @@ import { getULID } from "$lib/utils";
import {
  type APISession,
  type ServerError,
  zPostDataEntry,
  zPostDataFilters,
  zPostDataHistoryFilters,
} from "$lib/utils/data.types";
@@ -7,7 +7,6 @@ export const constants = {
   POST_SESSION_KEY: "postsession",
   LAST_FETCHED_KEY: "LAST_FETCHED",
   SCRAP_API_URL: "https://gamebooking24.com/lottery-api",
-  PROXY_API_URL: "http://localhost:3070",
   SCRAP_API_SESSION_KEY: "SRAJWT",
   SCRAP_API_BASE_HEADERS: {
     Host: "gamebooking24.com",
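With PROXY_API_URL gone, anything that still sends constants.SCRAP_API_BASE_HEADERS straight to the lottery API will need the same Spb- forwarding convention as the login call above. Below is a minimal sketch of that translation, assuming ScrapingBee's forward_headers behaviour; the toForwardedHeaders helper is hypothetical and not part of this commit.

import { constants } from "$lib/utils/constants";

// Illustrative only: rewrite the base headers (e.g. Host) into their
// "Spb-"-prefixed form so ScrapingBee forwards them to the target site.
function toForwardedHeaders(base: Record<string, string>): Record<string, string> {
  return Object.fromEntries(
    Object.entries(base).map(([name, value]) => [`Spb-${name}`, value]),
  );
}

// toForwardedHeaders(constants.SCRAP_API_BASE_HEADERS)
// => { "Spb-Host": "gamebooking24.com", ... }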