ugh stashing before http-agenting
src/lib/server/external/api.scraping.helpers.ts (vendored, 82 changed lines)
@@ -107,7 +107,10 @@ export const getSessionToken = async (payload: {
     logger.error(
       `[getSessionToken] ScrapingBee error ${res.status}: ${errorText.substring(0, 200)}`,
     );
-    return { ok: false, message: `Failed to get session token: ${res.status}` };
+    return {
+      ok: false,
+      message: `Failed to get session token: ${res.status}`,
+    };
   }

   const out = (await res.json()) as any;
@@ -201,7 +204,10 @@ export const getDealers = async (jwt: string, distributor_ids: string[]) => {

   const res = await fetch(scrapingbeeUrl.toString(), {
     method: "POST",
-    headers: { "Spb-Authorization": jwt, "Spb-Content-Type": "application/json" },
+    headers: {
+      "Spb-Authorization": jwt,
+      "Spb-Content-Type": "application/json",
+    },
     body: JSON.stringify({
       page: 1,
       pageSize: 999999,
@@ -273,7 +279,11 @@ export const getDistributors = async (jwt: string) => {
   const scrapingbeeApiKey = env.SCRAPINGBEE_API_KEY;
   if (!scrapingbeeApiKey) {
     logger.error("[getDistributors] ScrapingBee API key not configured");
-    return { ok: false, message: "ScrapingBee API key not configured", data: [] };
+    return {
+      ok: false,
+      message: "ScrapingBee API key not configured",
+      data: [],
+    };
   }

   const targetUrl = `${constants.SCRAP_API_URL}/v1/user/distributor-list`;
@@ -285,7 +295,10 @@ export const getDistributors = async (jwt: string) => {

   const res = await fetch(scrapingbeeUrl.toString(), {
     method: "POST",
-    headers: { "Spb-Authorization": jwt, "Spb-Content-Type": "application/json" },
+    headers: {
+      "Spb-Authorization": jwt,
+      "Spb-Content-Type": "application/json",
+    },
     body: JSON.stringify({
       page: 1,
       pageSize: 999999,
@@ -358,34 +371,39 @@ export const getData = async (
   drawId: number,
   chosenDate: string,
 ) => {
   const scrapingbeeApiKey = env.SCRAPINGBEE_API_KEY;
   if (!scrapingbeeApiKey) {
     logger.error("[getData] ScrapingBee API key not configured");
     return { ok: false, message: "ScrapingBee API key not configured", data: [] };
   }

-  logger.info(`[getData] Fetching draw data from API for ${chosenDate} ${drawId}`);
-
+  const scraperApiKey = env.SCRAPER_API_KEY ?? "";
   const targetUrl = `${constants.SCRAP_API_URL}/v1/book/list2`;
-  const scrapingbeeUrl = new URL("https://app.scrapingbee.com/api/v1");
-  scrapingbeeUrl.searchParams.set("api_key", scrapingbeeApiKey);
-  scrapingbeeUrl.searchParams.set("url", targetUrl);
-  scrapingbeeUrl.searchParams.set("block_resources", "true");
-  scrapingbeeUrl.searchParams.set("forward_headers", "true");
-  scrapingbeeUrl.searchParams.set("forward_headers_pure", "true");
-  scrapingbeeUrl.searchParams.set("transparent_status_code", "true");
-
-  const forwardHeaders = Object.fromEntries(
-    Object.entries(constants.SCRAP_API_BASE_HEADERS).map(([key, value]) => [`Spb-${key}`, value]),
+  logger.info(
+    `[getData] Fetching draw data from API for ${chosenDate} ${drawId} for ${userIds.length} users`,
   );

-  const res = await fetch(scrapingbeeUrl.toString(), {
+  const proxyConfig = {
+    host: "proxy-server.scraperapi.com",
+    port: 8001,
+    auth: {
+      user: "scraperapi",
+      password: scraperApiKey,
+    },
+    protocol: "http",
+  };
+
+  const apiUrl = new URL(`https://api.scraperapi.com/`);
+  apiUrl.searchParams.append("api_key", scraperApiKey);
+  apiUrl.searchParams.append("url", targetUrl);
+  apiUrl.searchParams.append("follow_redirect", "false");
+  apiUrl.searchParams.append("keep_headers", "true");
+  apiUrl.searchParams.append("device_type", "desktop");
+
+  logger.debug(`[getData] Scraping for data at : ${apiUrl.toString()}`);
+
+  const res = await fetch(apiUrl.toString(), {
     method: "POST",
     headers: {
-      "Spb-Authorization": jwt,
-      "Spb-Content-Type": "application/json",
-      ...forwardHeaders,
-      "Spb-Accept-Encoding": "gzip, deflate, br, zstd",
+      Authorization: jwt,
+      "Content-Type": "application/json",
+      ...constants.SCRAP_API_BASE_HEADERS,
+      "Accept-Encoding": "gzip, deflate, br, zstd",
     },
     body: JSON.stringify({
       userType: 3,
@@ -406,10 +424,20 @@ export const getData = async (
       data: { book: BookingEntry; user: any }[];
     };

-  res.headers.forEach((value, key) => {
+  res.headers.forEach((value: string, key: string) => {
     logger.debug(`[getData] response headers - ${key}: ${value}`);
   });

+  if (res.headers.get("content-type") !== "application/json") {
+    logger.warn(`[getData] Error: Content type is not application/json`);
+    const rt = await res.text();
+    logger.debug(`[getData] Response: ${rt}`);
+    return {
+      ok: false,
+      message: `Content type is not application/json`,
+      data: [],
+    };
+  }
   let decoded = (await res.json()) as { data: J };

   dumpDataRaw(decoded, `getData_${chosenDate}_${drawId}`);
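
Note on why the Spb- prefix disappears in getData: ScrapingBee forwards only headers carrying the "Spb-" prefix (and only when forward_headers=true is set on the API URL), which is what the removed forwardHeaders remapping was for; ScraperAPI forwards the caller's headers verbatim when keep_headers=true, so plain Authorization / Content-Type headers work. A minimal sketch of the two conventions — buildScrapingBeeHeaders and buildScraperApiHeaders are hypothetical helpers, not part of this file:

// ScrapingBee convention: every header to forward gets the "Spb-" prefix.
const buildScrapingBeeHeaders = (
  jwt: string,
  base: Record<string, string>,
): Record<string, string> => ({
  "Spb-Authorization": jwt,
  "Spb-Content-Type": "application/json",
  ...Object.fromEntries(Object.entries(base).map(([k, v]) => [`Spb-${k}`, v])),
});

// ScraperAPI convention: with keep_headers=true, headers pass through as-is.
const buildScraperApiHeaders = (
  jwt: string,
  base: Record<string, string>,
): Record<string, string> => ({
  Authorization: jwt,
  "Content-Type": "application/json",
  ...base,
});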
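The new proxyConfig object is declared but never wired into the request — consistent with the commit title ("stashing before http-agenting"). One way it could be wired up later, assuming undici (whose fetch accepts a dispatcher option) and the proxy endpoint stashed in the config; sendViaProxy is a hypothetical helper, not part of this commit:

import { fetch, ProxyAgent } from "undici";

// Hypothetical helper: route the POST through the ScraperAPI proxy
// using the stashed proxyConfig values (host, port, auth, protocol).
const sendViaProxy = async (targetUrl: string, jwt: string, scraperApiKey: string) => {
  const dispatcher = new ProxyAgent(
    `http://scraperapi:${scraperApiKey}@proxy-server.scraperapi.com:8001`,
  );
  return fetch(targetUrl, {
    method: "POST",
    dispatcher, // undici-specific: sends the request via the proxy agent
    headers: { Authorization: jwt, "Content-Type": "application/json" },
    body: JSON.stringify({ userType: 3 }),
  });
};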
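One caveat on the content-type guard added in the last hunk: many servers send `application/json; charset=utf-8`, which the strict !== comparison rejects even though the body is JSON. A more tolerant drop-in variant (a suggestion, not what this commit does), using the same res and logger already in scope:

// Match on the media type and ignore any charset parameter.
const contentType = res.headers.get("content-type") ?? "";
if (!contentType.includes("application/json")) {
  logger.warn(`[getData] Unexpected content type: ${contentType}`);
}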