data overloading????? maybe but yeah did fetchers before
data_raw_getData_2026-01-03_8.json (new file, +5 lines)
@@ -0,0 +1,5 @@
+{
+  "error": "Error with your request, please try again (you will not be charged for this request).You should: 1) check that your URL is correctly encoded 2) try with render_js=True (5 credits per request) 3) try with premium_proxy=True see documentation: https://www.scrapingbee.com/documentation#premium_proxy (10-25 credits per request) 4) try with stealth_proxy=True see documentation: https://www.scrapingbee.com/documentation#stealth_proxy (75 credits per request)Do not hesitate to check our troubleshooting guide:https://www.scrapingbee.com/help",
+  "reason": "Server responded with 500",
+  "help": "('Received response with content-encoding: br, but failed to decode it.', error("brotli: decoder process called with data when 'can_accept_more_data()' is False"))"
+}
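Note: this fixture is a raw dump from a failed getData call. The body is ScrapingBee's own error envelope (the upstream server answered 500 and the brotli-encoded body could not be decoded), not the application's usual { code, success, data } response. Below is a minimal sketch of a guard for that shape; the helper name isScrapingBeeError and the exact type are assumptions, not part of this commit.

// Sketch only (assumed shape, based on the fixture above): distinguish
// ScrapingBee's flat error envelope from the application's JSON envelope.
type ScrapingBeeError = { error: string; reason?: string; help?: string };

function isScrapingBeeError(body: unknown): body is ScrapingBeeError {
  return (
    typeof body === "object" &&
    body !== null &&
    typeof (body as Record<string, unknown>).error === "string"
  );
}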
src/lib/server/external/api.scraping.helpers.ts (vendored, 61 lines changed)
@@ -1,4 +1,5 @@
 import { env } from "$env/dynamic/private";
+import { chunkArray } from "$lib/server/array.chunk";
 import { logger } from "$lib/server/logger";
 import { getULID, sleep } from "$lib/utils";
 import { baseDistributorId, constants } from "$lib/utils/constants";
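Note: chunkArray is imported from $lib/server/array.chunk, whose implementation is not part of this diff. A minimal sketch of what such a helper typically looks like, purely as an assumption about that module:

// Sketch only: one plausible implementation of the chunkArray helper imported
// above. The real code lives in $lib/server/array.chunk and may differ.
export function chunkArray<T>(items: T[], size: number): T[][] {
  const chunks: T[][] = [];
  for (let i = 0; i < items.length; i += size) {
    chunks.push(items.slice(i, i + size));
  }
  return chunks;
}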
@@ -19,8 +20,12 @@ function dumpDistributorsRaw(distributors: any) {
   fs.writeFileSync("distributors_raw.json", JSON.stringify(distributors, null, 2));
 }
 
-function dumpDealersRaw(dealers: any) {
-  fs.writeFileSync("dealers_raw.json", JSON.stringify(dealers, null, 2));
+function dumpDealersRaw(dealers: any, prefix: string) {
+  fs.writeFileSync(`dealers_raw_${prefix}.json`, JSON.stringify(dealers, null, 2));
+}
+
+function dumpDataRaw(data: any, prefix: string) {
+  fs.writeFileSync(`data_raw_${prefix}.json`, JSON.stringify(data, null, 2));
 }
 
 export const testIfSessionIsValid = async (jwt: string) => {
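Note: the new prefix arguments are what produce per-request dump files rather than one file that keeps getting overwritten.

// Example (values taken from the fixture name at the top of this commit; the
// call itself appears later in this diff):
dumpDataRaw(decoded, `getData_${chosenDate}_${drawId}`);
// with chosenDate = "2026-01-03" and drawId = 8 this writes
// data_raw_getData_2026-01-03_8.json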
@@ -174,8 +179,18 @@ export const getDealers = async (jwt: string, distributor_ids: string[]) => {
   }
 
   try {
-    const requests = distributor_ids.map(async (did) => {
-      await sleep(rng(100, 10000));
+    const batches = chunkArray(distributor_ids, 5);
+    const allResponses: Array<{
+      dealers: any[];
+      ok: boolean;
+      code: number;
+      message: string;
+    }> = [];
+
+    // Process each batch sequentially
+    for (const batch of batches) {
+      const batchRequests = batch.map(async (did, index) => {
+        await sleep(rng(100, 2000));
 
       const targetUrl = `${constants.SCRAP_API_URL}/v1/user/dealer-list`;
       const scrapingbeeUrl = new URL("https://app.scrapingbee.com/api/v1");
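Note: the hunk above replaces a single Promise.all over every distributor (each request jittered by up to 10 s) with sequential batches of 5, each request jittered by up to 2 s. A generic sketch of the same pattern, with a hypothetical helper name, assuming chunkArray behaves as sketched earlier:

// Sketch only: the sequential-batch pattern used above, as a hypothetical
// generic helper. Batches run one after another; items within a batch run
// concurrently via Promise.all.
async function processInBatches<T, R>(
  items: T[],
  batchSize: number,
  worker: (item: T, index: number) => Promise<R>,
): Promise<R[]> {
  const results: R[] = [];
  for (const batch of chunkArray(items, batchSize)) {
    results.push(...(await Promise.all(batch.map(worker))));
  }
  return results;
}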
@@ -204,7 +219,7 @@ export const getDealers = async (jwt: string, distributor_ids: string[]) => {
         };
       };
 
-      dumpDealersRaw(data);
+      dumpDealersRaw(data, `batch_${index}_${did}`);
 
       if (data.code !== 200 || !data.success) {
         return {
@@ -224,10 +239,14 @@ export const getDealers = async (jwt: string, distributor_ids: string[]) => {
         };
       });
 
-    const responses = await Promise.all(requests);
+      // Wait for all requests in this batch to complete before moving to next batch
+      const batchResponses = await Promise.all(batchRequests);
+      allResponses.push(...batchResponses);
+    }
 
     const dealers: LooseApiUser[] = [];
     const errors: { message: string }[] = [];
-    for (const res of responses) {
+    for (const res of allResponses) {
       if (res.code !== 200 || !res.ok) {
         errors.push({ message: res.message });
         continue;
@@ -345,16 +364,29 @@ export const getData = async (
     return { ok: false, message: "ScrapingBee API key not configured", data: [] };
   }
 
+  logger.info(`[getData] Fetching draw data from API for ${chosenDate} ${drawId}`);
+
   const targetUrl = `${constants.SCRAP_API_URL}/v1/book/list2`;
   const scrapingbeeUrl = new URL("https://app.scrapingbee.com/api/v1");
   scrapingbeeUrl.searchParams.set("api_key", scrapingbeeApiKey);
   scrapingbeeUrl.searchParams.set("url", targetUrl);
+  scrapingbeeUrl.searchParams.set("block_resources", "true");
   scrapingbeeUrl.searchParams.set("forward_headers", "true");
   scrapingbeeUrl.searchParams.set("forward_headers_pure", "true");
+  scrapingbeeUrl.searchParams.set("transparent_status_code", "true");
+
+  const forwardHeaders = Object.fromEntries(
+    Object.entries(constants.SCRAP_API_BASE_HEADERS).map(([key, value]) => [`Spb-${key}`, value]),
+  );
 
   const res = await fetch(scrapingbeeUrl.toString(), {
     method: "POST",
-    headers: { "Spb-Authorization": jwt },
+    headers: {
+      "Spb-Authorization": jwt,
+      "Spb-Content-Type": "application/json",
+      ...forwardHeaders,
+      "Spb-Accept-Encoding": "gzip, deflate, br, zstd",
+    },
     body: JSON.stringify({
       userType: 3,
       userIds,
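Note on the forwarded headers: with forward_headers / forward_headers_pure enabled, ScrapingBee's convention is to forward request headers carrying the Spb- prefix to the target URL with the prefix stripped, so the Spb-Content-Type and Spb-Accept-Encoding entries above reach the scraped API as plain Content-Type and Accept-Encoding. A small illustration of that convention; the values are placeholders, not real credentials:

// Illustration only: how the Spb- prefix convention maps onto the headers sent
// to the target API.
const sentToScrapingBee = {
  "Spb-Authorization": "<jwt>",
  "Spb-Content-Type": "application/json",
  "Spb-Accept-Encoding": "gzip, deflate, br, zstd",
};

// What the target receives after ScrapingBee strips the prefix:
const forwardedToTarget = Object.fromEntries(
  Object.entries(sentToScrapingBee).map(([key, value]) => [key.replace(/^Spb-/, ""), value]),
);
// => { Authorization: "<jwt>", "Content-Type": "application/json",
//      "Accept-Encoding": "gzip, deflate, br, zstd" }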
@@ -373,10 +405,19 @@ export const getData = async (
     message: string;
     data: { book: BookingEntry; user: any }[];
   };
 
+  res.headers.forEach((value, key) => {
+    logger.debug(`[getData] response headers - ${key}: ${value}`);
+  });
+
   let decoded = (await res.json()) as { data: J };
-  const json = (decoded.data.success ? decoded.data : decoded) as any as J;
+
+  dumpDataRaw(decoded, `getData_${chosenDate}_${drawId}`);
+
+  const json = (decoded.data && decoded.data.success ? decoded.data : decoded) as any as J;
 
   if (json.code !== 200 || !json.success || !json.data) {
-    logger.warn(`[getData] Error: ${json.message}`);
+    logger.warn(`[getData] Error: ${JSON.stringify(json)}`);
     return { ok: false, message: json.message, data: [] };
   }
 
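Note: the reworked `decoded.data && decoded.data.success` check is what keeps getData from throwing when ScrapingBee returns its flat error envelope (as in the committed fixture), where `data` is undefined. The same rule as a standalone sketch, with hypothetical names:

// Sketch only: the unwrapping rule from the hunk above. When ScrapingBee wraps
// the API response, the envelope sits under `data`; its own error payload has
// no `data` field, so the raw body is returned for the caller's error path.
type ApiEnvelope = { code: number; success: boolean; message: string; data: unknown };

function unwrapBody(decoded: { data?: Partial<ApiEnvelope> } & Record<string, unknown>) {
  return decoded.data && decoded.data.success ? decoded.data : decoded;
}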