Bullshit MF headers not being passed ahead

bootunloader
2026-01-04 21:59:01 +02:00
parent 1665ffb9bb
commit 714b8f737b
9 changed files with 26 additions and 1816 deletions
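In short: the standalone Python proxy service (the pyapi/ FastAPI wrapper around ScrapingBee) is removed, and the app now calls the target API directly through ScraperAPI's proxy mode via http-proxy-agent, with keep_headers=true baked into the proxy username so request headers such as Authorization are actually forwarded. A minimal TypeScript sketch of the new fetch path, assuming the SCRAPERAPI_API_KEY env var and node-fetch seen in the diff below (the request body here is illustrative, not the real payload):

import { HttpProxyAgent } from "http-proxy-agent";
import fetch from "node-fetch";

async function fetchThroughScraperApi(jwt: string) {
  const apiKey = process.env.SCRAPERAPI_API_KEY ?? "";
  // "keep_headers=true" rides in the proxy username and tells ScraperAPI
  // to pass our own request headers through to the target site.
  const proxyAgent = new HttpProxyAgent(
    `http://scraperapi.keep_headers=true:${apiKey}@proxy-server.scraperapi.com:8001`,
  );
  return fetch("https://gamebooking24.com/lottery-api/v1/book/list2", {
    method: "POST",
    headers: { "Content-Type": "application/json", Authorization: jwt },
    body: JSON.stringify({ page: 1 }), // illustrative payload
    agent: proxyAgent, // node-fetch routes the request through the proxy
  });
}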

View File

@@ -29,6 +29,7 @@
"date-fns-tz": "^3.1.3",
"dayjs": "^1.11.9",
"dotenv": "^16.3.1",
"http-proxy-agent": "^7.0.2",
"https-proxy-agent": "^7.0.5",
"ioredis": "^5.3.2",
"lucide-svelte": "^0.424.0",

pnpm-lock.yaml generated (14 changes)
View File

@@ -56,6 +56,9 @@ importers:
       dotenv:
         specifier: ^16.3.1
         version: 16.3.1
+      http-proxy-agent:
+        specifier: ^7.0.2
+        version: 7.0.2
       https-proxy-agent:
         specifier: ^7.0.5
         version: 7.0.5
@@ -1208,6 +1211,10 @@ packages:
     resolution: {integrity: sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ==}
     engines: {node: '>= 0.4'}
+
+  http-proxy-agent@7.0.2:
+    resolution: {integrity: sha512-T1gkAiYYDWYx3V5Bmyu7HcfcvL7mUrTWiM6yOfa3PIphViJ/gFPbvidQ+veqSOHci/PxBcDabeUNCzpOODJZig==}
+    engines: {node: '>= 14'}
   https-proxy-agent@7.0.5:
     resolution: {integrity: sha512-1e4Wqeblerz+tMKPIq2EMGiiWW1dIjZOksyHWSUm1rmuvw/how9hBHZ38lAGj5ID4Ik6EdkOw7NmWPy6LAwalw==}
     engines: {node: '>= 14'}
@@ -3089,6 +3096,13 @@ snapshots:
     dependencies:
       function-bind: 1.1.2
+
+  http-proxy-agent@7.0.2:
+    dependencies:
+      agent-base: 7.1.1
+      debug: 4.3.4
+    transitivePeerDependencies:
+      - supports-color
   https-proxy-agent@7.0.5:
     dependencies:
       agent-base: 7.1.1
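(With this change both http-proxy-agent and the pre-existing https-proxy-agent sit in the dependency set: the former sends plain-HTTP requests through a proxy, the latter opens a CONNECT tunnel for HTTPS targets. Only HttpProxyAgent is imported in the diff further down, which matches ScraperAPI's plain http:// proxy endpoint on port 8001.)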

View File

@@ -1 +0,0 @@
3.13

View File

@@ -1,27 +0,0 @@
import cloudscraper


def main():
    scraper = cloudscraper.create_scraper()
    res = scraper.post(
        "https://gamebooking24.com/lottery-api/v1/user/dealer-list",
        headers={
            "User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10.15; rv:140.0) Gecko/20100101 Firefox/140.0",
            "Accept": "application/json, text/plain, */*",
            "Accept-Language": "en-US,en;q=0.5",
            "Content-Type": "application/json;charset=utf-8",
            "Authorization": "eyJ0eXAiOiJKV1QiLCJhbGciOiJIUzI1NiJ9.eyJhdWQiOiJ3ZWIiLCJpc3MiOiJsb3R0ZXJ5IiwiZXhwIjoxNzY2OTc5NDE5LCJpYXQiOjE3NjY4OTMwMTksImp0aSI6IjEzZGQ4MDVjYWYxOTRhNzZiMjljNzlhYmVjZjM1MDAxIiwidXNlcm5hbWUiOiIyOThIQU0ifQ.CllSMlgycFWaIvq43oh3c_f_t1z-YlQ0lmWGd1EaIYU",
            "Access-Control-Allow-Origin": "*",
            "Sec-GPC": "1",
            "Sec-Fetch-Dest": "empty",
            "Sec-Fetch-Mode": "cors",
            "Sec-Fetch-Site": "same-origin",
        },
        json={"page": 1, "pageSize": 999999, "parentDistributor": 179},
    )
    print(res.status_code)
    print(res.text)


if __name__ == "__main__":
    main()

View File

@@ -1,517 +0,0 @@
import json
import logging
import os
import time
from typing import Dict, List, Optional
from urllib.parse import urlencode, quote

import aiohttp
from dotenv import load_dotenv
from fastapi import FastAPI, HTTPException, Request
from fastapi.responses import JSONResponse, Response
from pydantic import BaseModel

load_dotenv()

# Configure logging
logging.basicConfig(
    level=logging.INFO,
    format="%(asctime)s - %(levelname)s - %(message)s",
    datefmt="%Y-%m-%d %H:%M:%S",
)
logger = logging.getLogger(__name__)

app = FastAPI()
logger.info("FastAPI Proxy Server initialized")

SCRAPINGBEE_API_KEY = os.getenv("SCRAPINGBEE_API_KEY")
if not SCRAPINGBEE_API_KEY:
    raise ValueError("SCRAPINGBEE_API_KEY is not set")

CONSTANTS = {
    "SESSION_KEY_NAME": "SID",
    "SESSION_EXPIRE_TIME_MS": 6 * 60 * 60 * 1000,
    "POST_SESSION_KEY": "postsession",
    "LAST_FETCHED_KEY": "LAST_FETCHED",
    "SCRAP_API_URL": "https://gamebooking24.com/lottery-api",
    "SCRAP_API_SESSION_KEY": "SRAJWT",
    "SCRAPINGBEE_BASE_URL": "https://app.scrapingbee.com/api/v1",
    "SCRAP_API_BASE_HEADERS": {
        "Host": "gamebooking24.com",
        "Sec-Ch-Ua": '"Not/A)Brand";v="8", "Chromium";v="126"',
        "Sec-Ch-Ua-Mobile": "?0",
        "Sec-Fetch-Site": "cross-site",
        "Sec-Fetch-Mode": "no-cors",
        "Sec-Fetch-Dest": "image",
        "Accept-Encoding": "gzip, deflate",
        "Accept-Language": "en-US,en;q=0.9",
        "Access-Control-Allow-Origin": "*",
        "Accept": "application/json, text/plain, */*",
        "Origin": "https://gamebooking24.com",
        "Referer": "https://gamebooking24.com/",
        "Priority": "u=1, i",
    },
}
# Middleware for logging all requests
@app.middleware("http")
async def log_requests(request: Request, call_next):
    start_time = time.time()
    # Log incoming request
    logger.info(f"{request.method} {request.url.path}")
    if request.query_params:
        logger.debug(f"  Query params: {dict(request.query_params)}")
    # Process request
    response = await call_next(request)
    # Log response
    duration = (time.time() - start_time) * 1000
    logger.info(
        f"{request.method} {request.url.path} [{response.status_code}] ({duration:.2f}ms)"
    )
    return response


def build_headers(
    authorization: Optional[str] = None, extra_headers: Optional[Dict[str, str]] = None
) -> Dict[str, str]:
    """Build headers dict for requests"""
    headers = CONSTANTS["SCRAP_API_BASE_HEADERS"].copy()
    if authorization:
        headers["Authorization"] = authorization
    if extra_headers:
        headers.update(extra_headers)
    return headers
async def make_get_request(
    url: str, params: Optional[Dict] = None, headers: Optional[Dict] = None
):
    """Make a GET request using ScrapingBee"""
    # Add query params to the target URL if provided
    if params:
        url_with_params = f"{url}?{urlencode(params)}"
    else:
        url_with_params = url
    logger.debug(f"[ScrapingBee GET] Target URL: {url_with_params}")

    # Build the ScrapingBee request params
    # Note: aiohttp will automatically URL-encode the params, including the 'url' value
    scrapingbee_params = {
        "api_key": SCRAPINGBEE_API_KEY,
        "url": url_with_params,
        "render_js": "true",
        "block_resources": "false",
        "transparent_status_code": "true",  # Pass through the actual status code from target site
    }
    # Forward headers to target site if provided (for Authorization, etc.)
    if headers and "Authorization" in headers:
        scrapingbee_params["forward_headers"] = "true"

    # Make the request to ScrapingBee using aiohttp
    # Note: don't pass custom headers to ScrapingBee itself - they're for the target site
    # If needed, use ScrapingBee's forward_headers parameter instead
    async with aiohttp.ClientSession() as session:
        async with session.get(
            CONSTANTS["SCRAPINGBEE_BASE_URL"],
            params=scrapingbee_params,
            timeout=aiohttp.ClientTimeout(total=60),
        ) as response:
            # Read content before the context manager exits
            content = await response.read()

            # Log error responses for debugging
            if response.status != 200:
                try:
                    error_text = content.decode("utf-8")[:500]
                    logger.error(
                        f"[ScrapingBee GET] Status {response.status}, Response: {error_text}"
                    )
                except UnicodeDecodeError:
                    logger.error(
                        f"[ScrapingBee GET] Status {response.status}, Response (non-text): {len(content)} bytes"
                    )

            # Create a simple response object with the data
            class SimpleResponse:
                def __init__(self, status, headers, content_bytes):
                    self.status_code = status
                    self.headers = headers
                    self._content = content_bytes
                    self._text = None
                    self._json = None

                async def text(self):
                    if self._text is None:
                        self._text = self._content.decode("utf-8")
                    return self._text

                async def json(self):
                    if self._json is None:
                        self._json = json.loads(await self.text())
                    return self._json

                async def content(self):
                    return self._content

            return SimpleResponse(response.status, response.headers, content)
async def make_post_request(url: str, data: dict, headers: Optional[Dict] = None):
    """Make a POST request using ScrapingBee"""
    # Build the ScrapingBee request params
    scrapingbee_params = {
        "api_key": SCRAPINGBEE_API_KEY,
        "url": url,
        "render_js": "true",
        "block_resources": "false",
    }
    # ScrapingBee POST requests: pass JSON body as a parameter
    scrapingbee_params["body"] = json.dumps(data)

    # Forward headers to target site if provided
    # Note: ScrapingBee's forward_headers forwards common headers automatically
    # For custom headers like Authorization, we may need to use the cookies parameter
    if headers and "Authorization" in headers:
        scrapingbee_params["forward_headers"] = "true"
        # TODO: May need to pass Authorization via cookies if forward_headers doesn't work

    # Make the POST request to ScrapingBee using aiohttp
    # ScrapingBee HTML API uses GET even for POST requests - the body is passed as a param
    async with aiohttp.ClientSession() as session:
        async with session.get(
            CONSTANTS["SCRAPINGBEE_BASE_URL"],
            params=scrapingbee_params,
            timeout=aiohttp.ClientTimeout(total=60),
        ) as response:
            # Read content before the context manager exits
            content = await response.read()

            # Create a simple response object with the data
            class SimpleResponse:
                def __init__(self, status, headers, content_bytes):
                    self.status_code = status
                    self.headers = headers
                    self._content = content_bytes
                    self._text = None
                    self._json = None

                async def text(self):
                    if self._text is None:
                        self._text = self._content.decode("utf-8")
                    return self._text

                async def json(self):
                    if self._json is None:
                        self._json = json.loads(await self.text())
                    return self._json

                async def content(self):
                    return self._content

            return SimpleResponse(response.status, response.headers, content)
# Pydantic models for request bodies
class LoginPayload(BaseModel):
    userId: str
    password: str
    verifyToken: str
    code: str
    userType: int


class DealerListPayload(BaseModel):
    page: int
    pageSize: int
    parentDistributor: int


class DistributorListPayload(BaseModel):
    page: int
    pageSize: int
    parentDistributor: int


class BookListPayload(BaseModel):
    userType: int
    userIds: List[int]
    drawId: int
    startDate: str
    endDate: str
    beAdmin: bool
    containImported: bool
    keyword: str


class AddMultiplePayload(BaseModel):
    dealerId: int
    drawId: int
    closeTime: str
    date: str
    changedBalance: int
    insertData: str


class DeleteMultiplePayload(BaseModel):
    bookIds: List[str]
    closeTime: str
    dealerId: int
    drawId: int
@app.get("/ping")
def ping():
logger.info("Ping request received")
return {"status": "pong"}
@app.get("/v1/user/get-balance")
async def get_balance(userId: int, authorization: str):
logger.info(f"[GET /v1/user/get-balance] userId={userId}")
try:
headers = build_headers(authorization=authorization)
response = await make_get_request(
f"{CONSTANTS['SCRAP_API_URL']}/v1/user/get-balance",
params={"userId": userId},
headers=headers,
)
logger.info(f"[GET /v1/user/get-balance] Response: {response.status_code}")
return JSONResponse(
content=await response.json(), status_code=response.status_code
)
except Exception as e:
logger.error(f"[GET /v1/user/get-balance] Error: {str(e)}")
raise HTTPException(status_code=500, detail=str(e))
@app.post("/v1/auth/login")
async def login(payload: LoginPayload):
logger.info(f"[POST /v1/auth/login] - payload={payload.model_dump()}")
try:
headers = build_headers(extra_headers={"Content-Type": "application/json"})
response = await make_post_request(
f"{CONSTANTS['SCRAP_API_URL']}/v1/auth/login",
data=payload.model_dump(),
headers=headers,
)
logger.info(f"[POST /v1/auth/login] Response: {response.status_code}")
# Handle non-JSON responses (e.g., 403 HTML pages)
if response.status_code == 403:
response_text = await response.text()
logger.error(
f"[POST /v1/auth/login] 403 Forbidden - Response: {response_text[:500]}"
)
raise HTTPException(status_code=403, detail="Request blocked")
# Try to parse as JSON
try:
response_json = await response.json()
except Exception as json_error:
response_text = await response.text()
logger.error(
f"[POST /v1/auth/login] Failed to parse JSON response: {json_error}"
)
logger.error(f"[POST /v1/auth/login] Response text: {response_text[:500]}")
raise HTTPException(
status_code=500, detail=f"Invalid JSON response: {str(json_error)}"
)
return JSONResponse(content=response_json, status_code=response.status_code)
except HTTPException:
raise
except Exception as e:
logger.error(f"[POST /v1/auth/login] Error: {str(e)}")
raise HTTPException(status_code=500, detail=str(e))
@app.get("/verify/image")
async def get_captcha(uuid: str):
logger.info(f"[GET /verify/image] uuid={uuid}")
try:
headers = build_headers(
extra_headers={
"Accept": "image/avif,image/webp,image/apng,image/svg+xml,image/*,*/*;q=0.8"
}
)
response = await make_get_request(
f"{CONSTANTS['SCRAP_API_URL']}/verify/image",
params={"uuid": uuid},
headers=headers,
)
if response.status_code == 403:
logger.error("[GET /verify/image] 403 Forbidden - Request blocked")
logger.error(
f"[GET /verify/image] Response headers: {dict(response.headers)}"
)
response_text = await response.text()
logger.error(f"[GET /verify/image] Response text: {response_text[:500]}")
content = await response.content()
logger.info(
f"[GET /verify/image] Response: {response.status_code}, size={len(content)} bytes"
)
return Response(
content=content,
media_type="image/png",
status_code=response.status_code,
)
except Exception as e:
logger.error(f"[GET /verify/image] Error: {str(e)}")
raise HTTPException(status_code=500, detail=str(e))
@app.post("/v1/user/dealer-list")
async def dealer_list(payload: DealerListPayload, authorization: str):
logger.info(
f"[POST /v1/user/dealer-list] parentDistributor={payload.parentDistributor}, page={payload.page}, pageSize={payload.pageSize}"
)
try:
headers = build_headers(
authorization=authorization,
extra_headers={"Content-Type": "application/json"},
)
response = await make_post_request(
f"{CONSTANTS['SCRAP_API_URL']}/v1/user/dealer-list",
data=payload.model_dump(),
headers=headers,
)
logger.info(f"[POST /v1/user/dealer-list] Response: {response.status_code}")
return JSONResponse(
content=await response.json(), status_code=response.status_code
)
except Exception as e:
logger.error(f"[POST /v1/user/dealer-list] Error: {str(e)}")
raise HTTPException(status_code=500, detail=str(e))
@app.post("/v1/user/distributor-list")
async def distributor_list(payload: DistributorListPayload, authorization: str):
logger.info(
f"[POST /v1/user/distributor-list] parentDistributor={payload.parentDistributor}, page={payload.page}, pageSize={payload.pageSize}"
)
try:
headers = build_headers(
authorization=authorization,
extra_headers={"Content-Type": "application/json"},
)
response = await make_post_request(
f"{CONSTANTS['SCRAP_API_URL']}/v1/user/distributor-list",
data=payload.model_dump(),
headers=headers,
)
logger.info(
f"[POST /v1/user/distributor-list] Response: {response.status_code}"
)
return JSONResponse(
content=await response.json(), status_code=response.status_code
)
except Exception as e:
logger.error(f"[POST /v1/user/distributor-list] Error: {str(e)}")
raise HTTPException(status_code=500, detail=str(e))
@app.get("/v1/draw/list-my")
async def list_draws(userId: int, authorization: str):
logger.info(f"[GET /v1/draw/list-my] userId={userId}")
try:
headers = build_headers(
authorization=authorization,
extra_headers={"Content-Type": "application/json"},
)
response = await make_get_request(
f"{CONSTANTS['SCRAP_API_URL']}/v1/draw/list-my",
params={"userId": userId},
headers=headers,
)
logger.info(f"[GET /v1/draw/list-my] Response: {response.status_code}")
return JSONResponse(
content=await response.json(), status_code=response.status_code
)
except Exception as e:
logger.error(f"[GET /v1/draw/list-my] Error: {str(e)}")
raise HTTPException(status_code=500, detail=str(e))
@app.post("/v1/book/list2")
async def book_list(payload: BookListPayload, authorization: str):
logger.info(
f"[POST /v1/book/list2] drawId={payload.drawId}, userIds={len(payload.userIds)}, date={payload.startDate}"
)
try:
headers = build_headers(
authorization=authorization,
extra_headers={"Content-Type": "application/json"},
)
response = await make_post_request(
f"{CONSTANTS['SCRAP_API_URL']}/v1/book/list2",
data=payload.model_dump(),
headers=headers,
)
logger.info(f"[POST /v1/book/list2] Response: {response.status_code}")
return JSONResponse(
content=await response.json(), status_code=response.status_code
)
except Exception as e:
logger.error(f"[POST /v1/book/list2] Error: {str(e)}")
raise HTTPException(status_code=500, detail=str(e))
@app.post("/v1/book/add-multiple")
async def add_multiple(payload: AddMultiplePayload, authorization: str):
entries_count = len(payload.insertData.split(";")) if payload.insertData else 0
logger.info(
f"[POST /v1/book/add-multiple] dealerId={payload.dealerId}, drawId={payload.drawId}, entries={entries_count}, balance={payload.changedBalance}"
)
try:
headers = build_headers(
authorization=authorization,
extra_headers={"Content-Type": "application/json;charset=UTF-8"},
)
response = await make_post_request(
f"{CONSTANTS['SCRAP_API_URL']}/v1/book/add-multiple",
data=payload.model_dump(),
headers=headers,
)
logger.info(f"[POST /v1/book/add-multiple] Response: {response.status_code}")
return JSONResponse(
content=await response.json(), status_code=response.status_code
)
except Exception as e:
logger.error(f"[POST /v1/book/add-multiple] Error: {str(e)}")
raise HTTPException(status_code=500, detail=str(e))
@app.post("/v1/book/delete-multiple")
async def delete_multiple(payload: DeleteMultiplePayload, authorization: str):
logger.info(
f"[POST /v1/book/delete-multiple] dealerId={payload.dealerId}, drawId={payload.drawId}, bookIds={len(payload.bookIds)}"
)
try:
headers = build_headers(
authorization=authorization,
extra_headers={"Content-Type": "application/json;charset=UTF-8"},
)
response = await make_post_request(
f"{CONSTANTS['SCRAP_API_URL']}/v1/book/delete-multiple",
data=payload.model_dump(),
headers=headers,
)
logger.info(f"[POST /v1/book/delete-multiple] Response: {response.status_code}")
return JSONResponse(
content=await response.json(), status_code=response.status_code
)
except Exception as e:
logger.error(f"[POST /v1/book/delete-multiple] Error: {str(e)}")
raise HTTPException(status_code=500, detail=str(e))
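For reference, the call shape this deleted service wrapped, as a TypeScript sketch (the parameter names come straight from the Python above; whether forward_headers and the body parameter behave as those comments hoped is precisely what the commit message disputes):

// Sketch of the deleted service's ScrapingBee usage: every request, even a
// logical POST, goes out as a GET to the HTML API, with the JSON body riding
// along in the "body" query parameter. Uses Node 18+ global fetch.
async function scrapingBeePost(targetUrl: string, data: unknown) {
  const params = new URLSearchParams({
    api_key: process.env.SCRAPINGBEE_API_KEY ?? "",
    url: targetUrl,
    render_js: "true",
    block_resources: "false",
    transparent_status_code: "true", // surface the target's real status code
    forward_headers: "true", // ask ScrapingBee to pass headers along
    body: JSON.stringify(data),
  });
  return fetch(`https://app.scrapingbee.com/api/v1?${params.toString()}`);
}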

View File

@@ -1,12 +0,0 @@
[project]
name = "pyapi"
version = "0.1.0"
description = "Add your description here"
readme = "README.md"
requires-python = ">=3.13"
dependencies = [
    "aiohttp>=3.13.2",
    "fastapi[standard]>=0.128.0",
    "pydantic>=2.12.5",
    "python-dotenv>=1.2.1",
]

View File

@@ -1,7 +0,0 @@
#!/bin/bash
# FastAPI Proxy Server Startup Script
# This script starts the FastAPI proxy server that bypasses Cloudflare
echo "Starting FastAPI Proxy Server on port 3070..."
uv run python -m fastapi run --host=0.0.0.0 --port=3070 main.py

pyapi/uv.lock generated (1231 changes)

File diff suppressed because it is too large

View File

@@ -6,6 +6,7 @@ import { baseDistributorId, constants } from "$lib/utils/constants";
 import type { BookingEntry, Draw, LooseApiUser } from "$lib/utils/data.types";
 import { rng } from "$lib/utils/rng";
 import fs from "fs";
+import { HttpProxyAgent } from "http-proxy-agent";
 import fetch from "node-fetch";
 
 // function dumpDistributors(distributors: LooseApiUser[]) {
@@ -371,39 +372,27 @@ export const getData = async (
   drawId: number,
   chosenDate: string,
 ) => {
-  const scraperApiKey = env.SCRAPER_API_KEY ?? "";
+  const scraperApiKey = env.SCRAPERAPI_API_KEY ?? "";
   const targetUrl = `${constants.SCRAP_API_URL}/v1/book/list2`;
   logger.info(
     `[getData] Fetching draw data from API for ${chosenDate} ${drawId} for ${userIds.length} users`,
   );
-  const proxyConfig = {
-    host: "proxy-server.scraperapi.com",
-    port: 8001,
-    auth: {
-      user: "scraperapi",
-      password: scraperApiKey,
-    },
-    protocol: "http",
-  };
+  const proxyUsername = "scraperapi.keep_headers=true";
-  const apiUrl = new URL(`https://api.scraperapi.com/`);
-  apiUrl.searchParams.append("api_key", scraperApiKey);
-  apiUrl.searchParams.append("url", targetUrl);
-  apiUrl.searchParams.append("follow_redirect", "false");
-  apiUrl.searchParams.append("keep_headers", "true");
-  apiUrl.searchParams.append("device_type", "desktop");
+  // Configure HTTP proxy agent
+  const proxyAgent = new HttpProxyAgent(
+    `http://${proxyUsername}:${scraperApiKey}@proxy-server.scraperapi.com:8001`,
+  );
-  logger.debug(`[getData] Scraping for data at : ${apiUrl.toString()}`);
+  logger.debug(`[getData] Using proxy to fetch data from : ${targetUrl}`);
-  const res = await fetch(apiUrl.toString(), {
+  const res = await fetch(targetUrl, {
     method: "POST",
     headers: {
-      Authorization: jwt,
       "Content-Type": "application/json",
+      ...constants.SCRAP_API_BASE_HEADERS,
+      "Accept-Encoding": "gzip, deflate, br, zstd",
+      Authorization: jwt,
     },
     body: JSON.stringify({
       userType: 3,
@@ -415,6 +404,7 @@ export const getData = async (
       containImported: false,
       keyword: "",
     }),
+    agent: proxyAgent,
   });
 
 type J = {
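Two details in the final hunk are easy to miss: the env var is renamed from SCRAPER_API_KEY to SCRAPERAPI_API_KEY, and Authorization now comes after the ...constants.SCRAP_API_BASE_HEADERS spread, so the shared base headers can no longer clobber the per-request JWT. Moving from ScraperAPI's API-endpoint mode (api.scraperapi.com?url=...&keep_headers=true) to proxy mode also keeps the outgoing request shaped exactly like a direct call, which is presumably what finally gets the headers passed along.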