Well, it 'works', but it's blocked by Cloudflare

bootunloader
2026-01-01 20:00:23 +02:00
parent 9fa4a0c113
commit 4d5bea3dce
9 changed files with 798 additions and 75 deletions


@@ -1,13 +1,25 @@
import random
import logging
import time
from typing import Dict, List, Optional
import cloudscraper
-from fastapi import FastAPI, HTTPException
+from fastapi import FastAPI, HTTPException, Request
from fastapi.responses import JSONResponse, Response
from pydantic import BaseModel
# Configure logging
logging.basicConfig(
level=logging.INFO,
format="%(asctime)s - %(levelname)s - %(message)s",
datefmt="%Y-%m-%d %H:%M:%S",
)
logger = logging.getLogger(__name__)
app = FastAPI()
logger.info("FastAPI Proxy Server initialized")
CONSTANTS = {
"SESSION_KEY_NAME": "SID",
"SESSION_EXPIRE_TIME_MS": 6 * 60 * 60 * 1000,
@@ -19,7 +31,6 @@ CONSTANTS = {
"Host": "gamebooking24.com",
"Sec-Ch-Ua": '"Not/A)Brand";v="8", "Chromium";v="126"',
"Sec-Ch-Ua-Mobile": "?0",
"Sec-Ch-Ua-Platform": '"Windows"',
"Sec-Fetch-Site": "cross-site",
"Sec-Fetch-Mode": "no-cors",
"Sec-Fetch-Dest": "image",
@@ -33,27 +44,40 @@ CONSTANTS = {
},
}
USER_AGENTS = [
"Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/126.0.0.0 Safari/537.36",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/126.0.0.0 Safari/537.36",
"Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/126.0.0.0 Safari/537.36",
"Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:127.0) Gecko/20100101 Firefox/127.0",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10.15; rv:127.0) Gecko/20100101 Firefox/127.0",
]
# Create a single global cloudscraper instance to maintain session/cookies
# This solves the Cloudflare challenge once and reuses the session
scraper = cloudscraper.create_scraper()
scraper.headers.update(CONSTANTS["SCRAP_API_BASE_HEADERS"])
logger.info("Cloudscraper instance created")
def get_random_user_agent() -> str:
return random.choice(USER_AGENTS)
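# Editor's note: this commit stops calling the helper above (see the removed
# "User-Agent": get_random_user_agent() line further down), which matches how
# Cloudflare works: the cf_clearance cookie is bound to the User-Agent that
# solved the challenge, so a shared session must keep its User-Agent fixed.
# USER_AGENTS and get_random_user_agent are left effectively unused here.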
# Middleware for logging all requests
@app.middleware("http")
async def log_requests(request: Request, call_next):
start_time = time.time()
# Log incoming request
logger.info(f"{request.method} {request.url.path}")
if request.query_params:
logger.debug(f" Query params: {dict(request.query_params)}")
# Process request
response = await call_next(request)
# Log response
duration = (time.time() - start_time) * 1000
logger.info(
f"{request.method} {request.url.path} [{response.status_code}] ({duration:.2f}ms)"
)
return response
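# Editor's note: with the basicConfig format above, one request produces two
# lines like the following (timestamp and duration illustrative):
#   2026-01-01 20:00:23 - INFO - GET /ping
#   2026-01-01 20:00:23 - INFO - GET /ping [200] (0.42ms)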
-def create_scraper_with_headers(
+def update_scraper_headers(
    authorization: Optional[str] = None, extra_headers: Optional[Dict[str, str]] = None
-) -> cloudscraper.CloudScraper:
-    scraper = cloudscraper.create_scraper()
-    headers = {
-        **CONSTANTS["SCRAP_API_BASE_HEADERS"],
-        "User-Agent": get_random_user_agent(),
-    }
+) -> Dict[str, str]:
+    """Build a headers dict to pass per-request through the global scraper."""
+    headers = {}
if authorization:
headers["Authorization"] = authorization
@@ -61,8 +85,7 @@ def create_scraper_with_headers(
if extra_headers:
headers.update(extra_headers)
-    scraper.headers.update(headers)
-    return scraper
+    return headers
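# Editor's note: passing this dict via `headers=` on each call relies on
# requests' session merge semantics: per-request headers are layered over the
# session headers already set on the global scraper, and the per-request value
# wins on any conflict. The base headers applied at startup therefore still
# reach every upstream request.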
# Pydantic models for request bodies
@@ -115,140 +138,205 @@ class DeleteMultiplePayload(BaseModel):
@app.get("/ping")
def ping():
logger.info("Ping request received")
return {"status": "pong"}
@app.get("/v1/user/get-balance")
async def get_balance(userId: int, authorization: str):
logger.info(f"[GET /v1/user/get-balance] userId={userId}")
try:
-        scraper = create_scraper_with_headers(authorization=authorization)
+        headers = update_scraper_headers(authorization=authorization)
res = scraper.get(
f"{CONSTANTS['SCRAP_API_URL']}/v1/user/get-balance",
params={"userId": userId},
headers=headers,
)
logger.info(f"[GET /v1/user/get-balance] Response: {res.status_code}")
return JSONResponse(content=res.json(), status_code=res.status_code)
except Exception as e:
logger.error(f"[GET /v1/user/get-balance] Error: {str(e)}")
raise HTTPException(status_code=500, detail=str(e))
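# Editor's note: `authorization` is declared as a plain str parameter rather
# than a Header() dependency, so FastAPI reads it from the query string; the
# middleware above would then log the token at DEBUG level. A call against a
# local instance (URL illustrative) would look like:
#   curl "http://127.0.0.1:8000/v1/user/get-balance?userId=1&authorization=Bearer%20TOKEN"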
@app.post("/v1/auth/login")
async def login(payload: LoginPayload):
logger.info(f"[POST /v1/auth/login] - payload={payload.model_dump()}")
try:
-        scraper = create_scraper_with_headers(
+        headers = update_scraper_headers(
extra_headers={"Content-Type": "application/json"}
)
res = scraper.post(
f"{CONSTANTS['SCRAP_API_URL']}/v1/auth/login", json=payload.dict()
f"{CONSTANTS['SCRAP_API_URL']}/v1/auth/login",
json=payload.model_dump(),
headers=headers,
)
logger.info(f"[POST /v1/auth/login] Response: {res.status_code}")
return JSONResponse(content=res.json(), status_code=res.status_code)
except Exception as e:
logger.error(f"[POST /v1/auth/login] Error: {str(e)}")
raise HTTPException(status_code=500, detail=str(e))
@app.get("/verify/image")
async def get_captcha(uuid: str):
logger.info(f"[GET /verify/image] uuid={uuid}")
try:
-        scraper = create_scraper_with_headers(
+        headers = update_scraper_headers(
extra_headers={
"Accept": "image/avif,image/webp,image/apng,image/svg+xml,image/*,*/*;q=0.8"
}
)
res = scraper.get(
f"{CONSTANTS['SCRAP_API_URL']}/verify/image", params={"uuid": uuid}
f"{CONSTANTS['SCRAP_API_URL']}/verify/image",
params={"uuid": uuid},
headers=headers,
)
if res.status_code == 403:
logger.error(
"[GET /verify/image] 403 Forbidden - Cloudflare blocked the request"
)
logger.error(f"[GET /verify/image] Response headers: {dict(res.headers)}")
logger.error(f"[GET /verify/image] Response text: {res.text}")
logger.info(
f"[GET /verify/image] Response: {res.status_code}, size={len(res.content)} bytes"
)
return Response(
content=res.content, media_type="image/png", status_code=res.status_code
)
except Exception as e:
logger.error(f"[GET /verify/image] Error: {str(e)}")
raise HTTPException(status_code=500, detail=str(e))
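# Editor's note (not part of this commit): a hedged sketch of telling a
# Cloudflare challenge apart from a plain upstream 403. Cloudflare marks
# challenge responses with a `cf-mitigated: challenge` header and titles its
# interstitial page "Just a moment...". The helper name is hypothetical.
def looks_like_cloudflare_challenge(res) -> bool:
    return (
        res.headers.get("cf-mitigated") == "challenge"
        or "Just a moment" in res.text
    )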
@app.post("/v1/user/dealer-list")
async def dealer_list(payload: DealerListPayload, authorization: str):
logger.info(
f"[POST /v1/user/dealer-list] parentDistributor={payload.parentDistributor}, page={payload.page}, pageSize={payload.pageSize}"
)
try:
-        scraper = create_scraper_with_headers(
+        headers = update_scraper_headers(
authorization=authorization,
extra_headers={"Content-Type": "application/json"},
)
res = scraper.post(
f"{CONSTANTS['SCRAP_API_URL']}/v1/user/dealer-list", json=payload.dict()
f"{CONSTANTS['SCRAP_API_URL']}/v1/user/dealer-list",
json=payload.dict(),
headers=headers,
)
logger.info(f"[POST /v1/user/dealer-list] Response: {res.status_code}")
return JSONResponse(content=res.json(), status_code=res.status_code)
except Exception as e:
logger.error(f"[POST /v1/user/dealer-list] Error: {str(e)}")
raise HTTPException(status_code=500, detail=str(e))
@app.post("/v1/user/distributor-list")
async def distributor_list(payload: DistributorListPayload, authorization: str):
logger.info(
f"[POST /v1/user/distributor-list] parentDistributor={payload.parentDistributor}, page={payload.page}, pageSize={payload.pageSize}"
)
try:
-        scraper = create_scraper_with_headers(
+        headers = update_scraper_headers(
authorization=authorization,
extra_headers={"Content-Type": "application/json"},
)
res = scraper.post(
f"{CONSTANTS['SCRAP_API_URL']}/v1/user/distributor-list",
json=payload.dict(),
headers=headers,
)
logger.info(f"[POST /v1/user/distributor-list] Response: {res.status_code}")
return JSONResponse(content=res.json(), status_code=res.status_code)
except Exception as e:
logger.error(f"[POST /v1/user/distributor-list] Error: {str(e)}")
raise HTTPException(status_code=500, detail=str(e))
@app.get("/v1/draw/list-my")
async def list_draws(userId: int, authorization: str):
logger.info(f"[GET /v1/draw/list-my] userId={userId}")
try:
-        scraper = create_scraper_with_headers(
+        headers = update_scraper_headers(
authorization=authorization,
extra_headers={"Content-Type": "application/json"},
)
res = scraper.get(
f"{CONSTANTS['SCRAP_API_URL']}/v1/draw/list-my", params={"userId": userId}
f"{CONSTANTS['SCRAP_API_URL']}/v1/draw/list-my",
params={"userId": userId},
headers=headers,
)
logger.info(f"[GET /v1/draw/list-my] Response: {res.status_code}")
return JSONResponse(content=res.json(), status_code=res.status_code)
except Exception as e:
logger.error(f"[GET /v1/draw/list-my] Error: {str(e)}")
raise HTTPException(status_code=500, detail=str(e))
@app.post("/v1/book/list2")
async def book_list(payload: BookListPayload, authorization: str):
logger.info(
f"[POST /v1/book/list2] drawId={payload.drawId}, userIds={len(payload.userIds)}, date={payload.startDate}"
)
try:
-        scraper = create_scraper_with_headers(
+        headers = update_scraper_headers(
authorization=authorization,
extra_headers={"Content-Type": "application/json"},
)
res = scraper.post(
f"{CONSTANTS['SCRAP_API_URL']}/v1/book/list2", json=payload.dict()
f"{CONSTANTS['SCRAP_API_URL']}/v1/book/list2",
json=payload.dict(),
headers=headers,
)
logger.info(f"[POST /v1/book/list2] Response: {res.status_code}")
return JSONResponse(content=res.json(), status_code=res.status_code)
except Exception as e:
logger.error(f"[POST /v1/book/list2] Error: {str(e)}")
raise HTTPException(status_code=500, detail=str(e))
@app.post("/v1/book/add-multiple")
async def add_multiple(payload: AddMultiplePayload, authorization: str):
entries_count = len(payload.insertData.split(";")) if payload.insertData else 0
logger.info(
f"[POST /v1/book/add-multiple] dealerId={payload.dealerId}, drawId={payload.drawId}, entries={entries_count}, balance={payload.changedBalance}"
)
try:
-        scraper = create_scraper_with_headers(
+        headers = update_scraper_headers(
authorization=authorization,
extra_headers={"Content-Type": "application/json;charset=UTF-8"},
)
res = scraper.post(
f"{CONSTANTS['SCRAP_API_URL']}/v1/book/add-multiple", json=payload.dict()
f"{CONSTANTS['SCRAP_API_URL']}/v1/book/add-multiple",
json=payload.dict(),
headers=headers,
)
logger.info(f"[POST /v1/book/add-multiple] Response: {res.status_code}")
return JSONResponse(content=res.json(), status_code=res.status_code)
except Exception as e:
logger.error(f"[POST /v1/book/add-multiple] Error: {str(e)}")
raise HTTPException(status_code=500, detail=str(e))
@app.post("/v1/book/delete-multiple")
async def delete_multiple(payload: DeleteMultiplePayload, authorization: str):
logger.info(
f"[POST /v1/book/delete-multiple] dealerId={payload.dealerId}, drawId={payload.drawId}, bookIds={len(payload.bookIds)}"
)
try:
-        scraper = create_scraper_with_headers(
+        headers = update_scraper_headers(
authorization=authorization,
extra_headers={"Content-Type": "application/json;charset=UTF-8"},
)
res = scraper.post(
f"{CONSTANTS['SCRAP_API_URL']}/v1/book/delete-multiple", json=payload.dict()
f"{CONSTANTS['SCRAP_API_URL']}/v1/book/delete-multiple",
json=payload.dict(),
headers=headers,
)
logger.info(f"[POST /v1/book/delete-multiple] Response: {res.status_code}")
return JSONResponse(content=res.json(), status_code=res.status_code)
except Exception as e:
logger.error(f"[POST /v1/book/delete-multiple] Error: {str(e)}")
raise HTTPException(status_code=500, detail=str(e))
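# Editor's note: assuming this module is named `main` (hypothetical), the
# proxy would typically be served with uvicorn:
#   uvicorn main:app --host 0.0.0.0 --port 8000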