almost transitioned

bootunloader
2026-01-01 20:11:36 +02:00
parent f09987ef12
commit b36492dc52
4 changed files with 1558 additions and 56 deletions

.gitignore vendored

@@ -3,3 +3,4 @@ build
 .env
 .svelte-kit
 **/.venv
+**/__pycache__

pyapi FastAPI service

@@ -2,10 +2,10 @@ import logging
 import time
 from typing import Dict, List, Optional
-import cloudscraper
 from fastapi import FastAPI, HTTPException, Request
 from fastapi.responses import JSONResponse, Response
 from pydantic import BaseModel
+from scrapling.fetchers import StealthySession


 # Configure logging
 logging.basicConfig(
@@ -44,11 +44,32 @@ CONSTANTS = {
     },
 }

-# Create a single global cloudscraper instance to maintain session/cookies
-# This solves the Cloudflare challenge once and reuses the session
-scraper = cloudscraper.create_scraper()
-scraper.headers.update(CONSTANTS["SCRAP_API_BASE_HEADERS"])
-logger.info("Cloudscraper instance created")
+# Global StealthySession instance - will be initialized on startup
+stealthy_session: Optional[StealthySession] = None
+
+
+@app.on_event("startup")
+async def startup_event():
+    """Initialize the StealthySession when the app starts"""
+    global stealthy_session
+    logger.info("Initializing StealthySession...")
+    stealthy_session = StealthySession(
+        headless=True,
+        solve_cloudflare=True,
+        max_pages=10,  # Allow up to 10 concurrent requests
+        google_search=False,  # Skip Google search simulation for faster startup
+    )
+    logger.info("StealthySession initialized successfully")
+
+
+@app.on_event("shutdown")
+async def shutdown_event():
+    """Close the StealthySession when the app shuts down"""
+    global stealthy_session
+    if stealthy_session:
+        logger.info("Closing StealthySession...")
+        await stealthy_session.close()
+        logger.info("StealthySession closed successfully")


 # Middleware for logging all requests
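A side note on the new hooks: @app.on_event("startup"/"shutdown") has been deprecated in FastAPI since 0.93 in favor of a lifespan handler, and this project pins fastapi[standard]>=0.128.0 below, so these decorators still work but emit deprecation warnings. A minimal sketch of the equivalent lifespan wiring, reusing the exact StealthySession arguments from this commit (the await on close() mirrors the diff and assumes it is a coroutine):

from contextlib import asynccontextmanager
from typing import Optional

from fastapi import FastAPI
from scrapling.fetchers import StealthySession

stealthy_session: Optional[StealthySession] = None


@asynccontextmanager
async def lifespan(app: FastAPI):
    # Startup: same construction as startup_event() above
    global stealthy_session
    stealthy_session = StealthySession(
        headless=True,
        solve_cloudflare=True,
        max_pages=10,
        google_search=False,
    )
    yield
    # Shutdown: same teardown as shutdown_event() above
    if stealthy_session:
        await stealthy_session.close()


app = FastAPI(lifespan=lifespan)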
@@ -73,11 +94,11 @@ async def log_requests(request: Request, call_next):
     return response


-def update_scraper_headers(
+def build_headers(
     authorization: Optional[str] = None, extra_headers: Optional[Dict[str, str]] = None
 ) -> Dict[str, str]:
-    """Build headers dict to update the global scraper with"""
-    headers = {}
+    """Build headers dict for requests"""
+    headers = CONSTANTS["SCRAP_API_BASE_HEADERS"].copy()

     if authorization:
         headers["Authorization"] = authorization
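The rename is not just cosmetic: update_scraper_headers mutated the shared scraper's header state, so two concurrent requests could overwrite each other's Authorization header, while build_headers returns a fresh per-request dict seeded from the base headers. The tail of the helper falls outside this hunk; it presumably finishes by merging extra_headers and returning, roughly as follows (the last two steps are assumptions):

def build_headers(
    authorization: Optional[str] = None, extra_headers: Optional[Dict[str, str]] = None
) -> Dict[str, str]:
    """Build headers dict for requests"""
    headers = CONSTANTS["SCRAP_API_BASE_HEADERS"].copy()

    if authorization:
        headers["Authorization"] = authorization

    if extra_headers:
        headers.update(extra_headers)  # assumed: per-call overrides win

    return headers  # assumed return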
@@ -146,14 +167,14 @@ def ping():
 async def get_balance(userId: int, authorization: str):
     logger.info(f"[GET /v1/user/get-balance] userId={userId}")
     try:
-        headers = update_scraper_headers(authorization=authorization)
-        res = scraper.get(
+        headers = build_headers(authorization=authorization)
+        page = stealthy_session.fetch(
             f"{CONSTANTS['SCRAP_API_URL']}/v1/user/get-balance",
             params={"userId": userId},
             headers=headers,
         )
-        logger.info(f"[GET /v1/user/get-balance] Response: {res.status_code}")
-        return JSONResponse(content=res.json(), status_code=res.status_code)
+        logger.info(f"[GET /v1/user/get-balance] Response: {page.status}")
+        return JSONResponse(content=page.response.json(), status_code=page.status)
     except Exception as e:
         logger.error(f"[GET /v1/user/get-balance] Error: {str(e)}")
         raise HTTPException(status_code=500, detail=str(e))
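One caveat with this pattern: StealthySession drives a real browser, and nothing here awaits fetch(), so if it is a synchronous call it will block the event loop for every in-flight request. If that turns out to be the case, offloading it to Starlette's thread pool (re-exported by FastAPI) keeps the handlers responsive without changing the call, for example:

from fastapi.concurrency import run_in_threadpool

# Inside the handler, replacing the direct call:
page = await run_in_threadpool(
    stealthy_session.fetch,
    f"{CONSTANTS['SCRAP_API_URL']}/v1/user/get-balance",
    params={"userId": userId},
    headers=headers,
)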
@@ -163,16 +184,15 @@ async def get_balance(userId: int, authorization: str):
 async def login(payload: LoginPayload):
     logger.info(f"[POST /v1/auth/login] - payload={payload.model_dump()}")
     try:
-        headers = update_scraper_headers(
-            extra_headers={"Content-Type": "application/json"}
-        )
-        res = scraper.post(
+        headers = build_headers(extra_headers={"Content-Type": "application/json"})
+        page = stealthy_session.fetch(
             f"{CONSTANTS['SCRAP_API_URL']}/v1/auth/login",
-            json=payload.model_dump(),
+            method="POST",
+            data=payload.model_dump(),
             headers=headers,
         )
-        logger.info(f"[POST /v1/auth/login] Response: {res.status_code}")
-        return JSONResponse(content=res.json(), status_code=res.status_code)
+        logger.info(f"[POST /v1/auth/login] Response: {page.status}")
+        return JSONResponse(content=page.response.json(), status_code=page.status)
     except Exception as e:
         logger.error(f"[POST /v1/auth/login] Error: {str(e)}")
         raise HTTPException(status_code=500, detail=str(e))
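Two details of the new POST bodies are worth flagging. payload.model_dump() can contain values that are not JSON-serializable as-is (datetimes, Decimals), and whether fetch() encodes a data= dict as JSON or as a form body is not visible from this diff, even though Content-Type is already pinned to application/json. Serializing explicitly sidesteps both questions; a hedged variant:

import json

# mode="json" coerces datetimes etc. into JSON-safe primitives (Pydantic v2)
body = json.dumps(payload.model_dump(mode="json"))
page = stealthy_session.fetch(
    f"{CONSTANTS['SCRAP_API_URL']}/v1/auth/login",
    method="POST",
    data=body,  # pre-encoded string, so the wire format is unambiguous
    headers=headers,
)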
@@ -182,29 +202,33 @@ async def login(payload: LoginPayload):
 async def get_captcha(uuid: str):
     logger.info(f"[GET /verify/image] uuid={uuid}")
     try:
-        headers = update_scraper_headers(
+        headers = build_headers(
             extra_headers={
                 "Accept": "image/avif,image/webp,image/apng,image/svg+xml,image/*,*/*;q=0.8"
             }
         )
-        res = scraper.get(
+        page = stealthy_session.fetch(
             f"{CONSTANTS['SCRAP_API_URL']}/verify/image",
             params={"uuid": uuid},
             headers=headers,
         )
-        if res.status_code == 403:
+        if page.status == 403:
             logger.error(
                 "[GET /verify/image] 403 Forbidden - Cloudflare blocked the request"
             )
-            logger.error(f"[GET /verify/image] Response headers: {dict(res.headers)}")
-            logger.error(f"[GET /verify/image] Response text: {res.text}")
+            logger.error(
+                f"[GET /verify/image] Response headers: {dict(page.response.headers)}"
+            )
+            logger.error(f"[GET /verify/image] Response text: {page.response.text}")

         logger.info(
-            f"[GET /verify/image] Response: {res.status_code}, size={len(res.content)} bytes"
+            f"[GET /verify/image] Response: {page.status}, size={len(page.response.content)} bytes"
         )
         return Response(
-            content=res.content, media_type="image/png", status_code=res.status_code
+            content=page.response.content,
+            media_type="image/png",
+            status_code=page.status,
         )
     except Exception as e:
         logger.error(f"[GET /verify/image] Error: {str(e)}")
         raise HTTPException(status_code=500, detail=str(e))
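The handler advertises an Accept header that allows avif/webp/apng, yet hardcodes media_type="image/png" on the way back out, so a non-PNG upstream response would be mislabeled. Since the 403 branch already reads dict(page.response.headers), passing the upstream Content-Type through is a small fix; the fallback value is an assumption:

media_type = dict(page.response.headers).get("Content-Type", "image/png")
return Response(
    content=page.response.content,
    media_type=media_type,  # mirror whatever the upstream actually sent
    status_code=page.status,
)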
@@ -217,17 +241,18 @@ async def dealer_list(payload: DealerListPayload, authorization: str):
         f"[POST /v1/user/dealer-list] parentDistributor={payload.parentDistributor}, page={payload.page}, pageSize={payload.pageSize}"
     )
     try:
-        headers = update_scraper_headers(
+        headers = build_headers(
             authorization=authorization,
             extra_headers={"Content-Type": "application/json"},
         )
-        res = scraper.post(
+        page = stealthy_session.fetch(
             f"{CONSTANTS['SCRAP_API_URL']}/v1/user/dealer-list",
-            json=payload.dict(),
+            method="POST",
+            data=payload.model_dump(),
             headers=headers,
         )
-        logger.info(f"[POST /v1/user/dealer-list] Response: {res.status_code}")
-        return JSONResponse(content=res.json(), status_code=res.status_code)
+        logger.info(f"[POST /v1/user/dealer-list] Response: {page.status}")
+        return JSONResponse(content=page.response.json(), status_code=page.status)
     except Exception as e:
         logger.error(f"[POST /v1/user/dealer-list] Error: {str(e)}")
         raise HTTPException(status_code=500, detail=str(e))
@@ -239,17 +264,18 @@ async def distributor_list(payload: DistributorListPayload, authorization: str):
         f"[POST /v1/user/distributor-list] parentDistributor={payload.parentDistributor}, page={payload.page}, pageSize={payload.pageSize}"
     )
     try:
-        headers = update_scraper_headers(
+        headers = build_headers(
             authorization=authorization,
             extra_headers={"Content-Type": "application/json"},
         )
-        res = scraper.post(
+        page = stealthy_session.fetch(
             f"{CONSTANTS['SCRAP_API_URL']}/v1/user/distributor-list",
-            json=payload.dict(),
+            method="POST",
+            data=payload.model_dump(),
             headers=headers,
         )
-        logger.info(f"[POST /v1/user/distributor-list] Response: {res.status_code}")
-        return JSONResponse(content=res.json(), status_code=res.status_code)
+        logger.info(f"[POST /v1/user/distributor-list] Response: {page.status}")
+        return JSONResponse(content=page.response.json(), status_code=page.status)
     except Exception as e:
         logger.error(f"[POST /v1/user/distributor-list] Error: {str(e)}")
         raise HTTPException(status_code=500, detail=str(e))
@@ -259,17 +285,17 @@ async def distributor_list(payload: DistributorListPayload, authorization: str):
 async def list_draws(userId: int, authorization: str):
     logger.info(f"[GET /v1/draw/list-my] userId={userId}")
     try:
-        headers = update_scraper_headers(
+        headers = build_headers(
             authorization=authorization,
             extra_headers={"Content-Type": "application/json"},
         )
-        res = scraper.get(
+        page = stealthy_session.fetch(
             f"{CONSTANTS['SCRAP_API_URL']}/v1/draw/list-my",
             params={"userId": userId},
             headers=headers,
         )
-        logger.info(f"[GET /v1/draw/list-my] Response: {res.status_code}")
-        return JSONResponse(content=res.json(), status_code=res.status_code)
+        logger.info(f"[GET /v1/draw/list-my] Response: {page.status}")
+        return JSONResponse(content=page.response.json(), status_code=page.status)
     except Exception as e:
         logger.error(f"[GET /v1/draw/list-my] Error: {str(e)}")
         raise HTTPException(status_code=500, detail=str(e))
@@ -281,17 +307,18 @@ async def book_list(payload: BookListPayload, authorization: str):
         f"[POST /v1/book/list2] drawId={payload.drawId}, userIds={len(payload.userIds)}, date={payload.startDate}"
     )
     try:
-        headers = update_scraper_headers(
+        headers = build_headers(
             authorization=authorization,
             extra_headers={"Content-Type": "application/json"},
         )
-        res = scraper.post(
+        page = stealthy_session.fetch(
             f"{CONSTANTS['SCRAP_API_URL']}/v1/book/list2",
-            json=payload.dict(),
+            method="POST",
+            data=payload.model_dump(),
             headers=headers,
         )
-        logger.info(f"[POST /v1/book/list2] Response: {res.status_code}")
-        return JSONResponse(content=res.json(), status_code=res.status_code)
+        logger.info(f"[POST /v1/book/list2] Response: {page.status}")
+        return JSONResponse(content=page.response.json(), status_code=page.status)
     except Exception as e:
         logger.error(f"[POST /v1/book/list2] Error: {str(e)}")
         raise HTTPException(status_code=500, detail=str(e))
@@ -304,17 +331,18 @@ async def add_multiple(payload: AddMultiplePayload, authorization: str):
         f"[POST /v1/book/add-multiple] dealerId={payload.dealerId}, drawId={payload.drawId}, entries={entries_count}, balance={payload.changedBalance}"
     )
     try:
-        headers = update_scraper_headers(
+        headers = build_headers(
             authorization=authorization,
             extra_headers={"Content-Type": "application/json;charset=UTF-8"},
         )
-        res = scraper.post(
+        page = stealthy_session.fetch(
             f"{CONSTANTS['SCRAP_API_URL']}/v1/book/add-multiple",
-            json=payload.dict(),
+            method="POST",
+            data=payload.model_dump(),
             headers=headers,
         )
-        logger.info(f"[POST /v1/book/add-multiple] Response: {res.status_code}")
-        return JSONResponse(content=res.json(), status_code=res.status_code)
+        logger.info(f"[POST /v1/book/add-multiple] Response: {page.status}")
+        return JSONResponse(content=page.response.json(), status_code=page.status)
     except Exception as e:
         logger.error(f"[POST /v1/book/add-multiple] Error: {str(e)}")
         raise HTTPException(status_code=500, detail=str(e))
@@ -326,17 +354,18 @@ async def delete_multiple(payload: DeleteMultiplePayload, authorization: str):
         f"[POST /v1/book/delete-multiple] dealerId={payload.dealerId}, drawId={payload.drawId}, bookIds={len(payload.bookIds)}"
     )
     try:
-        headers = update_scraper_headers(
+        headers = build_headers(
             authorization=authorization,
             extra_headers={"Content-Type": "application/json;charset=UTF-8"},
         )
-        res = scraper.post(
+        page = stealthy_session.fetch(
             f"{CONSTANTS['SCRAP_API_URL']}/v1/book/delete-multiple",
-            json=payload.dict(),
+            method="POST",
+            data=payload.model_dump(),
             headers=headers,
         )
-        logger.info(f"[POST /v1/book/delete-multiple] Response: {res.status_code}")
-        return JSONResponse(content=res.json(), status_code=res.status_code)
+        logger.info(f"[POST /v1/book/delete-multiple] Response: {page.status}")
+        return JSONResponse(content=page.response.json(), status_code=page.status)
     except Exception as e:
         logger.error(f"[POST /v1/book/delete-multiple] Error: {str(e)}")
         raise HTTPException(status_code=500, detail=str(e))
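All seven proxied endpoints now repeat the same sequence: build headers, fetch through the shared session, log the status, wrap the JSON. A follow-up could fold that into one helper; a sketch under the same assumptions as the diff (the helper name is hypothetical, and fetch() is assumed to accept method= for GETs too):

def proxy_json(
    path: str,
    *,
    method: str = "GET",
    authorization: Optional[str] = None,
    content_type: Optional[str] = None,
    **fetch_kwargs,
) -> JSONResponse:
    """Forward one request through the shared StealthySession and wrap the JSON reply."""
    extra = {"Content-Type": content_type} if content_type else None
    headers = build_headers(authorization=authorization, extra_headers=extra)
    page = stealthy_session.fetch(
        f"{CONSTANTS['SCRAP_API_URL']}{path}",
        method=method,
        headers=headers,
        **fetch_kwargs,  # params=... for GETs, data=... for POSTs
    )
    logger.info(f"[{method} {path}] Response: {page.status}")
    return JSONResponse(content=page.response.json(), status_code=page.status)

With that, dealer_list's try block reduces to a single return proxy_json("/v1/user/dealer-list", method="POST", authorization=authorization, content_type="application/json", data=payload.model_dump()).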

pyapi/pyproject.toml

@@ -9,4 +9,5 @@ dependencies = [
     "fastapi[standard]>=0.128.0",
     "pydantic>=2.12.5",
     "python-dotenv>=1.2.1",
+    "scrapling[all]>=0.3.12",
 ]
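scrapling[all] pulls in the browser-automation extras; per scrapling's docs, the browser binaries also need to be fetched once after installation (the scrapling install command). A quick standalone check of the session configuration used in this commit, assuming StealthySession supports the context-manager protocol (otherwise close it explicitly, as the shutdown hook does); the URL is a placeholder:

# smoke_test.py - verify StealthySession outside of FastAPI
from scrapling.fetchers import StealthySession

with StealthySession(headless=True, solve_cloudflare=True) as session:
    page = session.fetch("https://example.com")  # placeholder target
    print(page.status)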

pyapi/uv.lock generated (1471 lines changed)

File diff suppressed because it is too large.