almost transitioned

.gitignore (vendored): 1 change
@@ -3,3 +3,4 @@ build
 .env
 .svelte-kit
 **/.venv
+**/__pycache__
pyapi/main.py: 141 changes
@@ -2,10 +2,10 @@ import logging
 import time
 from typing import Dict, List, Optional
 
-import cloudscraper
 from fastapi import FastAPI, HTTPException, Request
 from fastapi.responses import JSONResponse, Response
 from pydantic import BaseModel
+from scrapling.fetchers import StealthySession
 
 # Configure logging
 logging.basicConfig(
@@ -44,11 +44,32 @@ CONSTANTS = {
     },
 }
 
-# Create a single global cloudscraper instance to maintain session/cookies
-# This solves the Cloudflare challenge once and reuses the session
-scraper = cloudscraper.create_scraper()
-scraper.headers.update(CONSTANTS["SCRAP_API_BASE_HEADERS"])
-logger.info("Cloudscraper instance created")
+# Global StealthySession instance - will be initialized on startup
+stealthy_session: Optional[StealthySession] = None
+
+
+@app.on_event("startup")
+async def startup_event():
+    """Initialize the StealthySession when the app starts"""
+    global stealthy_session
+    logger.info("Initializing StealthySession...")
+    stealthy_session = StealthySession(
+        headless=True,
+        solve_cloudflare=True,
+        max_pages=10,  # Allow up to 10 concurrent requests
+        google_search=False,  # Skip Google search simulation for faster startup
+    )
+    logger.info("StealthySession initialized successfully")
+
+
+@app.on_event("shutdown")
+async def shutdown_event():
+    """Close the StealthySession when the app shuts down"""
+    global stealthy_session
+    if stealthy_session:
+        logger.info("Closing StealthySession...")
+        await stealthy_session.close()
+        logger.info("StealthySession closed successfully")
+
 
 # Middleware for logging all requests
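Side note on the new handlers: @app.on_event("startup"/"shutdown") still works but is deprecated in current FastAPI releases in favor of a lifespan context manager. A minimal sketch of the equivalent wiring, not part of this commit (session arguments copied from the diff; whether close() is awaitable follows the diff's own usage):

    from contextlib import asynccontextmanager
    from fastapi import FastAPI
    from scrapling.fetchers import StealthySession

    @asynccontextmanager
    async def lifespan(app: FastAPI):
        # Startup: create the shared session once, as startup_event does above.
        app.state.stealthy_session = StealthySession(
            headless=True,
            solve_cloudflare=True,
            max_pages=10,
            google_search=False,
        )
        yield
        # Shutdown: mirror shutdown_event; the diff awaits close(), so this does too.
        await app.state.stealthy_session.close()

    app = FastAPI(lifespan=lifespan)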
@@ -73,11 +94,11 @@ async def log_requests(request: Request, call_next):
     return response
 
 
-def update_scraper_headers(
+def build_headers(
     authorization: Optional[str] = None, extra_headers: Optional[Dict[str, str]] = None
 ) -> Dict[str, str]:
-    """Build headers dict to update the global scraper with"""
-    headers = {}
+    """Build headers dict for requests"""
+    headers = CONSTANTS["SCRAP_API_BASE_HEADERS"].copy()
 
     if authorization:
         headers["Authorization"] = authorization
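The substantive change in this hunk: the old helper returned only delta headers and leaned on the global scraper's mutable header state, while build_headers starts every request from a fresh copy of SCRAP_API_BASE_HEADERS, so one request's Authorization can never leak into another. A minimal sketch of that property (the base-header contents are stand-ins, and the extra_headers merge is implied by the signature but falls below this hunk):

    # Stand-in for CONSTANTS["SCRAP_API_BASE_HEADERS"]
    BASE = {"Accept": "application/json", "User-Agent": "Mozilla/5.0"}

    def build_headers(authorization=None, extra_headers=None):
        headers = BASE.copy()  # fresh dict per call; BASE itself is never mutated
        if authorization:
            headers["Authorization"] = authorization
        if extra_headers:
            headers.update(extra_headers)
        return headers

    h1 = build_headers(authorization="Bearer abc")
    h2 = build_headers()
    assert "Authorization" in h1 and "Authorization" not in h2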
@@ -146,14 +167,14 @@ def ping():
 async def get_balance(userId: int, authorization: str):
     logger.info(f"[GET /v1/user/get-balance] userId={userId}")
     try:
-        headers = update_scraper_headers(authorization=authorization)
-        res = scraper.get(
+        headers = build_headers(authorization=authorization)
+        page = stealthy_session.fetch(
             f"{CONSTANTS['SCRAP_API_URL']}/v1/user/get-balance",
             params={"userId": userId},
             headers=headers,
         )
-        logger.info(f"[GET /v1/user/get-balance] Response: {res.status_code}")
-        return JSONResponse(content=res.json(), status_code=res.status_code)
+        logger.info(f"[GET /v1/user/get-balance] Response: {page.status}")
+        return JSONResponse(content=page.response.json(), status_code=page.status)
     except Exception as e:
         logger.error(f"[GET /v1/user/get-balance] Error: {str(e)}")
         raise HTTPException(status_code=500, detail=str(e))
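A quick way to exercise the migrated endpoint locally, sketched under two assumptions: the module is importable as pyapi.main, and authorization is a query parameter (it is declared as a bare str in the handler signature; the route path is taken from the log strings):

    from fastapi.testclient import TestClient
    from pyapi.main import app

    # Entering the client as a context manager fires the startup/shutdown
    # events above, so stealthy_session exists before the first request.
    with TestClient(app) as client:
        r = client.get(
            "/v1/user/get-balance",
            params={"userId": 1, "authorization": "Bearer <token>"},
        )
        print(r.status_code, r.json())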
@@ -163,16 +184,15 @@ async def get_balance(userId: int, authorization: str):
 async def login(payload: LoginPayload):
     logger.info(f"[POST /v1/auth/login] - payload={payload.model_dump()}")
     try:
-        headers = update_scraper_headers(
-            extra_headers={"Content-Type": "application/json"}
-        )
-        res = scraper.post(
+        headers = build_headers(extra_headers={"Content-Type": "application/json"})
+        page = stealthy_session.fetch(
             f"{CONSTANTS['SCRAP_API_URL']}/v1/auth/login",
-            json=payload.model_dump(),
+            method="POST",
+            data=payload.model_dump(),
             headers=headers,
         )
-        logger.info(f"[POST /v1/auth/login] Response: {res.status_code}")
-        return JSONResponse(content=res.json(), status_code=res.status_code)
+        logger.info(f"[POST /v1/auth/login] Response: {page.status}")
+        return JSONResponse(content=page.response.json(), status_code=page.status)
     except Exception as e:
         logger.error(f"[POST /v1/auth/login] Error: {str(e)}")
         raise HTTPException(status_code=500, detail=str(e))
@@ -182,29 +202,33 @@ async def login(payload: LoginPayload):
 async def get_captcha(uuid: str):
     logger.info(f"[GET /verify/image] uuid={uuid}")
     try:
-        headers = update_scraper_headers(
+        headers = build_headers(
             extra_headers={
                 "Accept": "image/avif,image/webp,image/apng,image/svg+xml,image/*,*/*;q=0.8"
             }
         )
-        res = scraper.get(
+        page = stealthy_session.fetch(
             f"{CONSTANTS['SCRAP_API_URL']}/verify/image",
             params={"uuid": uuid},
             headers=headers,
         )
 
-        if res.status_code == 403:
+        if page.status == 403:
             logger.error(
                 "[GET /verify/image] 403 Forbidden - Cloudflare blocked the request"
             )
-            logger.error(f"[GET /verify/image] Response headers: {dict(res.headers)}")
-            logger.error(f"[GET /verify/image] Response text: {res.text}")
+            logger.error(
+                f"[GET /verify/image] Response headers: {dict(page.response.headers)}"
+            )
+            logger.error(f"[GET /verify/image] Response text: {page.response.text}")
 
         logger.info(
-            f"[GET /verify/image] Response: {res.status_code}, size={len(res.content)} bytes"
+            f"[GET /verify/image] Response: {page.status}, size={len(page.response.content)} bytes"
         )
         return Response(
-            content=res.content, media_type="image/png", status_code=res.status_code
+            content=page.response.content,
+            media_type="image/png",
+            status_code=page.status,
         )
     except Exception as e:
         logger.error(f"[GET /verify/image] Error: {str(e)}")
@@ -217,17 +241,18 @@ async def dealer_list(payload: DealerListPayload, authorization: str):
         f"[POST /v1/user/dealer-list] parentDistributor={payload.parentDistributor}, page={payload.page}, pageSize={payload.pageSize}"
     )
     try:
-        headers = update_scraper_headers(
+        headers = build_headers(
             authorization=authorization,
             extra_headers={"Content-Type": "application/json"},
         )
-        res = scraper.post(
+        page = stealthy_session.fetch(
             f"{CONSTANTS['SCRAP_API_URL']}/v1/user/dealer-list",
-            json=payload.dict(),
+            method="POST",
+            data=payload.model_dump(),
             headers=headers,
         )
-        logger.info(f"[POST /v1/user/dealer-list] Response: {res.status_code}")
-        return JSONResponse(content=res.json(), status_code=res.status_code)
+        logger.info(f"[POST /v1/user/dealer-list] Response: {page.status}")
+        return JSONResponse(content=page.response.json(), status_code=page.status)
     except Exception as e:
         logger.error(f"[POST /v1/user/dealer-list] Error: {str(e)}")
         raise HTTPException(status_code=500, detail=str(e))
@@ -239,17 +264,18 @@ async def distributor_list(payload: DistributorListPayload, authorization: str):
         f"[POST /v1/user/distributor-list] parentDistributor={payload.parentDistributor}, page={payload.page}, pageSize={payload.pageSize}"
     )
     try:
-        headers = update_scraper_headers(
+        headers = build_headers(
             authorization=authorization,
             extra_headers={"Content-Type": "application/json"},
         )
-        res = scraper.post(
+        page = stealthy_session.fetch(
             f"{CONSTANTS['SCRAP_API_URL']}/v1/user/distributor-list",
-            json=payload.dict(),
+            method="POST",
+            data=payload.model_dump(),
             headers=headers,
         )
-        logger.info(f"[POST /v1/user/distributor-list] Response: {res.status_code}")
-        return JSONResponse(content=res.json(), status_code=res.status_code)
+        logger.info(f"[POST /v1/user/distributor-list] Response: {page.status}")
+        return JSONResponse(content=page.response.json(), status_code=page.status)
     except Exception as e:
         logger.error(f"[POST /v1/user/distributor-list] Error: {str(e)}")
         raise HTTPException(status_code=500, detail=str(e))
@@ -259,17 +285,17 @@ async def distributor_list(payload: DistributorListPayload, authorization: str):
 async def list_draws(userId: int, authorization: str):
     logger.info(f"[GET /v1/draw/list-my] userId={userId}")
     try:
-        headers = update_scraper_headers(
+        headers = build_headers(
             authorization=authorization,
             extra_headers={"Content-Type": "application/json"},
         )
-        res = scraper.get(
+        page = stealthy_session.fetch(
             f"{CONSTANTS['SCRAP_API_URL']}/v1/draw/list-my",
             params={"userId": userId},
             headers=headers,
         )
-        logger.info(f"[GET /v1/draw/list-my] Response: {res.status_code}")
-        return JSONResponse(content=res.json(), status_code=res.status_code)
+        logger.info(f"[GET /v1/draw/list-my] Response: {page.status}")
+        return JSONResponse(content=page.response.json(), status_code=page.status)
     except Exception as e:
         logger.error(f"[GET /v1/draw/list-my] Error: {str(e)}")
         raise HTTPException(status_code=500, detail=str(e))
@@ -281,17 +307,18 @@ async def book_list(payload: BookListPayload, authorization: str):
         f"[POST /v1/book/list2] drawId={payload.drawId}, userIds={len(payload.userIds)}, date={payload.startDate}"
     )
     try:
-        headers = update_scraper_headers(
+        headers = build_headers(
             authorization=authorization,
             extra_headers={"Content-Type": "application/json"},
         )
-        res = scraper.post(
+        page = stealthy_session.fetch(
             f"{CONSTANTS['SCRAP_API_URL']}/v1/book/list2",
-            json=payload.dict(),
+            method="POST",
+            data=payload.model_dump(),
             headers=headers,
         )
-        logger.info(f"[POST /v1/book/list2] Response: {res.status_code}")
-        return JSONResponse(content=res.json(), status_code=res.status_code)
+        logger.info(f"[POST /v1/book/list2] Response: {page.status}")
+        return JSONResponse(content=page.response.json(), status_code=page.status)
     except Exception as e:
         logger.error(f"[POST /v1/book/list2] Error: {str(e)}")
         raise HTTPException(status_code=500, detail=str(e))
@@ -304,17 +331,18 @@ async def add_multiple(payload: AddMultiplePayload, authorization: str):
         f"[POST /v1/book/add-multiple] dealerId={payload.dealerId}, drawId={payload.drawId}, entries={entries_count}, balance={payload.changedBalance}"
     )
     try:
-        headers = update_scraper_headers(
+        headers = build_headers(
             authorization=authorization,
             extra_headers={"Content-Type": "application/json;charset=UTF-8"},
         )
-        res = scraper.post(
+        page = stealthy_session.fetch(
             f"{CONSTANTS['SCRAP_API_URL']}/v1/book/add-multiple",
-            json=payload.dict(),
+            method="POST",
+            data=payload.model_dump(),
             headers=headers,
         )
-        logger.info(f"[POST /v1/book/add-multiple] Response: {res.status_code}")
-        return JSONResponse(content=res.json(), status_code=res.status_code)
+        logger.info(f"[POST /v1/book/add-multiple] Response: {page.status}")
+        return JSONResponse(content=page.response.json(), status_code=page.status)
     except Exception as e:
         logger.error(f"[POST /v1/book/add-multiple] Error: {str(e)}")
         raise HTTPException(status_code=500, detail=str(e))
@@ -326,17 +354,18 @@ async def delete_multiple(payload: DeleteMultiplePayload, authorization: str):
         f"[POST /v1/book/delete-multiple] dealerId={payload.dealerId}, drawId={payload.drawId}, bookIds={len(payload.bookIds)}"
     )
     try:
-        headers = update_scraper_headers(
+        headers = build_headers(
             authorization=authorization,
             extra_headers={"Content-Type": "application/json;charset=UTF-8"},
         )
-        res = scraper.post(
+        page = stealthy_session.fetch(
             f"{CONSTANTS['SCRAP_API_URL']}/v1/book/delete-multiple",
-            json=payload.dict(),
+            method="POST",
+            data=payload.model_dump(),
             headers=headers,
         )
-        logger.info(f"[POST /v1/book/delete-multiple] Response: {res.status_code}")
-        return JSONResponse(content=res.json(), status_code=res.status_code)
+        logger.info(f"[POST /v1/book/delete-multiple] Response: {page.status}")
+        return JSONResponse(content=page.response.json(), status_code=page.status)
     except Exception as e:
         logger.error(f"[POST /v1/book/delete-multiple] Error: {str(e)}")
         raise HTTPException(status_code=500, detail=str(e))
pyapi/pyproject.toml: 1 change
@@ -9,4 +9,5 @@ dependencies = [
     "fastapi[standard]>=0.128.0",
     "pydantic>=2.12.5",
     "python-dotenv>=1.2.1",
+    "scrapling[all]>=0.3.12",
 ]
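This dependency bump is what regenerates pyapi/uv.lock below: with uv, adding the package (for example via 'uv add "scrapling[all]>=0.3.12"') rewrites pyproject.toml and re-locks in one step, which accounts for the large lockfile diff.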
pyapi/uv.lock (generated): 1471 changes
(Diff suppressed because it is too large.)