API endpoints setup for proxy

bootunloader
2025-12-31 08:06:20 +02:00
parent c91fed55bb
commit 2a122e1551
4 changed files with 236 additions and 3 deletions
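
The change turns the FastAPI app into a thin proxy: each route builds a cloudscraper session, forwards the request to the upstream scrape API, and relays the response back. A minimal smoke test for the new endpoints, assuming the app is served locally (the module name and port are assumptions, e.g. uvicorn main:app --port 8000):

import requests

BASE = "http://127.0.0.1:8000"  # assumed local address, not part of the commit

# /ping returns a small JSON health check
print(requests.get(f"{BASE}/ping").json())  # {"status": "pong"}

# The endpoints accept the upstream Authorization value as a query
# parameter (per the handler signatures), not as an HTTP header.
res = requests.get(
    f"{BASE}/v1/user/get-balance",
    params={"userId": 1, "authorization": "Bearer <token>"},
)
print(res.status_code, res.json())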

@@ -1,4 +1,10 @@
import random
from typing import Dict, List, Optional

import cloudscraper
from fastapi import FastAPI, HTTPException
from fastapi.responses import JSONResponse, Response
from pydantic import BaseModel

app = FastAPI()
@@ -27,10 +33,222 @@ CONSTANTS = {
    },
}

USER_AGENTS = [
    "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/126.0.0.0 Safari/537.36",
    "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/126.0.0.0 Safari/537.36",
    "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/126.0.0.0 Safari/537.36",
    "Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:127.0) Gecko/20100101 Firefox/127.0",
    "Mozilla/5.0 (Macintosh; Intel Mac OS X 10.15; rv:127.0) Gecko/20100101 Firefox/127.0",
]


def get_random_user_agent() -> str:
    return random.choice(USER_AGENTS)


def create_scraper_with_headers(
    authorization: Optional[str] = None, extra_headers: Optional[Dict[str, str]] = None
) -> cloudscraper.CloudScraper:
    # Build a cloudscraper session carrying the base headers, a randomly
    # rotated User-Agent, and any per-request extras (e.g. Authorization).
    scraper = cloudscraper.create_scraper()
    headers = {
        **CONSTANTS["SCRAP_API_BASE_HEADERS"],
        "User-Agent": get_random_user_agent(),
    }
    if authorization:
        headers["Authorization"] = authorization
    if extra_headers:
        headers.update(extra_headers)
    scraper.headers.update(headers)
    return scraper
# Pydantic models for request bodies
class LoginPayload(BaseModel):
    userId: str
    password: str
    verifyToken: str
    code: str
    userType: int


class DealerListPayload(BaseModel):
    page: int
    pageSize: int
    parentDistributor: int


class DistributorListPayload(BaseModel):
    page: int
    pageSize: int
    parentDistributor: int


class BookListPayload(BaseModel):
    userType: int
    userIds: List[int]
    drawId: int
    startDate: str
    endDate: str
    beAdmin: bool
    containImported: bool
    keyword: str


class AddMultiplePayload(BaseModel):
    dealerId: int
    drawId: int
    closeTime: str
    date: str
    changedBalance: int
    insertData: str


class DeleteMultiplePayload(BaseModel):
    bookIds: List[str]
    closeTime: str
    dealerId: int
    drawId: int


@app.get("/ping")
def ping():
    return {"status": "pong"}
# TODO: Implement every single "proxy" endpoint now
# Note: cloudscraper is synchronous, so these async handlers block the
# event loop for the duration of each upstream call.
@app.get("/v1/user/get-balance")
async def get_balance(userId: int, authorization: str):
    try:
        scraper = create_scraper_with_headers(authorization=authorization)
        res = scraper.get(
            f"{CONSTANTS['SCRAP_API_URL']}/v1/user/get-balance",
            params={"userId": userId},
        )
        return JSONResponse(content=res.json(), status_code=res.status_code)
    except Exception as e:
        raise HTTPException(status_code=500, detail=str(e))


@app.post("/v1/auth/login")
async def login(payload: LoginPayload):
    try:
        scraper = create_scraper_with_headers(
            extra_headers={"Content-Type": "application/json"}
        )
        # pydantic v2 (pinned in pyproject): model_dump() replaces the
        # deprecated dict()
        res = scraper.post(
            f"{CONSTANTS['SCRAP_API_URL']}/v1/auth/login", json=payload.model_dump()
        )
        return JSONResponse(content=res.json(), status_code=res.status_code)
    except Exception as e:
        raise HTTPException(status_code=500, detail=str(e))


@app.get("/verify/image")
async def get_captcha(uuid: str):
    try:
        scraper = create_scraper_with_headers(
            extra_headers={
                "Accept": "image/avif,image/webp,image/apng,image/svg+xml,image/*,*/*;q=0.8"
            }
        )
        res = scraper.get(
            f"{CONSTANTS['SCRAP_API_URL']}/verify/image", params={"uuid": uuid}
        )
        return Response(
            content=res.content, media_type="image/png", status_code=res.status_code
        )
    except Exception as e:
        raise HTTPException(status_code=500, detail=str(e))


@app.post("/v1/user/dealer-list")
async def dealer_list(payload: DealerListPayload, authorization: str):
    try:
        scraper = create_scraper_with_headers(
            authorization=authorization,
            extra_headers={"Content-Type": "application/json"},
        )
        res = scraper.post(
            f"{CONSTANTS['SCRAP_API_URL']}/v1/user/dealer-list",
            json=payload.model_dump(),
        )
        return JSONResponse(content=res.json(), status_code=res.status_code)
    except Exception as e:
        raise HTTPException(status_code=500, detail=str(e))


@app.post("/v1/user/distributor-list")
async def distributor_list(payload: DistributorListPayload, authorization: str):
    try:
        scraper = create_scraper_with_headers(
            authorization=authorization,
            extra_headers={"Content-Type": "application/json"},
        )
        res = scraper.post(
            f"{CONSTANTS['SCRAP_API_URL']}/v1/user/distributor-list",
            json=payload.model_dump(),
        )
        return JSONResponse(content=res.json(), status_code=res.status_code)
    except Exception as e:
        raise HTTPException(status_code=500, detail=str(e))


@app.get("/v1/draw/list-my")
async def list_draws(userId: int, authorization: str):
    try:
        scraper = create_scraper_with_headers(
            authorization=authorization,
            extra_headers={"Content-Type": "application/json"},
        )
        res = scraper.get(
            f"{CONSTANTS['SCRAP_API_URL']}/v1/draw/list-my", params={"userId": userId}
        )
        return JSONResponse(content=res.json(), status_code=res.status_code)
    except Exception as e:
        raise HTTPException(status_code=500, detail=str(e))


@app.post("/v1/book/list2")
async def book_list(payload: BookListPayload, authorization: str):
    try:
        scraper = create_scraper_with_headers(
            authorization=authorization,
            extra_headers={"Content-Type": "application/json"},
        )
        res = scraper.post(
            f"{CONSTANTS['SCRAP_API_URL']}/v1/book/list2", json=payload.model_dump()
        )
        return JSONResponse(content=res.json(), status_code=res.status_code)
    except Exception as e:
        raise HTTPException(status_code=500, detail=str(e))


@app.post("/v1/book/add-multiple")
async def add_multiple(payload: AddMultiplePayload, authorization: str):
    try:
        scraper = create_scraper_with_headers(
            authorization=authorization,
            extra_headers={"Content-Type": "application/json;charset=UTF-8"},
        )
        res = scraper.post(
            f"{CONSTANTS['SCRAP_API_URL']}/v1/book/add-multiple",
            json=payload.model_dump(),
        )
        return JSONResponse(content=res.json(), status_code=res.status_code)
    except Exception as e:
        raise HTTPException(status_code=500, detail=str(e))


@app.post("/v1/book/delete-multiple")
async def delete_multiple(payload: DeleteMultiplePayload, authorization: str):
    try:
        scraper = create_scraper_with_headers(
            authorization=authorization,
            extra_headers={"Content-Type": "application/json;charset=UTF-8"},
        )
        res = scraper.post(
            f"{CONSTANTS['SCRAP_API_URL']}/v1/book/delete-multiple",
            json=payload.model_dump(),
        )
        return JSONResponse(content=res.json(), status_code=res.status_code)
    except Exception as e:
        raise HTTPException(status_code=500, detail=str(e))
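
Every handler follows the same pattern: create a scraper with the forwarded Authorization, call the matching upstream route, and relay status code and JSON body unchanged. The repetition could later be factored into one relay helper; a sketch under that assumption (relay_post is a hypothetical name, not part of this commit):

# Hypothetical helper, not in the diff: one relay for all JSON POST routes.
def relay_post(path: str, body: dict, authorization: Optional[str] = None):
    scraper = create_scraper_with_headers(
        authorization=authorization,
        extra_headers={"Content-Type": "application/json"},
    )
    res = scraper.post(f"{CONSTANTS['SCRAP_API_URL']}{path}", json=body)
    return JSONResponse(content=res.json(), status_code=res.status_code)

With it, dealer_list would reduce to relay_post("/v1/user/dealer-list", payload.model_dump(), authorization).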

@@ -7,4 +7,6 @@ requires-python = ">=3.13"
dependencies = [
    "cloudscraper>=1.2.71",
    "fastapi>=0.128.0",
    "pydantic>=2.12.5",
    "python-dotenv>=1.2.1",
]
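
python-dotenv is added here although none of the shown hunks import it; presumably it loads configuration such as the upstream URL. The usual pattern, stated as an assumption rather than something visible in this diff (the SCRAP_API_URL env key is hypothetical):

import os
from dotenv import load_dotenv

load_dotenv()  # read a .env file into the process environment
SCRAP_API_URL = os.environ["SCRAP_API_URL"]  # hypothetical .env key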

pyapi/uv.lock (generated)
@@ -126,12 +126,16 @@ source = { virtual = "." }
dependencies = [
    { name = "cloudscraper" },
    { name = "fastapi" },
    { name = "pydantic" },
    { name = "python-dotenv" },
]

[package.metadata]
requires-dist = [
    { name = "cloudscraper", specifier = ">=1.2.71" },
    { name = "fastapi", specifier = ">=0.128.0" },
    { name = "pydantic", specifier = ">=2.12.5" },
    { name = "python-dotenv", specifier = ">=1.2.1" },
]
[[package]]
@@ -219,6 +223,15 @@ wheels = [
    { url = "https://files.pythonhosted.org/packages/8b/40/2614036cdd416452f5bf98ec037f38a1afb17f327cb8e6b652d4729e0af8/pyparsing-3.3.1-py3-none-any.whl", hash = "sha256:023b5e7e5520ad96642e2c6db4cb683d3970bd640cdf7115049a6e9c3682df82", size = 121793 },
]

[[package]]
name = "python-dotenv"
version = "1.2.1"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/f0/26/19cadc79a718c5edbec86fd4919a6b6d3f681039a2f6d66d14be94e75fb9/python_dotenv-1.2.1.tar.gz", hash = "sha256:42667e897e16ab0d66954af0e60a9caa94f0fd4ecf3aaf6d2d260eec1aa36ad6", size = 44221 }
wheels = [
    { url = "https://files.pythonhosted.org/packages/14/1b/a298b06749107c305e1fe0f814c6c74aea7b2f1e10989cb30f544a1b3253/python_dotenv-1.2.1-py3-none-any.whl", hash = "sha256:b81ee9561e9ca4004139c6cbba3a238c32b03e4894671e181b671e8cb8425d61", size = 21230 },
]

[[package]]
name = "requests"
version = "2.32.5"