Set up API endpoints for the proxy
pyapi/main.py | 224
@@ -1,4 +1,10 @@
-from fastapi import FastAPI
+import random
+from typing import Dict, List, Optional
+
+import cloudscraper
+from fastapi import FastAPI, HTTPException
+from fastapi.responses import JSONResponse, Response
+from pydantic import BaseModel
 
 app = FastAPI()
 
@@ -27,10 +33,222 @@ CONSTANTS = {
     },
 }
 
+USER_AGENTS = [
+    "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/126.0.0.0 Safari/537.36",
+    "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/126.0.0.0 Safari/537.36",
+    "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/126.0.0.0 Safari/537.36",
+    "Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:127.0) Gecko/20100101 Firefox/127.0",
+    "Mozilla/5.0 (Macintosh; Intel Mac OS X 10.15; rv:127.0) Gecko/20100101 Firefox/127.0",
+]
+
+
+def get_random_user_agent() -> str:
+    return random.choice(USER_AGENTS)
+
+
+def create_scraper_with_headers(
+    authorization: Optional[str] = None, extra_headers: Optional[Dict[str, str]] = None
+) -> cloudscraper.CloudScraper:
+    scraper = cloudscraper.create_scraper()
+    headers = {
+        **CONSTANTS["SCRAP_API_BASE_HEADERS"],
+        "User-Agent": get_random_user_agent(),
+    }
+
+    if authorization:
+        headers["Authorization"] = authorization
+
+    if extra_headers:
+        headers.update(extra_headers)
+
+    scraper.headers.update(headers)
+    return scraper
+
+
+# Pydantic models for request bodies
+class LoginPayload(BaseModel):
+    userId: str
+    password: str
+    verifyToken: str
+    code: str
+    userType: int
+
+
+class DealerListPayload(BaseModel):
+    page: int
+    pageSize: int
+    parentDistributor: int
+
+
+class DistributorListPayload(BaseModel):
+    page: int
+    pageSize: int
+    parentDistributor: int
+
+
+class BookListPayload(BaseModel):
+    userType: int
+    userIds: List[int]
+    drawId: int
+    startDate: str
+    endDate: str
+    beAdmin: bool
+    containImported: bool
+    keyword: str
+
+
+class AddMultiplePayload(BaseModel):
+    dealerId: int
+    drawId: int
+    closeTime: str
+    date: str
+    changedBalance: int
+    insertData: str
+
+
+class DeleteMultiplePayload(BaseModel):
+    bookIds: List[str]
+    closeTime: str
+    dealerId: int
+    drawId: int
+
+
 @app.get("/ping")
 def ping():
-    return "pong"
+    return {"status": "pong"}
 
 
 # TODO: Implement every single "proxy" endpoint now
+@app.get("/v1/user/get-balance")
+async def get_balance(userId: int, authorization: str):
+    try:
+        scraper = create_scraper_with_headers(authorization=authorization)
+        res = scraper.get(
+            f"{CONSTANTS['SCRAP_API_URL']}/v1/user/get-balance",
+            params={"userId": userId},
+        )
+        return JSONResponse(content=res.json(), status_code=res.status_code)
+    except Exception as e:
+        raise HTTPException(status_code=500, detail=str(e))
+
+
+@app.post("/v1/auth/login")
+async def login(payload: LoginPayload):
+    try:
+        scraper = create_scraper_with_headers(
+            extra_headers={"Content-Type": "application/json"}
+        )
+        res = scraper.post(
+            f"{CONSTANTS['SCRAP_API_URL']}/v1/auth/login", json=payload.dict()
+        )
+        return JSONResponse(content=res.json(), status_code=res.status_code)
+    except Exception as e:
+        raise HTTPException(status_code=500, detail=str(e))
+
+
+@app.get("/verify/image")
+async def get_captcha(uuid: str):
+    try:
+        scraper = create_scraper_with_headers(
+            extra_headers={
+                "Accept": "image/avif,image/webp,image/apng,image/svg+xml,image/*,*/*;q=0.8"
+            }
+        )
+        res = scraper.get(
+            f"{CONSTANTS['SCRAP_API_URL']}/verify/image", params={"uuid": uuid}
+        )
+        return Response(
+            content=res.content, media_type="image/png", status_code=res.status_code
+        )
+    except Exception as e:
+        raise HTTPException(status_code=500, detail=str(e))
+
+
+@app.post("/v1/user/dealer-list")
+async def dealer_list(payload: DealerListPayload, authorization: str):
+    try:
+        scraper = create_scraper_with_headers(
+            authorization=authorization,
+            extra_headers={"Content-Type": "application/json"},
+        )
+        res = scraper.post(
+            f"{CONSTANTS['SCRAP_API_URL']}/v1/user/dealer-list", json=payload.dict()
+        )
+        return JSONResponse(content=res.json(), status_code=res.status_code)
+    except Exception as e:
+        raise HTTPException(status_code=500, detail=str(e))
+
+
+@app.post("/v1/user/distributor-list")
+async def distributor_list(payload: DistributorListPayload, authorization: str):
+    try:
+        scraper = create_scraper_with_headers(
+            authorization=authorization,
+            extra_headers={"Content-Type": "application/json"},
+        )
+        res = scraper.post(
+            f"{CONSTANTS['SCRAP_API_URL']}/v1/user/distributor-list",
+            json=payload.dict(),
+        )
+        return JSONResponse(content=res.json(), status_code=res.status_code)
+    except Exception as e:
+        raise HTTPException(status_code=500, detail=str(e))
+
+
+@app.get("/v1/draw/list-my")
+async def list_draws(userId: int, authorization: str):
+    try:
+        scraper = create_scraper_with_headers(
+            authorization=authorization,
+            extra_headers={"Content-Type": "application/json"},
+        )
+        res = scraper.get(
+            f"{CONSTANTS['SCRAP_API_URL']}/v1/draw/list-my", params={"userId": userId}
+        )
+        return JSONResponse(content=res.json(), status_code=res.status_code)
+    except Exception as e:
+        raise HTTPException(status_code=500, detail=str(e))
+
+
+@app.post("/v1/book/list2")
+async def book_list(payload: BookListPayload, authorization: str):
+    try:
+        scraper = create_scraper_with_headers(
+            authorization=authorization,
+            extra_headers={"Content-Type": "application/json"},
+        )
+        res = scraper.post(
+            f"{CONSTANTS['SCRAP_API_URL']}/v1/book/list2", json=payload.dict()
+        )
+        return JSONResponse(content=res.json(), status_code=res.status_code)
+    except Exception as e:
+        raise HTTPException(status_code=500, detail=str(e))
+
+
+@app.post("/v1/book/add-multiple")
+async def add_multiple(payload: AddMultiplePayload, authorization: str):
+    try:
+        scraper = create_scraper_with_headers(
+            authorization=authorization,
+            extra_headers={"Content-Type": "application/json;charset=UTF-8"},
+        )
+        res = scraper.post(
+            f"{CONSTANTS['SCRAP_API_URL']}/v1/book/add-multiple", json=payload.dict()
+        )
+        return JSONResponse(content=res.json(), status_code=res.status_code)
+    except Exception as e:
+        raise HTTPException(status_code=500, detail=str(e))
+
+
+@app.post("/v1/book/delete-multiple")
+async def delete_multiple(payload: DeleteMultiplePayload, authorization: str):
+    try:
+        scraper = create_scraper_with_headers(
+            authorization=authorization,
+            extra_headers={"Content-Type": "application/json;charset=UTF-8"},
+        )
+        res = scraper.post(
+            f"{CONSTANTS['SCRAP_API_URL']}/v1/book/delete-multiple", json=payload.dict()
+        )
+        return JSONResponse(content=res.json(), status_code=res.status_code)
+    except Exception as e:
+        raise HTTPException(status_code=500, detail=str(e))
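
With the diff applied, the service can be smoke-tested end to end. A minimal sketch, assuming the app is served locally with "uvicorn pyapi.main:app --port 8000" (the host, port, userId, and token value below are placeholders, not part of the commit):

import requests

BASE = "http://127.0.0.1:8000"  # placeholder host/port

# Liveness check against the new /ping response shape
print(requests.get(f"{BASE}/ping").json())  # -> {"status": "pong"}

# Proxied balance lookup; authorization travels as a query parameter
# because the handler declares it as a plain function argument.
res = requests.get(
    f"{BASE}/v1/user/get-balance",
    params={"userId": 123, "authorization": "Bearer <token>"},
)
print(res.status_code, res.json())

Because authorization is a bare function parameter, FastAPI treats it as a required query parameter; reading it from the HTTP Authorization header would instead use fastapi.Header.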
Reference in New Issue
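One caveat worth flagging: every handler is declared async def, but cloudscraper is built on requests and performs blocking I/O, so each upstream call stalls the event loop while it waits. Declaring the handlers with plain def would let FastAPI run them in its threadpool automatically; alternatively, the blocking call can be pushed there explicitly. A sketch of the explicit option for the balance endpoint, behavior otherwise unchanged:

from fastapi.concurrency import run_in_threadpool

@app.get("/v1/user/get-balance")
async def get_balance(userId: int, authorization: str):
    try:
        scraper = create_scraper_with_headers(authorization=authorization)
        # Run the blocking cloudscraper call in the threadpool so the
        # event loop stays free for other requests.
        res = await run_in_threadpool(
            scraper.get,
            f"{CONSTANTS['SCRAP_API_URL']}/v1/user/get-balance",
            params={"userId": userId},
        )
        return JSONResponse(content=res.json(), status_code=res.status_code)
    except Exception as e:
        raise HTTPException(status_code=500, detail=str(e))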
Block a user
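Separately, the nine handlers repeat the same create-scraper / forward / wrap-exception sequence. A generic forwarding helper is one way to collapse that boilerplate; this is only a sketch (the forward name and signature are hypothetical, not something the commit defines, and it drops the charset=UTF-8 Content-Type variant for brevity):

def forward(method, path, authorization=None, json_body=None, params=None):
    # Build a scraper, relay the request upstream, and mirror the
    # upstream JSON body and status code back to the caller.
    try:
        scraper = create_scraper_with_headers(
            authorization=authorization,
            extra_headers={"Content-Type": "application/json"} if json_body else None,
        )
        res = scraper.request(
            method, f"{CONSTANTS['SCRAP_API_URL']}{path}", json=json_body, params=params
        )
        return JSONResponse(content=res.json(), status_code=res.status_code)
    except Exception as e:
        raise HTTPException(status_code=500, detail=str(e))


@app.post("/v1/user/dealer-list")
async def dealer_list(payload: DealerListPayload, authorization: str):
    return forward("POST", "/v1/user/dealer-list", authorization, payload.dict())

The scraper returned by cloudscraper.create_scraper() subclasses requests.Session, so the generic request(method, url, ...) call above is available. One portability note: payload.dict() is the Pydantic v1 spelling; Pydantic v2 deprecates it in favor of payload.model_dump().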