yuh and so it beginzzzzz

bootunloader
2026-01-03 13:21:39 +02:00
parent c7ff53e8ac
commit a39f137b03
15 changed files with 690 additions and 132 deletions

.gitignore (vendored, 2 lines added)

@@ -4,3 +4,5 @@ build
.svelte-kit
**/.venv
**/__pycache__
**/**.log
**/**-audit.json

package.json

@@ -42,6 +42,8 @@
"ulid": "^2.3.0",
"uuid": "^9.0.0",
"vite": "^5.3.5",
"winston": "^3.19.0",
"winston-daily-rotate-file": "^5.0.0",
"zod": "^3.21.4"
},
"devDependencies": {

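The two new runtime dependencies pair winston (the logger core) with winston-daily-rotate-file (a transport that rolls log files by date). A minimal sketch of how they compose, with illustrative option values rather than this repo's actual config (that lands in src/lib/server/logger.ts further down):

import winston from "winston";
import DailyRotateFile from "winston-daily-rotate-file";

// One file per day, gzipped after rotation, pruned after 14 days.
const rotating = new DailyRotateFile({
  filename: "logs/app-%DATE%.log",
  datePattern: "YYYY-MM-DD",
  zippedArchive: true,
  maxFiles: "14d",
});

const logger = winston.createLogger({
  level: "info",
  format: winston.format.combine(winston.format.timestamp(), winston.format.json()),
  transports: [rotating, new winston.transports.Console()],
});

logger.info("daily rotation wired up");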
pnpm-lock.yaml (generated, 208 lines changed)

@@ -95,6 +95,12 @@ importers:
vite:
specifier: ^5.3.5
version: 5.3.5(@types/node@20.6.4)
winston:
specifier: ^3.19.0
version: 3.19.0
winston-daily-rotate-file:
specifier: ^5.0.0
version: 5.0.0(winston@3.19.0)
zod:
specifier: ^3.21.4
version: 3.22.2
@@ -170,6 +176,13 @@ packages:
'@antfu/utils@0.7.6':
resolution: {integrity: sha512-pvFiLP2BeOKA/ZOS6jxx4XhKzdVLHDhGlFEaZ2flWWYf2xOqVniqpk38I04DFRyz+L0ASggl7SkItTc+ZLju4w==}
'@colors/colors@1.6.0':
resolution: {integrity: sha512-Ir+AOibqzrIsL6ajt3Rz3LskB7OiMVHqltZmspbW/TJuTVuyOMirVqAkjfY6JISiLHgyNqicAC8AyHHGzNd/dA==}
engines: {node: '>=0.1.90'}
'@dabh/diagnostics@2.0.8':
resolution: {integrity: sha512-R4MSXTVnuMzGD7bzHdW2ZhhdPC/igELENcq5IjEverBvq5hn1SXCWcsi6eSsdWP0/Ur+SItRRjAktmdoX/8R/Q==}
'@esbuild/aix-ppc64@0.21.5':
resolution: {integrity: sha512-1SDgH6ZSPTlggy1yI6+Dbkiz8xzpHJEVAlF/AM1tHPLsf5STom9rwtjE4hKAF20FfXXNTFqEYXyJNWh1GiZedQ==}
engines: {node: '>=12'}
@@ -630,6 +643,9 @@ packages:
'@sinclair/typebox@0.27.8':
resolution: {integrity: sha512-+Fj43pSMwJs4KRrH/938Uf+uAELIgVBmQzg/q1YG10djyfA3TnrU8N8XzqCh/okZdszqBQTZf96idMfE5lnwTA==}
'@so-ric/colorspace@1.1.6':
resolution: {integrity: sha512-/KiKkpHNOBgkFJwu9sh48LkHSMYGyuTcSFK/qMBdnOAlrRJzRSXAOFB5qwzaVQuDl8wAvHVMkaASQDReTahxuw==}
'@sveltejs/adapter-auto@2.1.0':
resolution: {integrity: sha512-o2pZCfATFtA/Gw/BB0Xm7k4EYaekXxaPGER3xGSY3FvzFJGTlJlZjBseaXwYSM94lZ0HniOjTokN3cWaLX6fow==}
peerDependencies:
@@ -731,6 +747,9 @@ packages:
'@types/resolve@1.20.2':
resolution: {integrity: sha512-60BCwRFOZCQhDncwQdxxeOEEkbc5dIMccYLwbxsS4TUNeVECQ/pBJ0j09mrHOl/JJvpRPGwO9SvE4nR2Nb/a4Q==}
'@types/triple-beam@1.3.5':
resolution: {integrity: sha512-6WaYesThRMCl19iryMYP7/x2OVgCtbIVflDGFpWnb9irXI3UjYE4AzmYuiUKY1AJstGijoY+MgUszMgRxIYTYw==}
'@types/uuid@9.0.4':
resolution: {integrity: sha512-zAuJWQflfx6dYJM62vna+Sn5aeSWhh3OB+wfUEACNcqUSc0AGc5JKl+ycL1vrH7frGTXhJchYjE1Hak8L819dA==}
@@ -785,6 +804,9 @@ packages:
assertion-error@1.1.0:
resolution: {integrity: sha512-jgsaNduz+ndvGyFt3uSuWqvy4lCnIJiovtouQN5JZHOKCS2QuhEdbcQHFhVksz2N2U9hXJo8odG7ETyWlEeuDw==}
async@3.2.6:
resolution: {integrity: sha512-htCUDlxyyCLMgaM3xXg0C0LW2xqfuQ6p05pCEIsXuyQ+a1koYKTuBMzRNwmybfLgvJDMd0r1LTn4+E0Ti6C2AA==}
autoprefixer@10.4.20:
resolution: {integrity: sha512-XY25y5xSv/wEoqzDyXXME4AFfkZI0P23z6Fs3YgymDnKJkCGOnkL0iTxCa85UTqaSgfcqyf3UA6+c7wUvx/16g==}
engines: {node: ^10 || ^12 || >=14}
@@ -873,6 +895,22 @@ packages:
code-red@1.0.4:
resolution: {integrity: sha512-7qJWqItLA8/VPVlKJlFXU+NBlo/qyfs39aJcuMT/2ere32ZqvF5OSxgdM5xOfJJ7O429gg2HM47y8v9P+9wrNw==}
color-convert@3.1.3:
resolution: {integrity: sha512-fasDH2ont2GqF5HpyO4w0+BcewlhHEZOFn9c1ckZdHpJ56Qb7MHhH/IcJZbBGgvdtwdwNbLvxiBEdg336iA9Sg==}
engines: {node: '>=14.6'}
color-name@2.1.0:
resolution: {integrity: sha512-1bPaDNFm0axzE4MEAzKPuqKWeRaT43U/hyxKPBdqTfmPF+d6n7FSoTFxLVULUJOmiLp01KjhIPPH+HrXZJN4Rg==}
engines: {node: '>=12.20'}
color-string@2.1.4:
resolution: {integrity: sha512-Bb6Cq8oq0IjDOe8wJmi4JeNn763Xs9cfrBcaylK1tPypWzyoy2G3l90v9k64kjphl/ZJjPIShFztenRomi8WTg==}
engines: {node: '>=18'}
color@5.0.3:
resolution: {integrity: sha512-ezmVcLR3xAVp8kYOm4GS45ZLLgIE6SPAFoduLr6hTDajwb3KZ2F46gulK3XpcwRFb5KKGCSezCBAY4Dw4HsyXA==}
engines: {node: '>=18'}
commander@4.1.1:
resolution: {integrity: sha512-NOKm8xhkzAjzFx8B2v5OAHT+u5pRQc2UCa2Vq9jYL/31o2wi9mxBA7LIFs3sV5VSC49z6pEhfbMULvShKj26WA==}
engines: {node: '>= 6'}
@@ -964,6 +1002,9 @@ packages:
electron-to-chromium@1.5.4:
resolution: {integrity: sha512-orzA81VqLyIGUEA77YkVA1D+N+nNfl2isJVjjmOyrlxuooZ19ynb+dOlaDTqd/idKRS9lDCSBmtzM+kyCsMnkA==}
enabled@2.0.0:
resolution: {integrity: sha512-AKrN98kuwOzMIdAizXGI86UFBoo26CL21UM763y1h/GMSJ4/OHU9k2YlsmBpyScFo/wbLzWQJBMCW4+IO3/+OQ==}
es6-promise@3.3.1:
resolution: {integrity: sha512-SOp9Phqvqn7jtEUxPWdWfWoLmyt2VaJ6MpvP9Comy1MceMXqE6bxvaTu4iaxpYYPzhny28Lc+M87/c2cPK6lDg==}
@@ -1005,10 +1046,16 @@ packages:
fastq@1.15.0:
resolution: {integrity: sha512-wBrocU2LCXXa+lWBt8RoIRD89Fi8OdABODa/kEnyeyjS5aZO5/GNvI5sEINADqP/h8M29UHTHUb53sUu5Ihqdw==}
fecha@4.2.3:
resolution: {integrity: sha512-OP2IUU6HeYKJi3i0z4A19kHMQoLVs4Hc+DPqqxI2h/DPZHTm/vjsfC6P0b4jCMy14XizLBqvndQ+UilD7707Jw==}
fetch-blob@3.2.0:
resolution: {integrity: sha512-7yAQpD2UMJzLi1Dqv7qFYnPbaPx7ZfFK6PiIxQ4PfkGPyNyl2Ugx+a/umUonmKqjhM4DnfbMvdX6otXq83soQQ==}
engines: {node: ^12.20 || >= 14.13}
file-stream-rotator@0.6.1:
resolution: {integrity: sha512-u+dBid4PvZw17PmDeRcNOtCP9CCK/9lRN2w+r1xIS7yOL9JFrIBKTvrYsxT4P0pGtThYTn++QS5ChHaUov3+zQ==}
fill-range@7.0.1:
resolution: {integrity: sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==}
engines: {node: '>=8'}
@@ -1017,6 +1064,9 @@ packages:
resolution: {integrity: sha512-78/PXT1wlLLDgTzDs7sjq9hzz0vXD+zn+7wypEe4fXQxCmdmqfGsEPQxmiCSQI3ajFV91bVSsvNtrJRiW6nGng==}
engines: {node: '>=10'}
fn.name@1.1.0:
resolution: {integrity: sha512-GRnmB5gPyJpAhTQdSZTSp9uaPSvl09KoYcMQtsB9rQoOmzs9dH6ffeccH+Z+cv6P68Hu5bC6JjRh4Ah/mHSNRw==}
focus-trap@7.5.2:
resolution: {integrity: sha512-p6vGNNWLDGwJCiEjkSK6oERj/hEyI9ITsSwIUICBoKLlWiTWXJRfQibCwcoi50rTZdbi87qDtUlMCmQwsGSgPw==}
@@ -1160,6 +1210,9 @@ packages:
kolorist@1.8.0:
resolution: {integrity: sha512-Y+60/zizpJ3HRH8DCss+q95yr6145JXZo46OTpFvDZWLfRCE4qChOyk1b26nMaNpfHHgxagk9dXT5OP0Tfe+dQ==}
kuler@2.0.0:
resolution: {integrity: sha512-Xq9nH7KlWZmXAtodXDDRE7vs6DU1gTU8zYDHDiWLSip45Egwq3plLHzPn27NgvzL2r1LMPC1vdqh98sQxtqj4A==}
lilconfig@2.1.0:
resolution: {integrity: sha512-utWOt/GHzuUxnLKxB6dk81RoOeoNeHgbrXiuGk4yyF5qlRz+iIVWu56E2fqGHFrXz0QNUhLB/8nKqvRH66JKGQ==}
engines: {node: '>=10'}
@@ -1193,6 +1246,10 @@ packages:
lodash.merge@4.6.2:
resolution: {integrity: sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ==}
logform@2.7.0:
resolution: {integrity: sha512-TFYA4jnP7PVbmlBIfhlSe+WKxs9dklXMTEGcBCIvLhE/Tn3H6Gk1norupVW7m5Cnd4bLcr08AytbyV/xj7f/kQ==}
engines: {node: '>= 12.0.0'}
loupe@2.3.6:
resolution: {integrity: sha512-RaPMZKiMy8/JruncMU5Bt6na1eftNoo++R4Y+N2FrxkDVTrGvcyzFTsaGif4QTeKESheMGegbhw6iUAq+5A8zA==}
@@ -1257,6 +1314,9 @@ packages:
mlly@1.4.2:
resolution: {integrity: sha512-i/Ykufi2t1EZ6NaPLdfnZk2AX8cs0d+mTzVKuPfqPKPatxLApaBoxJQ9x1/uckXtrS/U5oisPMDkNs0yQTaBRg==}
moment@2.30.1:
resolution: {integrity: sha512-uEmtNhbDOrWPFS+hdjFCBfy9f2YoyzRpwcl+DqpC6taX21FzsTLQVbMV/W7PzNSX6x/bhC1zA3c2UQ5NzH6how==}
mri@1.2.0:
resolution: {integrity: sha512-tzzskb3bG8LvYGFF/mDTpq3jpI6Q9wc3LEmBaghu+DdCssd1FakN7Bc0hVNmEyGq1bq3RgfkCb3cmQLpNPOroA==}
engines: {node: '>=4'}
@@ -1320,6 +1380,9 @@ packages:
once@1.4.0:
resolution: {integrity: sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==}
one-time@1.0.0:
resolution: {integrity: sha512-5DXOiRKwuSEcQ/l0kGCF6Q3jcADFv5tSmRaJck/OqkVFcOzutB134KRSfF0xDrL39MNnqxbHBbUUcjZIhTgb2g==}
onetime@5.1.2:
resolution: {integrity: sha512-kbpaSSGJTWdAY5KPVeMOKXSrPtr8C8C7wodJbcsd51jRnmD+GZu8Y0VoU6Dm5Z4vWr0Ig/1NKuWRKf7j5aaYSg==}
engines: {node: '>=6'}
@@ -1446,6 +1509,10 @@ packages:
read-cache@1.0.0:
resolution: {integrity: sha512-Owdv/Ft7IjOgm/i0xvNDZ1LrRANRfew4b2prF3OWMQLxLfu3bS8FVhCsrSCMK4lR56Y9ya+AThoTpDCTxCmpRA==}
readable-stream@3.6.2:
resolution: {integrity: sha512-9u/sniCrY3D5WdsERHzHE4G2YCXqoG5FTHUiCC4SIbr6XcLZBY05ya9EKjYek9O5xOAwjGq+1JdGBAS7Q9ScoA==}
engines: {node: '>= 6'}
readdirp@3.6.0:
resolution: {integrity: sha512-hOS089on8RduqdbhvQ5Z37A0ESjsqz6qnRcffsMU3495FuTdqSm+7bhJ29JvIOsBDEEnan5DPu9t3To9VRlMzA==}
engines: {node: '>=8.10.0'}
@@ -1494,6 +1561,13 @@ packages:
resolution: {integrity: sha512-xal3CZX1Xlo/k4ApwCFrHVACi9fBqJ7V+mwhBsuf/1IOKbBy098Fex+Wa/5QMubw09pSZ/u8EY8PWgevJsXp1A==}
engines: {node: '>=6'}
safe-buffer@5.2.1:
resolution: {integrity: sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==}
safe-stable-stringify@2.5.0:
resolution: {integrity: sha512-b3rppTKm9T+PsVCBEOUR46GWI7fdOs00VKZ1+9c1EWDaDMvjQc6tUwuFyIprgGgTcWoVHSKrU8H31ZHA2e0RHA==}
engines: {node: '>=10'}
sander@0.5.1:
resolution: {integrity: sha512-3lVqBir7WuKDHGrKRDn/1Ye3kwpXaDOMsiRP1wd6wpZW56gJhsbp5RqQpA6JG/P+pkXizygnr1dKR8vzWaVsfA==}
@@ -1533,6 +1607,9 @@ packages:
resolution: {integrity: sha512-itJW8lvSA0TXEphiRoawsCksnlf8SyvmFzIhltqAHluXd88pkCd+cXJVHTDwdCr0IzwptSm035IHQktUu1QUMg==}
engines: {node: '>=0.10.0'}
stack-trace@0.0.10:
resolution: {integrity: sha512-KGzahc7puUKkzyMt+IqAep+TVNbKP+k2Lmwhub39m1AsTSkaDutx56aDCo+HLDzf/D26BIHTJWNiTG1KAJiQCg==}
stackback@0.0.2:
resolution: {integrity: sha512-1XMJE5fQo1jGH6Y/7ebnwPOBEkIEnT4QF32d5R1+VXdXveM0IBMJt8zfaxX1P3QhVwrYe+576+jkANtSS2mBbw==}
@@ -1546,6 +1623,9 @@ packages:
resolution: {integrity: sha512-Mcc5wHehp9aXz1ax6bZUyY5afg9u2rv5cqQI3mRrYkGC8rW2hM02jWuwjtL++LS5qinSyhj2QfLyNsuc+VsExg==}
engines: {node: '>=10.0.0'}
string_decoder@1.3.0:
resolution: {integrity: sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA==}
strip-final-newline@2.0.0:
resolution: {integrity: sha512-BrpvfNAE3dcvq7ll3xVumzjKjZQ5tI1sEUIKr3Uoks0XUl45St3FlatVqef9prk4jRDzhW6WZg+3bk93y6pLjA==}
engines: {node: '>=6'}
@@ -1665,6 +1745,9 @@ packages:
engines: {node: '>=14.0.0'}
hasBin: true
text-hex@1.0.0:
resolution: {integrity: sha512-uuVGNWzgJ4yhRaNSiubPY7OjISw4sw4E5Uv0wbjp+OzcbmVU/rsT8ujgcXJhn9ypzsgr5vlzpPqP+MBBKcGvbg==}
thenify-all@1.6.0:
resolution: {integrity: sha512-RNxQH/qI8/t3thXJDwcstUO4zeqo64+Uy/+sNVRBx4Xn2OX+OZ9oP+iJnNFqplFra2ZUVeKCSa2oVWi3T4uVmA==}
engines: {node: '>=0.8'}
@@ -1694,6 +1777,10 @@ packages:
resolution: {integrity: sha512-sf4i37nQ2LBx4m3wB74y+ubopq6W/dIzXg0FDGjsYnZHVa1Da8FH853wlL2gtUhg+xJXjfk3kUZS3BRoQeoQBQ==}
engines: {node: '>=6'}
triple-beam@1.4.1:
resolution: {integrity: sha512-aZbgViZrg1QNcG+LULa7nhZpJTZSLm/mXnHXnbAbjmN5aSa0y7V+wvv6+4WaBtpISJzThKy+PIPxc1Nq1EJ9mg==}
engines: {node: '>= 14.0.0'}
trpc-svelte-query-adapter@2.1.0:
resolution: {integrity: sha512-PQP/OurS8Vr6NtZyk0SOXry4yQaQpz9PBvnBHNHT7TMASyUavHqe/x95QuUlMsa6+KXN2o2bz2Qcz6Qt2JDBvQ==}
@@ -1907,6 +1994,20 @@ packages:
engines: {node: '>=8'}
hasBin: true
winston-daily-rotate-file@5.0.0:
resolution: {integrity: sha512-JDjiXXkM5qvwY06733vf09I2wnMXpZEhxEVOSPenZMii+g7pcDcTBt2MRugnoi8BwVSuCT2jfRXBUy+n1Zz/Yw==}
engines: {node: '>=8'}
peerDependencies:
winston: ^3
winston-transport@4.9.0:
resolution: {integrity: sha512-8drMJ4rkgaPo1Me4zD/3WLfI/zPdA9o2IipKODunnGDcuqbHwjsbB79ylv04LCGGzU0xQ6vTznOMpQGaLhhm6A==}
engines: {node: '>= 12.0.0'}
winston@3.19.0:
resolution: {integrity: sha512-LZNJgPzfKR+/J3cHkxcpHKpKKvGfDZVPS4hfJCc4cCG0CgYzvlD6yE/S3CIL/Yt91ak327YCpiF/0MyeZHEHKA==}
engines: {node: '>= 12.0.0'}
wrappy@1.0.2:
resolution: {integrity: sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==}
@@ -1953,6 +2054,14 @@ snapshots:
'@antfu/utils@0.7.6': {}
'@colors/colors@1.6.0': {}
'@dabh/diagnostics@2.0.8':
dependencies:
'@so-ric/colorspace': 1.1.6
enabled: 2.0.0
kuler: 2.0.0
'@esbuild/aix-ppc64@0.21.5':
optional: true
@@ -2262,6 +2371,11 @@ snapshots:
'@sinclair/typebox@0.27.8': {}
'@so-ric/colorspace@1.1.6':
dependencies:
color: 5.0.3
text-hex: 1.0.0
'@sveltejs/adapter-auto@2.1.0(@sveltejs/kit@1.25.0(svelte@4.2.1)(vite@5.3.5(@types/node@20.6.4)))':
dependencies:
'@sveltejs/kit': 1.25.0(svelte@4.2.1)(vite@5.3.5(@types/node@20.6.4))
@@ -2381,6 +2495,8 @@ snapshots:
'@types/resolve@1.20.2': {}
'@types/triple-beam@1.3.5': {}
'@types/uuid@9.0.4': {}
'@vitest/expect@0.33.0':
@@ -2440,6 +2556,8 @@ snapshots:
assertion-error@1.1.0: {}
async@3.2.6: {}
autoprefixer@10.4.20(postcss@8.4.40):
dependencies:
browserslist: 4.23.3
@@ -2539,6 +2657,21 @@ snapshots:
estree-walker: 3.0.3
periscopic: 3.1.0
color-convert@3.1.3:
dependencies:
color-name: 2.1.0
color-name@2.1.0: {}
color-string@2.1.4:
dependencies:
color-name: 2.1.0
color@5.0.3:
dependencies:
color-convert: 3.1.3
color-string: 2.1.4
commander@4.1.1: {}
commondir@1.0.1: {}
@@ -2598,6 +2731,8 @@ snapshots:
electron-to-chromium@1.5.4: {}
enabled@2.0.0: {}
es6-promise@3.3.1: {}
esbuild@0.18.20:
@@ -2687,11 +2822,17 @@ snapshots:
dependencies:
reusify: 1.0.4
fecha@4.2.3: {}
fetch-blob@3.2.0:
dependencies:
node-domexception: 1.0.0
web-streams-polyfill: 3.3.3
file-stream-rotator@0.6.1:
dependencies:
moment: 2.30.1
fill-range@7.0.1:
dependencies:
to-regex-range: 5.0.1
@@ -2701,6 +2842,8 @@ snapshots:
locate-path: 6.0.0
path-exists: 4.0.0
fn.name@1.1.0: {}
focus-trap@7.5.2:
dependencies:
tabbable: 6.2.0
@@ -2849,6 +2992,8 @@ snapshots:
kolorist@1.8.0: {}
kuler@2.0.0: {}
lilconfig@2.1.0: {}
lines-and-columns@1.2.4: {}
@@ -2871,6 +3016,15 @@ snapshots:
lodash.merge@4.6.2: {}
logform@2.7.0:
dependencies:
'@colors/colors': 1.6.0
'@types/triple-beam': 1.3.5
fecha: 4.2.3
ms: 2.1.2
safe-stable-stringify: 2.5.0
triple-beam: 1.4.1
loupe@2.3.6:
dependencies:
get-func-name: 2.0.0
@@ -2929,6 +3083,8 @@ snapshots:
pkg-types: 1.0.3
ufo: 1.3.0
moment@2.30.1: {}
mri@1.2.0: {}
mrmime@1.0.1: {}
@@ -2973,6 +3129,10 @@ snapshots:
dependencies:
wrappy: 1.0.2
one-time@1.0.0:
dependencies:
fn.name: 1.1.0
onetime@5.1.2:
dependencies:
mimic-fn: 2.1.0
@@ -3085,6 +3245,12 @@ snapshots:
dependencies:
pify: 2.3.0
readable-stream@3.6.2:
dependencies:
inherits: 2.0.4
string_decoder: 1.3.0
util-deprecate: 1.0.2
readdirp@3.6.0:
dependencies:
picomatch: 2.3.1
@@ -3145,6 +3311,10 @@ snapshots:
dependencies:
mri: 1.2.0
safe-buffer@5.2.1: {}
safe-stable-stringify@2.5.0: {}
sander@0.5.1:
dependencies:
es6-promise: 3.3.1
@@ -3183,6 +3353,8 @@ snapshots:
source-map-js@1.2.0: {}
stack-trace@0.0.10: {}
stackback@0.0.2: {}
standard-as-callback@2.1.0: {}
@@ -3191,6 +3363,10 @@ snapshots:
streamsearch@1.1.0: {}
string_decoder@1.3.0:
dependencies:
safe-buffer: 5.2.1
strip-final-newline@2.0.0: {}
strip-indent@3.0.0:
@@ -3334,6 +3510,8 @@ snapshots:
transitivePeerDependencies:
- ts-node
text-hex@1.0.0: {}
thenify-all@1.6.0:
dependencies:
thenify: 3.3.1
@@ -3359,6 +3537,8 @@ snapshots:
totalist@3.0.1: {}
triple-beam@1.4.1: {}
trpc-svelte-query-adapter@2.1.0:
dependencies:
'@tanstack/svelte-query': 4.35.3(svelte@3.59.2)
@@ -3540,6 +3720,34 @@ snapshots:
siginfo: 2.0.0
stackback: 0.0.2
winston-daily-rotate-file@5.0.0(winston@3.19.0):
dependencies:
file-stream-rotator: 0.6.1
object-hash: 3.0.0
triple-beam: 1.4.1
winston: 3.19.0
winston-transport: 4.9.0
winston-transport@4.9.0:
dependencies:
logform: 2.7.0
readable-stream: 3.6.2
triple-beam: 1.4.1
winston@3.19.0:
dependencies:
'@colors/colors': 1.6.0
'@dabh/diagnostics': 2.0.8
async: 3.2.6
is-stream: 2.0.1
logform: 2.7.0
one-time: 1.0.0
readable-stream: 3.6.2
safe-stable-stringify: 2.5.0
stack-trace: 0.0.10
triple-beam: 1.4.1
winston-transport: 4.9.0
wrappy@1.0.2: {}
ws@8.14.2: {}


@@ -1,8 +1,9 @@
import json
import logging
import os
import time
from typing import Dict, List, Optional
from urllib.parse import urlencode
from urllib.parse import urlencode, quote
import aiohttp
from dotenv import load_dotenv
@@ -24,9 +25,9 @@ app = FastAPI()
logger.info("FastAPI Proxy Server initialized")
SCRAPERAPI_API_KEY = os.getenv("SCRAPERAPI_API_KEY")
if not SCRAPERAPI_API_KEY:
raise ValueError("SCRAPERAPI_API_KEY is not set")
SCRAPINGBEE_API_KEY = os.getenv("SCRAPINGBEE_API_KEY")
if not SCRAPINGBEE_API_KEY:
raise ValueError("SCRAPINGBEE_API_KEY is not set")
CONSTANTS = {
@@ -36,7 +37,7 @@ CONSTANTS = {
"LAST_FETCHED_KEY": "LAST_FETCHED",
"SCRAP_API_URL": "https://gamebooking24.com/lottery-api",
"SCRAP_API_SESSION_KEY": "SRAJWT",
"SCRAPERAPI_BASE_URL": "http://api.scraperapi.com",
"SCRAPINGBEE_BASE_URL": "https://app.scrapingbee.com/api/v1",
"SCRAP_API_BASE_HEADERS": {
"Host": "gamebooking24.com",
"Sec-Ch-Ua": '"Not/A)Brand";v="8", "Chromium";v="126"',
@@ -95,105 +96,129 @@ def build_headers(
async def make_get_request(
url: str, params: Optional[Dict] = None, headers: Optional[Dict] = None
):
"""Make a GET request using ScraperAPI"""
if SCRAPERAPI_API_KEY == "<TODO: get and put the key in here>":
raise HTTPException(status_code=500, detail="ScraperAPI API key not configured")
# Build the ScraperAPI request params
scraperapi_params = {
"api_key": SCRAPERAPI_API_KEY,
"url": url,
"render": "true",
}
"""Make a GET request using ScrapingBee"""
# Add query params to the target URL if provided
if params:
url_with_params = f"{url}?{urlencode(params)}"
scraperapi_params["url"] = url_with_params
else:
url_with_params = url
# Make the request to ScraperAPI using aiohttp
logger.debug(f"[ScrapingBee GET] Target URL: {url_with_params}")
# Build the ScrapingBee request params
# Note: aiohttp will automatically URL-encode the params, including the 'url' value
scrapingbee_params = {
"api_key": SCRAPINGBEE_API_KEY,
"url": url_with_params,
"render_js": "true",
"block_resources": "false",
"transparent_status_code": "true", # Pass through the actual status code from target site
}
# Forward headers to target site if provided (for Authorization, etc.)
if headers and "Authorization" in headers:
scrapingbee_params["forward_headers"] = "true"
# Make the request to ScrapingBee using aiohttp
# Note: Don't pass custom headers to ScrapingBee - they're for the target site
# If needed, use ScrapingBee's forward_headers parameter instead
async with aiohttp.ClientSession() as session:
async with session.get(
CONSTANTS["SCRAPERAPI_BASE_URL"],
params=scraperapi_params,
headers=headers,
CONSTANTS["SCRAPINGBEE_BASE_URL"],
params=scrapingbee_params,
timeout=aiohttp.ClientTimeout(total=60),
) as response:
# Create a simple response-like object
class AsyncResponse:
def __init__(self, aiohttp_response):
self._response = aiohttp_response
self.status_code = aiohttp_response.status
self.headers = aiohttp_response.headers
# Read content before context manager exits
content = await response.read()
# Log error responses for debugging
if response.status != 200:
try:
error_text = content.decode('utf-8')[:500]
logger.error(f"[ScrapingBee GET] Status {response.status}, Response: {error_text}")
except:
logger.error(f"[ScrapingBee GET] Status {response.status}, Response (non-text): {len(content)} bytes")
# Create a simple response object with the data
class SimpleResponse:
def __init__(self, status, headers, content_bytes):
self.status_code = status
self.headers = headers
self._content = content_bytes
self._text = None
self._json = None
self._content = None
async def text(self):
if self._text is None:
self._text = await self._response.text()
self._text = self._content.decode('utf-8')
return self._text
async def json(self):
if self._json is None:
self._json = await self._response.json()
self._json = json.loads(await self.text())
return self._json
async def content(self):
if self._content is None:
self._content = await self._response.read()
return self._content
return AsyncResponse(response)
return SimpleResponse(response.status, response.headers, content)
async def make_post_request(url: str, data: dict, headers: Optional[Dict] = None):
"""Make a POST request using ScraperAPI"""
if SCRAPERAPI_API_KEY == "<TODO: get and put the key in here>":
raise HTTPException(status_code=500, detail="ScraperAPI API key not configured")
"""Make a POST request using ScrapingBee"""
# Build the ScraperAPI request params
scraperapi_params = {
"api_key": SCRAPERAPI_API_KEY,
# Build the ScrapingBee request params
scrapingbee_params = {
"api_key": SCRAPINGBEE_API_KEY,
"url": url,
"render": "true",
"render_js": "true",
"block_resources": "false",
}
# Make the POST request to ScraperAPI using aiohttp
# ScrapingBee POST requests: pass JSON body as a parameter
scrapingbee_params["body"] = json.dumps(data)
# Forward headers to target site if provided
# Note: ScrapingBee's forward_headers forwards common headers automatically
# For custom headers like Authorization, we may need to use cookies parameter
if headers and "Authorization" in headers:
scrapingbee_params["forward_headers"] = "true"
# TODO: May need to pass Authorization via cookies if forward_headers doesn't work
# Make the POST request to ScrapingBee using aiohttp
# ScrapingBee HTML API uses GET even for POST requests - the body is passed as a param
async with aiohttp.ClientSession() as session:
async with session.post(
CONSTANTS["SCRAPERAPI_BASE_URL"],
params=scraperapi_params,
json=data, # Use json= for JSON payloads (sets Content-Type automatically)
headers=headers,
async with session.get(
CONSTANTS["SCRAPINGBEE_BASE_URL"],
params=scrapingbee_params,
timeout=aiohttp.ClientTimeout(total=60),
) as response:
# Create a simple response-like object
class AsyncResponse:
def __init__(self, aiohttp_response):
self._response = aiohttp_response
self.status_code = aiohttp_response.status
self.headers = aiohttp_response.headers
# Read content before context manager exits
content = await response.read()
# Create a simple response object with the data
class SimpleResponse:
def __init__(self, status, headers, content_bytes):
self.status_code = status
self.headers = headers
self._content = content_bytes
self._text = None
self._json = None
self._content = None
async def text(self):
if self._text is None:
self._text = await self._response.text()
self._text = self._content.decode('utf-8')
return self._text
async def json(self):
if self._json is None:
self._json = await self._response.json()
self._json = json.loads(await self.text())
return self._json
async def content(self):
if self._content is None:
self._content = await self._response.read()
return self._content
return AsyncResponse(response)
return SimpleResponse(response.status, response.headers, content)
# Pydantic models for request bodies

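The same ScrapingBee calling convention recurs on the Node side in the getCaptcha change below: every request goes to the single HTML API endpoint, with the API key, the percent-encoded target URL, and the render flags passed as query parameters. A minimal TypeScript sketch of that shape, using only the parameter names this commit already uses:

// Sketch: proxy a GET through ScrapingBee's HTML API.
// Assumes SCRAPINGBEE_API_KEY is set in the environment.
async function proxiedGet(targetUrl: string): Promise<Buffer> {
  const api = new URL("https://app.scrapingbee.com/api/v1");
  api.searchParams.set("api_key", process.env.SCRAPINGBEE_API_KEY ?? "");
  api.searchParams.set("url", targetUrl); // URLSearchParams percent-encodes the target URL
  api.searchParams.set("render_js", "false");
  api.searchParams.set("block_resources", "false");

  const res = await fetch(api.toString());
  if (!res.ok) {
    throw new Error(`ScrapingBee returned ${res.status}`);
  }
  return Buffer.from(await res.arrayBuffer());
}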
src/lib/server/logger.ts (new file, 143 lines)

@@ -0,0 +1,143 @@
import winston from "winston";
import DailyRotateFile from "winston-daily-rotate-file";
import util from "util";
import { Err } from "./result";
import { env } from "$env/dynamic/private";
import path from "path";
process.on("warning", (warning) => {
const msg = String(warning?.message || "");
const name = String((warning as any)?.name || "");
// Ignore the noisy timer warning from Node/kafkajs interplay
if (
name === "TimeoutNegativeWarning" ||
msg.includes("TimeoutNegativeWarning") ||
msg.includes("Timeout duration was set to 1")
) {
return;
}
// Keep other warnings visible
console.warn(warning);
});
const levels = {
error: 0,
warn: 1,
info: 2,
http: 3,
debug: 4,
};
const colors = {
error: "red",
warn: "yellow",
info: "green",
http: "magenta",
debug: "white",
};
const level = () => {
const envLevel = env.LOG_LEVEL?.toLowerCase();
if (envLevel && envLevel in levels) {
return envLevel;
}
return env.NODE_ENV === "development" ? "debug" : "warn";
};
// Console format with colors
const consoleFormat = winston.format.combine(
winston.format.errors({ stack: true }),
winston.format.timestamp({ format: "YYYY-MM-DD HH:mm:ss:ms" }),
winston.format.colorize({ all: true }),
winston.format.printf((info: any) => {
const { level, message, timestamp, ...extra } = info;
let formattedMessage = "";
if (message instanceof Error) {
formattedMessage = message.stack || message.message;
} else if (typeof message === "object") {
formattedMessage = util.inspect(message, {
depth: null,
colors: true,
});
} else {
formattedMessage = message as any as string;
}
// Handle extra fields (if any)
const formattedExtra =
Object.keys(extra).length > 0
? `\n${util.inspect(extra, { depth: null, colors: true })}`
: "";
return `[${level}] ${timestamp}: ${formattedMessage}${formattedExtra}`;
})
);
// JSON format for file logging
const fileFormat = winston.format.combine(
winston.format.errors({ stack: true }),
winston.format.timestamp(),
winston.format.json()
);
// Log directory - use logs folder in project root
const logDir = path.join(process.cwd(), "logs");
// Daily rotate file transport for all logs
const dailyRotateFileTransport = new DailyRotateFile({
filename: path.join(logDir, "app-%DATE%.log"),
datePattern: "YYYY-MM-DD",
zippedArchive: true,
maxSize: "20m",
maxFiles: "14d", // Keep logs for 14 days
format: fileFormat,
});
// Daily rotate file transport for errors only
const dailyRotateErrorTransport = new DailyRotateFile({
filename: path.join(logDir, "error-%DATE%.log"),
datePattern: "YYYY-MM-DD",
zippedArchive: true,
maxSize: "20m",
maxFiles: "30d", // Keep error logs for 30 days
level: "error",
format: fileFormat,
});
// Build transports
const transports: winston.transport[] = [
new winston.transports.Console({ format: consoleFormat }),
dailyRotateFileTransport,
dailyRotateErrorTransport,
];
winston.addColors(colors);
const logger = winston.createLogger({
level: level(),
levels,
transports,
format: fileFormat, // Default format for all transports
exceptionHandlers: [dailyRotateFileTransport, dailyRotateErrorTransport],
rejectionHandlers: [dailyRotateFileTransport, dailyRotateErrorTransport],
});
const stream = { write: (message: string) => logger.http(message.trim()) };
function getError(payload: Err, error?: any) {
logger.error(JSON.stringify({ payload, error }, null, 2));
console.error(error);
return {
code: payload.code,
message: payload.message,
description: payload.description,
detail: payload.detail,
error: error instanceof Error ? error.message : error,
actionable: payload.actionable,
} as Err;
}
export { getError, logger, stream };

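Typical call sites look like the router diffs that follow: tagged logger.info/warn/error lines, plus getError when a failure must be reported back to the client. A short sketch (the Err payload fields mirror result.ts below):

import { logger, getError } from "$lib/server/logger";
import { ERROR_CODES } from "$lib/server/result";

logger.info("[refetchData] Fetching data for 3 users");

try {
  throw new Error("upstream timed out"); // stand-in for a fallible call
} catch (err) {
  // Logs the full payload plus the raw error, returns only client-safe fields.
  const safe = getError(
    {
      code: ERROR_CODES.EXTERNAL_API_ERROR,
      message: "Upstream fetch failed",
      description: "The scrape API did not respond in time",
      detail: "See error-%DATE%.log for the stack trace",
    },
    err,
  );
  logger.warn(`[refetchData] returning ${safe.code} to the client`);
}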
src/lib/server/result.ts (new file, 81 lines)

@@ -0,0 +1,81 @@
export const ERROR_CODES = {
API_ERROR: "API_ERROR",
EXTERNAL_API_ERROR: "EXTERNAL_API_ERROR",
RATE_LIMIT_ERROR: "RATE_LIMIT_ERROR",
DATABASE_ERROR: "DATABASE_ERROR",
NETWORK_ERROR: "NETWORK_ERROR",
BANNED: "BANNED",
AUTH_ERROR: "AUTH_ERROR",
PERMISSION_ERROR: "PERMISSION_ERROR",
VALIDATION_ERROR: "VALIDATION_ERROR",
UNKNOWN_ERROR: "UNKNOWN_ERROR",
NOT_FOUND_ERROR: "NOT_FOUND_ERROR",
NOT_FOUND: "NOT_FOUND",
INPUT_ERROR: "INPUT_ERROR",
INTERNAL_SERVER_ERROR: "INTERNAL_SERVER_ERROR",
EXTERNAL_SERVICE_ERROR: "EXTERNAL_SERVICE_ERROR",
FILE_SYSTEM_ERROR: "FILE_SYSTEM_ERROR",
STORAGE_ERROR: "STORAGE_ERROR",
NOT_ALLOWED: "NOT_ALLOWED",
NOT_IMPLEMENTED: "NOT_IMPLEMENTED",
PROCESSING_ERROR: "PROCESSING_ERROR",
PARSING_ERROR: "PARSING_ERROR",
} as const;
export const errorStatusMap = {
[ERROR_CODES.VALIDATION_ERROR]: 400,
[ERROR_CODES.AUTH_ERROR]: 403,
[ERROR_CODES.BANNED]: 403,
[ERROR_CODES.NOT_FOUND]: 404,
[ERROR_CODES.NOT_ALLOWED]: 405,
[ERROR_CODES.RATE_LIMIT_ERROR]: 429,
[ERROR_CODES.DATABASE_ERROR]: 500,
[ERROR_CODES.NETWORK_ERROR]: 500,
[ERROR_CODES.EXTERNAL_API_ERROR]: 500,
[ERROR_CODES.API_ERROR]: 500,
[ERROR_CODES.INTERNAL_SERVER_ERROR]: 500,
[ERROR_CODES.EXTERNAL_SERVICE_ERROR]: 500,
[ERROR_CODES.FILE_SYSTEM_ERROR]: 500,
[ERROR_CODES.STORAGE_ERROR]: 500,
[ERROR_CODES.PROCESSING_ERROR]: 500,
[ERROR_CODES.PARSING_ERROR]: 500,
[ERROR_CODES.NOT_IMPLEMENTED]: 501,
} as Record<string, number>;
export type Err = {
flowId?: string;
code: string;
message: string;
description: string;
detail: string;
actionable?: boolean;
error?: any;
};
type Success<T> = { data: T; error?: undefined | null };
type Failure<E> = { data?: undefined | null; error: E };
// Legacy now, making use of Effect throughout the project
export type Result<T, E = Err> = Success<T> | Failure<E>;
export async function tryCatch<T, E = Err>(
promise: Promise<T>,
err?: E
): Promise<Result<T, E>> {
try {
const data = await promise;
return { data };
} catch (e) {
return {
// @ts-ignore
error: !!err
? err
: {
code: "UNKNOWN_ERROR",
message: "An unknown error occurred",
description: "An unknown error occurred",
detail: "An unknown error occurred",
},
};
}
}

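tryCatch flattens a promise into that Result shape so callers branch on .error instead of nesting try/catch. A usage sketch, where fetchDraws is a hypothetical stand-in for any fallible call:

import { tryCatch, ERROR_CODES, type Err } from "$lib/server/result";

// Hypothetical fallible call, only here to drive the example.
const fetchDraws = async (): Promise<string[]> => ["draw:1", "draw:2"];

const out = await tryCatch<string[]>(fetchDraws(), {
  code: ERROR_CODES.EXTERNAL_API_ERROR,
  message: "Could not load draws",
  description: "The draws API call rejected",
  detail: "tryCatch returned the fallback Err",
} as Err);

if (out.error) {
  console.error(out.error.code); // failure branch: data is undefined
} else {
  console.log(`loaded ${out.data?.length ?? 0} draws`); // success branch
}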

@@ -11,22 +11,50 @@ import type { ServerError } from "$lib/utils/data.types";
import { TRPCError } from "@trpc/server";
import { z } from "zod";
import { createTRPCRouter, protectedProcedure } from "../t";
import { env } from "$env/dynamic/private";
import { logger } from "$lib/server/logger";
import fetch from "node-fetch";
export const apiAuthRouter = createTRPCRouter({
getCaptcha: protectedProcedure.mutation(async () => {
const scrapingbeeApiKey = env.SCRAPINGBEE_API_KEY;
try {
const uuid = getUUID();
const res = await fetch(`${constants.PROXY_API_URL}/verify/image?uuid=${uuid}`, {
headers: {
"Content-Type": "application/json",
},
const targetUrl = `${constants.SCRAP_API_URL}/verify/image?uuid=${uuid}`;
logger.info(`[getCaptcha] Fetching captcha image for uuid: ${uuid}`);
// Build ScrapingBee API URL with params
const scrapingbeeUrl = new URL("https://app.scrapingbee.com/api/v1");
scrapingbeeUrl.searchParams.set("api_key", scrapingbeeApiKey);
scrapingbeeUrl.searchParams.set("url", targetUrl);
scrapingbeeUrl.searchParams.set("render_js", "false");
scrapingbeeUrl.searchParams.set("block_resources", "false");
const res = await fetch(scrapingbeeUrl.toString());
if (!res.ok || res.status !== 200) {
// Clone response before reading to avoid consuming body
const clonedRes = res.clone();
const errorText = await clonedRes.text().catch(() => "Unknown error");
logger.error(`[getCaptcha] ScrapingBee error ${res.status}: ${errorText.substring(0, 200)}`);
throw new TRPCError({
code: "INTERNAL_SERVER_ERROR",
message: `Failed to fetch captcha image: ${res.status}`,
});
const bloob = await res.blob();
const imageBuffer = Buffer.from(await bloob.arrayBuffer());
}
// Read the response as arrayBuffer (recommended method)
const arrayBuffer = await res.arrayBuffer();
const imageBuffer = Buffer.from(arrayBuffer);
const base64String = imageBuffer.toString("base64");
logger.info(`[getCaptcha] Successfully fetched captcha image for uuid: ${uuid}, size: ${imageBuffer.length} bytes`);
return { id: uuid, image: base64String };
} catch (err) {
console.log(err);
logger.error("[getCaptcha] Error getting captcha image", err);
throw new TRPCError({
code: "INTERNAL_SERVER_ERROR",
message: "Error getting captcha image.",
@@ -43,15 +71,15 @@ export const apiAuthRouter = createTRPCRouter({
}),
)
.mutation(async ({ input }) => {
console.log("[=] Getting new session... ", input);
logger.info(`[getNewSession] Getting new session for userId: ${input.userId || "random"}`);
try {
const { captchaId, captchaAnswer } = input;
let { userId, userType, password } = await dbApiUser.getRandomDistributor();
if (input.userId) {
let _user = await dbApiUser.getUserById(input.userId);
console.log("[=] User :: ", _user?.userId);
if (!_user) {
logger.warn(`[getNewSession] User not found: ${input.userId}`);
return {
success: false,
errors: [{ message: "User not found." }],
@@ -60,8 +88,10 @@ export const apiAuthRouter = createTRPCRouter({
userId = _user.userId;
userType = _user.userType;
password = _user.password;
logger.info(`[getNewSession] Using specific user: ${userId}`);
}
console.log(`[=] Getting session token for user ${userId}...`);
logger.info(`[getNewSession] Getting session token for user ${userId}`);
const token = await getSessionToken({
code: captchaAnswer,
verifyToken: captchaId,
@@ -69,17 +99,20 @@ export const apiAuthRouter = createTRPCRouter({
userType: userType,
password: password,
});
console.log("[=] Token Response :: ", JSON.stringify(token, null, 2));
if (!token.ok) {
logger.warn(`[getNewSession] Failed to get session token: ${token.message}`);
return {
success: false,
errors: [{ message: token.message }],
};
}
await setSessionToRedis(token.message, input.userId ?? "");
logger.info(`[getNewSession] Successfully created session for user ${userId}`);
return { success: true, errors: [] as ServerError };
} catch (err) {
console.log(err);
logger.error("[getNewSession] Error getting new session", err);
throw new TRPCError({
code: "INTERNAL_SERVER_ERROR",
message: "Error getting new session.",
@@ -102,6 +135,7 @@ export const apiAuthRouter = createTRPCRouter({
.input(z.object({ userId: z.string().optional() }))
.mutation(async ({ input }) => {
const { userId } = input;
logger.info(`[logoutUser] Logging out user: ${userId || "all"}`);
await removeSessionFromStore(userId);
return { success: true, errors: [] as ServerError };
}),


@@ -14,6 +14,7 @@ import {
} from "$lib/utils/data.types";
import { z } from "zod";
import { createTRPCRouter, protectedProcedure } from "../t";
import { logger } from "$lib/server/logger";
const lastFetched = {
get: async () => {
@@ -30,9 +31,11 @@ const lastFetched = {
export const apiDataRouter = createTRPCRouter({
getDealersAndDraws: protectedProcedure.query(async () => {
logger.debug("[getDealersAndDraws] Fetching dealers and draws");
const draws = await dbDraw.getAllDraws(true);
const dealers = await dbApiUser.allUsersOfType(ApiUserTypes.DEALER);
const lf = await lastFetched.get();
logger.info(`[getDealersAndDraws] Found ${draws.length} draws and ${dealers.length} dealers`);
return { users: dealers, draws, lastFetched: lf };
}),
@@ -47,6 +50,7 @@ export const apiDataRouter = createTRPCRouter({
.mutation(async ({ input }) => {
const { userIds, targetDate, drawId } = input;
if (userIds.length < 1) {
logger.warn("[refetchData] No users selected");
return {
detail: "No users selected",
success: false,
@@ -59,6 +63,7 @@ export const apiDataRouter = createTRPCRouter({
(await redis.get(constants.SCRAP_API_SESSION_KEY)) ?? "",
) as APISession;
if (sess === null) {
logger.warn("[refetchData] API session expired");
return {
detail: "API Session expired",
success: false,
@@ -70,9 +75,7 @@ export const apiDataRouter = createTRPCRouter({
] as ServerError,
};
}
console.log(
`Fetching data for ${userIds.length} users for draw ${drawId}`,
);
logger.info(`[refetchData] Fetching data for ${userIds.length} users, draw ${drawId}, date ${targetDate}`);
const userIdsInt = userIds.map((x) => parseInt(x.split(":")[1]));
const out = await getData(
sess.sessionToken,
@@ -81,6 +84,7 @@ export const apiDataRouter = createTRPCRouter({
targetDate,
);
if (!out.ok) {
logger.error(`[refetchData] Error fetching data: ${out.message}`);
return {
success: false,
detail: "Error fetching data",
@@ -88,7 +92,9 @@ export const apiDataRouter = createTRPCRouter({
};
}
const dataCount = out.data.length;
logger.info(`[refetchData] Fetched ${dataCount} entries, upserting to database`);
await dbApiData.upsertData(out.data, targetDate);
logger.info(`[refetchData] Successfully scraped and saved ${dataCount} entries for ${userIds.length} users`);
return {
detail: `Scraped ${dataCount} entries for ${userIds.length} users`,
success: true,
@@ -106,12 +112,14 @@ export const apiDataRouter = createTRPCRouter({
)
.mutation(async ({ input }) => {
const { date, drawId, userId } = input;
logger.info(`[getDataByFilters] Fetching data for date ${date}, draw ${drawId}, user ${userId}`);
const data = await dbApiData.getBookingEntriesForDealer(
date,
drawId.split(":")[1],
userId.split(":")[1],
true,
);
logger.info(`[getDataByFilters] Found ${data.length} entries`);
return { data };
}),
@@ -119,6 +127,7 @@ export const apiDataRouter = createTRPCRouter({
.input(z.object({ date: z.string(), drawId: z.string() }))
.mutation(async ({ input }) => {
const { date, drawId } = input;
logger.info(`[getReducedFinalSheet] Compiling final sheet for date ${date}, draw ${drawId}`);
const draw = await dbDraw.getDraw(drawId);
const fsData = {
id: getULID(),
@@ -128,6 +137,7 @@ export const apiDataRouter = createTRPCRouter({
totals: getDefaultTotals(),
} as ReducedFinalSheetData;
if (!draw) {
logger.warn(`[getReducedFinalSheet] Draw not found: ${drawId}`);
return {
ok: false,
detail: `Draw for the passed draw ID not found`,
@@ -137,10 +147,9 @@ export const apiDataRouter = createTRPCRouter({
] as ServerError,
};
}
console.log("Fetching data");
const data = await getReducedFinalSheet(fsData);
console.log(data);
if (!data.ok) {
logger.error(`[getReducedFinalSheet] Error compiling final sheet: ${data.errors?.map(e => e.message).join(", ")}`);
return {
ok: false,
detail: `Error compiling final sheet`,
@@ -148,6 +157,7 @@ export const apiDataRouter = createTRPCRouter({
errors: data.errors,
};
}
logger.info(`[getReducedFinalSheet] Successfully compiled final sheet for ${date}, draw ${draw.title}`);
return {
ok: true,
detail: `Final sheet for ${date}, draw ${draw.title} has been compiled`,
@@ -165,6 +175,7 @@ export const apiDataRouter = createTRPCRouter({
}),
)
.mutation(async ({ input }) => {
logger.debug(`[getFinalSheetRow] Getting final sheet row for number ${input.number}`);
return {
ok: true,
data: {},
@@ -173,26 +184,20 @@ export const apiDataRouter = createTRPCRouter({
}),
delDataOlderThan2Weeks: protectedProcedure.mutation(async () => {
logger.info("[delDataOlderThan2Weeks] Deleting data older than 2 weeks");
await dbApiData.deleteDataOlderThan2Weeks();
logger.info("[delDataOlderThan2Weeks] Successfully deleted old data");
return { ok: true, detail: "Data older than 2 weeks has been deleted" };
}),
postTestBooking: protectedProcedure
.input(z.object({ drawId: z.string(), date: z.string() }))
.mutation(async () => {
logger.debug("[postTestBooking] Test booking endpoint called (not live)");
return {
ok: true,
detail: "API not live",
errors: [] as ServerError,
};
// console.log("GENERATING TEST DATA :: ", drawId, date);
// const testData = await getTestBookingData(drawId, date);
// // console.log(testData);
// await dbApiData.upsertData(testData, date);
// return {
// ok: true,
// detail: "Test booking committed",
// errors: [] as ServerError,
// };
}),
});


@@ -2,32 +2,51 @@ import { createTRPCRouter, protectedProcedure } from "../t";
import { ApiUserTypes, zApiPostUser } from "$lib/utils/data.types";
import { dbApiUser } from "$lib/server/db/apiuser.db";
import { z } from "zod";
import { logger } from "$lib/server/logger";
export const apiUserRouter = createTRPCRouter({
getAllDistributors: protectedProcedure.query(async () => {
return await dbApiUser.allUsersOfType(ApiUserTypes.DISTRIBUTOR);
logger.debug("[getAllDistributors] Fetching all distributors");
const distributors = await dbApiUser.allUsersOfType(ApiUserTypes.DISTRIBUTOR);
logger.info(`[getAllDistributors] Found ${distributors.length} distributors`);
return distributors;
}),
getAllDealers: protectedProcedure.query(async () => {
return await dbApiUser.allUsersOfType(ApiUserTypes.DEALER);
logger.debug("[getAllDealers] Fetching all dealers");
const dealers = await dbApiUser.allUsersOfType(ApiUserTypes.DEALER);
logger.info(`[getAllDealers] Found ${dealers.length} dealers`);
return dealers;
}),
getAllDistributorsCount: protectedProcedure.query(async () => {
return await dbApiUser.getUserTypeCount(ApiUserTypes.DISTRIBUTOR);
const count = await dbApiUser.getUserTypeCount(ApiUserTypes.DISTRIBUTOR);
logger.debug(`[getAllDistributorsCount] Count: ${count}`);
return count;
}),
getAllDealersCount: protectedProcedure.query(async () => {
return await dbApiUser.getUserTypeCount(ApiUserTypes.DEALER);
const count = await dbApiUser.getUserTypeCount(ApiUserTypes.DEALER);
logger.debug(`[getAllDealersCount] Count: ${count}`);
return count;
}),
getAllDealersPostUserFormat: protectedProcedure.query(async () => {
return await dbApiUser.allUsersOfTypeLimitedInfo(ApiUserTypes.DEALER);
logger.debug("[getAllDealersPostUserFormat] Fetching dealers in post user format");
const dealers = await dbApiUser.allUsersOfTypeLimitedInfo(ApiUserTypes.DEALER);
logger.info(`[getAllDealersPostUserFormat] Found ${dealers.length} dealers`);
return dealers;
}),
getAllPostUsers: protectedProcedure.query(async () => {
return await dbApiUser.getAllPostUsers();
logger.debug("[getAllPostUsers] Fetching all post users");
const users = await dbApiUser.getAllPostUsers();
logger.info(`[getAllPostUsers] Found ${users.length} post users`);
return users;
}),
setPostDataFlagForUser: protectedProcedure
.input(z.object({ users: z.array(zApiPostUser) }))
.mutation(async ({ input }) => {
logger.info(`[setPostDataFlagForUser] Setting post data flag for ${input.users.length} users`);
await dbApiUser.setPostDataFlagForUsers(input.users);
logger.info("[setPostDataFlagForUser] Successfully updated post data flags");
}),
});


@@ -9,6 +9,7 @@ import {
} from "$lib/utils/data.types";
import { surreal } from "$lib/server/connectors/surreal.db";
import { parseToDateString } from "$lib/utils/datetime.helper.utils";
import { logger } from "$lib/server/logger";
function getTodaysTableName() {
const today = parseToDateString(new Date());
@@ -17,10 +18,12 @@ function getTodaysTableName() {
export const bookingRouter = createTRPCRouter({
getPanelData: protectedProcedure.query(async () => {
logger.debug("[getPanelData] Fetching panel data");
const draws = await dbDraw.getAllDraws(true);
const timeInDrawsTz = new Date().toLocaleString("en-US", {
timeZone: DEFAULT_TZ,
});
logger.info(`[getPanelData] Found ${draws.length} draws`);
return { draws, timeInDrawsTz: timeInDrawsTz };
}),
@@ -31,17 +34,21 @@ export const bookingRouter = createTRPCRouter({
const date = parseToDateString(new Date());
const tn = getTodaysTableName();
const did = parseInt(drawId.split(":")[1]);
logger.info(`[getBookingData] Fetching booking data for draw ${did}, date ${date}`);
const [out] = await surreal.query<[BookingEntry[]]>(
`select * from type::table($table) where drawId = $drawId and bookDate = $bookDate order by requestId desc`,
{ table: tn, drawId: did, bookDate: date },
);
return { data: out ?? [], errors: [] as ServerError };
const data = out ?? [];
logger.info(`[getBookingData] Found ${data.length} booking entries`);
return { data, errors: [] as ServerError };
}),
syncBooking: protectedProcedure
.input(z.object({ data: z.array(zBookingEntry) }))
.mutation(async ({ input }) => {
const tableName = getTodaysTableName();
logger.info(`[syncBooking] Syncing ${input.data.length} booking entries`);
const syncedEntriesIds = [] as string[];
if (input.data.length > 0) {
await surreal.insert<BookingEntry>(
@@ -56,17 +63,20 @@ export const bookingRouter = createTRPCRouter({
}),
);
}
logger.info(`[syncBooking] Successfully synced ${syncedEntriesIds.length} booking entries`);
return { detail: "Add Booking api donezo", syncedEntriesIds };
}),
deleteBooking: protectedProcedure
.input(z.object({ bookingIds: z.array(z.string()) }))
.mutation(async ({ input }) => {
logger.info(`[deleteBooking] Deleting ${input.bookingIds.length} booking entries`);
await Promise.all(
input.bookingIds.map(async (id) => {
await surreal.delete(id);
}),
);
logger.info(`[deleteBooking] Successfully deleted ${input.bookingIds.length} entries`);
return { detail: `Deleted ${input.bookingIds.length} Entries` };
}),
});


@@ -2,10 +2,14 @@ import { createTRPCRouter, protectedProcedure } from "../t";
import { dbDraw } from "$lib/server/db/apidraw.db";
import { z } from "zod";
import { zDraw } from "$lib/utils/data.types";
import { logger } from "$lib/server/logger";
export const drawRouter = createTRPCRouter({
getAllDraws: protectedProcedure.query(async () => {
return await dbDraw.getAllDraws(true);
logger.debug("[getAllDraws] Fetching all draws");
const draws = await dbDraw.getAllDraws(true);
logger.info(`[getAllDraws] Found ${draws.length} draws`);
return draws;
}),
getCurrentTime: protectedProcedure.query(async () => {
@@ -14,18 +18,17 @@ export const drawRouter = createTRPCRouter({
const nowKarachi = new Date(
now.toLocaleString("en-US", { timeZone: timezone }),
);
// console.log(nowKarachi.toLocaleString());
return { now: nowKarachi };
}),
savePresetInfoForDraws: protectedProcedure
.input(z.object({ draws: z.array(zDraw) }))
.mutation(async ({ input }) => {
console.log("Saving preset info for draws");
logger.info(`[savePresetInfoForDraws] Saving preset info for ${input.draws.length} draws`);
for (const draw of input.draws) {
await dbDraw.updateDrawPresetInfo(draw);
}
console.log("Done saving preset info for draws");
logger.info("[savePresetInfoForDraws] Successfully saved preset info for all draws");
return { success: true };
}),
});


@@ -19,6 +19,7 @@ import {
} from "$lib/server/postdata/postdata.gen.controller";
import { redis } from "$lib/server/connectors/redis";
import { constants } from "$lib/utils/constants";
import { logger } from "$lib/server/logger";
async function hasPostSession() {
const out = await redis.get(constants.POST_SESSION_KEY);
@@ -49,18 +50,21 @@ export const postDataApiRouter = createTRPCRouter({
fetchPostDataHistory: protectedProcedure
.input(zPostDataHistoryFilters)
.mutation(async ({ input }) => {
return await fetchPostDataHistory(input);
logger.info(`[fetchPostDataHistory] Fetching post data history for date ${input.date}, draw ${input.draw?.id}`);
const result = await fetchPostDataHistory(input);
logger.info(`[fetchPostDataHistory] Found ${result.data?.length || 0} history entries`);
return result;
}),
hasPosted: protectedProcedure
.input(zPostDataHistoryFilters)
.query(async ({ input }) => {
return {
hasPosted: await dbApiPostData.doesPostHistoryDataExist(
const hasPosted = await dbApiPostData.doesPostHistoryDataExist(
input.date,
input.draw?.id ?? "",
),
};
);
logger.debug(`[hasPosted] Checked for date ${input.date}, draw ${input.draw?.id}: ${hasPosted}`);
return { hasPosted };
}),
getPostDataForPreview: protectedProcedure
@@ -69,6 +73,7 @@ export const postDataApiRouter = createTRPCRouter({
const date = input.date;
const cacheKey = getULID();
if (!input.draw) {
logger.warn("[getPostDataForPreview] Draw is required but not provided");
return {
ok: false,
detail: "Draw is required",
@@ -78,11 +83,12 @@ export const postDataApiRouter = createTRPCRouter({
};
}
console.log("[+] Fetching the users with updated balances");
logger.info(`[getPostDataForPreview] Fetching users with updated balances for date ${date}, draw ${input.draw.id}`);
const balOut = await updateBalanceOfPostUsers(
await dbApiUser.getAllPostUsersWithParentUsers(),
);
if (!balOut.ok || !balOut.data) {
logger.error(`[getPostDataForPreview] Failed to update balances: ${balOut.detail}`);
return {
ok: false,
key: cacheKey,
@@ -92,31 +98,37 @@ export const postDataApiRouter = createTRPCRouter({
};
}
const users = balOut.data;
console.log(`[=] ${users.length} users found`);
console.log(users);
logger.info(`[getPostDataForPreview] Found ${users.length} users with updated balances`);
const result = await fetchDataForPosting(date, input, users);
postDataCacheStore.set(cacheKey, result.data).catch(console.error);
console.log("result.data.length = ", result.data.length);
postDataCacheStore.set(cacheKey, result.data).catch((err) => {
logger.error(`[getPostDataForPreview] Error caching data: ${err}`);
});
logger.info(`[getPostDataForPreview] Generated ${result.data.length} entries for preview, cache key: ${cacheKey}`);
return { ...result, key: cacheKey };
}),
post: protectedProcedure
.input(z.object({ yes: zPostDataFilters, cachedDataKey: z.string() }))
.mutation(async ({ input }) => {
const date = input.yes.date;
const draw = input.yes.draw;
logger.info(`[post] Starting post data process for date ${date}, draw ${draw?.id}, cache key ${input.cachedDataKey}`);
if (await hasPostSession()) {
const m =
"Already posting data, please wait for the current session to finish";
const m = "Already posting data, please wait for the current session to finish";
logger.warn(`[post] Post session already in progress`);
return {
ok: false,
detail: m,
errors: [{ message: m }] as ServerError,
};
}
const date = input.yes.date;
const draw = input.yes.draw;
if (!draw) {
await removePostSession();
logger.warn("[post] Draw is required but not provided");
return {
ok: false,
detail: "Draw is required",
@@ -125,12 +137,12 @@ export const postDataApiRouter = createTRPCRouter({
}
const drawId = draw.id;
console.log("[+] Fetching the users");
logger.info("[post] Fetching users and updating balances");
const users = await dbApiUser.getAllPostUsersWithParentUsers();
console.log(users);
const balOut = await updateBalanceOfPostUsers(users);
if (!balOut.ok || !balOut.data) {
await removePostSession();
logger.error(`[post] Failed to update balances: ${balOut.detail}`);
return {
ok: false,
detail: balOut.detail,
@@ -139,10 +151,9 @@ export const postDataApiRouter = createTRPCRouter({
errors: [],
};
}
console.log(`[=] ${users.length} users found`);
console.log(users);
logger.info(`[post] Found ${users.length} users with updated balances`);
console.log("[+] Preparing the sessions for posting");
logger.info("[post] Preparing sessions for posting");
const sessions = await getAllSessions();
const userSessions = {} as Record<string, APISession>;
for (const each of sessions) {
@@ -155,6 +166,7 @@ export const postDataApiRouter = createTRPCRouter({
if (Object.keys(userSessions).length !== users.length) {
await removePostSession();
logger.error(`[post] Missing sessions: ${users.length} users but only ${Object.keys(userSessions).length} sessions`);
return {
ok: false,
detail: `Some users don't have a session to post data with`,
@@ -164,22 +176,25 @@ export const postDataApiRouter = createTRPCRouter({
data: [],
};
}
logger.info(`[post] Prepared ${Object.keys(userSessions).length} user sessions`);
let data: any[] = await postDataCacheStore.get(input.cachedDataKey);
console.log("cached.data.length = ", data.length);
logger.info(`[post] Retrieved ${data.length} entries from cache`);
if (data.length < 1) {
console.log("No data found from preview, generating a list");
logger.info("[post] No cached data found, generating new data list");
const _out = await fetchDataForPosting(date, input.yes, balOut.data);
if (!_out.ok) {
await removePostSession();
logger.error(`[post] Failed to fetch data for posting: ${_out.detail}`);
return _out;
}
data = _out.data;
console.log("data.length = ", data.length);
logger.info(`[post] Generated ${data.length} entries for posting`);
}
if (data.length < 1) {
await removePostSession();
logger.warn("[post] No data found to post");
return {
ok: false,
detail: "No data found to post for the selected date and draw",
@@ -190,25 +205,24 @@ export const postDataApiRouter = createTRPCRouter({
};
}
console.log(`[+] Posting ${data.length} entries to the API`);
let ts = new Date().getTime();
logger.info(`[post] Posting ${data.length} entries to the API`);
const ts = new Date().getTime();
const res = await postDataToApi({
sessions: userSessions,
data,
users,
draw,
});
let done = new Date().getTime();
console.log(`Time taken to post data to the API: ${done - ts} ms`);
const done = new Date().getTime();
logger.info(`[post] API posting completed in ${done - ts}ms`);
if (!res.ok) {
await removePostSession();
logger.error(`[post] Failed to post data to API: ${res.detail}`);
return { ok: false, detail: res.detail };
}
console.log(`[+] Data posted to the API successfully`);
logger.info("[post] Data posted to API successfully, saving to database");
await dbApiPostData.upsertData({
id: getULID(),
drawId: +drawId.split(":")[1],
@@ -218,13 +232,12 @@ export const postDataApiRouter = createTRPCRouter({
updatedAt: new Date().toISOString(),
});
// Update the balance of the users after posting to the API
await updateBalanceOfPostUsers(users);
console.log("[+] Data saved to the database");
logger.info("[post] Data saved to database and balances updated");
await postDataCacheStore.del(input.cachedDataKey);
await removePostSession();
logger.info(`[post] Successfully completed posting ${data.length} entries`);
return {
ok: true,
detail: "Data successfully posted to API",


@@ -2,15 +2,20 @@ import { dbPresetData } from "$lib/server/db/presetdata.db";
import { zDDFilters, zPresetDataEntry } from "$lib/utils/data.types";
import { z } from "zod";
import { createTRPCRouter, protectedProcedure } from "../t";
import { logger } from "$lib/server/logger";
export const presetDataRouter = createTRPCRouter({
getAll: protectedProcedure.input(zDDFilters).mutation(async ({ input }) => {
const { draw, date } = input;
if (!draw) {
logger.warn("[presetData.getAll] Draw is required but not provided");
return { ok: false, detail: "Draw is required to fetch data", data: [] };
}
logger.info(`[presetData.getAll] Fetching preset data for draw ${draw.id}, date ${date}`);
const data = await dbPresetData.getDataByDraw(date, +draw.id.split(":")[1]);
logger.info(`[presetData.getAll] Found ${data.length} preset data entries`);
return {
data: await dbPresetData.getDataByDraw(date, +draw.id.split(":")[1]),
data,
ok: true,
detail: "Data found",
};
@@ -19,17 +24,22 @@ export const presetDataRouter = createTRPCRouter({
insert: protectedProcedure
.input(z.array(zPresetDataEntry))
.mutation(async ({ input }) => {
logger.info(`[presetData.insert] Inserting ${input.length} preset data entries`);
const data = await dbPresetData.insertData(input);
logger.info(`[presetData.insert] Successfully inserted ${data.length} entries`);
return {
ok: true,
detail: "Data inserted",
data: await dbPresetData.insertData(input),
data,
};
}),
delete: protectedProcedure
.input(z.object({ date: z.string(), ids: z.array(z.string()) }))
.mutation(async ({ input }) => {
logger.info(`[presetData.delete] Deleting ${input.ids.length} preset data entries for date ${input.date}`);
await dbPresetData.deleteDataByIds(input.date, input.ids);
logger.info("[presetData.delete] Successfully deleted preset data entries");
return { ok: true, detail: "Data deleted" };
}),
});


@@ -1,8 +1,10 @@
import type { SessionData } from "$lib/utils/data.types";
import { createTRPCRouter, protectedProcedure } from "../t";
import { logger } from "$lib/server/logger";
export const sessionRouter = createTRPCRouter({
getSession: protectedProcedure.query(async ({ ctx }) => {
logger.debug(`[getSession] Getting session for user: ${ctx.session.username}`);
return {
user: {
username: ctx.session.username,


@@ -41,6 +41,7 @@
let captchaQ = api.apiAuth.getCaptcha.createMutation({
onSuccess: (d) => {
console.log("[=] Captcha Response :: ", JSON.stringify(d, null, 2));
captchaId = d.id;
captchaImage = d.image;
},
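On the client, the base64 string returned by getCaptcha can be displayed by wrapping it in a data URL. A tiny sketch; the image/png MIME type is an assumption, since the proxy forwards whatever bytes the upstream captcha endpoint serves:

// Build an <img src=...> value from the mutation's base64 payload.
function toDataUrl(base64Image: string, mime = "image/png"): string {
  // mime is assumed; the server does not report the content type.
  return `data:${mime};base64,${base64Image}`;
}

// e.g. <img src={toDataUrl(captchaImage)} alt="captcha" />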