Compare commits

5 Commits

Author        SHA1        Message                                    Date
bootunloader  905405d0ae  removed scrapingbee cuse yuh               2026-01-05 23:58:49 +02:00
bootunloader  653fb266e5  yuh kuh moar build time fixes #2!!!!! -_-  2026-01-05 22:59:28 +02:00
bootunloader  f3d186b195  yuh kuh moar build time fixes -_-          2026-01-05 22:47:38 +02:00
bootunloader  0d8cbe295c  yuh kuh - type error fixes                 2026-01-05 22:40:27 +02:00
bootunloader  4d4d7cfa93  sum dickerfile update                      2026-01-05 22:36:13 +02:00
7 changed files with 174 additions and 151 deletions

View File

@@ -1,4 +1,4 @@
FROM node:18-alpine
FROM node:24-alpine
RUN apk add --no-cache libc6-compat

View File

@@ -6,7 +6,7 @@
"type": "module",
"scripts": {
"dev": "vite dev",
"start": "HOST=0.0.0.0 PORT=80 node ./build/index.js",
"start": "HOST=0.0.0.0 PORT=3000 node ./build/index.js",
"build": "pnpm run check && vite build",
"preview": "vite preview",
"check": "svelte-kit sync && svelte-check --tsconfig ./tsconfig.json",

View File

@@ -30,23 +30,26 @@ function dumpDataRaw(data: any, prefix: string) {
}
export const testIfSessionIsValid = async (jwt: string) => {
const scrapingbeeApiKey = env.SCRAPINGBEE_API_KEY;
if (!scrapingbeeApiKey) {
logger.error("[testIfSessionIsValid] ScrapingBee API key not configured");
const scrapeDoApiKey = env.SCRAPEDO_API_KEY ?? "";
if (!scrapeDoApiKey) {
logger.error("[testIfSessionIsValid] Scrape.do API key not configured");
return false;
}
try {
const targetUrl = `${constants.SCRAP_API_URL}/v1/user/get-balance?userId=${baseDistributorId}`;
const scrapingbeeUrl = new URL("https://app.scrapingbee.com/api/v1");
scrapingbeeUrl.searchParams.set("api_key", scrapingbeeApiKey);
scrapingbeeUrl.searchParams.set("url", targetUrl);
scrapingbeeUrl.searchParams.set("forward_headers", "true");
scrapingbeeUrl.searchParams.set("forward_headers_pure", "true");
const jwtUrlEncoded = encodeURIComponent(jwt);
const finalUrl = new URL("http://api.scrape.do/");
finalUrl.searchParams.append("url", targetUrl);
finalUrl.searchParams.append("token", scrapeDoApiKey);
finalUrl.searchParams.append("extraHeaders", "true");
const res = await fetch(scrapingbeeUrl.toString(), {
headers: { "Spb-Authorization": jwt },
const res = await fetch(finalUrl.toString(), {
headers: {
"sd-Authorization": jwt,
"sd-Cookie": `AuthorizationToken=${jwtUrlEncoded}`,
},
});
if (res.status !== 200 || !res.ok) {
@@ -75,38 +78,32 @@ export const getSessionToken = async (payload: {
code: string;
userType: number;
}): Promise<{ ok: boolean; message: string }> => {
const scrapingbeeApiKey = env.SCRAPINGBEE_API_KEY;
if (!scrapingbeeApiKey) {
logger.error("[getSessionToken] ScrapingBee API key not configured");
return { ok: false, message: "ScrapingBee API key not configured" };
const scrapeDoApiKey = env.SCRAPEDO_API_KEY ?? "";
if (!scrapeDoApiKey) {
logger.error("[getSessionToken] Scrape.do API key not configured");
return { ok: false, message: "Scrape.do API key not configured" };
}
const targetUrl = `${constants.SCRAP_API_URL}/v1/auth/login`;
logger.info(`[getSessionToken] Requesting session token for user ${payload.userId}`);
const scrapingbeeUrl = new URL("https://app.scrapingbee.com/api/v1");
scrapingbeeUrl.searchParams.set("api_key", scrapingbeeApiKey);
scrapingbeeUrl.searchParams.set("url", targetUrl);
scrapingbeeUrl.searchParams.set("forward_headers", "true");
scrapingbeeUrl.searchParams.set("forward_headers_pure", "true");
// Prefix headers with Spb- for ScrapingBee to forward them
const forwardHeaders = Object.fromEntries(
Object.entries(constants.SCRAP_API_BASE_HEADERS).map(([key, value]) => [`Spb-${key}`, value]),
);
const finalUrl = new URL("http://api.scrape.do/");
finalUrl.searchParams.append("url", targetUrl);
finalUrl.searchParams.append("token", scrapeDoApiKey);
finalUrl.searchParams.append("extraHeaders", "true");
try {
const res = await fetch(scrapingbeeUrl.toString(), {
const res = await fetch(finalUrl.toString(), {
method: "POST",
body: JSON.stringify(payload),
headers: { "Spb-Content-Type": "application/json", ...forwardHeaders },
headers: { "sd-Content-Type": "application/json" },
});
if (!res.ok) {
const errorText = await res.text().catch(() => "Unknown error");
logger.error(
`[getSessionToken] ScrapingBee error ${res.status}: ${errorText.substring(0, 200)}`,
`[getSessionToken] Scrape.do error ${res.status}: ${errorText.substring(0, 200)}`,
);
return {
ok: false,
@@ -130,30 +127,32 @@ export const getSessionToken = async (payload: {
};
export async function getUsersBalance(userId: number, jwt: string) {
const scrapingbeeApiKey = env.SCRAPINGBEE_API_KEY;
if (!scrapingbeeApiKey) {
logger.error("[getUsersBalance] ScrapingBee API key not configured");
const scrapeDoApiKey = env.SCRAPEDO_API_KEY ?? "";
if (!scrapeDoApiKey) {
logger.error("[getUsersBalance] Scrape.do API key not configured");
return false;
}
// Prefix headers with Spb- for ScrapingBee to forward them
// Prefix headers with sd- for Scrape.do to forward them
const forwardHeaders = Object.fromEntries(
Object.entries(constants.SCRAP_API_BASE_HEADERS).map(([key, value]) => [`Spb-${key}`, value]),
Object.entries(constants.SCRAP_API_BASE_HEADERS).map(([key, value]) => [`sd-${key}`, value]),
);
try {
const targetUrl = `${constants.SCRAP_API_URL}/v1/user/get-balance?userId=${userId}`;
const scrapingbeeUrl = new URL("https://app.scrapingbee.com/api/v1");
scrapingbeeUrl.searchParams.set("api_key", scrapingbeeApiKey);
scrapingbeeUrl.searchParams.set("url", targetUrl);
scrapingbeeUrl.searchParams.set("forward_headers", "true");
scrapingbeeUrl.searchParams.set("forward_headers_pure", "true");
const jwtUrlEncoded = encodeURIComponent(jwt);
const finalUrl = new URL("http://api.scrape.do/");
finalUrl.searchParams.append("url", targetUrl);
finalUrl.searchParams.append("token", scrapeDoApiKey);
finalUrl.searchParams.append("extraHeaders", "true");
const encodedJwt = encodeURIComponent(jwt);
const res = await fetch(scrapingbeeUrl.toString(), {
headers: { "Spb-Authorization": jwt, "Spb-Cookie": `AuthorizationToken=${encodedJwt}` },
const res = await fetch(finalUrl.toString(), {
headers: {
"sd-Authorization": jwt,
"sd-Cookie": `AuthorizationToken=${jwtUrlEncoded}`,
...forwardHeaders,
},
});
const rj = (await res.json()) as {
@@ -176,12 +175,12 @@ export async function getUsersBalance(userId: number, jwt: string) {
}
export const getDealers = async (jwt: string, distributor_ids: string[]) => {
const scrapingbeeApiKey = env.SCRAPINGBEE_API_KEY;
if (!scrapingbeeApiKey) {
logger.error("[getDealers] ScrapingBee API key not configured");
const scrapeDoApiKey = env.SCRAPEDO_API_KEY ?? "";
if (!scrapeDoApiKey) {
logger.error("[getDealers] Scrape.do API key not configured");
return {
dealers: [],
errors: [{ message: "ScrapingBee API key not configured" }],
errors: [{ message: "Scrape.do API key not configured" }],
};
}
@@ -194,23 +193,30 @@ export const getDealers = async (jwt: string, distributor_ids: string[]) => {
message: string;
}> = [];
// Prefix headers with sd- for Scrape.do to forward them
const forwardHeaders = Object.fromEntries(
Object.entries(constants.SCRAP_API_BASE_HEADERS).map(([key, value]) => [`sd-${key}`, value]),
);
// Process each batch sequentially
for (const batch of batches) {
const batchRequests = batch.map(async (did, index) => {
await sleep(rng(100, 2000));
const targetUrl = `${constants.SCRAP_API_URL}/v1/user/dealer-list`;
const scrapingbeeUrl = new URL("https://app.scrapingbee.com/api/v1");
scrapingbeeUrl.searchParams.set("api_key", scrapingbeeApiKey);
scrapingbeeUrl.searchParams.set("url", targetUrl);
scrapingbeeUrl.searchParams.set("forward_headers", "true");
scrapingbeeUrl.searchParams.set("forward_headers_pure", "true");
const jwtUrlEncoded = encodeURIComponent(jwt);
const finalUrl = new URL("http://api.scrape.do/");
finalUrl.searchParams.append("url", targetUrl);
finalUrl.searchParams.append("token", scrapeDoApiKey);
finalUrl.searchParams.append("extraHeaders", "true");
const res = await fetch(scrapingbeeUrl.toString(), {
const res = await fetch(finalUrl.toString(), {
method: "POST",
headers: {
"Spb-Authorization": jwt,
"Spb-Content-Type": "application/json",
"sd-Authorization": jwt,
"sd-Content-Type": "application/json",
"sd-Cookie": `AuthorizationToken=${jwtUrlEncoded}`,
...forwardHeaders,
},
body: JSON.stringify({
page: 1,
@@ -280,28 +286,35 @@ export const getDealers = async (jwt: string, distributor_ids: string[]) => {
};
export const getDistributors = async (jwt: string) => {
const scrapingbeeApiKey = env.SCRAPINGBEE_API_KEY;
if (!scrapingbeeApiKey) {
logger.error("[getDistributors] ScrapingBee API key not configured");
const scrapeDoApiKey = env.SCRAPEDO_API_KEY ?? "";
if (!scrapeDoApiKey) {
logger.error("[getDistributors] Scrape.do API key not configured");
return {
ok: false,
message: "ScrapingBee API key not configured",
message: "Scrape.do API key not configured",
data: [],
};
}
const targetUrl = `${constants.SCRAP_API_URL}/v1/user/distributor-list`;
const scrapingbeeUrl = new URL("https://app.scrapingbee.com/api/v1");
scrapingbeeUrl.searchParams.set("api_key", scrapingbeeApiKey);
scrapingbeeUrl.searchParams.set("url", targetUrl);
scrapingbeeUrl.searchParams.set("forward_headers", "true");
scrapingbeeUrl.searchParams.set("forward_headers_pure", "true");
const jwtUrlEncoded = encodeURIComponent(jwt);
const finalUrl = new URL("http://api.scrape.do/");
finalUrl.searchParams.append("url", targetUrl);
finalUrl.searchParams.append("token", scrapeDoApiKey);
finalUrl.searchParams.append("extraHeaders", "true");
const res = await fetch(scrapingbeeUrl.toString(), {
// Prefix headers with sd- for Scrape.do to forward them
const forwardHeaders = Object.fromEntries(
Object.entries(constants.SCRAP_API_BASE_HEADERS).map(([key, value]) => [`sd-${key}`, value]),
);
const res = await fetch(finalUrl.toString(), {
method: "POST",
headers: {
"Spb-Authorization": jwt,
"Spb-Content-Type": "application/json",
"sd-Authorization": jwt,
"sd-Content-Type": "application/json",
"sd-Cookie": `AuthorizationToken=${jwtUrlEncoded}`,
...forwardHeaders,
},
body: JSON.stringify({
page: 1,
@@ -333,19 +346,32 @@ export const getDistributors = async (jwt: string) => {
};
export const getDraws = async (jwt: string) => {
const scrapingbeeApiKey = env.SCRAPINGBEE_API_KEY;
const scrapeDoApiKey = env.SCRAPEDO_API_KEY ?? "";
if (!scrapeDoApiKey) {
logger.error("[getDraws] Scrape.do API key not configured");
return { ok: false, message: "Scrape.do API key not configured", data: [] };
}
logger.info(`[getDraws] Fetching draws from the API`);
const targetUrl = `${constants.SCRAP_API_URL}/v1/draw/list-my?userId=15`;
const scrapingbeeUrl = new URL("https://app.scrapingbee.com/api/v1");
scrapingbeeUrl.searchParams.set("api_key", scrapingbeeApiKey);
scrapingbeeUrl.searchParams.set("url", targetUrl);
scrapingbeeUrl.searchParams.set("forward_headers", "true");
scrapingbeeUrl.searchParams.set("forward_headers_pure", "true");
const jwtUrlEncoded = encodeURIComponent(jwt);
const finalUrl = new URL("http://api.scrape.do/");
finalUrl.searchParams.append("url", targetUrl);
finalUrl.searchParams.append("token", scrapeDoApiKey);
finalUrl.searchParams.append("extraHeaders", "true");
const res = await fetch(scrapingbeeUrl.toString(), {
headers: { "Spb-Authorization": jwt },
// Prefix headers with sd- for Scrape.do to forward them
const forwardHeaders = Object.fromEntries(
Object.entries(constants.SCRAP_API_BASE_HEADERS).map(([key, value]) => [`sd-${key}`, value]),
);
const res = await fetch(finalUrl.toString(), {
headers: {
"sd-Authorization": jwt,
"sd-Cookie": `AuthorizationToken=${jwtUrlEncoded}`,
...forwardHeaders,
},
});
type J = {
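
The same Scrape.do request setup (proxy URL at http://api.scrape.do/ with url, token, and extraHeaders=true query params, plus headers prefixed with sd- that the proxy forwards to the target API) is now repeated inline in every helper in this file. A minimal sketch of a shared builder for that pattern, assuming a SvelteKit server context; the name buildScrapeDoRequest, its options shape, and the return shape are illustrative and not part of this change:

// Sketch only: shared builder for the Scrape.do proxy pattern used by the helpers
// above. buildScrapeDoRequest and its options object are assumptions for
// illustration; the repository keeps this logic inline in each helper.
import { env } from "$env/dynamic/private";

interface ScrapeDoRequestOptions {
  targetUrl: string; // the real API endpoint reached through the proxy
  jwt?: string; // forwarded as sd-Authorization and sd-Cookie
  headers?: Record<string, string>; // extra headers, prefixed with sd- below
  body?: unknown; // JSON payload; when present the request is a POST
}

function buildScrapeDoRequest(opts: ScrapeDoRequestOptions) {
  const token = env.SCRAPEDO_API_KEY ?? "";
  if (!token) return null; // callers log the missing key and bail, as above

  // Scrape.do takes the target URL and token as query params;
  // extraHeaders=true asks it to forward the sd-* request headers.
  const proxyUrl = new URL("http://api.scrape.do/");
  proxyUrl.searchParams.append("url", opts.targetUrl);
  proxyUrl.searchParams.append("token", token);
  proxyUrl.searchParams.append("extraHeaders", "true");

  const headers: Record<string, string> = {};
  for (const [key, value] of Object.entries(opts.headers ?? {})) {
    headers[`sd-${key}`] = value; // sd- marks a header for the proxy to forward
  }
  if (opts.jwt) {
    headers["sd-Authorization"] = opts.jwt;
    headers["sd-Cookie"] = `AuthorizationToken=${encodeURIComponent(opts.jwt)}`;
  }

  return {
    url: proxyUrl.toString(),
    init: opts.body
      ? { method: "POST", headers, body: JSON.stringify(opts.body) }
      : { headers },
  };
}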

View File

@@ -1,9 +1,9 @@
import winston from "winston";
import DailyRotateFile from "winston-daily-rotate-file";
import util from "util";
import { Err } from "./result";
import { env } from "$env/dynamic/private";
import path from "path";
import util from "util";
import winston from "winston";
import DailyRotateFile from "winston-daily-rotate-file";
import type { Err } from "./result";
process.on("warning", (warning) => {
const msg = String(warning?.message || "");
@@ -73,14 +73,14 @@ const consoleFormat = winston.format.combine(
: "";
return `[${level}] ${timestamp}: ${formattedMessage}${formattedExtra}`;
})
}),
);
// JSON format for file logging
const fileFormat = winston.format.combine(
winston.format.errors({ stack: true }),
winston.format.timestamp(),
winston.format.json()
winston.format.json(),
);
// Log directory - use logs folder in project root

View File

@@ -205,31 +205,27 @@ async function sendBatchRequest(
changedBalance: number,
body: string,
) {
const scrapingbeeApiKey = env.SCRAPINGBEE_API_KEY;
if (!scrapingbeeApiKey) {
logger.error("[sendBatchRequest] ScrapingBee API key not configured");
throw new Error("ScrapingBee API key not configured");
const scrapeDoApiKey = env.SCRAPEDO_API_KEY ?? "";
if (!scrapeDoApiKey) {
logger.error("[sendBatchRequest] Scrape.do API key not configured");
throw new Error("Scrape.do API key not configured");
}
const targetUrl = `${constants.SCRAP_API_URL}/v1/book/add-multiple`;
const scrapingbeeUrl = new URL("https://app.scrapingbee.com/api/v1");
scrapingbeeUrl.searchParams.set("api_key", scrapingbeeApiKey);
scrapingbeeUrl.searchParams.set("url", targetUrl);
scrapingbeeUrl.searchParams.set("forward_headers", "true");
// Prefix headers with Spb- for ScrapingBee to forward them
const forwardHeaders = Object.fromEntries(
Object.entries(constants.SCRAP_API_BASE_HEADERS).map(([key, value]) => [`Spb-${key}`, value]),
);
const jwtUrlEncoded = encodeURIComponent(session.sessionToken);
const finalUrl = new URL("http://api.scrape.do/");
finalUrl.searchParams.append("url", targetUrl);
finalUrl.searchParams.append("token", scrapeDoApiKey);
finalUrl.searchParams.append("extraHeaders", "true");
logger.debug(`[sendBatchRequest] Sending batch request for dealer ${dealerId}, draw ${draw.id}`);
return fetch(scrapingbeeUrl.toString(), {
return fetch(finalUrl.toString(), {
method: "POST",
headers: {
"Spb-Authorization": session.sessionToken,
"Spb-Content-Type": "application/json",
...forwardHeaders,
"sd-Authorization": session.sessionToken,
"sd-Content-Type": "application/json",
"sd-Cookie": `AuthorizationToken=${jwtUrlEncoded}`,
},
body: JSON.stringify({
dealerId,
@@ -310,31 +306,27 @@ async function deleteAllBookedEntries({
drawId: number;
closeTime: string;
}) {
const scrapingbeeApiKey = env.SCRAPINGBEE_API_KEY;
if (!scrapingbeeApiKey) {
logger.error("[deleteAllBookedEntries] ScrapingBee API key not configured");
throw new Error("ScrapingBee API key not configured");
const scrapeDoApiKey = env.SCRAPEDO_API_KEY ?? "";
if (!scrapeDoApiKey) {
logger.error("[deleteAllBookedEntries] Scrape.do API key not configured");
throw new Error("Scrape.do API key not configured");
}
const targetUrl = `${constants.SCRAP_API_URL}/v1/book/delete-multiple`;
const scrapingbeeUrl = new URL("https://app.scrapingbee.com/api/v1");
scrapingbeeUrl.searchParams.set("api_key", scrapingbeeApiKey);
scrapingbeeUrl.searchParams.set("url", targetUrl);
scrapingbeeUrl.searchParams.set("forward_headers", "true");
// Prefix headers with Spb- for ScrapingBee to forward them
const forwardHeaders = Object.fromEntries(
Object.entries(constants.SCRAP_API_BASE_HEADERS).map(([key, value]) => [`Spb-${key}`, value]),
);
const jwtUrlEncoded = encodeURIComponent(session.sessionToken);
const finalUrl = new URL("http://api.scrape.do/");
finalUrl.searchParams.append("url", targetUrl);
finalUrl.searchParams.append("token", scrapeDoApiKey);
finalUrl.searchParams.append("extraHeaders", "true");
logger.debug(`[deleteAllBookedEntries] Deleting ${data.length} entries for dealer ${dealerId}, draw ${drawId}`);
return fetch(scrapingbeeUrl.toString(), {
return fetch(finalUrl.toString(), {
method: "POST",
headers: {
"Spb-Authorization": session.sessionToken,
"Spb-Content-Type": "application/json",
...forwardHeaders,
"sd-Authorization": session.sessionToken,
"sd-Content-Type": "application/json",
"sd-Cookie": `AuthorizationToken=${jwtUrlEncoded}`,
},
body: JSON.stringify({
bookIds: data.map((e) => e.bookId),
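
As a usage sketch of that illustrative builder, the two write helpers in this file reduce to the same shape; note that, unlike the read helpers, the new code here sends only sd-Authorization, sd-Content-Type, and sd-Cookie and no longer spreads the base headers. Everything below apart from the constants import is assumed for illustration, and the payload fields beyond dealerId are elided:

// Usage sketch only: how sendBatchRequest could call the illustrative
// buildScrapeDoRequest from the earlier sketch. The payload fields beyond
// dealerId are elided; see the JSON body in the diff above.
import { constants } from "$lib/utils/constants";

async function sendBatchRequestSketch(
  sessionToken: string,
  dealerId: number,
  payload: Record<string, unknown>,
) {
  const req = buildScrapeDoRequest({
    targetUrl: `${constants.SCRAP_API_URL}/v1/book/add-multiple`,
    jwt: sessionToken,
    headers: { "Content-Type": "application/json" }, // forwarded as sd-Content-Type
    body: { dealerId, ...payload },
  });
  if (!req) throw new Error("Scrape.do API key not configured");
  return fetch(req.url, req.init);
}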

View File

@@ -1,5 +1,7 @@
import { env } from "$env/dynamic/private";
import { dbApiUser } from "$lib/server/db/apiuser.db";
import { getSessionToken } from "$lib/server/external/api.scraping.helpers";
import { logger } from "$lib/server/logger";
import {
isSessionValidInStore,
removeSessionFromStore,
@@ -9,37 +11,38 @@ import { getUUID } from "$lib/utils";
import { constants } from "$lib/utils/constants";
import type { ServerError } from "$lib/utils/data.types";
import { TRPCError } from "@trpc/server";
import fetch from "node-fetch";
import { z } from "zod";
import { createTRPCRouter, protectedProcedure } from "../t";
import { env } from "$env/dynamic/private";
import { logger } from "$lib/server/logger";
import fetch from "node-fetch";
export const apiAuthRouter = createTRPCRouter({
getCaptcha: protectedProcedure.mutation(async () => {
const scrapingbeeApiKey = env.SCRAPINGBEE_API_KEY;
const scrapeDoApiKey = env.SCRAPEDO_API_KEY ?? "";
if (!scrapeDoApiKey) {
logger.error("[getCaptcha] Scrape.do API key not configured");
throw new TRPCError({
code: "INTERNAL_SERVER_ERROR",
message: "Scrape.do API key not configured",
});
}
try {
const uuid = getUUID();
const targetUrl = `${constants.SCRAP_API_URL}/verify/image?uuid=${uuid}`;
logger.info(`[getCaptcha] Fetching captcha image for uuid: ${uuid}`);
// Build ScrapingBee API URL with params
const scrapingbeeUrl = new URL("https://app.scrapingbee.com/api/v1");
scrapingbeeUrl.searchParams.set("api_key", scrapingbeeApiKey);
scrapingbeeUrl.searchParams.set("url", targetUrl);
scrapingbeeUrl.searchParams.set("render_js", "false");
scrapingbeeUrl.searchParams.set("block_resources", "false");
const res = await fetch(scrapingbeeUrl.toString());
logger.info(`[getCaptcha] Fetching captcha image for uuid: ${uuid}`);
const finalUrl = new URL("http://api.scrape.do/");
finalUrl.searchParams.append("url", targetUrl);
finalUrl.searchParams.append("token", scrapeDoApiKey);
const res = await fetch(finalUrl.toString());
if (!res.ok || res.status !== 200) {
// Clone response before reading to avoid consuming body
const clonedRes = res.clone();
const errorText = await clonedRes.text().catch(() => "Unknown error");
logger.error(`[getCaptcha] ScrapingBee error ${res.status}: ${errorText.substring(0, 200)}`);
logger.error(`[getCaptcha] Scrape.do error ${res.status}: ${errorText.substring(0, 200)}`);
throw new TRPCError({
code: "INTERNAL_SERVER_ERROR",
message: `Failed to fetch captcha image: ${res.status}`,
@@ -50,8 +53,10 @@ export const apiAuthRouter = createTRPCRouter({
const arrayBuffer = await res.arrayBuffer();
const imageBuffer = Buffer.from(arrayBuffer);
const base64String = imageBuffer.toString("base64");
logger.info(`[getCaptcha] Successfully fetched captcha image for uuid: ${uuid}, size: ${imageBuffer.length} bytes`);
logger.info(
`[getCaptcha] Successfully fetched captcha image for uuid: ${uuid}, size: ${imageBuffer.length} bytes`,
);
return { id: uuid, image: base64String };
} catch (err) {
logger.error("[getCaptcha] Error getting captcha image", err);
@@ -90,7 +95,7 @@ export const apiAuthRouter = createTRPCRouter({
password = _user.password;
logger.info(`[getNewSession] Using specific user: ${userId}`);
}
logger.info(`[getNewSession] Getting session token for user ${userId}`);
const token = await getSessionToken({
code: captchaAnswer,
@@ -99,7 +104,7 @@ export const apiAuthRouter = createTRPCRouter({
userType: userType,
password: password,
});
if (!token.ok) {
logger.warn(`[getNewSession] Failed to get session token: ${token.message}`);
return {
@@ -107,7 +112,7 @@ export const apiAuthRouter = createTRPCRouter({
errors: [{ message: token.message }],
};
}
await setSessionToRedis(token.message, input.userId ?? "");
logger.info(`[getNewSession] Successfully created session for user ${userId}`);
return { success: true, errors: [] as ServerError };
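
The captcha route only needs a plain GET through the proxy (no sd- headers and no extraHeaders flag) and then base64-encodes the binary body for the client. A minimal standalone sketch of that flow, assuming Node's global fetch and Buffer; the function name fetchCaptchaImage is illustrative:

// Sketch only: fetch a binary resource through the Scrape.do proxy and return it
// as base64, mirroring the getCaptcha flow above. fetchCaptchaImage is not a
// function in the repository; it is named here for illustration.
async function fetchCaptchaImage(targetUrl: string, scrapeDoApiKey: string): Promise<string> {
  const proxyUrl = new URL("http://api.scrape.do/");
  proxyUrl.searchParams.append("url", targetUrl);
  proxyUrl.searchParams.append("token", scrapeDoApiKey);

  const res = await fetch(proxyUrl.toString());
  if (!res.ok) {
    throw new Error(`Scrape.do error ${res.status}`);
  }

  // The captcha endpoint returns raw image bytes; base64-encode them for the client.
  const arrayBuffer = await res.arrayBuffer();
  return Buffer.from(arrayBuffer).toString("base64");
}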

View File

@@ -1,8 +1,8 @@
import { dbPresetData } from "$lib/server/db/presetdata.db";
import { logger } from "$lib/server/logger";
import { zDDFilters, zPresetDataEntry } from "$lib/utils/data.types";
import { z } from "zod";
import { createTRPCRouter, protectedProcedure } from "../t";
import { logger } from "$lib/server/logger";
export const presetDataRouter = createTRPCRouter({
getAll: protectedProcedure.input(zDDFilters).mutation(async ({ input }) => {
@@ -21,23 +21,23 @@ export const presetDataRouter = createTRPCRouter({
};
}),
insert: protectedProcedure
.input(z.array(zPresetDataEntry))
.mutation(async ({ input }) => {
logger.info(`[presetData.insert] Inserting ${input.length} preset data entries`);
const data = await dbPresetData.insertData(input);
logger.info(`[presetData.insert] Successfully inserted ${data.length} entries`);
return {
ok: true,
detail: "Data inserted",
data,
};
}),
insert: protectedProcedure.input(z.array(zPresetDataEntry)).mutation(async ({ input }) => {
logger.info(`[presetData.insert] Inserting ${input.length} preset data entries`);
const data = await dbPresetData.insertData(input);
logger.info(`[presetData.insert] Successfully inserted ${data?.length} entries`);
return {
ok: true,
detail: "Data inserted",
data,
};
}),
delete: protectedProcedure
.input(z.object({ date: z.string(), ids: z.array(z.string()) }))
.mutation(async ({ input }) => {
logger.info(`[presetData.delete] Deleting ${input.ids.length} preset data entries for date ${input.date}`);
logger.info(
`[presetData.delete] Deleting ${input.ids.length} preset data entries for date ${input.date}`,
);
await dbPresetData.deleteDataByIds(input.date, input.ids);
logger.info("[presetData.delete] Successfully deleted preset data entries");
return { ok: true, detail: "Data deleted" };