Fix: parallel uploads are broken with SurrealDB v2.1

This commit is contained in:
bootunloader
2025-01-25 23:54:24 +02:00
parent aa73254a87
commit 2d1c9f3fc1
2 changed files with 11 additions and 10 deletions

View File

@@ -1,4 +1,4 @@
import Surreal, { RecordId, StringRecordId } from "surrealdb";
import Surreal, { StringRecordId } from "surrealdb";
export type { QueryResult } from "surrealdb";
try {
@@ -19,8 +19,6 @@ const db = new Surreal();
/**
 * Wraps a record-id string (presumably "table:id" — see the commented-out
 * split below; confirm against callers) in a SurrealDB `StringRecordId`
 * so the SDK resolves it as-is.
 *
 * @param idStr - the raw record id string to wrap
 * @returns a `StringRecordId` the SurrealDB client accepts wherever a
 *          record id is expected
 */
export function parseToRID(idStr: string) {
return new StringRecordId(idStr);
// Earlier approach: split into table/id parts and build a RecordId.
// Kept for reference; superseded by StringRecordId (this commit also
// drops the RecordId import).
// const [a, b] = idStr.split(":");
// return new RecordId(a, b);
}
if (CONFIG.url.length > 0) {

View File

@@ -48,8 +48,8 @@ const upsertData = async (
Math.random() * (data.length * 0.2 - data.length * 0.05) +
data.length * 0.05,
);
if (chunkSize > 1_000) {
chunkSize = 1_000;
if (chunkSize > 4_000) {
chunkSize = 4_000;
}
console.log(`Chunk Size: ${chunkSize}`);
@@ -59,12 +59,15 @@ const upsertData = async (
// Insert new entries in chunks
console.time("insertion time");
const chunks = chunkArray(entries, chunkSize).map(async (chunk) => {
const chunks = chunkArray(entries, chunkSize);
// .map(async (chunk) => {
// await surreal.insert<BookingEntry>(tableName, chunk);
// });
// for (let i = 0; i < chunks.length; i += 2) {
// await Promise.all(chunks.slice(i, i + 2));
// }
for (const chunk of chunks) {
await surreal.insert<BookingEntry>(tableName, chunk);
});
for (let i = 0; i < chunks.length; i += 2) {
await Promise.all(chunks.slice(i, i + 2));
}
console.timeEnd("insertion time");