Chunking scene updates into batches of 500 to prevent database errors when updating many releases at once.

This commit is contained in:
DebaucheryLibrarian 2026-02-03 05:46:50 +01:00
parent 48af5939db
commit 39cdea17f3
2 changed files with 7 additions and 5 deletions

@@ -430,7 +430,7 @@ async function storeScenes(releases, useBatchId) {
 	const curatedDuplicateReleases = await Promise.all(duplicateReleasesWithId.map((release) => curateReleaseEntry(release, batchId)));
 	const releasesWithId = uniqueReleasesWithId.concat(duplicateReleasesWithId);
-	const updated = await knex.raw(`
+	const updatedChunks = await Promise.all(chunk(curatedDuplicateReleases, 500).map(async (chunkedReleases) => knex.raw(`
 		UPDATE releases
 		SET url = COALESCE(new.url, releases.url),
 			date = COALESCE(new.date, releases.date),
@@ -447,8 +447,8 @@ async function storeScenes(releases, useBatchId) {
 		AS new(id int, url text, date timestamptz, entity json, title text, description text, shoot_id text, duration integer, comment text, attributes json, deep boolean)
 		WHERE releases.id = new.id
 	`, {
-		scenes: JSON.stringify(curatedDuplicateReleases),
-	});
+		scenes: JSON.stringify(chunkedReleases),
+	})));
 	const [actors, storedSeries] = await Promise.all([
 		argv.associateActors && associateActors(releasesWithId, batchId),
@@ -469,7 +469,9 @@ async function storeScenes(releases, useBatchId) {
 		await scrapeActors(actors.map((actor) => actor.name));
 	}
-	logger.info(`Stored ${storedReleaseEntries.length}, updated ${updated.rowCount} releases`);
+	const updated = updatedChunks.reduce((acc, updatedChunk) => acc + updatedChunk.rowCount, 0);
+	logger.info(`Stored ${storedReleaseEntries.length}, updated ${updated} releases`);
 	await notify(releasesWithId);
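
For reference, here is the pattern this commit introduces as a standalone sketch. This is a minimal reconstruction, not the repository's exact code: it assumes lodash's chunk helper and a configured knex client for Postgres, and the json_to_recordset call and shortened column list are inferred from the AS new(...) row definition visible in the hunk above.

// Minimal sketch of the chunked UPDATE pattern, assuming lodash and a
// Postgres-backed knex client. json_to_recordset and the trimmed column
// list are assumptions inferred from the AS new(...) clause in the diff.
const { chunk } = require('lodash');
const knex = require('knex')({ client: 'pg', connection: process.env.DATABASE_URL });

async function updateDuplicateReleases(curatedDuplicateReleases) {
	// Split the payload into batches of 500 rows so no single statement
	// carries an oversized JSON parameter, and run the batches concurrently.
	const updatedChunks = await Promise.all(chunk(curatedDuplicateReleases, 500).map((chunkedReleases) => knex.raw(`
		UPDATE releases
		SET url = COALESCE(new.url, releases.url),
			date = COALESCE(new.date, releases.date)
		FROM json_to_recordset(:scenes)
			AS new(id int, url text, date timestamptz)
		WHERE releases.id = new.id
	`, {
		scenes: JSON.stringify(chunkedReleases),
	})));

	// Each batch resolves to its own pg result object; summing the
	// per-batch rowCount values recovers the total for the log line.
	return updatedChunks.reduce((acc, updatedChunk) => acc + updatedChunk.rowCount, 0);
}

Promise.all runs the batches concurrently; if parallel UPDATEs contend for locks on the releases table, running the chunks sequentially in a for...of loop is a straightforward alternative.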