Chunking scene updates to prevent database errors.

DebaucheryLibrarian 2026-02-03 05:46:50 +01:00
parent 48af5939db
commit 39cdea17f3
2 changed files with 7 additions and 5 deletions
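The gist of the change: instead of pushing every duplicate release into a single UPDATE, the curated rows are split into chunks of 500 and each chunk is updated in its own query, with the per-chunk rowCounts summed afterwards for the log line. Below is a minimal, self-contained sketch of that pattern, assuming lodash's chunk (the project's own chunk helper may differ) and a json_to_recordset-style bulk update (the FROM clause itself falls outside the hunks shown below); the updateScenes wrapper, connection config, and reduced column list are illustrative only, not the project's actual code.

const { chunk } = require('lodash');

const knex = require('knex')({
  client: 'pg',
  connection: process.env.DATABASE_URL, // hypothetical connection string
});

// Update duplicate releases in batches of 500 rows, so no single statement
// has to carry the entire JSON payload at once.
async function updateScenes(curatedDuplicateReleases) {
  const updatedChunks = await Promise.all(chunk(curatedDuplicateReleases, 500).map((chunkedReleases) => knex.raw(`
    UPDATE releases
    SET url = COALESCE(new.url, releases.url)
    FROM json_to_recordset(:scenes) -- assumed source; the actual FROM clause is not shown in the diff
      AS new(id int, url text)
    WHERE releases.id = new.id
  `, {
    scenes: JSON.stringify(chunkedReleases),
  })));

  // Each chunk resolves to its own driver result; summing rowCount recovers a single total.
  return updatedChunks.reduce((acc, updatedChunk) => acc + updatedChunk.rowCount, 0);
}

Per the commit message, the intent is to keep each statement's JSON payload small enough to avoid the database errors seen with one oversized update, while Promise.all still runs the chunked queries concurrently.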

View File

@@ -52,7 +52,7 @@ unprint.on('requestInit', (event) => logger.debug(`Unprint ${event.method} (${ev
 unprint.on('requestError', (event) => logger.error(`Unprint failed ${event.isProxied ? 'proxied ' : ''}${event.isBrowser ? 'browser ' : ''}${event.method} ${event.url} (${event.status}): ${event.statusText}`));
 unprint.on('browserOpen', (event) => logger.debug(`Unprint opened browsers ${event.keys} (${event.active}/${config.bypass.browser.clientRetirement} active, ${event.clients} clients)`));
-unprint.on('browserClose', (event) => logger.debug(`Unprint closed ${event.retired ? ' retired' : ''} browsers ${event.keys} (${event.active}/${config.bypass.browser.clientRetirement} active, ${event.clients} clients)`));
+unprint.on('browserClose', (event) => logger.debug(`Unprint closed${event.retired ? ' retired' : ''} browsers ${event.keys} (${event.active}/${config.bypass.browser.clientRetirement} active, ${event.clients} clients)`));

 function logActive() {
   setTimeout(() => {

View File

@@ -430,7 +430,7 @@ async function storeScenes(releases, useBatchId) {
   const curatedDuplicateReleases = await Promise.all(duplicateReleasesWithId.map((release) => curateReleaseEntry(release, batchId)));
   const releasesWithId = uniqueReleasesWithId.concat(duplicateReleasesWithId);

-  const updated = await knex.raw(`
+  const updatedChunks = await Promise.all(chunk(curatedDuplicateReleases, 500).map(async (chunkedReleases) => knex.raw(`
     UPDATE releases
     SET url = COALESCE(new.url, releases.url),
         date = COALESCE(new.date, releases.date),
@@ -447,8 +447,8 @@ async function storeScenes(releases, useBatchId) {
       AS new(id int, url text, date timestamptz, entity json, title text, description text, shoot_id text, duration integer, comment text, attributes json, deep boolean)
     WHERE releases.id = new.id
   `, {
-    scenes: JSON.stringify(curatedDuplicateReleases),
-  });
+    scenes: JSON.stringify(chunkedReleases),
+  })));

   const [actors, storedSeries] = await Promise.all([
     argv.associateActors && associateActors(releasesWithId, batchId),
@@ -469,7 +469,9 @@ async function storeScenes(releases, useBatchId) {
     await scrapeActors(actors.map((actor) => actor.name));
   }

-  logger.info(`Stored ${storedReleaseEntries.length}, updated ${updated.rowCount} releases`);
+  const updated = updatedChunks.reduce((acc, updatedChunk) => acc + updatedChunk.rowCount, 0);
+
+  logger.info(`Stored ${storedReleaseEntries.length}, updated ${updated} releases`);

   await notify(releasesWithId);