Chunking scene updates to prevent database errors.
This commit is contained in:
parent 48af5939db
commit 39cdea17f3
@@ -52,7 +52,7 @@ unprint.on('requestInit', (event) => logger.debug(`Unprint ${event.method} (${ev
 unprint.on('requestError', (event) => logger.error(`Unprint failed ${event.isProxied ? 'proxied ' : ''}${event.isBrowser ? 'browser ' : ''}${event.method} ${event.url} (${event.status}): ${event.statusText}`));

 unprint.on('browserOpen', (event) => logger.debug(`Unprint opened browsers ${event.keys} (${event.active}/${config.bypass.browser.clientRetirement} active, ${event.clients} clients)`));
-unprint.on('browserClose', (event) => logger.debug(`Unprint closed ${event.retired ? ' retired' : ''} browsers ${event.keys} (${event.active}/${config.bypass.browser.clientRetirement} active, ${event.clients} clients)`));
+unprint.on('browserClose', (event) => logger.debug(`Unprint closed${event.retired ? ' retired' : ''} browsers ${event.keys} (${event.active}/${config.bypass.browser.clientRetirement} active, ${event.clients} clients)`));

 function logActive() {
   setTimeout(() => {
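The only change in this hunk is the space ahead of the ternary: the old template inserted a literal space before `${event.retired ? ' retired' : ''}`, so the message always carried a double space after "closed". A minimal sketch of the difference (the `retired` flag is illustrative, not the real event object):

// Old template: literal space plus the ternary's own leading space -> double space.
const retired = true;
const before = `Unprint closed ${retired ? ' retired' : ''} browsers`; // "Unprint closed  retired browsers"

// New template: the ternary alone supplies the space when needed.
const after = `Unprint closed${retired ? ' retired' : ''} browsers`; // "Unprint closed retired browsers"

console.log(before);
console.log(after);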
@@ -430,7 +430,7 @@ async function storeScenes(releases, useBatchId) {
   const curatedDuplicateReleases = await Promise.all(duplicateReleasesWithId.map((release) => curateReleaseEntry(release, batchId)));
   const releasesWithId = uniqueReleasesWithId.concat(duplicateReleasesWithId);

-  const updated = await knex.raw(`
+  const updatedChunks = await Promise.all(chunk(curatedDuplicateReleases, 500).map(async (chunkedReleases) => knex.raw(`
     UPDATE releases
     SET url = COALESCE(new.url, releases.url),
       date = COALESCE(new.date, releases.date),
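This is the heart of the commit: the single raw UPDATE over every curated duplicate release is replaced by one UPDATE per chunk of 500 rows, run concurrently under Promise.all, which keeps the JSON payload bound to each statement small. A minimal sketch of the batching pattern, assuming `chunk` behaves like lodash's `chunk`; `updateBatch` is a hypothetical stand-in for the knex.raw call shown in the hunk:

// Split an array into consecutive batches of at most `size` items (lodash-style chunk).
function chunk(items, size) {
  const batches = [];

  for (let i = 0; i < items.length; i += size) {
    batches.push(items.slice(i, i + size));
  }

  return batches;
}

// Run one update per batch and keep the per-batch results for later aggregation.
async function updateInBatches(rows, updateBatch, batchSize = 500) {
  return Promise.all(chunk(rows, batchSize).map((batch) => updateBatch(batch)));
}

In the commit, the per-batch work is the knex.raw UPDATE itself, so `updatedChunks` ends up as an array of query results rather than a single result object.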
@@ -447,8 +447,8 @@ async function storeScenes(releases, useBatchId) {
     AS new(id int, url text, date timestamptz, entity json, title text, description text, shoot_id text, duration integer, comment text, attributes json, deep boolean)
     WHERE releases.id = new.id
   `, {
-    scenes: JSON.stringify(curatedDuplicateReleases),
-  });
+    scenes: JSON.stringify(chunkedReleases),
+  })));

   const [actors, storedSeries] = await Promise.all([
     argv.associateActors && associateActors(releasesWithId, batchId),
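The unchanged context shows how each chunk is bound: the rows are serialized into a single JSON parameter (`scenes`) and expanded server-side into a recordset aliased as `new`, whose columns are COALESCEd into `releases`. A self-contained sketch of that PostgreSQL pattern with knex, assuming the FROM clause (not visible in this hunk) uses `json_to_recordset`; the connection settings are illustrative and the column list is trimmed for brevity:

// Assumed knex setup; in the project this would be the existing configured instance.
const knex = require('knex')({ client: 'pg', connection: process.env.DATABASE_URL });

async function updateReleaseBatch(rows) {
  // The whole batch travels as one bound JSON parameter and is unpacked by Postgres.
  return knex.raw(`
    UPDATE releases
    SET url = COALESCE(new.url, releases.url),
        date = COALESCE(new.date, releases.date)
    FROM json_to_recordset(:scenes) AS new(id int, url text, date timestamptz)
    WHERE releases.id = new.id
  `, {
    scenes: JSON.stringify(rows),
  });
}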
@@ -469,7 +469,9 @@ async function storeScenes(releases, useBatchId) {
     await scrapeActors(actors.map((actor) => actor.name));
   }

-  logger.info(`Stored ${storedReleaseEntries.length}, updated ${updated.rowCount} releases`);
+  const updated = updatedChunks.reduce((acc, updatedChunk) => acc + updatedChunk.rowCount, 0);
+
+  logger.info(`Stored ${storedReleaseEntries.length}, updated ${updated} releases`);

   await notify(releasesWithId);

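Since the update now yields an array of per-chunk results, `updated.rowCount` no longer exists; the new `reduce` sums the pg driver's `rowCount` from each chunk so the log line keeps reporting the total number of updated releases. A quick illustration with stubbed results (the counts are made up):

// Each knex.raw result on PostgreSQL carries rowCount for its own statement.
const updatedChunks = [{ rowCount: 500 }, { rowCount: 500 }, { rowCount: 137 }];

const updated = updatedChunks.reduce((acc, updatedChunk) => acc + updatedChunk.rowCount, 0);

console.log(`updated ${updated} releases`); // updated 1137 releases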