Improved upcoming update query.

This commit is contained in:
DebaucheryLibrarian 2021-06-06 01:09:32 +02:00
parent 3f473589ad
commit e9a0700742
12 changed files with 21 additions and 23 deletions

9 binary image files added (contents not shown; sizes: 7.1 KiB, 31 KiB, 2.8 MiB, 10 MiB, 7.1 KiB, 38 KiB, 487 KiB, 8.3 KiB, 36 KiB).

View File

@@ -598,6 +598,7 @@ const tagMedia = [
     ['airtight', 7, 'Lana Rhoades in "Gangbang Me 3"', 'hardx'],
     ['airtight', 6, 'Remy Lacroix in "Ass Worship 14"', 'julesjordan'],
     ['airtight', 11, 'Malena Nazionale in "Rocco\'s Perverted Secretaries 2: Italian Edition"', 'roccosiffredi'],
+    ['airtight', 'venera_maxima_legalporno', 'Venera Maxima in LegalPorno SZ2645', 'legalporno'],
     ['airtight', 1, 'Jynx Maze in "Pump My Ass Full of Cum 3"', 'julesjordan'],
     ['airtight', 10, 'Asa Akira in "Asa Akira To The Limit"', 'julesjordan'],
     ['airtight', 8, 'Veronica Leal in SZ2520'],
@@ -605,6 +606,7 @@ const tagMedia = [
     ['airtight', 5, 'Chloe Amour in "DP Masters 4"', 'julesjordan'],
     ['airtight', 9, 'Cindy Shine in GP1658'],
     ['anal', 5, 'Abella Danger', 'hardx'],
+    ['anal', 'kira_noir_julesjordan', 'Kira Noir in "Kira Noir Opens Her Ass For Manuel"', 'julesjordan'],
     ['anal', 7, 'Anastasia Brokelyn', 'bangbros'],
     ['anal', 'jane_wilde_evilangel_2', 'Jane Wilde and Brock Cooper in "The Cock Hungry Chronicles"', 'evilangel'],
     ['anal', 0, 'Adriana Chechik in "Manuel Creampies Their Asses 3"', 'julesjordan'],
@@ -858,6 +860,7 @@ const tagMedia = [
     ['mfm', 0, 'Vina Sky in "Jules Jordan\'s Three Ways"', 'julesjordan'],
     ['mfm', 8, 'Ariana Marie in "DP Masters 7"', 'julesjordan'],
     ['mfm', 1, 'Lana Rhoades in "Gangbang Me 3"', 'hardx'],
+    ['mfm', 'hazel_moore_legalporno', 'Hazel Moore', 'legalporno'],
     ['mfm', 7, 'Rose Valerie', 'eurosexparties'],
     ['mfm', 6, 'Honey Gold in "Slut Puppies 12"', 'julesjordan'],
     ['natural-boobs', 1, 'Nia Nacci', 'firstclasspov'],

View File

@@ -323,8 +323,7 @@ async function storeScenes(releases) {
     const duplicateReleasesWithId = attachReleaseIds(duplicateReleases, duplicateReleaseEntries);
     const releasesWithId = uniqueReleasesWithId.concat(duplicateReleasesWithId);

-    try {
-        await knex.raw(`
+    const updated = await knex.raw(`
         UPDATE releases
             SET url = COALESCE(new.url, releases.url),
                 date = COALESCE(new.date, releases.date),
@@ -339,9 +338,6 @@ async function storeScenes(releases) {
     `, {
         scenes: JSON.stringify(duplicateReleasesWithId),
     });
-    } catch (error) {
-        console.log(error);
-    }

     const [actors] = await Promise.all([
         associateActors(releasesWithId, batchId),
@@ -359,7 +355,7 @@ async function storeScenes(releases) {
         await scrapeActors(actors.map(actor => actor.name));
     }

-    logger.info(`Stored ${storedReleaseEntries.length} releases`);
+    logger.info(`Stored ${storedReleaseEntries.length}, updated ${updated.rowCount} releases`);

     await notify(releasesWithId);
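
The update statement now runs without a try/catch, so failures propagate instead of being swallowed by console.log, and its result is captured so the row count can be logged. The middle of the query is hidden by the hunk boundary above; below is a minimal sketch of the visible pattern, assuming the elided part unpacks the :scenes binding with jsonb_to_recordset and joins on the release id. The column definition list and the FROM/WHERE clauses are assumptions for illustration, not taken from the commit.

// Sketch only: the real column list and join condition are not shown in the hunk above.
async function updateDuplicateReleases(knex, duplicateReleasesWithId) {
    const updated = await knex.raw(`
        UPDATE releases
            SET url = COALESCE(new.url, releases.url),
                date = COALESCE(new.date, releases.date)
            FROM jsonb_to_recordset(CAST(:scenes AS jsonb)) AS new(id integer, url text, date timestamptz)
            WHERE releases.id = new.id
    `, {
        scenes: JSON.stringify(duplicateReleasesWithId),
    });

    // With the pg driver, knex.raw resolves to the driver's result object,
    // so rowCount reports how many existing releases the statement touched.
    return updated.rowCount;
}

The logger.info change in the diff relies on the same behavior: knex.raw's resolved value exposes rowCount, which is what gets interpolated into the "updated ... releases" message.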

View File

@@ -51,11 +51,10 @@ async function filterUniqueReleases(releases) {
         .where('deep', true) // scene is already deep scraped
         .orWhereNull('date')
         .orWhereNotIn('date_precision', ['day', 'minute']) // don't worry about scenes without (accurate) dates for now
-        .orWhere(knex.raw('NOW() - date > INTERVAL \'12 hours\'')) // scene is still upcoming, with a rough offset to wait for the end of the day west of UTC
-        .orWhere(knex.raw('updated_at - date > INTERVAL \'1 day\'')); // scene was updated after the release date, no updated expected
+        .orWhere(knex.raw('date > NOW() - INTERVAL \'12 hours\'')) // scene is still upcoming, with a rough offset to wait for the end of the day west of UTC
+        .orWhere(knex.raw('updated_at - date > INTERVAL \'1 day\'')); // scene was updated after the release date, no updates expected
     });

     const duplicateReleases = duplicateReleaseEntries.map(release => curateRelease(release));
     const duplicateReleasesByEntityIdAndEntryId = duplicateReleases.reduce(mapReleasesToEntityIdAndEntryId, {});
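
This inverted comparison is the fix the commit title refers to: the old predicate, NOW() - date > INTERVAL '12 hours', is true for scenes released more than 12 hours in the past, the opposite of the "still upcoming" case its comment describes. The new predicate, date > NOW() - INTERVAL '12 hours', matches scenes dated in the future or less than 12 hours old. A plain-JS restatement of the two conditions, purely for illustration and not part of the commit:

// Illustration only: the SQL predicates rewritten as date arithmetic in JavaScript.
const twelveHours = 12 * 60 * 60 * 1000;

// New condition: date > NOW() - INTERVAL '12 hours'
// True when the release date is in the future or less than 12 hours old,
// i.e. the scene still counts as upcoming (the 12-hour slack covers
// timezones west of UTC that have not finished the release day yet).
function isStillUpcoming(date, now = new Date()) {
    return date.getTime() > now.getTime() - twelveHours;
}

// Old condition: NOW() - date > INTERVAL '12 hours'
// True only when the release date lies more than 12 hours in the past,
// so a genuinely upcoming scene never satisfied it.
function oldPredicate(date, now = new Date()) {
    return now.getTime() - date.getTime() > twelveHours;
}

// A scene dated tomorrow is upcoming under the new check but never matched the old one.
const tomorrow = new Date(Date.now() + 24 * 60 * 60 * 1000);
console.log(isStillUpcoming(tomorrow)); // true
console.log(oldPredicate(tomorrow)); // false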