diff --git a/assets/js/ui/actions.js b/assets/js/ui/actions.js
index 7c427c66..47f41797 100644
--- a/assets/js/ui/actions.js
+++ b/assets/js/ui/actions.js
@@ -41,7 +41,6 @@ function initUiActions(_store, _router) {
         slug
         date
         url
-        type
         isNew
         entity {
           id
diff --git a/public/img/tags/dap/9.jpeg b/public/img/tags/dap/9.jpeg
new file mode 100644
index 00000000..b2b5ff64
Binary files /dev/null and b/public/img/tags/dap/9.jpeg differ
diff --git a/public/img/tags/dap/lazy/9.jpeg b/public/img/tags/dap/lazy/9.jpeg
new file mode 100644
index 00000000..ba012d76
Binary files /dev/null and b/public/img/tags/dap/lazy/9.jpeg differ
diff --git a/public/img/tags/dap/thumbs/9.jpeg b/public/img/tags/dap/thumbs/9.jpeg
new file mode 100644
index 00000000..e8e3058b
Binary files /dev/null and b/public/img/tags/dap/thumbs/9.jpeg differ
diff --git a/public/img/tags/dp/4.jpeg b/public/img/tags/dp/4.jpeg
new file mode 100644
index 00000000..67dc1bf1
Binary files /dev/null and b/public/img/tags/dp/4.jpeg differ
diff --git a/public/img/tags/dp/lazy/4.jpeg b/public/img/tags/dp/lazy/4.jpeg
new file mode 100644
index 00000000..dbdceef5
Binary files /dev/null and b/public/img/tags/dp/lazy/4.jpeg differ
diff --git a/public/img/tags/dp/thumbs/4.jpeg b/public/img/tags/dp/thumbs/4.jpeg
new file mode 100644
index 00000000..1957d300
Binary files /dev/null and b/public/img/tags/dp/thumbs/4.jpeg differ
diff --git a/src/actors.js b/src/actors.js
index 9d8ddcd1..1f825675 100644
--- a/src/actors.js
+++ b/src/actors.js
@@ -16,6 +16,7 @@ const scrapers = require('./scrapers/scrapers').actors;
 
 const argv = require('./argv');
 const include = require('./utils/argv-include')(argv);
+const bulkInsert = require('./utils/bulk-insert');
 const logger = require('./logger')(__filename);
 
 const { toBaseReleases } = require('./deep');
@@ -485,7 +486,7 @@ async function upsertProfiles(profiles) {
   const updatingProfileEntries = profiles.filter(profile => profile.update).map(profile => curateProfileEntry(profile));
 
   if (newProfileEntries.length > 0) {
-    await knex.batchInsert('actors_profiles', newProfileEntries);
+    await bulkInsert('actors_profiles', newProfileEntries);
     logger.info(`Saved ${newProfileEntries.length} actor profiles`);
   }
 
@@ -639,9 +640,7 @@ async function scrapeActors(argNames) {
 
   // TODO: associate entity when entry ID is provided
 
-  const newActorEntries = batchId && await knex('actors')
-    .insert(curatedActorEntries)
-    .returning(['id', 'name', 'slug', 'entry_id']);
+  const newActorEntries = batchId && await bulkInsert('actors', curatedActorEntries);
 
   const actors = existingActorEntries.concat(Array.isArray(newActorEntries) ? newActorEntries : []);
 
@@ -697,7 +696,7 @@ async function getOrCreateActors(baseActors, batchId) {
   const uniqueBaseActors = baseActors.filter(baseActor => !existingActorSlugs[baseActor.entity.id]?.[baseActor.slug] && !existingActorSlugs.null?.[baseActor.slug]);
   const curatedActorEntries = curateActorEntries(uniqueBaseActors, batchId);
 
-  const newActors = await knex('actors').insert(curatedActorEntries, ['id', 'alias_for', 'name', 'slug', 'entity_id']);
+  const newActors = await bulkInsert('actors', curatedActorEntries);
 
   if (Array.isArray(newActors)) {
     return newActors.concat(existingActors);
@@ -743,7 +742,7 @@ async function associateActors(releases, batchId) {
     })))
     .flat();
 
-  await knex.raw(`${knex('releases_actors').insert(releaseActorAssociations).toString()} ON CONFLICT DO NOTHING;`);
+  await bulkInsert('releases_actors', releaseActorAssociations, false);
 
   return actors;
 }
diff --git a/src/media.js b/src/media.js
index 7d80c352..5cd19f2f 100644
--- a/src/media.js
+++ b/src/media.js
@@ -18,6 +18,7 @@ const logger = require('./logger')(__filename);
 const argv = require('./argv');
 const knex = require('./knex');
 const http = require('./utils/http');
+const bulkInsert = require('./utils/bulk-insert');
 const { get } = require('./utils/qu');
 
 const pipeline = util.promisify(stream.pipeline);
@@ -607,7 +608,7 @@ async function storeMedias(baseMedias) {
   const newMediaWithEntries = savedMedias.map((media, index) => curateMediaEntry(media, index));
   const newMediaEntries = newMediaWithEntries.filter(media => media.newEntry).map(media => media.entry);
 
-  await knex('media').insert(newMediaEntries);
+  await bulkInsert('media', newMediaEntries);
 
   return [...newMediaWithEntries, ...existingHashMedias];
 }
@@ -670,7 +671,7 @@ async function associateReleaseMedia(releases, type = 'releases') {
       .filter(Boolean);
 
     if (associations.length > 0) {
-      await knex.raw(`${knex(`${type}_${role}`).insert(associations)} ON CONFLICT DO NOTHING`);
+      await bulkInsert(`${type}_${role}`, associations, false);
     }
   }, Promise.resolve());
 }
diff --git a/src/store-releases.js b/src/store-releases.js
index 18cbd712..40386844 100644
--- a/src/store-releases.js
+++ b/src/store-releases.js
@@ -224,8 +224,7 @@ async function updateReleasesSearch(releaseIds) {
   `, releaseIds && [releaseIds]);
 
   if (documents.rows?.length > 0) {
-    const query = knex('releases_search').insert(documents.rows).toString();
-    await knex.raw(`${query} ON CONFLICT (release_id) DO UPDATE SET document = EXCLUDED.document`);
+    await bulkInsert('releases_search', documents.rows, ['release_id']);
   }
 }
 
@@ -244,7 +243,7 @@ async function storeScenes(releases) {
   const curatedNewReleaseEntries = uniqueReleases.map(release => curateReleaseEntry(release, batchId));
 
-  const storedReleases = await knex.batchInsert('releases', curatedNewReleaseEntries).returning('*');
+  const storedReleases = await bulkInsert('releases', curatedNewReleaseEntries);
 
   // TODO: update duplicate releases
   const storedReleaseEntries = Array.isArray(storedReleases) ? storedReleases : [];
 
@@ -297,7 +296,7 @@ async function associateMovieScenes(movies, movieScenes) {
     });
   }).flat().filter(Boolean);
 
-  await bulkInsert('movies_scenes', associations, ['movie_id', 'scene_id']);
+  await bulkInsert('movies_scenes', associations, false);
 }
 
 async function storeMovies(movies, movieScenes) {
diff --git a/src/utils/bulk-insert.js b/src/utils/bulk-insert.js
new file mode 100644
index 00000000..7ef5fffc
--- /dev/null
+++ b/src/utils/bulk-insert.js
@@ -0,0 +1,34 @@
+'use strict';
+
+const knex = require('../knex');
+const chunk = require('./chunk');
+
+async function bulkUpsert(table, items, conflict, update = true, chunkSize) {
+  if (items.length === 0) {
+    return [];
+  }
+
+  // conflict === false ignores conflicting rows; a conflict column (list) updates them
+  const updated = (conflict === false && ':query ON CONFLICT DO NOTHING RETURNING *;')
+    || (conflict && update && `
+      :query ON CONFLICT (${conflict})
+      DO UPDATE SET ${(update === true
+        ? Object.keys(items[0]) // derive updating props
+        : update)
+        .reduce((acc, prop, index) => `${acc}${index > 0 ? ',' : ''}\n${prop} = ${conflict.includes(prop) ? table : 'EXCLUDED'}.${prop}`, '')}
+      RETURNING *;
+    `);
+
+  return knex.transaction(async (transaction) => {
+    const queries = chunk(items, chunkSize)
+      .map(chunkItems => knex.raw(updated || ':query RETURNING *;', {
+        query: knex(table).insert(chunkItems).transacting(transaction),
+      }));
+
+    const responses = await Promise.all(queries);
+
+    return responses.map(response => response.rows).flat();
+  });
+}
+
+module.exports = bulkUpsert;