Using batch insert for profiles to prevent errors on large inserts.

This commit is contained in:
2020-05-21 03:44:44 +02:00
parent 703b77897d
commit 532a4b679b
1005 changed files with 13340 additions and 323 deletions

View File

@@ -39,6 +39,10 @@ const hairColors = {
   raven: 'black',
   red: 'red',
   redhead: 'red',
+  blue: 'blue',
+  green: 'green',
+  purple: 'purple',
+  pink: 'pink',
};
const eyeColors = {
@@ -474,9 +478,9 @@ async function upsertProfiles(profiles) {
const updatingProfileEntries = profiles.filter(profile => profile.update).map(profile => curateProfileEntry(profile));
   if (newProfileEntries.length > 0) {
-    await knex('actors_profiles').insert(newProfileEntries);
+    await knex.batchInsert('actors_profiles', newProfileEntries);
-    logger.info(`Saved ${newProfileEntries.length} new actor profiles`);
+    logger.info(`Saved ${newProfileEntries.length} actor profiles`);
   }
if (argv.force && updatingProfileEntries.length > 0) {
@@ -583,8 +587,12 @@ async function scrapeActors(actorNames) {
       .leftJoin('networks', 'sites.network_id', 'networks.id'),
     knex('actors')
       .select(['id', 'name', 'slug'])
-      .whereIn('slug', baseActors.map(baseActor => baseActor.slug))
-      .whereNull('network_id'),
+      .modify((queryBuilder) => {
+        if (actorNames.length > 0) {
+          queryBuilder.whereIn('slug', baseActors.map(baseActor => baseActor.slug));
+        }
+      })
+      .whereNull('alias_for'),
   ]);
const networksBySlug = networks.reduce((acc, network) => ({ ...acc, [network.slug]: network }), {});