Using batch insert module for media, calculating chunk size based on item size.

This commit is contained in:
DebaucheryLibrarian
2026-03-15 21:33:14 +01:00
parent 31aa1118e7
commit 1b6aaafe10
2 changed files with 24 additions and 12 deletions

View File

@@ -23,7 +23,7 @@ const logger = require('./logger')(__filename);
const argv = require('./argv');
const knex = require('./knex');
const http = require('./utils/http');
const bulkInsert = require('./utils/bulk-insert');
const batchInsert = require('./utils/batch-insert');
const chunk = require('./utils/chunk');
const { get } = require('./utils/qu');
const { fetchEntityReleaseIds } = require('./entity-releases');
@@ -924,7 +924,8 @@ async function storeMedias(baseMedias, options) {
const newMediaEntries = newMediaWithEntries.filter((media) => media.newEntry).map((media) => media.entry);
try {
await bulkInsert('media', newMediaEntries, false);
console.log('NEW MEDIA ENTRIES', newMediaEntries);
await batchInsert('media', newMediaEntries, { conflict: false });
return [...newMediaWithEntries, ...existingHashMedias];
} catch (error) {
@@ -993,11 +994,11 @@ async function associateReleaseMedia(releases, type = 'release') {
.filter(Boolean);
if (associations.length > 0) {
await bulkInsert(`${type}s_${role}`, associations, false);
await batchInsert(`${type}s_${role}`, associations, { conflict: false });
}
} catch (error) {
if (error.entries) {
logger.error(util.inspect(error.entries, null, null, { color: true }));
logger.error(util.inspect(error.entries.slice(0, 2), null, null, { colors: true }), `${Math.min(error.entries.length, 2)} of ${error.entries.length}`);
}
logger.error(`Failed to store ${type} ${role}: ${error.message} (${error.detail || 'no detail'})`);