Fixed actor unique index preventing multiple actors with the same slug on different networks. Changed the scene delete query to handle larger numbers of scene IDs without triggering a stack depth error.

This commit is contained in:
DebaucheryLibrarian 2021-02-18 04:44:04 +01:00
parent 58c01bdfcf
commit 5e2909c531
2 changed files with 4 additions and 3 deletions

View File

@ -999,7 +999,7 @@ exports.up = knex => Promise.resolve()
// allow vim fold
return knex.raw(`
CREATE UNIQUE INDEX unique_actor_slugs_network ON actors (slug, entity_id, entry_id);
CREATE UNIQUE INDEX unique_actor_slugs ON actors (slug, (entity_id IS NULL));
CREATE UNIQUE INDEX unique_actor_slugs ON actors (slug) WHERE entity_id IS NULL;
CREATE UNIQUE INDEX releases_search_unique ON releases_search (release_id);
CREATE INDEX releases_search_index ON releases_search USING GIN (document);

View File

@ -146,12 +146,13 @@ async function deleteScenes(sceneIds) {
return 0;
}
// there can be too many scene IDs for where in, causing a stack depth error
await knex('movies_scenes')
.whereIn('scene_id', sceneIds)
.whereRaw('scene_id = ANY(:sceneIds)', { sceneIds })
.delete();
const deleteCount = await knex('releases')
.whereIn('id', sceneIds)
.whereRaw('id = ANY(:sceneIds)', { sceneIds })
.delete();
logger.info(`Removed ${deleteCount}/${sceneIds.length} scenes`);