Improved duplicate media handling in transfer tool.

parent 5ad5708e15
commit 54798f87da

@@ -222,7 +222,7 @@ const movieFields = `
 async function save() {
   const limit = args.limit || 1000;
-  const offset = args.offset || 0;
+  const offset = args.start || 0;
 
   const { releases } = await graphql(`
     query SearchScenes(

@@ -271,6 +271,7 @@ async function save() {
   const entry = JSON.stringify({
     ...release,
+    type: 'release',
     description: release.description.replace(/\n/g, ' '),
     actors: release.actors.filter(Boolean).map(({ actor }) => actor),
     directors: release.directors.filter(Boolean).map(({ director }) => director),
     studio: release.studio?.slug,

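For reference, save() writes one JSON line per release, and the added type field tags every exported row. A sketch of what such an entry might look like; all values below are illustrative, only the field names follow the diff.

const entry = JSON.stringify({
  title: 'Example Scene',              // illustrative value
  type: 'release',                     // discriminator added in the hunk above
  description: 'Newlines are flattened so each entry stays on one line',
  actors: ['example-actor'],           // curated from { actor } rows
  directors: ['example-director'],     // curated from { director } rows
  studio: 'example-studio',            // studio slug, when present
  entity: { slug: 'example-network' }, // used by load() for its entity filter
});
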
@@ -448,7 +449,9 @@ async function transferMedia(media, target) {
 }
 
 async function addReleaseMedia(medias, release, target) {
-  return Promise.all(medias.filter(Boolean).map(async (media) => {
+  await medias.filter(Boolean).reduce(async (chain, media) => {
+    await chain;
+
     const existingMedia = await knex('media')
       .where('hash', media.hash)
       .first();

@@ -480,7 +483,7 @@ async function addReleaseMedia(medias, release, target) {
       [`${release.type}_id`]: release.id,
       media_id: id,
     });
-  }));
+  }, Promise.resolve());
 }
 
 async function linkMovieScenes(release, context) {

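The two hunks above are the core of the duplicate handling: addReleaseMedia() previously inserted all media concurrently with Promise.all, so two items sharing a hash could both pass the existence check before either row was written. The awaited reduce chain processes one item at a time, which lets the hash lookup see rows inserted earlier in the same run. A minimal sketch of the pattern in isolation; the 'media' table and 'hash' column follow the diff, while the item shape and insert payload are simplified stand-ins.

// Insert each media item sequentially, reusing a row when its hash already exists.
// knex is passed in rather than imported, to keep the sketch self-contained.
async function insertMediaSequentially(knex, medias) {
  await medias.filter(Boolean).reduce(async (chain, media) => {
    await chain; // wait for the previous item so its row is visible to the duplicate check

    const existingMedia = await knex('media')
      .where('hash', media.hash)
      .first();

    if (existingMedia) {
      return existingMedia.id; // duplicate: reuse the stored file instead of inserting again
    }

    const [mediaId] = await knex('media')
      .insert({ hash: media.hash, path: media.path })
      .returning('id');

    return mediaId;
  }, Promise.resolve());
}
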
@@ -517,7 +520,7 @@ async function addRelease(release, context) {
   }
 
   const [entity] = await Promise.all([
-    knex('entities').select('id').where(release.entity).first(),
+    knex('entities').select(['id', 'name']).where(release.entity).first(),
   ]);
 
   if (!entity) {

@@ -549,6 +552,7 @@ async function addRelease(release, context) {
     ...release,
     id: releaseEntry.id,
     entityId: entity.id,
+    entityName: entity.name,
   };
 
   await Promise.all([

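These two hunks, together with the logging hunks further down, thread the entity name from the database query to the loader's console output: the entities query now also selects name, addRelease() copies it onto the object it returns, and load() prints it. A condensed sketch of that flow, assuming knex, release and releaseEntry are provided by the caller.

// Condensed view of the entity-name flow; the query mirrors the diff,
// release and releaseEntry are assumed inputs rather than real code from the tool.
async function curateRelease(knex, release, releaseEntry) {
  const entity = await knex('entities')
    .select(['id', 'name'])
    .where(release.entity)
    .first();

  return {
    ...release,
    id: releaseEntry.id,
    entityId: entity.id,
    entityName: entity.name, // only consumed by load() for its progress logging
  };
}
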
@@ -571,11 +575,19 @@ async function addRelease(release, context) {
 
 async function load() {
   const file = await fs.promises.readFile(args.file, 'utf8');
+  const start = args.start || 0;
+  const end = args.limit ? start + args.limit : Infinity;
+
   const releases = file.split('\n')
     .filter(Boolean)
     .map((data) => JSON.parse(data))
     .filter((release) => (args.entity ? release.entity.slug === args.entity : true))
-    .slice(0, args.limit || Infinity);
+    .slice(start, end);
+
+  if (releases.length === 0) {
+    console.log('Nothing to load');
+    return;
+  }
 
   const [batchId] = await knex('batches').insert({ comment: `import ${args.file}` }).returning('id');
 

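load() now honours the same args.start / args.limit pair that save() reads, slicing the parsed JSON lines to an explicit window and returning before a batch row is created when that window is empty. A small sketch of the windowing logic; the argument names follow the diff and the sample values are made up.

// Window a list of parsed releases by start offset and optional limit.
function windowReleases(releases, args) {
  const start = args.start || 0;
  const end = args.limit ? start + args.limit : Infinity;

  return releases.slice(start, end);
}

// e.g. windowReleases(releases, { start: 1000, limit: 500 }) keeps rows 1000-1499,
// and windowReleases(releases, {}) keeps everything, matching the old behaviour.
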
@@ -598,6 +610,8 @@ async function load() {
     const acc = await chain;
     const movie = await addRelease(release, { batchId, tagIdsBySlug, studioIdsBySlug });
 
+    console.log(`Loaded '${movie.entityName}' movie "${movie.title}"`);
+
     return acc.concat(movie);
   }, Promise.resolve([]));
 

@@ -605,6 +619,8 @@ async function load() {
     const acc = await chain;
     const scene = await addRelease(release, { batchId, movies: addedMovies, tagIdsBySlug, studioIdsBySlug });
 
+    console.log(`Loaded '${scene.entityName}' scene "${scene.title}"`);
+
     return acc.concat(!!scene);
   }, Promise.resolve([]));
 