Accumulating boolean instead of full scene.

DebaucheryLibrarian 2023-06-03 21:51:09 +02:00
parent 3fc63b1934
commit 5103a07e5f
3 changed files with 17 additions and 21 deletions


@@ -337,7 +337,10 @@ module.exports = {
   },
   media: {
     path: './media',
-    transferSource: 'http://localhost:5000/media',
+    transferSources: {
+      local: 'http://localhost:5000/media',
+      s3: 'https://cdn.traxxx.me',
+    },
     maxSize: 1000,
     quality: 80,
     thumbnailSize: 320, // width for 16:9 will be exactly 576px
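
For reference, a minimal sketch of how the new transferSources map is consumed; the selector helper is hypothetical (only the config shape and the media.s3 flag appear in this commit):

    // Pick a transfer host per media item based on its s3 flag,
    // mirroring the ternary used in transferMedia() below.
    const config = {
      media: {
        transferSources: {
          local: 'http://localhost:5000/media',
          s3: 'https://cdn.traxxx.me',
        },
      },
    };

    // getTransferSource is a hypothetical helper, not part of the commit.
    function getTransferSource(media) {
      return media.s3
        ? config.media.transferSources.s3
        : config.media.transferSources.local;
    }

    console.log(getTransferSource({ s3: true }));  // https://cdn.traxxx.me
    console.log(getTransferSource({ s3: false })); // http://localhost:5000/media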


@@ -345,11 +345,6 @@ const { argv } = yargs
    alias: ['timeout'],
    default: 60000,
  })
-  .option('media-source', {
-    describe: 'Traxxx host to use for media transfers.',
-    type: 'string',
-    default: config.media.transferSource,
-  })
  .coerce('after', interpretAfter)
  .coerce('actors-update', (after) => interpretAfter(after, true));
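
The --media-source flag can go away because the transfer host is now resolved per media item from config rather than from the CLI. Note that load() further down reads args.entity; the option backing it is not part of this diff, but a matching definition might look like this (entirely hypothetical):

    // Hypothetical yargs option backing `args.entity` as used in load();
    // the real definition is not shown in this commit.
    const yargs = require('yargs');

    const { argv } = yargs
      .option('entity', {
        describe: 'Only load releases whose entity slug matches.',
        type: 'string',
      });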


@@ -396,7 +396,7 @@ async function transferMedia(media, target) {
     const filename = `${media.hash}${path.extname(media[type])}`;
     const filepath = path.join(target, dirs[type], filename);
     const temp = path.join('media/temp', filepath);
-    const url = new URL(media[type], `${args.mediaSource}/`).href;
+    const url = new URL(media[type], `${media.s3 ? config.media.transferSources.s3 : config.media.transferSources.local}/`).href;

     console.log('Transferring media', url);
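
A quick illustration of why the base URL gets a trailing slash appended before resolution (WHATWG URL semantics, as implemented by Node's global URL):

    // Without the trailing slash, the last path segment of the base
    // would be replaced rather than extended.
    const base = 'http://localhost:5000/media';

    console.log(new URL('photos/1.jpg', `${base}/`).href);
    // => http://localhost:5000/media/photos/1.jpg

    console.log(new URL('photos/1.jpg', base).href);
    // => http://localhost:5000/photos/1.jpg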
@@ -420,16 +420,14 @@ async function transferMedia(media, target) {
       fileStream.on('error', () => { reject(); });
     });

-    if (args.s3) {
-      await s3.upload({
-        Bucket: config.s3.bucket,
-        Body: fs.createReadStream(temp),
-        Key: filepath,
-        ContentType: media.mime,
-      }).promise();
-
-      await fs.promises.unlink(temp);
-    }
+    await s3.upload({
+      Bucket: config.s3.bucket,
+      Body: fs.createReadStream(temp),
+      Key: filepath,
+      ContentType: media.mime,
+    }).promise();
+
+    await fs.promises.unlink(temp);
   }, Promise.resolve());
 }
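
The S3 upload now runs unconditionally instead of behind an args.s3 check. The .promise() call implies an aws-sdk v2 client; a minimal sketch of what its setup presumably looks like (the endpoint and credential config keys are assumptions, not shown in the commit):

    // aws-sdk v2 S3 client, inferred from the `.promise()` usage above.
    const AWS = require('aws-sdk');
    const config = require('./config'); // assumed project config module

    const s3 = new AWS.S3({
      endpoint: config.s3.endpoint,         // assumed config key
      accessKeyId: config.s3.accessKey,     // assumed config key
      secretAccessKey: config.s3.secretKey, // assumed config key
    });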
@@ -459,9 +457,7 @@ async function addReleaseMedia(medias, release, target) {
         source_page: media.sourcePage,
       });

-      if (args.mediaSource) {
-        await transferMedia(media, target);
-      }
+      await transferMedia(media, target);
     }

     await knex(`${release.type}s_${target}`).insert({
@@ -559,7 +555,9 @@ async function addRelease(release, context) {

 async function load() {
   const file = await fs.promises.readFile(args.file, 'utf8');
-  const releases = JSON.parse(file).slice(0, args.limit || Infinity);
+  const releases = JSON.parse(file)
+    .filter((release) => (args.entity ? release.entity.slug === args.entity : true))
+    .slice(0, args.limit || Infinity);

   const [batchId] = await knex('batches').insert({ comment: `import ${args.file}` }).returning('id');
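
load() also gains an entity filter ahead of the limit slice. Its behavior on toy data:

    // The filter passes everything through when no entity is given,
    // and narrows to matching slugs otherwise.
    const releases = [
      { entity: { slug: 'studio-a' }, type: 'scene' },
      { entity: { slug: 'studio-b' }, type: 'scene' },
    ];
    const args = { entity: 'studio-a', limit: null };

    const selected = releases
      .filter((release) => (args.entity ? release.entity.slug === args.entity : true))
      .slice(0, args.limit || Infinity);

    console.log(selected.length); // 1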
@@ -589,7 +587,7 @@ async function load() {
     const acc = await chain;
     const scene = await addRelease(release, { batchId, movies: addedMovies, tagIdsBySlug, studioIdsBySlug });

-    return acc.concat(scene);
+    return acc.concat(!!scene);
   }, Promise.resolve([]));

   console.log(`Loaded ${addedMovies.filter(Boolean).length}/${releases.filter((release) => release.type === 'movie').length} movies in batch ${batchId}`);
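
This last hunk is the change the commit message refers to: the reduce chain now accumulates !!scene, a success flag, instead of the resolved scene object itself, so loaded scenes can be garbage-collected as the import progresses while a summary count still works:

    // Counting successes works the same on booleans as on objects,
    // since filter(Boolean) only cares about truthiness.
    const scenes = [true, false, true]; // hypothetical reduce result
    console.log(`Loaded ${scenes.filter(Boolean).length}/${scenes.length} scenes`);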