From 5103a07e5f8897f0e99412665ad210950845094f Mon Sep 17 00:00:00 2001
From: DebaucheryLibrarian
Date: Sat, 3 Jun 2023 21:51:09 +0200
Subject: [PATCH] Accumulating boolean instead of full scene.

---
 config/default.js     |  5 ++++-
 src/argv.js           |  5 -----
 src/tools/transfer.js | 28 +++++++++++++---------------
 3 files changed, 17 insertions(+), 21 deletions(-)

diff --git a/config/default.js b/config/default.js
index ad2adf6f9..844265679 100755
--- a/config/default.js
+++ b/config/default.js
@@ -337,7 +337,10 @@ module.exports = {
 	},
 	media: {
 		path: './media',
-		transferSource: 'http://localhost:5000/media',
+		transferSources: {
+			local: 'http://localhost:5000/media',
+			s3: 'https://cdn.traxxx.me',
+		},
 		maxSize: 1000,
 		quality: 80,
 		thumbnailSize: 320, // width for 16:9 will be exactly 576px
diff --git a/src/argv.js b/src/argv.js
index 92bfd5153..4eea79708 100755
--- a/src/argv.js
+++ b/src/argv.js
@@ -345,11 +345,6 @@ const { argv } = yargs
 		alias: ['timeout'],
 		default: 60000,
 	})
-	.option('media-source', {
-		describe: 'Traxxx host to use for media transfers.',
-		type: 'string',
-		default: config.media.transferSource,
-	})
 	.coerce('after', interpretAfter)
 	.coerce('actors-update', (after) => interpretAfter(after, true));
 
diff --git a/src/tools/transfer.js b/src/tools/transfer.js
index 81f496fb8..51a5607b4 100755
--- a/src/tools/transfer.js
+++ b/src/tools/transfer.js
@@ -396,7 +396,7 @@ async function transferMedia(media, target) {
 		const filename = `${media.hash}${path.extname(media[type])}`;
 		const filepath = path.join(target, dirs[type], filename);
 		const temp = path.join('media/temp', filepath);
-		const url = new URL(media[type], `${args.mediaSource}/`).href;
+		const url = new URL(media[type], `${media.s3 ? config.media.transferSources.s3 : config.media.transferSources.local}/`).href;
 
 		console.log('Transferring media', url);
 
@@ -420,16 +420,14 @@ async function transferMedia(media, target) {
 			fileStream.on('error', () => { reject(); });
 		});
 
-		if (args.s3) {
-			await s3.upload({
-				Bucket: config.s3.bucket,
-				Body: fs.createReadStream(temp),
-				Key: filepath,
-				ContentType: media.mime,
-			}).promise();
+		await s3.upload({
+			Bucket: config.s3.bucket,
+			Body: fs.createReadStream(temp),
+			Key: filepath,
+			ContentType: media.mime,
+		}).promise();
 
-			await fs.promises.unlink(temp);
-		}
+		await fs.promises.unlink(temp);
 	}, Promise.resolve());
 }
 
@@ -459,9 +457,7 @@ async function addReleaseMedia(medias, release, target) {
 			source_page: media.sourcePage,
 		});
 
-		if (args.mediaSource) {
-			await transferMedia(media, target);
-		}
+		await transferMedia(media, target);
 	}
 
 	await knex(`${release.type}s_${target}`).insert({
@@ -559,7 +555,9 @@ async function addRelease(release, context) {
 
 async function load() {
 	const file = await fs.promises.readFile(args.file, 'utf8');
-	const releases = JSON.parse(file).slice(0, args.limit || Infinity);
+	const releases = JSON.parse(file)
+		.filter((release) => (args.entity ? release.entity.slug === args.entity : true))
+		.slice(0, args.limit || Infinity);
 
 	const [batchId] = await knex('batches').insert({ comment: `import ${args.file}` }).returning('id');
 
@@ -589,7 +587,7 @@ async function load() {
 		const acc = await chain;
 		const scene = await addRelease(release, { batchId, movies: addedMovies, tagIdsBySlug, studioIdsBySlug });
 
-		return acc.concat(scene);
+		return acc.concat(!!scene);
 	}, Promise.resolve([]));
 
 	console.log(`Loaded ${addedMovies.filter(Boolean).length}/${releases.filter((release) => release.type === 'movie').length} movies in batch ${batchId}`);