Accumulating boolean instead of full scene.
parent 3fc63b1934
commit 5103a07e5f
@@ -337,7 +337,10 @@ module.exports = {
   },
   media: {
     path: './media',
-    transferSource: 'http://localhost:5000/media',
+    transferSources: {
+      local: 'http://localhost:5000/media',
+      s3: 'https://cdn.traxxx.me',
+    },
     maxSize: 1000,
     quality: 80,
     thumbnailSize: 320, // width for 16:9 will be exactly 576px
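The single transferSource string becomes a transferSources map keyed by origin, so a transfer can pick its source per media item instead of using one global host. A minimal sketch of that lookup, assuming only the config shape shown above (the helper name is illustrative, not part of the codebase):

    // Illustrative helper: choose a base URL from the transferSources map.
    function pickTransferSource(media, transferSources) {
      // Media rows flagged as stored on S3 come from the CDN source,
      // everything else from the local development server.
      return media.s3 ? transferSources.s3 : transferSources.local;
    }

    const sources = {
      local: 'http://localhost:5000/media',
      s3: 'https://cdn.traxxx.me',
    };

    console.log(pickTransferSource({ s3: true }, sources));  // https://cdn.traxxx.me
    console.log(pickTransferSource({ s3: false }, sources)); // http://localhost:5000/media

The same ternary appears inline in transferMedia further down.
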
@@ -345,11 +345,6 @@ const { argv } = yargs
     alias: ['timeout'],
     default: 60000,
   })
-  .option('media-source', {
-    describe: 'Traxxx host to use for media transfers.',
-    type: 'string',
-    default: config.media.transferSource,
-  })
   .coerce('after', interpretAfter)
   .coerce('actors-update', (after) => interpretAfter(after, true));
 
@@ -396,7 +396,7 @@ async function transferMedia(media, target) {
     const filename = `${media.hash}${path.extname(media[type])}`;
     const filepath = path.join(target, dirs[type], filename);
     const temp = path.join('media/temp', filepath);
-    const url = new URL(media[type], `${args.mediaSource}/`).href;
+    const url = new URL(media[type], `${media.s3 ? config.media.transferSources.s3 : config.media.transferSources.local}/`).href;
 
     console.log('Transferring media', url);
 
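The base URL is now resolved per media row via its s3 flag rather than from a host passed on the command line (the --media-source option removed above). For reference, Node's WHATWG URL joins a relative media path onto a base ending in a slash, which is why the template literal appends '/' (the values below are illustrative):

    // Relative path resolution against the chosen transfer source.
    const base = 'https://cdn.traxxx.me';                     // transferSources.s3
    const url = new URL('photos/abc123.jpg', `${base}/`).href;

    console.log(url); // https://cdn.traxxx.me/photos/abc123.jpg
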
@@ -420,7 +420,6 @@ async function transferMedia(media, target) {
       fileStream.on('error', () => { reject(); });
     });
 
-    if (args.s3) {
     await s3.upload({
       Bucket: config.s3.bucket,
       Body: fs.createReadStream(temp),
@@ -429,7 +428,6 @@ async function transferMedia(media, target) {
     }).promise();
 
     await fs.promises.unlink(temp);
-    }
   }, Promise.resolve());
 }
 
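With the args.s3 guard removed, every transferred file is uploaded to the bucket and the local temp copy is deleted afterwards. Condensed into a standalone function, and assuming aws-sdk v2 (which the .promise() call suggests), the flow looks roughly like this; the function name and Key handling are assumptions, since only Bucket and Body appear in the hunks above:

    const fs = require('fs');
    const AWS = require('aws-sdk');

    const s3 = new AWS.S3();

    // Stream a downloaded temp file into S3, then remove the local copy.
    async function uploadAndClean(temp, key, bucket) {
      await s3.upload({
        Bucket: bucket,
        Key: key,                        // object key, e.g. the target filepath (assumed)
        Body: fs.createReadStream(temp), // stream rather than buffer the whole file
      }).promise();

      await fs.promises.unlink(temp);
    }
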
@@ -459,10 +457,8 @@ async function addReleaseMedia(medias, release, target) {
         source_page: media.sourcePage,
       });
 
-      if (args.mediaSource) {
       await transferMedia(media, target);
       }
-    }
 
     await knex(`${release.type}s_${target}`).insert({
       [`${release.type}_id`]: release.id,
@@ -559,7 +555,9 @@ async function addRelease(release, context) {
 
 async function load() {
   const file = await fs.promises.readFile(args.file, 'utf8');
-  const releases = JSON.parse(file).slice(0, args.limit || Infinity);
+  const releases = JSON.parse(file)
+    .filter((release) => (args.entity ? release.entity.slug === args.entity : true))
+    .slice(0, args.limit || Infinity);
 
   const [batchId] = await knex('batches').insert({ comment: `import ${args.file}` }).returning('id');
 
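load() now filters the parsed dump by entity slug before applying the limit, so a single network or channel can be imported from a larger export. A standalone sketch of the same chain; the sample data and the way the entity value reaches args are assumptions:

    const releases = [
      { entity: { slug: 'studio-a' }, type: 'scene' },
      { entity: { slug: 'studio-b' }, type: 'scene' },
    ];

    const entity = 'studio-a'; // e.g. supplied through an --entity CLI option (assumed)
    const limit = 10;

    const selected = releases
      .filter((release) => (entity ? release.entity.slug === entity : true))
      .slice(0, limit || Infinity);

    console.log(selected.map((release) => release.entity.slug)); // [ 'studio-a' ]
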
@@ -589,7 +587,7 @@ async function load() {
     const acc = await chain;
     const scene = await addRelease(release, { batchId, movies: addedMovies, tagIdsBySlug, studioIdsBySlug });
 
-    return acc.concat(scene);
+    return acc.concat(!!scene);
   }, Promise.resolve([]));
 
   console.log(`Loaded ${addedMovies.filter(Boolean).length}/${releases.filter((release) => release.type === 'movie').length} movies in batch ${batchId}`);
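This last hunk is the change the commit title describes: the sequential reduce accumulates !!scene, a boolean marking whether the release was inserted, instead of the full scene object, which is all the summary output needs. Counting successes from such an array is unchanged:

    const results = [true, false, true]; // accumulated !!scene values
    const loaded = results.filter(Boolean).length;

    console.log(`Loaded ${loaded}/${results.length} scenes`); // Loaded 2/3 scenes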