Add scraper config, looked up by scraper slug, to the current 'includes' parameter.

This commit is contained in:
DebaucheryLibrarian
2020-10-28 01:36:13 +01:00
parent 1869877178
commit bf9b334b73
4 changed files with 37 additions and 13 deletions

View File

@@ -97,7 +97,7 @@ function maleNoun() {
return random([
'guy',
'stud',
'man',
'lad',
'boyfriend',
'stranger',
'stepbrother',
@@ -227,22 +227,29 @@ function actors(release) {
}));
}
async function fetchLatest(entity) {
async function fetchLatest(entity, page, options) {
return Promise.all(Array.from({ length: 100 }, async (value, index) => {
const release = {};
release.entryId = nanoid();
release.date = moment().subtract(Math.floor(Math.random() * index), 'days').toDate();
const [poster, ...photos] = await knex('media')
.select('path')
.where('is_sfw', true)
.pluck('path')
.orderByRaw('random()')
.limit(Math.floor(Math.random() * 10) + 1);
if (options.source) {
// select from configured random image source
release.poster = `${options.source}?id=${nanoid()}`; // ensure source is unique
release.photos = Array.from({ length: Math.floor(Math.random() * 10) + 1 }, () => `${options.source}?id=${nanoid()}`); // ensure source is unique
} else {
// select from local SFW database
const [poster, ...photos] = await knex('media')
.select('path')
.where('is_sfw', true)
.pluck('path')
.orderByRaw('random()')
.limit(Math.floor(Math.random() * 10) + 1);
release.poster = `http://${config.web.host}:${config.web.port}/img/${poster}?id=${nanoid()}`; // ensure source is unique
release.photos = photos.map(photo => `http://${config.web.host}:${config.web.port}/img/${photo}?id=${nanoid()}`);
release.poster = `http://${config.web.host}:${config.web.port}/img/${poster}?id=${nanoid()}`; // ensure source is unique
release.photos = photos.map(photo => `http://${config.web.host}:${config.web.port}/img/${photo}?id=${nanoid()}`);
}
release.tags = await knex('tags')
.select('name')