Redundant actor sources can now be bundled in configuration. Fixed Men network actor path.

ThePendulum 2020-02-01 04:14:08 +01:00
parent 5ff916475a
commit cde9aba0cb
4 changed files with 37 additions and 20 deletions

@@ -12,27 +12,28 @@ module.exports = {
// include: [],
// exclude: [],
profiles: [
'evilangel',
[
'evilangel',
// Gamma; Evil angel and Wicked have their own assets
'xempire',
'blowpass',
'wicked',
],
[
// MindGeek; Brazzers and Mile High Media have their own assets
'realitykings',
'mofos',
'babes',
'digitalplayground',
'babes',
'fakehub',
'milehighmedia',
'wicked',
'metrohd',
'iconmale',
'men',
'metrohd',
'transangels',
],
'21sextury',
'brazzers',
'milehighmedia',
'wicked',
'21sextury',
'julesjordan',
'naughtyamerica',
'legalporno',
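Each profiles entry is now either a single scraper slug or an array of slugs whose sites expose the same upstream profile data (the Gamma and MindGeek groups above), so only one profile needs to be fetched per group. A rough sketch of how such an entry could be resolved, assuming a scrapers map keyed by slug; resolveProfileGroup and its loop are illustrative only, the scraper itself uses the promise chain in the scrapeActors hunk below:

    // Illustrative resolution of one bundled profiles entry.
    async function resolveProfileGroup(entry, actorName, scrapers) {
        // An entry is either a single slug or an array of slugs sharing one upstream source.
        const slugs = [].concat(entry);

        for (const slug of slugs) {
            const scraper = scrapers[slug];

            if (!scraper || !scraper.fetchProfile) continue; // slug has no actor scraper

            const profile = await scraper.fetchProfile(actorName, slug);

            if (profile) return { ...profile, name: actorName, scraper: slug }; // first hit wins, the rest of the group is skipped
        }

        return null; // no scraper in this group knew the actor
    }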

@@ -1,6 +1,6 @@
'use strict';
// const config = require('config');
const config = require('config');
const Promise = require('bluebird');
const UrlPattern = require('url-pattern');
const moment = require('moment');
@@ -339,21 +339,36 @@ async function scrapeActors(actorNames) {
try {
const actorSlug = slugify(actorName);
const actorEntry = await knex('actors').where({ slug: actorSlug }).first();
const sources = argv.sources ? argv.sources.map(source => [source, scrapers.actors[source]]) : Object.entries(scrapers.actors);
const sources = argv.sources || config.profiles || Object.keys(scrapers.actors);
const profiles = await Promise.map(sources, async (source) => {
// const [scraperSlug, scraper] = source;
const profileScrapers = [].concat(source).map(slug => ({ scraperSlug: slug, scraper: scrapers.actors[slug] }));
const profiles = await Promise.map(sources, async ([scraperSlug, scraper]) => {
try {
const profile = await scraper.fetchProfile(actorEntry ? actorEntry.name : actorName, scraperSlug);
return profileScrapers.reduce(async (outcome, { scraper, scraperSlug }) => outcome.catch(async () => {
logger.verbose(`Searching '${actorName}' on ${scraperSlug}`);
return {
...profile,
name: actorName,
scraper: scraperSlug,
};
const profile = await scraper.fetchProfile(actorEntry ? actorEntry.name : actorName, scraperSlug);
if (profile) {
logger.verbose(`Found profile for '${actorName}' on ${scraperSlug}`);
return {
...profile,
name: actorName,
scraper: scraperSlug,
};
}
logger.verbose(`No profile for '${actorName}' available on ${scraperSlug}`);
throw new Error(`Profile for ${actorName} not available on ${scraperSlug}`);
}), Promise.reject(new Error()));
} catch (error) {
console.log(scraperSlug, error);
return null;
logger.error(error);
}
return null;
});
const profile = await mergeProfiles(profiles, actorEntry);
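The reworked scrapeActors accepts plain slugs as well as bundled groups: argv.sources or config.profiles (falling back to every known actor scraper) feeds Promise.map, each source is normalized with [].concat(source) into { scraperSlug, scraper } pairs, and the pairs are reduced over an initially rejected promise so that each scraper only runs inside the previous one's .catch; the first profile found resolves the chain and the remaining scrapers in the group are skipped. A condensed sketch of that fallback pattern, where firstSuccessful and tryFetch are illustrative names rather than the project's API:

    // Start from a rejected promise and attach one .catch per candidate,
    // so a candidate only runs when the previous one failed.
    function firstSuccessful(candidates, tryFetch) {
        return candidates.reduce(
            (outcome, candidate) => outcome.catch(async () => {
                const result = await tryFetch(candidate);

                if (result) return result; // resolving here skips the remaining candidates

                throw new Error(`No result from ${candidate}`); // hand over to the next .catch
            }),
            Promise.reject(new Error('No candidates tried yet')),
        );
    }

    // e.g. firstSuccessful(['iconmale', 'men'], slug => scrapers.actors[slug].fetchProfile(actorName, slug))
    //     .catch(() => null); // null when the whole group came up empty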

@@ -3,7 +3,7 @@
const { fetchScene, fetchLatest, fetchProfile } = require('./mindgeek');
async function networkFetchProfile(actorName) {
return fetchProfile(actorName, 'men');
return fetchProfile(actorName, 'men', 'modelprofile');
}
module.exports = {

@@ -217,7 +217,8 @@ async function fetchProfile(actorName, networkName, actorPath = 'model') {
const actorData = res.body.result.find(actor => actor.name.toLowerCase() === actorName.toLowerCase());
if (actorData) {
const actorRes = await bhttp.get(`https://www.${networkName}.com/${actorPath}/${actorData.id}/`);
const actorUrl = `https://www.${networkName}.com/${actorPath}/${actorData.id}/`;
const actorRes = await bhttp.get(actorUrl);
if (actorRes.statusCode === 200) {
return scrapeProfile(actorData, actorRes.body.toString());
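Together, these last two hunks are the actor-path fix from the commit message: fetchProfile now takes an actorPath argument (defaulting to 'model') and folds it into the profile URL, and the Men module passes 'modelprofile', presumably because men.com serves its actor pages under that segment instead of the default. A small sketch of the resulting URLs, where buildActorUrl and the IDs are illustrative and not part of the project:

    // How the actorPath argument shapes the profile URL built above.
    function buildActorUrl(networkName, actorId, actorPath = 'model') {
        return `https://www.${networkName}.com/${actorPath}/${actorId}/`;
    }

    // Default path used by the other MindGeek networks:
    // buildActorUrl('realitykings', 12345) -> 'https://www.realitykings.com/model/12345/'
    // The Men module overrides the third argument:
    // buildActorUrl('men', 12345, 'modelprofile') -> 'https://www.men.com/modelprofile/12345/'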