forked from DebaucheryLibrarian/traxxx
Redundant actor sources can now be bundled in configuration. Fixed Men network actor path.
This commit is contained in:
parent 5ff916475a
commit cde9aba0cb
@@ -12,27 +12,28 @@ module.exports = {
  // include: [],
  // exclude: [],
  profiles: [
    [
    'evilangel',
    [
      // Gamma; Evil angel and Wicked have their own assets
      'xempire',
      'blowpass',
      'wicked',
    ],
    [
      // MindGeek; Brazzers and Mile High Media have their own assets
      'realitykings',
      'mofos',
      'babes',
      'digitalplayground',
      'babes',
      'fakehub',
      'milehighmedia',
      'wicked',
      'metrohd',
      'iconmale',
      'men',
      'metrohd',
      'transangels',
    ],
    '21sextury',
    'brazzers',
    'milehighmedia',
    'wicked',
    '21sextury',
    'julesjordan',
    'naughtyamerica',
    'legalporno',
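Read alongside the scraper changes further down, an entry in profiles can now be either a single source slug or an array of slugs whose sites share actor assets; within such a bundle the sources act as an ordered fallback, so only the first one that returns a profile is used. A minimal sketch of a bundled configuration, using slugs taken from this diff (the surrounding keys and file layout are assumed, not quoted from the repository):

// Config sketch: a bundle groups redundant sources; later entries are only tried
// if the earlier ones return no profile.
module.exports = {
  profiles: [
    [
      // Gamma sites share actor assets, so one profile per bundle is enough
      'evilangel',
      'xempire',
      'blowpass',
    ],
    // standalone sources remain plain strings
    'julesjordan',
    'naughtyamerica',
  ],
};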
@@ -1,6 +1,6 @@
'use strict';

// const config = require('config');
const config = require('config');
const Promise = require('bluebird');
const UrlPattern = require('url-pattern');
const moment = require('moment');
@@ -339,21 +339,36 @@ async function scrapeActors(actorNames) {
    try {
      const actorSlug = slugify(actorName);
      const actorEntry = await knex('actors').where({ slug: actorSlug }).first();
      const sources = argv.sources ? argv.sources.map(source => [source, scrapers.actors[source]]) : Object.entries(scrapers.actors);
      const sources = argv.sources || config.profiles || Object.keys(scrapers.actors);

      const profiles = await Promise.map(sources, async (source) => {
        // const [scraperSlug, scraper] = source;
        const profileScrapers = [].concat(source).map(slug => ({ scraperSlug: slug, scraper: scrapers.actors[slug] }));

      const profiles = await Promise.map(sources, async ([scraperSlug, scraper]) => {
        try {
          return profileScrapers.reduce(async (outcome, { scraper, scraperSlug }) => outcome.catch(async () => {
            logger.verbose(`Searching '${actorName}' on ${scraperSlug}`);

            const profile = await scraper.fetchProfile(actorEntry ? actorEntry.name : actorName, scraperSlug);

            if (profile) {
              logger.verbose(`Found profile for '${actorName}' on ${scraperSlug}`);

              return {
                ...profile,
                name: actorName,
                scraper: scraperSlug,
              };
            } catch (error) {
              console.log(scraperSlug, error);
              return null;
            }

            logger.verbose(`No profile for '${actorName}' available on ${scraperSlug}`);
            throw new Error(`Profile for ${actorName} not available on ${scraperSlug}`);
          }), Promise.reject(new Error()));
        } catch (error) {
          logger.error(error);
        }

        return null;
      });

      const profile = await mergeProfiles(profiles, actorEntry);
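Two details in this hunk carry the bundling feature: [].concat(source) lets a profiles entry be either a single slug or an array of slugs, and the reduce over an initially rejected promise chains the bundled scrapers so each one only runs inside the previous one's catch handler. A stripped-down sketch of that fallback pattern, with made-up scraper functions standing in for the real ones (fetchA and fetchB are hypothetical):

// Sketch of the promise-chain fallback: start from a rejected promise and attach each
// scraper as a catch handler, so the chain settles on the first scraper that succeeds.
const fetchA = async () => { throw new Error('no profile on A'); }; // hypothetical scraper
const fetchB = async () => ({ name: 'Example Actor', scraper: 'b' }); // hypothetical scraper

async function firstProfile(scraperFns) {
  return scraperFns.reduce(
    async (outcome, scrape) => outcome.catch(() => scrape()),
    Promise.reject(new Error('no scraper tried yet')),
  );
}

firstProfile([fetchA, fetchB]).then(profile => console.log(profile));
// -> { name: 'Example Actor', scraper: 'b' }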
@@ -3,7 +3,7 @@
const { fetchScene, fetchLatest, fetchProfile } = require('./mindgeek');

async function networkFetchProfile(actorName) {
  return fetchProfile(actorName, 'men');
  return fetchProfile(actorName, 'men', 'modelprofile');
}

module.exports = {
@@ -217,7 +217,8 @@ async function fetchProfile(actorName, networkName, actorPath = 'model') {
  const actorData = res.body.result.find(actor => actor.name.toLowerCase() === actorName.toLowerCase());

  if (actorData) {
    const actorRes = await bhttp.get(`https://www.${networkName}.com/${actorPath}/${actorData.id}/`);
    const actorUrl = `https://www.${networkName}.com/${actorPath}/${actorData.id}/`;
    const actorRes = await bhttp.get(actorUrl);

    if (actorRes.statusCode === 200) {
      return scrapeProfile(actorData, actorRes.body.toString());
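Together with the men.js change above, this is the actor-path fix named in the commit message: fetchProfile defaults actorPath to 'model', but the Men scraper now passes 'modelprofile' explicitly. An illustration of how the URL template resolves (the profileUrl helper and the actor id are made up for the example, not part of the codebase):

// Illustration only: how the actorPath argument changes the constructed profile URL.
function profileUrl(networkName, actorId, actorPath = 'model') {
  return `https://www.${networkName}.com/${actorPath}/${actorId}/`;
}

profileUrl('brazzers', 12345);            // https://www.brazzers.com/model/12345/ (default path)
profileUrl('men', 12345, 'modelprofile'); // https://www.men.com/modelprofile/12345/ (Men override)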