Compare commits

...

4 Commits

6 changed files with 79 additions and 47 deletions

View File

@@ -12,27 +12,28 @@ module.exports = {
   // include: [],
   // exclude: [],
   profiles: [
-    'evilangel',
+    [
+      'evilangel',
+      // Gamma; Evil angel and Wicked have their own assets
       'xempire',
       'blowpass',
+      'wicked',
+    ],
+    [
+      // MindGeek; Brazzers and Mile High Media have their own assets
       'realitykings',
       'mofos',
-      'babes',
       'digitalplayground',
+      'babes',
       'fakehub',
-      'milehighmedia',
-      'wicked',
-      'metrohd',
       'iconmale',
       'men',
+      'metrohd',
       'transangels',
-    '21sextury',
+    ],
     'brazzers',
+    'milehighmedia',
+    'wicked',
+    '21sextury',
     'julesjordan',
     'naughtyamerica',
     'legalporno',
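A note on the shape change: a plain string entry is a single profile source, while a nested array is an ordered fallback group whose scrapers share one slot and are tried in turn (see the scrapeActors() diff further down). A minimal sketch of how both shapes can be normalized to a list, using the same `[].concat` trick as the new code; the `entries` value here is illustrative:

```js
// Sketch: a profiles entry may be a bare slug or a fallback group.
// [].concat(x) wraps a bare string and spreads an array's items.
const entries = ['brazzers', ['evilangel', 'xempire', 'blowpass']];

const normalized = entries.map(entry => [].concat(entry));

console.log(normalized);
// => [ [ 'brazzers' ], [ 'evilangel', 'xempire', 'blowpass' ] ]
```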

package-lock.json (generated, 2 changed lines)
View File

@@ -1,6 +1,6 @@
 {
   "name": "traxxx",
-  "version": "1.74.3",
+  "version": "1.76.0",
   "lockfileVersion": 1,
   "requires": true,
   "dependencies": {

View File

@@ -1,6 +1,6 @@
 {
   "name": "traxxx",
-  "version": "1.74.3",
+  "version": "1.76.0",
   "description": "All the latest porn releases in one place",
   "main": "src/app.js",
   "scripts": {

View File

@@ -1,6 +1,6 @@
 'use strict';
 
-// const config = require('config');
+const config = require('config');
 const Promise = require('bluebird');
 const UrlPattern = require('url-pattern');
 const moment = require('moment');
@@ -339,21 +339,36 @@ async function scrapeActors(actorNames) {
     try {
       const actorSlug = slugify(actorName);
       const actorEntry = await knex('actors').where({ slug: actorSlug }).first();
-      const sources = argv.sources ? argv.sources.map(source => [source, scrapers.actors[source]]) : Object.entries(scrapers.actors);
+      const sources = argv.sources || config.profiles || Object.keys(scrapers.actors);
 
-      const profiles = await Promise.map(sources, async ([scraperSlug, scraper]) => {
+      const profiles = await Promise.map(sources, async (source) => {
+        // const [scraperSlug, scraper] = source;
+        const profileScrapers = [].concat(source).map(slug => ({ scraperSlug: slug, scraper: scrapers.actors[slug] }));
+
         try {
-          const profile = await scraper.fetchProfile(actorEntry ? actorEntry.name : actorName, scraperSlug);
+          return profileScrapers.reduce(async (outcome, { scraper, scraperSlug }) => outcome.catch(async () => {
+            logger.verbose(`Searching '${actorName}' on ${scraperSlug}`);
 
-          return {
-            ...profile,
-            name: actorName,
-            scraper: scraperSlug,
-          };
+            const profile = await scraper.fetchProfile(actorEntry ? actorEntry.name : actorName, scraperSlug);
+
+            if (profile) {
+              logger.verbose(`Found profile for '${actorName}' on ${scraperSlug}`);
+
+              return {
+                ...profile,
+                name: actorName,
+                scraper: scraperSlug,
+              };
+            }
+
+            logger.verbose(`No profile for '${actorName}' available on ${scraperSlug}`);
+            throw new Error(`Profile for ${actorName} not available on ${scraperSlug}`);
+          }), Promise.reject(new Error()));
         } catch (error) {
-          console.log(scraperSlug, error);
-          return null;
+          logger.error(error);
         }
+
+        return null;
       });
 
       const profile = await mergeProfiles(profiles, actorEntry);
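The core of this change is the reduce over profileScrapers: starting from a rejected promise, each step's .catch() tries the next scraper in the group, so the first scraper that yields a profile short-circuits the rest. A self-contained sketch of the pattern; the source names and fetchers here are hypothetical stand-ins for scraper.fetchProfile():

```js
// Sequential fallback over async sources via reduce + .catch().
const sources = [
  { name: 'first', fetch: async () => null }, // finds nothing
  { name: 'second', fetch: async () => ({ name: 'Jane Doe' }) }, // succeeds
];

function firstAvailable(items) {
  return items.reduce(async (outcome, { name, fetch }) => outcome.catch(async () => {
    const result = await fetch();
    if (result) return { ...result, source: name };
    throw new Error(`Not available on ${name}`);
  }), Promise.reject(new Error('no sources tried')));
}

firstAvailable(sources)
  .then(profile => console.log(profile)) // => { name: 'Jane Doe', source: 'second' }
  .catch(() => console.log('no profile found on any source'));
```

The rejected initial value is what makes the chain uniform: every source, including the first, runs inside a .catch() of the previous outcome.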

View File

@@ -3,7 +3,7 @@
 const { fetchScene, fetchLatest, fetchProfile } = require('./mindgeek');
 
 async function networkFetchProfile(actorName) {
-  return fetchProfile(actorName, 'men');
+  return fetchProfile(actorName, 'men', 'modelprofile');
 }
 
 module.exports = {
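For context: the shared mindgeek fetchProfile() takes the actor-page path as a third parameter defaulting to 'model' (visible in the mindgeek diff further down), and the Men module now overrides it with 'modelprofile'. A tiny sketch of that default-parameter delegation; buildActorUrl is a hypothetical helper, not part of the codebase:

```js
// Sketch: a shared scraper exposes a path parameter with a default so
// thin per-site modules can override it.
function buildActorUrl(networkName, actorId, actorPath = 'model') {
  return `https://www.${networkName}.com/${actorPath}/${actorId}/`;
}

console.log(buildActorUrl('realitykings', 42)); // => https://www.realitykings.com/model/42/
console.log(buildActorUrl('men', 42, 'modelprofile')); // => https://www.men.com/modelprofile/42/
```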

View File

@@ -61,36 +61,36 @@ async function scrapeLatest(items, site) {
   return Promise.all(items.map(async data => scrapeLatestX(data, site)));
 }
 
-async function scrapeScene(data, url, site) {
+function scrapeScene(data, url, _site) {
+  const release = {};
+
   const { id: entryId, title, description } = data;
 
-  const date = new Date(data.dateReleased);
-  const actors = data.actors.map(actor => actor.name);
-  const tags = data.tags.map(tag => tag.name);
+  release.entryId = data.id;
+  release.title = title;
+  release.description = description;
+
+  release.date = new Date(data.dateReleased);
+  release.actors = data.actors.map(actor => actor.name);
+  release.tags = data.tags.map(tag => tag.name);
 
-  const [poster, ...photos] = getThumbs(data);
+  [release.poster, ...release.photos] = getThumbs(data);
+
   const trailer = data.videos.mediabook && (data.videos.mediabook.files['720p'] || data.videos.mediabook.files['320p']);
 
-  const siteName = data.collections[0].name;
-  const channel = siteName.replace(/\s+/g, '').toLowerCase();
-
-  return {
-    url,
-    entryId,
-    title,
-    description,
-    actors,
-    tags,
-    poster,
-    photos,
-    trailer: trailer && {
+  if (trailer) {
+    release.trailer = {
       src: trailer.urls.view,
       quality: parseInt(trailer.format, 10),
-    },
-    date,
-    site,
-    channel,
-  };
+    };
+  }
+
+  const siteName = data.collections[0].name;
+  release.channel = siteName.replace(/\s+/g, '').toLowerCase();
+
+  release.url = url || `https://www.realitykings.com/scene/${entryId}/`;
+
+  return release;
 }
 
 function getUrl(site) {
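One idiom in the rewritten scrapeScene() worth flagging: `[release.poster, ...release.photos] = getThumbs(data);` destructures an array directly into properties of an existing object, with no intermediate variables. A standalone sketch with dummy data:

```js
// Array destructuring can assign straight into object properties.
const release = {};
const thumbs = ['poster.jpg', 'photo-1.jpg', 'photo-2.jpg']; // dummy data

[release.poster, ...release.photos] = thumbs;

console.log(release);
// => { poster: 'poster.jpg', photos: [ 'photo-1.jpg', 'photo-2.jpg' ] }
```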
@@ -124,7 +124,7 @@ async function getSession(url) {
   return { session, instanceToken };
 }
 
-function scrapeProfile(data, html) {
+function scrapeProfile(data, html, releases = []) {
   const { qa, qd } = ex(html);
 
   const profile = {
@@ -155,6 +155,8 @@ function scrapeProfile(data, html) {
   const birthdate = qa('li').find(el => /Date of Birth/.test(el.textContent));
   if (birthdate) profile.birthdate = qd(birthdate, 'span', 'MMMM Do, YYYY');
 
+  profile.releases = releases.map(release => scrapeScene(release));
+
   return profile;
 }
@@ -217,7 +219,21 @@ async function fetchProfile(actorName, networkName, actorPath = 'model') {
   const actorData = res.body.result.find(actor => actor.name.toLowerCase() === actorName.toLowerCase());
 
   if (actorData) {
-    const actorRes = await bhttp.get(`https://www.${networkName}.com/${actorPath}/${actorData.id}/`);
+    const actorUrl = `https://www.${networkName}.com/${actorPath}/${actorData.id}/`;
+    const actorReleasesUrl = `https://site-api.project1service.com/v2/releases?actorId=${actorData.id}&limit=100&offset=0&orderBy=-dateReleased&type=scene`;
+
+    const [actorRes, actorReleasesRes] = await Promise.all([
+      bhttp.get(actorUrl),
+      session.get(actorReleasesUrl, {
+        headers: {
+          Instance: instanceToken,
+        },
+      }),
+    ]);
+
+    if (actorRes.statusCode === 200 && actorReleasesRes.statusCode === 200 && actorReleasesRes.body.result) {
+      return scrapeProfile(actorData, actorRes.body.toString(), actorReleasesRes.body.result);
+    }
 
     if (actorRes.statusCode === 200) {
       return scrapeProfile(actorData, actorRes.body.toString());
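The reworked fetchProfile() requests the actor page and the releases API in parallel, attaches releases only when both calls succeed, and otherwise falls back to the page-only scrape. A reduced sketch of that guard; fetchPage and fetchReleases are hypothetical stand-ins for the bhttp.get() and authenticated session.get() calls:

```js
// Parallel fetch with graceful degradation.
const fetchPage = async () => ({ statusCode: 200, body: '<html></html>' });
const fetchReleases = async () => ({ statusCode: 200, body: { result: [{ id: 1 }] } });

async function fetchActor() {
  const [pageRes, releasesRes] = await Promise.all([fetchPage(), fetchReleases()]);

  if (pageRes.statusCode === 200 && releasesRes.statusCode === 200 && releasesRes.body.result) {
    return { html: pageRes.body, releases: releasesRes.body.result };
  }

  if (pageRes.statusCode === 200) {
    return { html: pageRes.body, releases: [] }; // degrade to page-only data
  }

  return null;
}

fetchActor().then(actor => console.log(actor));
```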