Added MindGeek profile scraper for all MG sites.

2020-01-27 22:54:14 +01:00
parent 24fe61e064
commit 6d4fd5fd77
11 changed files with 124 additions and 23 deletions

@@ -7,6 +7,8 @@ const bhttp = require('bhttp');
 const { CookieJar } = Promise.promisifyAll(require('tough-cookie'));
 const moment = require('moment');
 
+const { ex } = require('../utils/q');
+const { inchesToCm, lbsToKg } = require('../utils/convert');
 const { cookieToData } = require('../utils/cookies');
 
 function getThumbs(scene) {
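The convert helpers imported above are not part of this diff; a minimal sketch of what ../utils/convert plausibly exports, assuming plain unit arithmetic with whole-number rounding (the real module may differ):

	// Hypothetical sketch of ../utils/convert.
	function inchesToCm(inches) {
		return Math.round(inches * 2.54); // 1 in = 2.54 cm
	}

	function lbsToKg(lbs) {
		return Math.round(lbs * 0.453592); // 1 lb ≈ 0.454 kg
	}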
@@ -105,11 +107,7 @@ function getUrl(site) {
 	throw new Error(`Mind Geek site '${site.name}' (${site.url}) not supported`);
 }
 
-async function fetchLatest(site, page = 1) {
-	const url = getUrl(site);
-	const { search } = new URL(url);
-	const siteId = new URLSearchParams(search).get('site');
-
+async function getSession(url) {
 	const cookieJar = new CookieJar();
 	const session = bhttp.session({ cookieJar });
@@ -118,6 +116,47 @@ async function fetchLatest(site, page = 1) {
 	const cookieString = await cookieJar.getCookieStringAsync(url);
 	const { instance_token: instanceToken } = cookieToData(cookieString);
 
+	return { session, instanceToken };
+}
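getSession depends on cookieToData to turn the serialized cookie string into a plain object so the instance_token cookie can be destructured. That helper also lives outside this diff; a minimal sketch, assuming standard key=value cookie pairs:

	// Hypothetical sketch of ../utils/cookies' cookieToData.
	function cookieToData(cookieString) {
		return cookieString.split(/;\s*/).filter(Boolean).reduce((acc, pair) => {
			const [key, ...rest] = pair.split('=');
			acc[key] = decodeURIComponent(rest.join('='));
			return acc;
		}, {});
	}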
+
+function scrapeProfile(data, html) {
+	const { qa, qd } = ex(html);
+
+	const profile = {
+		gender: data.gender,
+		description: data.bio,
+		aliases: data.aliases,
+	};
+
+	// guard against a missing measurements field before splitting
+	const [bust, waist, hip] = (data.measurements || '').split('-');
+
+	if (bust) profile.bust = bust.toUpperCase();
+	if (waist) profile.waist = waist;
+	if (hip) profile.hip = hip;
+
+	if (data.birthPlace) profile.birthPlace = data.birthPlace;
+	if (data.height) profile.height = inchesToCm(data.height);
+	if (data.weight) profile.weight = lbsToKg(data.weight);
+
+	profile.avatar = data.images.card_main_rect[0].xl?.url
+		|| data.images.card_main_rect[0].lg?.url
+		|| data.images.card_main_rect[0].md?.url
+		|| data.images.card_main_rect[0].sm?.url
+		|| data.images.card_main_rect[0].xs?.url;
+
+	const birthdate = qa('li').find(el => /Date of Birth/.test(el.textContent));
+	if (birthdate) profile.birthdate = qd(birthdate, 'span', 'MMMM Do, YYYY');
+
+	return profile;
+}
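For reference, a hypothetical actor payload in the shape scrapeProfile expects; the field names come from the code above, the values are invented:

	const exampleData = {
		gender: 'female',
		bio: 'Short biography text.',
		aliases: ['Alias One'],
		measurements: '34C-26-36', // split into bust/waist/hip
		birthPlace: 'Las Vegas, USA',
		height: 66, // inches, converted to cm
		weight: 120, // lbs, converted to kg
		images: {
			card_main_rect: [{ xl: { url: 'https://example.com/card_xl.jpg' } }],
		},
	};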
+
+async function fetchLatest(site, page = 1) {
+	const url = getUrl(site);
+	const { search } = new URL(url);
+	const siteId = new URLSearchParams(search).get('site');
+
+	const { session, instanceToken } = await getSession(url);
+
 	const beforeDate = moment().add(1, 'day').format('YYYY-MM-DD');
 	const limit = 10;
 	const apiUrl = `https://site-api.project1service.com/v2/releases?collectionId=${siteId}&dateReleased=<${beforeDate}&limit=${limit}&offset=${limit * (page - 1)}&orderBy=-dateReleased&type=scene`;
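As a worked example of the template above: with a hypothetical siteId of 290, page = 2, and the request made on the commit date (2020-01-27, so beforeDate is 2020-01-28), apiUrl expands to the following, before any URL encoding of the < operator:

	https://site-api.project1service.com/v2/releases?collectionId=290&dateReleased=<2020-01-28&limit=10&offset=10&orderBy=-dateReleased&type=scene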
@@ -132,14 +171,7 @@ async function fetchLatest(site, page = 1) {
 async function fetchScene(url, site) {
 	const entryId = url.match(/\d+/)[0];
 
-	const cookieJar = new CookieJar();
-	const session = bhttp.session({ cookieJar });
-
-	await session.get(url);
-
-	const cookieString = await cookieJar.getCookieStringAsync(url);
-	const { instance_token: instanceToken } = cookieToData(cookieString);
+	const { session, instanceToken } = await getSession(url);
 
 	const res = await session.get(`https://site-api.project1service.com/v2/releases/${entryId}`, {
 		headers: {
@@ -150,8 +182,31 @@ async function fetchScene(url, site) {
 	return scrapeScene(res.body.result, url, site);
 }
 
+async function fetchProfile(actorName, networkName, actorPath = 'model') {
+	const url = `https://www.${networkName}.com`;
+	const { session, instanceToken } = await getSession(url);
+
+	const res = await session.get(`https://site-api.project1service.com/v1/actors/?search=${encodeURI(actorName)}`, {
+		headers: {
+			Instance: instanceToken,
+		},
+	});
+
+	if (res.statusCode === 200) {
+		const actorData = res.body.result.find(actor => actor.name.toLowerCase() === actorName.toLowerCase());
+
+		// check for a match before dereferencing actorData.id
+		if (actorData) {
+			const actorRes = await bhttp.get(`https://www.${networkName}.com/${actorPath}/${actorData.id}/`);
+
+			if (actorRes.statusCode === 200) {
+				return scrapeProfile(actorData, actorRes.body.toString());
+			}
+		}
+	}
+
+	return null;
+}
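A hypothetical call, using placeholder values rather than real data: networkName is interpolated into the www host, and actorPath defaults to 'model' for sites that use a different profile path segment.

	const profile = await fetchProfile('Jane Doe', 'realitykings');
	if (profile) console.log(profile.gender, profile.height, profile.avatar);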
 module.exports = {
 	scrapeLatestX,
 	fetchLatest,
 	fetchScene,
+	fetchProfile,
 };