// traxxx/src/scrapers/fullpornnetwork.js
'use strict';
const { get, geta, ctxa } = require('../utils/q');
const slugify = require('../utils/slugify');
/**
 * Scrape scene tiles from a listing page.
 *
 * @param {Array} scenes - Items from the query wrapper, each exposing `qu`.
 * @param {Object} [site] - Site descriptor with a `url` base. May be absent
 *   (e.g. when called from scrapeProfile); release URLs then stay relative.
 * @returns {Array} Scraped release objects.
 */
function scrapeAll(scenes, site) {
	return scenes.map(({ qu }) => {
		const release = {};

		const path = qu.url('.scene-title a');
		release.url = site ? `${site.url}${path}` : path;

		// Derive the entry ID from the URL slug. A dummy base keeps `new URL`
		// valid when only a relative path is available, and `.pathname` strips
		// any query string just like the original full-URL parse did.
		release.entryId = new URL(path, 'https://fpn.local').pathname
			.toLowerCase()
			.replace(/\/$/, '')
			.split('/')
			.slice(-1)[0];

		release.title = qu.q('.scene-title', true);

		// Duration is only published as whole minutes in the details blurb;
		// skip it rather than crash when the blurb or pattern is missing.
		const details = qu.q('.scene-details', true);
		const minutesMatch = details && details.match(/(\d+) minutes/);
		if (minutesMatch) release.duration = Number(minutesMatch[1]) * 60;

		const actorText = qu.text('.update-models');
		if (actorText) release.actors = actorText.trim().split(/\s*,\s*/g);

		const poster = qu.img('.scene-thumb img');

		if (poster) {
			// The -1x thumbnail has a -2x retina counterpart; prefer that first.
			release.poster = [
				poster.replace('-1x', '-2x'),
				poster,
			];
		}

		return release;
	});
}
/**
 * Scrape a single scene page.
 *
 * @param {Object} context - Query wrapper context exposing `qu`.
 * @param {string} url - Canonical scene page URL.
 * @param {Object} site - Site descriptor with a `url` base for the trailer.
 * @returns {Object} Scraped release.
 */
function scrapeScene({ qu }, url, site) {
	// The last path segment of the scene URL doubles as its entry ID.
	const { pathname } = new URL(url);
	const slug = pathname
		.toLowerCase()
		.replace(/\/$/, '')
		.split('/')
		.pop();

	const release = {
		url,
		entryId: slug,
		title: qu.q('h4.text-center', true),
		description: qu.q('p.hide-for-small-only', true),
		actors: qu.all('a[href*="/model"]', true),
		tags: qu.all('a[href*="/category"]', true),
	};

	const trailerPath = qu.video('source');
	if (trailerPath) {
		release.trailer = { src: `${site.url}${trailerPath}` };
	}

	return release;
}
/**
 * Scrape a model profile page.
 *
 * @param {Object} context - Query wrapper context exposing `el` and `qu`.
 * @param {string} actorName - Actor name being looked up.
 * @param {Object} [site] - Site descriptor; forwarded to scrapeAll so the
 *   profile's releases get absolute URLs. Optional for backward compatibility.
 * @returns {?Object} Scraped profile, or null when the page is not this actor.
 */
function scrapeProfile({ el, qu }, actorName, site) {
	// The site serves a generic page instead of a 404 for unknown models,
	// so verify the page heading actually matches the requested actor.
	if (slugify(qu.q('h1', true)) !== slugify(actorName)) {
		return null;
	}

	const profile = {};

	const description = qu.q('h4 + p', true);
	if (description) profile.description = description;

	const avatar = qu.img('main img');

	if (avatar) {
		// Prefer the 2x retina asset over the 1x the page links directly.
		profile.avatar = [
			avatar.replace('set-1x', 'set-2x'),
			avatar,
		];
	}

	// Forward `site` so scrapeAll can build absolute scene URLs; the original
	// call omitted it, leaving scrapeAll's `site` parameter undefined.
	profile.releases = scrapeAll(ctxa(el, '.update, .scene-update'), site);

	return profile;
}
/**
 * Fetch one page of the site's recent-updates listing.
 *
 * @param {Object} site - Site descriptor with a `url` base.
 * @param {number} [page=1] - 1-based page number.
 * @returns {Promise<Array|number>} Scraped releases, or the HTTP status on failure.
 */
async function fetchLatest(site, page = 1) {
	const listUrl = `${site.url}/1/scenes/recent/${page}/`;
	const res = await geta(listUrl, '.latest-updates .update, .scene-update');

	if (!res.ok) return res.status;

	return scrapeAll(res.items, site);
}
/**
 * Fetch and scrape a single scene page.
 *
 * @param {string} url - Scene page URL.
 * @param {Object} site - Site descriptor.
 * @returns {Promise<Object|number>} Scraped release, or the HTTP status on failure.
 */
async function fetchScene(url, site) {
	const res = await get(url, 'main');

	if (!res.ok || !res.item) return res.status;

	return scrapeScene(res.item, url, site);
}
/**
 * Fetch and scrape a model profile.
 *
 * @param {Object} actor - Actor entity; only `name` is used.
 * @param {Object} context - Context object carrying the `site` descriptor.
 * @returns {Promise<?Object|number>} Profile, null when the model page does not
 *   match the actor, or the HTTP status on failure.
 */
async function fetchProfile({ name: actorName }, { site }) {
	const actorSlug = slugify(actorName, '');
	const url = `${site.url}/1/model/${actorSlug}`;

	const res = await get(url);

	// Pass `site` along so profile releases can be given absolute URLs.
	return res.ok ? scrapeProfile(res.item, actorName, site) : res.status;
}
module.exports = {
fetchLatest,
fetchScene,
fetchProfile,
};