forked from DebaucheryLibrarian/traxxx
Fixed Nubiles base poster query, handling trailing commas in qu source set. Added profile scene scraper to Dogfart. Added tag photo.
@@ -6,6 +6,8 @@ const { JSDOM } = require('jsdom');
 const moment = require('moment');
 
 const http = require('../utils/http');
+const slugify = require('../utils/slugify');
+const qu = require('../utils/qu');
 
 async function getPhotos(albumUrl) {
   const res = await http.get(albumUrl);
@@ -20,51 +22,51 @@ async function getPhotos(albumUrl) {
 
     return {
       url: pageUrl,
-      extract: ({ qu }) => qu.q('.scenes-module img', 'src'),
+      extract: ({ query }) => query.img('.scenes-module img'),
     };
   });
 
   return photoUrls;
 }
 
-function scrapeLatest(html, site) {
+function scrapeLatest(html, site, filter = true) {
   const { document } = new JSDOM(html).window;
   const sceneElements = Array.from(document.querySelectorAll('.recent-updates'));
 
-  return sceneElements.reduce((acc, element) => {
-    const siteUrl = element.querySelector('.help-block').textContent;
+  return sceneElements.map((element) => {
+    const siteUrl = element.querySelector('.recent-details-title .help-block, .model-details-title .site-name').textContent;
 
-    if (`www.${siteUrl.toLowerCase()}` !== new URL(site.url).host) {
+    if (filter && `www.${siteUrl.toLowerCase()}` !== new URL(site.url).host) {
       // different dogfart site
-      return acc;
+      return null;
    }
 
     const sceneLinkElement = element.querySelector('.thumbnail');
-    const url = `https://dogfartnetwork.com${sceneLinkElement.href}`;
+    const url = qu.prefixUrl(sceneLinkElement.href, 'https://dogfartnetwork.com');
     const { pathname } = new URL(url);
     const entryId = `${site.slug}_${pathname.split('/')[4]}`;
 
     const title = element.querySelector('.scene-title').textContent;
-    const actors = title.split(/[,&]|\band\b/).map(actor => actor.trim());
+    const actors = title.split(/[,&]|\band\b/).map(actor => actor.replace(/BTS/i, '').trim());
 
     const poster = `https:${element.querySelector('img').src}`;
     const teaser = sceneLinkElement.dataset.preview_clip_url;
 
-    return [
-      ...acc,
-      {
-        url,
-        entryId,
-        title,
-        actors,
-        poster,
-        teaser: {
-          src: teaser,
-        },
-        site,
-      },
-    ];
-  }, []);
+    const channel = siteUrl?.match(/(.*).com/)?.[1].toLowerCase();
+
+    return {
+      url,
+      entryId,
+      title,
+      actors,
+      poster,
+      teaser: {
+        src: teaser,
+      },
+      site,
+      channel,
+    };
+  }).filter(Boolean);
 }
 
 async function scrapeScene(html, url, site) {
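The hunk above also swaps the reduce accumulator for map plus filter(Boolean). A minimal standalone sketch of that pattern, with made-up items standing in for the .recent-updates elements (not the scraper's real data):

// Hypothetical items; only the skip-or-keep shape matters here.
const items = [
  { site: 'exampleone.com', title: 'Scene A' },
  { site: 'exampletwo.com', title: 'Scene B' },
];

// Old shape: a reduce accumulator, skipping an item by returning acc unchanged.
const keptOld = items.reduce((acc, item) => (item.site === 'exampleone.com' ? [...acc, item.title] : acc), []);

// New shape: map each item to a value or null, then drop the nulls.
const keptNew = items
  .map(item => (item.site === 'exampleone.com' ? item.title : null))
  .filter(Boolean);

console.log(keptOld, keptNew); // [ 'Scene A' ] [ 'Scene A' ]

Mapping to null and filtering afterwards keeps the per-element logic flat and avoids threading an accumulator through early returns.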
@@ -97,8 +99,8 @@ async function scrapeScene(html, url, site) {
   const poster = `https:${trailerElement.dataset.poster}`;
   const { trailer } = trailerElement.dataset;
 
-  const lastPhotosUrl = Array.from(document.querySelectorAll('.pagination a')).slice(-1)[0].href;
-  const photos = await getPhotos(`${origin}${pathname}${lastPhotosUrl}`, site, url);
+  const lastPhotosUrl = Array.from(document.querySelectorAll('.pagination a')).slice(-1)[0]?.href;
+  const photos = lastPhotosUrl ? await getPhotos(`${origin}${pathname}${lastPhotosUrl}`, site, url) : [];
 
   const stars = Math.floor(Number(document.querySelector('span[itemprop="average"]')?.textContent || document.querySelector('span[itemprop="ratingValue"]')?.textContent) / 2);
   const tags = Array.from(document.querySelectorAll('.scene-details .categories a')).map(({ textContent }) => textContent);
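For context on the pagination change above: when a scene has no photo album, '.pagination a' matches nothing, so the old [0].href threw a TypeError. A tiny hypothetical illustration of the guarded version (plain arrays, no DOM):

// What querySelectorAll('.pagination a') effectively yields when a scene has no album.
const links = [];

const lastPhotosUrl = links.slice(-1)[0]?.href; // undefined instead of a TypeError
const photos = lastPhotosUrl ? ['...album pages would be fetched here...'] : []; // falls back to no photos

console.log(lastPhotosUrl, photos); // undefined []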
@@ -137,7 +139,23 @@ async function fetchScene(url, site) {
   return scrapeScene(res.body.toString(), url, site);
 }
 
+async function fetchProfile(baseActor, entity) {
+  const slug = slugify(baseActor.name, '+');
+  const url = `https://www.dogfartnetwork.com/tour/girls/${slug}/`;
+
+  const res = await http.get(url);
+
+  if (res.ok) {
+    const scenes = scrapeLatest(res.body, entity, false);
+
+    return { scenes };
+  }
+
+  return res.status;
+}
+
 module.exports = {
   fetchLatest,
   fetchScene,
+  fetchProfile,
 };
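The new fetchProfile builds the tour URL from the actor name and reuses scrapeLatest with filtering disabled, returning whatever scenes appear on the profile page. A rough sketch of the URL shape it relies on; the slugify below is a hypothetical stand-in, since the real ../utils/slugify helper is not part of this diff:

// Hypothetical stand-in for ../utils/slugify, assuming it lowercases and joins words
// with the given delimiter ('+' in fetchProfile above).
const slugify = (name, delimiter = '-') => name.trim().toLowerCase().split(/\s+/).join(delimiter);

const baseActor = { name: 'Jane Doe' }; // hypothetical actor
const url = `https://www.dogfartnetwork.com/tour/girls/${slugify(baseActor.name, '+')}/`;

console.log(url); // https://www.dogfartnetwork.com/tour/girls/jane+doe/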
@@ -45,8 +45,9 @@ function scrapeAll(scenes, site, origin) {
   release.date = qu.date('.date', 'MMM D, YYYY');
   release.actors = qu.all('.models a.model', true);
 
-  const poster = qu.q('img').dataset.original;
-  release.poster = [
+  const poster = qu.sourceSet('img', 'data-srcset')?.[0];
+
+  release.poster = poster && [
     poster.replace('_640', '_1280'),
     poster,
   ];
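This hunk appears to be the Nubiles poster fix from the commit message: the poster now comes from the img element's data-srcset source set rather than dataset.original, and the poster && guard leaves release.poster empty when no source is found instead of throwing. A hypothetical illustration of the fallback pair it builds (the URL is made up):

// Hypothetical first source-set entry; the real value comes from qu.sourceSet('img', 'data-srcset')?.[0].
const poster = 'https://cdn.example.com/covers/scene_640.jpg';

const posterCandidates = poster && [
  poster.replace('_640', '_1280'), // presumably the larger rendition, tried first
  poster, // the advertised size as a fallback
];

console.log(posterCandidates);
// [ 'https://cdn.example.com/covers/scene_1280.jpg', 'https://cdn.example.com/covers/scene_640.jpg' ]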
@@ -197,6 +197,7 @@ const scrapers = {
   devilsfilm: famedigital,
   digitalplayground,
   dtfsluts: fullpornnetwork,
+  dogfartnetwork: dogfart,
   dorcelclub: dorcel,
   doubleviewcasting: firstanalquest,
   elegantangel,
@@ -253,7 +253,7 @@ function urls(context, selector = 'a', attr = 'href', { origin, protocol = 'http
   return attr ? urlEls.map(urlEl => prefixUrl(urlEl, origin, protocol)) : urlEls;
 }
 
-function sourceSet(context, selector, attr, options = {}) {
+function sourceSet(context, selector, attr = 'srcset', options = {}) {
   const srcset = q(context, selector, attr);
 
   if (!srcset) {
@@ -265,11 +265,16 @@ function sourceSet(context, selector, attr, options = {}
     .map((source) => {
       const [link, descriptor] = source.split(' ');
 
-      return {
-        descriptor: descriptor || 'fallback',
-        url: prefixUrl(link, options.origin, options.protocol),
-      };
+      if (link) {
+        return {
+          descriptor: descriptor || 'fallback',
+          url: prefixUrl(link, options.origin, options.protocol),
+        };
+      }
+
+      return null;
     })
+    .filter(Boolean)
     .sort((sourceA, sourceB) => {
       if (sourceB.descriptor === 'fallback' || parseInt(sourceA.descriptor, 10) > parseInt(sourceB.descriptor, 10)) {
         return -1;
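This is the trailing-comma handling mentioned in the commit message: a srcset that ends with a comma splits into a final empty entry, so link comes out empty and the new if (link) / .filter(Boolean) drops it before sorting. A minimal standalone sketch of that step, assuming the helper splits on commas (the split itself sits above this hunk) and leaving out the prefixUrl call:

// Hypothetical srcset value with a trailing comma.
const srcset = 'https://cdn.example.com/a_640.jpg 640w, https://cdn.example.com/a_1280.jpg 1280w,';

const sources = srcset
  .split(/\s*,\s*/) // the trailing comma yields a final '' entry
  .map((source) => {
    const [link, descriptor] = source.split(' ');

    // An empty entry has no link; skip it instead of emitting { url: '' }.
    if (link) {
      return { descriptor: descriptor || 'fallback', url: link };
    }

    return null;
  })
  .filter(Boolean)
  .sort((a, b) => (b.descriptor === 'fallback' || parseInt(a.descriptor, 10) > parseInt(b.descriptor, 10) ? -1 : 1));

console.log(sources);
// [ { descriptor: '1280w', url: 'https://cdn.example.com/a_1280.jpg' },
//   { descriptor: '640w', url: 'https://cdn.example.com/a_640.jpg' } ]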