forked from DebaucheryLibrarian/traxxx
Added Jesse Loads Monster Facials. Added various logos.
This commit is contained in:
85
src/scrapers/jesseloadsmonsterfacials.js
Normal file
85
src/scrapers/jesseloadsmonsterfacials.js
Normal file
@@ -0,0 +1,85 @@
|
||||
'use strict';
|
||||
|
||||
const { get, initAll } = require('../utils/qu');
|
||||
|
||||
/**
 * Scrapes the scene releases listed on a tour page.
 *
 * @param {Array} scenes - qu-wrapped scene table elements, one per release.
 * @param {Array} dates - qu-wrapped date elements, index-aligned with scenes; may be null or shorter.
 * @param {Object} site - site record; only site.url is read.
 * @returns {Array} release objects with url, entryId, date, description, duration, poster and photos.
 */
function scrapeLatest(scenes, dates, site) {
	return scenes.map(({ qu }, index) => {
		const release = {};

		const path = qu.url('a');

		release.url = `${site.url}/visitors/${path}`;
		// The trailer filename doubles as the unique entry ID, e.g. videos/abc123_hd_trailer...
		release.entryId = path.match(/videos\/([a-zA-Z0-9]+)(?:_hd)?_trailer/)?.[1];

		// Dates are listed outside the scene tables, aligned by position.
		if (dates && dates[index]) {
			release.date = dates[index].qu.date(null, 'MM/DD/YYYY');
		}

		release.description = qu.q('tbody tr:nth-child(3) font', true);

		const infoLine = qu.q('font[color="#663366"]', true);
		// Guard the match itself, not just the line: an info line without an
		// "<n> min" token must not throw.
		const minutes = infoLine?.match(/(\d+) min/)?.[1];
		if (minutes) release.duration = Number(minutes) * 60;

		const poster = qu.img('img[src*="photos/"][width="400"]');
		// Only set the poster when one is present; avoids ".../visitors/null" URLs.
		if (poster) release.poster = `${site.url}/visitors/${poster}`;

		release.photos = qu.imgs('img[src*="photos/"]:not([width="400"])').map(source => `${site.url}/visitors/${source}`);

		return release;
	});
}
|
||||
|
||||
/**
 * Scrapes a single scene page.
 *
 * @param {Object} context - qu-wrapped page context ({ qu }).
 * @param {string} url - canonical scene URL.
 * @param {Object} site - site record; only site.url is read.
 * @returns {Object} release with url, entryId, actors and trailer sources.
 */
function scrapeScene({ qu }, url, site) {
	const release = { url };

	const { pathname } = new URL(url);
	// Same entry-ID pattern as scrapeLatest: the _hd infix is optional, and a
	// non-matching path yields undefined instead of throwing.
	release.entryId = pathname.match(/videos\/([a-zA-Z0-9]+)(?:_hd)?_trailer/)?.[1];

	const actor = qu.q('font[color="#990033"] strong', true);
	release.actors = [actor];

	const hdTrailer = qu.url('a[href*="hd_trailer.mp4"]');
	const sdTrailer = qu.url('a[href*="hd_trailer_mobile.mp4"]');

	// Skip qualities whose link is absent rather than emitting ".../videos/null".
	release.trailer = [
		hdTrailer && {
			src: `${site.url}/visitors/videos/${hdTrailer}`,
			quality: 1080,
		},
		sdTrailer && {
			src: `${site.url}/visitors/videos/${sdTrailer}`,
			quality: 270,
		},
	].filter(Boolean);

	return release;
}
|
||||
|
||||
/**
 * Fetches a tour page and scrapes its scene listing.
 *
 * @param {Object} site - site record; forwarded to scrapeLatest.
 * @param {number} page - 1-based tour page number (zero-padded in the URL).
 * @returns {Promise<Array|number>} scraped releases, or the HTTP status on failure.
 */
async function fetchLatest(site, page = 1) {
	const pageSlug = String(page).padStart(2, '0');
	const res = await get(`https://jesseloadsmonsterfacials.com/visitors/tour_${pageSlug}.html`);

	if (res.ok) {
		const { el } = res.item;

		const sceneEls = initAll(el, 'table[width="880"]');
		const dateEls = initAll(el, 'font[color="#000000"] strong:not(:empty)');

		return scrapeLatest(sceneEls, dateEls, site);
	}

	return res.status;
}
|
||||
|
||||
/**
 * Fetches a scene page and scrapes it.
 *
 * @param {string} url - scene page URL.
 * @param {Object} site - site record; forwarded to scrapeScene.
 * @returns {Promise<Object|number>} the scraped release, or the HTTP status on failure.
 */
async function fetchScene(url, site) {
	const res = await get(url);

	return res.ok
		? scrapeScene(res.item, url, site)
		: res.status;
}
|
||||
|
||||
// Public scraper interface consumed by the scraper registry.
module.exports = { fetchLatest, fetchScene };
|
||||
@@ -25,6 +25,7 @@ const hush = require('./hush');
|
||||
const iconmale = require('./iconmale');
|
||||
const insex = require('./insex');
|
||||
const jayrock = require('./jayrock');
|
||||
const jesseloadsmonsterfacials = require('./jesseloadsmonsterfacials');
|
||||
const julesjordan = require('./julesjordan');
|
||||
const kellymadison = require('./kellymadison');
|
||||
const kink = require('./kink');
|
||||
@@ -96,6 +97,7 @@ module.exports = {
|
||||
insex,
|
||||
interracialpass: hush,
|
||||
jayrock,
|
||||
jesseloadsmonsterfacials,
|
||||
julesjordan,
|
||||
kellymadison,
|
||||
kink,
|
||||
|
||||
@@ -158,7 +158,7 @@ async function updateReleasesSearch(releaseIds) {
|
||||
releases.id AS release_id,
|
||||
TO_TSVECTOR(
|
||||
'traxxx',
|
||||
releases.title || ' ' ||
|
||||
COALESCE(releases.title, '') || ' ' ||
|
||||
networks.name || ' ' ||
|
||||
networks.slug || ' ' ||
|
||||
networks.url || ' ' ||
|
||||
|
||||
@@ -111,7 +111,7 @@ async function scrapeReleases(scraper, site, preData, upcoming = false) {
|
||||
return pageAccReleases;
|
||||
};
|
||||
|
||||
const rawReleases = await scrapePage(1, []);
|
||||
const rawReleases = await scrapePage(argv.page || 1, []);
|
||||
const releases = upcoming
|
||||
? rawReleases.map(rawRelease => ({ ...rawRelease, upcoming: true }))
|
||||
: rawReleases;
|
||||
@@ -135,7 +135,7 @@ async function scrapeLatestReleases(scraper, site, preData) {
|
||||
try {
|
||||
return await scrapeReleases(scraper, site, preData, false);
|
||||
} catch (error) {
|
||||
logger.warn(`Failed to scrape latest updates for '${site.slug}' (${site.network.slug})`);
|
||||
logger.warn(`Failed to scrape latest updates for '${site.slug}' (${site.network.slug}): ${error.message}`);
|
||||
}
|
||||
|
||||
return [];
|
||||
@@ -149,7 +149,7 @@ async function scrapeUpcomingReleases(scraper, site, preData) {
|
||||
try {
|
||||
return await scrapeReleases(scraper, site, preData, true);
|
||||
} catch (error) {
|
||||
logger.warn(`Failed to scrape upcoming updates for '${site.slug}' (${site.network.slug})`);
|
||||
logger.warn(`Failed to scrape upcoming updates for '${site.slug}' (${site.network.slug}): ${error.message}`);
|
||||
}
|
||||
|
||||
return [];
|
||||
|
||||
Reference in New Issue
Block a user