forked from DebaucheryLibrarian/traxxx
Added Interracial Pass sites. Fixed Hush removing poster from base release.
@@ -117,8 +117,8 @@ async function fetchLatest(site, page = 1, models) {
     return qLatest && scrapeLatest(qLatest, site, models);
 }
 
-async function fetchScene(url, site, release, preflight) {
-    const models = preflight || await fetchModels(site);
+async function fetchScene(url, site, release, beforeFetchLatest) {
+    const models = beforeFetchLatest || await fetchModels(site);
     const qScene = await get(url);
 
     return qScene && scrapeScene(qScene, url, site, models);
@@ -127,5 +127,5 @@ async function fetchScene(url, site, release, preflight) {
 module.exports = {
     fetchLatest,
     fetchScene,
-    preflight: fetchModels,
+    beforeFetchLatest: fetchModels,
 };
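
The hunks above rename the scraper's preflight hook to beforeFetchLatest while keeping fetchModels as its implementation, so whatever the hook resolves to is threaded back into fetchLatest and fetchScene as their trailing argument. A minimal sketch of that contract from the caller's side, assuming a hypothetical runScraper wrapper that is not part of this commit:

// Hypothetical driver: illustrates the hook contract only, not code from this commit.
async function runScraper(scraper, site, sceneUrl) {
    // Run the optional per-site hook once (here it resolves to the model list).
    const hookResult = scraper.beforeFetchLatest
        ? await scraper.beforeFetchLatest(site)
        : null;

    // The hook result is passed back in as the trailing argument of both fetchers.
    const latest = await scraper.fetchLatest(site, 1, hookResult);
    const scene = await scraper.fetchScene(sceneUrl, site, null, hookResult);

    return { latest, scene };
}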
@@ -2,6 +2,7 @@
 
 const util = require('util');
 
+const knex = require('../knex');
 const { get, geta, fd } = require('../utils/q');
 const slugify = require('../utils/slugify');
 
@@ -22,7 +23,7 @@ function extractPoster(posterPath, site, baseRelease) {
         return [posterSources, []];
     }
 
-    return [null, []];
+    return [baseRelease?.poster || null, []];
 }
 
 function scrapeLatest(scenes, site) {
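
The extractPoster change is the "Hush removing poster from base release" fix from the commit message: when nothing could be scraped, the old code returned [null, []] and wiped any poster the base release already carried, while the new code falls back to it. A small illustration, using simplified stand-in objects rather than traxxx internals:

// Illustration only: a stripped-down base release, not a real traxxx record.
const baseRelease = { poster: 'https://example.com/existing-poster.jpg' };

// Old behaviour: no scraped poster meant no poster at all.
const oldResult = [null, []];

// New behaviour: keep whatever the base release already has.
const newResult = [baseRelease?.poster || null, []];

console.log(oldResult[0]); // null
console.log(newResult[0]); // 'https://example.com/existing-poster.jpg'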
@@ -59,8 +60,8 @@ function scrapeLatestT1(scenes, site) {
     release.date = qd('.more-info-div', 'MMM D, YYYY');
     release.duration = ql('.more-info-div');
 
-    release.entryId = q('.img-div img', 'id')?.match(/set-target-(\d+)/)[1]
-        || `${slugify(fd(release.date, 'YYYY-MM-DD'))}-${slugify(release.title)}`;
+    // release.entryId = q('.img-div img', 'id')?.match(/set-target-(\d+)/)[1];
+    release.entryId = `${slugify(fd(release.date, 'YYYY-MM-DD'))}-${slugify(release.title)}`;
 
     const posterPath = q('.img-div img', 'src0_1x') || qi('img.video_placeholder');
 
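
Both T1 scrapers (this hunk and the scrapeSceneT1 hunk below) comment out the set-target-<id> lookup and derive entryId purely from the release date and title; the old expressions could throw whenever the image id did not match the expected pattern, so the date-and-title slug is now used unconditionally. A rough sketch of the new entryId, with stand-ins for traxxx's fd and slugify helpers whose exact behaviour is assumed here:

// Stand-in helpers, assumed to roughly mirror ../utils/q's fd() and ../utils/slugify.
const fd = (date, _format) => date.toISOString().slice(0, 10); // assumes 'YYYY-MM-DD'
const slugify = str => str.toLowerCase().replace(/[^a-z0-9]+/g, '-');

const release = { date: new Date('2021-03-05'), title: 'Example Scene Title' };
const entryId = `${slugify(fd(release.date, 'YYYY-MM-DD'))}-${slugify(release.title)}`;

console.log(entryId); // '2021-03-05-example-scene-title'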
@@ -102,7 +103,7 @@ function scrapeScene({ html, q, qa, qd, ql }, site, url, baseRelease) {
     return release;
 }
 
-function scrapeSceneT1({ html, q, qa, qd, ql, qtx }, site, url, baseRelease) {
+function scrapeSceneT1({ html, q, qa, qd, ql, qtx }, site, url, baseRelease, channelRegExp) {
     const release = { url };
 
     release.title = q('.trailer-section-head .section-title', true);
@@ -111,8 +112,8 @@ function scrapeSceneT1({ html, q, qa, qd, ql, qtx }, site, url, baseRelease) {
     release.date = qd('.update-info-row', 'MMM D, YYYY', /\w+ \d{1,2}, \d{4}/);
     release.duration = ql('.update-info-row:nth-child(2)');
 
-    release.entryId = q('.player-thumb img', 'id').match(/set-target-(\d+)/)[1]
-        || `${slugify(fd(release.date, 'YYYY-MM-DD'))}-${slugify(release.title)}`;
+    // release.entryId = q('.player-thumb img', 'id')?.match(/set-target-(\d+)/)[1];
+    release.entryId = `${slugify(fd(release.date, 'YYYY-MM-DD'))}-${slugify(release.title)}`;
 
     release.actors = qa('.models-list-thumbs a').map(el => ({
         name: q(el, 'span', true),
@@ -136,8 +137,8 @@ function scrapeSceneT1({ html, q, qa, qd, ql, qtx }, site, url, baseRelease) {
     const stars = q('.update-rating', true).match(/\d.\d/)?.[0];
     if (stars) release.stars = Number(stars);
 
-    if (site.slug === 'hushpass') {
-        const channel = release.tags.find(tag => /Shot Her First|WhiteZilla|Frat House Fuck Fest|Freaky First Timers|MILF Invaders|Housewives Need Cash|Bubble Butt Bonanza|Suburban Sex Party|Butt Naked In Streets/i.test(tag));
+    if (channelRegExp) {
+        const channel = release.tags.find(tag => channelRegExp.test(tag));
 
         if (channel) {
             release.channel = {
@@ -150,6 +151,14 @@ function scrapeSceneT1({ html, q, qa, qd, ql, qtx }, site, url, baseRelease) {
     return release;
 }
 
+async function getChannelRegExp(site) {
+    if (!['hushpass', 'interracialpass'].includes(site.network.slug)) return null;
+
+    const sites = await knex('sites').where('network_id', site.network.id);
+
+    return new RegExp(sites.map(channel => channel.parameters?.match || channel.name).join('|'), 'i');
+}
+
 async function fetchLatest(site, page = 1) {
     const url = (site.parameters?.latest && util.format(site.parameters.latest, page))
         || (site.parameters?.t1 && `${site.url}/t1/categories/movies_${page}_d.html`)
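
The new getChannelRegExp replaces the hard-coded Hush Pass tag list with an alternation built from the network's own site rows, using parameters.match as an override when a channel's tag spelling differs from its name, which is what lets the same code serve the newly added Interracial Pass sites. Roughly what the generated expression and the tag lookup in scrapeSceneT1 do, with channel names taken from the old hard-coded pattern and an illustrative override:

// Sketch only: illustrative site rows, not actual database contents.
const sites = [
    { name: 'WhiteZilla', parameters: null },
    { name: 'MILF Invaders', parameters: null },
    { name: 'Bubble Butt Bonanza', parameters: { match: 'Bubble Butt' } }, // hypothetical override
];

const channelRegExp = new RegExp(sites.map(channel => channel.parameters?.match || channel.name).join('|'), 'i');

const tags = ['Anal', 'whitezilla', 'Threesome'];
console.log(tags.find(tag => channelRegExp.test(tag))); // 'whitezilla', matched case-insensitively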
@@ -162,16 +171,17 @@ async function fetchLatest(site, page = 1) {
     return site.parameters?.t1 ? scrapeLatestT1(qLatest, site) : scrapeLatest(qLatest, site);
 }
 
-async function fetchScene(url, site, baseRelease) {
+async function fetchScene(url, site, baseRelease, beforeFetchLatest) {
+    const channelRegExp = beforeFetchLatest || await getChannelRegExp(site);
     const qScene = await get(url);
 
     if (!qScene) return null;
 
-    return site.parameters?.t1 ? scrapeSceneT1(qScene, site, url, baseRelease) : scrapeScene(qScene, site, url, baseRelease);
+    return site.parameters?.t1 ? scrapeSceneT1(qScene, site, url, baseRelease, channelRegExp) : scrapeScene(qScene, site, url, baseRelease);
 }
 
 module.exports = {
-    // preflight,
+    beforeFetchLatest: getChannelRegExp,
     fetchLatest,
     fetchScene,
 };
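
Exporting beforeFetchLatest: getChannelRegExp means the expression can be built once per site and handed back into fetchScene, which only falls back to its own database lookup when no precomputed value is supplied. A sketch of how a caller might reuse it; the wrapper itself is not part of this commit:

// Hypothetical wrapper around the functions in this diff.
async function scrapeNetworkScene(sceneUrl, site) {
    // Build the channel pattern once per site or network...
    const channelRegExp = await getChannelRegExp(site);

    // ...and pass it through so scrapeSceneT1 avoids another database round trip.
    return fetchScene(sceneUrl, site, null, channelRegExp);
}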