Merge branch 'experimental' into master

DebaucheryLibrarian committed 2020-08-23 03:31:52 +02:00
commit 42247449f8
7 changed files with 78 additions and 70 deletions

View File

@@ -230,24 +230,26 @@ const networks = [
   {
     slug: 'hussiepass',
     name: 'Hussie Pass',
-    url: 'http://www.hussiepass.com',
+    url: 'https://www.hussiepass.com',
     parent: 'hush',
   },
   {
     slug: 'hushpass',
     name: 'Hush Pass',
-    url: 'http://www.hushpass.com',
+    url: 'https://www.hushpass.com',
     parent: 'hush',
     parameters: {
+      t1: true,
       sequential: true,
     },
   },
   {
     slug: 'interracialpass',
     name: 'Interracial Pass',
-    url: 'http://www.interracialpass.com',
+    url: 'https://www.interracialpass.com',
     parent: 'hush',
     parameters: {
+      t1: true,
       sequential: true,
     },
   },
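Note: the new t1 flag is what routes Hush Pass and Interracial Pass through the T1 scraping path; together with sequential it lets the network scrape filter out releases already collected from categorized sites. A minimal sketch of how the flag is consumed, mirroring the URL selection in the scraper diff further down (latestUrl is an illustrative name, not a function in this commit):

const util = require('util');

// Sketch: site.parameters.t1 switches the scraper to the /t1/ tour template.
// Fallthrough order matches fetchLatest below: explicit 'latest' template,
// then the t1 template, then the classic layout.
function latestUrl(site, page) {
  return (site.parameters?.latest && util.format(site.parameters.latest, page))
    || (site.parameters?.t1 && `${site.url}/t1/categories/movies_${page}_d.html`)
    || `${site.url}/categories/movies_${page}_d.html`;
}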

View File

@@ -2681,6 +2681,7 @@ const sites = [
     name: 'Booty Annihilation',
     tags: ['interracial'],
     parent: 'interracialpass',
+    hasLogo: false,
     parameters: {
       latest: 'https://www.interracialpass.com/t1/categories/BootyAnnihilation_%d_d.html',
       media: 'https://www.interracialpass.com',

View File

@@ -33,9 +33,11 @@ async function findEntities(baseReleases) {
   ));
 
   const entities = await knex('entities')
-    .select(knex.raw('entities.*, row_to_json(parents) as parent'))
+    .select(knex.raw('entities.*, row_to_json(parents) as parent, json_agg(children) as children'))
     .leftJoin('entities as parents', 'parents.id', 'entities.parent_id')
+    .leftJoin('entities as children', 'children.parent_id', 'entities.id')
     .whereIn('entities.slug', entitySlugs)
+    .groupBy('entities.id', 'parents.id')
     .orderBy('entities.type', 'asc');
 
   // channel entity will overwrite network entity
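Note: the children join only works together with the added groupBy, since json_agg is an aggregate; grouping by entities.id and parents.id keeps one row per entity. Roughly the SQL Knex should emit (a sketch assuming PostgreSQL; entityQuerySql is an illustrative name and quoting/bindings differ), with the caveat that json_agg over a LEFT JOIN yields [null] for entities without children, so consumers need to guard for that:

const entityQuerySql = `
  SELECT entities.*, row_to_json(parents) AS parent, json_agg(children) AS children
  FROM entities
  LEFT JOIN entities AS parents ON parents.id = entities.parent_id
  LEFT JOIN entities AS children ON children.parent_id = entities.id
  WHERE entities.slug IN (?)
  GROUP BY entities.id, parents.id
  ORDER BY entities.type ASC
`;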

View File

@@ -86,7 +86,7 @@ async function fetchIncludedEntities() {
     )
     /* select recursive channels as children of networks */
     SELECT
-      entities.*, json_agg(channels) as children
+      entities.*, json_agg(channels ORDER BY channels.id) as children
     FROM
       channels
     LEFT JOIN
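Note: the ORDER BY inside the aggregate makes the children array deterministic across runs, which now matters because the hush scraper builds its channel regexp from site.children (see the scraper diff below), keeping the regexp alternation order stable. A small sketch with a hypothetical children value (channel names and the match parameter are illustrative):

// Hypothetical children array as aggregated by the query above.
const site = {
  type: 'network',
  children: [
    { name: 'Hussie Pass', parameters: null },
    { name: 'Interracial Pass', parameters: { match: 'interracial ?pass' } },
  ],
};

// Same construction as the new scrapeSceneT1: prefer an explicit match
// parameter, fall back to the channel name.
const channelRegExp = new RegExp(
  site.children.map(channel => channel.parameters?.match || channel.name).join('|'),
  'i',
);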

View File

@@ -122,6 +122,7 @@ async function searchReleases(query, limit = 100) {
 }
 
 module.exports = {
+  curateRelease,
   fetchRelease,
   fetchReleases,
   searchReleases,

View File

@@ -2,19 +2,10 @@
 const util = require('util');
 
-const knex = require('../knex');
 const { get, geta, ed, formatDate, ctxa } = require('../utils/q');
 const slugify = require('../utils/slugify');
 const { feetInchesToCm } = require('../utils/convert');
 
-async function getChannelRegExp(site) {
-  if (!['hushpass', 'interracialpass'].includes(site.parent.slug)) return null;
-
-  const sites = await knex('sites').where('network_id', site.parent.id);
-
-  return new RegExp(sites.map(channel => channel.parameters?.match || channel.name).join('|'), 'i');
-}
-
 function deriveEntryId(release) {
   if (release.date && release.title) {
     return `${slugify(formatDate(release.date, 'YYYY-MM-DD'))}-${slugify(release.title)}`;
@@ -80,7 +71,7 @@ function scrapeAll(scenes, site) {
   });
 }
 
-function scrapeAllT1(scenes, site, accSiteReleases) {
+function scrapeAllT1(scenes, site, accNetworkReleases) {
   return scenes.map(({ qu }) => {
     const release = {};
@@ -107,7 +98,7 @@ function scrapeAllT1(scenes, site, accSiteReleases) {
     // release.entryId = q('.img-div img', 'id')?.match(/set-target-(\d+)/)[1];
     release.entryId = deriveEntryId(release);
 
-    if (site.parameters?.accFilter && accSiteReleases?.map(accRelease => accRelease.entryId).includes(release.entryId)) {
+    if (site.parameters?.accFilter && accNetworkReleases?.map(accRelease => accRelease.entryId).includes(release.entryId)) {
       // filter out releases that were already scraped from a categorized site, requires sequential site scraping
       return null;
     }
@@ -160,7 +151,7 @@ function scrapeScene({ html, qu }, site, url, baseRelease) {
   return release;
 }
 
-function scrapeSceneT1({ html, qu }, site, url, baseRelease, channelRegExp) {
+function scrapeSceneT1({ html, qu }, site, url, baseRelease) {
   const release = { url };
 
   release.title = qu.q('.trailer-section-head .section-title', true);
@@ -187,14 +178,12 @@ function scrapeSceneT1({ html, qu }, site, url, baseRelease, channelRegExp) {
   const stars = qu.q('.update-rating', true).match(/\d.\d/)?.[0];
   if (stars) release.stars = Number(stars);
 
-  if (channelRegExp) {
+  if (site.type === 'network') {
+    const channelRegExp = new RegExp(site.children.map(channel => channel.parameters?.match || channel.name).join('|'), 'i');
     const channel = release.tags.find(tag => channelRegExp.test(tag));
 
     if (channel) {
-      release.channel = {
-        force: true,
-        slug: slugify(channel, ''),
-      };
+      release.channel = slugify(channel, '');
     }
   }
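Note: besides sourcing the regexp from site.children instead of a per-site database query, this hunk changes release.channel from a { force, slug } object to a bare slug string, so whatever consumes release.channel downstream has to accept the string form. A short sketch of the tag-to-channel resolution (tag values and the regexp are hypothetical; the slugify path is the one this scraper already requires):

const slugify = require('../utils/slugify');

// Hypothetical scraped tags and a regexp built from site.children.
const release = { tags: ['interracial', 'Booty Annihilation'] };
const channelRegExp = /hussie ?pass|booty ?annihilation/i;

const channelTag = release.tags.find(tag => channelRegExp.test(tag));

if (channelTag) {
  release.channel = slugify(channelTag, ''); // e.g. 'bootyannihilation'
}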
@@ -356,7 +345,7 @@ function scrapeProfileTour({ el, qu }, site) {
   return profile;
 }
 
-async function fetchLatest(site, page = 1, include, preflight, accSiteReleases) {
+async function fetchLatest(site, page = 1, include, { uniqueReleases, duplicateReleases }) {
   const url = (site.parameters?.latest && util.format(site.parameters.latest, page))
     || (site.parameters?.t1 && `${site.url}/t1/categories/movies_${page}_d.html`)
     || `${site.url}/categories/movies_${page}_d.html`;
@@ -364,18 +353,17 @@ async function fetchLatest(site, page = 1, include, preflight, accSiteReleases) {
   const res = await geta(url, '.modelfeature, .item-video, .updateItem');
 
   if (!res.ok) return res.status;
 
-  if (site.parameters?.t1) return scrapeAllT1(res.items, site, accSiteReleases);
-  if (site.parameters?.tour) return scrapeAllTour(res.items, site, accSiteReleases);
+  if (site.parameters?.t1) return scrapeAllT1(res.items, site, [...uniqueReleases, ...duplicateReleases]);
+  if (site.parameters?.tour) return scrapeAllTour(res.items, site);
 
-  return scrapeAll(res.items, site, accSiteReleases);
+  return scrapeAll(res.items, site, uniqueReleases);
 }
 
-async function fetchScene(url, site, baseRelease, beforeFetchLatest) {
-  const channelRegExp = beforeFetchLatest || await getChannelRegExp(site);
-
+async function fetchScene(url, site, baseRelease) {
   const res = await get(url);
 
   if (!res.ok) return res.status;
 
-  if (site.parameters?.t1) return scrapeSceneT1(res.item, site, url, baseRelease, channelRegExp);
+  if (site.parameters?.t1) return scrapeSceneT1(res.item, site, url, baseRelease);
   if (site.parameters?.tour) return scrapeSceneTour(res.item, site, url, baseRelease);
 
   return scrapeScene(res.item, site, url, baseRelease);
@@ -403,7 +391,6 @@ async function fetchProfile({ name: actorName }, { site }) {
 }
 
 module.exports = {
-  beforeFetchLatest: getChannelRegExp,
   fetchLatest,
   fetchScene,
   fetchProfile,

View File

@@ -6,24 +6,31 @@ const moment = require('moment');
 
 const argv = require('./argv');
 const logger = require('./logger')(__filename);
 const knex = require('./knex');
+const { curateRelease } = require('./releases');
 const include = require('./utils/argv-include')(argv);
 const scrapers = require('./scrapers/scrapers');
 const { fetchIncludedEntities } = require('./entities');
 
+const emptyReleases = { uniqueReleases: [], duplicateReleases: [] };
+
 async function filterUniqueReleases(latestReleases, accReleases) {
   const latestReleaseIdentifiers = latestReleases
     .map(release => [release.entity.id, release.entryId]);
 
-  const duplicateReleases = await knex('releases')
+  const duplicateReleaseEntries = await knex('releases')
+    .select(knex.raw('releases.*, row_to_json(entities) as entity'))
+    .leftJoin('entities', 'entities.id', 'releases.entity_id')
     .whereIn(['entity_id', 'entry_id'], latestReleaseIdentifiers);
 
+  const duplicateReleases = duplicateReleaseEntries.map(release => curateRelease(release));
+
   // add entry IDs of accumulated releases to prevent an infinite scrape loop
   // when one page contains the same release as the previous
   const duplicateReleasesSiteIdAndEntryIds = duplicateReleases
     .concat(accReleases)
     .reduce((acc, release) => {
-      const entityId = release.entity_id || release.entity.id;
-      const entryId = release.entry_id || release.entryId;
+      const entityId = release.entityId || release.entity.id;
+      const entryId = release.entryId || release.entryId;
 
       if (!acc[entityId]) acc[entityId] = {};
       acc[entityId][entryId] = true;
@@ -31,10 +38,9 @@ async function filterUniqueReleases(latestReleases, accReleases) {
     return acc;
   }, {});
 
-  const uniqueReleases = latestReleases
-    .filter(release => !duplicateReleasesSiteIdAndEntryIds[release.entity.id]?.[release.entryId]);
+  const uniqueReleases = latestReleases.filter(release => !duplicateReleasesSiteIdAndEntryIds[release.entity.id]?.[release.entryId]);
 
-  return uniqueReleases;
+  return { uniqueReleases, duplicateReleases };
 }
 
 function needNextPage(releases, uniqueReleases, totalReleases, hasDates, upcoming) {
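Note: filterUniqueReleases now returns both partitions instead of only the unique set. The duplicates are joined to their entity row and passed through curateRelease so they share the camel-cased shape of freshly scraped releases; the T1 scrapers need them for accFilter. The accumulator threaded through scrapeReleases below keeps the two sets side by side (a condensed sketch; mergeReleases is an illustrative helper, not part of this commit):

// The accumulator shape used from here on: duplicates ride along instead of
// being discarded, so sequential channel scrapes can filter against them.
const emptyReleases = { uniqueReleases: [], duplicateReleases: [] };

// Merging one page's results into the accumulator, as scrapeReleases does.
function mergeReleases(acc, { uniqueReleases, duplicateReleases }) {
  return {
    uniqueReleases: acc.uniqueReleases.concat(uniqueReleases),
    duplicateReleases: acc.duplicateReleases.concat(duplicateReleases),
  };
}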
@@ -66,7 +72,7 @@ function needNextPage(releases, uniqueReleases, totalReleases, hasDates, upcoming) {
   return false;
 }
 
-async function scrapeReleases(scraper, entity, preData, upcoming = false, page = 1, accReleases = [], totalReleases = 0) {
+async function scrapeReleases(scraper, entity, preData, upcoming = false, page = 1, acc = emptyReleases, totalReleases = 0) {
   const releases = upcoming
     ? await scraper.fetchUpcoming(entity, page, include, preData)
     : await scraper.fetchLatest(entity, page, include, preData);
@@ -74,7 +80,7 @@ async function scrapeReleases(scraper, entity, preData, upcoming = false, page = 1, accReleases = [], totalReleases = 0) {
   if (!Array.isArray(releases)) {
     // scraper is unable to fetch the releases and returned a HTTP code or null
     logger.warn(`Scraper returned ${releases} when fetching latest from '${entity.name}' (${entity.parent?.name})`);
 
-    return accReleases;
+    return acc;
   }
 
   const releasesWithEntity = releases.map(release => ({
@@ -88,20 +94,25 @@ async function scrapeReleases(scraper, entity, preData, upcoming = false, page = 1, accReleases = [], totalReleases = 0) {
     || (hasDates && releasesWithEntity.filter(release => moment(release.date).isAfter(argv.after)))
     || releasesWithEntity.slice(0, Math.max(argv.nullDateLimit - totalReleases, 0));
 
-  const uniqueReleases = argv.force
-    ? limitedReleases
-    : await filterUniqueReleases(limitedReleases, accReleases);
+  const { uniqueReleases, duplicateReleases } = argv.force
+    ? { uniqueReleases: limitedReleases, duplicateReleases: [] }
+    : await filterUniqueReleases(limitedReleases, acc.uniqueReleases);
+
+  const accReleases = {
+    uniqueReleases: acc.uniqueReleases.concat(uniqueReleases),
+    duplicateReleases: acc.duplicateReleases.concat(duplicateReleases),
+  };
 
   if (needNextPage(releases, uniqueReleases, totalReleases, hasDates, upcoming)) {
-    return scrapeReleases(scraper, entity, preData, upcoming, page + 1, accReleases.concat(uniqueReleases), totalReleases + releases.length);
+    return scrapeReleases(scraper, entity, preData, upcoming, page + 1, accReleases, totalReleases + releases.length);
   }
 
-  return accReleases.concat(uniqueReleases);
+  return accReleases;
 }
 
 async function scrapeLatestReleases(scraper, entity, preData) {
-  if (!scraper.fetchLatest) {
-    return [];
+  if (!argv.latest || !scraper.fetchLatest) {
+    return emptyReleases;
   }
 
   try {
@@ -114,12 +125,12 @@ async function scrapeLatestReleases(scraper, entity, preData) {
     logger.warn(`Failed to scrape latest updates for '${entity.slug}' (${entity.parent?.slug}): ${error.message}`);
   }
 
-  return [];
+  return emptyReleases;
 }
 
 async function scrapeUpcomingReleases(scraper, entity, preData) {
-  if (!scraper.fetchUpcoming) {
-    return [];
+  if (!argv.upcoming || !scraper.fetchUpcoming) {
+    return emptyReleases;
   }
 
   try {
@@ -132,11 +143,11 @@ async function scrapeUpcomingReleases(scraper, entity, preData) {
     logger.warn(`Failed to scrape upcoming updates for '${entity.slug}' (${entity.parent?.slug}): ${error.message}`);
   }
 
-  return [];
+  return emptyReleases;
 }
 
 async function scrapeMovies(scraper, entity) {
-  if (!scraper.fetchMovies) {
+  if (!argv.movies || !scraper.fetchMovies) {
     return [];
   }
@@ -152,20 +163,17 @@ async function scrapeMovies(scraper, entity) {
 async function scrapeChannelReleases(scraper, channelEntity, preData) {
   const [latestReleases, upcomingReleases] = await Promise.all([
-    argv.latest
-      ? scrapeLatestReleases(scraper, channelEntity, preData)
-      : [],
-    argv.upcoming
-      ? scrapeUpcomingReleases(scraper, channelEntity, preData)
-      : [],
-    argv.movies
-      ? scrapeMovies(scraper, channelEntity, preData)
-      : [],
+    scrapeLatestReleases(scraper, channelEntity, preData),
+    scrapeUpcomingReleases(scraper, channelEntity, preData),
+    scrapeMovies(scraper, channelEntity, preData),
   ]);
 
-  logger.info(`Fetching ${latestReleases.length} latest and ${upcomingReleases.length} upcoming updates for '${channelEntity.name}' (${channelEntity.parent?.name})`);
+  logger.info(`Fetching ${latestReleases.uniqueReleases.length} latest and ${upcomingReleases.uniqueReleases.length} upcoming updates for '${channelEntity.name}' (${channelEntity.parent?.name})`);
 
-  return [...latestReleases, ...upcomingReleases];
+  return {
+    uniqueReleases: [...latestReleases.uniqueReleases, ...upcomingReleases.uniqueReleases],
+    duplicateReleases: [...latestReleases.duplicateReleases, ...upcomingReleases.duplicateReleases],
+  };
 }
 
 async function scrapeChannel(channelEntity, accNetworkReleases) {
@@ -181,12 +189,10 @@ async function scrapeChannel(channelEntity, accNetworkReleases) {
   try {
     const beforeFetchLatest = await scraper.beforeFetchLatest?.(channelEntity);
 
-    const channelEntityReleases = await scrapeChannelReleases(scraper, channelEntity, {
-      accNetworkReleases,
+    return await scrapeChannelReleases(scraper, channelEntity, {
+      ...accNetworkReleases,
       beforeFetchLatest,
     });
-
-    return channelEntityReleases.map(release => ({ ...release, channelEntity }));
   } catch (error) {
     logger.error(`Failed to scrape releases from ${channelEntity.name} using ${scraper.slug}: ${error.message}`);
@@ -195,22 +201,31 @@ async function scrapeChannel(channelEntity, accNetworkReleases) {
 }
 
 async function scrapeNetworkSequential(networkEntity) {
-  return Promise.reduce(
+  const releases = await Promise.reduce(
     networkEntity.children,
     async (chain, channelEntity) => {
       const accNetworkReleases = await chain;
-      const channelReleases = await scrapeChannel(channelEntity, accNetworkReleases);
+      const { uniqueReleases, duplicateReleases } = await scrapeChannel(channelEntity, accNetworkReleases);
 
-      return accNetworkReleases.concat(channelReleases);
+      return {
+        uniqueReleases: accNetworkReleases.uniqueReleases.concat(uniqueReleases),
+        duplicateReleases: accNetworkReleases.duplicateReleases.concat(duplicateReleases),
      };
     },
-    Promise.resolve([]),
+    Promise.resolve(emptyReleases),
   );
+
+  return releases.uniqueReleases;
 }
 
 async function scrapeNetworkParallel(networkEntity) {
   return Promise.map(
     networkEntity.children,
-    async channelEntity => scrapeChannel(channelEntity, networkEntity),
+    async (channelEntity) => {
+      const { uniqueReleases } = await scrapeChannel(channelEntity, networkEntity);
+
+      return uniqueReleases;
+    },
     { concurrency: 3 },
   );
 }
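Note: only the network-level entry points unwrap the accumulator, so callers of scrapeNetworkSequential and scrapeNetworkParallel still receive plain arrays of unique releases. A data-flow sketch of a sequential scrape (hypothetical network with channels A and B):

// scrapeNetworkSequential(network)
//   scrapeChannel(A, emptyReleases)
//     fetchLatest(siteA, page, include, acc)  // scraper sees unique + duplicate sets
//   scrapeChannel(B, accAfterA)               // B filters entries A already produced
//   return releases.uniqueReleases            // flat array for downstream storage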