forked from DebaucheryLibrarian/traxxx
Added tag groups. Added MOFOS scraper. Improved entry ID handling.
@@ -58,18 +58,18 @@ async function accumulateIncludedSites() {
 async function findDuplicateReleases(latestReleases, _siteId) {
   const latestReleasesShootIds = latestReleases.map(release => release.shootId).filter(release => release !== undefined);
-  const latestReleasesPageIds = latestReleases.map(release => release.pageId).filter(release => release !== undefined);
+  const latestReleasesEntryIds = latestReleases.map(release => release.entryId).filter(release => release !== undefined);

   return knex('releases')
     .whereIn('shoot_id', latestReleasesShootIds)
-    .orWhereIn('shoot_id', latestReleasesPageIds);
+    .orWhereIn('entry_id', latestReleasesEntryIds);
 }

 async function storeReleases(releases) {
   const curatedReleases = releases.map(release => ({
     site_id: release.site.id,
     shoot_id: release.shootId || null,
-    entry_id: release.entry_id || null,
+    entry_id: release.entryId || null,
     url: release.url,
     title: release.title,
     date: release.date,
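The second fix matters because scrapers emit camelCased objects, so `release.entry_id` was always `undefined` and the column was stored as null for every release. A minimal illustration, with a hypothetical release value:

    const release = { shootId: undefined, entryId: '12345' };

    console.log(release.entry_id || null); // null — wrong property name, entry_id was never persisted
    console.log(release.entryId || null); // '12345'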
@@ -97,13 +97,16 @@ async function fetchNewReleases(scraper, site, afterDate, accReleases = [], page
   const latestReleases = await scraper.fetchLatest(site, page);

   const duplicateReleases = await findDuplicateReleases(latestReleases, site.id);
-  const duplicateReleasesShootIds = new Set(
+  const duplicateReleasesIds = new Set(
     duplicateReleases
       .map(release => release.shoot_id)
       // exclude accumulated releases to prevent an infinite loop if the next page contains the same releases as the previous
+      .concat(duplicateReleases.map(release => release.entry_id))
       .concat(accReleases.map(release => release.shootId)),
   );
-  const uniqueReleases = latestReleases.filter(release => !duplicateReleasesShootIds.has(String(release.shootId)) && moment(release.date).isAfter(afterDate));
+  const uniqueReleases = latestReleases.filter(release => !duplicateReleasesIds.has(String(release.shootId))
+    && !duplicateReleasesIds.has(String(release.entryId))
+    && moment(release.date).isAfter(afterDate));

   console.log(`${site.name}: Scraped page ${page}, ${uniqueReleases.length} unique releases`);
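Both identifiers land in a single Set, so a scraped release is dropped when either its shoot ID or its entry ID is already known. `Set.prototype.has` compares with strict equality, which is why the scraped side is coerced with `String(...)`; a small standalone illustration, with hypothetical values:

    const seen = new Set(['310551']); // identifiers from the database, here already strings

    console.log(seen.has(String(310551))); // true — coerced to match
    console.log(seen.has(310551)); // false — Set#has does no type coercion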
@@ -5,6 +5,7 @@ const brazzers = require('./brazzers');
 const julesjordan = require('./julesjordan');
 const kink = require('./kink');
 const legalporno = require('./legalporno');
+const mofos = require('./mofos');
 const pervcity = require('./pervcity');
 const privateNetwork = require('./private'); // reserved keyword
 const vixen = require('./vixen');
@@ -16,6 +17,7 @@ module.exports = {
   julesjordan,
   kink,
   legalporno,
+  mofos,
   pervcity,
   private: privateNetwork,
   vixen,
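With the require and the export in place, the dispatch code (not part of this diff) can presumably resolve a scraper by site key; a hypothetical lookup sketch, with the key name assumed:

    const scrapers = require('./scrapers');

    // hypothetical dispatch; assumes site.id doubles as the scraper key, e.g. 'mofos'
    async function fetchLatestForSite(site, page = 1) {
      const scraper = scrapers[site.id];
      return scraper ? scraper.fetchLatest(site, page) : [];
    }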
src/scrapers/mofos.js (new file, 105 lines)
@@ -0,0 +1,105 @@
'use strict';

/* eslint-disable */
const bhttp = require('bhttp');
const cheerio = require('cheerio');
const moment = require('moment');

const knex = require('../knex');
const { matchTags } = require('../tags');

function scrape(html, site) {
  const $ = cheerio.load(html, { normalizeWhitespace: true });
  const sceneElements = $('.widget-release-card').toArray();

  return sceneElements.map((element) => {
    const sceneLinkElement = $(element).find('.title a');

    const title = sceneLinkElement.text().trim();
    const url = `https://www.mofos.com${sceneLinkElement.attr('href')}`;
    const entryId = url.split('/').slice(-2, -1)[0];
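    // entry ID = second-to-last path segment of the scene URL; e.g. a hypothetical
    // https://www.mofos.com/scene/12345/ would yield '12345'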

    const date = moment.utc($(element).find('.date-added').text(), 'MMM DD, YYYY').toDate();
    const actors = $(element).find('.girls-name a').map((actorIndex, actorElement) => $(actorElement).attr('title').replace(/\s+/g, ' ')).toArray();

    const stars = Number($(element).find('.rating').text().slice(0, -1).trim()) / 20;
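    // e.g. a hypothetical rating text of '85%': slice(0, -1) strips the '%', 85 / 20 = 4.25 stars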

    return {
      url,
      entryId,
      title,
      actors,
      date,
      rating: {
        stars,
      },
      site,
    };
  });
}

async function scrapeScene(html, url, site) {
  const $ = cheerio.load(html, { normalizeWhitespace: true });
  const sceneElement = $('.video-info');

  const entryId = url.split('/').slice(-2, -1)[0];
  const title = sceneElement.find('.title').text();
  const description = sceneElement.find('.desc').text();
  const actors = sceneElement.find('.girls-site-box a.model-name').map((actorIndex, actorElement) => $(actorElement).text().trim()).toArray();

  const siteElement = sceneElement.find('.site-name');
  const sitename = siteElement.text().trim();
  const siteId = sitename.replace(/\s+/g, '').toLowerCase();
  const siteUrl = siteElement.attr('href').split('/').slice(0, 4).join('/');
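  // e.g. a hypothetical site name 'Milfs Like It Big' collapses to siteId 'milfslikeitbig';
  // siteUrl keeps only the leading segments of the channel href, recombined with the domain below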

  const stars = Number(sceneElement.find('.rating-box .rating').text().slice(0, -1).trim()) / 20;

  const rawTags = sceneElement.find('.categories a').map((tagIndex, tagElement) => $(tagElement).text().trim()).toArray();

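  // resolve the channel site by derived id, reconstructed URL, or display name,
  // while the raw tags are matched concurrently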
  const [channelSite, tags] = await Promise.all([
    knex('sites')
      .where({ id: siteId })
      .orWhere({ url: `https://www.mofos.com${siteUrl}` })
      .orWhere({ name: sitename })
      .first(),
    matchTags(rawTags),
  ]);

  return {
    url,
    entryId,
    title,
    actors,
    tags,
    rating: {
      stars,
    },
    site,
  };
}

async function fetchLatest(site, page = 1) {
  const res = page > 1
    ? await bhttp.get(`${site.url}/all-models/all-categories/alltime/bydate/${page}/`)
    : await bhttp.get(`${site.url}/all-models/all-categories/alltime/bydate/`); // explicit page 1 redirects to homepage

  return scrape(res.body.toString(), site);
}

async function fetchUpcoming(site) {
  const res = await bhttp.get(`${site.url}/all-models/all-categories/upcoming/bydate/`);

  return scrape(res.body.toString(), site);
}

async function fetchScene(url, site) {
  const res = await bhttp.get(url);

  return scrapeScene(res.body.toString(), url, site);
}

module.exports = {
  fetchLatest,
  fetchUpcoming,
  fetchScene,
};
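The module exposes the same three entry points as the other scrapers. A hypothetical caller, with the site record shape assumed for illustration:

    const { mofos } = require('./scrapers');

    const site = { name: 'MOFOS', url: 'https://www.mofos.com' }; // assumed shape

    mofos.fetchLatest(site)
      .then(releases => console.log(releases.map(release => release.entryId)));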