Added Reality Kings scraper. Improved site finder.
@@ -103,16 +103,16 @@ async function fetchNewReleases(scraper, site, afterDate, accReleases = [], page
 	const duplicateReleases = await findDuplicateReleases(latestReleases, site.id);

 	const duplicateReleasesIds = new Set(
 		duplicateReleases
-			.map(release => release.shoot_id)
+			.map(release => release.shoot_id || release.entry_id)
 			// exclude accumulated releases to prevent an infinite loop if the next page contains the same releases as the previous
-			.concat(duplicateReleases.map(release => release.entry_id))
-			.concat(accReleases.map(release => release.shootId)),
+			.concat(duplicateReleases.map(release => release.shoot_id || release.entry_id))
+			.concat(accReleases.map(release => release.shootId || release.entryId)),
 	);
 	const uniqueReleases = latestReleases.filter(release => !duplicateReleasesIds.has(String(release.shootId))
 		&& !duplicateReleasesIds.has(String(release.entryId))
 		&& moment(release.date).isAfter(afterDate));

-	console.log(`${site.name}: Scraped page ${page}, ${uniqueReleases.length} unique releases`);
+	console.log(`\x1b[90m${site.name}: Scraped page ${page}, ${uniqueReleases.length} unique recent releases\x1b[0m`);

 	const oldestReleaseOnPage = latestReleases.slice(-1)[0].date;
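Note: the dedup set mixes snake_case columns read back from the database (shoot_id, entry_id) with camelCase fields on freshly scraped releases (shootId, entryId), which is why both spellings are collected. A minimal sketch of the pattern, with hypothetical sample data:

    // Hypothetical rows illustrating the dedup pattern above
    const stored = [{ shoot_id: null, entry_id: '1001' }]; // releases already in the database
    const accumulated = [{ entryId: '1002' }];             // releases gathered from earlier pages

    const seen = new Set(
      stored.map(r => r.shoot_id || r.entry_id)
        .concat(accumulated.map(r => r.shootId || r.entryId)),
    );

    const scraped = [{ entryId: '1001' }, { entryId: '1003' }];
    const unique = scraped.filter(r => !seen.has(String(r.entryId)));
    // => only the release with entryId '1003' remains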
@@ -138,7 +138,7 @@ async function fetchReleases() {
 		scraper.fetchUpcoming ? await scraper.fetchUpcoming(site) : [],
 	]);

-	console.log(`${site.name}: Found ${newReleases.length} new releases, ${upcomingReleases.length} upcoming releases`);
+	console.log(`${site.name}: Found ${newReleases.length} recent releases, ${upcomingReleases.length} upcoming releases`);

 	if (argv.save) {
 		await storeReleases(newReleases);
@@ -7,17 +7,28 @@ const knex = require('./knex');
 const scrapers = require('./scrapers');

 async function findSite(url) {
-	const { protocol, hostname } = new URL(url);
+	const { hostname } = new URL(url);
+	const domain = hostname.replace(/^www./, '');

+	/*
 	const site = await knex('sites')
-		.where({ url: `${protocol}//www.${hostname}` })
-		.orWhere({ url: `${protocol}//${hostname}` })
+		.where({ url: `${protocol}//www.${domain}` })
+		.orWhere({ url: `${protocol}//${domain}` })
 		.first()
 		// scene might use generic network URL, let network scraper determine channel site
 		|| await knex('networks')
 		.where({ url: `${protocol}//www.${hostname}` })
 		.orWhere({ url: `${protocol}//${hostname}` })
 		.first();
+	*/
+
+	const site = await knex('sites')
+		.where('url', 'like', `%${domain}`)
+		.first()
+		// scene might use generic network URL, let network scraper determine channel site
+		|| await knex('networks')
+		.where('url', 'like', `%${domain}`)
+		.first();

 	return {
 		id: site.id,
@@ -25,6 +36,7 @@ async function findSite(url) {
 		description: site.description,
 		url: site.url,
 		networkId: site.network_id || site.id,
 		parameters: site.parameters && JSON.parse(site.parameters),
+		isFallback: site.network_id === undefined,
 	};
 }
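Note: the LIKE match makes the lookup tolerant of http/https and www/no-www variants in the stored URL, at the cost of suffix false positives (a hypothetical notrealitykings.com would also match %realitykings.com). Two nits: the regex /^www./ leaves the dot unescaped, so it strips any fourth character after "www", and the commented-out block still references `protocol`, which is no longer destructured. A standalone sketch of the matching rule, using plain string comparison instead of knex:

    // Hypothetical helper mirroring the LIKE-based lookup above
    function matchesDomain(storedUrl, inputUrl) {
      const domain = new URL(inputUrl).hostname.replace(/^www\./, '');
      return storedUrl.endsWith(domain); // equivalent of where('url', 'like', `%${domain}`)
    }

    matchesDomain('https://www.realitykings.com', 'http://realitykings.com/scene/123'); // true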
@@ -66,9 +66,9 @@ async function scrapeScene(html, url, site) {
 	const date = moment.utc(data.dateCreated, 'YYYY-MM-DD').toDate();

 	const actors = data.actor
-		.sort(({ genderA }, { genderB }) => {
-			if (genderA === 'female' && genderB === 'male') return 1;
-			if (genderA === 'male' && genderB === 'female') return -1;
+		.sort(({ gender: genderA }, { gender: genderB }) => {
+			if (genderA === 'female' && genderB === 'male') return -1;
+			if (genderA === 'male' && genderB === 'female') return 1;

 			return 0;
 		})
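Note: two fixes in one here. `({ genderA }, { genderB })` destructured properties literally named genderA and genderB, which don't exist on the actor objects, so the comparator always compared undefined against the strings and fell through to return 0 — the sort never did anything. `({ gender: genderA }, { gender: genderB })` destructures `gender` and renames it; the return values were also flipped so female performers sort first (-1 sorts earlier). A quick demonstration:

    const actor = { name: 'A', gender: 'female' };
    const { genderA } = actor;         // undefined — looks up a property named 'genderA'
    const { gender: genderB } = actor; // 'female' — destructures 'gender', renamed to genderB

    [{ gender: 'male' }, { gender: 'female' }]
      .sort(({ gender: a }, { gender: b }) => {
        if (a === 'female' && b === 'male') return -1;
        if (a === 'male' && b === 'female') return 1;
        return 0;
      });
    // => [{ gender: 'female' }, { gender: 'male' }]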
@@ -117,13 +117,13 @@ async function scrapeScene(html, url, site) {
 }

 async function fetchLatest(site, page = 1) {
-	const res = await bhttp.get(`${site.parameters.filter ? 'https://21sextury.com' : site.url}/en/videos/All-Categories/0/All-Pornstars/0/latest/${page}`);
+	const res = await bhttp.get(`${site.parameters && site.parameters.filter ? 'https://21sextury.com' : site.url}/en/videos/All-Categories/0/All-Pornstars/0/latest/${page}`);

 	return scrape(res.body.toString(), site);
 }

 async function fetchUpcoming(site) {
-	const res = await bhttp.get(`${site.parameters.filter ? 'https://21sextury.com' : site.url}/en/videos/All-Categories/0/All-Pornstars/0/upcoming`);
+	const res = await bhttp.get(`${site.parameters && site.parameters.filter ? 'https://21sextury.com' : site.url}/en/videos/All-Categories/0/All-Pornstars/0/upcoming`);

 	return scrape(res.body.toString(), site);
 }
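Note: the added `site.parameters &&` guard matters presumably because findSite can now return rows (e.g. a network fallback) whose parameters column is null; the old template literal threw a TypeError before the request was even built. A minimal illustration with a hypothetical row:

    const site = { url: 'https://example.com', parameters: null }; // hypothetical fallback row
    // site.parameters.filter → TypeError: Cannot read property 'filter' of null
    const base = site.parameters && site.parameters.filter ? 'https://21sextury.com' : site.url;
    // base === 'https://example.com'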
@@ -52,9 +52,9 @@ async function scrapeScene(html, url, site) {

 	// const actors = sceneElement.find('.sceneActors a').map((actorIndex, actorElement) => $(actorElement).text().trim()).toArray();
 	const actors = data.actor
-		.sort(({ genderA }, { genderB }) => {
-			if (genderA === 'female' && genderB === 'male') return 1;
-			if (genderA === 'male' && genderB === 'female') return -1;
+		.sort(({ gender: genderA }, { gender: genderB }) => {
+			if (genderA === 'female' && genderB === 'male') return -1;
+			if (genderA === 'male' && genderB === 'female') return 1;

 			return 0;
 		})
@@ -9,6 +9,7 @@ const legalporno = require('./legalporno');
 const mofos = require('./mofos');
 const pervcity = require('./pervcity');
 const privateNetwork = require('./private'); // reserved keyword
+const realitykings = require('./realitykings');
 const twentyonesextury = require('./21sextury');
 const vixen = require('./vixen');
 const xempire = require('./xempire');
@@ -24,6 +25,7 @@ module.exports = {
 	mofos,
 	pervcity,
 	private: privateNetwork,
+	realitykings,
 	vixen,
 	xempire,
 };
src/scrapers/realitykings.js (new file, 110 lines)
@@ -0,0 +1,110 @@
+'use strict';
+
+/* eslint-disable newline-per-chained-call */
+const Promise = require('bluebird');
+const bhttp = require('bhttp');
+const { CookieJar } = Promise.promisifyAll(require('tough-cookie'));
+const cheerio = require('cheerio');
+const moment = require('moment');
+
+const { matchTags } = require('../tags');
+
+function scrapeLatest(html, site) {
+	const $ = cheerio.load(html, { normalizeWhitespace: true });
+	const sceneElements = $('.card.card--release').toArray();
+
+	return sceneElements.map((element) => {
+		const sceneLinkElement = $(element).find('.card-info__title a');
+		const title = sceneLinkElement.attr('title');
+		const url = `${site.url}${sceneLinkElement.attr('href')}`;
+		const entryId = url.split('/').slice(-3)[0];
+
+		const date = moment.utc($(element).find('.card-info__meta-date').text(), 'MMMM DD, YYYY').toDate();
+		const actors = $(element).find('.card-info__cast a').map((actorIndex, actorElement) => $(actorElement).text().trim()).toArray();
+
+		return {
+			url,
+			entryId,
+			title,
+			actors,
+			date,
+			rating: null,
+			site,
+		};
+	});
+}
+
+async function scrapeScene(data, url, site) {
+	const {
+		id: entryId,
+		title,
+		description,
+	} = data;
+
+	const date = new Date(data.dateReleased);
+	const actors = data.actors
+		.sort(({ gender: genderA }, { gender: genderB }) => {
+			if (genderA === 'female' && genderB === 'male') return -1;
+			if (genderA === 'male' && genderB === 'female') return 1;
+
+			return 0;
+		})
+		.map(actor => actor.name);
+
+	const { likes, dislikes } = data.stats;
+	const duration = data.videos.mediabook.length;
+
+	const rawTags = data.tags.map(tag => tag.name);
+	const tags = await matchTags(rawTags);
+
+	return {
+		url,
+		entryId,
+		title,
+		description,
+		actors,
+		date,
+		duration,
+		tags,
+		rating: {
+			likes,
+			dislikes,
+		},
+		site,
+	};
+}
+
+async function fetchLatest(site, page = 1) {
+	const res = await bhttp.get(`https://www.realitykings.com/tour/videos/${site.name.replace(/\s+/g, '-').toLowerCase()}/all-categories/all-time/recent/${page}`);
+
+	return scrapeLatest(res.body.toString(), site);
+}
+
+async function fetchScene(url, site) {
+	if (site.isFallback || (site.parameters && site.parameters.altLayout)) {
+		throw new Error('Cannot fetch scene details from this resource');
+	}
+
+	const entryId = url.split('/').slice(-1)[0];
+
+	const cookieJar = new CookieJar();
+	const session = bhttp.session({ cookieJar });
+
+	await session.get(url);
+
+	const cookies = await cookieJar.getCookieStringAsync(url);
+	const instanceToken = cookies.split(';')[0].split('=')[1];
+
+	const res = await session.get(`https://site-api.project1service.com/v2/releases/${entryId}`, {
+		headers: {
+			Instance: instanceToken,
+		},
+	});
+
+	return scrapeScene(res.body.result.parent, url, site);
+}
+
+module.exports = {
+	fetchLatest,
+	fetchScene,
+};
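Note on fetchScene: the site-api.project1service.com releases endpoint expects an Instance header, and the scraper obtains it by priming a cookie jar with one plain page load, then reading the token back out of the cookie string — assuming the instance token is the first cookie the site sets. A trimmed sketch of just that handshake, under the same assumption:

    const Promise = require('bluebird');
    const bhttp = require('bhttp');
    const { CookieJar } = Promise.promisifyAll(require('tough-cookie'));

    async function getInstanceToken(url) {
      const cookieJar = new CookieJar();
      const session = bhttp.session({ cookieJar });

      await session.get(url); // prime the jar with the cookies a normal visit would set

      const cookies = await cookieJar.getCookieStringAsync(url);
      return cookies.split(';')[0].split('=')[1]; // value of the first cookie
    }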
@@ -12,11 +12,11 @@ function scrapeLatest(html, site) {
 	const sceneElements = $('.scenes-latest').toArray();

 	return sceneElements.map((element) => {
-		const actors = $('.actors a').map((actorIndex, actorElement) => $(actorElement).text()).toArray();
+		const actors = $(element).find('.actors a').map((actorIndex, actorElement) => $(actorElement).text()).toArray();

 		return {
 			url,
-			shootId,
+			entryId,
 			title,
 			actors,
 			date,
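Note: `$('.actors a')` queries the entire document, so every card was assigned the concatenated cast of all cards on the page; `$(element).find('.actors a')` scopes the selector to the current card. A minimal demonstration with hypothetical markup:

    const cheerio = require('cheerio');

    const $ = cheerio.load(`
      <div class="scene"><span class="actors"><a>Alice</a></span></div>
      <div class="scene"><span class="actors"><a>Bob</a></span></div>
    `);

    $('.scene').toArray().map((element) => {
      // $('.actors a') here would match 2 elements — the whole document
      return $(element).find('.actors a').map((i, el) => $(el).text()).toArray();
    });
    // => [['Alice'], ['Bob']]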
@@ -35,11 +35,11 @@ function scrapeUpcoming(html, site) {
 	const sceneElements = $('.scenes-upcoming').toArray();

 	return sceneElements.map((element) => {
-		const actors = $('.actors a').map((actorIndex, actorElement) => $(actorElement).text()).toArray();
+		const actors = $(element).find('.actors a').map((actorIndex, actorElement) => $(actorElement).text()).toArray();

 		return {
 			url,
-			shootId,
+			entryId,
 			title,
 			actors,
 			date,
@@ -56,9 +56,6 @@ function scrapeUpcoming(html, site) {
 async function scrapeScene(html, url, site) {
 	const $ = cheerio.load(html, { normalizeWhitespace: true });

-	const rawTags = [];
-	const tags = await matchTags(rawTags);
-
 	const actors = $('.actors a').map((actorIndex, actorElement) => $(actorElement).text()).toArray();

 	const rawTags = $('.tags a').map((tagIndex, tagElement) => $(tagElement).text()).toArray();
@@ -66,7 +63,7 @@ async function scrapeScene(html, url, site) {

 	return {
 		url,
-		shootId,
+		entryId,
 		title,
 		description,
 		actors,
@@ -57,9 +57,9 @@ async function scrapeScene(html, url, site) {
 	const date = moment.utc(data.dateCreated, 'YYYY-MM-DD').toDate();

 	const actors = data.actor
-		.sort(({ genderA }, { genderB }) => {
-			if (genderA === 'female' && genderB === 'male') return 1;
-			if (genderA === 'male' && genderB === 'female') return -1;
+		.sort(({ gender: genderA }, { gender: genderB }) => {
+			if (genderA === 'female' && genderB === 'male') return -1;
+			if (genderA === 'male' && genderB === 'female') return 1;

 			return 0;
 		})