Added PurgatoryX scraper.

DebaucheryLibrarian 2021-11-27 23:55:16 +01:00
parent 2539e88f47
commit 9d7183ac69
32 changed files with 196 additions and 16 deletions

View File

@@ -272,7 +272,7 @@ import Pagination from '../pagination/pagination.vue';
 const toggleValues = [true, null, false];
 const boobSizes = 'ABCDEFGHIJKZ'.split('');

-const topCountries = ['AU', 'BR', 'DE', 'JP', 'RU', 'GB', 'US'];
+const topCountries = ['AU', 'BR', 'CZ', 'DE', 'JP', 'RU', 'GB', 'US'];

 function updateFilters() {
   this.$router.push({

24 binary image files added (1.5 KiB to 24 KiB each; previews not shown).

View File

@@ -52,6 +52,12 @@ const parentNetworks = [
     },
     parent: 'gamma',
   },
+  {
+    slug: 'radical',
+    alias: ['kb productions'],
+    name: 'Radical Entertainment',
+    url: 'https://radicalcash.com',
+  },
 ];

 const networks = [
@@ -550,6 +556,7 @@ const networks = [
     slug: 'topwebmodels',
     name: 'Top Web Models',
     url: 'https://tour.topwebmodels.com',
+    parent: 'radical',
     parameters: {
       apiKey: '5b637cd8c4bc59cd13686f1c38dcb780',
     },
@@ -624,12 +631,12 @@ const networks = [
   },
 ];

-exports.seed = knex => Promise.resolve()
+exports.seed = (knex) => Promise.resolve()
   .then(async () => {
-    const grandParentNetworkEntries = await upsert('entities', grandParentNetworks.map(network => ({ ...network, type: 'network' })), ['slug', 'type'], knex);
+    const grandParentNetworkEntries = await upsert('entities', grandParentNetworks.map((network) => ({ ...network, type: 'network' })), ['slug', 'type'], knex);
     const grandParentNetworksBySlug = [].concat(grandParentNetworkEntries.inserted, grandParentNetworkEntries.updated).reduce((acc, network) => ({ ...acc, [network.slug]: network.id }), {});

-    const parentNetworksWithGrandParent = parentNetworks.map(network => ({
+    const parentNetworksWithGrandParent = parentNetworks.map((network) => ({
       slug: network.slug,
       name: network.name,
       type: network.type || 'network',
@@ -643,7 +650,7 @@ exports.seed = knex => Promise.resolve()
     const parentNetworkEntries = await upsert('entities', parentNetworksWithGrandParent, ['slug', 'type'], knex);
     const parentNetworksBySlug = [].concat(parentNetworkEntries.inserted, parentNetworkEntries.updated).reduce((acc, network) => ({ ...acc, [network.slug]: network.id }), {});

-    const networksWithParent = networks.map(network => ({
+    const networksWithParent = networks.map((network) => ({
       slug: network.slug,
       name: network.name,
       type: network.type || 'network',
@@ -665,14 +672,14 @@ exports.seed = knex => Promise.resolve()
       networkEntries.updated,
     ).reduce((acc, network) => ({ ...acc, [network.slug]: network.id }), {});

-    const tagSlugs = networks.map(network => network.tags).flat().filter(Boolean);
+    const tagSlugs = networks.map((network) => network.tags).flat().filter(Boolean);
     const tagEntries = await knex('tags').whereIn('slug', tagSlugs);
     const tagIdsBySlug = tagEntries.reduce((acc, tag) => ({ ...acc, [tag.slug]: tag.id }), {});

     const tagAssociations = networks
-      .map(network => (network.tags
-        ? network.tags.map(tagSlug => ({
+      .map((network) => (network.tags
+        ? network.tags.map((tagSlug) => ({
           entity_id: networkIdsBySlug[network.slug],
           tag_id: tagIdsBySlug[tagSlug],
           inherit: true,

View File

@@ -7235,6 +7235,14 @@ const sites = [
     parent: 'puretaboo',
   },
   */
+  // RADICAL ENTERTAINMENT
+  {
+    name: 'PurgatoryX',
+    slug: 'purgatoryx',
+    url: 'https://tour.purgatoryx.com',
+    independent: true,
+    parent: 'radical',
+  },
   // REALITY KINGS
   {
     name: 'Look At Her Now',
@@ -10706,7 +10714,7 @@ const sites = [
 ];

 /* eslint-disable max-len */
-exports.seed = knex => Promise.resolve()
+exports.seed = (knex) => Promise.resolve()
   .then(async () => {
     const networks = await knex('entities')
       .where('type', 'network')
@@ -10717,7 +10725,7 @@ exports.seed = knex => Promise.resolve()
     const tags = await knex('tags').select('*').whereNull('alias_for');
     const tagsMap = tags.reduce((acc, { id, slug }) => ({ ...acc, [slug]: id }), {});

-    const sitesWithNetworks = sites.map(site => ({
+    const sitesWithNetworks = sites.map((site) => ({
       slug: site.slug,
       name: site.name,
       type: site.type || 'channel',
@@ -10735,8 +10743,8 @@ exports.seed = knex => Promise.resolve()
     const { inserted, updated } = await upsert('entities', sitesWithNetworks, ['slug', 'type'], knex);
     const sitesMap = [].concat(inserted, updated).reduce((acc, { id, slug }) => ({ ...acc, [slug]: id }), {});

-    const tagAssociations = sites.map(site => (site.tags
-      ? site.tags.map(tagSlug => ({
+    const tagAssociations = sites.map((site) => (site.tags
+      ? site.tags.map((tagSlug) => ({
         entity_id: sitesMap[site.slug],
         tag_id: tagsMap[tagSlug],
         inherit: true,

View File

@@ -178,7 +178,7 @@ async function scrapeReleases(baseReleases, entitiesBySlug, type) {
       return [slug, { ...entity, preData }];
     }

-    return null;
+    return [slug, entity];
   }));

   const entitiesWithBeforeDataBySlug = Object.fromEntries(entitiesWithBeforeDataEntries.filter(Boolean));

View File

@@ -179,6 +179,8 @@ async function getSession(site, parameters) {
   const cookieString = await cookieJar.getCookieStringAsync(sessionUrl);
   const { instance_token: instanceToken } = cookie.parse(cookieString);

+  console.log(site.name, instanceToken);
+
   return { session, instanceToken };
 }
@@ -240,6 +242,8 @@ async function fetchLatest(site, page = 1, options) {
   const { session, instanceToken } = options.beforeNetwork?.headers?.Instance ? options.beforeNetwork : await getSession(site, options.parameters);

+  console.log('fetch', instanceToken);
+
   const beforeDate = moment().add('1', 'day').format('YYYY-MM-DD');
   const limit = 24;
   const apiUrl = site.parameters?.native || site.parameters?.extract

src/scrapers/purgatoryx.js (new file, 152 lines)
View File

@@ -0,0 +1,152 @@
'use strict';

const qu = require('../utils/qu');
const http = require('../utils/http');
const slugify = require('../utils/slugify');
const { feetInchesToCm, lbsToKg } = require('../utils/convert');

function scrapeAll(scenes) {
  return scenes.map(({ query }) => {
    const release = {};

    release.title = query.cnt('.title');
    release.url = query.url('.title a');
    release.entryId = new URL(release.url).pathname.match(/\/view\/(\d+)/)[1];

    release.date = query.date('.pub-date', 'MMM DD, YYYY');
    release.duration = query.duration('.video-duration');

    release.actors = query.all('.models a').map((el) => ({
      name: query.cnt(el),
      url: query.url(el, null),
    }));

    if (query.exists('.thumb-big')) { // updates page
      release.poster = query.img('.thumb-big', 'data-image') || JSON.parse(query.el('.thumbnails-wrap a', 'data-images'));
      release.photos = [query.img('.thumb-top', 'data-image'), query.img('.thumb-bottom', 'data-image')];
    }

    if (query.exists('.thumbnails-wrap')) { // actor page
      try {
        const images = JSON.parse(query.el('.thumbnails-wrap a', 'data-images'));

        release.poster = images.slice(0, 1)[0];
        release.photos = images.slice(1);
      } catch (error) {
        // images probably not available
      }
    }

    console.log(release.photos);

    return release;
  });
}
function scrapeScene({ query }, url) {
  const release = {};

  release.title = query.cnt('.title');
  release.entryId = new URL(url).pathname.match(/\/view\/(\d+)/)[1];

  release.date = query.date('.date', 'MMMM DD, YYYY', /\w+ \d{1,2}, \d{4}/);
  release.description = query.cnt('.description p');
  release.duration = query.duration('.total-time');

  release.actors = query.all('.model-wrap li').map((el) => ({
    name: query.cnt(el, 'h5'),
    url: query.url(el, 'a'),
    avatar: query.img(el),
  }));

  release.poster = query.poster();
  release.photos = query.urls('.photos-slider a');
  release.trailer = query.video();

  release.comment = query.cnt('.series');

  console.log(release);

  return release;
}

async function fetchLatest(channel, page) {
  const res = await qu.getAll(`${channel.url}/episodes?page=${page}`, '.content-item');

  if (res.ok) {
    return scrapeAll(res.items, channel);
  }

  return res.status;
}
function scrapeProfile({ query }, url) {
  const profile = { url };

  const bio = Object.fromEntries(query.all('.model-desc li').map((el) => [slugify(query.cnt(el, 'span'), '_'), query.text(el)]));

  profile.description = bio.bio;

  profile.dateOfBirth = qu.extractDate(bio.birthdate, 'YYYY-MM-DD');
  profile.birthPlace = bio.birthplace;

  profile.hairColor = bio.hair_color;
  profile.eyes = bio.eye_color;

  profile.height = feetInchesToCm(bio.height);
  profile.weight = lbsToKg(bio.weight);
  profile.measurements = bio.measurements;

  profile.avatar = query.img('.model-pic img');

  profile.scenes = scrapeAll(qu.initAll(query.all('.content-item')));

  console.log(bio);
  console.log(profile);

  return profile;
}

async function searchActor(baseActor, channel) {
  const searchRes = await http.post(`${channel.url}/search-preview`, { q: slugify(baseActor.name, ' ') }, {
    encodeJSON: false,
    headers: {
      'Accept-Language': 'en-US,en;',
    },
  });

  if (searchRes.ok) {
    const actorUrl = searchRes.body.find((item) => item.type === 'model' && slugify(item.title) === baseActor.slug)?.url;

    return actorUrl || null;
  }

  return null;
}
async function fetchProfile(baseActor, context, include, retry = false) {
  const actorUrl = (!retry && baseActor.url) || await searchActor(baseActor, context.entity);

  if (!actorUrl) {
    return null;
  }

  const res = await qu.get(actorUrl);

  if (res.ok) {
    return scrapeProfile(res.item, actorUrl);
  }

  if (baseActor.url) {
    return fetchProfile(baseActor, context, include, true);
  }

  return res.status;
}

module.exports = {
  fetchLatest,
  fetchProfile,
  scrapeAll,
  scrapeScene,
};
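For reference, a minimal sketch of how the new module can be exercised on its own. The require path and the shape of the channel argument are assumptions for illustration; in the app the scraper is resolved through src/scrapers/scrapers.js and receives a full entity record.

// Standalone sketch (illustrative only): call the new scraper directly.
// The channel object stands in for the 'purgatoryx' entity seeded above.
const purgatoryx = require('./src/scrapers/purgatoryx');

async function main() {
  const channel = { slug: 'purgatoryx', url: 'https://tour.purgatoryx.com' };

  // fetchLatest(channel, page) returns an array of scraped releases on success,
  // or the HTTP status code on failure.
  const releases = await purgatoryx.fetchLatest(channel, 1);

  if (Array.isArray(releases)) {
    console.log(releases.map((release) => release.title));
  } else {
    console.log('request failed with status', releases);
  }
}

main();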

View File

@@ -51,6 +51,7 @@ const pascalssubsluts = require('./pascalssubsluts'); // reserved keyword
 const pierrewoodman = require('./pierrewoodman');
 const pinkyxxx = require('./pinkyxxx');
 const privateNetwork = require('./private'); // reserved keyword
+const purgatoryx = require('./purgatoryx'); // reserved keyword
 const score = require('./score');
 const spizoo = require('./spizoo');
 const teamskeet = require('./teamskeet');
@@ -136,6 +137,7 @@ const scrapers = {
   porncz,
   pornpros: whalemember,
   private: privateNetwork,
+  purgatoryx,
   score,
   sexyhub: mindgeek,
   spizoo,
@@ -255,6 +257,7 @@ const scrapers = {
   povperverts: fullpornnetwork,
   povpornstars: hush,
   private: privateNetwork,
+  purgatoryx,
   realitykings: mindgeek,
   realvr: badoink,
   roccosiffredi: famedigital,

View File

@@ -39,9 +39,11 @@ function filterLocalUniqueReleases(releases, accReleases) {
 async function filterUniqueReleases(releases) {
   const releaseIdentifiers = releases
-    .map((release) => [release.entity.id, release.entryId]);
+    .map((release) => [release.entity.id, release.entryId.toString()]);

-  const duplicateReleaseEntries = await knex('releases')
+  console.log(releaseIdentifiers.length);
+
+  const duplicateReleaseEntriesQuery = knex('releases')
     .select(knex.raw('releases.*, row_to_json(entities) as entity'))
     .leftJoin('entities', 'entities.id', 'releases.entity_id')
     .whereIn(['entity_id', 'entry_id'], releaseIdentifiers)
@@ -55,6 +57,10 @@ async function filterUniqueReleases(releases) {
       .orWhere(knex.raw('updated_at - date > INTERVAL \'1 day\'')); // scene was updated after the release date, no updates expected
     });

+  console.log(duplicateReleaseEntriesQuery.toString());
+
+  const duplicateReleaseEntries = await duplicateReleaseEntriesQuery;
+
   const duplicateReleases = duplicateReleaseEntries.map((release) => curateRelease(release));
   const duplicateReleasesByEntityIdAndEntryId = duplicateReleases.reduce(mapReleasesToEntityIdAndEntryId, {});
@@ -262,7 +268,7 @@ async function scrapeNetworkSequential(networkEntity) {
 }

 async function scrapeNetworkParallel(networkEntity) {
-  const beforeNetwork = await networkEntity.scraper.beforeNetwork?.(networkEntity);
+  const beforeNetwork = await networkEntity.scraper?.beforeNetwork?.(networkEntity);

   return Promise.map(
     networkEntity.includedChildren,
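A minimal sketch of the duplicate lookup the hunks above adjust, assuming a configured knex instance and the releases/entities schema implied by the query. Entry IDs are stringified so numeric IDs coming from scrapers compare equal to the text entry_id column in the tuple whereIn, and the query is kept as an object so its SQL can be logged before execution.

// Sketch only (illustrative, not the committed code): build the duplicate
// lookup as a query object, log the generated SQL, then await it.
async function findDuplicateReleases(knex, releases) {
  // Normalize entry IDs to strings so numeric IDs match the text column.
  const releaseIdentifiers = releases.map((release) => [release.entity.id, release.entryId.toString()]);

  const query = knex('releases')
    .select(knex.raw('releases.*, row_to_json(entities) as entity'))
    .leftJoin('entities', 'entities.id', 'releases.entity_id')
    .whereIn(['entity_id', 'entry_id'], releaseIdentifiers);

  console.log(query.toString()); // inspect the composite-key SQL before running it

  return query;
}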