Added PurgatoryX scraper.
@@ -178,7 +178,7 @@ async function scrapeReleases(baseReleases, entitiesBySlug, type) {
			return [slug, { ...entity, preData }];
		}

-		return null;
+		return [slug, entity];
	}));

	const entitiesWithBeforeDataBySlug = Object.fromEntries(entitiesWithBeforeDataEntries.filter(Boolean));
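To make the change above concrete: returning [slug, entity] instead of null keeps entities that produced no preData in the resulting map, since Object.fromEntries() plus filter(Boolean) would otherwise drop them. A minimal sketch with invented entity data, not code from this commit:

    // Hypothetical entries as produced by the map above.
    const entries = [
        ['sitea', { name: 'Site A', preData: { token: 'abc' } }],
        ['siteb', { name: 'Site B' }], // no preData available
    ];

    // Previously the second entry would have been null and filtered out;
    // now both sites survive into the lookup object.
    const entitiesWithBeforeDataBySlug = Object.fromEntries(entries.filter(Boolean));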
@@ -179,6 +179,8 @@ async function getSession(site, parameters)
	const cookieString = await cookieJar.getCookieStringAsync(sessionUrl);
	const { instance_token: instanceToken } = cookie.parse(cookieString);

+	console.log(site.name, instanceToken);
+
	return { session, instanceToken };
}
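For reference on the instance_token extraction above: cookie.parse() from the cookie package turns a cookie header string into a plain object keyed by cookie name. A small sketch with an invented cookie string:

    const cookie = require('cookie');

    // Cookie names and values are made up for illustration.
    const cookieString = 'instance_token=abc123; lang=en';
    const { instance_token: instanceToken } = cookie.parse(cookieString);

    console.log(instanceToken); // 'abc123'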
@@ -240,6 +242,8 @@ async function fetchLatest(site, page = 1, options) {
	const { session, instanceToken } = options.beforeNetwork?.headers?.Instance ? options.beforeNetwork : await getSession(site, options.parameters);

+	console.log('fetch', instanceToken);
+
	const beforeDate = moment().add('1', 'day').format('YYYY-MM-DD');
	const limit = 24;
	const apiUrl = site.parameters?.native || site.parameters?.extract
src/scrapers/purgatoryx.js (new file, 152 lines)
@@ -0,0 +1,152 @@
'use strict';

const qu = require('../utils/qu');
const http = require('../utils/http');
const slugify = require('../utils/slugify');
const { feetInchesToCm, lbsToKg } = require('../utils/convert');

function scrapeAll(scenes) {
	return scenes.map(({ query }) => {
		const release = {};

		release.title = query.cnt('.title');
		release.url = query.url('.title a');
		release.entryId = new URL(release.url).pathname.match(/\/view\/(\d+)/)[1];

		release.date = query.date('.pub-date', 'MMM DD, YYYY');
		release.duration = query.duration('.video-duration');

		release.actors = query.all('.models a').map((el) => ({
			name: query.cnt(el),
			url: query.url(el, null),
		}));

		if (query.exists('.thumb-big')) { // updates page
			release.poster = query.img('.thumb-big', 'data-image') || JSON.parse(query.el('.thumbnails-wrap a', 'data-images'));
			release.photos = [query.img('.thumb-top', 'data-image'), query.img('.thumb-bottom', 'data-image')];
		}

		if (query.exists('.thumbnails-wrap')) { // actor page
			try {
				const images = JSON.parse(query.el('.thumbnails-wrap a', 'data-images'));

				release.poster = images.slice(0, 1)[0];
				release.photos = images.slice(1);
			} catch (error) {
				// images probably not available
			}
		}

		console.log(release.photos);

		return release;
	});
}

function scrapeScene({ query }, url) {
	const release = {};

	release.title = query.cnt('.title');
	release.entryId = new URL(url).pathname.match(/\/view\/(\d+)/)[1];
	release.date = query.date('.date', 'MMMM DD, YYYY', /\w+ \d{1,2}, \d{4}/);

	release.description = query.cnt('.description p');
	release.duration = query.duration('.total-time');

	release.actors = query.all('.model-wrap li').map((el) => ({
		name: query.cnt(el, 'h5'),
		url: query.url(el, 'a'),
		avatar: query.img(el),
	}));

	release.poster = query.poster();
	release.photos = query.urls('.photos-slider a');
	release.trailer = query.video();

	release.comment = query.cnt('.series');

	console.log(release);

	return release;
}

async function fetchLatest(channel, page) {
	const res = await qu.getAll(`${channel.url}/episodes?page=${page}`, '.content-item');

	if (res.ok) {
		return scrapeAll(res.items, channel);
	}

	return res.status;
}

function scrapeProfile({ query }, url) {
	const profile = { url };

	const bio = Object.fromEntries(query.all('.model-desc li').map((el) => [slugify(query.cnt(el, 'span'), '_'), query.text(el)]));

	profile.description = bio.bio;

	profile.dateOfBirth = qu.extractDate(bio.birthdate, 'YYYY-MM-DD');
	profile.birthPlace = bio.birthplace;

	profile.hairColor = bio.hair_color;
	profile.eyes = bio.eye_color;

	profile.height = feetInchesToCm(bio.height);
	profile.weight = lbsToKg(bio.weight);
	profile.measurements = bio.measurements;

	profile.avatar = query.img('.model-pic img');

	profile.scenes = scrapeAll(qu.initAll(query.all('.content-item')));

	console.log(bio);
	console.log(profile);

	return profile;
}

async function searchActor(baseActor, channel) {
	const searchRes = await http.post(`${channel.url}/search-preview`, { q: slugify(baseActor.name, ' ') }, {
		encodeJSON: false,
		headers: {
			'Accept-Language': 'en-US,en;',
		},
	});

	if (searchRes.ok) {
		const actorUrl = searchRes.body.find((item) => item.type === 'model' && slugify(item.title) === baseActor.slug)?.url;

		return actorUrl || null;
	}

	return null;
}

async function fetchProfile(baseActor, context, include, retry = false) {
	const actorUrl = (!retry && baseActor.url) || await searchActor(baseActor, context.entity);

	if (!actorUrl) {
		return null;
	}

	const res = await qu.get(actorUrl);

	if (res.ok) {
		return scrapeProfile(res.item, actorUrl);
	}

	if (baseActor.url) {
		return fetchProfile(baseActor, context, include, true);
	}

	return res.status;
}

module.exports = {
	fetchLatest,
	fetchProfile,
	scrapeAll,
	scrapeScene,
};
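A quick sketch, not part of the commit, of how the new scraper's exports would typically be called; the channel and actor objects below are invented stand-ins for the entities the scraper registry normally supplies:

    const purgatoryx = require('./src/scrapers/purgatoryx');

    const channel = { url: 'https://www.purgatoryx.com', slug: 'purgatoryx' }; // assumed URL
    const actor = { name: 'Jane Doe', slug: 'jane-doe', url: null };

    (async () => {
        // Array of releases on success, or an HTTP status code on failure.
        const releases = await purgatoryx.fetchLatest(channel, 1);

        // Profile object on success, null if the actor can't be found.
        const profile = await purgatoryx.fetchProfile(actor, { entity: channel });

        console.log(releases, profile);
    })();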
@@ -51,6 +51,7 @@ const pascalssubsluts = require('./pascalssubsluts'); // reserved keyword
const pierrewoodman = require('./pierrewoodman');
const pinkyxxx = require('./pinkyxxx');
const privateNetwork = require('./private'); // reserved keyword
+const purgatoryx = require('./purgatoryx'); // reserved keyword
const score = require('./score');
const spizoo = require('./spizoo');
const teamskeet = require('./teamskeet');
@@ -136,6 +137,7 @@ const scrapers = {
	porncz,
	pornpros: whalemember,
	private: privateNetwork,
+	purgatoryx,
	score,
	sexyhub: mindgeek,
	spizoo,
@@ -255,6 +257,7 @@ const scrapers = {
	povperverts: fullpornnetwork,
	povpornstars: hush,
	private: privateNetwork,
+	purgatoryx,
	realitykings: mindgeek,
	realvr: badoink,
	roccosiffredi: famedigital,
@@ -39,9 +39,11 @@ function filterLocalUniqueReleases(releases, accReleases) {

async function filterUniqueReleases(releases) {
	const releaseIdentifiers = releases
-		.map((release) => [release.entity.id, release.entryId]);
+		.map((release) => [release.entity.id, release.entryId.toString()]);

-	const duplicateReleaseEntries = await knex('releases')
+	console.log(releaseIdentifiers.length);
+
+	const duplicateReleaseEntriesQuery = knex('releases')
		.select(knex.raw('releases.*, row_to_json(entities) as entity'))
		.leftJoin('entities', 'entities.id', 'releases.entity_id')
		.whereIn(['entity_id', 'entry_id'], releaseIdentifiers)
@@ -55,6 +57,10 @@ async function filterUniqueReleases(releases) {
				.orWhere(knex.raw('updated_at - date > INTERVAL \'1 day\'')); // scene was updated after the release date, no updates expected
		});

+	console.log(duplicateReleaseEntriesQuery.toString());
+
+	const duplicateReleaseEntries = await duplicateReleaseEntriesQuery;
+
	const duplicateReleases = duplicateReleaseEntries.map((release) => curateRelease(release));
	const duplicateReleasesByEntityIdAndEntryId = duplicateReleases.reduce(mapReleasesToEntityIdAndEntryId, {});
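The .toString() cast introduced above only matters if some scrapers yield numeric entryIds: the composite whereIn compares against the entry_id column as text, so 318 and '318' need to be normalised before the lookup. A sketch of that intent, assuming entry_id is a text column (the data and function name are invented):

    // Illustration only, not the repository's code.
    async function findDuplicateReleases(knex, releases) {
        const releaseIdentifiers = releases
            .map((release) => [release.entity.id, release.entryId.toString()]); // 318 -> '318'

        return knex('releases')
            .whereIn(['entity_id', 'entry_id'], releaseIdentifiers);
    }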
@@ -262,7 +268,7 @@ async function scrapeNetworkSequential(networkEntity) {
}

async function scrapeNetworkParallel(networkEntity) {
-	const beforeNetwork = await networkEntity.scraper.beforeNetwork?.(networkEntity);
+	const beforeNetwork = await networkEntity.scraper?.beforeNetwork?.(networkEntity);

	return Promise.map(
		networkEntity.includedChildren,
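Tying this back to the getSession and fetchLatest hunks earlier: the optional beforeNetwork hook runs once per network, and its result is handed to each child site so fetchLatest can reuse the session instead of logging in per site. A minimal sketch of that flow, with the helper names inferred from the hunks above rather than copied from the repository:

    // Sketch only; Promise.all stands in for the Promise.map used in the diff.
    async function scrapeNetwork(networkEntity) {
        // Run the network-wide hook once, e.g. to log in and obtain a session.
        const beforeNetwork = await networkEntity.scraper?.beforeNetwork?.(networkEntity);

        return Promise.all(networkEntity.includedChildren.map((childEntity) => (
            // fetchLatest(site, page, options) checks options.beforeNetwork and
            // falls back to getSession() when no usable session is present.
            networkEntity.scraper.fetchLatest(childEntity, 1, { beforeNetwork })
        )));
    }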