Split Girlsway from Adult Time. Added Fantasy Massage. Switched Pure Taboo to the Gamma scraper. Added a photo path parameter to the Gamma scraper.

2020-02-08 02:49:39 +01:00
parent bfb26b717a
commit 1546e0836c
57 changed files with 2359 additions and 205 deletions
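The photo path parameter mentioned in the message corresponds to a new `photos` entry in a site's `parameters` object: when it is set, `getPhotos` in the Gamma scraper builds the album URL from that prefix plus the last two segments of the linked album path, instead of using the linked path as-is. A minimal sketch of that URL rewrite, using hypothetical site values (the real prefix and URLs come from the site configuration):

// Hypothetical site record; only the shape matters for this sketch.
const site = {
  url: 'https://www.example-gamma-site.com',
  parameters: {
    photos: '/en/photo/',
  },
};

// Hypothetical album link as scraped from a scene page.
const albumPath = '/en/photo-gallery/some-scene-title/123456';

// Mirrors the new branch in getPhotos(): keep only the last two path
// segments and prepend the configured photos prefix.
const albumUrl = site.parameters?.photos
  ? `${site.url}${site.parameters.photos}${albumPath.split('/').slice(-2).join('/')}`
  : `${site.url}${albumPath}`;

// albumUrl => 'https://www.example-gamma-site.com/en/photo/some-scene-title/123456'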

View File

@@ -22,8 +22,6 @@ async function networkFetchScene(url, site) {
async function fetchLatest(site, page = 1) {
const releases = await fetchApiLatest(site, page, false);
console.log(releases);
return releases.map(release => curateRelease(release, site));
}

View File

@@ -0,0 +1,10 @@
+'use strict';
+const { fetchLatest, fetchUpcoming, fetchScene } = require('./gamma');
+module.exports = {
+fetchLatest,
+fetchScene,
+fetchUpcoming,
+};

View File

@@ -7,7 +7,7 @@ const cheerio = require('cheerio');
const moment = require('moment');
const argv = require('../argv');
-const logger = require('../logger');
+const logger = require('../logger')(__filename);
const { ex, get } = require('../utils/q');
const slugify = require('../utils/slugify');
@@ -20,11 +20,12 @@ async function fetchPhotos(url) {
function scrapePhotos(html) {
const $ = cheerio.load(html, { normalizeWhitespace: true });
-return $('.preview .imgLink').toArray().map((linkEl) => {
+return $('.preview .imgLink, .pgFooterThumb a').toArray().map((linkEl) => {
const url = $(linkEl).attr('href');
-if (url.match('/join')) {
+if (/\/join|\/createaccount/.test(url)) {
// URL links to join page instead of full photo, extract thumbnail
+// /createaccount is used by e.g. Tricky Spa
const src = $(linkEl).find('img').attr('src');
if (src.match('previews/')) {
@@ -46,12 +47,14 @@ function scrapePhotos(html) {
}
async function getPhotos(albumPath, site) {
-const albumUrl = `${site.url}${albumPath}`;
+const albumUrl = site.parameters?.photos
+? `${site.url}${site.parameters.photos}${albumPath.split('/').slice(-2).join('/')}`
+: `${site.url}${albumPath}`;
try {
const html = await fetchPhotos(albumUrl);
const $ = cheerio.load(html, { normalizeWhitespace: true });
-const photos = scrapePhotos(html);
+const photos = scrapePhotos(html, site);
const lastPage = $('.Gamma_Paginator a.last').attr('href')?.match(/\d+$/)[0];
@@ -62,7 +65,7 @@ async function getPhotos(albumPath, site) {
const pageUrl = `${site.url}/${albumPath}/${page}`;
const pageHtml = await fetchPhotos(pageUrl);
-return scrapePhotos(pageHtml);
+return scrapePhotos(pageHtml, site);
}, {
concurrency: 2,
});
@@ -97,9 +100,9 @@ async function scrapeApiReleases(json, site) {
release.actors = scene.actors.map(({ name }) => name);
release.director = scene.directors[0].name;
-console.log(release.url);
-release.tags = scene.master_categories.concat(scene.categories?.map(category => category.name));
+release.tags = scene.master_categories
+.concat(scene.categories?.map(category => category.name))
+.filter(Boolean); // some categories don't have a name
const posterPath = scene.pictures.resized || (scene.pictures.nsfw?.top && Object.values(scene.pictures.nsfw.top)[0]);
@@ -118,38 +121,35 @@ async function scrapeApiReleases(json, site) {
function scrapeAll(html, site, networkUrl, hasTeaser = true) {
const $ = cheerio.load(html, { normalizeWhitespace: true });
-const scenesElements = $('li[data-itemtype=scene]').toArray();
+const scenesElements = $('li[data-itemtype=scene], div[data-itemtype=scenes]').toArray();
return scenesElements.map((element) => {
const release = {};
-const sceneLinkElement = $(element).find('.sceneTitle a');
+const sceneLinkElement = $(element).find('.sceneTitle a, .tlcTitle a');
if (site) release.url = `${networkUrl ? site.network.url : site.url}${sceneLinkElement.attr('href')}`;
else release.url = `${networkUrl}${sceneLinkElement.attr('href')}`;
release.title = sceneLinkElement.attr('title');
release.entryId = $(element).attr('data-itemid');
-const dateEl = $(element).find('.sceneDate').text() || null;
+const dateEl = $(element).find('.sceneDate, .tlcSpecsDate .tlcDetailsValue').text() || null;
if (dateEl) {
release.date = moment
-.utc($(element).find('.sceneDate').text(), ['MM-DD-YYYY', 'YYYY-MM-DD'])
+.utc(dateEl, ['MM-DD-YYYY', 'YYYY-MM-DD'])
.toDate();
}
-release.actors = $(element).find('.sceneActors a')
+release.actors = $(element).find('.sceneActors a, .tlcActors a')
.map((actorIndex, actorElement) => $(actorElement).attr('title'))
.toArray();
-const [likes, dislikes] = $(element).find('.value')
+[release.likes, release.dislikes] = $(element).find('.value')
.toArray()
.map(value => Number($(value).text()));
-release.rating = { likes, dislikes };
-const posterEl = $(element).find('.imgLink img');
+const posterEl = $(element).find('.imgLink img, .tlcImageItem');
if (posterEl) release.poster = posterEl.attr('data-original') || posterEl.attr('src');
if (hasTeaser) {
@@ -189,18 +189,18 @@ async function scrapeScene(html, url, site) {
release.director = data.director?.[0].name || data2?.director?.[0].name;
const actors = data?.actor || data2?.actor || [];
-const hasTrans = actors.some(actor => actor.gender === 'shemale');
release.actors = actors.map(actor => actor.name);
const stars = (data.aggregateRating.ratingValue / data.aggregateRating.bestRating) * 5;
if (stars) release.rating = { stars };
release.duration = moment.duration(data.duration.slice(2)).asSeconds();
-const rawTags = data.keywords?.split(', ');
-release.tags = hasTrans ? [...rawTags, 'transsexual'] : rawTags;
}
+const hasTrans = release.actors.some(actor => actor.gender === 'shemale');
+const rawTags = data?.keywords?.split(', ') || data2?.keywords?.split(', ');
+release.tags = hasTrans ? [...rawTags, 'transsexual'] : rawTags;
const channel = data?.productionCompany?.name || $('.studioLink a').attr('title')?.trim();
if (channel) release.channel = slugify(channel, { delimiter: '' });
@@ -345,7 +345,7 @@ function getApiUrl(appId, apiKey) {
}
async function fetchApiCredentials(referer, site) {
-if (site.parameters?.appId && site.parameters?.apiKey) {
+if (site?.parameters?.appId && site?.parameters?.apiKey) {
return getApiUrl(site.parameters.appId, site.parameters.apiKey);
}
@@ -374,7 +374,7 @@ async function fetchApiLatest(site, page = 1, upcoming = false) {
requests: [
{
indexName: 'all_scenes',
-params: `query=&hitsPerPage=36&maxValuesPerFacet=100&page=${page - 1}&facetFilters=[["lesbian:"],["bisex:"],["shemale:"],["upcoming:${upcoming ? 1 : 0}"]]&filters=sitename:${site.slug}`,
+params: `query=&hitsPerPage=36&maxValuesPerFacet=100&page=${page - 1}&facetFilters=[["lesbian:"],["bisex:"],["shemale:"],["upcoming:${upcoming ? 1 : 0}"]]&filters=sitename:${site.slug} OR channels.id:${site.slug}`,
},
],
}, {
@@ -384,6 +384,8 @@ async function fetchApiLatest(site, page = 1, upcoming = false) {
encodeJSON: true,
});
console.log(res.body);
if (res.statusCode === 200 && res.body.results?.[0]?.hits) {
return scrapeApiReleases(res.body.results[0].hits, site);
}

src/scrapers/girlsway.js Normal file
View File

@@ -0,0 +1,10 @@
+'use strict';
+const { fetchApiLatest, fetchApiUpcoming, fetchScene } = require('./gamma');
+module.exports = {
+fetchLatest: fetchApiLatest,
+fetchScene,
+fetchUpcoming: fetchApiUpcoming,
+};

View File

@@ -4,7 +4,7 @@
const Promise = require('bluebird');
const bhttp = require('bhttp');
-const logger = require('../logger');
+const logger = require('../logger')(__filename);
const slugify = require('../utils/slugify');
async function fetchToken(site) {

View File

@@ -1,103 +1,10 @@
'use strict';
const bhttp = require('bhttp');
const moment = require('moment');
const { fetchApiLatest, fetchApiUpcoming, fetchScene } = require('./gamma');
// const { fetchLatest } = require('./gamma');
const { ex, ctxa } = require('../utils/q');
const { getPhotos } = require('./gamma');
function scrape(html) {
const { document } = ex(html);
return ctxa(document, '.sceneList .sceneContainer').map(({ q, qa }) => {
const release = {};
const linkEl = q('a.imgLink');
release.entryId = linkEl.dataset.id;
release.title = linkEl.title;
release.url = `https://www.puretaboo.com${linkEl.href}`;
release.actors = qa('.sceneActors a', true);
release.poster = q('.imgLink img').dataset.original;
release.trailer = `https://videothumb.gammacdn.com/307x224/${release.entryId}.mp4`;
return release;
});
}
async function scrapeScene(html, url, site) {
const release = {};
const { q, qa, qd, qm } = ex(html);
const { pathname } = new URL(url);
const dataPrefix = 'dataLayer = [';
const dataStart = html.indexOf(dataPrefix);
const dataString = html.slice(dataStart + dataPrefix.length, html.indexOf('];', dataStart));
const { sceneDetails: data } = JSON.parse(dataString);
const dataString2 = q('script[type="application/ld+json"]', true);
const data2 = JSON.parse(dataString2)[1];
const videoPrefix = 'window.ScenePlayerOptions = ';
const videoStart = html.indexOf(videoPrefix);
const videoString = html.slice(videoStart + videoPrefix.length, html.indexOf('};', videoStart) + 1);
const videoData = JSON.parse(videoString);
release.entryId = data.sceneId || videoData.playerOptions.sceneInfos.sceneId || pathname.slice(pathname.lastIndexOf('/') + 1);
release.url = url;
release.title = data.sceneTitle || videoData.playerOptions.sceneInfos.sceneTitle || qm('meta[name="twitter:title"]') || q('.sceneTitle', true);
release.description = data.sceneDescription || data2.description || q('.sceneDesc', true).replace('Video Description:', '');
release.duration = moment.duration(data2.duration.slice(2)).asSeconds();
// data2.dateCreated is the date the scene was first added to 'upcoming'
release.date = videoData.playerOptions.sceneInfos.sceneReleaseDate
? moment.utc(videoData.playerOptions.sceneInfos.sceneReleaseDate, 'YYYY-MM-DD').toDate()
: qd('.updatedDate', 'MM-DD-YYYY');
release.actors = data.sceneActors?.map(actor => actor.actorName)
|| data2.actor?.map(actor => actor.name)
|| qa('.sceneColActors a', true);
release.tags = data2.keywords.split(', ') || qa('.sceneColCategories a', 'title');
release.poster = videoData.picPreview;
release.trailer = {
src: `${videoData.playerOptions.host}${videoData.url}`,
};
release.photos = await getPhotos(q('.picturesItem a').href, site);
return release;
}
async function fetchLatest(site, page = 1) {
const res = await bhttp.get(`${site.url}/en/videos/AllCategories/0/${page}`);
if (res.statusCode === 200) {
return scrape(res.body.toString(), site);
}
return null;
}
async function fetchUpcoming(site) {
const res = await bhttp.get(`${site.url}/en/videos/AllCategories/0/1/upcoming`);
return scrape(res.body.toString(), site);
}
async function fetchScene(url, site) {
const res = await bhttp.get(url);
return scrapeScene(res.body.toString(), url, site);
}
module.exports = {
-fetchLatest,
+fetchLatest: fetchApiLatest,
fetchScene,
-fetchUpcoming,
+fetchUpcoming: fetchApiUpcoming,
};

View File

@@ -1,54 +1,51 @@
'use strict';
// releases
const adulttime = require('./adulttime');
const babes = require('./babes');
const bang = require('./bang');
const dogfart = require('./dogfart');
const digitalplayground = require('./digitalplayground');
const fakehub = require('./fakehub');
const jayrock = require('./jayrock');
const kink = require('./kink');
const mikeadriano = require('./mikeadriano');
const milehighmedia = require('./milehighmedia');
const perfectgonzo = require('./perfectgonzo');
const pervcity = require('./pervcity');
const pornpros = require('./pornpros');
const privateNetwork = require('./private'); // reserved keyword
const puretaboo = require('./puretaboo');
const mindgeek = require('./mindgeek');
const realitykings = require('./realitykings');
const teamskeet = require('./teamskeet');
const vixen = require('./vixen');
const vogov = require('./vogov');
// releases and profiles
const bangbros = require('./bangbros');
const blowpass = require('./blowpass');
const boobpedia = require('./boobpedia');
const brazzers = require('./brazzers');
const ddfnetwork = require('./ddfnetwork');
const famedigital = require('./famedigital');
const digitalplayground = require('./digitalplayground');
const dogfart = require('./dogfart');
const evilangel = require('./evilangel');
const julesjordan = require('./julesjordan');
const kellymadison = require('./kellymadison');
const legalporno = require('./legalporno');
const men = require('./men');
const metrohd = require('./metrohd');
const mofos = require('./mofos');
const naughtyamerica = require('./naughtyamerica');
const score = require('./score');
const twentyonesextury = require('./21sextury');
const twistys = require('./twistys');
const xempire = require('./xempire');
const wicked = require('./wicked');
// profiles
const boobpedia = require('./boobpedia');
const fakehub = require('./fakehub');
const famedigital = require('./famedigital');
const fantasymassage = require('./fantasymassage');
const freeones = require('./freeones');
const freeonesLegacy = require('./freeones_legacy');
const iconmale = require('./iconmale');
const transangels = require('./transangels');
const jayrock = require('./jayrock');
const julesjordan = require('./julesjordan');
const kellymadison = require('./kellymadison');
const kink = require('./kink');
const legalporno = require('./legalporno');
const men = require('./men');
const metrohd = require('./metrohd');
const mikeadriano = require('./mikeadriano');
const milehighmedia = require('./milehighmedia');
const mindgeek = require('./mindgeek');
const mofos = require('./mofos');
const girlsway = require('./girlsway');
const naughtyamerica = require('./naughtyamerica');
const perfectgonzo = require('./perfectgonzo');
const pervcity = require('./pervcity');
const pornhub = require('./pornhub');
const pornpros = require('./pornpros');
const privateNetwork = require('./private'); // reserved keyword
const puretaboo = require('./puretaboo');
const realitykings = require('./realitykings');
const score = require('./score');
const teamskeet = require('./teamskeet');
const transangels = require('./transangels');
const twentyonesextury = require('./21sextury');
const twistys = require('./twistys');
const vixen = require('./vixen');
const vogov = require('./vogov');
const wicked = require('./wicked');
const xempire = require('./xempire');
module.exports = {
releases: {
@@ -67,6 +64,8 @@ module.exports = {
famedigital,
evilangel,
fakehub,
+fantasymassage,
+girlsway,
jayrock,
julesjordan,
kellymadison,