Replaced cheerio with qu in Gamma scraper.
parent 53a1c697d0
commit 87094a9498
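The scrapers now receive a small `query` helper from `../utils/qu` instead of a cheerio `$` instance. That module is not part of this diff, so the snippet below is only a rough, hypothetical stand-in built on jsdom (already required by the scraper) to illustrate the shape of the calls used here — `query.all`, `query.cnt`, `query.el`, and plain DOM properties such as `linkEl.href`; the real helper may differ.

```js
// Hypothetical stand-in for the ../utils/qu query helper; illustration only.
const { JSDOM } = require('jsdom');

function initQuery(html, baseUrl) {
  const { document } = new JSDOM(html, { url: baseUrl }).window;

  return {
    // query.all(selector) -> array of plain DOM elements
    all: (selector) => Array.from(document.querySelectorAll(selector)),
    // query.cnt(selector) -> trimmed text content of the first match, or null
    cnt: (selector) => document.querySelector(selector)?.textContent.trim() || null,
    // query.el(selector, attribute) -> attribute value of the first match, or null
    el: (selector, attribute) => document.querySelector(selector)?.getAttribute(attribute) || null,
  };
}

// e.g. the new scrapePhotos({ query }) destructures a helper like this and reads
// linkEl.href directly instead of $(linkEl).attr('href')
const query = initQuery('<a class="imgLink" href="/photo/123">Scene</a>', 'https://example.com');
const urls = query.all('.imgLink').map((linkEl) => linkEl.href); // ['https://example.com/photo/123']
```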
@@ -3,7 +3,6 @@
 const Promise = require('bluebird');
 const util = require('util');
 const { JSDOM } = require('jsdom');
-const cheerio = require('cheerio');
 const moment = require('moment');
 const format = require('template-format');
 
@@ -62,21 +61,19 @@ function getAlbumUrl(albumPath, site) {
 }
 
 async function fetchPhotos(url) {
-  const res = await http.get(url);
+  const res = await qu.get(url);
 
-  return res.body.toString();
+  return res.item;
 }
 
-function scrapePhotos(html, includeThumbnails = true) {
-  const $ = cheerio.load(html, { normalizeWhitespace: true });
-  return $('.preview .imgLink, .pgFooterThumb a').toArray().map((linkEl) => {
-    const url = $(linkEl).attr('href');
+function scrapePhotos({ query }, includeThumbnails = true) {
+  return query.all('.preview .imgLink, .pgFooterThumb a').map((linkEl) => {
+    const url = linkEl.href;
 
     if (/\/join|\/createaccount/.test(url)) {
       // URL links to join page instead of full photo, extract thumbnail
       // /createaccount is used by e.g. Tricky Spa native site
-      const src = $(linkEl).find('img').attr('src');
+      const src = query.img(linkEl);
 
       if (/previews\//.test(src)) {
         // resource often serves full photo at a modifier URL anyway, add as primary source
@@ -106,20 +103,18 @@ async function getPhotos(albumPath, site, includeThumbnails = true) {
   }
 
   try {
-    const html = await fetchPhotos(albumUrl);
-    const $ = cheerio.load(html, { normalizeWhitespace: true });
-    const photos = scrapePhotos(html, includeThumbnails);
+    const item = await fetchPhotos(albumUrl);
+    const photos = scrapePhotos(item, includeThumbnails);
 
-    const lastPage = $('.Gamma_Paginator a.last').attr('href')?.match(/\d+$/)[0];
+    const lastPage = item.query.url('.Gamma_Paginator a.last')?.match(/\d+$/)[0];
 
     if (lastPage) {
      const otherPages = Array.from({ length: Number(lastPage) }, (_value, index) => index + 1).slice(1);
 
       const otherPhotos = await Promise.map(otherPages, async (page) => {
-        const pageUrl = `${albumUrl}/${page}`;
-        const pageHtml = await fetchPhotos(pageUrl);
+        const pageItem = await fetchPhotos(`${albumUrl}/${page}`);
 
-        return scrapePhotos(pageHtml, includeThumbnails);
+        return scrapePhotos(pageItem, includeThumbnails);
       }, {
         concurrency: 2,
       });
@@ -244,41 +239,21 @@ async function scrapeApiReleases(json, site) {
   }).filter(Boolean);
 }
 
-function scrapeAll(html, site, networkUrl, hasTeaser = true) {
-  const $ = cheerio.load(html, { normalizeWhitespace: true });
-  const scenesElements = $('li[data-itemtype=scene], div[data-itemtype=scenes]').toArray();
+function scrapeAll(scenes, site, networkUrl, hasTeaser = true) {
+  return scenes.map(({ query, el }) => {
 
-  return scenesElements.map((element) => {
     const release = {};
 
-    const sceneLinkElement = $(element).find('.sceneTitle a, .tlcTitle a');
+    release.url = query.url('.sceneTitle a, .tlcTitle a', 'href', { origin: networkUrl ? site.parent.url : site.url });
 
-    if (site) release.url = `${networkUrl ? site.parent.url : site.url}${sceneLinkElement.attr('href')}`;
-    else release.url = `${networkUrl}${sceneLinkElement.attr('href')}`;
+    release.title = query.cnt('.sceneTitle a', 'tlcTitle a', 'title');
+    release.entryId = el.dataset.itemid;
 
-    release.title = sceneLinkElement.attr('title');
-    release.entryId = $(element).attr('data-itemid');
+    release.date = query.date('.sceneDate, .tlcSpecsDate .tlcDetailsValue', ['MM-DD-YYYY', 'YYYY-MM-DD']);
+    release.actors = query.cnts('.sceneActors a, .tlcActors a', ' title');
 
-    const dateEl = $(element).find('.sceneDate, .tlcSpecsDate .tlcDetailsValue').text() || null;
-    if (dateEl) {
-      release.date = moment
-        .utc(dateEl, ['MM-DD-YYYY', 'YYYY-MM-DD'])
-        .toDate();
-    }
+    [release.likes, release.dislikes] = query.all('.value').map((likeEl) => query.number(likeEl));
 
-    release.actors = $(element).find('.sceneActors a, .tlcActors a')
-      .map((actorIndex, actorElement) => $(actorElement).attr('title'))
-      .toArray();
+    release.poster = query.img('.imgLink img, .tlcImageItem', 'data-original') || query.img('.imgLink img, .tlcImageItem');
 
-    [release.likes, release.dislikes] = $(element).find('.value')
-      .toArray()
-      .map((value) => Number($(value).text()));
 
-    const posterEl = $(element).find('.imgLink img, .tlcImageItem');
-    if (posterEl) release.poster = posterEl.attr('data-original') || posterEl.attr('src');
 
-    const channelEl = $(element).find('.fromSite a');
-    if (channelEl.attr('title')) release.channel = channelEl.attr('title').replace('.com', '');
 
     if (hasTeaser) {
       release.teaser = [
@@ -287,76 +262,66 @@ function scrapeAll(html, site, networkUrl, hasTeaser = true) {
       ];
     }
 
+    release.channel = query.el('.fromSite a', 'title')?.replace('.com', '');
 
     return release;
   });
 }
 
-async function scrapeScene(html, url, site, baseRelease, mobileHtml, options) {
-  const $ = cheerio.load(html, { normalizeWhitespace: true });
-  const m$ = mobileHtml && cheerio.load(mobileHtml, { normalizeWhitespace: true });
-  const release = { $, url };
+async function scrapeScene({ query }, url, channel, baseRelease, mobileItem, options) {
+  const release = { query }; // used by XEmpire scraper to resolve channel-specific details
 
-  const json = $('script[type="application/ld+json"]').html();
-  const videoJson = $('script:contains("window.ScenePlayerOptions")').html();
+  const json = query.html('script[type="application/ld+json"]');
+  const videoJson = query.htmls('script').find((script) => /ScenePlayerOptions/i.test(script));
 
   const [data, data2] = json ? JSON.parse(json) : [];
   const videoData = videoJson && JSON.parse(videoJson.slice(videoJson.indexOf('{'), videoJson.indexOf('};') + 1));
 
   release.entryId = (baseRelease?.path || new URL(url).pathname).match(/\/(\d{2,})(\/|$)/)?.[1];
   release.title = videoData?.playerOptions?.sceneInfos.sceneTitle || data?.name;
+  release.description = data?.description;
 
-  // date in data object is not the release date of the scene, but the date the entry was added; only use as fallback
-  const dateString = $('.updatedDate').first().text().trim();
-  const dateMatch = dateString.match(/\d{2,4}[-/]\d{2}[-/]\d{2,4}/)?.[0];
+  release.date = query.date('.updatedDate', ['MM-DD-YYYY', 'YYYY-MM-DD'])
+    || qu.extractDate(data?.dateCreated, 'YYYY-MM-DD')
+    || videoData?.playerOptions?.sceneInfos.sceneReleaseDate;
 
-  if (dateMatch) release.date = moment.utc(dateMatch, ['MM-DD-YYYY', 'YYYY-MM-DD']).toDate();
-  else if (data?.dateCreated) release.date = moment.utc(data.dateCreated, 'YYYY-MM-DD').toDate();
-  else release.date = videoData?.playerOptions?.sceneInfos.sceneReleaseDate;
+  release.actors = (data?.actor || data2?.actor)?.map((actor) => ({
+    name: actor.name,
+    gender: actor.gender,
+  })) || [];
 
-  if (data) {
-    release.description = data.description;
-    if (data.director?.[0]?.name) release.director = data.director[0].name;
-    else if (data2?.director?.[0]?.name) release.director = data2.director[0].name;
+  release.duration = qu.durationToSeconds(data.duration);
+  release.director = data?.director?.[0]?.name || data2?.director?.[0]?.name;
 
-    const stars = (data.aggregateRating.ratingValue / data.aggregateRating.bestRating) * 5;
-    if (stars) release.rating = { stars };
+  release.tags = data?.keywords?.split(', ') || data2?.keywords?.split(', ') || [];
+  release.stars = (data.aggregateRating.ratingValue / data.aggregateRating.bestRating) * 5 || null;
 
-    release.duration = moment.duration(data.duration.slice(2)).asSeconds();
+  release.channel = slugify(data?.productionCompany?.name
+    || query.el('.studioLink a, .siteLink a', 'title')
+    || query.cnt('.siteNameSpan')?.toLowerCase().replace('.com', '')
+    || query.meta('meta[name="twitter:domain"]')?.replace('.com', ''), '');
 
+  if (videoData?.picPreview && new URL(videoData.picPreview).pathname.length > 1) {
+    // sometimes links to just https://images02-fame.gammacdn.com/
+    const poster = new URL(videoData.picPreview);
 
+    release.poster = [
+      `${poster.origin}${poster.pathname}`,
+      videoData.picPreview,
+    ];
   }
 
-  const actors = data?.actor || data2?.actor;
-  if (actors) {
-    release.actors = actors.map((actor) => ({
-      name: actor.name,
-      gender: actor.gender,
-    }));
-  }
+  const photoLink = query.url('.picturesItem a');
+  const mobilePhotos = mobileItem?.query.imgs('.preview-displayer a img') || [];
 
-  const hasTrans = release.actors?.some((actor) => actor.gender === 'shemale');
-  const rawTags = data?.keywords?.split(', ') || data2?.keywords?.split(', ') || [];
-  release.tags = hasTrans ? [...rawTags, 'transsexual'] : rawTags;
 
-  const channel = data?.productionCompany?.name
-    || $('.studioLink a, .siteLink a').attr('title')?.trim()
-    || $('.siteNameSpan').text()
-      ?.trim()
-      .toLowerCase()
-      .replace('.com', '')
-    || $('meta[name="twitter:domain"]').attr('content')?.replace('.com', '');
 
-  if (channel) release.channel = slugify(channel, '');
 
-  if (videoData?.picPreview && new URL(videoData.picPreview).pathname.length > 1) release.poster = videoData.picPreview; // sometimes links to just https://images02-fame.gammacdn.com/
 
-  const photoLink = $('.picturesItem a').attr('href');
-  const mobilePhotos = m$ ? m$('.preview-displayer a img').map((photoIndex, photoEl) => $(photoEl).attr('src')).toArray() : [];
 
   if (photoLink && options.includePhotos) {
-    const photos = await getPhotos(photoLink, site, mobilePhotos.length < 3); // only get thumbnails when less than 3 mobile photos are available
+    const photos = await getPhotos(photoLink, channel, mobilePhotos.length < 3); // only get thumbnails when less than 3 mobile photos are available
 
-    if (photos.length < 7) release.photos = [...photos, ...mobilePhotos]; // probably only teaser photos available, supplement with mobile album
-    else release.photos = photos;
+    if (photos.length < 7) {
+      release.photos = [...photos, ...mobilePhotos]; // probably only teaser photos available, supplement with mobile album
+    } else {
+      release.photos = photos;
+    }
   } else {
     release.photos = mobilePhotos;
   }
@@ -397,15 +362,14 @@ async function scrapeScene(html, url, site, baseRelease, mobileHtml, options) {
     ];
   }
 
-  const movie = $('.dvdLink');
-  const movieUrl = qu.prefixUrl(movie.attr('href'), site.url);
+  const movieUrl = query.url('.dvdLink', 'href', { origin: channel.url });
 
   if (movieUrl) {
     release.movie = {
       url: movieUrl,
-      title: movie.attr('title'),
+      title: query.el('.dvdLink', 'title'),
       entryId: movieUrl.match(/\/(\d+)(\/|$)/)?.[1],
-      covers: [movie.find('img').attr('src')],
+      covers: [qu.imgs('.dvdLink img')],
     };
   }
 
@@ -547,9 +511,7 @@ async function fetchActorReleases(profileUrl, getActorReleasesUrl, page = 1, acc
   return accReleases.concat(releases);
 }
 
-async function scrapeProfile(html, url, actorName, _siteSlug, getActorReleasesUrl, withReleases, context) {
-  const { query } = qu.extract(html);
-
+async function scrapeProfile({ query }, url, actorName, _siteSlug, getActorReleasesUrl, withReleases, context) {
   const avatar = query.el('img.actorPicture');
   const hair = query.cnt('.actorProfile .attribute_hair_color');
   const height = query.cnt('.actorProfile .attribute_height');
@@ -699,10 +661,10 @@ function getUpcomingUrl(site) {
 
 async function fetchLatest(site, page = 1) {
   const url = getLatestUrl(site, page);
-  const res = await http.get(url);
+  const res = await qu.getAll(url, 'li[data-itemtype=scene], div[data-itemtype=scenes]');
 
   if (res.ok) {
-    return scrapeAll(res.body.toString(), site);
+    return scrapeAll(res.items, site);
   }
 
   return res.status;
@@ -710,10 +672,10 @@ async function fetchLatest(site, page = 1) {
 
 async function fetchUpcoming(site) {
   const url = getUpcomingUrl(site);
-  const res = await http.get(url);
+  const res = await qu.getAll(url, 'li[data-itemtype=scene], div[data-itemtype=scenes]');
 
   if (res.ok) {
-    return scrapeAll(res.body.toString(), site, null, false);
+    return scrapeAll(res.items, site, null, false);
   }
 
   return res.status;
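Both `fetchLatest` and `fetchUpcoming` above assume that `qu.getAll(url, selector)` fetches the listing page and returns pre-split items, one per matched scene element, which `scrapeAll(res.items, site)` then maps over. Below is a minimal sketch of that assumed contract; the real `../utils/qu` implementation is not shown in this diff, and the result shape is inferred only from the calls above.

```js
// Sketch of the assumed getAll contract: { ok, status, items }, one item per match.
const { JSDOM } = require('jsdom');

async function getAll(url, selector) {
  const res = await fetch(url); // Node 18+ global fetch, purely for illustration

  if (!res.ok) {
    return { ok: false, status: res.status, items: [] };
  }

  const { document } = new JSDOM(await res.text(), { url }).window;

  const items = Array.from(document.querySelectorAll(selector)).map((el) => ({
    el,
    query: {
      // scoped lookup inside this element, e.g. query.cnt('.sceneTitle a')
      cnt: (sel) => el.querySelector(sel)?.textContent.trim() || null,
    },
  }));

  return { ok: true, status: res.status, items };
}

// usage in the style of fetchLatest:
// const res = await getAll(getLatestUrl(site, page), 'li[data-itemtype=scene], div[data-itemtype=scenes]');
// if (res.ok) return scrapeAll(res.items, site);
```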
@@ -753,8 +715,8 @@ async function fetchScene(url, site, baseRelease, options) {
 
   if (deepUrl) {
     const [res, mobileRes] = await Promise.all([
-      http.get(deepUrl),
-      mobileUrl && http.get(mobileUrl, {
+      qu.get(deepUrl),
+      mobileUrl && qu.get(mobileUrl, null, {
        headers: {
          // don't redirect to main site
          'user-agent': 'Mozilla/5.0 (Linux; Android 6.0; Nexus 5 Build/MRA58N) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/80.0.3987.122 Mobile Safari/537.36',
@@ -763,8 +725,8 @@ async function fetchScene(url, site, baseRelease, options) {
     ]);
 
     if (res.status === 200) {
-      const mobileBody = mobileRes?.status === 200 ? mobileRes.body.toString() : null;
-      const scene = await scrapeScene(res.body.toString(), url, site, baseRelease, mobileBody, options);
+      const mobileItem = mobileRes?.status === 200 ? mobileRes.item : null;
+      const scene = await scrapeScene(res.item, url, site, baseRelease, mobileItem, options);
 
       return { ...scene, deepUrl };
     }
@@ -827,13 +789,13 @@ async function fetchProfile({ name: actorName }, context, include, altSearchUrl,
 
   if (actorUrl) {
     const url = `https://${siteSlug}.com${actorUrl}`;
-    const actorRes = await http.get(url);
+    const actorRes = await qu.get(url);
 
     if (actorRes.status !== 200) {
       return null;
     }
 
-    return scrapeProfile(actorRes.body.toString(), url, actorName, siteSlug, getActorReleasesUrl, include.scenes, context);
+    return scrapeProfile(actorRes.item, url, actorName, siteSlug, getActorReleasesUrl, include.scenes, context);
   }
 
   return null;
@@ -1,14 +1,14 @@
 'use strict';
 
 const { fetchLatest, fetchUpcoming, scrapeScene, fetchProfile } = require('./gamma');
-const http = require('../utils/http');
+const qu = require('../utils/qu');
 
 async function fetchScene(url, site, baseRelease, options) {
-  const res = await http.get(url);
+  const res = await qu.get(url);
 
-  const release = await scrapeScene(res.body.toString(), url, site, baseRelease, null, options);
+  const release = await scrapeScene(res.item, url, site, baseRelease, null, options);
 
-  const siteDomain = release.$('meta[name="twitter:domain"]').attr('content') || 'allblackx.com'; // only AllBlackX has no twitter domain, no other useful hints available
+  const siteDomain = release.query.el('meta[name="twitter:domain"]', 'content') || 'allblackx.com'; // only AllBlackX has no twitter domain, no other useful hints available
   const siteSlug = siteDomain && siteDomain.split('.')[0].toLowerCase();
   // const siteUrl = siteDomain && `https://www.${siteDomain}`;
 
@@ -16,6 +16,10 @@ function trim(str) {
 }
 
 function extractDate(dateString, format, match) {
+  if (!dateString) {
+    return null;
+  }
+
   if (match) {
     const dateStamp = trim(dateString).match(match);
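The guard added to `extractDate` above lets callers such as the `qu.extractDate(data?.dateCreated, 'YYYY-MM-DD')` fallback in `scrapeScene` pass a possibly missing value without throwing. A rough approximation of the guarded helper follows, assuming moment-based parsing; only the first lines of the real function are visible in this diff.

```js
// Illustrative approximation of the guarded extractDate; the real helper may differ.
const moment = require('moment');

function extractDate(dateString, format, match) {
  if (!dateString) {
    return null; // new guard: missing input no longer reaches .trim()/.match()
  }

  const trimmed = dateString.trim();
  const stamp = match ? trimmed.match(match)?.[0] : trimmed;

  return stamp ? moment.utc(stamp, format).toDate() : null;
}

console.log(extractDate(undefined, 'YYYY-MM-DD')); // null rather than a TypeError
console.log(extractDate('2020-01-31', 'YYYY-MM-DD')); // 2020-01-31T00:00:00.000Z
```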