Added full photo album to Jules Jordan scraper.

This commit is contained in:
ThePendulum 2019-10-29 04:49:27 +01:00
parent d23e9b7a4b
commit ffe2564bb3
1 changed file with 61 additions and 4 deletions

View File

@ -1,11 +1,49 @@
'use strict';
const Promise = require('bluebird');
const bhttp = require('bhttp');
const cheerio = require('cheerio');
const moment = require('moment');
const { matchTags } = require('../tags');
/* Retrieve a gallery page over HTTP and return the response body as an HTML string. */
async function fetchPhotos(url) {
	const { body } = await bhttp.get(url);

	return body.toString();
}
/*
 * Extract full-resolution photo URLs from a gallery page.
 * The thumbnail `src` attributes point at `thumbs/`; the full-size
 * images live at the same path under `photos/`.
 */
function scrapePhotos(html) {
	const $ = cheerio.load(html, { normalizeWhitespace: true });

	return $('.photo_gallery_thumbnail_wrapper .thumbs')
		.toArray()
		.map(photoElement => $(photoElement).attr('src').replace('thumbs/', 'photos/'));
}
/*
 * Fetch every photo URL in a scene's album, following pagination.
 *
 * @param {string|number} entryId - Gallery/scene identifier used in the album URL.
 * @param {number} [page=1] - First page to fetch.
 * @returns {Promise<string[]>} Full-resolution photo URLs across all pages.
 */
async function getPhotos(entryId, page = 1) {
	// Single source of truth for the album URL, so page 1 and the
	// follow-up pages cannot drift apart.
	const getAlbumUrl = pageNumber => `https://www.julesjordan.com/trial/gallery.php?id=${entryId}&type=highres&page=${pageNumber}`;

	const html = await fetchPhotos(getAlbumUrl(page));
	const $ = cheerio.load(html, { normalizeWhitespace: true });
	const photos = scrapePhotos(html);

	// .match() returns null when the page-total element is absent or has no
	// trailing digits (e.g. single-page albums); default to one page instead
	// of throwing a TypeError on [0].
	const pageTotals = $('.page_totals').text().trim().match(/\d+$/);
	const pages = pageTotals ? Number(pageTotals[0]) : 1;

	// Fetch the remaining pages (2..pages) with bounded concurrency to
	// avoid hammering the site.
	const otherPhotos = await Promise.map(
		Array.from({ length: pages - 1 }, (value, index) => index + 2),
		async (pageNumber) => {
			const pageHtml = await fetchPhotos(getAlbumUrl(pageNumber));

			return scrapePhotos(pageHtml);
		},
		{ concurrency: 2 },
	);

	return photos.concat(otherPhotos.flat());
}
function scrapeLatest(html, site) {
const $ = cheerio.load(html, { normalizeWhitespace: true });
const scenesElements = $('.update_details').toArray();
@ -19,7 +57,7 @@ function scrapeLatest(html, site) {
const url = sceneLinkElement.attr('href');
const title = sceneLinkElement.text();
const shootId = $(element).attr('data-setid');
const entryId = $(element).attr('data-setid');
const date = moment
.utc($(element).find('.update_date').text(), 'MM/DD/YYYY')
@ -31,7 +69,7 @@ function scrapeLatest(html, site) {
return {
url,
shootId,
entryId,
title,
actors,
date,
@ -50,7 +88,7 @@ function scrapeUpcoming(html, site) {
const photoCount = Number(photoElement.attr('cnt'));
const photos = Array.from({ length: photoCount }, (value, index) => photoElement.attr(`src${index}_1x`)).filter(photoUrl => photoUrl !== undefined);
const shootId = $(element).find('.upcoming_updates_thumb').attr('id').match(/\d+/)[0];
const entryId = $(element).find('.upcoming_updates_thumb').attr('id').match(/\d+/)[0];
const details = $(element).find('.update_details_comingsoon')
.eq(1)
@ -73,7 +111,7 @@ function scrapeUpcoming(html, site) {
return {
url: null,
shootId,
entryId,
title,
date,
actors,
@ -88,6 +126,7 @@ async function scrapeScene(html, url, site) {
const $ = cheerio.load(html, { normalizeWhitespace: true });
const title = $('.title_bar_hilite').text().trim();
const entryId = $('.suggest_tags a').attr('href').match(/\d+/)[0];
const date = moment
.utc($('.update_date').text(), 'MM/DD/YYYY')
.toDate();
@ -100,15 +139,33 @@ async function scrapeScene(html, url, site) {
const stars = Number($('.avg_rating').text().trim().replace(/[\s|Avg Rating:]/g, ''));
const infoLines = $('script:contains("useimage")')
.html()
.split('\n');
const poster = infoLines.find(line => line.match('useimage')).replace('useimage = "', '').slice(0, -2);
const trailerLine = infoLines.find(line => line.match('movie["Trailer_720"]'));
const trailer = trailerLine.slice(trailerLine.indexOf('path:"') + 6, trailerLine.indexOf('",movie'));
const photos = await getPhotos(entryId);
const rawTags = $('.update_tags a').map((tagIndex, tagElement) => $(tagElement).text()).toArray();
const tags = await matchTags(rawTags);
return {
url,
entryId,
title,
date,
actors,
description,
poster,
photos,
trailer: {
src: trailer,
quality: 720,
},
rating: {
stars,
},