Added Jules Jordan scraper. Added a render argument to skip ncurses mode during development.

2019-03-18 04:46:53 +01:00
parent d84466445c
commit 1a170bfbd5
12 changed files with 394 additions and 59 deletions
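The render argument itself is implemented in one of the other changed files and is not among the hunks shown below. As a minimal sketch of how such a switch could be wired — assuming yargs for argument parsing and a blessed-style full-screen UI, neither of which is confirmed by this commit:

'use strict';

// Hypothetical sketch only, not part of this commit: a --render flag that
// defaults to true and can be disabled with --no-render during development,
// so the tool prints plain console output instead of starting the ncurses UI.
const yargs = require('yargs');

const argv = yargs
  .boolean('render')
  .default('render', true)
  .argv;

async function init(releases) {
  if (!argv.render) {
    // Development path: skip the full-screen interface entirely.
    console.log(releases);
    return;
  }

  // Normal path: hand the releases to the ncurses-style renderer here.
}

module.exports = init;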

View File

@@ -1,9 +1,11 @@
'use strict';

-const pervcity = require('./pervcity');
const xempire = require('./xempire');
+const julesjordan = require('./julesjordan');
+const pervcity = require('./pervcity');

module.exports = {
-  pervcity,
  xempire,
+  julesjordan,
+  pervcity,
};
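With the new scraper registered above, callers can look a site's scraper up by key on the exported object. A hypothetical usage sketch — the site shape, slug field, and dispatch helper are assumptions; only the pervcity, xempire, and julesjordan keys come from the module above:

'use strict';

// Hypothetical dispatch sketch, not part of this commit. Assumes each site
// record carries a slug matching a key exported by the scraper index above
// and the base URL the scraper should fetch.
const scrapers = require('./scrapers');

async function fetchSite(site) {
  const scraper = scrapers[site.slug];

  if (!scraper) {
    throw new Error(`No scraper found for '${site.slug}'`);
  }

  return scraper(site);
}

module.exports = fetchSite;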

View File

@@ -0,0 +1,83 @@
'use strict';

const bhttp = require('bhttp');
const cheerio = require('cheerio');
const moment = require('moment');

function scrapeLatest(html, site) {
  const $ = cheerio.load(html, { normalizeWhitespace: true });
  const scenesElements = $('.update_details').toArray();

  return scenesElements.map((element) => {
    const sceneLinkElement = $(element).children('a').eq(1);
    const url = sceneLinkElement.attr('href');
    const title = sceneLinkElement.text();

    const date = moment
      .utc($(element).find('.update_date').text(), 'MM/DD/YYYY')
      .toDate();

    const actors = $(element).find('.update_models a')
      .map((actorIndex, actorElement) => $(actorElement).text())
      .toArray();

    return {
      url,
      title,
      actors,
      date,
      rating: null,
      site,
    };
  });
}

function scrapeUpcoming(html, site) {
  const $ = cheerio.load(html, { normalizeWhitespace: true });
  const scenesElements = $('#coming_soon_carousel').find('.table').toArray();

  return scenesElements.map((element) => {
    const details = $(element).find('.update_details_comingsoon')
      .eq(1)
      .children()
      .remove();

    const title = details
      .end()
      .text()
      .trim();

    const actors = details
      .text()
      .trim()
      .split(', ');

    const date = moment
      .utc($(element).find('.update_date_comingsoon').text().slice(7), 'MM/DD/YYYY')
      .toDate();

    return {
      url: null,
      title,
      actors,
      date,
      rating: null,
      site,
    };
  });
}

async function fetchReleases(site) {
  const [latestRes, upcomingRes] = await Promise.all([
    bhttp.get(`${site.url}/categories/movies_1_d.html`),
    bhttp.get(`${site.url}/index.php`),
  ]);

  return [
    ...scrapeUpcoming(upcomingRes.body.toString(), site, true),
    ...scrapeLatest(latestRes.body.toString(), site),
  ];
}

module.exports = fetchReleases;
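Two details of the new scraper are easy to miss. scrapeUpcoming leans on cheerio's .remove()/.end() pair: the second .update_details_comingsoon cell apparently holds the title as a bare text node next to child elements containing the performer names, so the children are detached, read for the actor list, and .end() steps back to the parent whose remaining text is the title. It also strips a fixed prefix off the date cell with .slice(7) before parsing. A standalone illustration of the first idiom, using invented markup shaped like the carousel cells the selectors above target:

'use strict';

// Standalone demo of the .children().remove() / .end() idiom used in
// scrapeUpcoming above. The markup below is invented for illustration only.
const cheerio = require('cheerio');

const html = `
  <div class="table">
    <div class="update_details_comingsoon">thumbnail cell</div>
    <div class="update_details_comingsoon">
      Sample Scene Title
      <span>Actor One, Actor Two</span>
    </div>
  </div>
`;

const $ = cheerio.load(html, { normalizeWhitespace: true });

// Detach the children of the second details cell; the detached set still
// holds the actor names, while .end() returns to the parent selection,
// whose remaining text is just the bare title node.
const details = $('.update_details_comingsoon')
  .eq(1)
  .children()
  .remove();

const title = details.end().text().trim();        // 'Sample Scene Title'
const actors = details.text().trim().split(', '); // [ 'Actor One', 'Actor Two' ]

console.log(title, actors);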

View File

@@ -14,7 +14,10 @@ function scrape(html, site) {
  const actors = $('.home_model_name a').toArray().map(element => $(element).text().replace(/,[\u0020\u00A0\u202F]/, '')); // replace weird commas
  const date = moment.utc($('.add_date').text(), 'DD-MM-YYYY').toDate();

-  const stars = $('img[src*="/star.png"]').toArray().map(element => $(element).attr('src')).length || null;
+  const stars = $('img[src*="/star.png"]')
+    .toArray()
+    .map(element => $(element).attr('src'))
+    .length || null;

  return {
    url,
@@ -31,8 +34,6 @@ function scrape(html, site) {
}

async function fetchReleases(site) {
  // const session = bhttp.session();
  const res = await bhttp.get(`${site.url}/final_latestupdateview.php?limitstart=0&limitend=9&websiteid=0&deviceview=browser&tourId=${site.parameters.tourId}`);
  const elements = JSON.parse(res.body.toString());
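The reformatted stars expression above only ever uses the length of the mapped array, so the mapping step is redundant; counting the matched star images directly gives the same result. An equivalent shorter form, assuming the same markup:

// Equivalent to the mapped version above: only the number of matched
// star images matters, and a cheerio selection already exposes .length.
const stars = $('img[src*="/star.png"]').length || null;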

View File

@@ -13,16 +13,18 @@ function scrape(html, site) {
    const url = `${site.url}${sceneLinkElement.attr('href')}`;
    const title = sceneLinkElement.attr('title');

-    const [likes, dislikes] = $(element).find('.value')
-      .toArray()
-      .map(value => Number($(value).text()));
-    const stars = likes || dislikes ? Math.floor(((likes * 5 + dislikes) / (likes + dislikes)) * 100) / 100 : null;
+    const date = moment
+      .utc($(element).find('.sceneDate').text(), 'MM-DD-YYYY')
+      .toDate();

    const actors = $(element).find('.sceneActors a')
      .map((actorIndex, actorElement) => $(actorElement).attr('title'))
      .toArray();

-    const date = moment.utc($(element).find('.sceneDate').text(), 'MM-DD-YYYY').toDate();
+    const [likes, dislikes] = $(element).find('.value')
+      .toArray()
+      .map(value => Number($(value).text()));
+    const stars = likes || dislikes ? Math.floor(((likes * 5 + dislikes) / (likes + dislikes)) * 100) / 100 : null;

    return {
      url,