Completed Perv City scraper. Outputting results as a list.

This commit is contained in:
2019-03-04 04:19:03 +01:00
parent 5e4e12fbf8
commit af9565b296
7 changed files with 77 additions and 18 deletions

View File

@@ -1,7 +1,9 @@
'use strict';
const util = require('util');
const config = require('config');
const { terminal } = require('terminal-kit');
const moment = require('moment');
const networks = require('../networks.js');
const scrapers = require('./scrapers');
@@ -80,10 +82,8 @@ function accumulateSites() {
return config.include ? accumulateIncludedSites() : accumulateExcludedSites();
}
async function init() {
const sites = accumulateSites();
const scenes = await Promise.all(sites.map(async (site) => {
async function fetchScenes(sites) {
return Promise.all(sites.map(async (site) => {
const scraper = scrapers[site.id] || scrapers[site.network];
if (scraper) {
@@ -92,8 +92,25 @@ async function init() {
return [];
}));
}
console.log(util.inspect(scenes, { depth: 5 }));
// Restore the terminal to a usable state and terminate the process.
// Order matters: release the raw-input grab first, then clear the screen,
// then let terminal-kit perform a clean process exit.
function exit() {
terminal.grabInput(false);
terminal.clear();
terminal.processExit();
}
/**
 * Entry point: scrape the configured sites and present the newest releases
 * of the first site in an interactive single-column menu.
 */
async function init() {
	const sites = accumulateSites();
	const scenes = await fetchScenes(sites);

	// scenes[0] may be absent (no sites matched the config filter) and a
	// scraper may resolve to an object without a `latest` array; fall back
	// to an empty menu instead of crashing on scenes[0].latest.
	const latest = (scenes.length > 0 && Array.isArray(scenes[0].latest))
		? scenes[0].latest
		: [];

	// terminal-kit only emits 'key' events while input is grabbed;
	// exit() releases the grab again on CTRL_C.
	terminal.grabInput(true);
	terminal.on('key', (name) => {
		if (name === 'CTRL_C') {
			exit();
		}
	});

	terminal.singleColumnMenu(latest.map((scene) => {
		// rating.stars can legitimately be null (see the Perv City scraper);
		// render a placeholder rather than calling toFixed on null.
		const stars = scene.rating.stars === null ? '-' : scene.rating.stars.toFixed(2);
		return `[${scene.siteId} ${moment(scene.date).format('YYYY-MM-DD')}] ${scene.title} (${scene.actors.join(', ')}) ★ ${stars}`;
	}));
}
init();

View File

@@ -2,16 +2,46 @@
const bhttp = require('bhttp');
const cheerio = require('cheerio');
const moment = require('moment');
/**
 * Parse one Perv City scene-card HTML snippet into a release object.
 *
 * @param {string} html - HTML fragment for a single scene (from total_arr).
 * @param {Object} site - Site descriptor; site.url and site.id are read.
 * @returns {Object} Release with url, title, actors, date, rating and siteId.
 */
function scrape(html, site) {
	const $ = cheerio.load(html, { normalizeWhitespace: true });

	const sceneLinkElement = $('#scene_title_border a');
	const url = `${site.url}/${sceneLinkElement.attr('href')}`;
	const title = sceneLinkElement.attr('title');

	// Actor names carry a trailing comma followed by a (possibly non-breaking
	// or narrow) space separator; strip that one occurrence.
	const actors = $('.home_model_name a').toArray()
		.map(element => $(element).text().replace(/,[\u0020\u00A0\u202F]/, ''));

	// Dates are rendered as DD-MM-YYYY; parse in UTC to avoid timezone drift.
	const date = moment.utc($('.add_date').text(), 'DD-MM-YYYY').toDate();

	// The rating is a row of star images, so the star count is just the size
	// of the selection — no need to map each element to its src first.
	// `|| null` keeps "no stars rendered" as an explicit null rating.
	const stars = $('img[src*="/star.png"]').length || null;

	return {
		url,
		title,
		actors,
		date,
		rating: {
			likes: null,
			dislikes: null,
			stars,
		},
		siteId: site.id,
	};
}
/**
 * Fetch the latest releases for a Perv City site from its tour API.
 *
 * The endpoint returns JSON whose `total_arr` property is an array of HTML
 * snippets, one per scene card; each snippet is handed to scrape().
 *
 * Fixes over the previous revision: removed the early `return {};` that made
 * the whole body unreachable, removed the duplicate `const res` declaration
 * (a SyntaxError in one scope), dropped the debug console.log calls, and kept
 * the URL variant that correctly starts the query string with `?` (the other
 * variant used `&` after the path).
 *
 * @param {Object} site - Site descriptor; reads site.url, site.parameters.tourId.
 * @returns {Promise<{latest: Object[]}>} Scraped releases, newest first as served.
 */
async function fetchReleases(site) {
	const res = await bhttp.get(`${site.url}/final_latestupdateview.php?limitstart=0&limitend=9&websiteid=0&deviceview=browser&tourId=${site.parameters.tourId}`);
	const elements = JSON.parse(res.body.toString());

	const latest = elements.total_arr.map(html => scrape(html, site));

	return {
		latest,
	};
}

module.exports = fetchReleases;

View File

@@ -2,7 +2,7 @@
const bhttp = require('bhttp');
const cheerio = require('cheerio');
const { parse, startOfDay } = require('date-fns');
const moment = require('moment');
function scrape(html, site) {
const $ = cheerio.load(html, { normalizeWhitespace: true });
@@ -22,7 +22,7 @@ function scrape(html, site) {
.map((actorIndex, actorElement) => $(actorElement).attr('title'))
.toArray();
const date = startOfDay(parse($(element).find('.sceneDate').text(), 'MM-DD-YYYY'));
const date = moment.utc($(element).find('.sceneDate').text(), 'MM-DD-YYYY').toDate();
return {
url,
@@ -34,6 +34,7 @@ function scrape(html, site) {
dislikes,
stars,
},
siteId: site.id,
};
});
}