'use strict';

const bhttp = require('bhttp');
const cheerio = require('cheerio');
const moment = require('moment');

/**
 * Parse a listing page's HTML into release objects.
 *
 * @param {string} html - Raw HTML of a video-listing page.
 * @param {Object} site - Site descriptor; `site.url` is used as the base for scene URLs.
 * @param {boolean} [upcoming=false] - Whether this listing is the "upcoming" page.
 *   BUGFIX: fetchReleases already passed this flag, but scrape previously
 *   ignored it; it is now carried through onto each release object.
 * @returns {Array<Object>} One release object per `.sceneInfo` element.
 */
function scrape(html, site, upcoming = false) {
  const $ = cheerio.load(html, { normalizeWhitespace: true });
  const scenesElements = $('.sceneInfo').toArray();

  return scenesElements.map((element) => {
    const sceneLinkElement = $(element).find('.sceneTitle a');

    // The href is site-relative; prefix with the site's base URL.
    const url = `${site.url}${sceneLinkElement.attr('href')}`;
    const title = sceneLinkElement.attr('title');

    // Listing dates are rendered as e.g. "12-31-2019"; parse as UTC so the
    // release date is not shifted by the scraper host's local timezone.
    const date = moment
      .utc($(element).find('.sceneDate').text(), 'MM-DD-YYYY')
      .toDate();

    const actors = $(element).find('.sceneActors a')
      .map((actorIndex, actorElement) => $(actorElement).attr('title'))
      .toArray();

    // The two `.value` elements hold the like and dislike counts, in that order.
    const [likes, dislikes] = $(element).find('.value')
      .toArray()
      .map(value => Number($(value).text()));

    // Weighted 1-5 star score (all likes → 5, all dislikes → 1), truncated to
    // two decimals; null when there are no votes at all (avoids 0/0 = NaN).
    const stars = likes || dislikes
      ? Math.floor(((likes * 5 + dislikes) / (likes + dislikes)) * 100) / 100
      : null;

    return {
      url,
      title,
      actors,
      date,
      rating: {
        likes,
        dislikes,
        stars,
      },
      upcoming,
      site,
    };
  });
}

/**
 * Fetch the latest and upcoming listing pages for a site in parallel and
 * return the combined scraped releases (upcoming first, as before).
 *
 * @param {Object} site - Site descriptor with a base `url`.
 * @returns {Promise<Array<Object>>} Combined release objects from both pages.
 */
async function fetchReleases(site) {
  const [latestRes, upcomingRes] = await Promise.all([
    bhttp.get(`${site.url}/en/videos`),
    bhttp.get(`${site.url}/en/videos/AllCategories/0/1/upcoming`),
  ]);

  return [
    ...scrape(upcomingRes.body.toString(), site, true),
    ...scrape(latestRes.body.toString(), site),
  ];
}

module.exports = fetchReleases;