traxxx/src/scrapers/template.js

'use strict';
/* eslint-disable newline-per-chained-call */
const bhttp = require('bhttp');
const cheerio = require('cheerio');
const moment = require('moment');

const { matchTags } = require('../tags');
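
// Scraper template: copy this file as a starting point for a new site scraper.
// The shorthand properties returned below (url, entryId, title, date, likes, ...)
// are placeholders that each scraper must fill in with selectors for its site.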
function scrapeLatest(html, site) {
  const $ = cheerio.load(html, { normalizeWhitespace: true });
  const sceneElements = $('.scenes-latest').toArray();

  return sceneElements.map((element) => {
    const actors = $(element).find('.actors a').map((actorIndex, actorElement) => $(actorElement).text()).toArray();

    // The remaining shorthand properties are placeholders; define each of them
    // from the element with site-specific selectors, e.g. $(element).find('.title').text().
    return {
      url,
      entryId,
      title,
      actors,
      date,
      rating: {
        likes,
        dislikes,
        stars,
      },
      site,
    };
  });
}

function scrapeUpcoming(html, site) {
  const $ = cheerio.load(html, { normalizeWhitespace: true });
  const sceneElements = $('.scenes-upcoming').toArray();

  return sceneElements.map((element) => {
    const actors = $(element).find('.actors a').map((actorIndex, actorElement) => $(actorElement).text()).toArray();

    // Placeholder properties, to be extracted per site as in scrapeLatest.
    return {
      url,
      entryId,
      title,
      actors,
      date,
      rating: {
        likes,
        dislikes,
        stars,
      },
      site,
    };
  });
}

async function scrapeScene(html, url, site) {
  const $ = cheerio.load(html, { normalizeWhitespace: true });

  const actors = $('.actors a').map((actorIndex, actorElement) => $(actorElement).text()).toArray();
  const rawTags = $('.tags a').map((tagIndex, tagElement) => $(tagElement).text()).toArray();
  const tags = await matchTags(rawTags);

  // The remaining shorthand properties are placeholders; dates are typically
  // parsed with moment, e.g. moment.utc($('.date').text(), 'MMMM D, YYYY').toDate().
  return {
    url,
    entryId,
    title,
    description,
    actors,
    director,
    date,
    duration,
    tags,
    rating: {
      likes,
      dislikes,
      stars,
    },
    site,
  };
}

async function fetchLatest(site, page = 1) {
  // Replace /url with the site's actual latest-releases path, using the page
  // parameter for pagination where the site supports it.
  const res = await bhttp.get(`${site.url}/url`);

  return scrapeLatest(res.body.toString(), site);
}

async function fetchUpcoming(site) {
  // Replace /url with the site's actual upcoming-releases path.
  const res = await bhttp.get(`${site.url}/url`);

  return scrapeUpcoming(res.body.toString(), site);
}

async function fetchScene(url, site) {
  const res = await bhttp.get(url);

  return scrapeScene(res.body.toString(), url, site);
}

module.exports = {
  fetchLatest,
  fetchUpcoming,
  fetchScene,
};
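
// Usage sketch (an assumption for illustration, not part of the template itself):
// traxxx passes each scraper a site object that includes at least a `url`; a
// hypothetical site object and example.com URL are used below.
//
// const scraper = require('./template');
//
// (async () => {
//   const site = { name: 'Example Site', url: 'https://www.example.com' };
//   const latest = await scraper.fetchLatest(site, 1);
//   const upcoming = await scraper.fetchUpcoming(site);
//   console.log(latest, upcoming);
// })();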