traxxx/src/scrapers/template.js


'use strict';

/* eslint-disable */

const bhttp = require('bhttp');
const cheerio = require('cheerio');
const moment = require('moment');

const { matchTags } = require('../tags');

function scrapeLatest(html, site) {
  const $ = cheerio.load(html, { normalizeWhitespace: true });
  const sceneElements = $('.scenes-latest').toArray();

  return sceneElements.map((element) => {
    const actors = $('.actors a').map((actorIndex, actorElement) => $(actorElement).text()).toArray();

    // The remaining fields are template placeholders; a concrete scraper
    // extracts them from `element` for each scene on the listing page.
    return {
      url,
      shootId,
      title,
      actors,
      date,
      rating: {
        likes,
        dislikes,
        stars,
      },
      site,
    };
  });
}
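
/*
 * A sketch of how the placeholder fields above might be filled in for a
 * concrete site (hypothetical selectors and date format, not part of this
 * template):
 *
 *   const sceneElement = $(element);
 *   const url = sceneElement.find('a.scene-link').attr('href');
 *   const title = sceneElement.find('.title').text();
 *   const date = moment.utc(sceneElement.find('.date').text(), 'YYYY-MM-DD').toDate();
 */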

function scrapeUpcoming(html, site) {
  const $ = cheerio.load(html, { normalizeWhitespace: true });
  const sceneElements = $('.scenes-upcoming').toArray();

  return sceneElements.map((element) => {
    const actors = $('.actors a').map((actorIndex, actorElement) => $(actorElement).text()).toArray();

    return {
      url,
      shootId,
      title,
      actors,
      date,
      rating: {
        likes,
        dislikes,
        stars,
      },
      site,
    };
  });
}

async function scrapeScene(html, url, site) {
  const $ = cheerio.load(html, { normalizeWhitespace: true });

  const actors = $('.actors a').map((actorIndex, actorElement) => $(actorElement).text()).toArray();

  const rawTags = $('.tags a').map((tagIndex, tagElement) => $(tagElement).text()).toArray();
  const tags = await matchTags(rawTags);

  // The remaining fields are template placeholders to be scraped from the scene page.
  return {
    url,
    shootId,
    title,
    actors,
    director,
    date,
    tags,
    rating: {
      likes,
      dislikes,
      stars,
    },
    site,
  };
}

// The `/url` path segments below are placeholders for each site's actual listing
// paths; the unused `page` argument is available for paginated listings.
async function fetchLatest(site, page = 1) {
  const res = await bhttp.get(`${site.url}/url`);

  return scrapeLatest(res.body.toString(), site);
}

async function fetchUpcoming(site) {
  const res = await bhttp.get(`${site.url}/url`);

  return scrapeUpcoming(res.body.toString(), site);
}

async function fetchScene(url, site) {
  const res = await bhttp.get(url);

  return scrapeScene(res.body.toString(), url, site);
}

module.exports = {
  fetchLatest,
  fetchUpcoming,
  fetchScene,
};
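
/*
 * Usage sketch (an assumption, not part of the template): a scraper derived
 * from this file is called with a site object that provides at least a base
 * `url`, e.g.:
 *
 *   const site = { name: 'Example Site', url: 'https://www.example.com' };
 *   fetchLatest(site, 1).then((scenes) => console.log(scenes));
 */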