Added Brazzers scraper. Removed scene-only sites from the database in favor of automatic network fallback.

2019-04-04 20:22:47 +02:00
parent 784542253b
commit 6190247ac5
9 changed files with 494 additions and 29 deletions
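
The network fallback itself is not part of this file's diff. As a rough sketch of the idea, scene lookups could first try a scraper registered for the specific site and otherwise fall back to the scraper of its parent network; the `scrapers` map and the `site.slug` / `site.network.slug` fields below are assumptions for illustration, not names taken from this repository.

// Hypothetical sketch of the automatic network fallback described in the commit
// message. If no scraper is registered for the site itself, fall back to the
// scraper registered for its parent network. Property names are assumptions.
function resolveScraper(site, scrapers) {
  return scrapers[site.slug] || scrapers[site.network.slug] || null;
}

async function fetchSceneWithFallback(url, site, scrapers) {
  const scraper = resolveScraper(site, scrapers);

  if (!scraper) {
    throw new Error(`No scraper found for '${site.name}' or its network`);
  }

  return scraper.fetchScene(url, site);
}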

src/scrapers/template.js Normal file

@@ -0,0 +1,93 @@
'use strict';

/* eslint-disable */

// Scraper template: copy this file for a new site and fill in the placeholder values and selectors.
const bhttp = require('bhttp');
const cheerio = require('cheerio');
const moment = require('moment');
const { matchTags } = require('../tags');

function scrapeLatest(html, site) {
  const $ = cheerio.load(html, { normalizeWhitespace: true });
  const sceneElements = $('.scenes-latest').toArray();

  return sceneElements.map((element) => {
    // Placeholders: derive each value from `element` with site-specific selectors.
    return {
      url,
      shootId,
      title,
      actors,
      date,
      rating: {
        likes,
        dislikes,
        stars,
      },
      site,
    };
  });
}

function scrapeUpcoming(html, site) {
  const $ = cheerio.load(html, { normalizeWhitespace: true });
  const sceneElements = $('.scenes-upcoming').toArray();

  return sceneElements.map((element) => {
    // Placeholders: derive each value from `element` with site-specific selectors.
    return {
      url,
      shootId,
      title,
      actors,
      date,
      rating: {
        likes,
        dislikes,
        stars,
      },
      site,
    };
  });
}

function scrapeScene(html, url, site) {
  const $ = cheerio.load(html, { normalizeWhitespace: true });

  // Placeholders: derive each value from the scene page with site-specific selectors.
  return {
    url,
    shootId,
    title,
    actors,
    director: '',
    date,
    rating: {
      likes,
      dislikes,
      stars,
    },
    site,
  };
}

async function fetchLatest(site) {
  // Placeholder path: replace '/url' with the site's latest-scenes listing page.
  const res = await bhttp.get(`${site.url}/url`);
  return scrapeLatest(res.body.toString(), site);
}

async function fetchUpcoming(site) {
  // Placeholder path: replace '/url' with the site's upcoming-scenes listing page.
  const res = await bhttp.get(`${site.url}/url`);
  return scrapeUpcoming(res.body.toString(), site);
}

async function fetchScene(url, site) {
  const res = await bhttp.get(url);
  return scrapeScene(res.body.toString(), url, site);
}

module.exports = {
  fetchLatest,
  fetchUpcoming,
  fetchScene,
};
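
As a usage note, a concrete scraper built from this template replaces the shorthand placeholders with real cheerio extractions. Below is a minimal sketch of a filled-in scene scraper, assuming the same requires as the template; the selectors and date format are invented for illustration and do not belong to any real site.

// Illustrative only: the selectors and the date format below are invented,
// not taken from any real site. Assumes cheerio and moment as required above.
function scrapeSceneExample(html, url, site) {
  const $ = cheerio.load(html, { normalizeWhitespace: true });

  const title = $('h1.scene-title').text().trim();
  const actors = $('.scene-actors a').map((index, el) => $(el).text().trim()).get();
  const date = moment.utc($('.scene-date').text().trim(), 'MMMM D, YYYY').toDate();
  const likes = Number($('.rating .likes').text()) || 0;
  const dislikes = Number($('.rating .dislikes').text()) || 0;

  return {
    url,
    shootId: $('.scene-id').text().trim(),
    title,
    actors,
    date,
    rating: {
      likes,
      dislikes,
      stars: null,
    },
    site,
  };
}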