Removed direct bhttp usage from scrapers in favor of the local http module. Deleted legacy scrapers, as the old code remains available via git repo history.

This commit is contained in:
DebaucheryLibrarian
2020-11-23 00:05:02 +01:00
parent 3d427f7e1d
commit 0633197793
22 changed files with 77 additions and 537 deletions

View File

@@ -2,7 +2,6 @@
const util = require('util');
const Promise = require('bluebird');
const bhttp = require('@thependulum/bhttp');
const cheerio = require('cheerio');
const { JSDOM } = require('jsdom');
const moment = require('moment');
@@ -13,7 +12,7 @@ const { heightToCm } = require('../utils/convert');
const slugify = require('../utils/slugify');
async function fetchPhotos(url) {
const res = await bhttp.get(url);
const res = await http.get(url);
return res.body.toString();
}
@@ -369,7 +368,7 @@ async function fetchLatest(site, page = 1, include, preData, entryIdFromTitle =
? util.format(site.parameters.latest, page)
: `${site.url}/trial/categories/movies_${page}_d.html`;
// const res = await bhttp.get(url);
// const res = await http.get(url);
const res = await qu.getAll(url, '.update_details');
return res.ok ? scrapeAll(res.items, site, entryIdFromTitle) : res.status;