Removed direct bhttp usage from scrapers in favor of the local http module. Deleted legacy scrapers; the old code remains available in the git history.

DebaucheryLibrarian
2020-11-23 00:05:02 +01:00
parent 3d427f7e1d
commit 0633197793
22 changed files with 77 additions and 537 deletions


@@ -1,9 +1,10 @@
 'use strict';
-const bhttp = require('@thependulum/bhttp');
 const { JSDOM } = require('jsdom');
 const moment = require('moment');
+const http = require('../utils/http');
 const ethnicityMap = {
   White: 'Caucasian',
 };
@@ -59,8 +60,8 @@ async function fetchProfile({ name: actorName }) {
   const pornstarUrl = `https://pornhub.com/pornstar/${actorSlug}`;
   const [modelRes, pornstarRes] = await Promise.all([
-    bhttp.get(modelUrl),
-    bhttp.get(pornstarUrl),
+    http.get(modelUrl),
+    http.get(pornstarUrl),
   ]);
   const model = modelRes.statusCode === 200 && await scrapeProfile(modelRes.body.toString(), modelUrl, actorName);
@@ -75,7 +76,7 @@ async function fetchProfile({ name: actorName }) {
   */
   const pornstarUrl = `https://pornhub.com/pornstar/${actorSlug}`;
-  const pornstarRes = await bhttp.get(pornstarUrl);
+  const pornstarRes = await http.get(pornstarUrl);
   return scrapeProfile(pornstarRes.body.toString(), pornstarUrl, actorName);
 }
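
The change above swaps direct bhttp.get calls for http.get from the local ../utils/http module, which this commit does not include. As a minimal sketch, assuming the wrapper simply delegates to bhttp and keeps the response object the call sites rely on (res.statusCode, res.body.toString()), it might look like the following; the shape is illustrative, not the repository's actual implementation:

// utils/http.js: hypothetical sketch, not the module shipped in this repository
'use strict';

const bhttp = require('@thependulum/bhttp');

// Thin delegation keeps the bhttp response object, so callers can keep
// checking res.statusCode and calling res.body.toString() as before.
async function get(url, options = {}) {
  return bhttp.get(url, options);
}

async function post(url, body, options = {}) {
  return bhttp.post(url, body, options);
}

module.exports = {
  get,
  post,
};

Routing every request through one local module gives the scrapers a single place to later add defaults such as headers, rate limiting or proxy support without touching each scraper again.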