Removed direct bhttp usage from scrapers in favor of the local http module. Deleted legacy scrapers, since the old code remains available in the git repo history.

This commit is contained in:
DebaucheryLibrarian
2020-11-23 00:05:02 +01:00
parent 3d427f7e1d
commit 0633197793
22 changed files with 77 additions and 537 deletions

View File

@@ -1,12 +1,12 @@
'use strict';
/* eslint-disable newline-per-chained-call */
const bhttp = require('@thependulum/bhttp');
const cheerio = require('cheerio');
const moment = require('moment');
const logger = require('../logger')(__filename);
const slugify = require('../utils/slugify');
const http = require('../utils/http');
const { get, getAll, ex } = require('../utils/q');
function scrape(html, site) {
@@ -192,7 +192,7 @@ async function fetchLatest(site, page = 1) {
/*
async function fetchUpcoming(site) {
const res = await bhttp.get('https://www.bangbros.com');
const res = await http.get('https://www.bangbros.com');
return scrapeUpcoming(res.body.toString(), site);
}
@@ -224,13 +224,13 @@ async function fetchScene(url, site, release) {
async function fetchProfile({ name: actorName }, scope) {
const actorSlug = slugify(actorName);
const url = `https://bangbros.com/search/${actorSlug}`;
const res = await bhttp.get(url);
const res = await http.get(url);
if (res.statusCode === 200) {
const actorUrl = scrapeProfileSearch(res.body.toString(), actorName);
if (actorUrl) {
const actorRes = await bhttp.get(actorUrl);
const actorRes = await http.get(actorUrl);
if (actorRes.statusCode === 200) {
return scrapeProfile(actorRes.body.toString(), scope);