Removed direct bhttp usage from scrapers in favor of the local http module. Deleted legacy scrapers; the old code remains available in the git repository history.
@@ -1,17 +1,17 @@
 'use strict';
 
 /* eslint-disable newline-per-chained-call */
-const bhttp = require('@thependulum/bhttp');
 const cheerio = require('cheerio');
 const moment = require('moment');
 
 const { get, geta } = require('../utils/q');
 const slugify = require('../utils/slugify');
+const http = require('../utils/http');
 
 async function getPhotos(entryId, site) {
 	const { hostname } = new URL(site.url);
 
-	const res = await bhttp.get(`https://${hostname}/gallery.php?type=highres&id=${entryId}`);
+	const res = await http.get(`https://${hostname}/gallery.php?type=highres&id=${entryId}`);
 	const html = res.body.toString();
 
 	const $ = cheerio.load(html, { normalizeWhitespace: true });
@@ -159,18 +159,18 @@ async function fetchLatest(site, page = 1) {
 	const { hostname } = new URL(site.url);
 
 	if (hostname.match('private.com')) {
-		const res = await bhttp.get(`${site.url}/${page}/`);
+		const res = await http.get(`${site.url}/${page}/`);
 
 		return scrapeLatest(res.body.toString(), site);
 	}
 
-	const res = await bhttp.get(`${site.url}/scenes/${page}/`);
+	const res = await http.get(`${site.url}/scenes/${page}/`);
 
 	return scrapeLatest(res.body.toString(), site);
 }
 
 async function fetchScene(url, site) {
-	const res = await bhttp.get(url);
+	const res = await http.get(url);
 
 	return scrapeScene(res.body.toString(), url, site);
 }
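The `../utils/http` module itself is not part of this diff, so its contents are unknown here. For illustration only, the following is a minimal sketch of a wrapper with the interface the updated call sites assume: an async `get(url)` that resolves to a response whose `body` supports `toString()`, exactly as bhttp's does. The delegation to bhttp and the shared default headers are assumptions, not the actual module.

'use strict';

// Hypothetical sketch of ../utils/http, NOT the real module from this
// commit. It keeps a bhttp-compatible surface so call sites only need
// to swap the required module name.
const bhttp = require('@thependulum/bhttp');

// Assumed default; a shared wrapper is a natural single place for
// common headers, proxy settings, rate limiting and logging.
const defaultHeaders = {
	'user-agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64)',
};

async function get(url, headers = {}) {
	// Delegating to bhttp means res.body is still a Buffer, so
	// res.body.toString() keeps working unchanged at every call site.
	return bhttp.get(url, {
		headers: { ...defaultHeaders, ...headers },
	});
}

module.exports = { get };

Centralizing requests behind one wrapper like this is what makes the diff above so mechanical: every `bhttp.get(...)` becomes `http.get(...)` with identical arguments and an identical response shape.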