forked from DebaucheryLibrarian/traxxx
Removed direct bhttp usage from scrapers in favor of the local http module. Deleted legacy scrapers; the old code remains available in the git repo history.
@@ -2,12 +2,13 @@
 /* eslint-disable newline-per-chained-call */

 // const Promise = require('bluebird');
-const bhttp = require('@thependulum/bhttp');
 const { JSDOM } = require('jsdom');
 const moment = require('moment');

+const http = require('../utils/http');
+
 async function getPhotos(albumUrl) {
-  const res = await bhttp.get(albumUrl);
+  const res = await http.get(albumUrl);
   const html = res.body.toString();
   const { document } = new JSDOM(html).window;

@@ -125,13 +126,13 @@ async function scrapeScene(html, url, site) {
 }

 async function fetchLatest(site, page = 1) {
-  const res = await bhttp.get(`https://dogfartnetwork.com/tour/scenes/?p=${page}`);
+  const res = await http.get(`https://dogfartnetwork.com/tour/scenes/?p=${page}`);

   return scrapeLatest(res.body.toString(), site);
 }

 async function fetchScene(url, site) {
-  const res = await bhttp.get(url);
+  const res = await http.get(url);

   return scrapeScene(res.body.toString(), url, site);
 }
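
Note: the body of the local ../utils/http module is not part of this diff. A minimal sketch of what such a wrapper might look like, assuming it is a thin layer over bhttp that preserves the response shape (res.body as a Buffer) so call sites like res.body.toString() keep working unchanged:

// Hypothetical sketch of utils/http.js -- not shown in this commit's diff.
// Assumes a thin wrapper over bhttp that preserves the response shape
// (res.body is a Buffer), so scraper call sites such as
// res.body.toString() continue to work without changes.
const bhttp = require('@thependulum/bhttp');

async function get(url, options = {}) {
  // Single choke point for future defaults (headers, proxies, rate limits).
  return bhttp.get(url, options);
}

async function post(url, body, options = {}) {
  return bhttp.post(url, body, options);
}

module.exports = {
  get,
  post,
};

Centralizing requests behind one module like this gives a single place to later add default headers, proxying, rate limiting, or a different HTTP client without touching the individual scrapers.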