Removed direct bhttp usage from scrapers in favor of the local http module. Deleted legacy scrapers; the old code remains available in the git repository history.

This commit is contained in:
DebaucheryLibrarian
2020-11-23 00:05:02 +01:00
parent 3d427f7e1d
commit 0633197793
22 changed files with 77 additions and 537 deletions

View File

@@ -1,9 +1,8 @@
'use strict';
const bhttp = require('bhttp');
const qu = require('../utils/qu');
const slugify = require('../utils/slugify');
const http = require('../utils/http');
function scrapeAll(scenes, site, origin) {
return scenes.map(({ query }) => {
@@ -150,14 +149,14 @@ async function fetchLatest(channel, page = 1) {
// Fetch a scene page and delegate parsing to scrapeScene.
// Returns the scraped scene on success, or the HTTP status code on failure.
async function fetchScene(url, site) {
	// DDF's main site moved to Porn World
	// const res = await http.get(`https://ddfnetwork.com${new URL(url).pathname}`);
	const res = await qu.get(url, '.content, #content, .taspVideoPage');

	if (res.ok) {
		return scrapeScene(res.item, url, site);
	}

	return res.status;
}
async function fetchProfile({ name: actorName }) {
const resSearch = await bhttp.post('https://ddfnetwork.com/search/ajax',
const resSearch = await http.post('https://ddfnetwork.com/search/ajax',
{
type: 'hints',
word: actorName,
@@ -180,7 +179,7 @@ async function fetchProfile({ name: actorName }) {
const [actor] = resSearch.body.list.pornstarsName;
const url = `https://ddfnetwork.com${actor.href}`;
const resActor = await bhttp.get(url);
const resActor = await http.get(url);
if (resActor.statusCode !== 200) {
return null;