Removed direct bhttp usage from scrapers in favor of local http module. Deleted legacy scrapers, as old code is available via git repo history.
This commit is contained in:
@@ -1,9 +1,8 @@
|
||||
'use strict';
|
||||
|
||||
const bhttp = require('bhttp');
|
||||
|
||||
const qu = require('../utils/qu');
|
||||
const slugify = require('../utils/slugify');
|
||||
const http = require('../utils/http');
|
||||
|
||||
function scrapeAll(scenes, site, origin) {
|
||||
return scenes.map(({ query }) => {
|
||||
@@ -150,14 +149,14 @@ async function fetchLatest(channel, page = 1) {
|
||||
|
||||
/**
 * Fetch a scene page and scrape its details.
 *
 * DDF's main site moved to Porn World, so the page is fetched directly at
 * the given URL; the old ddfnetwork.com pathname rewrite was removed
 * (previous implementation is available via git repo history).
 *
 * @param {string} url - Scene page URL.
 * @param {Object} site - Site/channel context passed through to scrapeScene.
 * @returns {Promise<Object|number>} Scraped scene data on success, or the
 *   HTTP status code when the request/selector match fails.
 */
async function fetchScene(url, site) {
	// qu.get resolves once one of these container selectors is present
	const res = await qu.get(url, '.content, #content, .taspVideoPage');

	return res.ok ? scrapeScene(res.item, url, site) : res.status;
}
|
||||
|
||||
async function fetchProfile({ name: actorName }) {
|
||||
const resSearch = await bhttp.post('https://ddfnetwork.com/search/ajax',
|
||||
const resSearch = await http.post('https://ddfnetwork.com/search/ajax',
|
||||
{
|
||||
type: 'hints',
|
||||
word: actorName,
|
||||
@@ -180,7 +179,7 @@ async function fetchProfile({ name: actorName }) {
|
||||
const [actor] = resSearch.body.list.pornstarsName;
|
||||
const url = `https://ddfnetwork.com${actor.href}`;
|
||||
|
||||
const resActor = await bhttp.get(url);
|
||||
const resActor = await http.get(url);
|
||||
|
||||
if (resActor.statusCode !== 200) {
|
||||
return null;
|
||||
|
||||
Reference in New Issue
Block a user