Removed direct bhttp usage from scrapers in favor of the local http module. Deleted legacy scrapers; the old code remains available in the git repository history.

DebaucheryLibrarian
2020-11-23 00:05:02 +01:00
parent 3d427f7e1d
commit 0633197793
22 changed files with 77 additions and 537 deletions
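The local http module referenced in the commit message is not itself part of this diff, so the following is only a rough sketch of the surface the scrapers rely on after the change: an async `get(url, options)` that forwards options such as `decodeJSON` and returns a bhttp-style response with `statusCode` and `body`. Everything in the block is an assumption for illustration; the real `../utils/http` may add queuing, rate limiting, default headers or proxy handling.

```js
'use strict';

// Hypothetical sketch of utils/http — the actual module is not part of this commit.
// It assumes the wrapper still delegates to bhttp internally, so scrapers keep the
// same call shape (res.statusCode, res.body) while bhttp becomes a single, centrally
// managed dependency instead of a direct require in every scraper.
const bhttp = require('@thependulum/bhttp');

async function get(url, options = {}) {
  // Forward options such as decodeJSON straight to bhttp; centralizing the call here
  // leaves room to add throttling, proxies or default headers without touching scrapers.
  return bhttp.get(url, options);
}

module.exports = {
  get,
};
```

With a wrapper of that shape, the hunks below only need to swap `bhttp.get` for `http.get`; the `decodeJSON: true` option and the response handling stay unchanged.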


@@ -1,10 +1,9 @@
 'use strict';
 
 /* eslint-disable no-unused-vars */
-const bhttp = require('@thependulum/bhttp');
-
 const { get, ed } = require('../utils/q');
 const { fetchApiLatest, fetchApiUpcoming, fetchScene, fetchApiProfile } = require('./gamma');
+const http = require('../utils/http');
 const slugify = require('../utils/slugify');
 
 function scrapeLatestNative(scenes, site) {
@@ -72,7 +71,7 @@ async function fetchLatestNative(site, page = 1) {
   }
 
   const apiUrl = `${site.url}/videos/api/?limit=50&offset=${(page - 1) * 50}&sort=datedesc`;
-  const res = await bhttp.get(apiUrl, {
+  const res = await http.get(apiUrl, {
     decodeJSON: true,
   });
 
@@ -107,7 +106,7 @@ async function fetchSceneWrapper(url, site, release) {
   if (scene.date - new Date(site.parameters?.lastNative) <= 0) {
     // scene is probably still available on Vivid site, use search API to get URL and original date
     const searchUrl = `${site.url}/videos/api/?limit=10&sort=datedesc&search=${encodeURI(scene.title)}`;
-    const searchRes = await bhttp.get(searchUrl, {
+    const searchRes = await http.get(searchUrl, {
       decodeJSON: true,
     });
 
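The comment in the last hunk describes a fallback: when a scene may still be listed on the Vivid site itself, the search API is queried by title to recover the original URL and release date. How the search response is consumed falls outside this hunk, so the fragment below is only a hypothetical continuation; the `videos` array and its `title`, `url` and `date` fields are assumptions about the response shape, not taken from the diff.

```js
// Hypothetical continuation — response shape assumed, not shown in this commit.
const match = searchRes.body.videos?.find(video => slugify(video.title) === slugify(scene.title));

if (match) {
  return {
    ...scene,
    url: `${site.url}${match.url}`,
    date: new Date(match.date),
  };
}

return scene;
```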