From cde760c1ea7cd7c44a70b5bb74bee0d73d42d3a8 Mon Sep 17 00:00:00 2001
From: DebaucheryLibrarian
Date: Sat, 12 Feb 2022 16:22:00 +0100
Subject: [PATCH] Preventing Bang! scraper from redundant deep scraping when
 base release is available.

---
 src/scrapers/bang.js      | 7 +++++++
 src/tools/realitykings.js | 3 ---
 2 files changed, 7 insertions(+), 3 deletions(-)

diff --git a/src/scrapers/bang.js b/src/scrapers/bang.js
index 27e613d9..b8f41711 100644
--- a/src/scrapers/bang.js
+++ b/src/scrapers/bang.js
@@ -102,6 +102,8 @@ async function scrapeScene(scene, entity, options) {
 		.replace(/[! .]/g, '')
 		.replace('&', 'and');
 
+	console.log(release);
+
 	return release;
 }
 
@@ -357,6 +359,11 @@ async function fetchUpcoming(site, page = 1) {
 }
 
 async function fetchScene(url, entity, baseRelease, options) {
+	if (baseRelease?.entryId) {
+		// overview and deep data is the same, don't hit server unnecessarily
+		return baseRelease;
+	}
+
 	const encodedId = new URL(url).pathname.split('/')[2];
 	const entryId = decodeId(encodedId);
 
diff --git a/src/tools/realitykings.js b/src/tools/realitykings.js
index be4e4013..91b4a09d 100644
--- a/src/tools/realitykings.js
+++ b/src/tools/realitykings.js
@@ -1,9 +1,6 @@
 'use strict';
 
-const https = require('https');
-const bhttp = require('bhttp');
 const fetch = require('node-fetch');
-const { request } = require('undici');
 const express = require('express');
 
 async function init() {
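
Note on the fetchScene guard added above: the sketch below shows the short-circuit pattern in isolation, assuming the overview scrape already produces a complete release object carrying an entryId. The helpers fetchSceneHtml and parseScene are hypothetical stand-ins for illustration, not functions from this repository.

'use strict';

// Sketch only: when the base release handed in from the overview scrape
// already has an entryId, a deep scrape would fetch the same data again,
// so return the base release directly and skip the extra HTTP request.
async function fetchScene(url, entity, baseRelease) {
	if (baseRelease?.entryId) {
		// overview and deep data are identical for this site
		return baseRelease;
	}

	const html = await fetchSceneHtml(url); // hypothetical network call
	return parseScene(html, entity); // hypothetical scene-page parser
}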