forked from DebaucheryLibrarian/traxxx
Prevent the Bang! scraper from redundant deep scraping when the base release is already available.
This commit is contained in:
parent
19c7f958e1
commit
cde760c1ea
|
@ -102,6 +102,8 @@ async function scrapeScene(scene, entity, options) {
|
||||||
.replace(/[! .]/g, '')
|
.replace(/[! .]/g, '')
|
||||||
.replace('&', 'and');
|
.replace('&', 'and');
|
||||||
|
|
||||||
|
console.log(release);
|
||||||
|
|
||||||
return release;
|
return release;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -357,6 +359,11 @@ async function fetchUpcoming(site, page = 1) {
|
||||||
}
|
}
|
||||||
|
|
||||||
async function fetchScene(url, entity, baseRelease, options) {
|
async function fetchScene(url, entity, baseRelease, options) {
|
||||||
|
if (baseRelease?.entryId) {
|
||||||
|
// overview and deep data is the same, don't hit server unnecessarily
|
||||||
|
return baseRelease;
|
||||||
|
}
|
||||||
|
|
||||||
const encodedId = new URL(url).pathname.split('/')[2];
|
const encodedId = new URL(url).pathname.split('/')[2];
|
||||||
const entryId = decodeId(encodedId);
|
const entryId = decodeId(encodedId);
|
||||||
|
|
||||||
|
|
|
@ -1,9 +1,6 @@
|
||||||
'use strict';
|
'use strict';
|
||||||
|
|
||||||
const https = require('https');
|
|
||||||
const bhttp = require('bhttp');
|
|
||||||
const fetch = require('node-fetch');
|
const fetch = require('node-fetch');
|
||||||
const { request } = require('undici');
|
|
||||||
const express = require('express');
|
const express = require('express');
|
||||||
|
|
||||||
async function init() {
|
async function init() {
|
||||||
|
|
Loading…
Reference in New Issue