Compare commits

...

2 Commits

Author SHA1 Message Date
DebaucheryLibrarian 9edd652a2c 1.206.11 2022-02-12 16:22:03 +01:00
DebaucheryLibrarian cde760c1ea Preventing Bang! scraper from redundant deep scraping when base release is available. 2022-02-12 16:22:00 +01:00
4 changed files with 10 additions and 6 deletions

4
package-lock.json generated
View File

@@ -1,12 +1,12 @@
{
"name": "traxxx",
"version": "1.206.10",
"version": "1.206.11",
"lockfileVersion": 2,
"requires": true,
"packages": {
"": {
"name": "traxxx",
"version": "1.206.10",
"version": "1.206.11",
"license": "ISC",
"dependencies": {
"@casl/ability": "^5.2.2",

View File

@@ -1,6 +1,6 @@
{
"name": "traxxx",
"version": "1.206.10",
"version": "1.206.11",
"description": "All the latest porn releases in one place",
"main": "src/app.js",
"scripts": {

View File

@@ -102,6 +102,8 @@ async function scrapeScene(scene, entity, options) {
.replace(/[! .]/g, '')
.replace('&', 'and');
console.log(release);
return release;
}
@@ -357,6 +359,11 @@ async function fetchUpcoming(site, page = 1) {
}
async function fetchScene(url, entity, baseRelease, options) {
if (baseRelease?.entryId) {
// overview and deep data is the same, don't hit server unnecessarily
return baseRelease;
}
const encodedId = new URL(url).pathname.split('/')[2];
const entryId = decodeId(encodedId);

View File

@@ -1,9 +1,6 @@
'use strict';
const https = require('https');
const bhttp = require('bhttp');
const fetch = require('node-fetch');
const { request } = require('undici');
const express = require('express');
async function init() {