Fixed some Gamma scene scrapers.

DebaucheryLibrarian 2021-01-29 04:26:45 +01:00
parent 1fc67704dc
commit 6938e88fbf
8 changed files with 5 additions and 5 deletions

Binary image added (1.2 MiB)

Binary image added (8.6 KiB)

Binary image added (948 KiB)

Binary image added (39 KiB)


@@ -685,6 +685,7 @@ const tagPhotos = [
   ['69', 2, 'Abigail Mac and Kissa Sins in "Lesbian Anal Workout" for HardX'],
   ['airtight', 7, 'Lana Rhoades in "Gangbang Me 3" for HardX'],
   ['airtight', 6, 'Remy Lacroix in "Ass Worship 14" for Jules Jordan'],
+  ['airtight', 10, 'Asa Akira in "Asa Akira To The Limit" for Jules Jordan'],
   ['airtight', 8, 'Veronica Leal in LegalPorno SZ2520'],
   ['airtight', 5, 'Chloe Amour in "DP Masters 4" for Jules Jordan'],
   ['airtight', 3, 'Anita Bellini in "Triple Dick Gangbang" for Hands On Hardcore (DDF Network)'],


@@ -2,8 +2,8 @@
 const { fetchScene, fetchLatest, fetchUpcoming, fetchProfile } = require('./gamma');
-async function fetchSceneWrapper(url, site, baseRelease) {
-  const release = await fetchScene(url, site, baseRelease);
+async function fetchSceneWrapper(url, site, baseRelease, options) {
+  const release = await fetchScene(url, site, baseRelease, options);
   if (site.isNetwork && release.channel) {
     const channelUrl = url.replace('blowpass.com', `${release.channel}.com`);
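
The hunk above only threads the new options argument through the Blowpass wrapper. Below is a rough, standalone sketch of that pattern, not part of the commit: the Gamma fetchScene is stubbed in place of the real require('./gamma'), the channel name is made up, and what the real wrapper does with channelUrl after this hunk is an assumption, since the diff cuts off there.

// Sketch only: the shared Gamma scraper is stubbed so this runs standalone;
// in the repository it is imported with require('./gamma').
async function fetchScene(url, site, baseRelease, options) {
  // stand-in return value; the channel name is purely illustrative
  return { url, channel: 'examplechannel', options };
}

async function fetchSceneWrapper(url, site, baseRelease, options) {
  // forward every argument, including the new `options`, to the Gamma scraper
  const release = await fetchScene(url, site, baseRelease, options);

  if (site.isNetwork && release.channel) {
    // network scene URLs also resolve on the channel's own domain
    const channelUrl = url.replace('blowpass.com', `${release.channel}.com`);
    release.url = channelUrl; // assumption: the real wrapper's follow-up is outside this hunk
  }

  return release;
}

// usage
fetchSceneWrapper('https://www.blowpass.com/en/video/12345', { isNetwork: true }, null, {})
  .then((release) => console.log(release.url)); // https://www.examplechannel.com/en/video/12345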


@@ -2,7 +2,6 @@
 const { fetchLatest, fetchUpcoming, fetchScene } = require('./gamma');
 module.exports = {
   fetchLatest,
   fetchScene,


@@ -3,10 +3,10 @@
 const { fetchLatest, fetchUpcoming, scrapeScene, fetchProfile } = require('./gamma');
 const http = require('../utils/http');
-async function fetchScene(url, site) {
+async function fetchScene(url, site, baseRelease, options) {
   const res = await http.get(url);
-  const release = await scrapeScene(res.body.toString(), url, site);
+  const release = await scrapeScene(res.body.toString(), url, site, baseRelease, null, options);
   const siteDomain = release.$('meta[name="twitter:domain"]').attr('content') || 'allblackx.com'; // only AllBlackX has no twitter domain, no other useful hints available
   const siteSlug = siteDomain && siteDomain.split('.')[0].toLowerCase();
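
For context, a minimal standalone sketch of the updated fetchScene flow in the last hunk, not part of the commit: http and scrapeScene are stubbed, the example HTML and URL are invented, and the cheerio lookup the real code does via release.$ is swapped for a plain regex so the snippet has no dependencies.

// Sketch only: fetchScene now forwards baseRelease and options to the shared
// Gamma scrapeScene, then derives the site slug from the twitter:domain meta
// tag, falling back to allblackx.com (the one site without that tag).
const http = {
  // stand-in for ../utils/http; returns a fake scene page
  async get(url) {
    return { body: Buffer.from('<meta name="twitter:domain" content="example.com">') };
  },
};

async function scrapeScene(html, url, site, baseRelease, channel, options) {
  // stand-in for the Gamma scraper; just echo its inputs
  return { html, url, site, baseRelease, channel, options };
}

async function fetchScene(url, site, baseRelease, options) {
  const res = await http.get(url);
  const release = await scrapeScene(res.body.toString(), url, site, baseRelease, null, options);

  // regex stands in for the cheerio release.$ lookup used in the real scraper
  const match = /name="twitter:domain" content="([^"]+)"/.exec(release.html);
  const siteDomain = (match && match[1]) || 'allblackx.com';
  const siteSlug = siteDomain.split('.')[0].toLowerCase();

  return { ...release, siteSlug };
}

// usage
fetchScene('https://www.example.com/en/video/12345', {}, null, {})
  .then((release) => console.log(release.siteSlug)); // example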