Removed stray slice from Gamma scraper.

This commit is contained in:
DebaucheryLibrarian
2026-01-11 16:58:33 +01:00
parent 24adf04fe3
commit 91981a6dd7
7 changed files with 54 additions and 29 deletions

View File

@@ -833,7 +833,7 @@ async function scrapeActors(argNames) {
const entitySlugs = sources.flat();
const [entitiesBySlug, existingActorEntries] = await Promise.all([
fetchEntitiesBySlug(entitySlugs, { types: ['channel', 'network', 'info'], prefer: argv.prefer }),
fetchEntitiesBySlug(entitySlugs, { types: ['channel', 'network', 'info'], prefer: argv.prefer || 'channel' }),
knex('actors')
.select(knex.raw('actors.id, actors.name, actors.slug, actors.entry_id, actors.entity_id, row_to_json(entities) as entity'))
.whereIn('actors.slug', baseActors.map((baseActor) => baseActor.slug))

View File

@@ -218,7 +218,7 @@ function curateTitle(title, channel) {
}
async function scrapeApiReleases(json, site, options) {
return json.slice(0, 6).reduce((acc, scene) => {
return json.reduce((acc, scene) => {
if (options.parameters?.extract && scene.sitename !== options.parameters.extract) {
return acc;
}
@@ -658,7 +658,7 @@ function scrapeApiProfile(data, releases, siteSlug) {
return profile;
}
async function fetchLatestApi(site, page = 1, options, preData, upcoming = false) {
async function fetchLatestApi(site, page = 1, options, _preData, upcoming = false) {
const referer = options.parameters?.referer || `${options.parameters?.networkReferer ? site.parent.url : site.url}/en/videos`;
const { apiUrl } = await fetchApiCredentials(referer, site);
const slug = options.parameters.querySlug || site.slug;

View File

@@ -136,7 +136,7 @@ async function fetchScene(url, channel) {
}
async function fetchProfile({ name: actorName }, entity, include) {
const url = `${entity.url}/models/${slugify(actorName)}.en.html`;
const url = `${new URL(entity.url).origin}/models/${slugify(actorName)}.en.html`;
const res = await unprint.get(url);
return res.ok ? scrapeProfile(res.context, url, include) : res.status;

View File

@@ -277,19 +277,36 @@ const scrapers = {
interracialpovs: hush,
povpornstars: hush,
seehimfuck: hush,
// wankzvr
wankzvr,
tranzvr: wankzvr,
milfvr: wankzvr,
// nubiles
anilos: nubiles,
brattysis: nubiles,
deeplush: nubiles,
hotcrazymess: nubiles,
nfbusty: nubiles,
nubilefilms: nubiles,
nubiles,
thatsitcomshow: nubiles,
// porndoe
amateureuro: porndoe,
forbondage: porndoe,
mamacitaz: porndoe,
transbella: porndoe,
vipsexvault: porndoe,
// etc
'18vr': badoink,
theflourishxxx: theflourish,
adultempire,
archangel,
allherluv: missax,
amateureuro: porndoe,
americanpornstar,
analbbc: fullpornnetwork,
analized: fullpornnetwork,
analviolation: fullpornnetwork,
angelogodshackoriginal,
anilos: nubiles,
asiam: modelmedia,
aziani,
'2poles1hole': aziani,
@@ -305,23 +322,19 @@ const scrapers = {
vurigvlaanderen: bluedonkeymedia,
boobpedia,
bradmontana,
brattysis: nubiles,
cherrypimps,
cumlouder,
deeplush: nubiles,
dorcelclub: dorcel,
doubleviewcasting: firstanalquest,
dtfsluts: fullpornnetwork,
exploitedx, // only from known URL that will specify site
firstanalquest,
forbondage: porndoe,
freeones,
girlfaction: fullpornnetwork,
hergape: fullpornnetwork,
hitzefrei,
homemadeanalwhores: fullpornnetwork,
hookuphotshot,
hotcrazymess: nubiles,
inthecrack,
jamesdeen: fullpornnetwork,
jerkaoke: modelmedia,
@@ -339,18 +352,13 @@ const scrapers = {
// analvids,
analvids: pornbox,
littlecapricedreams,
mamacitaz: porndoe,
mariskax,
milfvr: wankzvr,
missax,
mylf: teamskeet,
mugfucked: fullpornnetwork,
naughtyamerica,
tonightsgirlfriend: naughtyamerica,
nebraskacoeds: elevatedx,
nfbusty: nubiles,
nubilefilms: nubiles,
nubiles,
onlyprince: fullpornnetwork,
pascalssubsluts,
pervcity,
@@ -383,11 +391,7 @@ const scrapers = {
teencoreclub,
teenmegaworld,
testedefudelidade,
thatsitcomshow: nubiles,
tokyohot,
transbella: porndoe,
tranzvr: wankzvr,
vipsexvault: porndoe,
virtualtaboo,
darkroomvr: virtualtaboo,
onlytarts: virtualtaboo,
@@ -403,7 +407,6 @@ const scrapers = {
slayed: vixen,
wifey: vixen,
vrcosplayx: badoink,
wankzvr,
wildoncam: cherrypimps,
},
};

View File

@@ -105,9 +105,9 @@ function scrapeScene({ query, window }, { url }) {
function scrapeProfile({ query }) {
const profile = {};
const bioKeys = query.contents('.pornstar-detail__params--top strong, .actor-detail__param-name');
const bioValues = query.exists('.actor-detail__param-value')
? query.contents('.actor-detail__param-value')
const bioKeys = query.contents('.pornstar-detail__params--top strong, .actor-detail__param-name, td.pornstar-detail__info--title');
const bioValues = query.exists('.actor-detail__param-value, .pornstar-detail__info--title')
? query.contents('.actor-detail__param-value, .pornstar-detail__info--title + td')
: query.text('.pornstar-detail__params--top', { join: false })?.map((text) => text.split('•')[0].replace(':', '').trim());
const bio = Object.fromEntries(bioKeys.map((key, index) => [slugify(key, '_'), bioValues[index]]));

View File

@@ -133,7 +133,7 @@ async function scrapeProfile({ query }, url, entity, options) {
profile.description = query.cnt('.person__content');
profile.gender = entity.slug === 'tranzvr' ? 'transsexual' : 'female';
profile.age = bio.age;
profile.age = Number(bio.age) || null;
profile.birthPlace = bio.birthplace;