Moved Killergram to Aylo. Added profile tests.

DebaucheryLibrarian
2026-01-12 01:31:30 +01:00
parent 1a2bf77692
commit db62652dc8
9 changed files with 52 additions and 140 deletions

View File

@@ -182,7 +182,7 @@ async function fetchProfile({ url }, { entity, parameters }) {
const query = new URLSearchParams({
cms_data_value_ids: actorId,
-   cms_block_id: entity.parameters.modelBlockId,
+   cms_block_id: entity.parameters.modelBlockId || entity.parameters.blockId,
cms_data_type_id: 4,
}).toString();
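
A minimal sketch of the new fallback, assuming a hypothetical entity whose parameters only define blockId (everything beyond the two parameter names shown in this hunk is an assumption; URLSearchParams is a Node global, so no import is needed):

// hypothetical entity configuration: only blockId is set, no modelBlockId
const entity = { parameters: { blockId: 17 } };
const actorId = 12345;

const query = new URLSearchParams({
  cms_data_value_ids: actorId,
  cms_block_id: entity.parameters.modelBlockId || entity.parameters.blockId, // now falls back to the generic block ID
  cms_data_type_id: 4,
}).toString();

// query === 'cms_data_value_ids=12345&cms_block_id=17&cms_data_type_id=4'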

View File

@@ -106,7 +106,7 @@ function scrapeProfile({ query }, url, entity) {
const avatarSources = query.srcset('.girl-details-photo-content picture source', 'srcset') || [query.img('.girl-details-photo')];
-   profile.avatar = getPoster(avatarSources);
+   profile.avatar = getPoster(avatarSources).slice(0, 3); // returns a metric ton of links; if the first few don't work, the rest presumably won't either
profile.social = query.urls('.girl-details-social-media-list a');
profile.scenes = scrapeAll(qu.initAll(query.all('.video-card')), entity);
@@ -127,7 +127,7 @@ async function fetchLatest(channel, page) {
}
async function fetchProfile(baseActor, { entity }) {
-   const url = `${entity.url}/${entity.parameters?.actor || 'pornstar'}/${slugify(baseActor.name, '')}/`;
+   const url = `${entity.url}/${entity.parameters?.actor || 'pornstar'}/${slugify(baseActor.name, '-')}/`;
const res = await qu.get(url);
if (res.ok) {
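
For reference, a rough sketch of what the separator change means for the generated profile URL, using a simplified stand-in for the project's utils/slugify helper (its exact behaviour is an assumption here, not shown in this diff):

// simplified stand-in for utils/slugify: lowercase, split on non-alphanumerics, join with the delimiter
const slugify = (name, delimiter = '-') => name.toLowerCase().split(/[^a-z0-9]+/).filter(Boolean).join(delimiter);

slugify('Example Name', '');  // 'examplename' (previous URL segment)
slugify('Example Name', '-'); // 'example-name' (URL segment after this change)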

View File

@@ -1,119 +0,0 @@
'use strict';
const qu = require('../utils/qu');
const slugify = require('../utils/slugify');
function scrapeAll({ query }) {
const urls = query.urls('td > a:not([href*=joinnow])').map((pathname) => `http://killergram.com/${encodeURI(pathname)}`);
const posters = query.imgs('td > a img');
const titles = query.all('.episodeheadertext', true);
const actors = query.all('.episodetextinfo:nth-child(3)').map((el) => query.all(el, 'a', true));
const channels = query.all('.episodetextinfo:nth-child(2) a', true).map((channel) => slugify(channel, ''));
if ([urls.length, posters.length, titles.length, actors.length, channels.length].every((value, index, array) => value === array[0])) { // make sure every set has the same number of items
const releases = urls.map((url, index) => ({
url,
entryId: new URL(url).searchParams.get('id'),
title: titles[index],
actors: actors[index],
channel: channels[index],
poster: posters[index],
}));
return releases;
}
return [];
}
function scrapeScene({ query, html }, url) {
const release = {};
release.entryId = new URL(url).searchParams.get('id');
release.date = query.date('.episodetext', 'DD MMMM YYYY', /\d{2} \w+ \d{4}/);
release.description = query.q('.episodetext tr:nth-child(5) td:nth-child(2)', true);
release.actors = query.all('.modelstarring a', true);
const duration = html.match(/(\d+) minutes/)?.[1];
const channelUrl = query.url('a[href*="ct=site"]');
if (duration) release.duration = Number(duration) * 60;
if (channelUrl) {
const siteName = new URL(`https://killergram.com/${channelUrl}`).searchParams.get('site');
release.channel = slugify(siteName, '');
}
[release.poster, ...release.photos] = query.imgs('img[src*="/models"]');
return release;
}
async function fetchActorReleases({ query }, url, remainingPages, actorName, accReleases = []) {
const releases = scrapeAll({ query }).filter((release) => release.actors.includes(actorName));
if (remainingPages.length > 0) {
const { origin, pathname, searchParams } = new URL(url);
searchParams.set('p', remainingPages[0]);
const nextPage = `${origin}${pathname}?${searchParams}`;
const res = await qu.get(nextPage, '#episodes > table');
if (res.ok) {
return fetchActorReleases(res.item, url, remainingPages.slice(1), actorName, accReleases.concat(releases));
}
}
return accReleases.concat(releases);
}
async function scrapeProfile({ query }, actorName, url, include) {
const profile = {};
profile.avatar = {
src: `http://thumbs.killergram.com/models/${encodeURI(actorName)}/modelprofilethumb.jpg`,
process: {
crop: {
top: 4,
left: 4,
width: 289,
height: 125,
},
},
};
if (include.releases) {
const availablePages = query.all('.pageboxdropdown option', 'value');
profile.releases = await fetchActorReleases(qu.init(query.q('#episodes > table')), url, availablePages.slice(1), actorName);
}
return profile;
}
async function fetchLatest(channel, page = 1) {
const res = await qu.get(`${channel.url}&p=${((page - 1) * 15) + 1}`, '#episodes > table');
return res.ok ? scrapeAll(res.item, channel) : res.status;
}
async function fetchScene(url, channel) {
const res = await qu.get(url, '#episodes > table');
return res.ok ? scrapeScene(res.item, url, channel) : res.status;
}
async function fetchProfile({ name: actorName }, entity, include) {
const url = `http://killergram.com/episodes.asp?page=episodes&model=${encodeURI(actorName)}&ct=model`;
const res = await qu.get(url, '#content', null, {
followRedirects: false,
});
return res.ok ? scrapeProfile(res.item, actorName, url, include) : res.status;
}
module.exports = {
fetchLatest,
fetchScene,
fetchProfile,
};

View File

@@ -37,7 +37,6 @@ const jesseloadsmonsterfacials = require('./jesseloadsmonsterfacials');
const julesjordan = require('./julesjordan');
const karups = require('./karups');
const kellymadison = require('./kellymadison');
- const killergram = require('./killergram');
const kink = require('./kink');
const mariskax = require('./mariskax');
// const analvids = require('./analvids');
@@ -144,7 +143,6 @@ const scrapers = {
karups,
kellymadison,
'8kmembers': kellymadison,
- killergram,
kink,
// kinkvr: badoink,
// analvids,
@@ -164,7 +162,6 @@ const scrapers = {
perfectgonzo,
pervcity,
pierrewoodman,
- pimpxxx: cherrypimps,
pinkyxxx,
porncz,
pornpros: whalemember,
@@ -208,6 +205,7 @@ const scrapers = {
familysinners: aylo,
gaywire: aylo,
iconmale: aylo,
+ killergram: aylo,
letsdoeit: aylo,
men: aylo,
metrohd: aylo,
@@ -296,6 +294,10 @@ const scrapers = {
mamacitaz: porndoe,
transbella: porndoe,
vipsexvault: porndoe,
+ // aziani
+ aziani,
+ '2poles1hole': aziani,
+ creampiled: aziani,
// etc
'18vr': badoink,
theflourishxxx: theflourish,
@@ -308,9 +310,6 @@ const scrapers = {
analviolation: fullpornnetwork,
angelogodshackoriginal,
asiam: modelmedia,
- aziani,
- '2poles1hole': aziani,
- creampiled: aziani,
babevr: badoink,
baddaddypov: fullpornnetwork,
badoinkvr: badoink,
@@ -342,7 +341,6 @@ const scrapers = {
karups,
kellymadison,
'8kmembers': kellymadison,
- killergram,
kink,
kinkmen: kink,
kinkvr: kink,
@@ -365,7 +363,6 @@ const scrapers = {
dpdiva: pervcity,
pervertgallery: fullpornnetwork,
pierrewoodman,
- pimpxxx: cherrypimps,
porncz,
pornhub,
pornworld,
@@ -388,7 +385,7 @@ const scrapers = {
rawattack: spizoo,
spizoo,
teamskeet,
- teencoreclub,
+ // teencoreclub,
teenmegaworld,
testedefudelidade,
tokyohot,
@@ -407,7 +404,6 @@ const scrapers = {
slayed: vixen,
wifey: vixen,
vrcosplayx: badoink,
- wildoncam: cherrypimps,
},
};

View File

@@ -108,7 +108,7 @@ function scrapeProfile({ query }, entity) {
profile.url = unprint.prefixUrl(data.url, entity.url);
profile.dateOfBirth = unprint.extractDate(data.birthDate, 'MMMM DD, YYYY');
-   profile.birthPlace = data.nationality; // origin country rather than nationality
+   profile.birthPlace = data.nationality?.name || data.nationality; // origin country rather than nationality
// height and weight are provided in both cm and lbs, but this seems to be a manual conversion; the format isn't always the same
profile.height = unprint.extractNumber(data.height, { match: /(\d+)\s*cm/, matchIndex: 1 });
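
A quick sketch of why the optional chain helps, with hypothetical payload shapes (the real API response isn't shown in this diff):

// the fallback accepts either an object with a name property or a plain string
const fromObject = { nationality: { name: 'United Kingdom' } };
const fromString = { nationality: 'United Kingdom' };

[fromObject, fromString].map((data) => data.nationality?.name || data.nationality);
// => ['United Kingdom', 'United Kingdom'], so both shapes end up as a plain string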

View File

@@ -747,7 +747,7 @@ function extractSizes(sizes) {
}
// SpermMania, Handjob Japan
- function scrapeProfile({ query }, channel, url) {
+ function scrapeProfile({ query }, _channel, url) {
const profile = { url };
const bio = Object.fromEntries(query.all('.actr-item, .profile tr, #profile tr, .profile-info li, .model-detail .item, .model-item').map((bioEl) => [
@@ -784,17 +784,17 @@ function scrapeProfile({ query }, channel, url) {
profile.leg = unprint.extractNumber(bio.leg_length);
profile.thigh = unprint.extractNumber(bio.thigh_width);
-   profile.social = [bio.homepage, bio.twitter].filter(Boolean);
+   profile.socials = [bio.homepage, bio.twitter].filter((social) => /^http/.test(social));
const avatar = query.img('.scene-array img[src*="/actress"], img.portrait, .profile-img img')
|| query.img('.costume-bg', { attribute: 'data-img' })
|| query.style('.model-profile, #profile, .carousel-item')?.['background-image']?.match(/url\((.*)\)/)?.[1];
if (avatar) {
-     profile.avatar = [
+     profile.avatar = Array.from(new Set([
avatar.replace('-header.jpg', '.jpg'), // Transex Japan, prefer avatar over header banner
avatar,
-     ];
+     ]));
}
profile.photos = [
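
The Set wrapper keeps the header-banner preference without introducing duplicates when the replace is a no-op; a small sketch with hypothetical image URLs:

const banner = 'https://example.com/models/1-header.jpg';
const plain = 'https://example.com/models/2.jpg';

Array.from(new Set([banner.replace('-header.jpg', '.jpg'), banner]));
// => ['https://example.com/models/1.jpg', 'https://example.com/models/1-header.jpg'], preferred variant first

Array.from(new Set([plain.replace('-header.jpg', '.jpg'), plain]));
// => ['https://example.com/models/2.jpg'], where the old array form would have listed the same URL twice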
@@ -805,7 +805,7 @@ function scrapeProfile({ query }, channel, url) {
return profile;
}
- function scrapeProfileLesbian({ query, html }, channel, url) {
+ function scrapeProfileLesbian({ query, html }, _channel, url) {
const profile = { url };
profile.age = query.number('//strong[contains(text(), "Age")]/following-sibling::text()[1]');
@@ -823,7 +823,7 @@ function scrapeProfileLesbian({ query, html }, channel, url) {
profile.hip = measurements.hip;
}
-   profile.avatar = html.match(/https:\/\/img.uralesbian.com\/models\/\d+\.jpg/)?.[0];
+   profile.avatar = html.match(/https:\/\/(img|cdn).uralesbian.com\/models\/\d+\.jpg/)?.[0];
return profile;
}
@@ -833,7 +833,7 @@ async function fetchProfile({ slug, url: actorUrl }, { entity, parameters }) {
? `${parameters.actors}/${slug}`
: `${entity.url}/actress/${slug}`);
-   const res = await unprint.get(url);
+   const res = await unprint.get(url, { followRedirects: false });
if (res.ok) {
if (parameters.layout === 'lesbian') {