Moved Arch Angel to Full Porn Network and adapted scraper.

This commit is contained in:
DebaucheryLibrarian
2026-01-20 04:28:49 +01:00
parent b2116f728f
commit 2a4dce106e
8 changed files with 113 additions and 219 deletions

View File

@@ -1,112 +1,115 @@
'use strict';

const unprint = require('unprint');

const { get, geta, ctxa } = require('../utils/q');
const { convert } = require('../utils/convert');
const slugify = require('../utils/slugify');
const tryUrls = require('../utils/try-urls');
/**
 * Scrapes scene summaries from a channel listing page.
 *
 * @param {Array<{ query: Object }>} scenes - unprint element contexts, one per scene tile
 * @param {Object} channel - channel entity; `channel.url` is used as URL origin for actor links
 * @returns {Array<Object>} release summaries (url, entryId, title, duration, actors, poster, teaser)
 */
function scrapeAll(scenes, channel) {
  return scenes.map(({ query }) => {
    const release = {};

    release.url = query.url('a');
    // Entry ID is the slugified trailer path segment, e.g. /trailers/<slug>
    release.entryId = slugify(new URL(release.url).pathname.match(/trailers\/(.*)/)[1]);

    release.title = query.content('h2 a');
    release.duration = query.duration('.video-data');

    release.actors = query.all('a[href*="models/"], a[href*="sets.php"]').map((actorEl) => ({
      name: unprint.query.content(actorEl),
      url: unprint.query.url(actorEl, null, { origin: channel.url }),
    }));

    release.poster = query.img('.thumbnail img');

    const teaser = query.video('.thumbnail img', { attribute: 'data-vid' }); // not a mistake, video source is on img tag

    if (!teaser?.includes('blur')) { // seemingly global SFW placeholder when the URL contains 'blur'
      release.teaser = teaser;
    }

    return release;
  });
}
/**
 * Fetches and scrapes a page of the most recent movie releases for a channel.
 *
 * @param {Object} channel - channel entity with a base `url`
 * @param {number} [page=1] - 1-based page of results
 * @returns {Promise<Array<Object>|number>} scraped releases, or the HTTP status on failure
 */
async function fetchLatest(channel, page = 1) {
  const url = `${channel.url}/porn-categories/movies/?page=${page}&sort=most-recent`; // parameter order matters for some reason!
  const res = await unprint.get(url, { selectAll: '.content div[data-setid]' });

  if (res.ok) {
    return scrapeAll(res.context, channel);
  }

  return res.status;
}
/**
 * Scrapes a single scene (trailer) page.
 *
 * @param {{ query: Object }} context - unprint document context for the scene page
 * @param {{ url: string, entity: Object }} options - scene URL and owning entity; `entity.origin` anchors actor links
 * @returns {Object} release details (entryId, title, description, date, actors, tags, poster, trailer)
 */
function scrapeScene({ query }, { url, entity }) {
  const release = {};

  // Entry ID is the slugified trailer path segment, consistent with scrapeAll
  release.entryId = slugify(new URL(url).pathname.match(/trailers\/(.*)/)[1]);

  release.title = query.content('h1.title_bar');
  release.description = query.content('.description-text, #description');

  // Two layout variants: date label followed by a <p>, or by a bare text node
  release.date = query.date('//label[contains(text(), \'Date\')]/following-sibling::p[1]', 'YYYY-MM-DD')
    || query.date('//label[contains(text(), \'Date Added\')]/following-sibling::text()[1]', 'YYYY-MM-DD');

  release.actors = query.all('#preview a[href*="/models"]').map((actorEl) => ({
    name: unprint.query.content(actorEl),
    url: unprint.query.url(actorEl, null, { origin: entity.origin }),
  }));

  release.tags = query.contents('#preview a[href*="categories/"]');

  release.poster = query.poster('#preview video');
  release.trailer = query.video('#preview video source');

  return release;
}
/**
 * Scrapes an actor profile page.
 *
 * @param {{ query: Object }} context - unprint document context for the profile page
 * @param {{ url: string }} options - canonical profile URL, stored on the result
 * @returns {Object} profile (url, avatar, description, aliases, age, dateOfBirth, measurements, height)
 */
function scrapeProfile({ query }, { url }) {
  const profile = { url };

  // Bio table: each .model-details > div has an <h2> label; key is the label
  // slugified to snake_case (e.g. 'Date of Birth' -> date_of_birth), value is the raw text.
  const bio = Object.fromEntries(query.all('.model-details > div').map((bioEl) => [
    slugify(unprint.query.content(bioEl, 'h2'), '_'),
    unprint.query.text(bioEl),
  ]));

  // Prefer highest-resolution avatar source; fall back through lower densities
  profile.avatar = [
    query.img('.model_bio_thumb', { attribute: 'src0_3x' }),
    query.img('.model_bio_thumb', { attribute: 'src0_2x' }),
    query.img('.model_bio_thumb', { attribute: 'src0_1x' }),
    query.img('.model_bio_thumb'),
  ].filter(Boolean);

  profile.description = [query.content('.model-bio-text, #performer-description'), bio.funfact].filter(Boolean).join(' ');
  profile.aliases = bio.alias?.split(/[,\n]/).map((alias) => alias.trim());

  // The 'age' bio field may contain either a plain age or a MM/DD/YYYY birth date
  profile.age = parseInt(bio.age, 10) || null;
  profile.dateOfBirth = unprint.extractDate(bio.age, 'MM/DD/YYYY');
  profile.measurements = bio.measurements;
  profile.height = Number(bio.height?.match(/(\d+)\s*cm/)?.[1]) || convert(bio.height, 'cm');

  return profile;
}
/**
 * Fetches a single scene page and scrapes it.
 *
 * NOTE(review): this still uses the legacy q-based `get` helper and the old
 * `scrapeScene(item, url, site)` call shape, while scrapeScene now expects
 * `({ query }, { url, entity })` — confirm against the framework whether this
 * path is still exercised or should be migrated to unprint as well.
 */
async function fetchScene(url, site) {
  const res = await get(url, 'main');

  return res.ok && res.item ? scrapeScene(res.item, url, site) : res.status;
}

/**
 * Fetches an actor profile, trying the known URL first and falling back to
 * slug-derived model URLs, then scrapes the first page that responds OK.
 *
 * @param {{ name: string, url: string }} actor - actor name and (optional) known profile URL
 * @param {{ entity: Object, include: Object }} options - owning entity and include flags
 * @returns {Promise<Object|number>} scraped profile, or the HTTP status on failure
 */
async function fetchProfile({ name: actorName, url: actorUrl }, { entity, include }) {
  const { res, url } = await tryUrls([
    actorUrl,
    `${entity.url}/models/${slugify(actorName, '')}.html`,
    `${entity.url}/models/${slugify(actorName, '-')}.html`,
  ]);

  if (res.ok) {
    return scrapeProfile(res.context, { entity, include, url });
  }

  return res.status;
}
// Scraper interface consumed by the scraper framework.
module.exports = {
  fetchLatest,
  fetchScene,
  fetchProfile,
  scrapeScene,
};