Added Arch Angel, updated BAM Visions scraper to accommodate Arch Angel (different network, same unidentified CMS).
@@ -1,5 +1,7 @@
 'use strict';
 
+const format = require('template-format');
+
 const { get, geta, initAll, formatDate } = require('../utils/qu');
 const slugify = require('../utils/slugify');
 
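The new template-format dependency is what lets the URL patterns further down be supplied per site instead of being hard-coded. Its exact placeholder syntax is not visible in this diff; the sketch below is a hypothetical stand-in with the same call shape as the format(template, data) calls introduced here, assuming {name}-style tokens.

// Hypothetical stand-in for format() from template-format; only the call shape
// (template string + data object) is taken from this diff, the {name} token
// syntax is an assumption.
function formatTemplate(template, data) {
  return template.replace(/\{(\w+)\}/g, (match, key) => (key in data ? String(data[key]) : match));
}

// formatTemplate('https://example.com/movies/{page}/latest/', { page: 2 })
//   -> 'https://example.com/movies/2/latest/'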
@@ -68,7 +70,10 @@ function scrapeScene({ html, qu }, url, site) {
 }
 
 async function fetchActorReleases(actorId, site, page = 1, accScenes = []) {
-  const url = `${site.url}/sets.php?id=${actorId}&page=${page}`;
+  const url = site.parameters?.sets
+    ? `${site.parameters.sets}?id=${actorId}&page=${page}`
+    : `${site.url}/sets.php?id=${actorId}&page=${page}`;
 
   const res = await get(url);
 
   if (!res.ok) return [];
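With this hunk, a channel can point the per-actor release listing at a different endpoint through site.parameters.sets, while channels without that parameter keep the original /sets.php path. A hedged illustration of the two configurations (the URLs are placeholders, not the real network addresses; only the parameters.sets key comes from the diff):

// Channel without overrides: fetchActorReleases builds
// `${site.url}/sets.php?id=${actorId}&page=${page}` as before.
const defaultStyleSite = {
  url: 'https://www.example-bamvisions-like.com',
};

// Channel on the same CMS but with a different tour layout: the sets endpoint
// is supplied explicitly and only the query string is appended.
const archAngelStyleSite = {
  url: 'https://www.example-archangel-like.com',
  parameters: {
    sets: 'https://www.example-archangel-like.com/tour/sets.php',
  },
};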
@@ -86,6 +91,11 @@ async function fetchActorReleases(actorId, site, page = 1, accScenes = []) {
 }
 
 async function scrapeProfile({ qu }, site, withScenes) {
+  if (!qu.exists('.content')) {
+    // page probably returned a 404 with a 200 HTTP code
+    return null;
+  }
+
   const profile = {};
 
   const bio = qu.all('.stats li', true).reduce((acc, row) => {
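The new guard treats a profile page without a .content element as a soft 404, since the CMS apparently answers unknown model URLs with HTTP 200 and an error page. A minimal sketch of the same check outside the project's qu wrapper, using cheerio as an assumed stand-in:

const cheerio = require('cheerio'); // assumption: any DOM helper with a CSS selector API would do

// Returns true when the markup lacks the .content container that real
// profile pages are expected to render.
function looksLikeSoft404(html) {
  const $ = cheerio.load(html);

  return $('.content').length === 0;
}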
@@ -120,7 +130,9 @@ async function scrapeProfile({ qu }, site, withScenes) {
 }
 
 async function fetchLatest(site, page = 1) {
-  const url = `${site.url}/categories/movies/${page}/latest/`;
+  const url = site.parameters?.latest
+    ? format(site.parameters.latest, { page })
+    : `${site.url}/categories/movies/${page}/latest/`;
   const res = await geta(url, '.item-episode');
 
   return res.ok ? scrapeAll(res.items, site) : res.status;
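fetchLatest gains the same kind of override: when site.parameters.latest is set, it is treated as a template and expanded with the page number; otherwise the original /categories/movies/<page>/latest/ path is used. For example (the template string and its {page} token are assumptions, not values from the real configuration):

// Hypothetical channel entry; only the parameters.latest key and the { page }
// interpolation come from the diff.
const site = {
  url: 'https://www.example-network.com',
  parameters: {
    latest: 'https://www.example-network.com/tour/categories/movies/{page}/latest/',
  },
};

// With parameters.latest set, page 3 would be requested from
// format(site.parameters.latest, { page: 3 }), i.e. something like
// 'https://www.example-network.com/tour/categories/movies/3/latest/'.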
@@ -132,18 +144,31 @@ async function fetchScene(url, site) {
   return res.ok ? scrapeScene(res.item, url, site) : res.status;
 }
 
+async function fetchMovies(channel, page) {
+  console.log(channel, page);
+}
+
 async function fetchProfile({ name: actorName }, { site }, include) {
   const actorSlugA = slugify(actorName, '');
   const actorSlugB = slugify(actorName);
 
-  const resA = await get(`${site.url}/models/${actorSlugA}.html`);
-  const res = resA.ok ? resA : await get(`${site.url}/models/${actorSlugB}.html`);
+  const urlA = site.parameters?.profile
+    ? format(site.parameters.profile, { slug: actorSlugA })
+    : `${site.url}/models/${actorSlugA}.html`;
+
+  const urlB = site.parameters?.profile
+    ? format(site.parameters.profile, { slug: actorSlugB })
+    : `${site.url}/models/${actorSlugB}.html`;
+
+  const resA = await get(urlA);
+  const res = resA.ok ? resA : await get(urlB);
 
   return res.ok ? scrapeProfile(res.item, site, include.scenes) : res.status;
 }
 
 module.exports = {
   fetchLatest,
+  fetchMovies,
   fetchScene,
   fetchProfile,
 };
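fetchProfile keeps its two-attempt strategy: it tries a slug without a separator first (slugify(actorName, '')) and falls back to the default hyphenated slug, and both candidate URLs can now come from a parameters.profile template. A generalised sketch of that fallback pattern, with a hypothetical fetch helper standing in for the project's get() utility:

// Not project code: a generic "first successful response wins" helper that
// mirrors the resA/res fallback above.
async function getFirstOk(urls, fetchFn) {
  let last = null;

  for (const url of urls) {
    last = await fetchFn(url); // sequential on purpose: stop at the first ok response

    if (last.ok) {
      return last;
    }
  }

  return last; // the caller still gets the final (failed) response
}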
@@ -71,6 +71,7 @@ const scrapers = {
   amateurallure,
   americanpornstar,
   amateureuro: porndoe,
+  archangel: bamvisions,
   assylum,
   aziani,
   badoink,
@@ -155,6 +156,7 @@ const scrapers = {
   analized: fullpornnetwork,
   analviolation: fullpornnetwork,
   anilos: nubiles,
+  archangel: bamvisions,
   aziani,
   babes: mindgeek,
   babevr: badoink,
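Both registry hunks map the new archangel slug onto the existing bamvisions scraper, the pattern this file already uses for channels that share a CMS with another network (for example amateureuro: porndoe and anilos: nubiles). How such a mapping is typically consumed is sketched below; the resolution function is hypothetical, only the mapping shape comes from the diff.

// Hypothetical lookup: prefer a channel-specific scraper, then fall back to
// the parent network's scraper. Only the { archangel: bamvisions } mapping
// style is taken from the diff.
function resolveScraper(scrapers, channel) {
  return scrapers[channel.slug] || (channel.network && scrapers[channel.network.slug]) || null;
}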