Added missing Bang sites, updated channel URLs.

DebaucheryLibrarian
2026-02-01 04:50:29 +01:00
parent 2e9e678c51
commit 85fbc9d2f2
4 changed files with 56 additions and 45 deletions


@@ -89,6 +89,28 @@ function scrapeAll(scenes, entity) {
   });
 }
+
+async function fetchLatest(channel, page = 1) {
+  const url = `${channel.url}?by=date.desc&page=${page}`;
+  const res = await unprint.get(url, { selectAll: '.video_container' });
+
+  if (res.ok) {
+    return scrapeAll(res.context, channel);
+  }
+
+  return res.status;
+}
+
+async function fetchUpcoming(channel) {
+  const url = `${channel.url}?by=date.desc&early-access=true`;
+  const res = await unprint.get(url, { selectAll: '.video_container' });
+
+  if (res.ok) {
+    return scrapeAll(res.context, channel);
+  }
+
+  return res.status;
+}
 
 async function scrapeScene({ query }, { url, entity }) {
   const release = {};
   const data = query.json('//script[contains(text(), "VideoObject")]');
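
For context, a minimal usage sketch of the new fetchLatest, not part of the commit; the channel object and URL below are hypothetical, shaped after the way channel.url is composed above (bare site URL, query string appended with `?`):

// Hypothetical usage; the channel URL is illustrative, not from this diff.
const channel = { url: 'https://www.bang.com/originals' };

// Fetch the channel's latest scenes, newest first (page 1 by default).
// Resolves to scrapeAll()'s releases on success, or the HTTP status code on failure.
const releases = await fetchLatest(channel);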
@@ -177,28 +199,6 @@ async function scrapeProfile({ query, element }, url, entity, include) {
   return profile;
 }
-
-async function fetchLatest(channel, page = 1) {
-  const url = `${channel.url}&by=date.desc&page=${page}`;
-  const res = await unprint.get(url, { selectAll: '.video_container' });
-
-  if (res.ok) {
-    return scrapeAll(res.context, channel);
-  }
-
-  return res.status;
-}
-
-async function fetchUpcoming(channel) {
-  const url = `${channel.url}&by=date.desc&early-access=true`;
-  const res = await unprint.get(url, { selectAll: '.video_container' });
-
-  if (res.ok) {
-    return scrapeAll(res.context, channel);
-  }
-
-  return res.status;
-}
 
 async function fetchProfile({ name: actorName }, { entity }, include) {
   const searchRes = await unprint.get(`https://www.bang.com/pornstars?term=${slugify(actorName, '+')}`);
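
As a side note, the search URL above appears to rely on slugify lowercasing the name and joining words with the given delimiter; that behavior is inferred from this call site, not confirmed by the diff:

// Assumed behavior, based on how the search term is built above.
slugify('First Last', '+'); // => 'first+last' (assumption)
// producing e.g. https://www.bang.com/pornstars?term=first+last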


@@ -99,7 +99,7 @@ async function scrapeProfile(data) {
   profile.birthPlace = bio.born;
   profile.dateOfBirth = unprint.extractDate(bio.birthdate, 'YYYY-MM-DD');
-  profile.age = bio.age;
+  profile.age = bio.age; // not always in data even when displayed on site
   profile.measurements = bio.measurements;
   profile.height = convert(bio.height, 'cm');
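
The new comment notes that bio.age can be missing even when the site displays an age. A minimal fallback sketch, not in the commit, that derives the age from the dateOfBirth extracted above (assuming unprint.extractDate returns a Date):

// Hypothetical fallback: compute age from date of birth when the API omits it.
if (!profile.age && profile.dateOfBirth) {
  const msPerYear = 365.25 * 24 * 60 * 60 * 1000;
  profile.age = Math.floor((Date.now() - profile.dateOfBirth.getTime()) / msPerYear);
}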
@@ -121,11 +121,7 @@ async function scrapeProfile(data) {
 async function fetchProfile(actor, context) {
   const url = `${context.channel.url}/models/${actor.slug}`;
 
-  const res = await unprint.get(url, {
-    parser: {
-      runScripts: 'dangerously',
-    },
-  });
+  const res = await unprint.get(url);
 
   if (res.ok) {
     const data = res.context.query.json('#__NEXT_DATA__');
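
The removed parser option told JSDOM to execute page scripts (runScripts: 'dangerously' is a JSDOM setting). Dropping it works because Next.js serializes its page state as inert JSON inside the #__NEXT_DATA__ script tag, so the data can be read without running any scripts. A rough standalone equivalent of the query.json call above, with the selector and payload shape assumed from standard Next.js output:

// Hypothetical plain-DOM equivalent of res.context.query.json('#__NEXT_DATA__').
const raw = document.querySelector('#__NEXT_DATA__')?.textContent;
const nextData = raw ? JSON.parse(raw) : null; // e.g. { props: { pageProps: { ... } } }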