Added upcoming scraper to PurgatoryX.

DebaucheryLibrarian 2021-11-28 01:20:39 +01:00
parent dfeb0c08cf
commit f04eb72891
1 changed file with 27 additions and 7 deletions

@@ -37,12 +37,26 @@ function scrapeAll(scenes) {
       }
     }
 
-    console.log(release.photos);
-
     return release;
   });
 }
 
+function scrapeUpcoming({ query }) {
+  const release = {};
+
+  release.url = query.url('.bottom-info a');
+  release.entryId = new URL(release.url).pathname.match(/\/view\/(\d+)/)?.[1];
+
+  release.title = query.cnt('.title');
+
+  release.actors = query.all('.model-wrap li').map((el) => ({
+    name: query.cnt(el, 'h5'),
+    url: query.url(el, '.model-thumb a'),
+    avatar: query.img(el, '.model-thumb img'),
+  }));
+
+  return release;
+}
+
 function scrapeScene({ query }, url) {
   const release = {};
@@ -65,8 +79,6 @@ function scrapeScene({ query }, url) {
   release.comment = query.cnt('.series');
 
-  console.log(release);
-
   return release;
 }
@@ -80,6 +92,16 @@ async function fetchLatest(channel, page) {
   return res.status;
 }
 
+async function fetchUpcoming(channel) {
+  const res = await qu.get(channel.url, '.upcoming-info-wrap');
+
+  if (res.ok && res.item) {
+    return [scrapeUpcoming(res.item, channel)];
+  }
+
+  return res.status;
+}
+
 function scrapeProfile({ query }, url) {
   const profile = { url };
@@ -101,9 +123,6 @@ function scrapeProfile({ query }, url) {
   profile.scenes = scrapeAll(qu.initAll(query.all('.content-item')));
 
-  console.log(bio);
-  console.log(profile);
-
   return profile;
 }
@@ -147,6 +166,7 @@ async function fetchProfile(baseActor, context, include, retry = false) {
 module.exports = {
   fetchLatest,
   fetchProfile,
+  fetchUpcoming,
   scrapeAll,
   scrapeScene,
 };
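
The entryId in scrapeUpcoming comes from the numeric segment of the /view/<id>/ path in the scene URL. A minimal standalone sketch of that extraction; the URL below is made up purely for illustration, the scraper itself reads the real one from the '.bottom-info a' link:

// Hypothetical URL for illustration; only the /view/<id>/ path shape matters.
const url = 'https://www.purgatoryx.com/view/1234/example-scene';

// URL#pathname is '/view/1234/example-scene'; the capture group grabs the digits,
// and optional chaining leaves entryId undefined when the pattern does not match.
const entryId = new URL(url).pathname.match(/\/view\/(\d+)/)?.[1];

console.log(entryId); // '1234'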
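
For context, a rough sketch of how the new fetchUpcoming export might be driven. The module path and the channel object are assumptions for illustration; the diff only shows that fetchUpcoming reads channel.url and resolves to either an array with one scraped release or the response status.

// Hypothetical driver; './purgatoryx' and the channel shape are assumed,
// not taken from this commit.
const scraper = require('./purgatoryx');

async function main() {
  const channel = { url: 'https://www.purgatoryx.com' }; // assumed base URL

  // Resolves to [release] when the '.upcoming-info-wrap' element is found,
  // otherwise to the HTTP status code.
  const upcoming = await scraper.fetchUpcoming(channel);
  console.log(upcoming);
}

main().catch(console.error);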