Separated Filthy Kings into its channels, upgraded Gamma scraper to accommodate.

DebaucheryLibrarian
2023-07-09 04:35:30 +02:00
parent c51577098a
commit 88a56794aa
58 changed files with 152 additions and 120 deletions
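The main scraper change is in scrapeApiReleases: instead of mapping the API hits to a flat array and filtering out nulls, it now reduces them into { scenes, unextracted }, deriving each release's channel slug from scene.mainChannel?.id with scene.sitename as a fallback. Below is a minimal standalone sketch of that accumulation pattern; splitReleases, the simplified slugify, and the sample hits are illustrative stand-ins, not the scraper's actual code.

// A minimal sketch of the reduce-based accumulation the updated scrapeApiReleases uses.
// splitReleases, this simplified slugify, and the sample hits are illustrative stand-ins.
function slugify(value, delimiter = '-') {
  return String(value).toLowerCase().replace(/[^a-z0-9]+/g, delimiter);
}

function splitReleases(hits, options) {
  return hits.reduce((acc, scene) => {
    const release = { title: scene.title };

    // When a whole network is scraped with filterNetwork set, scenes that belong to a
    // dedicated channel are set aside instead of being dropped.
    if (options.parameters.filterNetwork && scene.mainChannel) {
      return { ...acc, unextracted: acc.unextracted.concat(release) };
    }

    // Prefer the channel ID over the network-level sitename; '' strips the dashes.
    release.channel = slugify(scene.mainChannel?.id || scene.sitename, '');

    return { ...acc, scenes: acc.scenes.concat(release) };
  }, { scenes: [], unextracted: [] });
}

// Example: one scene attributed to a dedicated channel, one without a mainChannel.
const { scenes, unextracted } = splitReleases([
  { title: 'A', sitename: 'filthykings', mainChannel: { id: 'filthy-kings' } },
  { title: 'B', sitename: 'filthykings' },
], { parameters: { filterNetwork: true } });

console.log(scenes, unextracted); // [ { title: 'B', channel: 'filthykings' } ] [ { title: 'A' } ]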


@@ -143,8 +143,8 @@ async function getPhotos(albumPath, site, includeThumbnails = true) {
   }
 }

-async function getFullPhotos(entryId, site) {
-  const res = await http.get(`${site.url}/media/signPhotoset/${entryId}`, {
+async function getFullPhotos(entryId, site, parameters) {
+  const res = await http.get(`${parameters.album || site.url}/media/signPhotoset/${entryId}`, {
     headers: {
       'X-Requested-With': 'XMLHttpRequest',
     },
@@ -193,8 +193,8 @@ async function getThumbs(entryId, site, parameters) {

 async function getPhotosApi(entryId, site, parameters) {
   const [photos, thumbs] = await Promise.all([
-    getFullPhotos(entryId, site, parameters),
-    getThumbs(entryId, site, parameters),
+    getFullPhotos(entryId, site, parameters).catch(() => { logger.error(`Gamma scraper failed to fetch photos for ${entryId}`); return []; }),
+    getThumbs(entryId, site, parameters).catch(() => { logger.error(`Gamma scraper failed to fetch photos for ${entryId}`); return []; }),
   ]);

   return photos.concat(thumbs.slice(photos.length));
@@ -217,14 +217,14 @@ function curateTitle(title, channel) {
   return title.replace(new RegExp(`^\\s*${channel.name}\\s*[:|-]\\s`, 'i'), '');
 }

-async function scrapeApiReleases(json, site) {
-  return json.map((scene) => {
-    if (site.parameters?.extract && scene.sitename !== site.parameters.extract) {
-      return null;
+async function scrapeApiReleases(json, site, options) {
+  return json.reduce((acc, scene) => {
+    if (options.parameters?.extract && scene.sitename !== options.parameters.extract) {
+      return acc;
     }

-    if (site.parameters?.filterExclusive && scene.availableOnSite.length > 1) {
-      return null;
+    if (options.parameters?.filterExclusive && scene.availableOnSite.length > 1) {
+      return acc;
     }

     const release = {
@@ -238,8 +238,11 @@ async function scrapeApiReleases(json, site) {
     release.title = curateTitle(scene.title, site);
     release.path = `/${scene.url_title}/${release.entryId}`;

-    if (site.parameters?.scene) release.url = `${site.parameters.scene}${release.path}`;
-    else if (site.url && site.parameters?.scene !== false) release.url = `${site.url}/en/video${release.path}`;
+    if (options.parameters?.scene) {
+      release.url = `${options.parameters.scene}${release.path}`;
+    } else if (site.url && options.parameters?.scene !== false) {
+      release.url = `${site.url}/en/video${release.path}`;
+    }

     release.date = moment.utc(scene.release_date, 'YYYY-MM-DD').toDate();
     release.director = scene.directors[0]?.name || null;
@@ -276,10 +279,24 @@ async function scrapeApiReleases(json, site) {
       ];
     }

+    if (options.parameters.filterNetwork && scene.mainChannel) {
+      return {
+        ...acc,
+        unextracted: acc.unextracted.concat(release),
+      };
+    }
+
+    release.channel = slugify(scene.mainChannel?.id || scene.sitename, ''); // remove -
     // release.movie = `${site.url}/en/movie/${scene.url_movie_title}/${scene.movie_id}`;

-    return release;
-  }).filter(Boolean);
+    return {
+      ...acc,
+      scenes: acc.scenes.concat(release),
+    };
+  }, {
+    scenes: [],
+    unextracted: [],
+  });
 }

 function scrapeAll(scenes, site, networkUrl, hasTeaser = true) {
@@ -476,7 +493,7 @@ async function scrapeReleaseApi(data, site, options, movieScenes) {
     release.scenes = await Promise.all(movieScenes.map((movieScene) => scrapeReleaseApi(movieScene, site, options)));
   }

-  release.channel = data.sitename;
+  release.channel = slugify(data.mainChannel?.id || data.sitename, ''); // remove -
   release.qualities = data.download_sizes;

   return release;
@@ -638,11 +655,15 @@ async function fetchLatestApi(site, page = 1, options, preData, upcoming = false
   const referer = options.parameters?.referer || `${options.parameters?.networkReferer ? site.parent.url : site.url}/en/videos`;
   const { apiUrl } = await fetchApiCredentials(referer, site);

+  const params = `query=&hitsPerPage=36&maxValuesPerFacet=100&page=${page - 1}&facetFilters=[["lesbian:"],["bisex:"],["shemale:"],["upcoming:${upcoming ? 1 : 0}"]]${options.parameters.queryChannel
+    ? `&filters=channels.id:${options.parameters.queryChannel === true ? site.slug : options.parameters.queryChannel}`
+    : `&filters=sitename:${site.slug}`}`;
+
   const res = await http.post(apiUrl, {
     requests: [
       {
         indexName: 'all_scenes',
-        params: `query=&hitsPerPage=36&maxValuesPerFacet=100&page=${page - 1}&facetFilters=[["lesbian:"],["bisex:"],["shemale:"],["upcoming:${upcoming ? 1 : 0}"]]&filters=sitename:${site.slug}`, // OR channels.id:${site.slug}`,
+        params,
       },
     ],
   }, {
@@ -654,7 +675,7 @@ async function fetchLatestApi(site, page = 1, options, preData, upcoming = false
   });

   if (res.status === 200 && res.body.results?.[0]?.hits) {
-    return scrapeApiReleases(res.body.results[0].hits, site);
+    return scrapeApiReleases(res.body.results[0].hits, site, options);
   }

   return res.status;