Added beforeNetwork hook, used by MindGeek. Added Filthy Kings to Gamma.
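For context, a minimal sketch of the hook contract this commit introduces, assuming only the shapes visible in the diff below (the stub and sketch names are hypothetical, not part of the codebase): a network-level scraper exports `beforeNetwork`, the updater resolves it once per network, and each channel fetcher prefers that shared result over opening its own session.

```js
// Minimal sketch of the beforeNetwork contract; names suffixed with
// "Stub"/"Sketch" are hypothetical stand-ins, not part of the codebase.
async function getSessionStub(entity, parameters) {
	// The real MindGeek helper performs the session handshake; the diff
	// below shows it resolving to { session, instanceToken }.
	return { session: {}, instanceToken: 'example-token' };
}

// A network-level scraper exports the hook, mirroring
// `beforeNetwork: getSession` from the module.exports hunk below.
const scraperStub = {
	beforeNetwork: (networkEntity) => getSessionStub(networkEntity, networkEntity.parameters),
};

// A fetcher prefers the pre-resolved session, mirroring the
// `options.beforeNetwork || await getSession(...)` pattern below.
async function fetchLatestSketch(site, options) {
	const { session, instanceToken } = options.beforeNetwork
		|| await getSessionStub(site, options.parameters);

	return { session, instanceToken };
}

// Usage: resolve the hook once per network and fan it out to every channel,
// so sibling channels share one session instead of one handshake each.
(async () => {
	const networkEntity = { parameters: {}, scraper: scraperStub };
	const beforeNetwork = await networkEntity.scraper.beforeNetwork?.(networkEntity);

	console.log(await fetchLatestSketch({ slug: 'examplesite' }, { beforeNetwork, parameters: {} }));
})();
```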
[Binary image assets changed; file sizes range from 2.0 KiB to 820 KiB]
@@ -3171,6 +3171,17 @@ const sites = [
 		url: 'https://blackambush.com',
 		parent: 'exploitedx',
 	},
+	// FILTHY KINGS
+	{
+		slug: 'filthykings',
+		name: 'Filthy Kings',
+		url: 'https://www.filthykings.com',
+		parent: 'gamma',
+		independent: true,
+		parameters: {
+			layout: 'api',
+		},
+	},
 	// FIRST ANAL QUEST
 	{
 		slug: 'firstanalquest',
@@ -112,7 +112,7 @@ async function scrapeRelease(baseRelease, entitiesBySlug, type = 'scene') {
 
 	const options = {
 		...include,
-		beforeFetchScene: entity.preData,
+		beforeFetchScenes: entity.preData,
 		parameters: getRecursiveParameters(entity),
 	};
 
@@ -172,8 +172,8 @@ async function scrapeRelease(baseRelease, entitiesBySlug, type = 'scene') {
 
 async function scrapeReleases(baseReleases, entitiesBySlug, type) {
 	const entitiesWithBeforeDataEntries = await Promise.all(Object.entries(entitiesBySlug).map(async ([slug, entity]) => {
-		if (entity.scraper?.beforeFetchScene) {
-			const preData = await entity.scraper.beforeFetchScene(entity);
+		if (entity.scraper?.beforeFetchScenes) {
+			const preData = await entity.scraper.beforeFetchScenes(entity);
 
 			return [slug, { ...entity, preData }];
 		}
@@ -237,7 +237,7 @@ async function fetchLatest(site, page = 1, options) {
 	const { searchParams } = new URL(url);
 	const siteId = searchParams.get('site');
 
-	const { session, instanceToken } = await getSession(site, options.parameters);
+	const { session, instanceToken } = options.beforeNetwork || await getSession(site, options.parameters);
 
 	const beforeDate = moment().add('1', 'day').format('YYYY-MM-DD');
 	const limit = 10;
@@ -294,7 +294,7 @@ async function fetchScene(url, site, baseScene, options) {
 	}
 
 	const entryId = new URL(url).pathname.match(/\/(\d+)/)?.[1];
-	const { session, instanceToken } = options.beforeFetchScene || await getSession(site, options.parameters);
+	const { session, instanceToken } = options.beforeFetchScenes || await getSession(site, options.parameters);
 
 	const res = await http.get(`https://site-api.project1service.com/v2/releases/${entryId}`, {
 		session,
@@ -363,7 +363,8 @@ async function fetchProfile({ name: actorName, slug: actorSlug }, { entity, para
 }
 
 module.exports = {
-	beforeFetchScene: getSession,
+	beforeNetwork: getSession,
+	beforeFetchScenes: getSession,
 	scrapeLatestX,
 	fetchLatest,
 	fetchUpcoming,
@@ -107,6 +107,7 @@ async function scrapeReleases(scraper, entity, preData, isUpcoming) {
 	const options = {
 		...config.options[scraper.slug],
 		...include,
+		...preData,
 		parameters: getRecursiveParameters(entity),
 	};
@@ -207,7 +208,7 @@ async function scrapeChannelReleases(scraper, channelEntity, preData) {
 		scrapeMovies(scraper, channelEntity, preData),
 	]);
 
-	logger.info(`Fetching ${latestReleases.uniqueReleases.length} latest and ${upcomingReleases.uniqueReleases.length} upcoming updates for '${channelEntity.name}' (${channelEntity.parent?.name})`);
+	logger.info(`Fetching ${argv.latest ? latestReleases.uniqueReleases.length : 'no'} latest and ${argv.upcoming ? upcomingReleases.uniqueReleases.length : 'no'} upcoming updates for '${channelEntity.name}' (${channelEntity.parent?.name})`);
 
 	return {
 		uniqueReleases: [...latestReleases.uniqueReleases, ...upcomingReleases.uniqueReleases],
@@ -215,7 +216,7 @@ async function scrapeChannelReleases(scraper, channelEntity, preData) {
 	};
 }
 
-async function scrapeChannel(channelEntity, accNetworkReleases) {
+async function scrapeChannel(channelEntity, accNetworkReleases, beforeNetwork) {
 	const scraper = resolveScraper(channelEntity);
 	const layoutScraper = resolveLayoutScraper(channelEntity, scraper);
 
@@ -230,6 +231,7 @@ async function scrapeChannel(channelEntity, accNetworkReleases) {
 		return await scrapeChannelReleases(layoutScraper, channelEntity, {
 			...accNetworkReleases,
 			beforeFetchLatest,
+			beforeNetwork,
 		});
 	} catch (error) {
 		logger.error(`Failed to scrape releases from ${channelEntity.name} using ${scraper.slug}: ${error.message}`);
@@ -257,10 +259,12 @@ async function scrapeNetworkSequential(networkEntity) {
 }
 
 async function scrapeNetworkParallel(networkEntity) {
+	const beforeNetwork = await networkEntity.scraper.beforeNetwork?.(networkEntity);
+
 	return Promise.map(
 		networkEntity.includedChildren,
 		async (channelEntity) => {
-			const { uniqueReleases } = await scrapeChannel(channelEntity, networkEntity);
+			const { uniqueReleases } = await scrapeChannel(channelEntity, null, beforeNetwork);
 
 			return uniqueReleases;
 		},