diff --git a/public/img/logos/filthykings/favicon.png b/public/img/logos/filthykings/favicon.png
new file mode 100644
index 00000000..9ecb9419
Binary files /dev/null and b/public/img/logos/filthykings/favicon.png differ
diff --git a/public/img/logos/filthykings/favicon_dark.png b/public/img/logos/filthykings/favicon_dark.png
new file mode 100644
index 00000000..fea51fdc
Binary files /dev/null and b/public/img/logos/filthykings/favicon_dark.png differ
diff --git a/public/img/logos/filthykings/favicon_light.png b/public/img/logos/filthykings/favicon_light.png
new file mode 100644
index 00000000..59724f4d
Binary files /dev/null and b/public/img/logos/filthykings/favicon_light.png differ
diff --git a/public/img/logos/filthykings/filthykings.png b/public/img/logos/filthykings/filthykings.png
new file mode 100644
index 00000000..150cf17a
Binary files /dev/null and b/public/img/logos/filthykings/filthykings.png differ
diff --git a/public/img/logos/filthykings/lazy/filthykings.png b/public/img/logos/filthykings/lazy/filthykings.png
new file mode 100644
index 00000000..ffbd54ca
Binary files /dev/null and b/public/img/logos/filthykings/lazy/filthykings.png differ
diff --git a/public/img/logos/filthykings/lazy/network.png b/public/img/logos/filthykings/lazy/network.png
new file mode 100644
index 00000000..594f70b2
Binary files /dev/null and b/public/img/logos/filthykings/lazy/network.png differ
diff --git a/public/img/logos/filthykings/network.png b/public/img/logos/filthykings/network.png
new file mode 100644
index 00000000..19b2ebce
Binary files /dev/null and b/public/img/logos/filthykings/network.png differ
diff --git a/public/img/logos/filthykings/thumbs/filthykings.png b/public/img/logos/filthykings/thumbs/filthykings.png
new file mode 100644
index 00000000..384dbf0e
Binary files /dev/null and b/public/img/logos/filthykings/thumbs/filthykings.png differ
diff --git a/public/img/logos/filthykings/thumbs/network.png b/public/img/logos/filthykings/thumbs/network.png
new file mode 100644
index 00000000..ae047922
Binary files /dev/null and b/public/img/logos/filthykings/thumbs/network.png differ
diff --git a/seeds/02_sites.js b/seeds/02_sites.js
index c5df7a6f..cecd6d2a 100644
--- a/seeds/02_sites.js
+++ b/seeds/02_sites.js
@@ -3171,6 +3171,17 @@ const sites = [
 		url: 'https://blackambush.com',
 		parent: 'exploitedx',
 	},
+	// FILTHY KINGS
+	{
+		slug: 'filthykings',
+		name: 'Filthy Kings',
+		url: 'https://www.filthykings.com',
+		parent: 'gamma',
+		independent: true,
+		parameters: {
+			layout: 'api',
+		},
+	},
 	// FIRST ANAL QUEST
 	{
 		slug: 'firstanalquest',
diff --git a/src/deep.js b/src/deep.js
index e09bc5bd..606637d1 100644
--- a/src/deep.js
+++ b/src/deep.js
@@ -112,7 +112,7 @@ async function scrapeRelease(baseRelease, entitiesBySlug, type = 'scene') {
 	const options = {
 		...include,
-		beforeFetchScene: entity.preData,
+		beforeFetchScenes: entity.preData,
 		parameters: getRecursiveParameters(entity),
 	};
@@ -172,8 +172,8 @@ async function scrapeReleases(baseReleases, entitiesBySlug, type) {
 	const entitiesWithBeforeDataEntries = await Promise.all(Object.entries(entitiesBySlug).map(async ([slug, entity]) => {
-		if (entity.scraper?.beforeFetchScene) {
-			const preData = await entity.scraper.beforeFetchScene(entity);
+		if (entity.scraper?.beforeFetchScenes) {
+			const preData = await entity.scraper.beforeFetchScenes(entity);
 
 			return [slug, { ...entity, preData }];
 		}
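The seed entry above registers Filthy Kings as a Gamma channel using the `api` layout, and the `deep.js` change renames the per-entity hook from `beforeFetchScene` to `beforeFetchScenes`: the hook now runs once per entity, and its result (`preData`) is reused for every scene fetched from that entity. A minimal sketch of a scraper module opting into the renamed hook — the helper names and return values below are illustrative, only the hook contract comes from this diff:

```js
// Hypothetical session factory; deep.js calls entity.scraper.beforeFetchScenes(entity)
// once per entity and forwards the result as options.beforeFetchScenes.
const beforeFetchScenes = async (entity) => ({
	session: `session-for-${entity.slug}`, // illustrative shared credential
	instanceToken: 'example-token',
});

async function fetchScene(url, entity, baseScene, options) {
	// Reuse the precomputed session when present; fall back to a fresh one.
	const { session, instanceToken } = options.beforeFetchScenes || await beforeFetchScenes(entity);

	return { url, session, instanceToken };
}

module.exports = { beforeFetchScenes, fetchScene };
```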
diff --git a/src/scrapers/mindgeek.js b/src/scrapers/mindgeek.js
index 36198d45..f9da5af6 100644
--- a/src/scrapers/mindgeek.js
+++ b/src/scrapers/mindgeek.js
@@ -237,7 +237,7 @@ async function fetchLatest(site, page = 1, options) {
 	const { searchParams } = new URL(url);
 	const siteId = searchParams.get('site');
 
-	const { session, instanceToken } = await getSession(site, options.parameters);
+	const { session, instanceToken } = options.beforeNetwork || await getSession(site, options.parameters);
 
 	const beforeDate = moment().add('1', 'day').format('YYYY-MM-DD');
 	const limit = 10;
@@ -294,7 +294,7 @@ async function fetchScene(url, site, baseScene, options) {
 	}
 
 	const entryId = new URL(url).pathname.match(/\/(\d+)/)?.[1];
 
-	const { session, instanceToken } = options.beforeFetchScene || await getSession(site, options.parameters);
+	const { session, instanceToken } = options.beforeFetchScenes || await getSession(site, options.parameters);
 
 	const res = await http.get(`https://site-api.project1service.com/v2/releases/${entryId}`, {
 		session,
@@ -363,7 +363,8 @@ async function fetchProfile({ name: actorName, slug: actorSlug }, { entity, para
 }
 
 module.exports = {
-	beforeFetchScene: getSession,
+	beforeNetwork: getSession,
+	beforeFetchScenes: getSession,
 	scrapeLatestX,
 	fetchLatest,
 	fetchUpcoming,
diff --git a/src/updates.js b/src/updates.js
index 97ce332e..36db6fbf 100644
--- a/src/updates.js
+++ b/src/updates.js
@@ -107,6 +107,7 @@ async function scrapeReleases(scraper, entity, preData, isUpcoming) {
 	const options = {
 		...config.options[scraper.slug],
 		...include,
+		...preData,
 		parameters: getRecursiveParameters(entity),
 	};
@@ -207,7 +208,7 @@ async function scrapeChannelReleases(scraper, channelEntity, preData) {
 		scrapeMovies(scraper, channelEntity, preData),
 	]);
 
-	logger.info(`Fetching ${latestReleases.uniqueReleases.length} latest and ${upcomingReleases.uniqueReleases.length} upcoming updates for '${channelEntity.name}' (${channelEntity.parent?.name})`);
+	logger.info(`Fetching ${argv.latest ? latestReleases.uniqueReleases.length : 'no'} latest and ${argv.upcoming ? upcomingReleases.uniqueReleases.length : 'no'} upcoming updates for '${channelEntity.name}' (${channelEntity.parent?.name})`);
 
 	return {
 		uniqueReleases: [...latestReleases.uniqueReleases, ...upcomingReleases.uniqueReleases],
@@ -215,7 +216,7 @@
 	};
 }
 
-async function scrapeChannel(channelEntity, accNetworkReleases) {
+async function scrapeChannel(channelEntity, accNetworkReleases, beforeNetwork) {
 	const scraper = resolveScraper(channelEntity);
 	const layoutScraper = resolveLayoutScraper(channelEntity, scraper);
@@ -230,6 +231,7 @@
 		return await scrapeChannelReleases(layoutScraper, channelEntity, {
 			...accNetworkReleases,
 			beforeFetchLatest,
+			beforeNetwork,
 		});
 	} catch (error) {
 		logger.error(`Failed to scrape releases from ${channelEntity.name} using ${scraper.slug}: ${error.message}`);
@@ -257,10 +259,12 @@ async function scrapeNetworkSequential(networkEntity) {
 }
 
 async function scrapeNetworkParallel(networkEntity) {
+	const beforeNetwork = await networkEntity.scraper.beforeNetwork?.(networkEntity);
+
 	return Promise.map(
 		networkEntity.includedChildren,
 		async (channelEntity) => {
-			const { uniqueReleases } = await scrapeChannel(channelEntity, networkEntity);
+			const { uniqueReleases } = await scrapeChannel(channelEntity, null, beforeNetwork);
 
 			return uniqueReleases;
 		},
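Together, the `updates.js` and `mindgeek.js` changes add a matching once-per-network path: `scrapeNetworkParallel` awaits the scraper's optional `beforeNetwork` hook a single time, passes the result through `scrapeChannel` into `preData`, and the new `...preData` spread in `scrapeReleases` exposes it as `options.beforeNetwork`, which `fetchLatest` now prefers over calling `getSession` once per channel. (The `logger.info` tweak separately stops reporting counts for update types not requested via `argv`.) A runnable sketch of the flow — the entity shapes and session factory are illustrative, only the hook contract mirrors the diff:

```js
// Illustrative session factory standing in for getSession.
const getSession = async (entity) => ({
	session: `session-for-${entity.slug}`,
	instanceToken: 'example-token',
});

const scraper = {
	// As in mindgeek.js, the session factory backs the network-level hook.
	beforeNetwork: getSession,
	async fetchLatest(site, page, options) {
		// Prefer the shared session; acquire a fresh one only when absent.
		const { session, instanceToken } = options.beforeNetwork || await getSession(site);

		return { site: site.slug, session, instanceToken };
	},
};

async function scrapeNetworkParallel(networkEntity) {
	// Run the optional hook once for the entire network.
	const beforeNetwork = await networkEntity.scraper.beforeNetwork?.(networkEntity);

	// Every channel receives the same precomputed session via options,
	// the role played by the ...preData spread in updates.js.
	return Promise.all(networkEntity.includedChildren.map(
		(channel) => networkEntity.scraper.fetchLatest(channel, 1, { beforeNetwork }),
	));
}

// Example usage: both channels share one session for the whole network.
scrapeNetworkParallel({
	slug: 'example-network',
	scraper,
	includedChildren: [{ slug: 'channel-a' }, { slug: 'channel-b' }],
}).then((releases) => console.log(releases));
```

Keeping the `|| await getSession(...)` fallback means a channel scraped outside a full network run still acquires its own session.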