'use strict';

const Promise = require('bluebird');
const moment = require('moment');

const argv = require('./argv');
const logger = require('./logger')(__filename);
const knex = require('./knex');
const include = require('./utils/argv-include')(argv);
const scrapers = require('./scrapers/scrapers');
const { fetchIncludedEntities } = require('./entities');

async function filterUniqueReleases(latestReleases, accReleases) {
  const latestReleaseIdentifiers = latestReleases
    .map(release => [release.entity.id, release.entryId]);

  const duplicateReleases = await knex('releases')
    .whereIn(['entity_id', 'entry_id'], latestReleaseIdentifiers);

  // add entry IDs of accumulated releases to prevent an infinite scrape loop
  // when one page contains the same release as the previous;
  // the resulting lookup map has the shape { [entityId]: { [entryId]: true } }
  const duplicateReleasesSiteIdAndEntryIds = duplicateReleases
    .concat(accReleases)
    .reduce((acc, release) => {
      const entityId = release.entity_id || release.entity.id;
      const entryId = release.entry_id || release.entryId;

      if (!acc[entityId]) acc[entityId] = {};
      acc[entityId][entryId] = true;

      return acc;
    }, {});

  const uniqueReleases = latestReleases
    .filter(release => !duplicateReleasesSiteIdAndEntryIds[release.entity.id]?.[release.entryId]);

  return uniqueReleases;
}

function needNextPage(releases, uniqueReleases, totalReleases, hasDates) {
  if (argv.last) {
    return totalReleases + releases.length < argv.last;
  }

  if (!hasDates) {
    return totalReleases + releases.length < argv.nullDateLimit;
  }

  // sort by date, descending, so the last element is the oldest release on the page
  const oldestReleaseOnPage = releases
    .sort((releaseA, releaseB) => releaseB.date - releaseA.date)
    .slice(-1)[0];

  if (moment(oldestReleaseOnPage.date).isAfter(argv.after)) {
    // oldest release on page is newer than the specified date cut-off
    return true;
  }

  return false;
}

async function scrapeReleases(scraper, entity, preData, upcoming = false, page = 1, accReleases = [], totalReleases = 0) {
  const releases = upcoming
    ? await scraper.fetchUpcoming(entity, page, include, preData)
    : await scraper.fetchLatest(entity, page, include, preData);

  if (!Array.isArray(releases)) {
    // scraper was unable to fetch the releases and returned an HTTP code or null
    logger.warn(`Scraper returned ${releases} when fetching ${upcoming ? 'upcoming' : 'latest'} from '${entity.name}' (${entity.parent?.name})`);
    return accReleases;
  }

  // attach the entity the release will be assigned to when stored
  const releasesWithEntity = releases.map(release => ({
    ...release,
    entity: release.entity || entity, // allow the scraper to override the entity
  }));

  const hasDates = releasesWithEntity.every(release => !!release.date);

  const limitedReleases = (argv.last && releasesWithEntity.slice(0, Math.max(argv.last - totalReleases, 0)))
    || (hasDates && releasesWithEntity.filter(release => moment(release.date).isAfter(argv.after)))
    || releasesWithEntity.slice(0, Math.max(argv.nullDateLimit - totalReleases, 0));

  const uniqueReleases = argv.force
    ? limitedReleases
    : await filterUniqueReleases(limitedReleases, accReleases);

  if (needNextPage(releases, uniqueReleases, totalReleases, hasDates)) {
    return scrapeReleases(scraper, entity, preData, upcoming, page + 1, accReleases.concat(uniqueReleases), totalReleases + releases.length);
  }

  return accReleases.concat(uniqueReleases);
}
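/*
 * Example walk-through (hypothetical numbers, not from the codebase): with
 * `--last 50` and a site listing 20 releases per page, scrapeReleases fetches
 * pages 1 through 3, deduplicating each page against the database and the
 * accumulator (unless --force is set), and trimming the third page so at
 * most 50 releases are accumulated before the recursion stops.
 */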
async function scrapeLatestReleases(scraper, entity, preData) {
  if (!scraper.fetchLatest) {
    return [];
  }

  try {
    return await scrapeReleases(scraper, entity, preData, false, argv.page || 1);
  } catch (error) {
    if (argv.debug) {
      console.trace(error);
    }

    logger.warn(`Failed to scrape latest updates for '${entity.slug}' (${entity.parent?.slug}): ${error.message}`);
  }

  return [];
}

async function scrapeUpcomingReleases(scraper, entity, preData) {
  if (!scraper.fetchUpcoming) {
    return [];
  }

  try {
    return await scrapeReleases(scraper, entity, preData, true);
  } catch (error) {
    if (argv.debug) {
      console.trace(error);
    }

    logger.warn(`Failed to scrape upcoming updates for '${entity.slug}' (${entity.parent?.slug}): ${error.message}`);
  }

  return [];
}

async function scrapeMovies(scraper, entity, preData) {
  if (!scraper.fetchMovies) {
    return [];
  }

  try {
    // return await scrapeReleases(scraper, entity, preData, true);
    return await scraper.fetchMovies(entity);
  } catch (error) {
    logger.warn(`Failed to scrape movies for '${entity.slug}' (${entity.parent?.slug}): ${error.message}`);
  }

  return [];
}

async function scrapeChannelReleases(scraper, channelEntity, preData) {
  // movie scraping runs alongside the other scrapes, but its result is not
  // returned yet; only the latest and upcoming releases are destructured
  const [latestReleases, upcomingReleases] = await Promise.all([
    argv.latest ? scrapeLatestReleases(scraper, channelEntity, preData) : [],
    argv.upcoming ? scrapeUpcomingReleases(scraper, channelEntity, preData) : [],
    argv.movies ? scrapeMovies(scraper, channelEntity, preData) : [],
  ]);

  logger.info(`Fetched ${latestReleases.length} latest and ${upcomingReleases.length} upcoming updates for '${channelEntity.name}' (${channelEntity.parent?.name})`);

  return [...latestReleases, ...upcomingReleases];
}

async function scrapeChannel(channelEntity, accNetworkReleases) {
  const scraper = scrapers.releases[channelEntity.slug]
    || scrapers.releases[channelEntity.parent?.slug]
    || scrapers.releases[channelEntity.parent?.parent?.slug];

  if (!scraper) {
    logger.warn(`No scraper found for '${channelEntity.name}' (${channelEntity.parent?.name})`);
    return [];
  }

  try {
    const beforeFetchLatest = await scraper.beforeFetchLatest?.(channelEntity);

    const channelEntityReleases = await scrapeChannelReleases(scraper, channelEntity, {
      accNetworkReleases,
      beforeFetchLatest,
    });

    return channelEntityReleases.map(release => ({ ...release, channelEntity }));
  } catch (error) {
    logger.error(`Failed to scrape releases from ${channelEntity.name} using ${scraper.slug}: ${error.message}`);
    return [];
  }
}

async function scrapeNetworkSequential(networkEntity) {
  return Promise.reduce(
    networkEntity.children,
    async (chain, channelEntity) => {
      const accNetworkReleases = await chain;
      const channelReleases = await scrapeChannel(channelEntity, accNetworkReleases);

      return accNetworkReleases.concat(channelReleases);
    },
    Promise.resolve([]),
  );
}

async function scrapeNetworkParallel(networkEntity) {
  return Promise.map(
    networkEntity.children,
    // channels are scraped concurrently, so releases cannot be accumulated
    // across the network; pass an empty accumulator instead
    async channelEntity => scrapeChannel(channelEntity, []),
    { concurrency: 3 },
  );
}
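/*
 * A network opts into sequential scraping through its `parameters.sequential`
 * flag (checked in fetchUpdates below). Sequential mode lets each channel's
 * scraper see the releases accumulated so far via preData.accNetworkReleases,
 * at the cost of scraping one channel at a time; parallel mode trades that
 * context for up to three concurrent channel scrapes.
 */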
async function fetchUpdates() {
  const includedNetworks = await fetchIncludedEntities();

  const scrapedNetworks = await Promise.map(
    includedNetworks,
    async networkEntity => (networkEntity.parameters?.sequential
      ? scrapeNetworkSequential(networkEntity)
      : scrapeNetworkParallel(networkEntity)),
    { concurrency: 5 },
  );

  // parallel network results are nested one level deeper than sequential
  // ones, so flatten two levels to get a single array of releases
  const releases = scrapedNetworks.flat(2);

  return releases;
}

module.exports = fetchUpdates;
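// Usage sketch, assuming this module is saved as updates.js and called from a
// CLI entry point that has already configured argv (hypothetical wiring):
//
//   const fetchUpdates = require('./updates');
//
//   fetchUpdates()
//     .then(releases => logger.info(`Fetched ${releases.length} new releases`))
//     .catch(error => logger.error(error.message));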