'use strict';

const Promise = require('bluebird');
const moment = require('moment');

const argv = require('./argv');
const logger = require('./logger')(__filename);
const knex = require('./knex');
const include = require('./utils/argv-include')(argv);
const scrapers = require('./scrapers/scrapers');
const { fetchSitesFromArgv, fetchSitesFromConfig } = require('./sites');

// cut-off date for scraped releases, derived from the --after argument
const afterDate = (() => {
	if (/\d{2,4}-\d{2}-\d{2,4}/.test(argv.after)) {
		// using date
		return moment
			.utc(argv.after, ['YYYY-MM-DD', 'DD-MM-YYYY'])
			.toDate();
	}

	// using time distance (e.g. "1 month")
	return moment
		.utc()
		.subtract(...argv.after.split(' '))
		.toDate();
})();

async function extractUniqueReleases(latestReleases, accReleases) {
	const latestReleaseIdentifiers = latestReleases
		.map(release => [release.site.id, release.entryId]);

	const duplicateReleases = await knex('releases')
		.whereIn(['site_id', 'entry_id'], latestReleaseIdentifiers);

	// add entry IDs of accumulated releases to prevent an infinite scrape loop
	// when one page contains the same release as the previous
	const duplicateReleasesSiteIdAndEntryIds = duplicateReleases
		.concat(accReleases)
		.reduce((acc, release) => {
			const siteId = release.site_id || release.site.id;
			const entryId = release.entry_id || release.entryId;

			if (!acc[siteId]) acc[siteId] = {};
			acc[siteId][entryId] = true;

			return acc;
		}, {});

	const uniqueReleases = latestReleases
		.filter(release => !duplicateReleasesSiteIdAndEntryIds[release.site.id]?.[release.entryId]);

	return uniqueReleases;
}

function needNextPage(uniqueReleases, pageAccReleases) {
	if (uniqueReleases.length === 0) {
		// no new releases on this page, stop paginating
		return false;
	}

	if (argv.last && pageAccReleases.length < argv.last) {
		// request for last N releases not yet satisfied
		return true;
	}

	const oldestReleaseOnPage = uniqueReleases
		.sort((releaseA, releaseB) => releaseB.date - releaseA.date)
		.slice(-1)[0];

	if (oldestReleaseOnPage && moment(oldestReleaseOnPage.date).isAfter(afterDate)) {
		// oldest release on page is newer than the specified date cut-off
		return true;
	}

	// dates missing, and limit for scenes without dates not yet reached
	return pageAccReleases.length <= argv.nullDateLimit;
}

async function scrapeLatestReleases(scraper, site, preData) {
	if (!scraper.fetchLatest) {
		return [];
	}

	const scrapePage = async (page = 1, accReleases = []) => {
		const latestReleases = await scraper.fetchLatest(site, page, preData, include);

		if (!Array.isArray(latestReleases)) {
			// scraper is unable to fetch the releases and returned an HTTP code or null
			logger.warn(`Scraper returned ${latestReleases} when fetching latest from '${site.name}' (${site.network.name})`);
			return accReleases;
		}

		if (latestReleases.length === 0) {
			// scraper successfully requested releases, but found none
			return accReleases;
		}

		// attach the site the release is assigned to when stored
		const latestReleasesWithSite = latestReleases.map(release => ({ ...release, site: release.site || site }));

		const uniqueReleases = argv.redownload
			? latestReleasesWithSite
			: await extractUniqueReleases(latestReleasesWithSite, accReleases);

		const pageAccReleases = accReleases.concat(uniqueReleases);

		logger.verbose(`Scraped '${site.name}' (${site.network.name}) page ${page}, found ${uniqueReleases.length} unique releases`);

		if (needNextPage(uniqueReleases, pageAccReleases)) {
			return scrapePage(page + 1, pageAccReleases);
		}

		return pageAccReleases;
	};

	const releases = await scrapePage(1, []);

	if (argv.last) {
		return releases.slice(0, argv.last);
	}

	if (releases.every(release => release.date)) {
		return releases
			.filter(release => moment(release.date).isAfter(afterDate));
	}

	return releases.slice(0, argv.nullDateLimit);
}

async function scrapeUpcomingReleases(scraper, site, preData) {
	if (!scraper.fetchUpcoming) {
		return [];
	}

	try {
		const upcomingReleases = await scraper.fetchUpcoming(site, 1, preData, include);

		if (upcomingReleases) {
			return upcomingReleases.map(release => ({
				...release,
				site,
				upcoming: true,
			}));
		}
	} catch (error) {
		logger.warn(`Failed to scrape upcoming releases for '${site.slug}' (${site.network.slug}): ${error.message}`);
	}

	return [];
}

async function scrapeSiteReleases(scraper, site, preData) {
	const [latestReleases, upcomingReleases] = await Promise.all([
		argv.latest ? scrapeLatestReleases(scraper, site, preData) : [],
		argv.upcoming ? scrapeUpcomingReleases(scraper, site, preData) : [],
	]);

	return [...latestReleases, ...upcomingReleases];
}

async function scrapeSite(site, accSiteReleases) {
	// prefer a site-specific scraper, fall back to the network or parent network scraper
	const scraper = scrapers.releases[site.slug]
		|| scrapers.releases[site.network.slug]
		|| scrapers.releases[site.network.parent?.slug];

	if (!scraper) {
		logger.warn(`No scraper found for '${site.name}' (${site.network.name})`);
		return [];
	}

	try {
		const beforeFetchLatest = await scraper.beforeFetchLatest?.(site);

		const siteReleases = await scrapeSiteReleases(scraper, site, {
			accSiteReleases,
			beforeFetchLatest,
		});

		return siteReleases.map(release => ({ ...release, site }));
	} catch (error) {
		logger.error(`Failed to scrape releases from ${site.name} using ${scraper.slug}: ${error.message}`);
		return [];
	}
}

async function scrapeNetworkSequential(network) {
	// scrape sites one at a time, passing the releases accumulated so far to each next site
	return Promise.reduce(
		network.sites,
		async (chain, site) => {
			const accSiteReleases = await chain;
			const siteReleases = await scrapeSite(site, accSiteReleases);

			return accSiteReleases.concat(siteReleases);
		},
		Promise.resolve([]),
	);
}

async function scrapeNetworkParallel(network) {
	// scrape up to 3 sites concurrently; releases cannot be accumulated across sites
	return Promise.map(
		network.sites,
		async site => scrapeSite(site, []),
		{ concurrency: 3 },
	);
}

async function fetchUpdates() {
	const includedNetworks = argv.sites || argv.networks
		? await fetchSitesFromArgv()
		: await fetchSitesFromConfig();

	const scrapedNetworks = await Promise.map(
		includedNetworks,
		async network => (network.parameters?.sequential
			? scrapeNetworkSequential(network)
			: scrapeNetworkParallel(network)),
		{ concurrency: 5 },
	);

	const releases = scrapedNetworks.flat(2);

	return releases;
}

module.exports = fetchUpdates;
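
// Usage sketch: how a consumer might call this module. The './updates' filename
// and the plain console logging are assumptions for illustration, not part of
// this module or its callers.
//
//   const fetchUpdates = require('./updates');
//
//   fetchUpdates()
//       .then(releases => console.log(`Fetched ${releases.length} releases`))
//       .catch(error => console.error(error));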