// traxxx/src/scrape-sites.js

'use strict';

const Promise = require('bluebird');
const moment = require('moment');
const argv = require('./argv');
const include = require('./utils/argv-include')(argv);
const logger = require('./logger')(__filename);
const knex = require('./knex');
const { fetchIncludedSites } = require('./sites');
const scrapers = require('./scrapers/scrapers');
const { deepFetchReleases } = require('./scrape-releases');
const { storeReleases } = require('./releases');
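
// Resolve the --after argument to a cut-off date. Accepts an absolute date
// ('YYYY-MM-DD' or 'DD-MM-YYYY') or a relative distance passed to
// moment.subtract(), e.g. --after "1 month".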
function getAfterDate() {
  if (/\d{2,4}-\d{2}-\d{2,4}/.test(argv.after)) {
    // using an absolute date
    return moment
      .utc(argv.after, ['YYYY-MM-DD', 'DD-MM-YYYY'])
      .toDate();
  }

  // using a time distance (e.g. "1 month")
  return moment
    .utc()
    .subtract(...argv.after.split(' '))
    .toDate();
}
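
// Collect the entry IDs of releases that are already stored in the database
// or already accumulated during this scrape, so they can be skipped.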
async function findDuplicateReleaseIds(latestReleases, accReleases) {
  const duplicateReleases = await knex('releases')
    .whereIn('entry_id', latestReleases.map(({ entryId }) => entryId));

  // include accumulated releases as duplicates to prevent an infinite
  // loop when the next page contains the same releases as the previous
  return new Set(duplicateReleases
    .map(release => String(release.entry_id))
    .concat(accReleases.map(release => String(release.entryId))));
}
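
// Walk the scene overview page by page, filtering out known releases, until
// the date limit (--after), the requested count (--last) or the null-date
// limit is reached.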
async function scrapeUniqueReleases(scraper, site, beforeFetchLatest, accSiteReleases, afterDate = getAfterDate(), accReleases = [], page = argv.page) {
  if (!argv.latest || !scraper.fetchLatest) {
    return [];
  }

  const latestReleases = await scraper.fetchLatest(site, page, beforeFetchLatest, accSiteReleases, include);

  if (!Array.isArray(latestReleases)) {
    logger.warn(`Scraper returned ${latestReleases || 'null'} when fetching latest from '${site.name}' on '${site.network.name}'`);
    return accReleases;
  }

  if (latestReleases.length === 0) {
    return accReleases;
  }

  const latestReleasesWithSite = latestReleases.map(release => ({ ...release, site }));
  const oldestReleaseOnPage = latestReleases.slice(-1)[0].date;

  const duplicateReleaseIds = argv.redownload ? new Set() : await findDuplicateReleaseIds(latestReleases, accReleases);

  const uniqueReleases = latestReleasesWithSite
    .filter(release => !duplicateReleaseIds.has(String(release.entryId)) // release is not already in the database
      && (argv.last || !release.date || moment(release.date).isAfter(afterDate))); // release is newer than the date limit, or no date limit applies

  logger.verbose(`${site.name}: Scraped page ${page}, ${uniqueReleases.length} unique recent releases`);

  if (
    uniqueReleases.length > 0
    // && (oldestReleaseOnPage || page < argv.pages)
    && ((oldestReleaseOnPage
      ? moment(oldestReleaseOnPage).isAfter(afterDate)
      : accReleases.length + uniqueReleases.length <= argv.nullDateLimit)
      || (argv.last && accReleases.length + uniqueReleases.length < argv.last))
  ) {
    // the oldest release on the page is newer than the specified date range,
    // or the requested count has not yet been met; fetch the next page
    return scrapeUniqueReleases(scraper, site, beforeFetchLatest, accSiteReleases, afterDate, accReleases.concat(uniqueReleases), page + 1);
  }

  if (argv.last && uniqueReleases.length >= argv.last) {
    return accReleases.concat(uniqueReleases).slice(0, argv.last);
  }

  if (oldestReleaseOnPage) {
    return accReleases.concat(uniqueReleases);
  }

  return accReleases.concat(uniqueReleases).slice(0, argv.nullDateLimit);
}
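
// Fetch the first page of the upcoming-scenes overview, if the scraper
// supports it, and tag every release as upcoming.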
async function scrapeUpcomingReleases(scraper, site, beforeFetchLatest) {
  if (argv.upcoming && scraper.fetchUpcoming) {
    const upcomingReleases = await scraper.fetchUpcoming(site, 1, beforeFetchLatest, include);

    return upcomingReleases
      ? upcomingReleases.map(release => ({ ...release, site, upcoming: true }))
      : [];
  }

  return [];
}
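
// Scrape latest and upcoming releases for a single site, optionally
// following each release URL for detailed scene info (--deep).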
async function scrapeSiteReleases(scraper, site, accSiteReleases) {
  const beforeFetchLatest = await scraper.beforeFetchLatest?.(site, accSiteReleases);

  const [newReleases, upcomingReleases] = await Promise.all([
    scrapeUniqueReleases(scraper, site, beforeFetchLatest, accSiteReleases), // fetch basic release info from the scene overview
    scrapeUpcomingReleases(scraper, site, beforeFetchLatest), // fetch basic release info from the upcoming overview
  ]);

  logger.info(`${site.name}: ${argv.latest ? `Found ${newReleases.length}` : 'Ignoring'} latest releases, ${argv.upcoming ? `found ${upcomingReleases.length}` : 'ignoring'} upcoming releases`);

  const baseReleases = [...newReleases, ...upcomingReleases];

  if (argv.deep) {
    // follow the URL of every release
    return deepFetchReleases(baseReleases, beforeFetchLatest);
  }

  return baseReleases;
}
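
// Scrape a single site with its own scraper, falling back to the network's
// scraper; failures are logged and yield an empty result.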
async function scrapeSite(site, network, accSiteReleases = []) {
  if (site.parameters?.ignore) {
    logger.warn(`Ignoring ${network.name}: ${site.name}`);
    return [];
  }

  const scraper = scrapers.releases[site.slug] || scrapers.releases[site.network.slug];

  if (!scraper) {
    logger.warn(`No scraper found for '${site.name}' (${site.slug})`);
    return [];
  }

  try {
    const siteReleases = await scrapeSiteReleases(scraper, site, accSiteReleases);

    return siteReleases.map(release => ({ ...release, site }));
  } catch (error) {
    logger.error(`${site.name}: Failed to scrape releases: ${error.message}`);
    return [];
  }
}
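
// Scrape all included networks, 5 at a time; sites within a network are
// scraped concurrently unless the network is marked sequential, in which
// case each site receives the accumulated releases of its predecessors.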
async function scrapeSites() {
  const networks = await fetchIncludedSites();

  const scrapedNetworks = await Promise.map(networks, async (network) => {
    if (network.parameters?.sequential) {
      logger.info(`Scraping '${network.name}' sequentially`);

      return Promise.reduce(network.sites, async (acc, site) => {
        const accSiteReleases = await acc;
        const siteReleases = await scrapeSite(site, network, accSiteReleases);

        return accSiteReleases.concat(siteReleases);
      }, Promise.resolve([]));
    }

    return Promise.map(network.sites, site => scrapeSite(site, network), {
      concurrency: network.parameters?.concurrency || 2,
    });
  }, {
    // scrape 5 networks at a time
    concurrency: 5,
  });

  const releases = scrapedNetworks.flat(2);

  if (argv.inspect) {
    console.log(releases);
  }

  if (argv.save) {
    await storeReleases(releases);
  }
}
module.exports = scrapeSites;
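
// Minimal usage sketch (hypothetical entry point; the actual CLI wiring
// lives elsewhere in traxxx). Assumes ./argv has already parsed flags such
// as --latest, --after and --save:
//
//   const scrapeSites = require('./scrape-sites');
//
//   scrapeSites()
//     .then(() => knex.destroy()) // release the database connection pool
//     .catch(error => logger.error(error.message));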