// traxxx/src/updates.js

'use strict';
const Promise = require('bluebird');
const moment = require('moment');
const argv = require('./argv');
const logger = require('./logger')(__filename);
const knex = require('./knex');
const { curateRelease } = require('./releases');
const include = require('./utils/argv-include')(argv);
const scrapers = require('./scrapers/scrapers');
const { fetchIncludedEntities } = require('./entities');

const emptyReleases = { uniqueReleases: [], duplicateReleases: [] };

async function filterUniqueReleases(latestReleases, accReleases) {
  const latestReleaseIdentifiers = latestReleases
    .map(release => [release.entity.id, release.entryId]);

  const duplicateReleaseEntries = await knex('releases')
    .select(knex.raw('releases.*, row_to_json(entities) as entity'))
    .leftJoin('entities', 'entities.id', 'releases.entity_id')
    .whereIn(['entity_id', 'entry_id'], latestReleaseIdentifiers);

  const duplicateReleases = duplicateReleaseEntries.map(release => curateRelease(release));

  // add entry IDs of accumulated releases to prevent an infinite scrape loop
  // when one page contains the same release as the previous
  const duplicateReleasesSiteIdAndEntryIds = duplicateReleases
    .concat(accReleases)
    .reduce((acc, release) => {
      const entityId = release.entityId || release.entity.id;
      const entryId = release.entryId;

      if (!acc[entityId]) acc[entityId] = {};
      acc[entityId][entryId] = true;

      return acc;
    }, {});
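
  // shape of the lookup built above, with illustrative IDs: if entity 12 already has
  // entries 'abc' and 'def', the index is { 12: { abc: true, def: true } }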

  const uniqueReleases = latestReleases
    .filter(release => !duplicateReleasesSiteIdAndEntryIds[release.entity.id]?.[release.entryId]);

  return { uniqueReleases, duplicateReleases };
}

function needNextPage(releasesOnPage, uniqueReleases, totalReleases, hasDates, upcoming) {
  if (releasesOnPage.length === 0) {
    return false;
  }

  if (upcoming) {
    return uniqueReleases.length > 0 && argv.paginateUpcoming;
  }

  if (argv.last) {
    // keep paginating until the count condition below fails, even on sites that keep
    // serving the last page when the requested page number exceeds it (e.g. HardX as of
    // September 2020); checking uniqueReleases.length > 0 could prevent that, but would
    // stop pagination prematurely if we already scraped a full page of data earlier
    return releasesOnPage.length > 0 && totalReleases + releasesOnPage.length < argv.last;
  }

  if (!hasDates) {
    return totalReleases + releasesOnPage.length < argv.nullDateLimit;
  }

  if (argv.after) {
    // this would paginate infinitely on sites that keep serving the last page when the
    // requested page number exceeds it (e.g. HardX as of September 2020); checking
    // uniqueReleases.length > 0 could prevent that, but would stop pagination prematurely
    // if we already scraped a full page of data earlier
    const oldestReleaseOnPage = releasesOnPage
      .sort((releaseA, releaseB) => releaseB.date - releaseA.date)
      .slice(-1)[0];

    if (moment(oldestReleaseOnPage.date).isAfter(argv.after)) {
      // the oldest release on the page is newer than the specified date cut-off
      return true;
    }
  }

  return false;
}
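
// e.g. with `--after 2020-01-01` and a fully dated page whose oldest release is from
// 2020-03-15, needNextPage returns true and the next page is requested (illustrative dates)
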
async function scrapeReleases(scraper, entity, preData, upcoming = false, page = 1, acc = emptyReleases, totalReleases = 0) {
  const releases = upcoming
    ? await scraper.fetchUpcoming(entity, page, include, preData)
    : await scraper.fetchLatest(entity, page, include, preData);

  if (!Array.isArray(releases)) {
    // the scraper was unable to fetch the releases and returned an HTTP code or null
    logger.warn(`Scraper returned ${releases} when fetching ${upcoming ? 'upcoming' : 'latest'} releases from '${entity.name}' (${entity.parent?.name})`);
    return acc;
  }

  const releasesWithEntity = releases.map(release => ({
    ...release,
    entity: release.entity || entity, // allow the scraper to override the entity
  })); // attach the entity the release is assigned to when stored

  const hasDates = releasesWithEntity.every(release => !!release.date);
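
  // limit the page before deduplication: --last caps by how many releases are still
  // needed, fully dated pages are filtered by the --after cut-off, and undated pages
  // fall back to the null-date limit; an empty filter result is truthy, so it ends the chain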
  const limitedReleases = (argv.last && releasesWithEntity.slice(0, Math.max(argv.last - totalReleases, 0)))
    || (hasDates && releasesWithEntity.filter(release => moment(release.date).isAfter(argv.after)))
    || releasesWithEntity.slice(0, Math.max(argv.nullDateLimit - totalReleases, 0));

  const { uniqueReleases, duplicateReleases } = argv.force
    ? { uniqueReleases: limitedReleases, duplicateReleases: [] }
    : await filterUniqueReleases(limitedReleases, acc.uniqueReleases);

  const accReleases = {
    uniqueReleases: acc.uniqueReleases.concat(uniqueReleases),
    duplicateReleases: acc.duplicateReleases.concat(duplicateReleases),
  };

  if (needNextPage(releases, uniqueReleases, totalReleases, hasDates, upcoming)) {
    return scrapeReleases(scraper, entity, preData, upcoming, page + 1, accReleases, totalReleases + releases.length);
  }

  return accReleases;
}
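
// pagination sketch: a run with --last 50 starts at `argv.page || 1` and recurses with
// page + 1 until at least 50 releases are collected or a page comes back empty
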
async function scrapeLatestReleases(scraper, entity, preData) {
  if ((!argv.latest && !argv.last && !argv.after) || !scraper.fetchLatest) {
    return emptyReleases;
  }

  try {
    return await scrapeReleases(scraper, entity, preData, false, argv.page || 1);
  } catch (error) {
    if (argv.debug) {
      console.trace(error);
    }

    logger.warn(`Failed to scrape latest updates for '${entity.slug}' (${entity.parent?.slug}): ${error.message}`);
  }

  return emptyReleases;
}

async function scrapeUpcomingReleases(scraper, entity, preData) {
  if (!argv.upcoming || !scraper.fetchUpcoming) {
    return emptyReleases;
  }

  try {
    return await scrapeReleases(scraper, entity, preData, true);
  } catch (error) {
    if (argv.debug) {
      console.trace(error);
    }

    logger.warn(`Failed to scrape upcoming updates for '${entity.slug}' (${entity.parent?.slug}): ${error.message}`);
  }

  return emptyReleases;
}

async function scrapeMovies(scraper, entity) {
  if (!argv.movies || !scraper.fetchMovies) {
    return [];
  }

  try {
    // return await scrapeReleases(scraper, entity, preData, true);
    return await scraper.fetchMovies(entity);
  } catch (error) {
    logger.warn(`Failed to scrape movies for '${entity.slug}' (${entity.parent?.slug}): ${error.message}`);
  }

  return [];
}

async function scrapeChannelReleases(scraper, channelEntity, preData) {
  const [latestReleases, upcomingReleases] = await Promise.all([
    scrapeLatestReleases(scraper, channelEntity, preData),
    scrapeUpcomingReleases(scraper, channelEntity, preData),
    scrapeMovies(scraper, channelEntity, preData),
  ]);
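
  // note: movie results are fetched alongside latest and upcoming releases, but the
  // destructuring above drops them, so they are not part of the return value below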

  logger.info(`Fetched ${latestReleases.uniqueReleases.length} latest and ${upcomingReleases.uniqueReleases.length} upcoming updates for '${channelEntity.name}' (${channelEntity.parent?.name})`);

  return {
    uniqueReleases: [...latestReleases.uniqueReleases, ...upcomingReleases.uniqueReleases],
    duplicateReleases: [...latestReleases.duplicateReleases, ...upcomingReleases.duplicateReleases],
  };
}

async function scrapeChannel(channelEntity, accNetworkReleases) {
  const scraper = scrapers.releases[channelEntity.slug]
    || scrapers.releases[channelEntity.parent?.slug]
    || scrapers.releases[channelEntity.parent?.parent?.slug];

  if (!scraper) {
    logger.warn(`No scraper found for '${channelEntity.name}' (${channelEntity.parent?.name})`);
    return emptyReleases;
  }

  try {
    const beforeFetchLatest = await scraper.beforeFetchLatest?.(channelEntity);

    return await scrapeChannelReleases(scraper, channelEntity, {
      ...accNetworkReleases,
      beforeFetchLatest,
    });
  } catch (error) {
    logger.error(`Failed to scrape releases from ${channelEntity.name} using ${scraper.slug}: ${error.message}`);
    return emptyReleases;
  }
}

async function scrapeNetworkSequential(networkEntity) {
  const releases = await Promise.reduce(
    networkEntity.children,
    async (chain, channelEntity) => {
      const accNetworkReleases = await chain;
      const { uniqueReleases, duplicateReleases } = await scrapeChannel(channelEntity, accNetworkReleases);

      return {
        uniqueReleases: accNetworkReleases.uniqueReleases.concat(uniqueReleases),
        duplicateReleases: accNetworkReleases.duplicateReleases.concat(duplicateReleases),
      };
    },
    Promise.resolve(emptyReleases),
  );

  return releases.uniqueReleases;
}
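
// Bluebird's Promise.reduce awaits each channel in turn; the accumulated releases are
// passed along as the next channel's preData, so scrapers can use earlier results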

async function scrapeNetworkParallel(networkEntity) {
  return Promise.map(
    networkEntity.children,
    async (channelEntity) => {
      const { uniqueReleases } = await scrapeChannel(channelEntity, networkEntity);
      return uniqueReleases;
    },
    { concurrency: 3 },
  );
}
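
// the parallel variant scrapes up to three channels at once and keeps only each
// channel's unique releases, returning one array per channel
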
async function fetchUpdates() {
  const includedNetworks = await fetchIncludedEntities();

  const scrapedNetworks = await Promise.map(
    includedNetworks,
    async networkEntity => (networkEntity.parameters?.sequential
      ? scrapeNetworkSequential(networkEntity)
      : scrapeNetworkParallel(networkEntity)),
    { concurrency: 5 },
  );
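
  // sequential networks resolve to a flat array of releases, while parallel networks
  // resolve to one array per channel; flat(2) flattens both shapes into a single list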
  const releases = scrapedNetworks.flat(2);

  return releases;
}

module.exports = fetchUpdates;