'use strict';

const Promise = require('bluebird');
const moment = require('moment');

const argv = require('./argv');
const logger = require('./logger')(__filename);
const knex = require('./knex');
const include = require('./utils/argv-include')(argv);
const scrapers = require('./scrapers/scrapers');
const { fetchChannelsFromArgv, fetchChannelsFromConfig } = require('./entities');

const afterDate = (() => {
  if (/\d{2,4}-\d{2}-\d{2,4}/.test(argv.after)) {
    // using date
    return moment
      .utc(argv.after, ['YYYY-MM-DD', 'DD-MM-YYYY'])
      .toDate();
  }

  // using time distance (e.g. "1 month")
  return moment
    .utc()
    .subtract(...argv.after.split(' '))
    .toDate();
})();
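
// Examples of accepted cut-off values (illustrative only; the exact CLI flag name
// is defined in ./argv):
//   "2021-06-01" or "01-06-2021" -> treated as an absolute date
//   "1 month" or "2 weeks"       -> treated as a relative distance via moment().subtract()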

async function filterUniqueReleases(latestReleases, accReleases) {
  const latestReleaseIdentifiers = latestReleases
    .map(release => [release.entity.id, release.entryId]);

  const duplicateReleases = await knex('releases')
    .whereIn(['entity_id', 'entry_id'], latestReleaseIdentifiers);

  // add entry IDs of accumulated releases to prevent an infinite scrape loop
  // when one page contains the same release as the previous
  const duplicateReleasesSiteIdAndEntryIds = duplicateReleases
    .concat(accReleases)
    .reduce((acc, release) => {
      const entityId = release.entity_id || release.entity.id;
      const entryId = release.entry_id || release.entryId;

      if (!acc[entityId]) acc[entityId] = {};
      acc[entityId][entryId] = true;

      return acc;
    }, {});

  const uniqueReleases = latestReleases
    .filter(release => !duplicateReleasesSiteIdAndEntryIds[release.entity.id]?.[release.entryId]);

  return uniqueReleases;
}
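
// The lookup built above has the shape { [entityId]: { [entryId]: true } }: a release
// is kept only when neither the database nor the pages scraped so far already contain
// its (entity ID, entry ID) pair.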

function needNextPage(uniqueReleases, pageAccReleases) {
  if (uniqueReleases.length === 0) {
    return false;
  }

  if (argv.last && pageAccReleases.length < argv.last) {
    // request for last N releases not yet satisfied
    return true;
  }

  if (uniqueReleases.every(release => !!release.date)) {
    const oldestReleaseOnPage = uniqueReleases
      .sort((releaseA, releaseB) => releaseB.date - releaseA.date)
      .slice(-1)[0];

    if (moment(oldestReleaseOnPage.date).isAfter(afterDate)) {
      // oldest release on page is newer than the specified date cut-off
      return true;
    }
  }

  // dates missing, and limit for scenes without dates not yet reached
  return pageAccReleases.length <= argv.nullDateLimit;
}
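
// In short: keep paging while new releases keep appearing and either the requested
// number of recent releases (argv.last) is not yet met, the oldest dated release on
// the page is still newer than the cut-off date, or the limit for undated releases
// (argv.nullDateLimit) has not been reached.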

async function scrapeReleases(scraper, entity, preData, upcoming = false) {
  const scrapePage = async (page = 1, accReleases = []) => {
    const latestReleases = upcoming
      ? await scraper.fetchUpcoming(entity, page, preData, include)
      : await scraper.fetchLatest(entity, page, preData, include);

    if (!Array.isArray(latestReleases)) {
      // scraper was unable to fetch the releases and returned an HTTP code or null
      logger.warn(`Scraper returned ${latestReleases} when fetching latest from '${entity.name}' (${entity.parent?.name})`);
      return accReleases;
    }

    if (latestReleases.length === 0) {
      // scraper successfully requested releases, but found none
      return accReleases;
    }

    const latestReleasesWithEntity = latestReleases.map(release => ({
      ...release,
      entity: release.entity || entity, // allow override
    })); // attach the entity the release is assigned to when stored

    const uniqueReleases = argv.redownload
      ? latestReleasesWithEntity
      : await filterUniqueReleases(latestReleasesWithEntity, accReleases);

    const pageAccReleases = accReleases.concat(uniqueReleases);

    logger.verbose(`Scraped '${entity.name}' (${entity.parent?.name}) ${upcoming ? 'upcoming' : 'latest'} page ${page}, found ${uniqueReleases.length} unique updates`);

    if (needNextPage(uniqueReleases, pageAccReleases)) {
      return scrapePage(page + 1, pageAccReleases);
    }

    return pageAccReleases;
  };

  const rawReleases = await scrapePage(argv.page || 1, []);
  const releases = upcoming
    ? rawReleases.map(rawRelease => ({ ...rawRelease, upcoming: true }))
    : rawReleases;

  if (argv.last) {
    return releases.slice(0, argv.last);
  }

  if (releases.every(release => release.date)) {
    return releases.filter(release => moment(release.date).isAfter(afterDate));
  }

  return releases.slice(0, argv.nullDateLimit);
}
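
/* Sketch of the scraper interface these functions rely on, inferred from the calls
 * above rather than a formal definition (see ./scrapers/scrapers for the real ones):
 *
 *   async fetchLatest(entity, page, preData, include) {
 *     // resolves to an array of releases ({ entryId, date, ... }), or to a
 *     // non-array value such as an HTTP status code or null on failure
 *   }
 *
 * fetchUpcoming takes the same arguments; beforeFetchLatest is an optional hook
 * that receives the channel entity (see scrapeChannel below).
 */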

async function scrapeLatestReleases(scraper, entity, preData) {
  if (!scraper.fetchLatest) {
    return [];
  }

  try {
    return await scrapeReleases(scraper, entity, preData, false);
  } catch (error) {
    console.trace(error);
    logger.warn(`Failed to scrape latest updates for '${entity.slug}' (${entity.parent?.slug}): ${error.message}`);
  }

  return [];
}

async function scrapeUpcomingReleases(scraper, entity, preData) {
  if (!scraper.fetchUpcoming) {
    return [];
  }

  try {
    return await scrapeReleases(scraper, entity, preData, true);
  } catch (error) {
    logger.warn(`Failed to scrape upcoming updates for '${entity.slug}' (${entity.parent?.slug}): ${error.message}`);
  }

  return [];
}

async function scrapeChannelReleases(scraper, channelEntity, preData) {
  const [latestReleases, upcomingReleases] = await Promise.all([
    argv.latest
      ? scrapeLatestReleases(scraper, channelEntity, preData)
      : [],
    argv.upcoming
      ? scrapeUpcomingReleases(scraper, channelEntity, preData)
      : [],
  ]);

  logger.info(`Fetched ${latestReleases.length} latest and ${upcomingReleases.length} upcoming updates for '${channelEntity.name}' (${channelEntity.parent?.name})`);

  return [...latestReleases, ...upcomingReleases];
}

async function scrapeChannel(channelEntity, accNetworkReleases) {
  // prefer the most specific scraper: the channel itself, then its parent network,
  // then that network's parent
  const scraper = scrapers.releases[channelEntity.slug]
    || scrapers.releases[channelEntity.parent?.slug]
    || scrapers.releases[channelEntity.parent?.parent?.slug];

  if (!scraper) {
    logger.warn(`No scraper found for '${channelEntity.name}' (${channelEntity.parent?.name})`);
    return [];
  }

  try {
    const beforeFetchLatest = await scraper.beforeFetchLatest?.(channelEntity);

    const channelEntityReleases = await scrapeChannelReleases(scraper, channelEntity, {
      accNetworkReleases,
      beforeFetchLatest,
    });

    return channelEntityReleases.map(release => ({ ...release, channelEntity }));
  } catch (error) {
    logger.error(`Failed to scrape releases from ${channelEntity.name} using ${scraper.slug}: ${error.message}`);

    return [];
  }
}

async function scrapeNetworkSequential(networkEntity) {
  return Promise.reduce(
    networkEntity.children,
    async (chain, channelEntity) => {
      const accNetworkReleases = await chain;
      const channelReleases = await scrapeChannel(channelEntity, accNetworkReleases);

      return accNetworkReleases.concat(channelReleases);
    },
    Promise.resolve([]),
  );
}

async function scrapeNetworkParallel(networkEntity) {
  return Promise.map(
    networkEntity.children,
    async channelEntity => scrapeChannel(channelEntity, networkEntity),
    { concurrency: 3 },
  );
}
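
// Note: in parallel mode each channel receives the network entity rather than an
// accumulator, so channels do not see each other's releases; sequential mode threads
// the accumulated releases through every channel in turn.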

async function fetchUpdates() {
  const includedNetworks = argv.channels || argv.networks
    ? await fetchChannelsFromArgv()
    : await fetchChannelsFromConfig();

  const scrapedNetworks = await Promise.map(
    includedNetworks,
    async networkEntity => (networkEntity.parameters?.sequential
      ? scrapeNetworkSequential(networkEntity)
      : scrapeNetworkParallel(networkEntity)),
    { concurrency: 5 },
  );

  const releases = scrapedNetworks.flat(2);

  return releases;
}

module.exports = fetchUpdates;
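
// Usage sketch (hypothetical caller; the require path is an assumption, and storing
// the returned releases is left to the consumer):
//   const fetchUpdates = require('./updates');
//   const releases = await fetchUpdates();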