'use strict';

const config = require('config');
const Promise = require('bluebird');
const moment = require('moment');

const argv = require('./argv');
const logger = require('./logger')(__filename);
const knex = require('./knex');
const { curateRelease } = require('./releases');
const include = require('./utils/argv-include')(argv);
const scrapers = require('./scrapers/scrapers');
const { fetchIncludedEntities } = require('./entities');

const emptyReleases = { uniqueReleases: [], duplicateReleases: [] };
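// Reducer that indexes releases in a nested lookup: acc[entityId][entryId] = release.
// Later occurrences of the same entity/entry pair overwrite earlier ones.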
function mapReleasesToEntityIdAndEntryId(acc, release) {
	const entityId = release.entityId || release.entity.id;
	const entryId = release.entryId;

	if (!acc[entityId]) acc[entityId] = {};
	acc[entityId][entryId] = release;

	return acc;
}
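// Splits a page of releases into those not yet seen in the accumulated batch
// (localUniqueReleases) and those that duplicate an already accumulated release.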
function filterLocalUniqueReleases(releases, accReleases) {
	const localDuplicateReleasesByEntityIdAndEntryId = accReleases.reduce(mapReleasesToEntityIdAndEntryId, {});

	const localUniqueReleases = releases.filter(release => !localDuplicateReleasesByEntityIdAndEntryId[release.entity.id]?.[release.entryId]);
	const localDuplicateReleases = releases.filter(release => localDuplicateReleasesByEntityIdAndEntryId[release.entity.id]?.[release.entryId]);

	return {
		localUniqueReleases,
		localDuplicateReleases,
	};
}
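// Checks scraped releases against the database by (entity_id, entry_id) and returns
// the releases that are new, alongside the curated duplicates that are already stored.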
async function filterUniqueReleases(releases) {
	const releaseIdentifiers = releases
		.map(release => [release.entity.id, release.entryId]);

	const duplicateReleaseEntries = await knex('releases')
		.select(knex.raw('releases.*, row_to_json(entities) as entity'))
		.leftJoin('entities', 'entities.id', 'releases.entity_id')
		.whereIn(['entity_id', 'entry_id'], releaseIdentifiers);

	const duplicateReleases = duplicateReleaseEntries.map(release => curateRelease(release));
	const duplicateReleasesByEntityIdAndEntryId = duplicateReleases.reduce(mapReleasesToEntityIdAndEntryId, {});

	const internalUniqueReleasesByEntityIdAndEntryId = releases.reduce(mapReleasesToEntityIdAndEntryId, {});
	const internalUniqueReleases = Object.values(internalUniqueReleasesByEntityIdAndEntryId).flatMap(releasesByEntryId => Object.values(releasesByEntryId));

	const uniqueReleases = internalUniqueReleases.filter(release => !duplicateReleasesByEntityIdAndEntryId[release.entity.id]?.[release.entryId]);

	return { uniqueReleases, duplicateReleases };
}
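// Decides whether pagination should continue, based on whether the page yielded any
// new releases and on the --last, --after and missing-date limits from argv.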
function needNextPage(pageReleases, accReleases, isUpcoming) {
	const { localUniqueReleases: uniquePageReleases } = filterLocalUniqueReleases(pageReleases, accReleases);

	if (uniquePageReleases.length === 0) {
		// page is empty, or only contains scenes from the previous page
		return false;
	}

	if (isUpcoming) {
		return uniquePageReleases.length > 0 && argv.paginateUpcoming;
	}

	if (uniquePageReleases.length > 0) {
		if (argv.last) {
			return accReleases.length + pageReleases.length < argv.last;
		}

		if (!pageReleases.every(release => !!release.date)) { // some scenes don't have dates
			return accReleases.length + pageReleases.length < argv.missingDateLimit;
		}

		if (argv.after) {
			const oldestReleaseOnPage = [...pageReleases] // copy to avoid reordering the page with sort
				.sort((releaseA, releaseB) => releaseB.date - releaseA.date)
				.slice(-1)[0];

			if (moment(oldestReleaseOnPage.date).isAfter(argv.after)) {
				// the oldest release on the page is newer than the specified date cut-off
				return true;
			}
		}
	}

	return false;
}
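// Recursively fetches latest or upcoming releases page by page, attaches the entity
// to each release, trims the result to the argv limits, and filters out database
// duplicates unless --force is set.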
async function scrapeReleases(scraper, entity, preData, isUpcoming) {
	async function scrapeReleasesPage(page, accReleases) {
		const options = {
			...config.options[scraper.slug],
			...include,
		};

		const pageReleases = isUpcoming
			? await scraper.fetchUpcoming(entity, page, options, preData)
			: await scraper.fetchLatest(entity, page, options, preData);

		if (!Array.isArray(pageReleases)) {
			// the scraper was unable to fetch the releases and returned an HTTP code or null instead
			logger.warn(`Scraper returned ${pageReleases} when fetching ${isUpcoming ? 'upcoming' : 'latest'} from '${entity.name}' (${entity.parent?.name})`);
			return accReleases;
		}

		const validPageReleases = pageReleases.filter(release => release?.entryId); // filter out empty and unidentified releases
		const pageReleasesWithEntity = validPageReleases.map(release => ({ ...release, entity: release.entity || entity }));

		if (pageReleases.length > validPageReleases.length) {
			logger.warn(`Found ${pageReleases.length - validPageReleases.length} empty or unidentified releases on page ${page} for '${entity.name}'`);
		}

		if (needNextPage(pageReleasesWithEntity, accReleases, isUpcoming)) {
			return scrapeReleasesPage(page + 1, accReleases.concat(pageReleasesWithEntity));
		}

		return accReleases.concat(pageReleasesWithEntity);
	}

	const releases = await scrapeReleasesPage(argv.page || 1, []);

	const hasDates = releases.every(release => !!release.date);

	const limitedReleases = (argv.last && releases.slice(0, Math.max(argv.last, 0)))
		|| (hasDates && releases.filter(release => moment(release.date).isAfter(argv.after)))
		|| releases.slice(0, Math.max(argv.missingDateLimit, 0));

	const { uniqueReleases, duplicateReleases } = argv.force
		? { uniqueReleases: limitedReleases, duplicateReleases: [] }
		: await filterUniqueReleases(limitedReleases);

	return { uniqueReleases, duplicateReleases };
}
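// Scrapes the latest releases for an entity; returns empty results when latest
// scraping is disabled or unsupported, or when the scraper throws.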
async function scrapeLatestReleases(scraper, entity, preData) {
	if (!argv.latest || !scraper.fetchLatest) {
		return emptyReleases;
	}

	try {
		return await scrapeReleases(scraper, entity, preData, false);
	} catch (error) {
		if (argv.debug) {
			console.trace(error);
		}

		logger.warn(`Failed to scrape latest updates for '${entity.slug}' (${entity.parent?.slug}): ${error.message}`);
	}

	return emptyReleases;
}
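// Scrapes upcoming releases; mirrors scrapeLatestReleases with the upcoming flag set.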
async function scrapeUpcomingReleases(scraper, entity, preData) {
	if (!argv.upcoming || !scraper.fetchUpcoming) {
		return emptyReleases;
	}

	try {
		return await scrapeReleases(scraper, entity, preData, true);
	} catch (error) {
		if (argv.debug) {
			console.trace(error);
		}

		logger.warn(`Failed to scrape upcoming updates for '${entity.slug}' (${entity.parent?.slug}): ${error.message}`);
	}

	return emptyReleases;
}
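// Fetches movies for an entity when --movies is set and the scraper supports it.
// Unlike scene scraping, this path is not paginated or deduplicated here.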
async function scrapeMovies(scraper, entity) {
	if (!argv.movies || !scraper.fetchMovies) {
		return [];
	}

	try {
		return await scraper.fetchMovies(entity);
	} catch (error) {
		logger.warn(`Failed to scrape movies for '${entity.slug}' (${entity.parent?.slug}): ${error.message}`);
	}

	return [];
}
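// Fetches latest and upcoming releases (and movies) for a channel concurrently and
// merges the unique and duplicate scene results; the movie results are not merged here.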
async function scrapeChannelReleases(scraper, channelEntity, preData) {
	const [latestReleases, upcomingReleases] = await Promise.all([
		scrapeLatestReleases(scraper, channelEntity, preData),
		scrapeUpcomingReleases(scraper, channelEntity, preData),
		scrapeMovies(scraper, channelEntity), // result is not merged into the return value below
	]);

	logger.info(`Fetched ${latestReleases.uniqueReleases.length} latest and ${upcomingReleases.uniqueReleases.length} upcoming updates for '${channelEntity.name}' (${channelEntity.parent?.name})`);

	return {
		uniqueReleases: [...latestReleases.uniqueReleases, ...upcomingReleases.uniqueReleases],
		duplicateReleases: [...latestReleases.duplicateReleases, ...upcomingReleases.duplicateReleases],
	};
}
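// Resolves the scraper for a channel (falling back to its parent and grandparent
// slugs, and to a layout-specific variant when one exists), then scrapes its releases.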
async function scrapeChannel(channelEntity, accNetworkReleases) {
	const scraper = scrapers.releases[channelEntity.slug]
		|| scrapers.releases[channelEntity.parent?.slug]
		|| scrapers.releases[channelEntity.parent?.parent?.slug];

	const layoutScraper = scraper?.[channelEntity.parameters?.layout] || scraper;

	if (!layoutScraper) {
		logger.warn(`No scraper found for '${channelEntity.name}' (${channelEntity.parent?.name})`);
		return emptyReleases;
	}

	try {
		const beforeFetchLatest = await scraper.beforeFetchLatest?.(channelEntity);

		return await scrapeChannelReleases(layoutScraper, channelEntity, {
			...accNetworkReleases,
			beforeFetchLatest,
		});
	} catch (error) {
		logger.error(`Failed to scrape releases from ${channelEntity.name} using ${scraper.slug}: ${error.message}`);

		return emptyReleases;
	}
}
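// Scrapes a network's channels one at a time, passing the accumulated releases to
// each channel so cross-channel duplicates can be detected via preData.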
async function scrapeNetworkSequential(networkEntity) {
	const releases = await Promise.reduce(
		networkEntity.includedChildren,
		async (chain, channelEntity) => {
			const accNetworkReleases = await chain;
			const { uniqueReleases, duplicateReleases } = await scrapeChannel(channelEntity, accNetworkReleases);

			return {
				uniqueReleases: accNetworkReleases.uniqueReleases.concat(uniqueReleases),
				duplicateReleases: accNetworkReleases.duplicateReleases.concat(duplicateReleases),
			};
		},
		Promise.resolve(emptyReleases),
	);

	return releases.uniqueReleases;
}
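// Scrapes up to three of a network's channels concurrently. Unlike the sequential
// path, each channel receives the network entity rather than accumulated releases.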
async function scrapeNetworkParallel(networkEntity) {
	return Promise.map(
		networkEntity.includedChildren,
		async (channelEntity) => {
			const { uniqueReleases } = await scrapeChannel(channelEntity, networkEntity);

			return uniqueReleases;
		},
		{ concurrency: 3 },
	);
}
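// Entry point: scrapes every included network, sequentially when the network's
// parameters request it, otherwise in parallel, and returns the flattened releases.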
async function fetchUpdates() {
	const includedNetworks = await fetchIncludedEntities();

	const scrapedNetworks = await Promise.map(
		includedNetworks,
		async networkEntity => (networkEntity.parameters?.sequential
			? scrapeNetworkSequential(networkEntity)
			: scrapeNetworkParallel(networkEntity)),
		{ concurrency: 5 },
	);

	const releases = scrapedNetworks.flat(2);

	return releases;
}

module.exports = fetchUpdates;
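// Example usage (a sketch; assumes this file is saved as update.js — the project's
// actual CLI wiring may differ):
//
//   const fetchUpdates = require('./update');
//
//   fetchUpdates().then((releases) => {
//     console.log(`Fetched ${releases.length} new releases`);
//   });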