Improved duplicate filtering, now also applied to upcoming updates. Updated Gamma fetchLatest method parameters. Added shortcut for SFW mode.

2020-03-28 04:37:04 +01:00
parent 95d115b585
commit 6d9f96c5d5
10 changed files with 112 additions and 40 deletions
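The body of filterUniqueReleases (the renamed extractUniqueReleases) lies outside the visible hunks; the diff below only shows its signature and call site. A minimal sketch of the kind of duplicate filtering the commit message describes, assuming releases are keyed by their site ID and entryId as in the visible .map call (the Set-based lookup and the identifier format are illustrative assumptions, not the project's actual implementation):

// Illustrative sketch only; the real filterUniqueReleases is async and may also
// exclude releases already stored in the database from earlier runs.
async function filterUniqueReleases(latestReleases, accReleases) {
  // Key each release by site ID and site-specific entry ID, mirroring the
  // [release.site.id, release.entryId] pairs built in the diff below.
  const accIdentifiers = new Set(accReleases.map(release => `${release.site.id}:${release.entryId}`));

  // Keep only releases not already accumulated from earlier pages.
  return latestReleases.filter(release => !accIdentifiers.has(`${release.site.id}:${release.entryId}`));
}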


@@ -25,7 +25,7 @@ const afterDate = (() => {
     .toDate();
 })();
 
-async function extractUniqueReleases(latestReleases, accReleases) {
+async function filterUniqueReleases(latestReleases, accReleases) {
   const latestReleaseIdentifiers = latestReleases
     .map(release => [release.site.id, release.entryId]);
@@ -75,13 +75,11 @@ function needNextPage(uniqueReleases, pageAccReleases) {
   return pageAccReleases.length <= argv.nullDateLimit;
 }
 
-async function scrapeLatestReleases(scraper, site, preData) {
-  if (!scraper.fetchLatest) {
-    return [];
-  }
-
+async function scrapeReleases(scraper, site, preData, upcoming = false) {
   const scrapePage = async (page = 1, accReleases = []) => {
-    const latestReleases = await scraper.fetchLatest(site, page, preData, include);
+    const latestReleases = upcoming
+      ? await scraper.fetchUpcoming(site, page, preData, include)
+      : await scraper.fetchLatest(site, page, preData, include);
 
     if (!Array.isArray(latestReleases)) {
       // scraper is unable to fetch the releases and returned a HTTP code or null
@@ -98,11 +96,11 @@ async function scrapeLatestReleases(scraper, site, preData) {
     const uniqueReleases = argv.redownload
       ? latestReleasesWithSite
-      : await extractUniqueReleases(latestReleasesWithSite, accReleases);
+      : await filterUniqueReleases(latestReleasesWithSite, accReleases);
 
     const pageAccReleases = accReleases.concat(uniqueReleases);
 
-    logger.verbose(`Scraped '${site.name}' (${site.network.name}) page ${page}, found ${uniqueReleases.length} unique releases`);
+    logger.verbose(`Scraped '${site.name}' (${site.network.name}) ${upcoming ? 'upcoming' : 'latest'} page ${page}, found ${uniqueReleases.length} unique updates`);
 
     if (needNextPage(uniqueReleases, pageAccReleases)) {
       return scrapePage(page + 1, pageAccReleases);
@@ -111,37 +109,45 @@ async function scrapeLatestReleases(scraper, site, preData) {
     return pageAccReleases;
   };
 
-  const releases = await scrapePage(1, []);
+  const rawReleases = await scrapePage(1, []);
+  const releases = upcoming
+    ? rawReleases.map(rawRelease => ({ ...rawRelease, upcoming: true }))
+    : rawReleases;
 
   if (argv.last) {
     return releases.slice(0, argv.last);
   }
 
   if (releases.every(release => release.date)) {
-    return releases
-      .filter(release => moment(release.date).isAfter(afterDate));
+    return releases.filter(release => moment(release.date).isAfter(afterDate));
   }
 
   return releases.slice(0, argv.nullDateLimit);
 }
 
+async function scrapeLatestReleases(scraper, site, preData) {
+  if (!scraper.fetchLatest) {
+    return [];
+  }
+
+  try {
+    return await scrapeReleases(scraper, site, preData, false);
+  } catch (error) {
+    logger.warn(`Failed to scrape latest updates for '${site.slug}' (${site.network.slug})`);
+  }
+
+  return [];
+}
+
 async function scrapeUpcomingReleases(scraper, site, preData) {
   if (!scraper.fetchUpcoming) {
     return [];
   }
 
   try {
-    const upcomingReleases = await scraper.fetchUpcoming(site, 1, preData, include);
-
-    if (upcomingReleases) {
-      return upcomingReleases.map(release => ({
-        ...release,
-        site,
-        upcoming: true,
-      }));
-    }
+    return await scrapeReleases(scraper, site, preData, true);
   } catch (error) {
-    logger.warn(`Failed to scrape upcoming releases for '${site.slug}' (${site.network.slug})`);
+    logger.warn(`Failed to scrape upcoming updates for '${site.slug}' (${site.network.slug})`);
   }
 
   return [];
@@ -157,6 +163,8 @@ async function scrapeSiteReleases(scraper, site, preData) {
       : [],
   ]);
 
+  logger.info(`Fetching ${latestReleases.length} latest and ${upcomingReleases.length} upcoming updates for '${site.name}' (${site.network.name})`);
+
   return [...latestReleases, ...upcomingReleases];
 }
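For context, the per-site scrapers consumed above are expected to expose a fetchLatest method and, optionally, a fetchUpcoming method with the (site, page, preData, include) signature used by scrapeReleases, returning an array of release objects on success and a non-array value (an HTTP code or null) on failure. A minimal sketch of that contract, assuming a CommonJS module; field names other than entryId and date, and the example values, are illustrative assumptions:

// Hypothetical scraper module shape, matching the calls made by scrapeReleases.
async function fetchLatest(site, page, preData, include) {
  // A real scraper would fetch and parse the site's release listing for `page` here.
  return [
    { entryId: 'example-entry-1', title: 'Example Release', date: new Date('2020-03-27') },
  ];
}

async function fetchUpcoming(site, page, preData, include) {
  // Returning null (or an HTTP code) signals failure, which scrapeReleases
  // detects with its Array.isArray check.
  return null;
}

module.exports = { fetchLatest, fetchUpcoming };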