Refactoring to use entities over sites and networks.
@@ -30,14 +30,14 @@ async function filterUniqueReleases(latestReleases, accReleases) {
     .map(release => [release.site.id, release.entryId]);
 
   const duplicateReleases = await knex('releases')
-    .whereIn(['site_id', 'entry_id'], latestReleaseIdentifiers);
+    .whereIn(['entity_id', 'entry_id'], latestReleaseIdentifiers);
 
   // add entry IDs of accumulated releases to prevent an infinite scrape loop
   // when one page contains the same release as the previous
   const duplicateReleasesSiteIdAndEntryIds = duplicateReleases
     .concat(accReleases)
     .reduce((acc, release) => {
-      const siteId = release.site_id || release.site.id;
+      const siteId = release.entity_id || release.site.id;
       const entryId = release.entry_id || release.entryId;
 
       if (!acc[siteId]) acc[siteId] = {};
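
For context, the tuple form of whereIn used in this hunk matches rows on a column pair rather than a single column. A minimal sketch of the duplicate lookup as it reads after the refactor, assuming a configured knex instance (the connection details and function name here are placeholders, not part of the commit):

// Sketch only: assumes a knex instance wired to the app's database and the
// post-refactor 'entity_id' column on the 'releases' table.
const knex = require('knex')({ client: 'pg', connection: process.env.DATABASE_URL });

async function findDuplicateReleases(latestReleases) {
  // Each release is identified by an (entity ID, entry ID) pair.
  const identifiers = latestReleases.map(release => [release.site.id, release.entryId]);

  // knex accepts an array of columns here and compiles the pairs to
  // WHERE (entity_id, entry_id) IN ((...), (...), ...)
  return knex('releases').whereIn(['entity_id', 'entry_id'], identifiers);
}
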
@@ -85,7 +85,7 @@ async function scrapeReleases(scraper, site, preData, upcoming = false) {
 
   if (!Array.isArray(latestReleases)) {
     // scraper is unable to fetch the releases and returned a HTTP code or null
-    logger.warn(`Scraper returned ${latestReleases} when fetching latest from '${site.name}' (${site.network.name})`);
+    logger.warn(`Scraper returned ${latestReleases} when fetching latest from '${site.name}' (${site.parent?.name})`);
     return accReleases;
   }
 
@@ -102,7 +102,7 @@ async function scrapeReleases(scraper, site, preData, upcoming = false) {
 
   const pageAccReleases = accReleases.concat(uniqueReleases);
 
-  logger.verbose(`Scraped '${site.name}' (${site.network.name}) ${upcoming ? 'upcoming' : 'latest'} page ${page}, found ${uniqueReleases.length} unique updates`);
+  logger.verbose(`Scraped '${site.name}' (${site.parent?.name}) ${upcoming ? 'upcoming' : 'latest'} page ${page}, found ${uniqueReleases.length} unique updates`);
 
   if (needNextPage(uniqueReleases, pageAccReleases)) {
     return scrapePage(page + 1, pageAccReleases);
@@ -135,7 +135,7 @@ async function scrapeLatestReleases(scraper, site, preData) {
   try {
     return await scrapeReleases(scraper, site, preData, false);
   } catch (error) {
-    logger.warn(`Failed to scrape latest updates for '${site.slug}' (${site.network.slug}): ${error.message}`);
+    logger.warn(`Failed to scrape latest updates for '${site.slug}' (${site.parent?.slug}): ${error.message}`);
   }
 
   return [];
@@ -149,7 +149,7 @@ async function scrapeUpcomingReleases(scraper, site, preData) {
   try {
     return await scrapeReleases(scraper, site, preData, true);
   } catch (error) {
-    logger.warn(`Failed to scrape upcoming updates for '${site.slug}' (${site.network.slug}): ${error.message}`);
+    logger.warn(`Failed to scrape upcoming updates for '${site.slug}' (${site.parent?.slug}): ${error.message}`);
   }
 
   return [];
@@ -165,18 +165,18 @@ async function scrapeSiteReleases(scraper, site, preData) {
       : [],
   ]);
 
-  logger.info(`Fetching ${latestReleases.length} latest and ${upcomingReleases.length} upcoming updates for '${site.name}' (${site.network.name})`);
+  logger.info(`Fetching ${latestReleases.length} latest and ${upcomingReleases.length} upcoming updates for '${site.name}' (${site.parent.name})`);
 
   return [...latestReleases, ...upcomingReleases];
 }
 
 async function scrapeSite(site, accSiteReleases) {
   const scraper = scrapers.releases[site.slug]
-    || scrapers.releases[site.network.slug]
-    || scrapers.releases[site.network.parent?.slug];
+    || scrapers.releases[site.parent?.slug]
+    || scrapers.releases[site.parent?.parent?.slug];
 
   if (!scraper) {
-    logger.warn(`No scraper found for '${site.name}' (${site.network.name})`);
+    logger.warn(`No scraper found for '${site.name}' (${site.parent.name})`);
     return [];
   }
 
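
The scraper lookup in this hunk no longer assumes a fixed site → network pair; it walks an entity's parent chain. A sketch of the shape the new code assumes, with invented slugs and a stubbed scraper registry (none of these names are from the commit):

// Hypothetical entity tree: a site whose parent is a network, which may in
// turn have its own parent entity. Only 'slug' matters to the lookup.
const scrapers = {
  releases: {
    'example-network': { /* scraper module */ },
  },
};

const site = {
  slug: 'example-site',        // checked first
  parent: {
    slug: 'example-network',   // then site.parent?.slug
    parent: {
      slug: 'example-studio',  // finally site.parent?.parent?.slug
    },
  },
};

// Resolves to the network-level scraper, since no site-level one is registered.
const scraper = scrapers.releases[site.slug]
  || scrapers.releases[site.parent?.slug]
  || scrapers.releases[site.parent?.parent?.slug];
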
@@ -211,7 +211,7 @@ async function scrapeNetworkSequential(network) {
 
 async function scrapeNetworkParallel(network) {
   return Promise.map(
-    network.sites,
+    network.children,
     async site => scrapeSite(site, network),
     { concurrency: 3 },
   );
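
Promise.map with a { concurrency } option is Bluebird's API, not the native Promise; the hunk above caps parallel scrapes at three children per network. A minimal sketch of the same pattern in isolation (the work function is a placeholder):

const Promise = require('bluebird');

async function processChildren(network) {
  // At most three children are in flight at any one time.
  return Promise.map(
    network.children,
    async child => child.slug.toUpperCase(), // placeholder work per child
    { concurrency: 3 },
  );
}
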
@@ -222,8 +222,6 @@ async function fetchUpdates() {
     ? await fetchSitesFromArgv()
     : await fetchSitesFromConfig();
 
-  // console.log('included', includedNetworks);
-
   const scrapedNetworks = await Promise.map(
     includedNetworks,
     async network => (network.parameters?.sequential
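
The query changes above assume the releases table's site_id column has already become entity_id. The schema migration itself is not part of this diff; a minimal knex migration sketch under that assumption (hypothetical, the real migration is not shown in this commit):

// Hypothetical migration accompanying this refactor.
exports.up = knex => knex.schema.alterTable('releases', (table) => {
  table.renameColumn('site_id', 'entity_id');
});

exports.down = knex => knex.schema.alterTable('releases', (table) => {
  table.renameColumn('entity_id', 'site_id');
});
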