forked from DebaucheryLibrarian/traxxx
Major refactor: cleaned up site scrape module, fixed and cleaned up release scrape module. Removed old CLI code.
src/scrape-sites.js (new file, 136 lines)
@@ -0,0 +1,136 @@
'use strict';

const Promise = require('bluebird');
const moment = require('moment');

const argv = require('./argv');
const knex = require('./knex');
const { fetchIncludedSites } = require('./sites');
const scrapers = require('./scrapers/scrapers');
const scrapeRelease = require('./scrape-release');
const { storeReleases } = require('./releases');

function getAfterDate() {
    return moment
        .utc()
        .subtract(...argv.after.split(' '))
        .toDate();
}
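
// Note, added for clarity and not part of the original commit: argv.after is
// assumed to hold an amount and a moment unit separated by a space, so a flag
// such as --after "1 month" makes getAfterDate() evaluate
// moment.utc().subtract('1', 'month').toDate().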

async function findDuplicateReleaseIds(latestReleases, accReleases) {
    const duplicateReleases = await knex('releases')
        .whereIn('entry_id', latestReleases.map(({ entryId }) => entryId));

    // include accumulated releases as duplicates to prevent an infinite
    // loop when the next page contains the same releases as the previous;
    // stringify both sources so lookups are type-consistent
    return new Set(duplicateReleases
        .map(release => String(release.entry_id))
        .concat(accReleases.map(release => String(release.entryId))));
}

async function scrapeUniqueReleases(scraper, site, afterDate = getAfterDate(), accReleases = [], page = 1) {
    const latestReleases = await scraper.fetchLatest(site, page);

    if (latestReleases.length === 0) {
        return accReleases;
    }

    const oldestReleaseOnPage = latestReleases.slice(-1)[0].date;
    const duplicateReleaseIds = await findDuplicateReleaseIds(latestReleases, accReleases);

    const uniqueReleases = latestReleases
        .filter(release => !duplicateReleaseIds.has(String(release.entryId)) // release is not already in the database
            && moment(release.date).isAfter(afterDate)); // release is newer than the specified date limit

    console.log(`\x1b[90m${site.name}: Scraped page ${page}, ${uniqueReleases.length} unique recent releases\x1b[0m`);

    if (
        uniqueReleases.length > 0
        && (oldestReleaseOnPage || page < argv.pages)
        && moment(oldestReleaseOnPage).isAfter(afterDate)
    ) {
        // oldest release on the page is newer than the specified limit, fetch the next page
        return scrapeUniqueReleases(scraper, site, afterDate, accReleases.concat(uniqueReleases), page + 1);
    }

    return accReleases.concat(uniqueReleases);
}
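
// For reference, and not part of the original commit: the release shape a
// scraper's fetchLatest() is assumed to return, inferred from the fields this
// module reads (entryId for deduplication, date for the age filter, url for
// deep fetching). Values are illustrative only:
//
// {
//     entryId: '12345',
//     date: new Date('2019-01-01'),
//     url: 'https://example.com/scene/12345',
// }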

async function scrapeUpcomingReleases(scraper, site) {
    if (scraper.fetchUpcoming) {
        const upcomingReleases = await scraper.fetchUpcoming(site);

        return upcomingReleases.map(release => ({ ...release, upcoming: true }));
    }

    return [];
}

async function deepFetchReleases(baseReleases) {
    // follow each release URL to merge full scene details into the base data
    return Promise.map(baseReleases, async (release) => {
        if (release.url) {
            const fullRelease = await scrapeRelease(release.url, release, true);

            return {
                ...release,
                ...fullRelease,
            };
        }

        return release;
    }, {
        concurrency: 2,
    });
}

async function scrapeSiteReleases(scraper, site) {
    const [newReleases, upcomingReleases] = await Promise.all([
        scrapeUniqueReleases(scraper, site), // fetch basic release info from the scene overview
        scrapeUpcomingReleases(scraper, site), // fetch basic release info from the upcoming overview
    ]);

    console.log(`${site.name}: Found ${newReleases.length} recent releases, ${upcomingReleases.length} upcoming releases`);

    const baseReleases = [...newReleases, ...upcomingReleases];

    if (argv.deep) {
        // follow the URL of every release
        return deepFetchReleases(baseReleases);
    }

    return baseReleases;
}

async function scrapeReleases() {
    const sites = await fetchIncludedSites();

    console.log(`Found ${sites.length} sites in database`);

    await Promise.map(sites, async (site) => {
        const scraper = scrapers[site.slug] || scrapers[site.network.slug];

        if (!scraper) {
            console.warn(`No scraper found for '${site.name}' (${site.slug})`);
            return;
        }

        try {
            const siteReleases = await scrapeSiteReleases(scraper, site);

            if (argv.save) {
                await storeReleases(siteReleases);
            }
        } catch (error) {
            if (argv.debug) {
                console.error(`${site.id}: Failed to fetch releases`, error);
                return;
            }

            console.warn(`${site.id}: Failed to fetch releases`);
        }
    }, {
        concurrency: 2,
    });
}

module.exports = scrapeReleases;
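
For context, a minimal sketch of how an entry point might invoke this module; the file name and error handling are assumptions, since the CLI code this commit removes is not shown:

// hypothetical entry point, e.g. bin/scrape.js (name assumed)
const scrapeReleases = require('./src/scrape-sites');

scrapeReleases().catch((error) => {
    console.error('Scrape run failed', error);
    process.exitCode = 1;
});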