'use strict';

const config = require('config');

const argv = require('./argv');
const scrapers = require('./scrapers/scrapers');
const { storeReleases } = require('./releases');
const { findSiteByUrl } = require('./sites');
const { findNetworkByUrl } = require('./networks');

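// Resolve the site a URL belongs to; when no site matches, fall back to its network.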
async function findSite(url, release) {
  const site = (release && release.site) || await findSiteByUrl(url);

  if (site) {
    return site;
  }

  const network = await findNetworkByUrl(url);

  if (network) {
    // no site matched, so expose the network itself as a fallback "site"
    return {
      ...network,
      network,
      isFallback: true,
    };
  }

  return null;
}

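// Scrape a single release from its URL; `deep` is true when called by a site scraper.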
async function scrapeRelease(url, release, deep = false) {
  const site = await findSite(url, release);

  if (!site) {
    throw new Error('Could not find site in database');
  }

  const scraper = scrapers.releases[site.slug] || scrapers.releases[site.network.slug];

  if (!scraper) {
    throw new Error('Could not find scraper for URL');
  }

  if (!scraper.fetchScene) {
    throw new Error(`The '${site.name}' scraper cannot fetch individual releases`);
  }

  const scene = await scraper.fetchScene(url, site, release);

  if (!deep && argv.save) {
    // don't store the release when called by a site scraper
    const [storedRelease] = await storeReleases([scene]);

    if (storedRelease) {
      console.log(`http://${config.web.host}:${config.web.port}/scene/${storedRelease.id}`);
    }
  }

  return scene;
}

module.exports = scrapeRelease;
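
// Example usage (hypothetical caller; the module path and scene fields are assumptions):
// const scrapeRelease = require('./scrape-release');
// scrapeRelease('https://example.com/scene/123').then(scene => console.log(scene.title));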