'use strict';

const config = require('config');
const moment = require('moment');

const networks = require('./networks');
const scrapers = require('./scrapers');

/**
 * Finds the site whose configured URL matches the origin of the given
 * scene URL, along with its parent network.
 *
 * @param {string} url - Full scene URL (must be parseable by `new URL`).
 * @returns {?{site: Object, network: Object}} The matching site and network,
 *   each shallow-copied with its config key attached as `id`; `null` when no
 *   site in any network has a `url` equal to the URL's origin.
 */
function findSite(url) {
	const { origin } = new URL(url);

	// Early-returning search: stop at the first site whose configured URL
	// matches the scene URL's origin.
	for (const [networkId, network] of Object.entries(networks)) {
		for (const [siteId, site] of Object.entries(network.sites)) {
			if (site.url === origin) {
				return {
					site: { ...site, id: siteId },
					network: { ...network, id: networkId },
				};
			}
		}
	}

	return null;
}

/**
 * Derives a filename for a scene from the configured filename pattern.
 *
 * Placeholders of the form `{propName}` in `config.filename.pattern` are
 * substituted with scene properties. When `config.filename.subpatterns`
 * defines a template for a property, the value is first injected into that
 * subpattern. Forward slashes in the result are replaced with
 * `config.filename.slash` so values cannot create path components; missing
 * or empty properties render as ''.
 *
 * @param {Object} scene - Scraped scene with `site`, `id`, `title`,
 *   `actors` (Array) and `date`.
 * @returns {string} The derived filename.
 */
function deriveFilename(scene) {
	const props = {
		siteName: scene.site.name,
		sceneId: scene.id,
		sceneTitle: scene.title,
		sceneActors: scene.actors.join(config.filename.actorsJoin),
		sceneDate: moment.utc(scene.date).format(config.filename.dateFormat),
	};

	return config.filename.pattern.replace(/\{\w+\}/g, (match) => {
		const prop = match.slice(1, -1); // strip the surrounding braces
		const value = props[prop];

		if (!value) {
			return '';
		}

		// When a subpattern exists, inject the raw value first, then
		// sanitize slashes in the whole expanded result (including any
		// slashes the subpattern itself contains).
		if (config.filename.subpatterns[prop]) {
			return config.filename.subpatterns[prop]
				.replace(/\{\w+\}/, value)
				.replace(/\//g, config.filename.slash);
		}

		// String() guards against non-string props (e.g. a numeric scene ID),
		// which would otherwise crash on .replace.
		return String(value).replace(/\//g, config.filename.slash);
	});
}

/**
 * Fetches a single scene from a supported site URL using the matching
 * site- or network-level scraper, and attaches a derived filename.
 *
 * @param {string} url - Full scene URL.
 * @returns {Promise<Object>} The scraped scene, extended with `filename`
 *   and `copy` (both set to the derived filename).
 * @throws {Error} When the URL does not match any configured site, when no
 *   scraper exists for the site or network, or when the scraper cannot
 *   fetch individual scenes.
 */
async function fetchScene(url) {
	const found = findSite(url);

	// findSite returns null for unknown origins; fail with a clear message
	// instead of a TypeError from destructuring null.
	if (!found) {
		throw new Error(`Could not find site for URL '${url}'`);
	}

	const { site, network } = found;
	const scraper = scrapers[site.id] || scrapers[network.id];

	if (!scraper) {
		throw new Error('Could not find scraper for URL');
	}

	if (!scraper.fetchScene) {
		throw new Error(`The '${site.name}'-scraper cannot fetch individual scenes`);
	}

	const scene = await scraper.fetchScene(url, site);
	const filename = deriveFilename(scene);

	return {
		...scene,
		filename,
		copy: filename,
	};
}

module.exports = fetchScene;