// traxxx/src/fetch-scene.js
'use strict';
const config = require('config');
const moment = require('moment');
const knex = require('./knex');
const scrapers = require('./scrapers');
/**
 * Look up the site record whose stored URL matches the origin of a scene URL.
 *
 * @param {string} url - Full scene URL; only its origin is used for matching.
 * @returns {Promise<Object>} Site record with camelCased keys
 *   ({ id, name, description, url, networkId }).
 * @throws {Error} When no site row matches the URL's origin.
 */
async function findSite(url) {
    const { origin } = new URL(url);
    const site = await knex('sites').where({ url: origin }).first();

    // .first() resolves to undefined when no row matches; fail with a clear
    // message here instead of an opaque TypeError on site.id below.
    if (!site) {
        throw new Error(`No site found for '${origin}'`);
    }

    return {
        id: site.id,
        name: site.name,
        description: site.description,
        url: site.url,
        networkId: site.network_id,
    };
}
/**
 * Build a filename for a scene from the configured pattern.
 *
 * Tokens of the form {prop} in config.filename.pattern are replaced with the
 * corresponding scene property; unknown or missing properties collapse to an
 * empty string. Slashes in values are replaced with the configured substitute
 * so the result stays a single path segment.
 *
 * @param {Object} scene - Scraped scene; reads site.name, shootId, title,
 *   actors (array of strings) and date.
 * @returns {string} The formatted filename (without extension).
 */
function deriveFilename(scene) {
    const props = {
        siteName: scene.site.name,
        sceneId: scene.shootId,
        sceneTitle: scene.title,
        sceneActors: scene.actors.join(config.filename.actorsJoin),
        sceneDate: moment.utc(scene.date).format(config.filename.dateFormat),
    };

    return config.filename.pattern.replace(/\{\w+\}/g, (match) => {
        const prop = match.slice(1, -1);
        const rawValue = props[prop];

        // Unknown tokens and absent/empty values collapse to nothing.
        if (rawValue === undefined || rawValue === null || rawValue === '') {
            return '';
        }

        // Coerce to string so non-string values (e.g. a numeric shoot ID)
        // don't crash on .replace below.
        const value = String(rawValue);

        // Optional per-property wrapper, e.g. '[{sceneDate}]'.
        const subpattern = config.filename.subpatterns[prop];
        const rendered = subpattern ? subpattern.replace(/\{\w+\}/, value) : value;

        // Slashes would create path separators; substitute the configured char.
        return rendered.replace(/\//g, config.filename.slash);
    });
}
/**
 * Scrape a single scene from its URL.
 *
 * Resolves the owning site, picks a scraper keyed by site id (falling back
 * to the site's network id), and returns the scraped scene augmented with a
 * derived filename.
 *
 * @param {string} url - Scene URL to fetch.
 * @returns {Promise<Object>} Scene data plus `filename` and `copy` fields.
 * @throws {Error} When no scraper exists for the site/network, or the
 *   matched scraper has no fetchScene capability.
 */
async function fetchScene(url) {
    const site = await findSite(url);
    const siteScraper = scrapers[site.id] || scrapers[site.networkId];

    if (!siteScraper) {
        throw new Error('Could not find scraper for URL');
    }

    if (!siteScraper.fetchScene) {
        throw new Error(`The '${site.name}'-scraper cannot fetch individual scenes`);
    }

    const sceneData = await siteScraper.fetchScene(url, site);
    const filename = deriveFilename(sceneData);

    return {
        ...sceneData,
        filename,
        copy: filename,
    };
}
// Single entry point: scrape one scene (with derived filename) from its URL.
module.exports = fetchScene;