Added 21 Naturals and 21 Sextreme sites.

2020-02-10 23:11:11 +01:00
parent c411979edb
commit ce448da7e0
14 changed files with 88 additions and 6 deletions

View File

@@ -91,6 +91,10 @@ async function getPhotos(albumPath, site) {

 async function scrapeApiReleases(json, site) {
   return json.map((scene) => {
+    if (site.parameters?.extract && scene.sitename !== site.parameters.extract) {
+      return null;
+    }
+
     const release = {
       entryId: scene.clip_id,
       title: scene.title,
@@ -125,7 +129,7 @@ async function scrapeApiReleases(json, site) {
     release.movie = `${site.url}/en/movie/${scene.url_movie_title}/${scene.movie_id}`;

     return release;
-  });
+  }).filter(Boolean);
 }

 function scrapeAll(html, site, networkUrl, hasTeaser = true) {
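
The change above lets the shared API scraper skip scenes that belong to a different site in the same feed: entries failing the extract check map to null, and the new .filter(Boolean) strips them out before they reach callers. A minimal sketch of the pattern, assuming a payload whose fields (sitename, clip_id, title) mirror the ones read above:

// Sketch only: the scene payload shape and site object are assumptions
// based on the fields the scraper reads above.
function scrapeApiReleasesSketch(json, site) {
  return json
    .map((scene) => {
      // Optional chaining makes the check safe for sites that define
      // no parameters object at all.
      if (site.parameters?.extract && scene.sitename !== site.parameters.extract) {
        return null; // scene belongs to another site in the shared feed
      }

      return { entryId: scene.clip_id, title: scene.title };
    })
    .filter(Boolean); // drop the nulls so skipped scenes never reach callers
}

.filter(Boolean) works here because null is falsy while every real release object is truthy, so only the skipped scenes are removed.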

View File

@@ -51,7 +51,7 @@ function scrapeLatest(html, site) {
   });
 }

-function scrapeScene(html, site) {
+function scrapeScene(html, site, url) {
   const { document } = new JSDOM(html).window;

   const release = { site };
@@ -59,6 +59,7 @@ function scrapeScene(html, site) {
   release.description = document.querySelector('#story-and-tags td:nth-child(2) div').textContent;
   const [actors, title, channel] = document.querySelector('title').textContent.split('|').map(item => item.trim());

+  release.url = url;
   release.title = title;
   release.actors = extractActors(actors);
   release.channel = channel.toLowerCase();
@@ -166,11 +167,11 @@ async function fetchScene(url, site) {
   const session = bhttp.session(); // resolve redirects
   const res = await session.get(url);

-  if (site.parameters.scraper === 'A') {
+  if (site.parameters?.scraper === 'A') {
     return scrapeSceneA(res.body.toString(), site, null, url);
   }

-  return scrapeScene(res.body.toString(), site);
+  return scrapeScene(res.body.toString(), site, url);
 }

 module.exports = {
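
This file threads the request URL from fetchScene down into scrapeScene so each release records the page it was scraped from, and swaps site.parameters.scraper for the optional-chained site.parameters?.scraper so sites without a parameters object no longer throw. A minimal sketch of the receiving side, assuming jsdom as above; the title selector and the "actors | title | channel" format come from the diff, and the project's extractActors helper is omitted:

const { JSDOM } = require('jsdom');

// Sketch only: a trimmed scrapeScene showing the new url parameter.
function scrapeSceneSketch(html, site, url) {
  const { document } = new JSDOM(html).window;
  const release = { site };

  // Record the caller-supplied (redirect-resolved) URL; the page markup
  // itself is presumably not a reliable source for the scene's address.
  release.url = url;

  const [, title, channel] = document.querySelector('title')
    .textContent.split('|').map((item) => item.trim());

  release.title = title;
  release.channel = channel.toLowerCase();

  return release;
}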

View File

@@ -111,9 +111,9 @@ async function fetchSitesFromArgv() {
       'sites.*',
       'networks.name as network_name', 'networks.slug as network_slug', 'networks.url as network_url', 'networks.description as network_description', 'networks.parameters as network_parameters',
     )
-    .where('sites.scrape', true)
     .whereIn('sites.slug', argv.sites || [])
     .orWhereIn('networks.slug', argv.networks || [])
+    .where('sites.scrape', true)
     .leftJoin('networks', 'sites.network_id', 'networks.id');

   const curatedSites = await curateSites(rawSites, true);
@@ -132,7 +132,6 @@ async function fetchSitesFromConfig() {
       'networks.name as network_name', 'networks.slug as network_slug', 'networks.url as network_url', 'networks.description as network_description', 'networks.parameters as network_parameters',
     )
     .leftJoin('networks', 'sites.network_id', 'networks.id')
-    .where('sites.scrape', true)
     .where((builder) => {
       if (config.include) {
         builder
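
The last file reorders the Knex chain in fetchSitesFromArgv so the sites.scrape filter no longer sits in front of the ungrouped orWhereIn. Bare or-clauses bind by plain SQL precedence (AND before OR), so a network match could bypass the scrape flag entirely. The where((builder) => { ... }) callback that fetchSitesFromConfig uses (the hunk is truncated above) is the robust version of the same idea: it wraps its clauses in parentheses. A minimal sketch of that grouping, assuming a configured knex instance and a hypothetical include object standing in for config.include:

// Sketch only: table and column names mirror the query above; the
// connection settings are placeholders.
const knex = require('knex')({ client: 'pg', connection: process.env.DATABASE_URL });

async function fetchScrapableSites(include) {
  return knex('sites')
    .select('sites.*', 'networks.slug as network_slug')
    .leftJoin('networks', 'sites.network_id', 'networks.id')
    .where('sites.scrape', true)
    .where((builder) => {
      // The callback groups both filters into one parenthesised block, so
      // the generated SQL reads:
      //   WHERE scrape = true AND (sites.slug IN (...) OR networks.slug IN (...))
      // and the OR can never bypass the scrape condition.
      builder
        .whereIn('sites.slug', include.sites || [])
        .orWhereIn('networks.slug', include.networks || []);
    });
}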