Added Bang Bros data (no scraper yet). Added 21Sextury scene fetch date fallback.

commit bc01cbe6dc
parent 194c6d1cca
Date: 2019-04-10 03:42:20 +02:00

10 changed files with 566 additions and 11 deletions


@@ -33,7 +33,10 @@ function curateSites(sites) {
     name: site.name,
     description: site.description,
     url: site.url,
-    networkId: site.network_id,
+    network: {
+      id: site.network_id,
+      name: site.network_name,
+    },
     parameters: JSON.parse(site.parameters),
   }));
 }
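With the networks join introduced below, each curated site now carries a nested network object in place of the old bare networkId. A minimal sketch of the resulting shape; all values are hypothetical, not taken from this commit:

    // Sketch: a curated site after the join; ids and names are illustrative
    const curatedSite = {
      id: 'examplesite',
      name: 'Example Site',
      description: null,
      url: 'https://www.example.com',
      network: {
        id: 'bangbros',
        name: 'Bang Bros',
      },
      parameters: null,
    };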
@@ -41,8 +44,10 @@ function curateSites(sites) {
 async function accumulateIncludedSites() {
   if (argv.networks || argv.sites) {
     const rawSites = await knex('sites')
-      .whereIn('id', argv.sites || [])
-      .orWhereIn('network_id', argv.networks || []);
+      .select('sites.*', 'networks.name as network_name')
+      .whereIn('sites.id', argv.sites || [])
+      .orWhereIn('network_id', argv.networks || [])
+      .leftJoin('networks', 'sites.network_id', 'networks.id');

     return curateSites(rawSites);
   }
@@ -50,8 +55,10 @@ async function accumulateIncludedSites() {
   const included = destructConfigNetworks(config.include);

   const rawSites = await knex('sites')
-    .whereIn('id', included.sites)
-    .orWhereIn('network_id', included.networks);
+    .select('sites.*', 'networks.name as network_name')
+    .whereIn('sites.id', included.sites)
+    .orWhereIn('network_id', included.networks)
+    .leftJoin('networks', 'sites.network_id', 'networks.id');

   return curateSites(rawSites);
 }
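Both queries chain leftJoin after the where clauses; that ordering is cosmetic, because knex only assembles the SQL when the builder is awaited. A minimal sketch, assuming the sites/networks schema implied above, that prints the generated statement:

    // Sketch: knex builds SQL lazily, so join/where method order doesn't matter
    const query = knex('sites')
      .select('sites.*', 'networks.name as network_name')
      .whereIn('sites.id', ['examplesite'])
      .leftJoin('networks', 'sites.network_id', 'networks.id');

    // toString() renders the statement without executing it
    console.log(query.toString());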
@@ -127,7 +134,7 @@ async function fetchReleases() {
   const sites = await accumulateIncludedSites();

   const scenesPerSite = await Promise.all(sites.map(async (site) => {
-    const scraper = scrapers[site.id] || scrapers[site.networkId];
+    const scraper = scrapers[site.id] || scrapers[site.network.id];

     if (scraper) {
       try {
@@ -144,7 +151,17 @@ async function fetchReleases() {
           await storeReleases(newReleases);
         }

-        return [...newReleases, ...upcomingReleases.map(release => ({ ...release, upcoming: true }))];
+        return [
+          ...newReleases.map(release => ({
+            ...release,
+            network: site.network,
+          })),
+          ...upcomingReleases.map(release => ({
+            ...release,
+            network: site.network,
+            upcoming: true,
+          })),
+        ];
       } catch (error) {
         if (argv.debug) {
           console.error(`${site.id}: Failed to fetch releases`, error);
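Every release returned from fetchReleases now carries its site's network, and upcoming releases are additionally flagged. A minimal sketch of a downstream consumer relying on that shape; the grouping helper is hypothetical:

    // Hypothetical helper: group fetched releases by network id
    function groupByNetwork(releases) {
      return releases.reduce((acc, release) => ({
        ...acc,
        [release.network.id]: [...(acc[release.network.id] || []), release],
      }), {});
    }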


@@ -35,7 +35,9 @@ async function findSite(url) {
     name: site.name,
     description: site.description,
     url: site.url,
-    networkId: site.network_id || site.id,
+    network: {
+      id: site.network_id || site.id,
+    },
     parameters: site.parameters && JSON.parse(site.parameters),
     isFallback: site.network_id === undefined,
   };
@@ -72,7 +74,7 @@ function deriveFilename(scene) {
 async function fetchScene(url) {
   const site = await findSite(url);

-  const scraper = scrapers[site.id] || scrapers[site.networkId];
+  const scraper = scrapers[site.id] || scrapers[site.network.id];

   if (!scraper) {
     throw new Error('Could not find scraper for URL');
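When findSite only matches a network (isFallback), network.id is populated from the site's own id, so the rewritten lookup still resolves the network-level scraper. A sketch with hypothetical ids:

    // Sketch: fallback site where the URL matched a network root, not a subsite
    const site = { id: 'bangbros', network: { id: 'bangbros' }, isFallback: true };

    // Both operands index the same key, so the network scraper is found either way
    const scraper = scrapers[site.id] || scrapers[site.network.id];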


@@ -63,7 +63,11 @@ async function scrapeScene(html, url, site) {
   const entryId = new URL(url).pathname.split('/').slice(-1)[0];

   const title = data.name;
-  const date = moment.utc(data.dateCreated, 'YYYY-MM-DD').toDate();
+  const dataDate = moment.utc(data.dateCreated, 'YYYY-MM-DD');
+  const date = dataDate.isValid()
+    ? dataDate.toDate()
+    : moment.utc(sceneElement.find('.updatedDate').text().trim(), 'MM-DD-YYYY').toDate();
   const actors = data.actor
     .sort(({ gender: genderA }, { gender: genderB }) => {
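The fallback fires only when the JSON-LD dateCreated is missing or malformed; moment's isValid() reports whether the parse succeeded. A minimal sketch of both paths, with hypothetical date values:

    const moment = require('moment');

    moment.utc('2019-04-09', 'YYYY-MM-DD').isValid(); // true: JSON-LD date used directly
    moment.utc('N/A', 'YYYY-MM-DD').isValid();        // false: fall back to .updatedDate

    // Hypothetical .updatedDate text in the US-style format the fallback expects
    moment.utc('04-09-2019', 'MM-DD-YYYY').toDate();  // Date for 2019-04-09 UTC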


@@ -101,7 +101,7 @@ async function fetchScene(url, site) {
     },
   });

-  return scrapeScene(res.body.result.parent, url, site);
+  return scrapeScene(res.body.result.parent || res.body.result, url, site);
 }

 module.exports = {
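The response apparently carries the scene either nested under result.parent or directly on result; the || covers both shapes. A sketch with hypothetical payloads:

    // Hypothetical API payloads: some scenes nest under a parent entry, some don't
    const nested = { result: { parent: { title: 'Scene A' } } };
    const flat = { result: { title: 'Scene B' } };

    (nested.result.parent || nested.result).title; // 'Scene A'
    (flat.result.parent || flat.result).title;     // 'Scene B'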


@@ -4,6 +4,7 @@ const moment = require('moment');
 const formatters = {
   site: site => site.name,
   network: network => network.name,
+  date: (date, column) => moment(date).format(column.format || 'MMM DD, YYYY'),
   actors: actors => actors.join(', '),
   rating: (rating) => {
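The new date formatter falls back to 'MMM DD, YYYY' when a column doesn't specify its own pattern. A minimal usage sketch; the column objects are hypothetical:

    // Hypothetical column configs; format is optional
    formatters.date(new Date(2019, 3, 10), {});                       // 'Apr 10, 2019'
    formatters.date(new Date(2019, 3, 10), { format: 'YYYY-MM-DD' }); // '2019-04-10'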

src/utils/try-links.js (new file, 51 lines)

@@ -0,0 +1,51 @@
+'use strict';
+
+const Promise = require('bluebird');
+const bhttp = require('bhttp');
+const fs = Promise.promisifyAll(require('fs'));
+
+const knex = require('../knex');
+const argv = require('../argv');
+
+const options = {
+  responseTimeout: 30000,
+};
+
+async function tryLinks() {
+  const sites = await knex('sites').whereIn('network_id', argv.network);
+
+  const results = await Promise.all(sites.map(async (site) => {
+    console.log(`Trying ${site.name} URLs`);
+
+    const [resHttp, resHttpWww, resHttps, resHttpsWww] = await Promise.all([
+      bhttp.get(`http://${site.id}.com/`, options).catch(error => ({ statusCode: error.message })),
+      bhttp.get(`http://www.${site.id}.com/`, options).catch(error => ({ statusCode: error.message })),
+      bhttp.get(`https://${site.id}.com/`, options).catch(error => ({ statusCode: error.message })),
+      bhttp.get(`https://www.${site.id}.com/`, options).catch(error => ({ statusCode: error.message })),
+    ]);
+
+    console.log(`Got results for ${site.name}`);
+
+    return {
+      ...site,
+      url: (resHttp.statusCode === 200 && `http://${site.id}.com`)
+        || (resHttpWww.statusCode === 200 && `http://www.${site.id}.com`)
+        || (resHttps.statusCode === 200 && `https://${site.id}.com`)
+        || (resHttpsWww.statusCode === 200 && `https://www.${site.id}.com`)
+        || site.url,
+      network_id: site.network_id,
+    };
+  }));
+
+  const sortedResults = results.sort((siteA, siteB) => {
+    if (siteA.id > siteB.id) return 1;
+    if (siteA.id < siteB.id) return -1;
+    return 0;
+  });
+
+  console.log(sortedResults);
+
+  await fs.writeFileAsync('./src/utils/link-results.json', JSON.stringify(sortedResults, null, 4));
+}
+
+tryLinks();
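A hedged usage note: the script reads network ids from argv.network, so it would presumably be invoked along these lines (the exact flag name comes from the argv module and is not shown in this commit):

    node src/utils/try-links.js --network bangbros

It probes http/https and www/non-www variants of each site's likely domain and writes the winners to src/utils/link-results.json, sorted by site id, which fits the commit's "Bang Bros data (no scraper yet)" note: collecting site URLs before a scraper exists.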