2019-03-23 21:48:39 +00:00
|
|
|
'use strict';
|
|
|
|
|
|
|
|
const config = require('config');
|
2019-05-06 00:01:57 +00:00
|
|
|
const Promise = require('bluebird');
|
2019-03-23 21:48:39 +00:00
|
|
|
const moment = require('moment');
|
|
|
|
|
2019-04-04 02:00:28 +00:00
|
|
|
const argv = require('./argv');
|
2019-03-25 02:57:33 +00:00
|
|
|
const knex = require('./knex');
|
2019-03-23 21:48:39 +00:00
|
|
|
const scrapers = require('./scrapers');
|
2019-05-06 00:01:57 +00:00
|
|
|
const fetchScene = require('./fetch-scene');
|
2019-03-23 21:48:39 +00:00
|
|
|
|
2019-03-25 02:57:33 +00:00
|
|
|
// Split a config include list into plain network IDs and the sites pulled
// out of [network, sites] tuple entries.
function destructConfigNetworks(networks) {
  const acc = {
    networks: [],
    sites: [],
  };

  for (const network of networks) {
    if (Array.isArray(network)) {
      // tuple entry specifies individual sites for a network
      acc.sites = [...acc.sites, ...network[1]];
    } else {
      acc.networks = [...acc.networks, network];
    }
  }

  return acc;
}
|
|
|
|
|
2019-03-25 02:57:33 +00:00
|
|
|
/**
 * Map raw site rows (joined with the networks table) to curated site objects.
 * @param {Array<Object>} sites - rows with sites.* columns plus network_name.
 * @returns {Array<Object>} curated sites with a nested network object.
 */
function curateSites(sites) {
  return sites.map(site => ({
    id: site.id,
    name: site.name,
    description: site.description,
    url: site.url,
    network: {
      id: site.network_id,
      name: site.network_name,
    },
    // parameters is stored as a JSON string; guard against a missing value,
    // since JSON.parse(undefined) throws a SyntaxError
    parameters: site.parameters ? JSON.parse(site.parameters) : null,
  }));
}
|
2019-03-23 21:48:39 +00:00
|
|
|
|
2019-03-25 02:57:33 +00:00
|
|
|
/**
 * Run the shared sites query joined with network names.
 * @param {Array} siteIds - site IDs to include.
 * @param {Array} networkIds - network IDs whose sites are included.
 * @returns {Promise<Array<Object>>} raw site rows with network_name attached.
 */
async function fetchSitesWithNetwork(siteIds, networkIds) {
  return knex('sites')
    .select('sites.*', 'networks.name as network_name')
    .whereIn('sites.id', siteIds)
    .orWhereIn('network_id', networkIds)
    .leftJoin('networks', 'sites.network_id', 'networks.id');
}

/**
 * Collect the sites selected on the command line, or fall back to the
 * networks/sites whitelisted in the config file.
 * @returns {Promise<Array<Object>>} curated site objects.
 */
async function accumulateIncludedSites() {
  // CLI selection takes precedence over the config include list
  if (argv.networks || argv.sites) {
    const rawSites = await fetchSitesWithNetwork(argv.sites || [], argv.networks || []);
    return curateSites(rawSites);
  }

  const included = destructConfigNetworks(config.include);
  const rawSites = await fetchSitesWithNetwork(included.sites, included.networks);
  return curateSites(rawSites);
}
|
|
|
|
|
2019-04-06 21:24:26 +00:00
|
|
|
// Look up stored releases whose external IDs match any of the latest
// scraped releases, so already-known entries can be filtered out.
async function findDuplicateReleases(latestReleases, _siteId) {
  const shootIds = latestReleases
    .map(({ shootId }) => shootId)
    .filter(id => id !== undefined);

  const entryIds = latestReleases
    .map(({ entryId }) => entryId)
    .filter(id => id !== undefined);

  return knex('releases')
    .whereIn('shoot_id', shootIds)
    .orWhereIn('entry_id', entryIds);
}
|
|
|
|
|
2019-05-06 00:01:57 +00:00
|
|
|
/**
 * Insert releases and their actor/tag associations, one release at a time.
 * Releases that conflict with an existing row (ON CONFLICT DO NOTHING) are
 * skipped, so the association inserts never dereference a missing row.
 * @param {Array<Object>} releases - scraped release objects.
 * @returns {Promise<Array>} IDs of the releases actually inserted.
 */
async function storeReleases(releases = []) {
  return Promise.reduce(releases, async (accumulated, release) => {
    const acc = await accumulated;

    const curatedRelease = {
      site_id: release.site.id,
      shoot_id: release.shootId || null,
      entry_id: release.entryId || null,
      url: release.url,
      title: release.title,
      date: release.date,
      description: release.description,
      director: release.director,
      duration: release.duration,
      likes: release.rating && release.rating.likes,
      dislikes: release.rating && release.rating.dislikes,
      rating: release.rating && release.rating.stars,
    };

    const releaseQuery = `${knex('releases').insert(curatedRelease).toString()} ON CONFLICT DO NOTHING RETURNING *`;
    const releaseEntry = await knex.raw(releaseQuery);

    // nothing was inserted (conflict with an existing release): skip the
    // association inserts, which previously crashed on rows[0] being absent
    if (releaseEntry.rows.length === 0) {
      return acc;
    }

    const releaseId = releaseEntry.rows[0].id;

    if (release.actors && release.actors.length > 0) {
      const actors = await knex('actors').whereIn('name', release.actors);
      const newActors = release.actors.filter(actorName => !actors.some(actor => actor.name === actorName));

      // insert only actors not yet in the database
      const { rows: insertedActors } = newActors.length
        ? await knex.raw(`${knex('actors').insert(newActors.map(actorName => ({ name: actorName })))} ON CONFLICT DO NOTHING RETURNING *`)
        : { rows: [] };

      await knex('actors_associated').insert(actors.concat(insertedActors).map(actor => ({
        release_id: releaseId,
        actor_id: actor.id,
      })), '*');
    }

    if (release.tags && release.tags.length > 0) {
      await knex('tags_associated').insert(release.tags.map(tag => ({
        tag_id: tag,
        release_id: releaseId,
      })));
    }

    // keep the accumulator defined across iterations (the original callback
    // never returned it, leaving acc undefined after the first release)
    return acc.concat(releaseId);
  }, []);
}
|
|
|
|
|
2019-04-05 01:45:40 +00:00
|
|
|
/**
 * Page through a site's latest releases until a page contains only releases
 * that are already known or older than afterDate.
 * @param {Object} scraper - site/network scraper with fetchLatest(site, page).
 * @param {Object} site - curated site object.
 * @param {Date} afterDate - only releases newer than this date are kept.
 * @param {Array<Object>} accReleases - releases accumulated from earlier pages.
 * @param {number} page - 1-based page number to fetch.
 * @returns {Promise<Array<Object>>} unique recent releases across all pages.
 */
async function fetchNewReleases(scraper, site, afterDate, accReleases = [], page = 1) {
  const latestReleases = await scraper.fetchLatest(site, page);

  if (latestReleases.length === 0) {
    return [];
  }

  const duplicateReleases = await findDuplicateReleases(latestReleases, site.id);

  // normalize every known ID to a string so the .has() lookups below match
  // regardless of whether the database returned numbers or strings (the
  // original mixed raw values with String(...) lookups, so numeric IDs
  // never matched; it also concatenated the same duplicate map twice)
  const duplicateReleasesIds = new Set(
    duplicateReleases
      .map(release => String(release.shoot_id || release.entry_id))
      // exclude accumulated releases to prevent an infinite loop if the next page contains the same releases as the previous
      .concat(accReleases.map(release => String(release.shootId || release.entryId))),
  );

  const uniqueReleases = latestReleases.filter(release => !duplicateReleasesIds.has(String(release.shootId))
    && !duplicateReleasesIds.has(String(release.entryId))
    && moment(release.date).isAfter(afterDate));

  console.log(`\x1b[90m${site.name}: Scraped page ${page}, ${uniqueReleases.length} unique recent releases\x1b[0m`);

  // pages are newest-first, so the last entry is the oldest on the page
  const oldestReleaseOnPage = latestReleases.slice(-1)[0].date;

  if (uniqueReleases.length > 0 && moment(oldestReleaseOnPage).isAfter(afterDate)) {
    return fetchNewReleases(scraper, site, afterDate, accReleases.concat(uniqueReleases), page + 1);
  }

  return accReleases.concat(uniqueReleases);
}
|
|
|
|
|
2019-03-23 21:48:39 +00:00
|
|
|
/**
 * Fetch recent and upcoming releases for every included site, optionally
 * deep-fetching scene details and storing them, then return all releases
 * sorted by date, newest first.
 * @returns {Promise<Array<Object>>} sorted releases tagged with their network.
 */
async function fetchReleases() {
  const sites = await accumulateIncludedSites();

  const scenesPerSite = await Promise.all(sites.map(async (site) => {
    // prefer a site-specific scraper, fall back to the network's scraper
    const scraper = scrapers[site.id] || scrapers[site.network.id];

    if (!scraper) {
      return [];
    }

    try {
      // argv.after is e.g. "1 month" — spread into moment's subtract(amount, unit)
      const afterDate = moment.utc().subtract(...argv.after.split(' ')).toDate();

      const [newReleases, upcomingReleases] = await Promise.all([
        fetchNewReleases(scraper, site, afterDate),
        scraper.fetchUpcoming ? scraper.fetchUpcoming(site) : [],
      ]);

      console.log(`${site.name}: Found ${newReleases.length} recent releases, ${upcomingReleases.length} upcoming releases`);

      if (argv.save) {
        // bluebird's Promise.map honors the concurrency option; the original
        // passed it to Promise.all, which silently ignores a second argument
        // and fetched every scene at once
        const finalReleases = argv.deep
          ? await Promise.map(newReleases, async (release) => {
            if (release.url) {
              return fetchScene(release.url);
            }

            return release;
          }, {
            concurrency: 2,
          })
          : newReleases;

        await storeReleases(finalReleases);
      }

      return [
        ...newReleases.map(release => ({
          ...release,
          network: site.network,
        })),
        ...upcomingReleases.map(release => ({
          ...release,
          network: site.network,
          upcoming: true,
        })),
      ];
    } catch (error) {
      if (argv.debug) {
        console.error(`${site.id}: Failed to fetch releases`, error);
        return [];
      }

      console.log(`${site.id}: Failed to fetch releases`);
      return [];
    }
  }));

  const accumulatedScenes = scenesPerSite.reduce((acc, siteScenes) => ([...acc, ...siteScenes]), []);
  // newest first
  const sortedScenes = accumulatedScenes.sort(({ date: dateA }, { date: dateB }) => moment(dateB).diff(dateA));

  return sortedScenes;
}
|
|
|
|
|
|
|
|
// module entry point: fetch (and optionally store) releases for all included sites
module.exports = fetchReleases;
|