forked from DebaucheryLibrarian/traxxx
Added Private scraper. Added Vixen scraper to repository.
src/argv.js (10 lines changed)

@@ -4,6 +4,16 @@ const yargs = require('yargs');

 const { argv } = yargs
   .command('npm start')
+  .option('networks', {
+    describe: 'Networks to include (overrides config)',
+    type: 'array',
+    alias: 'network',
+  })
+  .option('sites', {
+    describe: 'Sites to include (overrides config)',
+    type: 'array',
+    alias: 'site',
+  })
   .option('render', {
     describe: 'Fetch data without rendering interface',
     type: 'boolean',
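A quick sanity check of the new array options (a sketch; the entry point and the network/site IDs are illustrative, not from the commit):

const argv = require('./argv');

// node src/app.js --network vixen private --site blacked
// argv.networks -> ['vixen', 'private']   (singular alias: --network)
// argv.sites    -> ['blacked']            (singular alias: --site)
console.log(argv.networks, argv.sites);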
@@ -3,6 +3,7 @@
 const config = require('config');
 const moment = require('moment');

+const argv = require('./argv');
 const knex = require('./knex');
 const scrapers = require('./scrapers');
@@ -38,6 +39,14 @@ function curateSites(sites) {
 }

 async function accumulateIncludedSites() {
+  if (argv.networks || argv.sites) {
+    const rawSites = await knex('sites')
+      .whereIn('id', argv.sites || [])
+      .orWhereIn('network_id', argv.networks || []);
+
+    return curateSites(rawSites);
+  }
+
   const included = destructConfigNetworks(config.include);

   const rawSites = await knex('sites')
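For reference, the new command-line branch resolves to a single OR query. A minimal sketch of the equivalent call (column names are from the hunk above; the IDs are illustrative):

const knex = require('./knex');

// SELECT * FROM sites WHERE id IN ('blacked') OR network_id IN ('private')
async function demoFilter() {
  return knex('sites')
    .whereIn('id', ['blacked'])
    .orWhereIn('network_id', ['private']);
}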
@@ -47,9 +56,11 @@ async function accumulateIncludedSites() {
   return curateSites(rawSites);
 }

-async function getExistingReleases() {
-  return knex('releases');
-  // .where('date', '>', new Date(2019, 2, 26));
+async function getStoredReleases(siteId, limit) {
+  return knex('releases')
+    .where({ site_id: siteId })
+    .orderBy('date', 'desc')
+    .limit(limit);
 }

 async function storeReleases(releases) {
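Usage of the replacement helper, inside the same module (the site ID is illustrative):

// The 100 most recent stored releases for one site, newest first
async function demo() {
  const storedReleases = await getStoredReleases('blacked', 100);
  // e.g. [{ site_id: 'blacked', shoot_id: '...', date: ..., ... }, ...]
}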
@@ -60,6 +71,7 @@ async function storeReleases(releases) {
     title: release.title,
     date: release.date,
     description: release.description,
+    director: release.director,
     duration: release.duration,
     likes: release.rating && release.rating.likes,
     dislikes: release.rating && release.rating.dislikes,
@@ -80,20 +92,22 @@ async function storeReleases(releases) {

 async function fetchReleases() {
   const sites = await accumulateIncludedSites();
-  const releases = await getExistingReleases();
+  // const releases = await getExistingReleases();

   const scenesPerSite = await Promise.all(sites.map(async (site) => {
     const scraper = scrapers[site.id] || scrapers[site.networkId];

     if (scraper) {
+      const storedReleases = await getStoredReleases(site.id, 100);
+
       const [latest, upcoming] = await Promise.all([
-        scraper.fetchLatest(site),
+        scraper.fetchLatest(site, storedReleases),
         scraper.fetchUpcoming ? scraper.fetchUpcoming(site) : [],
       ]);

       console.log(`${latest.length} published releases and ${upcoming.length} upcoming releases found`);

-      await storeReleases(latest, releases);
+      await storeReleases(latest);

       return [...latest, ...upcoming];
     }
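The net effect: fetchReleases now hands each scraper its 100 most recent stored releases so the scraper can skip scenes already in the database. A sketch of the intended scraper contract (it mirrors the julesjordan hunk near the end of this commit; the bhttp URL path and scrapeLatest helper are placeholders):

async function fetchLatest(site, storedReleases = []) {
  const res = await bhttp.get(`${site.url}/latest`); // path is illustrative
  const releases = scrapeLatest(res.body.toString(), site); // site-specific parsing

  // Stored rows use snake_case (shoot_id); scraped objects use camelCase (shootId)
  const storedShootIds = new Set(storedReleases.map(release => release.shoot_id));

  return releases.filter(release => !storedShootIds.has(release.shootId));
}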
@@ -9,14 +9,63 @@ const { matchTags } = require('../tags');

function scrapeLatest(html, site) {
  const $ = cheerio.load(html, { normalizeWhitespace: true });
  const sceneElements = $('.scenes-latest');

  return sceneElements.map((element) => {
    return {
      url,
      shootId,
      title,
      actors,
      date,
      rating: {
        likes,
        dislikes,
        stars,
      },
      site,
    };
  });
}

function scrapeUpcoming(html, site) {
  const $ = cheerio.load(html, { normalizeWhitespace: true });
  const sceneElements = $('.scenes-upcoming');

  return sceneElements.map((element) => {
    return {
      url,
      shootId,
      title,
      actors,
      date,
      rating: {
        likes,
        dislikes,
        stars,
      },
      site,
    };
  });
}

function scrapeScene(html, url, site) {
  const $ = cheerio.load(html, { normalizeWhitespace: true });

  return {
    url,
    shootId,
    title,
    actors,
    director: '',
    date,
    rating: {
      likes,
      dislikes,
      stars,
    },
    site,
  };
}

async function fetchLatest(site) {
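These three functions are stubs as committed: `url`, `shootId`, `title`, and the other fields are referenced but never assigned, so calling them would throw a ReferenceError. A hedged sketch of how one might be fleshed out, borrowing the cheerio patterns from the private.js scraper added below (every selector here is a placeholder, not taken from the real site):

const cheerio = require('cheerio');
const moment = require('moment');

function scrapeLatest(html, site) {
  const $ = cheerio.load(html, { normalizeWhitespace: true });
  // .toArray() yields plain DOM elements so Array#map can be used directly
  const sceneElements = $('.scenes-latest .scene').toArray();

  return sceneElements.map((element) => {
    const sceneLinkElement = $(element).find('a.scene-title'); // placeholder selector
    const url = sceneLinkElement.attr('href');
    const title = sceneLinkElement.text();
    const shootId = url.split('/').slice(-1)[0];
    const date = moment.utc($(element).find('.scene-date').text(), 'MM/DD/YYYY').toDate();
    const actors = $(element).find('.scene-actors a').map((i, el) => $(el).text()).toArray();

    return { url, shootId, title, actors, date, site };
  });
}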
@@ -1,17 +1,19 @@
 'use strict';

-const xempire = require('./xempire');
 const julesjordan = require('./julesjordan');
 const kink = require('./kink');
 const legalporno = require('./legalporno');
 const pervcity = require('./pervcity');
+const privateNetwork = require('./private'); // reserved keyword
+const vixen = require('./vixen');
+const xempire = require('./xempire');

 module.exports = {
-  xempire,
   julesjordan,
   kink,
   legalporno,
   pervcity,
+  private: privateNetwork,
+  vixen,
+  xempire,
 };
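The export keys matter: fetchReleases resolves a scraper with `scrapers[site.id] || scrapers[site.networkId]`, so a network-level module like `vixen` serves every site in that network unless a site-specific module shadows it. Roughly (the site object is illustrative):

const scrapers = require('./scrapers');

// A Vixen-network site without a dedicated scraper falls back to the network module
const site = { id: 'blacked', networkId: 'vixen' }; // illustrative shape
const scraper = scrapers[site.id] || scrapers[site.networkId]; // -> scrapers.vixen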
@@ -65,8 +65,10 @@ async function scrapeScene(html, url, shootId, ratingRes, site) {

   const rawTags = $('.tag-list > a[href*="/tag"]').map((tagIndex, tagElement) => $(tagElement).text()).toArray();

-  const channelSite = await knex('sites').where({ id: sitename }).first();
-  const tags = await matchTags(rawTags);
+  const [channelSite, tags] = await Promise.all([
+    knex('sites').where({ id: sitename }).first(),
+    matchTags(rawTags),
+  ]);

   return {
     url,
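This is a small concurrency win: the site lookup and tag matching are independent, so Promise.all overlaps their latencies instead of adding them. A self-contained illustration (the delays and values are stand-ins, not measurements):

const delay = (ms, value) => new Promise(resolve => setTimeout(() => resolve(value), ms));

async function demo() {
  // Sequential awaits: ~500 ms total; Promise.all: ~max(300, 200) = ~300 ms
  const [channelSite, tags] = await Promise.all([
    delay(300, { id: 'example-site' }), // stands in for the knex lookup
    delay(200, ['tag-a', 'tag-b']),     // stands in for matchTags
  ]);
}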
src/scrapers/private.js (new file, 100 lines)

@@ -0,0 +1,100 @@
'use strict';

/* eslint-disable */
const bhttp = require('bhttp');
const cheerio = require('cheerio');
const moment = require('moment');

const knex = require('../knex');
const { matchTags } = require('../tags');

function scrapeLatest(html, site) {
  const $ = cheerio.load(html, { normalizeWhitespace: true });
  const sceneElements = $('.content-wrapper.container .scene').toArray();

  return sceneElements.map((element) => {
    const sceneLinkElement = $(element).find('a[data-track="TITLE_LINK"]');

    const url = sceneLinkElement.attr('href');
    const title = sceneLinkElement.text();
    const shootId = url.split('/').slice(-1)[0];

    const date = moment.utc($(element).find('.scene-date').text(), 'MM/DD/YYYY').toDate();

    const actors = $(element).find('a[data-track="PORNSTAR_LINK"]').map((actorIndex, actorElement) => $(actorElement).text()).toArray();
    const likes = Number($(element).find('.scene-votes').text());

    return {
      url,
      shootId,
      title,
      actors,
      date,
      rating: {
        likes,
        dislikes: 0,
      },
      site,
    };
  });
}

async function scrapeScene(html, url, site) {
  const $ = cheerio.load(html, { normalizeWhitespace: true });

  const shootId = url.split('/').slice(-1)[0];
  const title = $('.video-wrapper meta[itemprop="name"]').attr('content');

  const date = moment.utc($('.video-wrapper meta[itemprop="uploadDate"]').attr('content'), 'MM/DD/YYYY').toDate();
  const actors = $('.content-wrapper .scene-models-list a').map((actorIndex, actorElement) => $(actorElement).text()).toArray();

  const description = $('.video-wrapper meta[itemprop="description"]').attr('content');
  const [minutes, seconds] = $('.video-wrapper meta[itemprop="duration"]').attr('content').match(/\d+/g);
  const duration = Number(minutes) * 60 + Number(seconds);

  const likes = Number($('.content-desc #social-actions #likes').text());

  const siteElement = $('.content-wrapper .logos-sites a');
  const siteUrl = siteElement.attr('href').slice(0, -1);
  const siteName = siteElement.text();
  const channelSite = await knex('sites')
    .where({ url: siteUrl })
    .orWhere({ name: siteName })
    .first();

  const rawTags = $('.content-desc .scene-tags a').map((tagIndex, tagElement) => $(tagElement).text()).toArray();
  const tags = await matchTags(rawTags);

  return {
    url,
    shootId,
    title,
    date,
    actors,
    description,
    duration,
    tags,
    rating: {
      likes,
      dislikes: 0,
    },
    site: channelSite || site,
  };
}

async function fetchLatest(site) {
  const res = await bhttp.get(`${site.url}/`);

  return scrapeLatest(res.body.toString(), site);
}

async function fetchScene(url, site) {
  const res = await bhttp.get(url);

  return scrapeScene(res.body.toString(), url, site);
}

module.exports = {
  fetchLatest,
  fetchScene,
};
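A usage sketch for the new Private scraper (the site object shape follows how fetchReleases invokes scrapers; the exact fields on the site row are an assumption):

const privateNetwork = require('./src/scrapers/private');

const site = { id: 'private', networkId: 'private', url: 'https://www.private.com' };

privateNetwork.fetchLatest(site)
  .then(releases => console.log(releases.map(release => release.title)));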
src/scrapers/vixen.js (new file, 90 lines)

@@ -0,0 +1,90 @@
'use strict';

/* eslint-disable */
const bhttp = require('bhttp');
const cheerio = require('cheerio');
const moment = require('moment');

const { matchTags } = require('../tags');

function scrapeLatest(html, site) {
  const $ = cheerio.load(html, { normalizeWhitespace: true });

  const stateObject = $('script:contains("INITIAL_STATE")');
  const { videos: scenes } = JSON.parse(stateObject.html().trim().slice(27, -1));

  return scenes.map((scene) => {
    const shootId = scene.newId;
    const title = scene.title;
    const url = `${site.url}${scene.targetUrl}`;
    const date = moment.utc(scene.releaseDateFormatted, 'MMMM DD, YYYY').toDate();
    const actors = scene.models;
    const stars = Number(scene.textRating) / 2;

    return {
      url,
      shootId,
      title,
      actors,
      date,
      rating: {
        stars,
      },
      site,
    };
  });
}

async function scrapeScene(html, url, site) {
  const $ = cheerio.load(html, { normalizeWhitespace: true });
  const { pathname, search } = new URL(url);

  const stateObject = $('script:contains("INITIAL_STATE")');
  const data = JSON.parse(stateObject.html().trim().slice(27, -1));

  const scene = data.page.data[`${pathname}${search}`].data.video;

  const shootId = scene.newId;
  const title = scene.title;
  const date = new Date(scene.releaseDate);
  const actors = scene.models;
  const stars = scene.totalRateVal;

  const rawTags = scene.tags;
  const tags = await matchTags(rawTags);

  const duration = scene.runLength;
  const director = scene.directorNames;

  return {
    url,
    shootId,
    title,
    actors,
    director,
    date,
    duration,
    tags,
    rating: {
      stars,
    },
    site,
  };
}

async function fetchLatest(site) {
  const res = await bhttp.get(`${site.url}/videos`);

  return scrapeLatest(res.body.toString(), site);
}

async function fetchScene(url, site) {
  const res = await bhttp.get(url);

  return scrapeScene(res.body.toString(), url, site);
}

module.exports = {
  fetchLatest,
  fetchScene,
};
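The `slice(27, -1)` only works if the script body has the exact shape `window.__INITIAL_STATE__ = {...};` — the prefix `window.__INITIAL_STATE__ = ` is 27 characters and the `-1` drops the trailing semicolon (the prefix is inferred from the offsets, not confirmed by the commit). The same extraction, written so the assumption is visible:

// Equivalent, but explicit about the assumed script shape:
//   window.__INITIAL_STATE__ = {...};
const raw = stateObject.html().trim();
const prefix = 'window.__INITIAL_STATE__ = ';
const data = JSON.parse(raw.slice(prefix.length, -1)); // -1 drops the trailing ';'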
@@ -34,6 +34,7 @@ function scrape(html, site) {
     shootId,
     title,
     actors,
+    director: 'Mason',
     date,
     rating: {
       likes,
@@ -62,6 +63,7 @@ async function scrapeSceneFallback($, url, site) {
     title,
     date,
     actors,
+    director: 'Mason',
     description,
     tags,
     rating: {
@@ -108,6 +110,7 @@ async function scrapeScene(html, url, site) {
     title,
     date,
     actors,
+    director: 'Mason',
     description,
     duration,
     tags,
@@ -118,10 +121,15 @@ async function scrapeScene(html, url, site) {
   };
 }

-async function fetchLatest(site) {
+async function fetchLatest(site, storedReleases) {
   const res = await bhttp.get(`${site.url}/en/videos`);
+  const releases = scrape(res.body.toString(), site);

-  return scrape(res.body.toString(), site);
+  const storedShootIds = new Set(storedReleases.map(release => release.shoot_id));
+
+  const newReleases = releases.filter(release => !storedShootIds.has(release.shootId));
+
+  console.log(newReleases);
 }

 async function fetchUpcoming(site) {
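As committed, the new fetchLatest only logs newReleases and returns undefined, which would break the `[...latest, ...upcoming]` spread back in fetchReleases — presumably work in progress. A completed version would likely end with a return (a hedged guess, not part of the commit):

async function fetchLatest(site, storedReleases = []) {
  const res = await bhttp.get(`${site.url}/en/videos`);
  const releases = scrape(res.body.toString(), site);

  const storedShootIds = new Set(storedReleases.map(release => release.shoot_id));

  // Return the filtered list so fetchReleases can spread and store it
  return releases.filter(release => !storedShootIds.has(release.shootId));
}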
@@ -5,7 +5,8 @@ const knex = require('./knex');
 async function matchTags(rawTags) {
   const tagEntries = await knex('tags')
     .select(knex.raw('ifnull(original.tag, tags.tag) as tag'))
-    .whereIn('tags.tag', rawTags.map(tag => tag.toLowerCase()))
+    .whereIn('tags.tag', rawTags)
+    .orWhereIn('tags.tag', rawTags.map(tag => tag.toLowerCase()))
     .leftJoin('tags as original', 'tags.alias_for', 'original.tag');

   return tagEntries.map(({ tag }) => tag);
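For illustration, matchTags resolves aliases through the self-join on alias_for: when a scraped tag matches an alias row, the `ifnull(original.tag, tags.tag)` projection returns the original tag instead. The tag rows below are illustrative, not from the database:

const { matchTags } = require('./tags');

// Assuming rows { tag: 'bj', alias_for: 'blowjob' } and { tag: 'anal' } exist:
async function demo() {
  const tags = await matchTags(['BJ', 'anal']);
  console.log(tags); // -> ['blowjob', 'anal'] — alias resolved, casing handled by the lowercase match
}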