Added Private and Vixen scrapers to the repository.

2019-04-04 04:00:28 +02:00
parent 439d3225ec
commit b3beeef3e4
14 changed files with 545 additions and 90 deletions


@@ -9,14 +9,63 @@ const { matchTags } = require('../tags');
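// Map each latest-scene listing element to a release object.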
function scrapeLatest(html, site) {
const $ = cheerio.load(html, { normalizeWhitespace: true });
const sceneElements = $('.scenes-latest').toArray();
return sceneElements.map((element) => {
return {
url,
shootId,
title,
actors,
date,
rating: {
likes,
dislikes,
stars,
},
site,
};
});
}
function scrapeUpcoming(html, site) {
const $ = cheerio.load(html, { normalizeWhitespace: true });
const sceneElements = $('.scenes-upcoming').toArray();
return sceneElements.map((element) => {
return {
url,
shootId,
title,
actors,
date,
rating: {
likes,
dislikes,
stars,
},
site,
};
});
}
function scrapeScene(html, url, site) {
const $ = cheerio.load(html, { normalizeWhitespace: true });
return {
url,
shootId,
title,
actors,
director: '',
date,
rating: {
likes,
dislikes,
stars,
},
site,
};
}
async function fetchLatest(site) {


@@ -1,17 +1,19 @@
'use strict';
const julesjordan = require('./julesjordan');
const kink = require('./kink');
const legalporno = require('./legalporno');
const pervcity = require('./pervcity');
const privateNetwork = require('./private'); // 'private' is a reserved word, so alias the import
const vixen = require('./vixen');
const xempire = require('./xempire');
module.exports = {
julesjordan,
kink,
legalporno,
pervcity,
private: privateNetwork,
vixen,
xempire,
};
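// Usage sketch (hypothetical caller, names assumed): scrapers are looked up by
// site key, so the reserved-word alias stays internal to this module:
//   const scrapers = require('./scrapers');
//   const scraper = scrapers[site.slug]; // a slug of 'private' resolves to privateNetwork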


@@ -65,8 +65,10 @@ async function scrapeScene(html, url, shootId, ratingRes, site) {
const rawTags = $('.tag-list > a[href*="/tag"]').map((tagIndex, tagElement) => $(tagElement).text()).toArray();
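// Fetch the channel site and match the tags concurrently instead of sequentially.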
const [channelSite, tags] = await Promise.all([
knex('sites').where({ id: sitename }).first(),
matchTags(rawTags),
]);
return {
url,

src/scrapers/private.js Normal file

@@ -0,0 +1,100 @@
'use strict';
/* eslint-disable */
const bhttp = require('bhttp');
const cheerio = require('cheerio');
const moment = require('moment');
const knex = require('../knex');
const { matchTags } = require('../tags');
function scrapeLatest(html, site) {
const $ = cheerio.load(html, { normalizeWhitespace: true });
const sceneElements = $('.content-wrapper.container .scene').toArray();
return sceneElements.map((element) => {
const sceneLinkElement = $(element).find('a[data-track="TITLE_LINK"]');
const url = sceneLinkElement.attr('href');
const title = sceneLinkElement.text();
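// The shoot ID is the last path segment of the scene URL; listing dates render as MM/DD/YYYY.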
const shootId = url.split('/').slice(-1)[0];
const date = moment.utc($(element).find('.scene-date').text(), 'MM/DD/YYYY').toDate();
const actors = $(element).find('a[data-track="PORNSTAR_LINK"]').map((actorIndex, actorElement) => $(actorElement).text()).toArray();
const likes = Number($(element).find('.scene-votes').text());
return {
url,
shootId,
title,
actors,
date,
rating: {
likes,
dislikes: 0,
},
site,
};
});
}
async function scrapeScene(html, url, site) {
const $ = cheerio.load(html, { normalizeWhitespace: true });
const shootId = url.split('/').slice(-1)[0];
const title = $('.video-wrapper meta[itemprop="name"]').attr('content');
const date = moment.utc($('.video-wrapper meta[itemprop="uploadDate"]').attr('content'), 'MM/DD/YYYY').toDate();
const actors = $('.content-wrapper .scene-models-list a').map((actorIndex, actorElement) => $(actorElement).text()).toArray();
const description = $('.video-wrapper meta[itemprop="description"]').attr('content');
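// The duration meta holds a minutes:seconds value; extract both numbers and convert to seconds.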
const [minutes, seconds] = $('.video-wrapper meta[itemprop="duration"]').attr('content').match(/\d+/g);
const duration = Number(minutes) * 60 + Number(seconds);
const likes = Number($('.content-desc #social-actions #likes').text());
const siteElement = $('.content-wrapper .logos-sites a');
const siteUrl = siteElement.attr('href').slice(0, -1);
const siteName = siteElement.text();
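// Resolve the channel site from the banner link, matching stored sites by URL or by name.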
const channelSite = await knex('sites')
.where({ url: siteUrl })
.orWhere({ name: siteName })
.first();
const rawTags = $('.content-desc .scene-tags a').map((tagIndex, tagElement) => $(tagElement).text()).toArray();
const tags = await matchTags(rawTags);
return {
url,
shootId,
title,
date,
actors,
description,
duration,
tags,
rating: {
likes,
dislikes: 0,
},
site: channelSite || site,
};
}
async function fetchLatest(site) {
const res = await bhttp.get(`${site.url}/`);
return scrapeLatest(res.body.toString(), site);
}
async function fetchScene(url, site) {
const res = await bhttp.get(url);
return scrapeScene(res.body.toString(), url, site);
}
module.exports = {
fetchLatest,
fetchScene,
};

src/scrapers/vixen.js Normal file

@@ -0,0 +1,90 @@
'use strict';
/* eslint-disable */
const bhttp = require('bhttp');
const cheerio = require('cheerio');
const moment = require('moment');
const { matchTags } = require('../tags');
function scrapeLatest(html, site) {
const $ = cheerio.load(html, { normalizeWhitespace: true });
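// The page embeds its data in a script tag; slice(27, -1) strips what is
// presumably the "window.__INITIAL_STATE__ = " prefix (27 characters) and the
// trailing semicolon, leaving plain JSON.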
const stateObject = $('script:contains("INITIAL_STATE")');
const { videos: scenes } = JSON.parse(stateObject.html().trim().slice(27, -1));
return scenes.map((scene) => {
const shootId = scene.newId;
const title = scene.title;
const url = `${site.url}${scene.targetUrl}`;
const date = moment.utc(scene.releaseDateFormatted, 'MMMM DD, YYYY').toDate();
const actors = scene.models;
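// textRating appears to be on a 10-point scale; halve it to get stars out of 5.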
const stars = Number(scene.textRating) / 2;
return {
url,
shootId,
title,
actors,
date,
rating: {
stars,
},
site,
};
});
}
async function scrapeScene(html, url, site) {
const $ = cheerio.load(html, { normalizeWhitespace: true });
const { pathname, search } = new URL(url);
const stateObject = $('script:contains("INITIAL_STATE")');
const data = JSON.parse(stateObject.html().trim().slice(27, -1));
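// Scene data in the page state is keyed by request path, including the query string.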
const scene = data.page.data[`${pathname}${search}`].data.video;
const shootId = scene.newId;
const title = scene.title;
const date = new Date(scene.releaseDate);
const actors = scene.models;
const stars = scene.totalRateVal;
const rawTags = scene.tags;
const tags = await matchTags(rawTags);
const duration = scene.runLength;
const director = scene.directorNames;
return {
url,
shootId,
title,
actors,
director,
date,
duration,
tags,
rating: {
stars,
},
site,
};
}
async function fetchLatest(site) {
const res = await bhttp.get(`${site.url}/videos`);
return scrapeLatest(res.body.toString(), site);
}
async function fetchScene(url, site) {
const res = await bhttp.get(url);
return scrapeScene(res.body.toString(), url, site);
}
module.exports = {
fetchLatest,
fetchScene,
};


@@ -34,6 +34,7 @@ function scrape(html, site) {
shootId,
title,
actors,
director: 'Mason',
date,
rating: {
likes,
@@ -62,6 +63,7 @@ async function scrapeSceneFallback($, url, site) {
title,
date,
actors,
director: 'Mason',
description,
tags,
rating: {
@@ -108,6 +110,7 @@ async function scrapeScene(html, url, site) {
title,
date,
actors,
director: 'Mason',
description,
duration,
tags,
@@ -118,10 +121,15 @@ async function scrapeScene(html, url, site) {
};
}
async function fetchLatest(site, storedReleases) {
const res = await bhttp.get(`${site.url}/en/videos`);
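// Compare against already-stored releases so only new shoot IDs are returned.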
const releases = scrape(res.body.toString(), site);
const storedShootIds = new Set(storedReleases.map(release => release.shoot_id));
const newReleases = releases.filter(release => !storedShootIds.has(release.shootId));
return newReleases;
}
async function fetchUpcoming(site) {