Added Blowpass scraper. Split shootId and pageId.

This commit is contained in:
2019-04-06 23:24:26 +02:00
parent 069c2c1628
commit 3a90f98d41
14 changed files with 208 additions and 11 deletions

src/scrapers/blowpass.js Normal file
View File

@@ -0,0 +1,110 @@
'use strict';
/* eslint-disable */
const bhttp = require('bhttp');
const cheerio = require('cheerio');
const moment = require('moment');
const { matchTags } = require('../tags');
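// Scrape the scene tiles from a listing page (used for both the latest and upcoming overviews)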
function scrape(html, site) {
const $ = cheerio.load(html, { normalizeWhitespace: true });
const sceneElements = $('.sceneList .scene').toArray();
return sceneElements.map((element) => {
const entryId = $(element).attr('data-itemid');
const sceneLinkElement = $(element).find('.sceneTitle a');
const title = sceneLinkElement.attr('title');
const url = `${site.url}/en/scene/${sceneLinkElement.attr('href').split('/').slice(-2).join('/')}`;
const date = moment.utc($(element).find('.sceneDate').text(), 'MM-DD-YYYY').toDate();
const actors = $(element).find('.sceneActors a').map((actorIndex, actorElement) => $(actorElement).text()).toArray();
const likes = Number($(element).find('.rating .state_1 .value').text());
return {
url,
entryId,
title,
actors,
date,
rating: {
likes,
},
site,
};
});
}
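// Scrape a single scene page; most fields are read from the embedded JSON-LD metadata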
async function scrapeScene(html, url, site) {
const $ = cheerio.load(html, { normalizeWhitespace: true });
const json = $('script[type="application/ld+json"]').html();
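	// the parsed JSON-LD is treated as an array, with the scene's own metadata in the last entry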
const data = JSON.parse(json).slice(-1)[0];
const sceneElement = $('#wrapper');
const workName = data.isPartOf.name.split(' - ');
const shootId = workName.length > 1 ? workName[0] : null;
const entryId = url.split('/').slice(-1)[0];
const title = data.name;
const description = data.description;
const date = moment.utc(data.isPartOf.datePublished, 'YYYY-MM-DD').toDate();
// const actors = sceneElement.find('.sceneActors a').map((actorIndex, actorElement) => $(actorElement).text().trim()).toArray();
const actors = data.actor
	.sort(({ gender: genderA }, { gender: genderB }) => {
		// list female performers before male performers
		if (genderA === 'female' && genderB === 'male') return -1;
		if (genderA === 'male' && genderB === 'female') return 1;
		return 0;
	})
.map(actor => actor.name);
const likes = Number(sceneElement.find('.rating .state_1 .value').text());
const dislikes = Number(sceneElement.find('.rating .state_2 .value').text());
const duration = moment.duration(data.duration.slice(2)).asSeconds();
const rawTags = data.keywords.split(', ');
const tags = await matchTags(rawTags);
return {
url,
shootId,
entryId,
title,
actors,
date,
duration,
tags,
rating: {
likes,
dislikes,
},
site,
};
}
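// Fetch and scrape one page of a site's latest-scenes listing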
async function fetchLatest(site, page = 1) {
const res = await bhttp.get(`https://www.blowpass.com/en/videos/${site.id}/latest/All-Categories/0/All-Pornstars/0/${page}`);
return scrape(res.body.toString(), site);
}
async function fetchUpcoming(site) {
const res = await bhttp.get(`https://www.blowpass.com/en/videos/${site.id}/upcoming`);
return scrape(res.body.toString(), site);
}
async function fetchScene(url, site) {
const res = await bhttp.get(`https://www.blowpass.com/en/video/${site.id}/${new URL(url).pathname.split('/').slice(-2).join('/')}`);
return scrapeScene(res.body.toString(), url, site);
}
module.exports = {
fetchLatest,
fetchUpcoming,
fetchScene,
};
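For reference, a minimal sketch of how the new module's exports might be driven by a caller; the relative require path and the site record's id/url values are hypothetical, inferred from how the fetchers build their URLs.

'use strict';

const blowpass = require('./scrapers/blowpass');

async function example() {
	// hypothetical site record; real ids and URLs come from the application's site registry
	const site = { id: 'onlyteenblowjobs', url: 'https://www.onlyteenblowjobs.com' };

	const latest = await blowpass.fetchLatest(site, 1); // first page of latest releases
	const scene = await blowpass.fetchScene(latest[0].url, site); // full details, including shootId and entryId

	console.log(scene.entryId, scene.shootId, scene.title);
}

example().catch(console.error);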

View File

@@ -1,5 +1,6 @@
'use strict';
const blowpass = require('./blowpass');
const brazzers = require('./brazzers');
const julesjordan = require('./julesjordan');
const kink = require('./kink');
@@ -10,6 +11,7 @@ const vixen = require('./vixen');
const xempire = require('./xempire');
module.exports = {
blowpass,
brazzers,
julesjordan,
kink,

View File

@@ -29,6 +29,7 @@ function scrapeLatest(html, site) {
return {
url,
shootId,
entryId: shootId,
title,
actors,
date,
@@ -73,6 +74,7 @@ async function scrapeScene(html, url, shootId, ratingRes, site) {
return {
url,
shootId,
entryId: shootId,
title,
date,
actors,

View File

@@ -25,13 +25,14 @@ function scrapeLatest(html, site) {
const originalTitle = sceneLinkElement.text().trim(); // title attribute breaks when they use \\ escaping
const { shootId, title } = extractTitle(originalTitle);
const internalId = new URL(url).pathname.split('/')[2];
const entryId = new URL(url).pathname.split('/')[2];
const date = moment.utc($(element).attr('release'), 'YYYY/MM/DD').toDate();
return {
url,
shootId: shootId || internalId,
shootId,
entryId,
title,
date,
site,
@@ -44,6 +45,7 @@ async function scrapeScene(html, url, site) {
const originalTitle = $('h1.watchpage-title').text().trim();
const { shootId, title } = extractTitle(originalTitle);
const entryId = new URL(url).pathname.split('/')[2];
const date = moment.utc($('span[title="Release date"] a').text(), 'YYYY-MM-DD').toDate();
@@ -60,6 +62,7 @@ async function scrapeScene(html, url, site) {
return {
url,
shootId,
entryId,
title,
date,
actors,

View File

@@ -32,7 +32,6 @@ function scrapeLatest(html, site) {
date,
rating: {
likes,
dislikes: 0,
},
site,
};
@@ -79,7 +78,6 @@ async function scrapeScene(html, url, site) {
tags,
rating: {
likes,
dislikes: 0,
},
site: channelSite || site,
};

View File

@@ -49,16 +49,20 @@ function scrapeUpcoming(html, site) {
});
}
function scrapeScene(html, url, site) {
async function scrapeScene(html, url, site) {
const $ = cheerio.load(html, { normalizeWhitespace: true });
const rawTags = [];
const tags = await matchTags(rawTags);
return {
url,
shootId,
title,
actors,
director: '',
director,
date,
tags,
rating: {
likes,
dislikes,
@@ -68,7 +72,7 @@ function scrapeScene(html, url, site) {
};
}
async function fetchLatest(site) {
async function fetchLatest(site, page = 1) {
const res = await bhttp.get(`${site.url}/url`);
return scrapeLatest(res.body.toString(), site);