Merged Reality Kings and Babes into MindGeek scraper. Kept classic latest wrapper for Look At Her Now and Tranny Surprise.

ThePendulum 2020-01-14 04:50:42 +01:00
parent f5704451f8
commit ef76909d3c
8 changed files with 183 additions and 335 deletions
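For orientation, the net effect of the merge, sketched below (illustrative, not the literal file contents): src/scrapers/mindgeek.js now holds the shared MindGeek API scraper, Babes simply re-exports it, and Reality Kings keeps a thin wrapper so the classic sites (Look At Her Now, Tranny Surprise) can fall back to their legacy HTML scraper.

'use strict';

// Shared scraper introduced by this commit.
const { fetchLatest, fetchScene } = require('./mindgeek');

// Stand-in for the legacy scraper kept in realitykings.js; the real
// implementation parses the serialized page state out of the HTML.
async function fetchClassic(site, page) {
  throw new Error(`classic scraper not shown in this sketch (${site.name}, page ${page})`);
}

// Sites flagged with parameters.classic keep the old HTML path;
// everything else goes through the shared MindGeek API scraper.
async function fetchLatestWrap(site, page = 1) {
  if (site.parameters && site.parameters.classic) {
    return fetchClassic(site, page);
  }

  return fetchLatest(site, page);
}

module.exports = {
  fetchLatest: fetchLatestWrap,
  fetchScene,
};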

View File

@@ -134,7 +134,7 @@
<ul class="tags nolist">
<li
v-for="tag in release.tags"
:key="`tag-${tag.slug}`"
:key="`tag-${tag.id}`"
class="tag"
>
<a

View File

@@ -111,7 +111,7 @@
>
<li
v-for="tag in release.tags"
:key="`tag-${tag.slug}`"
:key="`tag-${tag.id}`"
class="tag"
>
<router-link

View File

@@ -179,8 +179,8 @@ function getTags(groupsMap) {
group_id: groupsMap.body,
},
{
name: 'big cock',
slug: 'big-cock',
name: 'big dick',
slug: 'big-dick',
alias_for: null,
group_id: groupsMap.body,
},
@@ -989,11 +989,11 @@ function getTagAliases(tagsMap) {
},
{
name: 'big cocks',
alias_for: tagsMap['big-cock'],
alias_for: tagsMap['big-dick'],
},
{
name: 'big dick',
alias_for: tagsMap['big-cock'],
name: 'big cock',
alias_for: tagsMap['big-dick'],
},
{
name: 'big booty',

View File

@@ -1,152 +1,6 @@
'use strict';
/* eslint-disable newline-per-chained-call */
const Promise = require('bluebird');
const bhttp = require('bhttp');
const { CookieJar } = Promise.promisifyAll(require('tough-cookie'));
const moment = require('moment');
const { cookieToData } = require('../utils/cookies');
function getThumbs(scene) {
if (scene.images.poster) {
return scene.images.poster.map(image => image.xl.url);
}
if (scene.images.card_main_rect) {
return scene.images.card_main_rect
.concat(scene.images.card_secondary_rect || [])
.map(image => image.xl.url.replace('.thumb', ''));
}
return [];
}
async function scrapeLatest(items, site) {
return Promise.all(items.map(async (data) => {
const { id: entryId, title, description } = data;
const url = `https://www.babes.com/scene/${entryId}/`;
const date = new Date(data.dateReleased);
const actors = data.actors.map(actor => actor.name);
const tags = data.tags.map(tag => tag.name);
const [poster, ...photos] = getThumbs(data);
const trailer = data.videos.mediabook && (data.videos.mediabook.files['720p'] || data.videos.mediabook.files['320p']);
const duration = data.videos.mediabook && data.videos.mediabook.length;
return {
url,
entryId,
title,
description,
actors,
tags,
duration,
poster,
photos,
trailer: trailer && {
src: trailer.urls.view,
quality: parseInt(trailer.format, 10),
},
date,
site,
};
}));
}
async function scrapeScene(data, url, site) {
const { id: entryId, title, description } = data;
const date = new Date(data.dateReleased);
const actors = data.actors.map(actor => actor.name);
const tags = data.tags.map(tag => tag.name);
const [poster, ...photos] = getThumbs(data);
const trailer = data.videos.mediabook && (data.videos.mediabook.files['720p'] || data.videos.mediabook.files['320p']);
const siteName = data.collections[0].name;
const channel = siteName.replace(/\s+/g, '').toLowerCase();
return {
url,
entryId,
title,
description,
actors,
tags,
poster,
photos,
trailer: trailer && {
src: trailer.urls.view,
quality: parseInt(trailer.format, 10),
},
date,
site,
channel,
};
}
function getUrl(site) {
const { hostname, search } = new URL(site.url);
if (hostname.match(/(www\.)?babes\.com/) && search.match(/\?site=\d+/)) {
return site.url;
}
if (site.parameters && site.parameters.siteId) {
return `https://www.babes.com/scenes?site=${site.parameters.siteId}`;
}
throw new Error(`Babes site '${site.name}' (${site.url}) not supported`);
}
async function fetchLatest(site, page = 1) {
const url = getUrl(site);
const { search } = new URL(url);
const siteId = new URLSearchParams(search).get('site');
const cookieJar = new CookieJar();
const session = bhttp.session({ cookieJar });
console.log(url, siteId);
await session.get(url);
const cookieString = await cookieJar.getCookieStringAsync(url);
const { instance_token: instanceToken } = cookieToData(cookieString);
const beforeDate = moment().add('1', 'day').format('YYYY-MM-DD');
const limit = 10;
const apiUrl = `https://site-api.project1service.com/v2/releases?collectionId=${siteId}&dateReleased=<${beforeDate}&limit=${limit}&offset=${limit * (page - 1)}&orderBy=-dateReleased&type=scene`;
const res = await session.get(apiUrl, {
headers: {
Instance: instanceToken,
},
});
return scrapeLatest(res.body.result, site);
}
async function fetchScene(url, site) {
const entryId = url.match(/\d+/)[0];
const cookieJar = new CookieJar();
const session = bhttp.session({ cookieJar });
await session.get(url);
const cookieString = await cookieJar.getCookieStringAsync(url);
const { instance_token: instanceToken } = cookieToData(cookieString);
const res = await session.get(`https://site-api.project1service.com/v2/releases/${entryId}`, {
headers: {
Instance: instanceToken,
},
});
return scrapeScene(res.body.result, url, site);
}
const { fetchLatest, fetchScene } = require('./mindgeek');
module.exports = {
fetchLatest,

src/scrapers/mindgeek.js (new file, 157 lines)
View File

@@ -0,0 +1,157 @@
'use strict';

/* eslint-disable newline-per-chained-call */
const Promise = require('bluebird');
const bhttp = require('bhttp');
const { CookieJar } = Promise.promisifyAll(require('tough-cookie'));
const moment = require('moment');

const { cookieToData } = require('../utils/cookies');

function getThumbs(scene) {
  if (scene.images.poster) {
    return scene.images.poster.map(image => image.xl.url);
  }

  if (scene.images.card_main_rect) {
    return scene.images.card_main_rect
      .concat(scene.images.card_secondary_rect || [])
      .map(image => image.xl.url.replace('.thumb', ''));
  }

  return [];
}

function scrapeLatestX(data, site) {
  const { id: entryId, title, description } = data;
  const hostname = site.parameters?.classic ? site.url : site.network.url;
  const url = `${hostname}/scene/${entryId}/`;
  const date = new Date(data.dateReleased);
  const actors = data.actors.map(actor => actor.name);
  const tags = data.tags.map(tag => tag.name);

  const [poster, ...photos] = getThumbs(data);
  const trailer = data.videos.mediabook && (data.videos.mediabook.files['720p'] || data.videos.mediabook.files['320p']);
  const duration = data.videos.mediabook && data.videos.mediabook.length;

  return {
    url,
    entryId,
    title,
    description,
    actors,
    tags,
    duration,
    poster,
    photos,
    trailer: trailer && {
      src: trailer.urls.view,
      quality: parseInt(trailer.format, 10),
    },
    date,
    site,
  };
}

async function scrapeLatest(items, site) {
  return Promise.all(items.map(async data => scrapeLatestX(data, site)));
}

async function scrapeScene(data, url, site) {
  const { id: entryId, title, description } = data;
  const date = new Date(data.dateReleased);
  const actors = data.actors.map(actor => actor.name);
  const tags = data.tags.map(tag => tag.name);

  const [poster, ...photos] = getThumbs(data);
  const trailer = data.videos.mediabook && (data.videos.mediabook.files['720p'] || data.videos.mediabook.files['320p']);

  const siteName = data.collections[0].name;
  const channel = siteName.replace(/\s+/g, '').toLowerCase();

  return {
    url,
    entryId,
    title,
    description,
    actors,
    tags,
    poster,
    photos,
    trailer: trailer && {
      src: trailer.urls.view,
      quality: parseInt(trailer.format, 10),
    },
    date,
    site,
    channel,
  };
}

function getUrl(site) {
  const { search } = new URL(site.url);

  if (search.match(/\?site=\d+/)) {
    return site.url;
  }

  if (site.parameters && site.parameters.siteId) {
    return `${site.network.url}/scenes?site=${site.parameters.siteId}`;
  }

  throw new Error(`Mind Geek site '${site.name}' (${site.url}) not supported`);
}

async function fetchLatest(site, page = 1) {
  const url = getUrl(site);
  const { search } = new URL(url);
  const siteId = new URLSearchParams(search).get('site');

  const cookieJar = new CookieJar();
  const session = bhttp.session({ cookieJar });

  await session.get(url);

  const cookieString = await cookieJar.getCookieStringAsync(url);
  const { instance_token: instanceToken } = cookieToData(cookieString);

  const beforeDate = moment().add('1', 'day').format('YYYY-MM-DD');
  const limit = 10;
  const apiUrl = `https://site-api.project1service.com/v2/releases?collectionId=${siteId}&dateReleased=<${beforeDate}&limit=${limit}&offset=${limit * (page - 1)}&orderBy=-dateReleased&type=scene`;

  const res = await session.get(apiUrl, {
    headers: {
      Instance: instanceToken,
    },
  });

  return scrapeLatest(res.body.result, site);
}

async function fetchScene(url, site) {
  const entryId = url.match(/\d+/)[0];

  const cookieJar = new CookieJar();
  const session = bhttp.session({ cookieJar });

  await session.get(url);

  const cookieString = await cookieJar.getCookieStringAsync(url);
  const { instance_token: instanceToken } = cookieToData(cookieString);

  const res = await session.get(`https://site-api.project1service.com/v2/releases/${entryId}`, {
    headers: {
      Instance: instanceToken,
    },
  });

  return scrapeScene(res.body.result, url, site);
}

module.exports = {
  scrapeLatestX,
  fetchLatest,
  fetchScene,
};
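A hedged usage sketch of the new module. The site object shape (url, name, parameters, network.url) mirrors what the code above reads, but the concrete values are invented, so this will not return real data as written.

const { fetchLatest, fetchScene } = require('./mindgeek');

// Hypothetical site row — only the fields the scraper actually touches.
const site = {
  name: 'Example Site',
  url: 'https://www.example.com/scenes?site=123', // ?site=<collectionId> satisfies getUrl()
  parameters: null,
  network: { url: 'https://www.example.com' },
};

// Page 2 of latest releases, then a single scene by the numeric entry id in its URL.
fetchLatest(site, 2)
  .then(releases => console.log(releases.map(release => release.title)))
  .catch(console.error);

fetchScene('https://www.example.com/scene/2863391/', site)
  .then(release => console.log(release.title, release.actors))
  .catch(console.error);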

View File

@@ -1,60 +1,9 @@
'use strict';
/* eslint-disable newline-per-chained-call */
const Promise = require('bluebird');
const bhttp = require('bhttp');
const { CookieJar } = Promise.promisifyAll(require('tough-cookie'));
const cheerio = require('cheerio');
const moment = require('moment');
const { cookieToData } = require('../utils/cookies');
function getThumbs(scene) {
if (scene.images.poster) {
return scene.images.poster.map(image => image.xl.url);
}
if (scene.images.card_main_rect) {
return scene.images.card_main_rect
.concat(scene.images.card_secondary_rect || [])
.map(image => image.xl.url.replace('.thumb', ''));
}
return [];
}
async function scrapeLatest(items, site) {
return Promise.all(items.map(async (data) => {
const { id: entryId, title, description } = data;
const url = `https://www.realitykings.com/scene/${entryId}/`;
const date = new Date(data.dateReleased);
const actors = data.actors.map(actor => actor.name);
const tags = data.tags.map(tag => tag.name);
const [poster, ...photos] = getThumbs(data);
const trailer = data.videos.mediabook && (data.videos.mediabook.files['720p'] || data.videos.mediabook.files['320p']);
const duration = data.videos.mediabook && data.videos.mediabook.length;
return {
url,
entryId,
title,
description,
actors,
tags,
duration,
poster,
photos,
trailer: trailer && {
src: trailer.urls.view,
quality: parseInt(trailer.format, 10),
},
date,
site,
};
}));
}
const { scrapeLatestX, fetchLatest, fetchScene } = require('./mindgeek');
function scrapeLatestClassic(html, site) {
const $ = cheerio.load(html, { normalizeWhitespace: true });
@@ -66,84 +15,7 @@ function scrapeLatestClassic(html, site) {
const stateString = stateTag.slice(prefixIndex + prefix.length - 1, stateTag.indexOf('};', prefixIndex) + suffix.length - 1);
const data = JSON.parse(stateString);
return Object.values(data.entities.releases).map((scene) => {
const release = { site };
release.entryId = scene.id;
release.url = `${site.url}/scene/${release.entryId}/`;
release.title = scene.title;
release.description = scene.description;
release.date = new Date(scene.dateReleased);
release.actors = scene.actors.map(actorId => data.entities.actors[actorId].name);
release.tags = scene.tags.map(tagId => data.entities.tags[tagId].name);
const [poster, ...photos] = getThumbs(scene);
release.poster = poster;
release.photos = photos;
if (scene.videos.mediabook) {
const trailer = (scene.videos.mediabook.files['720p'] || scene.videos.mediabook.files['320p']);
release.duration = scene.videos.mediabook.length;
release.trailer = trailer && {
src: trailer.urls.view,
quality: parseInt(trailer.format, 10),
};
}
return release;
});
}
async function scrapeScene(data, url, site) {
const { id: entryId, title, description } = data;
const date = new Date(data.dateReleased);
const actors = data.actors.map(actor => actor.name);
const tags = data.tags.map(tag => tag.name);
const [poster, ...photos] = getThumbs(data);
const trailer = data.videos.mediabook && (data.videos.mediabook.files['720p'] || data.videos.mediabook.files['320p']);
const siteName = data.collections[0].name;
// const siteId = data.collections[0].id;
const channel = siteName.replace(/\s+/g, '').toLowerCase();
// const siteUrl = `https://www.realitykings.com/scenes?site=${siteId}`;
return {
url,
entryId,
title,
description,
actors,
tags,
poster,
photos,
trailer: trailer && {
src: trailer.urls.view,
quality: parseInt(trailer.format, 10),
},
date,
site,
channel,
};
}
function getUrl(site) {
const { hostname, search } = new URL(site.url);
if (hostname.match(/(www\.)?realitykings\.com/) && search.match(/\?site=\d+/)) {
return site.url;
}
if (site.parameters && site.parameters.siteId) {
return `https://www.realitykings.com/scenes?site=${site.parameters.siteId}`;
}
throw new Error(`Reality Kings site '${site.name}' (${site.url}) not supported`);
return Object.values(data.entities.releases).map(scene => scrapeLatestX(scene, site));
}
async function fetchClassic(site, page) {
@@ -156,56 +28,15 @@ async function fetchClassic(site, page) {
return null;
}
async function fetchLatest(site, page = 1) {
async function fetchLatestWrap(site, page = 1) {
if (site.parameters && site.parameters.classic) {
return fetchClassic(site, page);
}
const url = getUrl(site);
const { search } = new URL(url);
const siteId = new URLSearchParams(search).get('site');
const cookieJar = new CookieJar();
const session = bhttp.session({ cookieJar });
await session.get(url);
const cookieString = await cookieJar.getCookieStringAsync(url);
const { instance_token: instanceToken } = cookieToData(cookieString);
const beforeDate = moment().add('1', 'day').format('YYYY-MM-DD');
const limit = 10;
const apiUrl = `https://site-api.project1service.com/v2/releases?collectionId=${siteId}&dateReleased=<${beforeDate}&limit=${limit}&offset=${limit * (page - 1)}&orderBy=-dateReleased&type=scene`;
const res = await session.get(apiUrl, {
headers: {
Instance: instanceToken,
},
});
return scrapeLatest(res.body.result, site);
}
async function fetchScene(url, site) {
const entryId = url.match(/\d+/)[0];
const cookieJar = new CookieJar();
const session = bhttp.session({ cookieJar });
await session.get(url);
const cookieString = await cookieJar.getCookieStringAsync(url);
const { instance_token: instanceToken } = cookieToData(cookieString);
const res = await session.get(`https://site-api.project1service.com/v2/releases/${entryId}`, {
headers: {
Instance: instanceToken,
},
});
return scrapeScene(res.body.result, url, site);
return fetchLatest(site, page);
}
module.exports = {
fetchLatest,
fetchLatest: fetchLatestWrap,
fetchScene,
};
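The wrapper above implies the two retained classic sites are flagged through their parameters column; something along these lines (illustrative values only, not the project's actual seed data):

// Hypothetical rows from the sites table, as fetchLatestWrap would receive them.
const classicSite = {
  slug: 'lookathernow',
  url: 'https://www.lookathernow.com',
  parameters: { classic: true }, // routes fetchLatestWrap to fetchClassic
};

const apiSite = {
  slug: 'momsbangteens',
  url: 'https://www.realitykings.com/scenes?site=161', // site id is made up
  parameters: null, // routes fetchLatestWrap to the shared MindGeek fetchLatest
};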

View File

@@ -102,7 +102,10 @@ function sitesByNetwork(sites) {
async function fetchSitesFromArgv() {
const rawSites = await knex('sites')
.select('sites.*', 'networks.name as network_name', 'networks.slug as network_slug', 'networks.parameters as network_parameters')
.select(
'sites.*',
'networks.name as network_name', 'networks.slug as network_slug', 'networks.url as network_url', 'networks.description as network_description', 'networks.parameters as network_parameters',
)
.whereIn('sites.slug', argv.sites || [])
.orWhereIn('networks.slug', argv.networks || [])
.leftJoin('networks', 'sites.network_id', 'networks.id');
@@ -120,7 +123,10 @@ async function fetchSitesFromConfig() {
const networkIds = networks.map(network => network.id);
const rawSites = await knex('sites')
.select('sites.*', 'networks.name as network_name')
.select(
'sites.*',
'networks.name as network_name', 'networks.slug as network_slug', 'networks.url as network_url', 'networks.description as network_description', 'networks.parameters as network_parameters',
)
.whereIn('sites.slug', included.sites || [])
.orWhereIn('network_id', networkIds)
.leftJoin('networks', 'sites.network_id', 'networks.id');
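The extra network_* columns only matter once they are folded back into a nested site.network object, which is what mindgeek.js reads (site.network.url). The project's curation step is not shown in this diff; a minimal sketch of the mapping it presumably performs:

// Hypothetical helper: turn a flat joined row into the shape the scrapers expect.
function curateSite(row) {
  return {
    ...row,
    network: {
      name: row.network_name,
      slug: row.network_slug,
      url: row.network_url,
      description: row.network_description,
      parameters: row.network_parameters,
    },
  };
}

// e.g. const sites = rawSites.map(curateSite);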

View File

@@ -43,16 +43,16 @@ async function matchTags(rawTags) {
.pluck('aliases.id')
.whereIn('tags.name', tags)
.orWhereIn('tags.slug', tags)
.leftJoin('tags as aliases', function join() {
this
.on('tags.alias_for', 'aliases.id')
.orOn('tags.id', 'aliases.id');
})
.where(function where() {
this
.whereNull('tags.alias_for')
.orWhereNull('aliases.alias_for');
})
.join('tags as aliases', function join() {
this
.on('tags.alias_for', 'aliases.id')
.orOn('tags.id', 'aliases.id');
})
.groupBy('aliases.id');
return tagEntries;
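For context, a hedged usage sketch: matchTags is assumed to take the free-form tag strings from a scraped release and resolve them to canonical tag ids, which is what the pluck('aliases.id') above returns once aliases are followed through alias_for.

// Hypothetical scraped tags, including an alias seeded above ('big cocks' -> 'big-dick').
const rawTags = ['big cocks', 'threesome', 'blonde'];

matchTags(rawTags)
  .then(tagIds => console.log(tagIds)) // canonical tag ids, e.g. for associating with a release
  .catch(console.error);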