forked from DebaucheryLibrarian/traxxx
Refactored MOFOS scraper to use generic MindGeek scraper. Added Digital Playground and Fake Hub.
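All three scraper files touched below delegate to a shared ./mindgeek module whose contents are not part of this diff. As rough orientation, here is a minimal sketch of what that module plausibly exports, assuming it keeps the fetchLatest(site, page) / fetchScene(url, site) signatures and the project1service instance-token flow of the MOFOS implementation removed further down; the getInstanceToken helper and the simplified return value are illustrative, not the actual contents of src/scrapers/mindgeek.js.

'use strict';

const Promise = require('bluebird');
const bhttp = require('bhttp');
const { CookieJar } = Promise.promisifyAll(require('tough-cookie'));

const { cookieToData } = require('../utils/cookies');

// Prime a session against the scene/site URL, then read the instance_token
// cookie that the project1service API requires, as the removed MOFOS
// functions did.
async function getInstanceToken(session, cookieJar, url) {
  await session.get(url);

  const cookieString = await cookieJar.getCookieStringAsync(url);
  const { instance_token: instanceToken } = cookieToData(cookieString);

  return instanceToken;
}

// Hypothetical generic fetchScene: the URL supplied by the site-specific
// wrapper decides which MindGeek network is primed, so nothing is hard-coded
// to mofos.com. fetchLatest would follow the same pattern, reading the
// collection id from site.url as the removed fetchLatest did.
async function fetchScene(url, site) {
  const entryId = url.match(/\d+/)[0];

  const cookieJar = new CookieJar();
  const session = bhttp.session({ cookieJar });
  const instanceToken = await getInstanceToken(session, cookieJar, url);

  const res = await session.get(`https://site-api.project1service.com/v2/releases/${entryId}`, {
    headers: {
      Instance: instanceToken,
    },
  });

  // The real module presumably maps the release through something like the
  // removed scrapeScene(); the raw release object is returned here for brevity.
  return res.body.result;
}

module.exports = {
  fetchScene,
};

Centralising the session and instance-token handling this way is what lets each additional MindGeek network site ship as the two-line re-export files added in this commit.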
src/scrapers/digitalplayground.js (new file, +8)
@@ -0,0 +1,8 @@
+'use strict';
+
+const { fetchScene, fetchLatest } = require('./mindgeek');
+
+module.exports = {
+  fetchLatest,
+  fetchScene,
+};
src/scrapers/fakehub.js (new file, +8)
@@ -0,0 +1,8 @@
+'use strict';
+
+const { fetchScene, fetchLatest } = require('./mindgeek');
+
+module.exports = {
+  fetchLatest,
+  fetchScene,
+};
src/scrapers/mofos.js
@@ -1,147 +1,6 @@
 'use strict';
 
-const Promise = require('bluebird');
-const bhttp = require('bhttp');
-const { CookieJar } = Promise.promisifyAll(require('tough-cookie'));
-const moment = require('moment');
-
-const { fetchSites } = require('../sites');
-const { cookieToData } = require('../utils/cookies');
-const { matchTags } = require('../tags');
-
-function getThumbs(scene) {
-  if (scene.images.poster) {
-    return scene.images.poster.map(image => image.xl.url);
-  }
-
-  if (scene.images.card_main_rect) {
-    return scene.images.card_main_rect
-      .concat(scene.images.card_secondary_rect || [])
-      .map(image => image.xl.url.replace('.thumb', ''));
-  }
-
-  return [];
-}
-
-/* eslint-disable newline-per-chained-call */
-async function scrapeLatest(items, site) {
-  return Promise.all(items.map(async (data) => {
-    const { id: entryId, title, description } = data;
-    const url = `https://www.mofos.com/scene/${entryId}/`;
-    const date = new Date(data.dateReleased);
-    const actors = data.actors.map(actor => actor.name);
-
-    const rawTags = data.tags.map(tag => tag.name);
-    const tags = await matchTags(rawTags);
-
-    const [poster, ...photos] = getThumbs(data);
-    const trailer = data.videos.mediabook && (data.videos.mediabook.files['720p'] || data.videos.mediabook.files['320p']);
-
-    return {
-      url,
-      entryId,
-      title,
-      description,
-      actors,
-      tags,
-      poster,
-      photos,
-      trailer: {
-        src: trailer.urls.view,
-        quality: parseInt(trailer.format, 10),
-      },
-      date,
-      site,
-    };
-  }));
-}
-
-async function scrapeScene(data, url, site) {
-  const { id: entryId, title, description } = data;
-  const date = new Date(data.dateReleased);
-  const actors = data.actors.map(actor => actor.name);
-
-  const rawTags = data.tags.map(tag => tag.name);
-
-  const [poster, ...photos] = getThumbs(data);
-  const trailer = data.videos.mediabook && (data.videos.mediabook.files['720p'] || data.videos.mediabook.files['320p']);
-
-  const siteName = data.collections[0].name;
-  const siteId = data.collections[0].id;
-  const siteSlug = siteName.replace(/\s+/g, '').toLowerCase();
-  const siteUrl = `https://www.mofos.com/scenes?site=${siteId}`;
-
-  const [[channelSite], tags] = await Promise.all([
-    site.isFallback
-      ? fetchSites({
-        slug: siteSlug,
-        name: siteName,
-        url: siteUrl,
-      })
-      : [site],
-    matchTags(rawTags),
-  ]);
-
-  return {
-    url,
-    entryId,
-    title,
-    description,
-    actors,
-    tags,
-    poster,
-    photos,
-    trailer: {
-      src: trailer.urls.view,
-      quality: parseInt(trailer.format, 10),
-    },
-    date,
-    site: channelSite,
-  };
-}
-
-async function fetchLatest(site, page = 1) {
-  const { search } = new URL(site.url);
-  const siteId = new URLSearchParams(search).get('site');
-
-  const cookieJar = new CookieJar();
-  const session = bhttp.session({ cookieJar });
-
-  await session.get(site.url);
-
-  const cookieString = await cookieJar.getCookieStringAsync(site.url);
-  const { instance_token: instanceToken } = cookieToData(cookieString);
-
-  const beforeDate = moment().add('1', 'day').format('YYYY-MM-DD');
-  const limit = 10;
-  const res = await session.get(`https://site-api.project1service.com/v2/releases?collectionId=${siteId}&dateReleased=<${beforeDate}&limit=${limit}&offset=${limit * (page - 1)}&orderBy=-dateReleased&type=scene`, {
-    headers: {
-      Instance: instanceToken,
-    },
-  });
-
-  return scrapeLatest(res.body.result, site);
-}
-
-async function fetchScene(url, site) {
-  const entryId = url.match(/\d+/)[0];
-
-  const cookieJar = new CookieJar();
-  const session = bhttp.session({ cookieJar });
-
-  await session.get(url);
-
-  const cookieString = await cookieJar.getCookieStringAsync(url);
-  const { instance_token: instanceToken } = cookieToData(cookieString);
-
-  const res = await session.get(`https://site-api.project1service.com/v2/releases/${entryId}`, {
-    headers: {
-      Instance: instanceToken,
-    },
-  });
-
-  return scrapeScene(res.body.result, url, site);
-}
+const { fetchScene, fetchLatest } = require('./mindgeek');
 
 module.exports = {
   fetchLatest,

@@ -7,7 +7,9 @@ const bang = require('./bang');
 const bangbros = require('./bangbros');
 const blowpass = require('./blowpass');
 const dogfart = require('./dogfart');
+const digitalplayground = require('./digitalplayground');
 const evilangel = require('./evilangel');
+const fakehub = require('./fakehub');
 const jayrock = require('./jayrock');
 const kink = require('./kink');
 const mikeadriano = require('./mikeadriano');
@@ -41,9 +43,11 @@ module.exports = {
   blowpass,
   brazzers,
   ddfnetwork,
+  digitalplayground,
   dogfart,
   dogfartnetwork: dogfart,
   evilangel,
+  fakehub,
   jayrock,
   julesjordan,
   kellymadison,