Wrapping up

ThePendulum 2020-01-14 01:51:18 +01:00
parent 8c76568f44
commit f5704451f8
9 changed files with 154 additions and 0 deletions

8 binary image files added (previews not shown); sizes: 6.2 KiB, 13 KiB, 14 KiB, 6.7 KiB, 1.3 KiB, 6.2 KiB, 28 KiB, 24 KiB.

src/scrapers/babes.js (new file, +154)

@@ -0,0 +1,154 @@
'use strict';

/* eslint-disable newline-per-chained-call */
const Promise = require('bluebird');
const bhttp = require('bhttp');
const { CookieJar } = Promise.promisifyAll(require('tough-cookie'));
const moment = require('moment');

const { cookieToData } = require('../utils/cookies');
// Pick the largest available images; posters take precedence over card thumbnails
function getThumbs(scene) {
  if (scene.images.poster) {
    return scene.images.poster.map(image => image.xl.url);
  }

  if (scene.images.card_main_rect) {
    return scene.images.card_main_rect
      .concat(scene.images.card_secondary_rect || [])
      .map(image => image.xl.url.replace('.thumb', ''));
  }

  return [];
}
// Map the raw API release objects from a listing page to release entries
async function scrapeLatest(items, site) {
  return Promise.all(items.map(async (data) => {
    const { id: entryId, title, description } = data;
    const url = `https://www.babes.com/scene/${entryId}/`;
    const date = new Date(data.dateReleased);

    const actors = data.actors.map(actor => actor.name);
    const tags = data.tags.map(tag => tag.name);
    const [poster, ...photos] = getThumbs(data);
    const trailer = data.videos.mediabook && (data.videos.mediabook.files['720p'] || data.videos.mediabook.files['320p']);
    const duration = data.videos.mediabook && data.videos.mediabook.length;

    return {
      url,
      entryId,
      title,
      description,
      actors,
      tags,
      duration,
      poster,
      photos,
      trailer: trailer && {
        src: trailer.urls.view,
        quality: parseInt(trailer.format, 10),
      },
      date,
      site,
    };
  }));
}
// Map a single raw API release object to a release entry, including its channel
async function scrapeScene(data, url, site) {
  const { id: entryId, title, description } = data;
  const date = new Date(data.dateReleased);

  const actors = data.actors.map(actor => actor.name);
  const tags = data.tags.map(tag => tag.name);
  const [poster, ...photos] = getThumbs(data);
  const trailer = data.videos.mediabook && (data.videos.mediabook.files['720p'] || data.videos.mediabook.files['320p']);

  // Derive the channel slug from the scene's first collection name
  const siteName = data.collections[0].name;
  const channel = siteName.replace(/\s+/g, '').toLowerCase();

  return {
    url,
    entryId,
    title,
    description,
    actors,
    tags,
    poster,
    photos,
    trailer: trailer && {
      src: trailer.urls.view,
      quality: parseInt(trailer.format, 10),
    },
    date,
    site,
    channel,
  };
}
// Resolve the scenes listing URL for a site, either directly or from its site ID
function getUrl(site) {
  const { hostname, search } = new URL(site.url);

  if (hostname.match(/(www\.)?babes\.com/) && search.match(/\?site=\d+/)) {
    return site.url;
  }

  if (site.parameters && site.parameters.siteId) {
    return `https://www.babes.com/scenes?site=${site.parameters.siteId}`;
  }

  throw new Error(`Babes site '${site.name}' (${site.url}) not supported`);
}
// Fetch a page of the latest scenes through the project1service API
async function fetchLatest(site, page = 1) {
  const url = getUrl(site);
  const { search } = new URL(url);
  const siteId = new URLSearchParams(search).get('site');

  // Visit the site first to acquire the instance token cookie required by the API
  const cookieJar = new CookieJar();
  const session = bhttp.session({ cookieJar });
  await session.get(url);

  const cookieString = await cookieJar.getCookieStringAsync(url);
  const { instance_token: instanceToken } = cookieToData(cookieString);

  const beforeDate = moment().add(1, 'day').format('YYYY-MM-DD');
  const limit = 10;
  const apiUrl = `https://site-api.project1service.com/v2/releases?collectionId=${siteId}&dateReleased=<${beforeDate}&limit=${limit}&offset=${limit * (page - 1)}&orderBy=-dateReleased&type=scene`;

  const res = await session.get(apiUrl, {
    headers: {
      Instance: instanceToken,
    },
  });

  return scrapeLatest(res.body.result, site);
}
// Fetch a single scene by the numeric ID in its URL
async function fetchScene(url, site) {
  const entryId = url.match(/\d+/)[0];

  // Same token dance as fetchLatest: the API requires the instance token cookie
  const cookieJar = new CookieJar();
  const session = bhttp.session({ cookieJar });
  await session.get(url);

  const cookieString = await cookieJar.getCookieStringAsync(url);
  const { instance_token: instanceToken } = cookieToData(cookieString);

  const res = await session.get(`https://site-api.project1service.com/v2/releases/${entryId}`, {
    headers: {
      Instance: instanceToken,
    },
  });

  return scrapeScene(res.body.result, url, site);
}
module.exports = {
  fetchLatest,
  fetchScene,
};
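
A minimal sketch of how these exports might be driven, assuming the site object shape the code above reads from; the site ID and scene URL below are made-up placeholders, not values from this commit:

const { fetchLatest, fetchScene } = require('./src/scrapers/babes');

// Hypothetical site definition; parameters.siteId is the fallback getUrl() checks for
const site = {
  name: 'Babes',
  url: 'https://www.babes.com/scenes?site=23',
  parameters: null,
};

(async () => {
  // First page of the latest scenes for this site
  const latest = await fetchLatest(site, 1);
  console.log(latest.map(release => release.title));

  // A single scene, identified by the numeric ID in its URL
  const scene = await fetchScene('https://www.babes.com/scene/12345/', site);
  console.log(scene.title, scene.actors);
})();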