Added generic photo page extract method to media module, to allow pre-filtering sources and relieve the Dogfart scraper. Added 'transsexual' site tag to Trans Angels.

ThePendulum 2020-02-02 22:36:33 +01:00
parent 204a4d4bdd
commit 0ed1b2eff9
4 changed files with 51 additions and 26 deletions
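The extract method lets a scraper hand the media module lazy photo sources instead of resolving every photo page itself. A minimal sketch of the source shape the media module now accepts (the buildPhotoSources helper is hypothetical; the selector matches the Dogfart change below):

// Hypothetical scraper helper illustrating the new source shape.
// The media module fetches src itself and calls extract with a page
// query function q(selector, attribute) to obtain the final photo URL.
function buildPhotoSources(pageUrls) {
  return pageUrls.map(pageUrl => ({
    src: pageUrl, // photo page containing a (presumably) tokenized photo
    extract: q => q('.scenes-module img', 'src'),
  }));
}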


@@ -1654,6 +1654,7 @@ function getSiteTags() {
   thegayoffice: ['gay'],
   tiny4k: ['4k'],
   toptobottom: ['gay'],
+  transangels: ['transsexual'],
   trueanal: ['anal'],
   tspussyhunters: ['transsexual'],
 };


@@ -12,6 +12,7 @@ const blake2 = require('blake2');
 const logger = require('./logger');
 const knex = require('./knex');
 const upsert = require('./utils/upsert');
+const { ex } = require('./utils/q');

 function getHash(buffer) {
   const hash = blake2.createHash('blake2b', { digestLength: 24 });
@@ -81,12 +82,22 @@ function curatePhotoEntries(files) {

 async function findDuplicates(photos, identifier, prop = null, label) {
   const duplicates = await knex('media')
-    .whereIn(identifier, photos.flat().map(photo => (prop ? photo[prop] : photo)));
+    .whereIn(identifier, photos.flat().map((photo) => {
+      if (prop) return photo[prop];
+      if (photo.src) return photo.src;
+      return photo;
+    }));

   const duplicateLookup = new Set(duplicates.map(photo => photo[prop || identifier]));
-  const originals = photos.filter(source => (Array.isArray(source) // fallbacks provided?
-    ? !source.some(sourceX => duplicateLookup.has(prop ? sourceX[prop] : sourceX)) // ensure none of the sources match
-    : !duplicateLookup.has(prop ? source[prop] : source)));
+  const originals = photos.filter((source) => {
+    if (Array.isArray(source)) {
+      // fallbacks provided; ensure none of the sources is already present
+      return !source.some(sourceX => duplicateLookup.has(prop ? sourceX[prop] : (sourceX.src || sourceX)));
+    }
+
+    return !duplicateLookup.has(prop ? source[prop] : (source.src || source));
+  });

   if (duplicates.length > 0) {
     logger.info(`${duplicates.length} media items already present by ${identifier} for ${label}`);
@@ -99,7 +110,26 @@ async function findDuplicates(photos, identifier, prop = null, label) {
   return [duplicates, originals];
 }

+async function extractPhoto(source) {
+  const res = await bhttp.get(source.src);
+
+  if (res.statusCode === 200) {
+    const { q } = ex(res.body.toString());
+
+    return source.extract(q);
+  }
+
+  return null;
+}
+
 async function fetchPhoto(photoUrl, index, label, attempt = 1) {
+  if (photoUrl.src && photoUrl.extract) {
+    // source links to a page containing a (presumably) tokenized photo
+    const photo = await extractPhoto(photoUrl);
+
+    return fetchPhoto(photo, index, label);
+  }
+
   if (Array.isArray(photoUrl)) {
     return photoUrl.reduce(async (outcome, url) => outcome.catch(async () => {
       const photo = await fetchPhoto(url, index, label);
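Together with the findDuplicates change above, this allows duplicates to be filtered out before any photo page is fetched. A rough sketch of how a caller might wire the two together (the storePhotos helper and the 'source' identifier are hypothetical, not part of this commit):

// Hypothetical caller: pre-filter lazy sources, then fetch only originals.
// findDuplicates and fetchPhoto are the functions modified above;
// { src, extract } objects are compared by their src URL.
async function storePhotos(photos, label) {
  // photos may mix plain URLs, fallback arrays and { src, extract } objects
  const [, originals] = await findDuplicates(photos, 'source', null, label);

  // only originals reach fetchPhoto, so tokenized photo pages behind
  // already-stored media are never requested
  return Promise.all(originals.map((photo, index) => fetchPhoto(photo, index, label)));
}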


@@ -9,7 +9,11 @@ async function fetchScene(url, site) {
   const res = await bhttp.get(`https://www.blowpass.com/en/video/${site.id}/${new URL(url).pathname.split('/').slice(-2).join('/')}`);
   const release = await scrapeScene(res.body.toString(), url, site);

-  release.channel = release.$('.siteNameSpan').text().trim().toLowerCase();
+  release.channel = release.$('.siteNameSpan')
+    .text()
+    .trim()
+    .toLowerCase()
+    .replace('.com', '');

-  if (['onlyteenblowjobs.com', 'mommyblowsbest.com'].includes(release.channel)) release.url = url.replace(/video\/\w+\//, 'scene/');
+  if (['onlyteenblowjobs', 'mommyblowsbest'].includes(release.channel)) release.url = url.replace(/video\/\w+\//, 'scene/');
   else release.url = url.replace(/video\/\w+\//, 'video/');


@@ -1,21 +1,11 @@
 'use strict';

 /* eslint-disable newline-per-chained-call */
-const Promise = require('bluebird');
+// const Promise = require('bluebird');
 const bhttp = require('bhttp');
 const { JSDOM } = require('jsdom');
 const moment = require('moment');

-async function getPhoto(url) {
-  const res = await bhttp.get(url);
-  const html = res.body.toString();
-
-  const { document } = new JSDOM(html).window;
-  const photoUrl = document.querySelector('.scenes-module img').src;
-
-  return photoUrl;
-}
-
 async function getPhotos(albumUrl) {
   const res = await bhttp.get(albumUrl);
   const html = res.body.toString();
@@ -24,12 +14,13 @@ async function getPhotos(albumUrl) {
   const lastPhotoPage = Array.from(document.querySelectorAll('.preview-image-container a')).slice(-1)[0].href;
   const lastPhotoIndex = parseInt(lastPhotoPage.match(/\d+.jpg/)[0], 10);

-  const photoUrls = await Promise.map(Array.from({ length: lastPhotoIndex }), async (value, index) => {
-    const pageUrl = `https://blacksonblondes.com${lastPhotoPage.replace(/\d+.jpg/, `${index.toString().padStart(3, '0')}.jpg`)}`;
-
-    return getPhoto(pageUrl);
-  }, {
-    concurrency: 5,
-  });
+  const photoUrls = Array.from({ length: lastPhotoIndex }, (value, index) => {
+    const pageUrl = `https://blacksonblondes.com${lastPhotoPage.replace(/\d+.jpg/, `${(index + 1).toString().padStart(3, '0')}.jpg`)}`;
+
+    return {
+      src: pageUrl,
+      extract: q => q('.scenes-module img', 'src'),
+    };
+  });

   return photoUrls;
@@ -90,6 +81,9 @@ async function scrapeScene(html, url, site) {
     .trim();

   const channel = document.querySelector('.site-name').textContent.split('.')[0].toLowerCase();
+  const { origin, pathname } = new URL(url);
+  const entryId = `${channel}_${pathname.split('/').slice(-2)[0]}`;
+
   const date = new Date(document.querySelector('meta[itemprop="uploadDate"]').content);
   const duration = moment
     .duration(`00:${document
@@ -103,13 +97,13 @@ async function scrapeScene(html, url, site) {
   const { trailer } = trailerElement.dataset;

   const lastPhotosUrl = Array.from(document.querySelectorAll('.pagination a')).slice(-1)[0].href;
-  const { origin, pathname } = new URL(url);
   const photos = await getPhotos(`${origin}${pathname}${lastPhotosUrl}`, site, url);
   const stars = Math.floor(Number(document.querySelector('span[itemprop="average"]').textContent) / 2);
   const tags = Array.from(document.querySelectorAll('.scene-details .categories a')).map(({ textContent }) => textContent);

   return {
+    entryId,
     url: `${origin}${pathname}`,
     title,
     description,
@@ -131,11 +125,7 @@ async function scrapeScene(html, url, site) {
 }

 async function fetchLatest(site, page = 1) {
-  console.time('dogfart');
-  console.log('scraping...', site.name);
-
   const res = await bhttp.get(`https://dogfartnetwork.com/tour/scenes/?p=${page}`);
-  console.timeEnd('dogfart');
-  console.log('done!', site.name);
-
   return scrapeLatest(res.body.toString(), site);
 }