Added studio support. Added studios for LegalPorno. Improved media fetch error handling. Fixed DDFNetwork scraper and added media support.

This commit is contained in:
2019-10-30 04:45:42 +01:00
parent d1ef444d75
commit 382e40b651
12 changed files with 254 additions and 234 deletions

View File

@@ -90,6 +90,8 @@ async function storeActors(release, releaseEntry) {
const actors = await knex('actors').whereIn('name', release.actors);
const newActors = release.actors.filter(actorName => !actors.some(actor => actor.name === actorName));
console.log(release.actors, actors, newActors);
const { rows: insertedActors } = newActors.length
? await knex.raw(`${knex('actors').insert(newActors.map(actorName => ({
name: actorName,
@@ -118,18 +120,25 @@ async function storePhotos(release, releaseEntry) {
const mimetype = mime.getType(pathname);
const res = await bhttp.get(photoUrl);
const filepath = path.join(release.site.slug, releaseEntry.id.toString(), `${index + 1}.${mime.getExtension(mimetype)}`);
await fs.writeFile(path.join(config.photoPath, filepath), res.body);
return {
filepath,
mimetype,
};
if (res.statusCode === 200) {
const filepath = path.join(release.site.slug, releaseEntry.id.toString(), `${index + 1}.${mime.getExtension(mimetype)}`);
await fs.writeFile(path.join(config.photoPath, filepath), res.body);
return {
filepath,
mimetype,
};
}
console.warn(`Failed to store photo ${index + 1} for (${release.site.name}, ${releaseEntry.id}) "${release.title}": ${res.statusCode}`);
return null;
}, {
concurrency: 2,
});
await knex('media').insert(files.map(({ filepath, mimetype }, index) => ({
await knex('media').insert(files.filter(file => file).map(({ filepath, mimetype }, index) => ({
path: filepath,
mime: mimetype,
index,
@@ -144,19 +153,25 @@ async function storePoster(release, releaseEntry) {
const res = await bhttp.get(release.poster);
const { pathname } = new URL(release.poster);
const mimetype = res.headers['content-type'] || mime.getType(pathname) || 'image/jpeg';
if (res.statusCode === 200) {
const { pathname } = new URL(release.poster);
const mimetype = res.headers['content-type'] || mime.getType(pathname) || 'image/jpeg';
const filepath = path.join(release.site.slug, releaseEntry.id.toString(), `poster.${mime.getExtension(mimetype)}`);
await fs.writeFile(path.join(config.photoPath, filepath), res.body);
const filepath = path.join(release.site.slug, releaseEntry.id.toString(), `poster.${mime.getExtension(mimetype)}`);
await fs.writeFile(path.join(config.photoPath, filepath), res.body);
await knex('media').insert({
path: filepath,
mime: mimetype,
domain: 'releases',
target_id: releaseEntry.id,
role: 'poster',
});
await knex('media').insert({
path: filepath,
mime: mimetype,
domain: 'releases',
target_id: releaseEntry.id,
role: 'poster',
});
return;
}
console.warn(`Failed to store poster for (${release.site.name}, ${releaseEntry.id}) "${release.title}": ${res.statusCode}`);
}
async function storeTrailer(release, releaseEntry) {
@@ -183,6 +198,7 @@ async function storeReleases(releases = []) {
return Promise.map(releases, async (release) => {
const curatedRelease = {
site_id: release.site.id,
studio_id: release.studio ? release.studio.id : null,
shoot_id: release.shootId || null,
entry_id: release.entryId || null,
url: release.url,

View File

@@ -41,6 +41,13 @@ async function curateRelease(release) {
name: release.site_name,
url: release.site_url,
},
studio: release.studio_id
? {
id: release.studio_id,
name: release.studio_name,
url: release.studio_url,
}
: null,
network: {
id: release.network_id,
name: release.network_name,
@@ -58,8 +65,13 @@ async function fetchReleases(releaseId) {
const releases = await knex('releases')
.where(releaseId ? { 'releases.id': releaseId } : {})
.select('releases.*', 'sites.name as site_name', 'sites.url as site_url', 'sites.network_id', 'networks.name as network_name', 'networks.url as network_url')
.select(
'releases.*', 'sites.name as site_name', 'sites.url as site_url', 'sites.network_id',
'studios.name as studio_name', 'studios.url as studio_url',
'networks.name as network_name', 'networks.url as network_url',
)
.leftJoin('sites', 'releases.site_id', 'sites.id')
.leftJoin('studios', 'releases.studio_id', 'studios.id')
.leftJoin('networks', 'sites.network_id', 'networks.id')
.orderBy('date', 'desc')
.limit(100);

View File

@@ -4,7 +4,7 @@ const bhttp = require('bhttp');
const cheerio = require('cheerio');
const moment = require('moment');
const knex = require('../knex');
// const knex = require('../knex');
const { matchTags } = require('../tags');
/* eslint-disable newline-per-chained-call */
@@ -18,10 +18,14 @@ function scrapeLatest(html, site) {
const url = `${site.url}${sceneLinkElement.attr('href')}`;
const entryId = url.split('/').slice(-1)[0];
const date = moment.utc($(element).find('.card-footer .text-muted').text(), 'MMMM DD, YYYY').toDate();
const actors = $(element).find('.card-subtitle a').map((actorIndex, actorElement) => $(actorElement).text().trim()).toArray().filter(actor => actor);
const date = moment.utc($(element).find('small[datetime]').attr('datetime'), 'YYYY-MM-DD HH:mm:ss').toDate();
const actors = $(element).find('.card-subtitle a').map((actorIndex, actorElement) => $(actorElement).text().trim())
.toArray()
.filter(actor => actor);
const duration = Number($(element).find('.card-info div:nth-child(2) .card-text').text().slice(0, -4)) * 60;
const duration = parseInt($(element).find('.card-info div:nth-child(2) .card-text').text(), 10) * 60;
const poster = sceneLinkElement.find('img').attr('data-src');
return {
url,
@@ -30,6 +34,7 @@ function scrapeLatest(html, site) {
actors,
date,
duration,
poster,
rating: null,
site,
};
@@ -40,26 +45,39 @@ async function scrapeScene(html, url, site) {
const $ = cheerio.load(html, { normalizeWhitespace: true });
const entryId = url.split('/').slice(-1)[0];
const title = $('.video-title h1').text();
const description = $('.description-box .box-container').text();
const title = $('meta[itemprop="name"]').attr('content');
const description = $('.descr-box p').text(); // meta tags don't contain full description
const date = moment.utc($('.video-title .remain time').text(), 'MMMM DD, YYYY').toDate();
const actors = $('.pornstars-box .pornstar-card .card-title a').map((actorIndex, actorElement) => $(actorElement).text()).toArray();
const date = moment.utc($('meta[itemprop="uploadDate"]').attr('content'), 'YYYY-MM-DD').toDate();
const actors = $('.pornstar-card > a').map((actorIndex, actorElement) => $(actorElement).attr('title')).toArray();
const likes = Number($('.info-panel.likes .likes').text());
const duration = Number($('.info-panel.duration .duration').text().slice(0, -4)) * 60;
const rawTags = $('.tags-tab .tags a').map((tagIndex, tagElement) => $(tagElement).text()).toArray();
const poster = $('#video').attr('poster');
const photos = $('.photo-slider-guest .card a').map((photoIndex, photoElement) => $(photoElement).attr('href')).toArray();
const trailer540 = $('source[res="540"]').attr('src');
const trailer720 = $('source[res="720"]').attr('src');
/*
* broken as of nov 2019
const { origin } = new URL($('.pornstar-card meta[itemprop="url"]').first().attr('content'));
const rawTags = $('#tagsBox .tags a').map((tagIndex, tagElement) => $(tagElement).text()).toArray();
const [channelSite, tags] = await Promise.all([
// don't find site if original is already specific
site.isFallback ? knex('sites').where({ url: origin }).first() : site,
matchTags(rawTags),
]);
*/
const tags = await matchTags(rawTags);
return {
url: channelSite ? `${channelSite.url}${new URL(url).pathname}` : url,
// url: channelSite ? `${channelSite.url}${new URL(url).pathname}` : url,
url,
entryId,
title,
description,
@@ -67,10 +85,23 @@ async function scrapeScene(html, url, site) {
date,
duration,
tags,
poster,
photos,
trailer: trailer540
? {
src: trailer540,
quality: 540,
}
: {
// backup
src: trailer720,
quality: 720,
},
rating: {
likes,
},
site: channelSite || site,
// site: channelSite || site,
site,
};
}

View File

@@ -3,6 +3,7 @@
const bhttp = require('bhttp');
const cheerio = require('cheerio');
const moment = require('moment');
const knex = require('../knex');
const { matchTags } = require('../tags');
@@ -95,8 +96,17 @@ async function scrapeScene(html, url, site, useGallery) {
const trailer = data.clip.qualities.find(clip => clip.quality === 'vga' || clip.quality === 'hd');
const studioName = $('.watchpage-studioname').first().text().trim();
const studioSlug = studioName.replace(/\s+/g, '').toLowerCase();
const rawTags = $(tagsElement).find('a').map((tagIndex, tagElement) => $(tagElement).text()).toArray();
const tags = await matchTags(rawTags);
const [studio, tags] = await Promise.all([
knex('studios')
.where({ name: studioName })
.orWhere({ slug: studioSlug })
.first(),
matchTags(rawTags),
]);
return {
url,
@@ -115,6 +125,7 @@ async function scrapeScene(html, url, site, useGallery) {
},
tags,
site,
studio,
};
}

View File

@@ -3,9 +3,13 @@
const knex = require('./knex');
async function matchTags(rawTags) {
const tags = rawTags
.concat(rawTags.map(tag => tag.toLowerCase()))
.concat(rawTags.map(tag => tag.toUpperCase()));
const tagEntries = await knex('tags')
.pluck('aliases.id')
.whereIn('tags.name', rawTags.map(tag => tag.toLowerCase()))
.whereIn('tags.name', tags)
.where(function where() {
this
.whereNull('tags.alias_for')

View File

@@ -12,7 +12,7 @@ function initServer() {
const app = express();
const router = Router();
router.use(express.static(config.photoPath));
router.use('/media', express.static(config.photoPath));
router.use(express.static('public'));
router.use(bodyParser.json({ strict: false }));