forked from DebaucheryLibrarian/traxxx
Improved update runner. Improved HTTP module API, added default user agent. Added PornCZ and Czechav logos.
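The diff below switches the scraper from calling bhttp directly to a shared `get` helper in `../utils/http`, and reads `res.code` and `res.html` from whatever that helper returns. The helper itself is not part of this excerpt; the following is only a minimal sketch of the shape the scraper code appears to assume (a bhttp-backed GET that applies the default user agent mentioned in the commit message), not the actual implementation.

// utils/http.js, illustrative sketch only; the real module is not shown in this commit.
// Assumes a bhttp-backed GET that applies a default user agent and exposes the
// response as { code, body, html } so scrapers can check res.code and parse res.html.
'use strict';

const bhttp = require('bhttp');

const defaultHeaders = {
  'user-agent': 'traxxx/1.0', // hypothetical default user agent string
};

async function get(url, headers = {}) {
  const res = await bhttp.get(url, {
    headers: { ...defaultHeaders, ...headers },
  });

  return {
    code: res.statusCode,
    body: res.body,
    html: res.body.toString(),
  };
}

module.exports = { get };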
@@ -282,7 +282,7 @@ function scrapeMovie({ el, qu }, url, site) {
   movie.entryId = qu.q('.dvd_details_overview .rating_box').dataset.id;
   movie.title = qu.q('.title_bar span', true);
   movie.covers = qu.urls('#dvd-cover-flip > a');
-  movie.channel = qu.q('.update_date a', true);
+  movie.channel = slugify(qu.q('.update_date a', true), '');
 
   // movie.releases = Array.from(document.querySelectorAll('.cell.dvd_info > a'), el => el.href);
   const sceneQus = ctxa(el, '.dvd_details');
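The slugify call above normalises the channel name with an empty separator, presumably so the resulting slug lines up with asset names such as the PornCZ and Czechav logos added in this commit. The project's real helper is not shown in this excerpt; the stand-in below only illustrates the assumed behaviour.

// Hypothetical stand-in for the project's slugify helper, illustrative only.
// With an empty delimiter the channel name collapses to a compact lowercase slug,
// e.g. slugify('Czech AV', '') === 'czechav'.
function slugify(value, delimiter = '-') {
  return value
    .trim()
    .toLowerCase()
    .replace(/[^a-z0-9]+/g, delimiter);
}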
@@ -1,11 +1,12 @@
 'use strict';
 
 /* eslint-disable newline-per-chained-call */
-const bhttp = require('bhttp');
 const { JSDOM } = require('jsdom');
 const cheerio = require('cheerio');
 const moment = require('moment');
 
+const { get } = require('../utils/http');
+
 const descriptionTags = {
   'anal cream pie': 'anal creampie',
   'ass to mouth': 'ass to mouth',
@@ -85,26 +86,29 @@ async function scrapeLatestA(html, site) {
   }));
 }
 
-async function scrapeLatestB(html, site) {
+async function scrapeLatestB(html) {
   const { document } = new JSDOM(html).window;
   const sceneElements = document.querySelectorAll('.content-border');
 
   return Promise.all(Array.from(sceneElements, async (element) => {
     const $ = cheerio.load(element.innerHTML, { normalizeWhitespace: true });
+    const release = {
+      director: 'Mike Adriano',
+    };
 
     const titleElement = element.querySelector('.content-title-wrap a');
-    const title = titleElement.title || titleElement.textContent.trim();
-    const url = titleElement.href;
-    const entryId = url.split('/').slice(-2)[0];
+    release.title = titleElement.title || titleElement.textContent.trim();
+    release.url = titleElement.href;
+    release.entryId = release.url.split('/').slice(-2)[0];
 
-    const description = element.querySelector('.content-description').textContent.trim();
-    const date = (moment(element.querySelector('.mobile-date').textContent, 'MM/DD/YYYY')
+    release.description = element.querySelector('.content-description').textContent.trim();
+    release.date = (moment(element.querySelector('.mobile-date').textContent, 'MM/DD/YYYY')
       || moment(element.querySelector('.date').textContent, 'Do MMM YYYY')).toDate();
-    const actors = Array.from(element.querySelectorAll('.content-models a'), actorElement => actorElement.textContent);
+    release.actors = Array.from(element.querySelectorAll('.content-models a'), actorElement => actorElement.textContent);
 
     const durationString = element.querySelector('.total-time').textContent.trim();
     // timestamp is somethines 00:00, sometimes 0:00:00
-    const duration = durationString.split(':').length === 3
+    release.duration = durationString.split(':').length === 3
       ? moment.duration(durationString).asSeconds()
       : moment.duration(`00:${durationString}`).asSeconds();
 
@@ -114,65 +118,44 @@ async function scrapeLatestB(html, site) {
       .toArray()
       .map(photoUrl => photoUrl.slice(photoUrl.indexOf('http'), photoUrl.indexOf('.jpg') + 4));
 
-    const photos = [...primaryPhotos, ...secondaryPhotos];
-    const tags = deriveTagsFromDescription(description);
+    release.poster = poster;
+    release.photos = [...primaryPhotos, ...secondaryPhotos];
 
-    return {
-      url,
-      entryId,
-      title,
-      description,
-      actors,
-      director: 'Mike Adriano',
-      date,
-      duration,
-      tags,
-      poster,
-      photos,
-      site,
-    };
+    release.tags = deriveTagsFromDescription(release.description);
+    return release;
   }));
 }
 
-async function scrapeSceneA(html, url, site) {
+async function scrapeSceneA(html, url) {
   const { document } = new JSDOM(html).window;
   const element = document.querySelector('.content-page-info');
+  const release = {
+    url,
+    director: 'Mike Adriano',
+  };
 
-  const entryId = url.split('/').slice(-2)[0];
-  const title = element.querySelector('.title').textContent.trim();
-  const description = element.querySelector('.desc').textContent.trim();
-  const date = moment(element.querySelector('.post-date').textContent.trim(), 'Do MMM YYYY').toDate();
+  release.entryId = url.split('/').slice(-2)[0];
+  release.title = element.querySelector('.title').textContent.trim();
+  release.description = element.querySelector('.desc').textContent.trim();
+  release.date = moment(element.querySelector('.post-date').textContent.trim(), 'Do MMM YYYY').toDate();
 
-  const actors = Array.from(element.querySelectorAll('.models a'), actorElement => actorElement.textContent);
+  release.actors = Array.from(element.querySelectorAll('.models a'), actorElement => actorElement.textContent);
 
   const durationString = element.querySelector('.total-time').textContent.trim();
   // timestamp is sometimes 00:00, sometimes 0:00:00
-  const duration = durationString.split(':').length === 3
+  release.duration = durationString.split(':').length === 3
     ? moment.duration(durationString).asSeconds()
     : moment.duration(`00:${durationString}`).asSeconds();
 
   const { poster } = document.querySelector('.content-page-header video');
   const { src, type } = document.querySelector('.content-page-header source');
 
-  const tags = deriveTagsFromDescription(description);
+  release.poster = poster;
+  release.trailer = { src, type };
 
-  return {
-    url,
-    entryId,
-    title,
-    description,
-    actors,
-    director: 'Mike Adriano',
-    date,
-    duration,
-    tags,
-    poster,
-    trailer: {
-      src,
-      type,
-    },
-    site,
-  };
+  release.tags = deriveTagsFromDescription(release.description);
+
+  return release;
 }
 
 async function scrapeSceneB(html, url, site) {
@@ -220,25 +203,34 @@ async function scrapeSceneB(html, url, site) {
 
 async function fetchLatest(site, page = 1) {
   const { host } = new URL(site.url);
+  const url = `https://tour.${host}/videos?page=${page}`;
 
-  const res = await bhttp.get(`https://tour.${host}/videos?page=${page}`);
+  const res = await get(url);
 
-  if (host === 'trueanal.com' || host === 'swallowed.com') {
-    return scrapeLatestA(res.body.toString(), site);
+  if (res.code === 200) {
+    if (host === 'trueanal.com' || host === 'swallowed.com') {
+      return scrapeLatestA(res.html, site);
+    }
+
+    return scrapeLatestB(res.html, site);
   }
 
-  return scrapeLatestB(res.body.toString(), site);
+  return res.code;
 }
 
 async function fetchScene(url, site) {
   const { host } = new URL(site.url);
-  const res = await bhttp.get(url);
+  const res = await get(url);
 
-  if (host === 'trueanal.com' || host === 'swallowed.com') {
-    return scrapeSceneA(res.body.toString(), url, site);
+  if (res.code === 200) {
+    if (host === 'trueanal.com' || host === 'swallowed.com') {
+      return scrapeSceneA(res.body.toString(), url, site);
+    }
+
+    return scrapeSceneB(res.body.toString(), url, site);
   }
 
-  return scrapeSceneB(res.body.toString(), url, site);
+  return res.code;
 }
 
 module.exports = {
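With this change, fetchLatest and fetchScene resolve to scraped releases when the response code is 200 and to the bare HTTP status code otherwise. The improved update runner mentioned in the commit message is not included in this excerpt; the sketch below is only a hypothetical illustration of how a caller could tell the two outcomes apart.

// Hypothetical caller, not part of this commit: handles the new return value,
// which is an array of releases on success and a numeric HTTP status code otherwise.
async function fetchSiteLatest(scraper, site) {
  const result = await scraper.fetchLatest(site, 1);

  if (typeof result === 'number') {
    console.warn(`Scraping ${site.url} failed with HTTP ${result}`);
    return [];
  }

  return result;
}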