Added Insex. Renamed q's stand-alone date function. Separated q's trim function. Release tile uses the cover when it is available and the poster is not.

This commit is contained in:
2020-02-12 04:39:57 +01:00
parent 2f894edda5
commit b8074205ef
21 changed files with 212 additions and 23 deletions

View File

@@ -2,7 +2,7 @@
const bhttp = require('bhttp');
const { d, ex, exa, get } = require('../utils/q');
const { fd, ex, exa, get } = require('../utils/q');
const slugify = require('../utils/slugify');
/* eslint-disable newline-per-chained-call */
@@ -97,7 +97,7 @@ async function scrapeProfile(html, _url, actorName) {
};
profile.description = q('.description-box', true);
profile.birthdate = d(bio.birthday, 'MMMM DD, YYYY');
profile.birthdate = fd(bio.birthday, 'MMMM DD, YYYY');
if (bio.nationality) profile.nationality = bio.nationality;

100
src/scrapers/insex.js Normal file
View File

@@ -0,0 +1,100 @@
'use strict';
const bhttp = require('bhttp');
const { get, exa, fd } = require('../utils/q');
/**
 * Scrapes the latest-scenes overview page.
 * @param {string} html - Raw HTML of the tour/gallery listing.
 * @param {Object} site - Site descriptor; site.url prefixes relative links.
 * @returns {Object[]} One release object per scene table.
 */
function scrapeLatest(html, site) {
    const scenes = exa(html, 'body > table');

    return scenes.map(({ q, qd, qi, qu, ql }) => {
        const release = {};

        const titleEl = q('.galleryTitleText, .articleTitleText');
        // Title cell is "Title | Actor | Actor ..."
        const [title, ...actors] = titleEl.textContent.split('|');
        const date = qd('.articlePostDateText', 'MMM D, YYYY');
        const url = qu(titleEl, 'a');

        // Second-to-last path segment is the scene's unique ID.
        [release.entryId] = url.split('/').slice(-2);
        release.url = `${site.url}${url}`;

        if (date) {
            release.title = title.trim();
            release.date = date;
        } else {
            // No separate date element; title is formatted "MMM D, YYYY: Title".
            release.title = title.slice(title.indexOf(':') + 1).trim();
            release.date = fd(title.slice(0, title.indexOf(':')), 'MMM D, YYYY');
        }

        release.actors = actors.map(actor => actor.trim());

        const description = q('.articleCopyText .articleCopyText', true);
        if (description) release.description = description;

        const duration = ql('.articleCopyText a:nth-child(2)');
        if (duration) release.duration = duration;

        // Guard like description/duration above: calling .replace on a null
        // qi() result would throw for entries without a thumbnail.
        const poster = qi('a img');
        if (poster) {
            release.poster = [
                poster.replace('_thumbnail', ''), // prefer full-size variant
                poster,
            ];
        }

        return release;
    });
}
/**
 * Scrapes a single scene page.
 * @param {Object} ctx - q-helper context for the fetched document
 *   (q, qd, ql, qu, qis, qp, qt accessors).
 * @param {Object} site - Site descriptor; site.url prefixes relative links.
 * @returns {Object} Release object for the scene.
 */
function scrapeScene({ q, qd, ql, qu, qis, qp, qt }, site) {
    const release = {};

    const titleEl = q('.articleTitleText');
    // Title cell is "Title | Actor | Actor ..."
    const [title, ...actors] = titleEl.textContent.split('|');
    const url = qu(titleEl, 'a');

    // Second-to-last path segment is the scene's unique ID.
    [release.entryId] = url.split('/').slice(-2);
    release.url = `${site.url}${url}`;

    release.title = title.trim();
    release.description = q('.articleCopyText', true);
    release.actors = actors.map(actor => actor.trim());

    release.date = qd('.articlePostDateText', 'MMMM D, YYYY');
    release.duration = ql('.articlePostDateText a:nth-child(2)');

    // First content image is the cover; the rest are gallery photos.
    const [cover, ...photos] = qis('img[src*="images"]');
    release.covers = [cover];
    release.photos = photos;
    release.poster = qp();

    // Only attach a trailer when one exists; previously a bogus
    // { src: null } object was stored for scenes without trailers.
    const trailer = qt();
    if (trailer) release.trailer = { src: trailer };

    return release;
}
/**
 * Fetches and scrapes one page of the latest-scenes listing.
 * @param {Object} site - Site descriptor; site.url is the base URL.
 * @param {number} [page=1] - 1-based listing page number.
 * @returns {Promise<Object[]|null>} Releases, or null on a non-200 response.
 */
async function fetchLatest(site, page = 1) {
    // Listing pages are served by a tour-switch script keyed by page number.
    const url = `${site.url}/scripts/switch_tour.php?page=${page}`;

    // NOTE(review): bhttp.get's second argument is its options object — these
    // look like intended query parameters; confirm bhttp honors them here.
    const res = await bhttp.get(url, {
        type: 'gallery',
        page,
    });

    if (res.statusCode !== 200) return null;

    return scrapeLatest(res.body.html, site);
}
/**
 * Fetches a scene page and scrapes it.
 * @param {string} url - Absolute scene URL.
 * @param {Object} site - Site descriptor passed through to the scraper.
 * @returns {Promise<Object|*>} Release object, or the falsy fetch result
 *   itself when the page could not be retrieved.
 */
async function fetchScene(url, site) {
    const qScene = await get(url);

    // Propagate the falsy value from a failed fetch unchanged.
    if (!qScene) return qScene;

    return scrapeScene(qScene, site);
}
// Public scraper interface consumed by the scraper registry.
module.exports = {
    fetchLatest,
    fetchScene,
};

View File

@@ -19,6 +19,7 @@ const freeones = require('./freeones');
const freeonesLegacy = require('./freeones_legacy');
const girlsway = require('./girlsway');
const iconmale = require('./iconmale');
const insex = require('./insex');
const jayrock = require('./jayrock');
const julesjordan = require('./julesjordan');
const kellymadison = require('./kellymadison');
@@ -73,6 +74,7 @@ module.exports = {
fakehub,
fantasymassage,
girlsway,
insex,
jayrock,
julesjordan,
kellymadison,

View File

@@ -3,7 +3,7 @@
/* eslint-disable no-unused-vars */
const bhttp = require('bhttp');
const { get, date } = require('../utils/q');
const { get, fd } = require('../utils/q');
const { fetchApiLatest, fetchApiUpcoming, fetchScene, fetchApiProfile } = require('./gamma');
const slugify = require('../utils/slugify');
@@ -15,7 +15,7 @@ function scrapeLatestNative(scenes, site) {
release.url = `${site.url}${scene.url}`;
release.title = scene.name;
release.date = date(scene.release_date, 'YYYY-MM-DD');
release.date = fd(scene.release_date, 'YYYY-MM-DD');
release.duration = parseInt(scene.runtime, 10) * 60;
release.actors = scene.cast?.map(actor => ({
@@ -40,7 +40,7 @@ function scrapeSceneNative({ html, q, qa }, url, _site) {
release.description = q('.indie-model-p', true);
const dateString = qa('h5').find(el => /Released/.test(el.textContent)).textContent;
release.date = date(dateString, 'MMM DD, YYYY', /\w+ \d{1,2}, \d{4}/);
release.date = fd(dateString, 'MMM DD, YYYY', /\w+ \d{1,2}, \d{4}/);
const duration = qa('h5').find(el => /Runtime/.test(el.textContent)).textContent;
const [hours, minutes] = duration.match(/\d+/g);
@@ -118,7 +118,7 @@ async function fetchSceneWrapper(url, site, release) {
return {
...scene,
url: `${site.url}${sceneMatch.url}`,
date: date(sceneMatch.release_date, 'YYYY-MM-DD'),
date: fd(sceneMatch.release_date, 'YYYY-MM-DD'),
};
}
}