forked from DebaucheryLibrarian/traxxx
Added upcoming support to Nubiles. Renamed q's formatDate to extractDate, added actual formatDate.
This commit is contained in:
parent bbf06a3882
commit 1ff8d37d89
@@ -126,4 +126,10 @@ export default {
     height: 18rem;
     vertical-align: middle;
 }
+
+@media(max-width: $breakpoint2) {
+    .trailer-video {
+        object-fit: contain;
+    }
+}
 </style>

@@ -2862,6 +2862,9 @@ const sites = [
         name: 'Anilos',
         url: 'https://www.anilos.com',
         network: 'nubiles',
+        parameters: {
+            upcoming: true,
+        },
     },
     {
         slug: 'brattysis',

@@ -2869,6 +2872,9 @@ const sites = [
         url: 'https://www.brattysis.com',
         tags: ['family'],
         network: 'nubiles',
+        parameters: {
+            upcoming: true,
+        },
     },
     {
         slug: 'deeplush',

@@ -2888,18 +2894,27 @@ const sites = [
         url: 'https://www.nfbusty.com',
         tags: ['big-boobs'],
         network: 'nubiles',
+        parameters: {
+            upcoming: true,
+        },
     },
     {
         slug: 'nubilefilms',
         name: 'Nubile Films',
         url: 'https://www.nubilefilms.com',
         network: 'nubiles',
+        parameters: {
+            upcoming: true,
+        },
     },
     {
         slug: 'nubiles',
         name: 'Nubiles',
         url: 'https://www.nubiles.net',
         network: 'nubiles',
+        parameters: {
+            upcoming: true,
+        },
     },
     {
         slug: 'nubilescasting',

@@ -2914,12 +2929,18 @@ const sites = [
         url: 'https://www.momsteachsex.com',
         tags: ['family', 'milf'],
         network: 'nubiles',
+        parameters: {
+            upcoming: true,
+        },
     },
     {
         slug: 'petitehdporn',
         name: 'Petite HD Porn',
         url: 'https://www.petitehdporn.com',
         network: 'nubiles',
+        parameters: {
+            upcoming: true,
+        },
     },
     {
         slug: 'driverxxx',

@@ -2946,6 +2967,9 @@ const sites = [
         url: 'https://www.stepsiblingscaught.com',
         tags: ['family'],
         network: 'nubiles',
+        parameters: {
+            upcoming: true,
+        },
     },
     {
         slug: 'princesscum',

@@ -2984,6 +3008,9 @@ const sites = [
         url: 'https://www.myfamilypies.com',
         tags: ['family'],
         network: 'nubiles',
+        parameters: {
+            upcoming: true,
+        },
     },
     {
         slug: 'nubileset',

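The `parameters` block added to each entry is free-form, per-site configuration that the network's scraper reads directly; only sites that opt in get an upcoming fetch. A minimal sketch of that gate, assuming a `site` object shaped like the entries above (the actual check added to the Nubiles scraper appears further down in this diff):

    // Hypothetical site entry, shaped like the config entries above
    const site = {
        slug: 'anilos',
        url: 'https://www.anilos.com',
        network: 'nubiles',
        parameters: { upcoming: true },
    };

    // Scrapers gate optional behaviour on the per-site parameters
    if (site.parameters?.upcoming) {
        // fetch `${site.url}/video/upcoming` here, as fetchUpcoming does below
    }
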
@@ -1,6 +1,6 @@
 'use strict';

-const { get, geta, ctxa, fd } = require('../utils/q');
+const { get, geta, ctxa, ed } = require('../utils/q');
 const slugify = require('../utils/slugify');

 function scrapeAll(scenes, site) {

@@ -67,7 +67,7 @@ function scrapeProfile({ q, qa, qtx }) {
     if (bio.weight) profile.weight = Number(bio.weight.match(/\((\d+)kg\)/)[1]);
     if (bio.race) profile.ethnicity = bio.race;

-    if (bio.date_of_birth) profile.birthdate = fd(bio.date_of_birth, 'MMMM D, YYYY');
+    if (bio.date_of_birth) profile.birthdate = ed(bio.date_of_birth, 'MMMM D, YYYY');
     if (bio.birthplace) profile.birthPlace = bio.birthplace;

     if (bio.measurements) {

@@ -2,7 +2,7 @@

 const bhttp = require('bhttp');

-const { fd, ex, exa, get } = require('../utils/q');
+const { ed, ex, exa, get } = require('../utils/q');
 const slugify = require('../utils/slugify');

 /* eslint-disable newline-per-chained-call */

@@ -97,7 +97,7 @@ async function scrapeProfile(html, _url, actorName) {
     };

     profile.description = q('.description-box', true);
-    profile.birthdate = fd(bio.birthday, 'MMMM DD, YYYY');
+    profile.birthdate = ed(bio.birthday, 'MMMM DD, YYYY');

     if (bio.nationality) profile.nationality = bio.nationality;

@@ -1,7 +1,7 @@
 'use strict';

 const bhttp = require('bhttp');
-const { get, exa, fd } = require('../utils/q');
+const { get, exa, ed } = require('../utils/q');

 function scrapeLatest(html, site) {
     const scenes = site.slug === 'paintoy'

@@ -26,7 +26,7 @@ function scrapeLatest(html, site) {
     } else {
         // title should contain date instead, not applicable in brief mode
         release.title = title.slice(title.indexOf(':') + 1).trim();
-        release.date = fd(title.slice(0, title.indexOf(':')), 'MMM D, YYYY');
+        release.date = ed(title.slice(0, title.indexOf(':')), 'MMM D, YYYY');
     }

     release.actors = actors.map(actor => actor.trim());

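In brief mode the scraped title carries the release date as a prefix before the first colon, so the scraper splits the string rather than reading a separate date element. A standalone sketch of that split with a hypothetical title string, using the renamed `ed` helper:

    const { ed } = require('../utils/q');

    // Hypothetical brief-mode title: "MMM D, YYYY: actual title"
    const title = 'Apr 3, 2020: Example scene title';

    const date = ed(title.slice(0, title.indexOf(':')), 'MMM D, YYYY'); // Date object
    const name = title.slice(title.indexOf(':') + 1).trim(); // 'Example scene title'
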
@@ -32,12 +32,14 @@ function scrapeAll(scenes, site, origin) {

         if (channelUrl) release.url = `${channelUrl}${pathname}`;
         else release.url = url;
-    } else {
+    } else if (!/\/join/.test(url)) {
         release.entryId = url.split('/')[3];

         if (channelUrl) release.url = `${channelUrl}${url}`;
         else if (site?.url) release.url = `${site.url}${url}`;
         else if (origin) release.url = `${origin}${url}`;
+    } else {
+        release.entryId = q('a img', 'tube_tour_thumb_id');
     }

     release.date = qd('.date', 'MMM D, YYYY');

@@ -117,6 +119,17 @@ async function fetchLatest(site, page = 1) {
     return qLatest && scrapeAll(qLatest, site);
 }

+async function fetchUpcoming(site) {
+    if (site.parameters?.upcoming) {
+        const url = `${site.url}/video/upcoming`;
+        const qUpcoming = await geta(url, '.content-grid-item');
+
+        return qUpcoming && scrapeAll(qUpcoming, site);
+    }
+
+    return [];
+}
+
 async function fetchScene(url, site) {
     const qScene = await get(url);

@@ -144,6 +157,7 @@ async function fetchProfile(actorName, siteSlug) {

 module.exports = {
     fetchLatest,
+    fetchUpcoming,
     fetchScene,
     fetchProfile,
 };

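`fetchUpcoming` falls back to an empty array for sites without `parameters.upcoming`, so callers can treat it much like `fetchLatest`. A hypothetical caller (the require path is illustrative, not taken from this diff):

    const scraper = require('./nubiles'); // hypothetical path to this scraper module

    async function getUpcoming(site) {
        const releases = await scraper.fetchUpcoming(site);

        // [] when the site has not opted in; null-ish if the upcoming grid was empty
        return releases || [];
    }
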
@@ -3,7 +3,7 @@
 /* eslint-disable no-unused-vars */
 const bhttp = require('bhttp');

-const { get, fd } = require('../utils/q');
+const { get, ed } = require('../utils/q');
 const { fetchApiLatest, fetchApiUpcoming, fetchScene, fetchApiProfile } = require('./gamma');
 const slugify = require('../utils/slugify');

@@ -15,7 +15,7 @@ function scrapeLatestNative(scenes, site) {
         release.url = `${site.url}${scene.url}`;

         release.title = scene.name;
-        release.date = fd(scene.release_date, 'YYYY-MM-DD');
+        release.date = ed(scene.release_date, 'YYYY-MM-DD');
         release.duration = parseInt(scene.runtime, 10) * 60;

         release.actors = scene.cast?.map(actor => ({

@@ -40,7 +40,7 @@ function scrapeSceneNative({ html, q, qa }, url, _site) {
     release.description = q('.indie-model-p', true);

     const dateString = qa('h5').find(el => /Released/.test(el.textContent)).textContent;
-    release.date = fd(dateString, 'MMM DD, YYYY', /\w+ \d{1,2}, \d{4}/);
+    release.date = ed(dateString, 'MMM DD, YYYY', /\w+ \d{1,2}, \d{4}/);

     const duration = qa('h5').find(el => /Runtime/.test(el.textContent)).textContent;
     const [hours, minutes] = duration.match(/\d+/g);

@@ -118,7 +118,7 @@ async function fetchSceneWrapper(url, site, release) {
             return {
                 ...scene,
                 url: `${site.url}${sceneMatch.url}`,
-                date: fd(sceneMatch.release_date, 'YYYY-MM-DD'),
+                date: ed(sceneMatch.release_date, 'YYYY-MM-DD'),
             };
         }
     }

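The third argument to `ed` (extractDate) is a regex that isolates the date stamp inside a longer string, which is what the "Released" heading needs here. A standalone sketch with a hypothetical heading text:

    const { ed } = require('../utils/q');

    // Hypothetical heading text as scraped from an <h5>
    const dateString = 'Released: Mar 05, 2020';

    // The regex pulls "Mar 05, 2020" out of the string; the format string then parses it
    const date = ed(dateString, 'MMM DD, YYYY', /\w+ \d{1,2}, \d{4}/);
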
@@ -8,6 +8,23 @@ function trim(str) {
     return str.trim().replace(/\s+/g, ' ');
 }

+function extractDate(dateString, format, match) {
+    if (match) {
+        const dateStamp = trim(dateString).match(match);
+
+        if (dateStamp) return moment.utc(dateStamp[0], format).toDate();
+        return null;
+    }
+
+    return moment.utc(trim(dateString), format).toDate();
+}
+
+function formatDate(date, format, inputFormat) {
+    if (inputFormat) return moment(date, inputFormat).format(format);
+
+    return moment(date).format(format);
+}
+
 function prefixProtocol(url, protocol = 'https') {
     if (protocol && /^\/\//.test(url)) {
         return `${protocol}:${url}`;

@@ -61,23 +78,12 @@ function qmeta(context, selector, attrArg = 'content', applyTrim = true) {
     return q(context, `meta[${selector}]`, attrArg, applyTrim);
 }

-function formatDate(dateString, format, match) {
-    if (match) {
-        const dateStamp = trim(dateString).match(match);
-
-        if (dateStamp) return moment.utc(dateStamp[0], format).toDate();
-        return null;
-    }
-
-    return moment.utc(trim(dateString), format).toDate();
-}
-
 function qdate(context, selector, format, match, attr = 'textContent') {
     const dateString = q(context, selector, attr, true);

     if (!dateString) return null;

-    return formatDate(dateString, format, match);
+    return extractDate(dateString, format, match);
 }

 function qimage(context, selector = 'img', attr = 'src', protocol = 'https') {

@@ -234,21 +240,24 @@ async function getAll(url, selector, headers) {
 }

 module.exports = {
-    formatDate,
+    extractDate,
     extract,
     extractAll,
     init,
     initAll,
+    formatDate,
     get,
     getAll,
     context: init,
     contextAll: initAll,
-    fd: formatDate,
+    ed: extractDate,
     ex: extract,
     exa: extractAll,
+    fd: formatDate,
     ctx: init,
     ctxa: initAll,
     geta: getAll,
+    edate: extractDate,
     fdate: formatDate,
     ...funcs,
 };

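After the rename the two exports no longer overlap: `ed`/`extractDate` parses a scraped string into a `Date`, while `fd`/`formatDate` turns a date back into a display string. A minimal usage sketch against the new exports (require path assumed relative to the utils directory):

    const { ed, fd } = require('./q');

    const date = ed('March 5, 2020', 'MMMM D, YYYY'); // string -> Date (parsed as UTC by moment.utc)
    const label = fd(date, 'YYYY-MM-DD'); // Date -> display string via moment(date).format()

    // formatDate can also reparse a string first via its inputFormat argument
    const label2 = fd('05/03/2020', 'YYYY-MM-DD', 'DD/MM/YYYY'); // -> '2020-03-05'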