Added upcoming support to Nubiles. Renamed q's formatDate to extractDate, added actual formatDate.

parent bbf06a3882 · commit 1ff8d37d89
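
In short: the q utility module's old formatDate actually parsed date strings into Date objects; it is renamed to extractDate (alias ed), and a real formatDate (alias fd) that renders dates as strings takes its place. A minimal sketch of the resulting surface, using the aliases from the hunks below:

const { ed, fd } = require('../utils/q'); // ed = extractDate, fd = formatDate

const date = ed('Jan 5, 2020', 'MMM D, YYYY'); // string -> Date (parsed as UTC)
const text = fd(date, 'MMM D, YYYY');          // Date -> formatted string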

@@ -126,4 +126,10 @@ export default {
   height: 18rem;
   vertical-align: middle;
 }
+
+@media(max-width: $breakpoint2) {
+  .trailer-video {
+    object-fit: contain;
+  }
+}
 </style>

@@ -2862,6 +2862,9 @@ const sites = [
     name: 'Anilos',
     url: 'https://www.anilos.com',
     network: 'nubiles',
+    parameters: {
+      upcoming: true,
+    },
   },
   {
     slug: 'brattysis',

@@ -2869,6 +2872,9 @@ const sites = [
     url: 'https://www.brattysis.com',
     tags: ['family'],
     network: 'nubiles',
+    parameters: {
+      upcoming: true,
+    },
   },
   {
     slug: 'deeplush',

@@ -2888,18 +2894,27 @@ const sites = [
     url: 'https://www.nfbusty.com',
     tags: ['big-boobs'],
     network: 'nubiles',
+    parameters: {
+      upcoming: true,
+    },
   },
   {
     slug: 'nubilefilms',
     name: 'Nubile Films',
     url: 'https://www.nubilefilms.com',
     network: 'nubiles',
+    parameters: {
+      upcoming: true,
+    },
   },
   {
     slug: 'nubiles',
     name: 'Nubiles',
     url: 'https://www.nubiles.net',
     network: 'nubiles',
+    parameters: {
+      upcoming: true,
+    },
   },
   {
     slug: 'nubilescasting',

@@ -2914,12 +2929,18 @@ const sites = [
     url: 'https://www.momsteachsex.com',
     tags: ['family', 'milf'],
     network: 'nubiles',
+    parameters: {
+      upcoming: true,
+    },
   },
   {
     slug: 'petitehdporn',
     name: 'Petite HD Porn',
     url: 'https://www.petitehdporn.com',
     network: 'nubiles',
+    parameters: {
+      upcoming: true,
+    },
   },
   {
     slug: 'driverxxx',

@@ -2946,6 +2967,9 @@ const sites = [
     url: 'https://www.stepsiblingscaught.com',
     tags: ['family'],
     network: 'nubiles',
+    parameters: {
+      upcoming: true,
+    },
   },
   {
     slug: 'princesscum',

@@ -2984,6 +3008,9 @@ const sites = [
     url: 'https://www.myfamilypies.com',
     tags: ['family'],
     network: 'nubiles',
+    parameters: {
+      upcoming: true,
+    },
   },
   {
     slug: 'nubileset',
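
The parameters blocks above are plain per-site configuration; the Nubiles scraper further down reads site.parameters?.upcoming before requesting the upcoming endpoint. A hypothetical one-liner (not part of this commit) to list the sites that opt in:

// Hypothetical helper: collect the slugs of sites advertising upcoming support.
const upcomingSlugs = sites
  .filter(site => site.parameters?.upcoming)
  .map(site => site.slug);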

@@ -1,6 +1,6 @@
 'use strict';
 
-const { get, geta, ctxa, fd } = require('../utils/q');
+const { get, geta, ctxa, ed } = require('../utils/q');
 const slugify = require('../utils/slugify');
 
 function scrapeAll(scenes, site) {

@@ -67,7 +67,7 @@ function scrapeProfile({ q, qa, qtx }) {
   if (bio.weight) profile.weight = Number(bio.weight.match(/\((\d+)kg\)/)[1]);
   if (bio.race) profile.ethnicity = bio.race;
 
-  if (bio.date_of_birth) profile.birthdate = fd(bio.date_of_birth, 'MMMM D, YYYY');
+  if (bio.date_of_birth) profile.birthdate = ed(bio.date_of_birth, 'MMMM D, YYYY');
   if (bio.birthplace) profile.birthPlace = bio.birthplace;
 
   if (bio.measurements) {

@@ -2,7 +2,7 @@
 
 const bhttp = require('bhttp');
 
-const { fd, ex, exa, get } = require('../utils/q');
+const { ed, ex, exa, get } = require('../utils/q');
 const slugify = require('../utils/slugify');
 
 /* eslint-disable newline-per-chained-call */

@@ -97,7 +97,7 @@ async function scrapeProfile(html, _url, actorName) {
   };
 
   profile.description = q('.description-box', true);
-  profile.birthdate = fd(bio.birthday, 'MMMM DD, YYYY');
+  profile.birthdate = ed(bio.birthday, 'MMMM DD, YYYY');
 
   if (bio.nationality) profile.nationality = bio.nationality;
 

@@ -1,7 +1,7 @@
 'use strict';
 
 const bhttp = require('bhttp');
-const { get, exa, fd } = require('../utils/q');
+const { get, exa, ed } = require('../utils/q');
 
 function scrapeLatest(html, site) {
   const scenes = site.slug === 'paintoy'

@@ -26,7 +26,7 @@ function scrapeLatest(html, site) {
   } else {
     // title should contain date instead, not applicable in brief mode
     release.title = title.slice(title.indexOf(':') + 1).trim();
-    release.date = fd(title.slice(0, title.indexOf(':')), 'MMM D, YYYY');
+    release.date = ed(title.slice(0, title.indexOf(':')), 'MMM D, YYYY');
   }
 
   release.actors = actors.map(actor => actor.trim());

@@ -32,12 +32,14 @@ function scrapeAll(scenes, site, origin) {
 
     if (channelUrl) release.url = `${channelUrl}${pathname}`;
     else release.url = url;
-  } else {
+  } else if (!/\/join/.test(url)) {
     release.entryId = url.split('/')[3];
 
     if (channelUrl) release.url = `${channelUrl}${url}`;
     else if (site?.url) release.url = `${site.url}${url}`;
     else if (origin) release.url = `${origin}${url}`;
+  } else {
+    release.entryId = q('a img', 'tube_tour_thumb_id');
   }
 
   release.date = qd('.date', 'MMM D, YYYY');

@@ -117,6 +119,17 @@ async function fetchLatest(site, page = 1) {
   return qLatest && scrapeAll(qLatest, site);
 }
 
+async function fetchUpcoming(site) {
+  if (site.parameters?.upcoming) {
+    const url = `${site.url}/video/upcoming`;
+    const qUpcoming = await geta(url, '.content-grid-item');
+
+    return qUpcoming && scrapeAll(qUpcoming, site);
+  }
+
+  return [];
+}
+
 async function fetchScene(url, site) {
   const qScene = await get(url);
 

@@ -144,6 +157,7 @@ async function fetchProfile(actorName, siteSlug) {
 
 module.exports = {
   fetchLatest,
+  fetchUpcoming,
   fetchScene,
   fetchProfile,
 };
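
A hedged usage sketch for the new fetchUpcoming export; the require path and site object are assumptions, limited to the fields the function actually reads:

const { fetchUpcoming } = require('./nubiles'); // module path is an assumption

async function demo() {
  // Hypothetical site object mirroring the config entries above.
  const site = {
    slug: 'nubilefilms',
    url: 'https://www.nubilefilms.com',
    parameters: { upcoming: true },
  };

  // Requests `${site.url}/video/upcoming` and scrapes each '.content-grid-item';
  // sites without the upcoming flag short-circuit to [].
  return fetchUpcoming(site);
}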

@@ -3,7 +3,7 @@
 /* eslint-disable no-unused-vars */
 const bhttp = require('bhttp');
 
-const { get, fd } = require('../utils/q');
+const { get, ed } = require('../utils/q');
 const { fetchApiLatest, fetchApiUpcoming, fetchScene, fetchApiProfile } = require('./gamma');
 const slugify = require('../utils/slugify');
 

@@ -15,7 +15,7 @@ function scrapeLatestNative(scenes, site) {
     release.url = `${site.url}${scene.url}`;
 
     release.title = scene.name;
-    release.date = fd(scene.release_date, 'YYYY-MM-DD');
+    release.date = ed(scene.release_date, 'YYYY-MM-DD');
     release.duration = parseInt(scene.runtime, 10) * 60;
 
     release.actors = scene.cast?.map(actor => ({

@@ -40,7 +40,7 @@ function scrapeSceneNative({ html, q, qa }, url, _site) {
   release.description = q('.indie-model-p', true);
 
   const dateString = qa('h5').find(el => /Released/.test(el.textContent)).textContent;
-  release.date = fd(dateString, 'MMM DD, YYYY', /\w+ \d{1,2}, \d{4}/);
+  release.date = ed(dateString, 'MMM DD, YYYY', /\w+ \d{1,2}, \d{4}/);
 
   const duration = qa('h5').find(el => /Runtime/.test(el.textContent)).textContent;
   const [hours, minutes] = duration.match(/\d+/g);

@@ -118,7 +118,7 @@ async function fetchSceneWrapper(url, site, release) {
     return {
       ...scene,
       url: `${site.url}${sceneMatch.url}`,
-      date: fd(sceneMatch.release_date, 'YYYY-MM-DD'),
+      date: ed(sceneMatch.release_date, 'YYYY-MM-DD'),
     };
   }
 }

@@ -8,6 +8,23 @@ function trim(str) {
   return str.trim().replace(/\s+/g, ' ');
 }
 
+function extractDate(dateString, format, match) {
+  if (match) {
+    const dateStamp = trim(dateString).match(match);
+
+    if (dateStamp) return moment.utc(dateStamp[0], format).toDate();
+    return null;
+  }
+
+  return moment.utc(trim(dateString), format).toDate();
+}
+
+function formatDate(date, format, inputFormat) {
+  if (inputFormat) return moment(date, inputFormat).format(format);
+
+  return moment(date).format(format);
+}
+
 function prefixProtocol(url, protocol = 'https') {
   if (protocol && /^\/\//.test(url)) {
     return `${protocol}:${url}`;

@@ -61,23 +78,12 @@ function qmeta(context, selector, attrArg = 'content', applyTrim = true) {
   return q(context, `meta[${selector}]`, attrArg, applyTrim);
 }
 
-function formatDate(dateString, format, match) {
-  if (match) {
-    const dateStamp = trim(dateString).match(match);
-
-    if (dateStamp) return moment.utc(dateStamp[0], format).toDate();
-    return null;
-  }
-
-  return moment.utc(trim(dateString), format).toDate();
-}
-
 function qdate(context, selector, format, match, attr = 'textContent') {
   const dateString = q(context, selector, attr, true);
 
   if (!dateString) return null;
 
-  return formatDate(dateString, format, match);
+  return extractDate(dateString, format, match);
 }
 
 function qimage(context, selector = 'img', attr = 'src', protocol = 'https') {

@@ -234,21 +240,24 @@ async function getAll(url, selector, headers) {
 }
 
 module.exports = {
-  formatDate,
+  extractDate,
   extract,
   extractAll,
   init,
   initAll,
+  formatDate,
   get,
   getAll,
   context: init,
   contextAll: initAll,
-  fd: formatDate,
+  ed: extractDate,
   ex: extract,
   exa: extractAll,
+  fd: formatDate,
   ctx: init,
   ctxa: initAll,
   geta: getAll,
+  edate: extractDate,
   fdate: formatDate,
   ...funcs,
 };
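
To illustrate the split, a short sketch against the two helpers as defined above; the sample strings are invented, the formats come from the scrapers in this commit:

const { extractDate, formatDate } = require('../utils/q');

// extractDate can pull a date out of surrounding text via the optional match
// pattern, returning a UTC Date, or null when the pattern finds nothing.
extractDate('Released: Jan 5, 2020 in 4K', 'MMM D, YYYY', /\w+ \d{1,2}, \d{4}/);

// formatDate's third parameter re-parses a string before formatting, so it
// also converts between formats: here 'Jan 5, 2020' becomes '2020-01-05'.
formatDate('Jan 5, 2020', 'YYYY-MM-DD', 'MMM D, YYYY');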