Added teaser support. Added Score network with scraper for Scoreland. Improved q. Added assets.

This commit is contained in:
ThePendulum 2020-02-02 05:14:58 +01:00
parent 14e5695b6e
commit a97c6defca
52 changed files with 4291 additions and 3435 deletions

View File

@ -120,9 +120,7 @@ async function fetchNetwork() {
}
this.sites = this.network.sites
.filter(site => !site.independent)
// .concat(this.studios)
.sort(({ name: nameA }, { name: nameB }) => nameA.localeCompare(nameB));
.filter(site => !site.independent);
this.releases = this.network.releases;
}

View File

@ -27,6 +27,22 @@
class="item trailer-video"
controls
>Sorry, the trailer cannot be played in your browser</video>
<video
v-else-if="release.teaser && /^video\//.test(release.teaser.mime)"
:src="`/media/${release.teaser.path}`"
:poster="`/media/${(release.poster && release.poster.thumbnail)}`"
:alt="release.title"
class="item trailer-video"
controls
>Sorry, the trailer cannot be played in your browser</video>
<img
v-else-if="release.teaser && /^image\//.test(release.teaser.mime)"
:src="`/media/${release.teaser.path}`"
:alt="release.title"
class="item trailer-video"
>
</div>
<a

View File

@ -28,6 +28,7 @@ function curateRelease(release) {
if (release.photos) curatedRelease.photos = release.photos.map(({ media }) => media);
if (release.trailer) curatedRelease.trailer = release.trailer.media;
if (release.teaser) curatedRelease.teaser = release.teaser.media;
if (release.actors) curatedRelease.actors = release.actors.map(({ actor }) => curateActor(actor, curatedRelease));
return curatedRelease;

View File

@ -92,6 +92,18 @@ const releaseTrailerFragment = `
index
path
thumbnail
mime
}
}
`;
const releaseTeaserFragment = `
teaser: releasesTeaserByReleaseId {
media {
index
path
thumbnail
mime
}
}
`;
@ -153,6 +165,7 @@ const releaseFragment = `
${releasePosterFragment}
${releasePhotosFragment}
${releaseTrailerFragment}
${releaseTeaserFragment}
${siteFragment}
studio {
id

View File

@ -18,12 +18,15 @@ function initNetworksActions(store, _router) {
name
slug
url
sites {
sites(
orderBy: PRIORITY_DESC
) {
id
name
slug
url
independent
priority
network {
id
name

View File

@ -40,6 +40,7 @@ module.exports = {
'kellymadison',
'bangbros',
'ddfnetwork',
'score',
'boobpedia',
'pornhub',
'freeones',

View File

@ -407,6 +407,19 @@ exports.up = knex => Promise.resolve()
table.unique('release_id');
}))
.then(() => knex.schema.createTable('releases_teasers', (table) => {
table.integer('release_id', 16)
.notNullable()
.references('id')
.inTable('releases');
table.integer('media_id', 16)
.notNullable()
.references('id')
.inTable('media');
table.unique('release_id');
}))
.then(() => knex.schema.createTable('releases_photos', (table) => {
table.integer('release_id', 16)
.notNullable()
@ -475,6 +488,7 @@ exports.down = knex => knex.raw(`
DROP TABLE IF EXISTS releases_photos CASCADE;
DROP TABLE IF EXISTS releases_covers CASCADE;
DROP TABLE IF EXISTS releases_trailers CASCADE;
DROP TABLE IF EXISTS releases_teasers CASCADE;
DROP TABLE IF EXISTS releases_tags CASCADE;
DROP TABLE IF EXISTS actors_avatars CASCADE;
DROP TABLE IF EXISTS actors_photos CASCADE;

View File

@ -0,0 +1,10 @@
exports.up = async knex => Promise.resolve()
.then(() => knex.schema.table('sites', (table) => {
table.integer('priority', 3)
.defaultTo(0);
}));
exports.down = async knex => Promise.resolve()
.then(() => knex.schema.table('sites', (table) => {
table.dropColumn('priority');
}));

View File

@ -20,7 +20,7 @@
"rollback": "knex-migrate down",
"seed-make": "knex seed:make",
"seed": "knex seed:run",
"flush": "cli-confirm \"This completely purges the database, are you sure?\" && knex-migrate down && knex-migrate up && knex seed:run"
"flush": "cli-confirm \"This completely purges the database, are you sure?\" && knex-migrate down --to 0 && knex-migrate up && knex seed:run"
},
"repository": {
"type": "git",

Binary file not shown.

After

Width:  |  Height:  |  Size: 21 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 9.0 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 13 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 91 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 54 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 10 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 8.7 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 43 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 26 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 32 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 23 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 24 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 6.9 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 40 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 31 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 3.5 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 34 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 2.4 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 50 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 108 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 7.6 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 22 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 20 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 33 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 46 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 15 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 2.2 MiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 97 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 98 KiB

After

Width:  |  Height:  |  Size: 5.1 MiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 19 KiB

After

Width:  |  Height:  |  Size: 115 KiB

View File

@ -173,6 +173,12 @@ const networks = [
url: 'https://www.realitykings.com',
description: 'Home of HD reality porn featuring the nicest tits and ass online! The hottest curvy girls in real amateur sex stories are only on REALITYkings.com',
},
{
slug: 'score',
name: 'SCORE',
url: 'https://www.scorepass.com',
description: '',
},
{
slug: 'teamskeet',
name: 'Team Skeet',

File diff suppressed because it is too large Load Diff

View File

@ -4,9 +4,9 @@ const tagPosters = Object.entries({
'anal-creampie': [0, 'Gina Valentina and Jane Wilde in "A Very Special Anniversary" for Tushy'],
'ass-to-mouth': ['poster', 'Alysa Gap and Logan in "Anal Buffet 4" for Evil Angel'],
'da-tp': [0, 'Natasha Teen in LegalPorno SZ2164'],
'double-anal': [2, 'Lana Rhoades in "Gangbang Me 3" for HardX'],
'double-anal': [5, 'Riley Reid in "The Gangbang of Riley Reid" for Jules Jordan'],
'double-penetration': ['poster', 'Mia Malkova in "DP!" for HardX'],
'double-vaginal': [0, 'Aaliyah Hadid in "Squirting From Double Penetration With Anal" for Bang Bros'],
'double-vaginal': ['poster', 'Riley Reid in "Pizza That Ass" for Reid My Lips'],
'dv-tp': ['poster', 'Juelz Ventura in "Gangbanged 5" for Elegant Angel'],
'oral-creampie': [1, 'Keisha Grey in Brazzers House'],
'triple-anal': ['poster', 'Kristy Black in SZ1986 for LegalPorno'],
@ -50,11 +50,12 @@ const tagPhotos = [
['da-tp', 2, 'Angel Smalls in GIO408 for LegalPorno'],
['da-tp', 3, 'Evelina Darling in GIO294'],
['da-tp', 4, 'Ninel Mojado aka Mira Cuckold in GIO063 for LegalPorno'],
['double-anal', 2, 'Lana Rhoades in "Gangbang Me 3" for HardX'],
['double-anal', 'poster', 'Haley Reed in "Young Hot Ass" for Evil Angel'],
['double-anal', 0, 'Nicole Black doing double anal during a gangbang in GIO971 for LegalPorno'],
['double-anal', 1, 'Ria Sunn in SZ1801 for LegalPorno'],
['double-penetration', 0],
['double-vaginal', 'poster', 'Riley Reid in "Pizza That Ass" for Reid My Lips'],
['double-vaginal', 0, 'Aaliyah Hadid in "Squirting From Double Penetration With Anal" for Bang Bros'],
['dv-tp', 1, 'Adriana Chechik in "Adriana\'s Triple Anal Penetration!"'],
['dv-tp', 0, 'Luna Rival in LegalPorno SZ1490'],
['facefucking', '0', 'Brea for Young Throats'],

View File

@ -346,7 +346,7 @@ async function scrapeActors(actorNames) {
const profileScrapers = [].concat(source).map(slug => ({ scraperSlug: slug, scraper: scrapers.actors[slug] }));
try {
return profileScrapers.reduce(async (outcome, { scraper, scraperSlug }) => outcome.catch(async () => {
return await profileScrapers.reduce(async (outcome, { scraper, scraperSlug }) => outcome.catch(async () => {
logger.verbose(`Searching '${actorName}' on ${scraperSlug}`);
const profile = await scraper.fetchProfile(actorEntry ? actorEntry.name : actorName, scraperSlug);
@ -365,7 +365,7 @@ async function scrapeActors(actorNames) {
throw new Error(`Profile for ${actorName} not available on ${scraperSlug}`);
}), Promise.reject(new Error()));
} catch (error) {
logger.error(error);
logger.warn(`Error in scraper ${source}: ${error.message}`);
}
return null;
@ -418,6 +418,7 @@ async function scrapeActors(actorNames) {
return profile;
} catch (error) {
console.log(error);
logger.warn(`${actorName}: ${error}`);
return null;

View File

@ -188,7 +188,7 @@ async function storePhotos(photos, {
targetId,
subpath,
primaryRole, // role to assign to first photo if not already in database, used mainly for avatars
entropy = 2.5, // filter out fallback avatars and other generic clipart
entropyFilter = 2.5, // filter out fallback avatars and other generic clipart
}, label) {
if (!photos || photos.length === 0) {
logger.info(`No ${role}s available for ${label}`);
@ -200,7 +200,7 @@ async function storePhotos(photos, {
const metaFiles = await Promise.map(sourceOriginals, async (photoUrl, index) => fetchPhoto(photoUrl, index, label), {
concurrency: 10,
}).filter(photo => photo && photo.entropy > entropy);
}).filter(photo => photo && photo.entropy > entropyFilter);
const metaFilesByHash = metaFiles.reduce((acc, photo) => ({ ...acc, [photo.hash]: photo }), {}); // pre-filter hash duplicates within set; may occur through fallbacks
const [hashDuplicates, hashOriginals] = await findDuplicates(Object.values(metaFilesByHash), 'hash', 'hash', label);
@ -285,6 +285,7 @@ async function storeReleasePhotos(releases, label) {
async function storeTrailer(trailers, {
domain = 'releases',
role = 'trailer',
targetId,
subpath,
}, label) {
@ -294,7 +295,7 @@ async function storeTrailer(trailers, {
: trailers;
if (!trailer || !trailer.src) {
logger.info(`No trailer available for ${label}`);
logger.info(`No ${role} available for ${label}`);
return;
}
@ -306,7 +307,7 @@ async function storeTrailer(trailers, {
const res = await bhttp.get(trailerX.src);
const hash = getHash(res.body);
const filepath = path.join(domain, subpath, `trailer${trailerX.quality ? `_${trailerX.quality}` : ''}.${mime.getExtension(mimetype)}`);
const filepath = path.join(domain, subpath, `${role}${trailerX.quality ? `_${trailerX.quality}` : ''}.${mime.getExtension(mimetype)}`);
return {
trailer: res.body,
@ -327,6 +328,7 @@ async function storeTrailer(trailers, {
source: trailerX.source,
quality: trailerX.quality,
hash: trailerX.hash,
type: role,
})))
.returning('*');
@ -336,7 +338,7 @@ async function storeTrailer(trailers, {
? [...sourceDuplicates, ...hashDuplicates, ...newTrailers]
: [...sourceDuplicates, ...hashDuplicates];
await upsert('releases_trailers', trailerEntries.map(trailerEntry => ({
await upsert(`releases_${role}s`, trailerEntries.map(trailerEntry => ({
release_id: targetId,
media_id: trailerEntry.id,
})), ['release_id', 'media_id']);

View File

@ -345,6 +345,13 @@ async function storeReleaseAssets(releases) {
await storeTrailer(release.trailer, {
targetId: release.id,
subpath,
role: 'trailer',
}, identifier);
await storeTrailer(release.teaser, {
targetId: release.id,
subpath,
role: 'teaser',
}, identifier);
} catch (error) {
logger.error(error.message);

View File

@ -5,7 +5,7 @@ const bhttp = require('bhttp');
const { ex } = require('../utils/q');
function scrapeProfile(html) {
const { q, qa, qd, qi, qu } = ex(html); /* eslint-disable-line object-curly-newline */
const { q, qa, qd, qi, qus } = ex(html); /* eslint-disable-line object-curly-newline */
const profile = {};
const bio = qa('.infobox tr[valign="top"]')
@ -59,19 +59,15 @@ function scrapeProfile(html) {
if (bio.Blood_group) profile.blood = bio.Blood_group;
if (bio.Also_known_as) profile.aliases = bio.Also_known_as.split(', ');
const avatars = qi('.image img');
const avatarThumbPath = qi('.image img');
if (avatars.length > 0) {
const [avatarThumbPath] = avatars;
if (avatarThumbPath && !/NoImageAvailable/.test(avatarThumbPath)) {
const avatarPath = avatarThumbPath.slice(0, avatarThumbPath.lastIndexOf('/')).replace('thumb/', '');
if (!/NoImageAvailable/.test(avatarThumbPath)) {
const avatarPath = avatarThumbPath.slice(0, avatarThumbPath.lastIndexOf('/')).replace('thumb/', '');
profile.avatar = `http://www.boobpedia.com${avatarPath}`;
}
profile.avatar = `http://www.boobpedia.com${avatarPath}`;
}
profile.social = qu('.infobox a.external');
profile.social = qus('.infobox a.external');
return profile;
}

View File

@ -101,7 +101,7 @@ function scrapeScene(html, url, site) {
}
function scrapeProfile(html) {
const { q, qu } = ex(html);
const { q, qus } = ex(html);
const profile = {};
profile.description = q('.bio_about_text', true);
@ -109,7 +109,7 @@ function scrapeProfile(html) {
const avatar = q('img.performer-pic', 'src');
if (avatar) profile.avatar = `https:${avatar}`;
profile.releases = qu('.scene-item > a:first-child');
profile.releases = qus('.scene-item > a:first-child');
return profile;
}

206
src/scrapers/score.js Normal file
View File

@ -0,0 +1,206 @@
'use strict';
const bhttp = require('bhttp');
const { ex, exa } = require('../utils/q');
const slugify = require('../utils/slugify');
const { heightToCm, lbsToKg } = require('../utils/convert');
// Extracts photo URLs from an album page. Each entry is a pair of
// [full-size candidate, original thumbnail] so the downloader can fall back
// to the thumbnail when the upscaled variant does not exist.
function scrapePhotos(html) {
    const { qis } = ex(html, '#photos-page');

    return qis('img').map((thumbnail) => {
        const fullSize = thumbnail
            .replace('x_800', 'x_xl')
            .replace('_tn', '');

        return [fullSize, thumbnail];
    });
}
// Fetches an album page and scrapes its photos; resolves to an empty list on
// any non-200 response so callers can assign the result unconditionally.
async function fetchPhotos(url) {
    const res = await bhttp.get(url);

    if (res.statusCode !== 200) return [];

    return scrapePhotos(res.body.toString(), url);
}
// Scrapes a listing page into release stubs, one per '.container .video'
// card; photo-album cards are dropped. `exa` hands each card a set of
// context-bound query helpers.
function scrapeAll(html) {
return exa(html, '.container .video').map(({ q, qa, qd, ql }) => {
const release = {};
// NOTE(review): q without an attribute argument appears to return the raw
// element (textContent/href are read off it below) — confirm against utils/q
const linkEl = q('a.i-title');
release.title = linkEl.textContent.trim();
// rebuild the URL from origin + pathname to strip query string and hash
const url = new URL(linkEl.href);
release.url = `${url.origin}${url.pathname}`;
// this is a photo album, not a scene (used for profiles)
if (/photos\//.test(url)) return null;
// destructuring keeps the first of the last two path segments as the entry
// ID — assumes the slug is the final segment; TODO confirm URL shape
[release.entryId] = url.pathname.split('/').slice(-2);
release.date = qd('.i-date', 'MMM DD', /\w+ \d{1,2}$/);
release.actors = qa('.i-model', true);
release.duration = ql('.i-amount');
const posterEl = q('.item-img img');
if (posterEl) {
// poster src and GIF preview are protocol-relative; the animated preview
// doubles as the release teaser
release.poster = `https:${posterEl.src}`;
release.teaser = {
src: `https:${posterEl.dataset.gifPreview}`,
};
}
return release;
}).filter(Boolean); // remove the nulls returned for photo albums
}
// Scrapes a single scene page into a release object. Async because the
// scene's photo album lives on a separate page that must be fetched.
async function scrapeScene(html, url) {
    const { q, qa, qd, ql, qu, qp, qt } = ex(html, '#videos-page');
    const release = {};

    // entry ID is the first of the last two path segments of the scene URL
    [release.entryId] = new URL(url).pathname.split('/').slice(-2);

    release.title = q('#breadcrumb-top + h1', true);
    release.description = q('.p-desc', true);

    release.actors = qa('a[href*=models]', true);
    release.tags = qa('a[href*=tag]', true);

    // the stat boxes share the '.value' class; identify date and duration by
    // their text shape rather than position
    const dateEl = qa('.value').find(el => /\w+ \d+\w+, \d{4}/.test(el.textContent));
    release.date = qd(dateEl, null, 'MMMM Do, YYYY');

    // FIX: was qa('value') — a tag selector that matches nothing; the
    // duration box uses the '.value' class just like the date box above
    const durationEl = qa('.value').find(el => /\d{1,3}:\d{2}/.test(el.textContent));
    release.duration = ql(durationEl);

    const photosUrl = qu('a[href*=photos]');
    release.photos = await fetchPhotos(photosUrl);

    release.poster = qp('video'); // _800.jpg is larger than _xl.jpg in landscape

    const trailer = qt();
    release.trailer = [
        {
            // don't rely on trailer always being 720p by default
            src: trailer.replace(/\d+p\.mp4/, '720p.mp4'),
            quality: 720,
        },
        {
            src: trailer.replace(/\d+p\.mp4/, '360p.mp4'),
            quality: 360,
        },
    ];

    // guard: not every scene page renders a rating box
    const stars = q('.rate-box')?.dataset.score;
    if (stars) release.rating = { stars };

    return release;
}
// Finds the profile URL for `actorName` on a model-index page by exact title
// match; returns null when the actor is not listed on this page.
function scrapeModels(html, actorName) {
    const { qa } = ex(html);

    const matchingLink = qa('.model a').find(({ title }) => title === actorName);

    return (matchingLink && matchingLink.href) || null;
}
// Scrapes a model page into an actor profile. Gender is hard-coded because
// this network only lists female models.
function scrapeProfile(html) {
const { q, qa, qi } = ex(html, '#model-page');
const profile = { gender: 'female' };
// Collect label/value stat rows into a plain object keyed by the slugified
// label (e.g. the bra_size key consumed below).
// NOTE(review): q is called with an element as its first argument here —
// assumes the context-bound helpers accept an explicit element context;
// verify against utils/q.
const bio = qa('.stat').reduce((acc, el) => {
const prop = q(el, '.label', true).slice(0, -1); // strip trailing character (presumably a colon)
const key = slugify(prop, false, '_');
const value = q(el, '.value', true);
return {
...acc,
[key]: value,
};
}, {});
if (bio.location) profile.residencePlace = bio.location.replace('Czech Repulic', 'Czech Republic'); // see Laura Lion
if (bio.birthday) {
const birthMonth = bio.birthday.match(/^\w+/)[0].toLowerCase();
const [birthDay] = bio.birthday.match(/\d+/);
profile.birthday = [birthMonth, birthDay]; // currently unused, not to be confused with birthdate
}
if (bio.ethnicity) profile.ethnicity = bio.ethnicity;
if (bio.hair_color) profile.hair = bio.hair_color;
if (bio.height) profile.height = heightToCm(bio.height);
if (bio.weight) profile.weight = lbsToKg(bio.weight);
if (bio.bra_size) profile.bust = bio.bra_size;
// skip the first measurement segment — bust is already taken from bra_size
if (bio.measurements) [, profile.waist, profile.hip] = bio.measurements.split('-');
if (bio.occupation) profile.occupation = bio.occupation;
const avatar = qi('img');
if (avatar) profile.avatar = avatar;
// The model's scenes sit in the sibling container after the profile section.
// NOTE(review): relies on the ex() context exposing a `document` property —
// confirm against utils/q.
const releases = ex(html, '#model-page + .container');
profile.releases = scrapeAll(releases.document.outerHTML);
return profile;
}
// Fetches one page of the site's latest-videos listing and scrapes it into
// release stubs; resolves to null on a non-200 response.
async function fetchLatest(site, page = 1) {
    const res = await bhttp.get(`${site.url}/big-boob-videos?page=${page}`);

    if (res.statusCode !== 200) return null;

    return scrapeAll(res.body.toString(), site);
}
// Fetches and scrapes a single scene page; resolves to null on a non-200
// response.
async function fetchScene(url, site) {
    const res = await bhttp.get(url);

    if (res.statusCode !== 200) return null;

    return scrapeScene(res.body.toString(), url, site);
}
// Walks the alphabetical model index for the actor's initial, page by page,
// until the actor is found or the site stops serving pages (non-200).
// Resolves to the scraped profile, or null when unavailable.
async function fetchProfile(actorName, scraperSlug, page = 1) {
    const initial = actorName.charAt(0).toUpperCase();
    const indexUrl = `https://www.scoreland.com/big-boob-models/browse/${initial}/?page=${page}`;
    const res = await bhttp.get(indexUrl);

    if (res.statusCode !== 200) return null;

    const actorUrl = scrapeModels(res.body.toString(), actorName);

    // not on this page; recurse into the next one
    if (!actorUrl) return fetchProfile(actorName, scraperSlug, page + 1);

    const actorRes = await bhttp.get(actorUrl);

    if (actorRes.statusCode !== 200) return null;

    return scrapeProfile(actorRes.body.toString());
}
module.exports = {
fetchLatest,
fetchScene,
fetchProfile,
};

View File

@ -34,6 +34,7 @@ const men = require('./men');
const metrohd = require('./metrohd');
const mofos = require('./mofos');
const naughtyamerica = require('./naughtyamerica');
const score = require('./score');
const twentyonesextury = require('./21sextury');
const xempire = require('./xempire');
const wicked = require('./wicked');
@ -78,6 +79,7 @@ module.exports = {
puretaboo,
naughtyamerica,
realitykings,
score,
teamskeet,
vixen,
vogov,
@ -109,6 +111,7 @@ module.exports = {
naughtyamerica,
pornhub,
realitykings,
score,
transangels,
wicked,
xempire,

View File

@ -116,7 +116,7 @@ function scrapeLatest(html) {
}
function scrapeScene(html, url) {
const { q, qa, qd, qu, ql, qm } = ex(html);
const { q, qa, qd, qus, ql, qm } = ex(html);
const release = { url };
// release.entryId = slugify(release.title);
@ -131,7 +131,7 @@ function scrapeScene(html, url) {
release.actors = qa('.info-video-models a', true);
release.tags = qa('.info-video-category a', true);
release.photos = qu('.swiper-wrapper .swiper-slide a').map(source => source.replace('.jpg/', '.jpg'));
release.photos = qus('.swiper-wrapper .swiper-slide a').map(source => source.replace('.jpg/', '.jpg'));
release.poster = qm('meta[property="og:image"');
if (!release.poster) {

View File

@ -3,11 +3,21 @@
const { JSDOM } = require('jsdom');
const moment = require('moment');
// Prepends `protocol:` to protocol-relative URLs (those starting with //);
// any other value, or an empty protocol, passes through unchanged.
function prefixProtocol(url, protocol = 'https') {
    const isProtocolRelative = Boolean(protocol) && /^\/\//.test(url);

    return isProtocolRelative ? `${protocol}:${url}` : url;
}
function q(context, selector, attrArg, trim = true) {
const attr = attrArg === true ? 'textContent' : attrArg;
if (attr) {
const value = context.querySelector(selector)?.[attr];
const value = selector
? context.querySelector(selector)?.[attr]
: context[attr];
return trim ? value?.trim() : value;
}
@ -30,12 +40,14 @@ function qmeta(context, selector, attrArg = 'content', trim = true) {
}
function qdate(context, selector, format, match, attr = 'textContent') {
const dateString = context.querySelector(selector)?.[attr];
const dateString = selector
? context.querySelector(selector)?.[attr]
: context[attr];
if (!dateString) return null;
if (match) {
const dateStamp = dateString.match(match);
const dateStamp = dateString.trim().match(match);
if (dateStamp) return moment.utc(dateStamp[0], format).toDate();
return null;
@ -44,20 +56,41 @@ function qdate(context, selector, format, match, attr = 'textContent') {
return moment.utc(dateString.trim(), format).toDate();
}
function qimages(context, selector = 'img', attr = 'src') {
return qall(context, selector, attr);
// Queries a single image URL and normalizes protocol-relative sources.
function qimage(context, selector = 'img', attr = 'src', protocol = 'https') {
    const image = q(context, selector, attr);

    // no attribute means q output will be an HTML element; leave it untouched
    if (!attr) return image;

    return prefixProtocol(image, protocol);
}
function qurls(context, selector = 'a', attr = 'href') {
return qall(context, selector, attr);
// Queries all matching image URLs and normalizes protocol-relative sources.
function qimages(context, selector = 'img', attr = 'src', protocol = 'https') {
    const images = qall(context, selector, attr);

    // without an attribute the results are HTML elements; leave them untouched
    if (!attr) return images;

    return images.map(image => prefixProtocol(image, protocol));
}
function qposter(context, selector = 'video', attr = 'poster') {
return q(context, selector, attr);
// Queries a single link URL and normalizes protocol-relative hrefs.
function qurl(context, selector = 'a', attr = 'href', protocol = 'https') {
    const url = q(context, selector, attr);

    if (!attr) return url; // element, not a string

    return prefixProtocol(url, protocol);
}
function qtrailer(context, selector = 'source', attr = 'src') {
return q(context, selector, attr);
// Queries all matching link URLs and normalizes protocol-relative hrefs.
function qurls(context, selector = 'a', attr = 'href', protocol = 'https') {
    const urls = qall(context, selector, attr);

    if (!attr) return urls; // elements, not strings

    return urls.map(url => prefixProtocol(url, protocol));
}
// Queries a video poster URL and normalizes protocol-relative values.
function qposter(context, selector = 'video', attr = 'poster', protocol = 'https') {
    const poster = q(context, selector, attr);

    if (!attr) return poster; // element, not a string

    return prefixProtocol(poster, protocol);
}
// Queries a trailer source URL and normalizes protocol-relative values.
function qtrailer(context, selector = 'source', attr = 'src', protocol = 'https') {
    const trailer = q(context, selector, attr);

    if (!attr) return trailer; // element, not a string

    return prefixProtocol(trailer, protocol);
}
function qlength(context, selector, attr = 'textContent') {
@ -77,20 +110,24 @@ const funcs = {
q,
qall,
qdate,
qimage,
qimages,
qposter,
qlength,
qmeta,
qtrailer,
qurls,
qurl,
qa: qall,
qd: qdate,
qi: qimages,
qi: qimage,
qis: qimages,
qp: qposter,
ql: qlength,
qm: qmeta,
qt: qtrailer,
qu: qurls,
qu: qurl,
qus: qurls,
};
function ctx(element, window) {
@ -110,18 +147,29 @@ function ctx(element, window) {
};
}
function ctxa(context, selector) {
return Array.from(context.querySelectorAll(selector)).map(element => ctx(element));
function ctxa(context, selector, window) {
return Array.from(context.querySelectorAll(selector)).map(element => ctx(element, window));
}
function ex(html) {
function ex(html, selector) {
const { window } = new JSDOM(html);
if (selector) {
return ctx(window.document.querySelector(selector), window);
}
return ctx(window.document, window);
}
function exa(html, selector) {
const { window } = new JSDOM(html);
return ctxa(window.document, selector, window);
}
module.exports = {
ex,
exa,
ctx,
ctxa,
...funcs,

View File

@ -1,7 +1,7 @@
'use strict';
function slugify(string, encode = false) {
const slug = string.trim().toLowerCase().match(/\w+/g).join('-');
// Converts a string to a lowercase slug joined by `delimiter` (default '-'),
// optionally URI-encoded. Returns '' for input with no word characters.
function slugify(string, encode = false, delimiter = '-') {
    // match() yields null when there are no word characters; fall back to an
    // empty list so we return '' instead of throwing on .join()
    const words = string.trim().toLowerCase().match(/\w+/g) || [];
    const slug = words.join(delimiter);

    return encode ? encodeURI(slug) : slug;
}