diff --git a/src/actors.js b/src/actors.js index 7f79e83b..0f091768 100644 --- a/src/actors.js +++ b/src/actors.js @@ -610,7 +610,7 @@ async function scrapeActors(actorNames) { }, }), {}); - const newBaseActors = baseActors.filter(baseActor => !existingActorEntriesBySlugAndEntryId[baseActor.slug][baseActor.entryId]); + const newBaseActors = baseActors.filter(baseActor => !existingActorEntriesBySlugAndEntryId[baseActor.slug]?.[baseActor.entryId]); const [batchId] = newBaseActors.length > 0 ? await knex('batches').insert({ comment: null }).returning('id') : [null]; const curatedActorEntries = batchId && curateActorEntries(newBaseActors, batchId); diff --git a/src/scrapers/aziani.js b/src/scrapers/aziani.js index 67932c44..31a3390e 100644 --- a/src/scrapers/aziani.js +++ b/src/scrapers/aziani.js @@ -126,7 +126,7 @@ async function fetchScene(url, site) { return res.status; } -async function fetchProfile(actorName, { site }) { +async function fetchProfile({ name: actorName }, { site }) { const actorSlug = slugify(actorName, ''); const url = `${site.url}/tour/models/${actorSlug}.html`; const res = await get(url, '.page-content .row'); diff --git a/src/scrapers/babes.js b/src/scrapers/babes.js index 68418c93..0f1ec228 100644 --- a/src/scrapers/babes.js +++ b/src/scrapers/babes.js @@ -2,8 +2,8 @@ const { fetchScene, fetchLatest, fetchProfile } = require('./mindgeek'); -async function networkFetchProfile(actorName) { - return fetchProfile(actorName, 'babes'); +async function networkFetchProfile({ name: actorName }) { + return fetchProfile({ name: actorName }, 'babes'); } module.exports = { diff --git a/src/scrapers/bamvisions.js b/src/scrapers/bamvisions.js index 8f6a6c0e..d312b347 100644 --- a/src/scrapers/bamvisions.js +++ b/src/scrapers/bamvisions.js @@ -132,7 +132,7 @@ async function fetchScene(url, site) { return res.ok ? 
scrapeScene(res.item, url, site) : res.status; } -async function fetchProfile(actorName, { site }, include) { +async function fetchProfile({ name: actorName }, { site }, include) { const actorSlugA = slugify(actorName, ''); const actorSlugB = slugify(actorName); diff --git a/src/scrapers/bang.js b/src/scrapers/bang.js index e9bcde07..50d09417 100644 --- a/src/scrapers/bang.js +++ b/src/scrapers/bang.js @@ -278,7 +278,7 @@ async function fetchScene(url) { return scrapeScene(res.body._source); // eslint-disable-line no-underscore-dangle } -async function fetchProfile(actorName, context, include) { +async function fetchProfile({ name: actorName }, context, include) { const res = await post(`https://${clusterId}.us-east-1.aws.found.io/actors/actor/_search`, { size: 5, sort: [{ diff --git a/src/scrapers/bangbros.js b/src/scrapers/bangbros.js index 0f2bf8f4..b0beb2f2 100644 --- a/src/scrapers/bangbros.js +++ b/src/scrapers/bangbros.js @@ -221,7 +221,7 @@ async function fetchScene(url, site, release) { return scrapeScene(res.item.html, url, site); } -async function fetchProfile(actorName, scope) { +async function fetchProfile({ name: actorName }, scope) { const actorSlug = slugify(actorName); const url = `https://bangbros.com/search/${actorSlug}`; const res = await bhttp.get(url); diff --git a/src/scrapers/blowpass.js b/src/scrapers/blowpass.js index f56fd144..7d5c2ca2 100644 --- a/src/scrapers/blowpass.js +++ b/src/scrapers/blowpass.js @@ -25,8 +25,8 @@ function getActorReleasesUrl(actorPath, page = 1) { return `https://www.blowpass.com/en/videos/blowpass/latest/All-Categories/0${actorPath}/${page}`; } -async function networkFetchProfile(actorName, context, include) { - return fetchProfile(actorName, context, null, getActorReleasesUrl, include); +async function networkFetchProfile({ name: actorName }, context, include) { + return fetchProfile({ name: actorName }, context, null, getActorReleasesUrl, include); } module.exports = { diff --git a/src/scrapers/boobpedia.js 
b/src/scrapers/boobpedia.js index 22c3ca40..f942bae4 100644 --- a/src/scrapers/boobpedia.js +++ b/src/scrapers/boobpedia.js @@ -78,7 +78,7 @@ function scrapeProfile(html) { return profile; } -async function fetchProfile(actorName) { +async function fetchProfile({ name: actorName }) { const actorSlug = actorName.replace(/\s+/, '_'); const res = await bhttp.get(`http://www.boobpedia.com/boobs/${actorSlug}`); diff --git a/src/scrapers/brazzers.js b/src/scrapers/brazzers.js index 15284757..4d0aa2d9 100644 --- a/src/scrapers/brazzers.js +++ b/src/scrapers/brazzers.js @@ -185,7 +185,7 @@ async function fetchScene(url, site) { return res.status; } -async function fetchProfile(actorName, context, include) { +async function fetchProfile({ name: actorName }, context, include) { const searchRes = await qu.get('https://brazzers.com/pornstars-search/', `a[title="${actorName}" i]`, { Cookie: `textSearch=${encodeURIComponent(actorName)};`, }); diff --git a/src/scrapers/cherrypimps.js b/src/scrapers/cherrypimps.js index 2341d42e..cccde4e8 100644 --- a/src/scrapers/cherrypimps.js +++ b/src/scrapers/cherrypimps.js @@ -1,31 +1,31 @@ 'use strict'; -const { get, geta, ctxa, ed } = require('../utils/q'); +const qu = require('../utils/qu'); const slugify = require('../utils/slugify'); function scrapeAll(scenes, site) { - return scenes.map(({ qu }) => { - const url = qu.url('.text-thumb a'); + return scenes.map(({ query }) => { + const url = query.url('.text-thumb a'); const { pathname } = new URL(url); - const channelUrl = qu.url('.badge'); + const channelUrl = query.url('.badge'); - if (site?.parameters?.extract && qu.q('.badge', true) !== site.name) { + if (site?.parameters?.extract && query.q('.badge', true) !== site.name) { return null; } const release = {}; release.url = channelUrl ? 
`${channelUrl}${pathname}` : url; - release.entryId = pathname.match(/\/\d+/)[0].slice(1); - release.title = qu.q('.text-thumb a', true); + release.entryId = pathname.match(/\/trailers\/(.*)\.html/)[1]; + release.title = query.q('.text-thumb a', true); - release.date = qu.date('.date', 'YYYY-MM-DD', /\d{4}-\d{2}-\d{2}/); - release.duration = qu.dur('.date', /(\d{2}:)?\d{2}:\d{2}/); + release.date = query.date('.date', 'YYYY-MM-DD', /\d{4}-\d{2}-\d{2}/); + release.duration = query.dur('.date', /(\d{2}:)?\d{2}:\d{2}/); - release.actors = qu.all('.category a', true); + release.actors = query.all('.category a', true); - release.poster = qu.img('img.video_placeholder, .video-images img'); - release.teaser = { src: qu.trailer() }; + release.poster = query.img('img.video_placeholder, .video-images img'); + release.teaser = { src: query.trailer() }; return release; }).filter(Boolean); @@ -56,18 +56,18 @@ function scrapeScene({ q, qd, qa }, url, _site, baseRelease) { return release; } -function scrapeProfile({ q, qa, qtx }) { +function scrapeProfile({ query }) { const profile = {}; - const keys = qa('.model-descr_line:not(.model-descr_rait) p.text span', true); - const values = qa('.model-descr_line:not(.model-descr_rait) p.text').map(el => qtx(el)); + const keys = query.all('.model-descr_line:not(.model-descr_rait) p.text span', true); + const values = query.all('.model-descr_line:not(.model-descr_rait) p.text').map(el => query.text(el)); const bio = keys.reduce((acc, key, index) => ({ ...acc, [slugify(key, '_')]: values[index] }), {}); - if (bio.height) profile.height = Number(bio.height.match(/\((\d+)cm\)/)[1]); - if (bio.weight) profile.weight = Number(bio.weight.match(/\((\d+)kg\)/)[1]); + if (bio.height) profile.height = Number(bio.height.match(/\((\d+)\s*cm\)/)?.[1]); + if (bio.weight) profile.weight = Number(bio.weight.match(/\((\d+)\s*kg\)/)?.[1]); if (bio.race) profile.ethnicity = bio.race; - if (bio.date_of_birth) profile.birthdate = ed(bio.date_of_birth, 'MMMM D,
YYYY'); + if (bio.date_of_birth) profile.birthdate = qu.extractDate(bio.date_of_birth, 'MMMM D, YYYY'); if (bio.birthplace) profile.birthPlace = bio.birthplace; if (bio.measurements) { @@ -96,11 +96,11 @@ function scrapeProfile({ q, qa, qtx }) { if (bio.aliases) profile.aliases = bio.aliases.split(',').map(alias => alias.trim()); - const avatar = q('.model-img img'); + const avatar = query.q('.model-img img'); profile.avatar = avatar.getAttribute('src0_3x') || avatar.getAttribute('src0_2x') || avatar.dataset.src; - const releases = qa('.video-thumb'); - profile.releases = scrapeAll(ctxa(releases)); + const releases = query.all('.video-thumb'); + profile.releases = scrapeAll(qu.initAll(releases)); return profile; } @@ -109,18 +109,18 @@ async function fetchLatest(site, page = 1) { const url = site.parameters?.extract ? `https://cherrypimps.com/categories/movies_${page}.html` : `${site.url}/categories/movies_${page}.html`; - const res = await geta(url, 'div.video-thumb'); + const res = await qu.getAll(url, 'div.video-thumb'); return res.ok ? scrapeAll(res.items, site) : res.status; } async function fetchScene(url, site, release) { - const res = await get(url); + const res = await qu.get(url); return res.ok ? scrapeScene(res.item, url, site, release) : res.status; } -async function fetchProfile(actorName, { site, network, scraper }) { +async function fetchProfile({ name: actorName }, { site, network, scraper }) { const actorSlug = slugify(actorName); const actorSlug2 = slugify(actorName, ''); @@ -130,10 +130,10 @@ async function fetchProfile(actorName, { site, network, scraper }) { ? [`${origin}/models/${actorSlug}.html`, `${origin}/models/${actorSlug2}.html`] : [`${origin}/models/${actorSlug}.html`, `${origin}/models/${actorSlug2}.html`]; - const res = await get(url); + const res = await qu.get(url); if (res.ok) return scrapeProfile(res.item); - const res2 = await get(url2); + const res2 = await qu.get(url2); return res2.ok ? 
scrapeProfile(res2.item) : res2.status; } diff --git a/src/scrapers/ddfnetwork.js b/src/scrapers/ddfnetwork.js index 79dcb6e1..c5b07759 100644 --- a/src/scrapers/ddfnetwork.js +++ b/src/scrapers/ddfnetwork.js @@ -156,7 +156,7 @@ async function fetchScene(url, site) { return res.ok ? scrapeScene(res.item, url, site) : res.status; } -async function fetchProfile(actorName) { +async function fetchProfile({ name: actorName }) { const resSearch = await bhttp.post('https://ddfnetwork.com/search/ajax', { type: 'hints', diff --git a/src/scrapers/digitalplayground.js b/src/scrapers/digitalplayground.js index 96d6a5d7..7d25a390 100644 --- a/src/scrapers/digitalplayground.js +++ b/src/scrapers/digitalplayground.js @@ -2,8 +2,8 @@ const { fetchScene, fetchLatest, fetchProfile } = require('./mindgeek'); -async function networkFetchProfile(actorName) { - return fetchProfile(actorName, 'digitalplayground', 'modelprofile'); +async function networkFetchProfile({ name: actorName }) { + return fetchProfile({ name: actorName }, 'digitalplayground', 'modelprofile'); } module.exports = { diff --git a/src/scrapers/fakehub.js b/src/scrapers/fakehub.js index 7f6d24a9..f9857d6c 100644 --- a/src/scrapers/fakehub.js +++ b/src/scrapers/fakehub.js @@ -2,8 +2,8 @@ const { fetchScene, fetchLatest, fetchProfile } = require('./mindgeek'); -async function networkFetchProfile(actorName) { - return fetchProfile(actorName, 'fakehub', 'modelprofile'); +async function networkFetchProfile({ name: actorName }) { + return fetchProfile({ name: actorName }, 'fakehub', 'modelprofile'); } module.exports = { diff --git a/src/scrapers/famedigital.js b/src/scrapers/famedigital.js index 531d3856..4066c7a2 100644 --- a/src/scrapers/famedigital.js +++ b/src/scrapers/famedigital.js @@ -90,10 +90,10 @@ async function fetchClassicProfile(actorName, { site }) { return null; } -async function networkFetchProfile(actorName, context, include) { +async function networkFetchProfile({ name: actorName }, context, include) { 
const profile = await ((context.site.parameters?.api && fetchApiProfile(actorName, context, include)) || (context.site.parameters?.classic && include.scenes && fetchClassicProfile(actorName, context, include)) // classic profiles only have scenes, no bio - || fetchProfile(actorName, context, true, getActorReleasesUrl, include)); + || fetchProfile({ name: actorName }, context, true, getActorReleasesUrl, include)); return profile; } diff --git a/src/scrapers/freeones.js b/src/scrapers/freeones.js index 46f67f1e..576ba20e 100644 --- a/src/scrapers/freeones.js +++ b/src/scrapers/freeones.js @@ -63,7 +63,7 @@ function scrapeSearch(html) { return document.querySelector('a.image-link')?.href || null; } -async function fetchProfile(actorName) { +async function fetchProfile({ name: actorName }) { const actorSlug = actorName.toLowerCase().replace(/\s+/g, '-'); const res = await bhttp.get(`https://freeones.nl/${actorSlug}/profile`); diff --git a/src/scrapers/freeones_legacy.js b/src/scrapers/freeones_legacy.js index df1c9251..f4c4f5f5 100644 --- a/src/scrapers/freeones_legacy.js +++ b/src/scrapers/freeones_legacy.js @@ -107,7 +107,7 @@ async function scrapeProfileBio(html, frontpageProfile, url, name) { return profile; } -async function fetchProfile(actorName) { +async function fetchProfile({ name: actorName }) { const slug = actorName.replace(' ', '_'); const frontpageUrl = `https://www.freeones.com/html/v_links/${slug}`; diff --git a/src/scrapers/fullpornnetwork.js b/src/scrapers/fullpornnetwork.js index 2196a370..c61dab87 100644 --- a/src/scrapers/fullpornnetwork.js +++ b/src/scrapers/fullpornnetwork.js @@ -96,7 +96,7 @@ async function fetchScene(url, site) { return res.ok && res.item ? 
scrapeScene(res.item, url, site) : res.status; } -async function fetchProfile(actorName, { site }) { +async function fetchProfile({ name: actorName }, { site }) { const actorSlug = slugify(actorName, ''); const url = `${site.url}/1/model/${actorSlug}`; diff --git a/src/scrapers/gamma.js b/src/scrapers/gamma.js index ce4553b6..38eccb8e 100644 --- a/src/scrapers/gamma.js +++ b/src/scrapers/gamma.js @@ -559,7 +559,7 @@ async function fetchActorScenes(actorName, apiUrl, siteSlug) { return []; } -async function fetchProfile(actorName, context, altSearchUrl, getActorReleasesUrl, include) { +async function fetchProfile({ name: actorName }, context, altSearchUrl, getActorReleasesUrl, include) { const siteSlug = context.entity.slug || context.site?.slug || context.network?.slug; const actorSlug = actorName.toLowerCase().replace(/\s+/, '+'); diff --git a/src/scrapers/hush.js b/src/scrapers/hush.js index bab6bf37..f5e2adfd 100644 --- a/src/scrapers/hush.js +++ b/src/scrapers/hush.js @@ -381,7 +381,7 @@ async function fetchScene(url, site, baseRelease, beforeFetchLatest) { return scrapeScene(res.item, site, url, baseRelease); } -async function fetchProfile(actorName, { site }) { +async function fetchProfile({ name: actorName }, { site }) { const actorSlugA = slugify(actorName, ''); const actorSlugB = slugify(actorName); diff --git a/src/scrapers/iconmale.js b/src/scrapers/iconmale.js index dea8600e..c5dea8d7 100644 --- a/src/scrapers/iconmale.js +++ b/src/scrapers/iconmale.js @@ -2,8 +2,8 @@ const { fetchProfile } = require('./mindgeek'); -async function networkFetchProfile(actorName) { - return fetchProfile(actorName, 'iconmale'); +async function networkFetchProfile({ name: actorName }) { + return fetchProfile({ name: actorName }, 'iconmale'); } module.exports = { diff --git a/src/scrapers/julesjordan.js b/src/scrapers/julesjordan.js index 97b43f76..366ce6b7 100644 --- a/src/scrapers/julesjordan.js +++ b/src/scrapers/julesjordan.js @@ -381,7 +381,7 @@ async function 
fetchMovie(url, site) { return res.ok ? scrapeMovie(res.item, url, site) : res.status; } -async function fetchProfile(actorName) { +async function fetchProfile({ name: actorName }) { const actorSlugA = slugify(actorName, '-'); const actorSlugB = slugify(actorName, ''); diff --git a/src/scrapers/kellymadison.js b/src/scrapers/kellymadison.js index 2781fbd8..a063bdb5 100644 --- a/src/scrapers/kellymadison.js +++ b/src/scrapers/kellymadison.js @@ -154,7 +154,7 @@ async function fetchScene(url, channel, baseRelease) { return res.ok ? scrapeScene(res.item, url, baseRelease) : res.status; } -async function fetchProfile(actorName) { +async function fetchProfile({ name: actorName }) { const actorSlug = slugify(actorName); const res = await qu.get(`https://www.kellymadison.com/models/${actorSlug}`, null, { 'X-Requested-With': 'XMLHttpRequest', diff --git a/src/scrapers/killergram.js b/src/scrapers/killergram.js index 66cd60ff..0c9a209f 100644 --- a/src/scrapers/killergram.js +++ b/src/scrapers/killergram.js @@ -103,7 +103,7 @@ async function fetchScene(url, channel) { return res.ok ? 
scrapeScene(res.item, url, channel) : res.status; } -async function fetchProfile(actorName, entity, include) { +async function fetchProfile({ name: actorName }, entity, include) { const url = `http://killergram.com/episodes.asp?page=episodes&model=${encodeURI(actorName)}&ct=model`; const res = await qu.get(url, '#content', null, { followRedirects: false, diff --git a/src/scrapers/kink.js b/src/scrapers/kink.js index f7882f3f..7cc14dec 100644 --- a/src/scrapers/kink.js +++ b/src/scrapers/kink.js @@ -148,7 +148,7 @@ async function fetchScene(url, site) { return res.status; } -async function fetchProfile(actorName, entity, include) { +async function fetchProfile({ name: actorName }, entity, include) { const searchRes = await qu.getAll(`https://kink.com/search?type=performers&q=${actorName}`, '.model'); if (searchRes.ok) { diff --git a/src/scrapers/legalporno.js b/src/scrapers/legalporno.js index eb1a4bb7..613b2cdf 100644 --- a/src/scrapers/legalporno.js +++ b/src/scrapers/legalporno.js @@ -179,7 +179,7 @@ async function fetchScene(url, site) { return scrapeScene(res.body.toString(), url, site, useGallery); } -async function fetchProfile(actorName) { +async function fetchProfile({ name: actorName }) { const res = await bhttp.get(`https://www.legalporno.com/api/autocomplete/search?q=${actorName.replace(' ', '+')}`); const data = res.body; diff --git a/src/scrapers/men.js b/src/scrapers/men.js index 304e59ad..577ec325 100644 --- a/src/scrapers/men.js +++ b/src/scrapers/men.js @@ -2,8 +2,8 @@ const { fetchScene, fetchLatest, fetchProfile } = require('./mindgeek'); -async function networkFetchProfile(actorName) { - return fetchProfile(actorName, 'men', 'modelprofile'); +async function networkFetchProfile({ name: actorName }) { + return fetchProfile({ name: actorName }, 'men', 'modelprofile'); } module.exports = { diff --git a/src/scrapers/metrohd.js b/src/scrapers/metrohd.js index 78c2b30b..7af86bf2 100644 --- a/src/scrapers/metrohd.js +++ b/src/scrapers/metrohd.js @@ -2,8 
+2,8 @@ const { fetchScene, fetchLatest, fetchProfile } = require('./mindgeek'); -async function networkFetchProfile(actorName) { - return fetchProfile(actorName, 'devianthardcore'); +async function networkFetchProfile({ name: actorName }) { + return fetchProfile({ name: actorName }, 'devianthardcore'); } module.exports = { diff --git a/src/scrapers/mikeadriano.js b/src/scrapers/mikeadriano.js index ab201835..a5abe4f6 100644 --- a/src/scrapers/mikeadriano.js +++ b/src/scrapers/mikeadriano.js @@ -86,7 +86,7 @@ async function fetchScene(url, channel) { } /* API protected -async function fetchProfile(actorName, context , site) { +async function fetchProfile({ name: actorName }, context , site) { const session = bhttp.session(); await session.get(`https://tour.${site.slug}.com`); diff --git a/src/scrapers/milehighmedia.js b/src/scrapers/milehighmedia.js index a77460a0..099ff0cf 100644 --- a/src/scrapers/milehighmedia.js +++ b/src/scrapers/milehighmedia.js @@ -2,8 +2,8 @@ const { fetchScene, fetchLatest, fetchProfile } = require('./mindgeek'); -async function networkFetchProfile(actorName) { - return fetchProfile(actorName, 'milehighmedia'); +async function networkFetchProfile({ name: actorName }) { + return fetchProfile({ name: actorName }, 'milehighmedia'); } module.exports = { diff --git a/src/scrapers/mindgeek.js b/src/scrapers/mindgeek.js index f237a9b4..8bba7322 100644 --- a/src/scrapers/mindgeek.js +++ b/src/scrapers/mindgeek.js @@ -1,4 +1,3 @@ - 'use strict'; /* eslint-disable newline-per-chained-call */ @@ -7,7 +6,7 @@ const bhttp = require('bhttp'); const { CookieJar } = Promise.promisifyAll(require('tough-cookie')); const moment = require('moment'); -const { ex } = require('../utils/q'); +const qu = require('../utils/qu'); const slugify = require('../utils/slugify'); const { inchesToCm, lbsToKg } = require('../utils/convert'); const { cookieToData } = require('../utils/cookies'); @@ -140,7 +139,7 @@ async function getSession(url) { } function 
scrapeProfile(data, html, releases = [], networkName) { - const { qa, qd } = ex(html); + const { query } = qu.extract(html); const profile = { description: data.bio, @@ -169,8 +168,8 @@ function scrapeProfile(data, html, releases = [], networkName) { || data.images.card_main_rect[0].xs?.url; } - const birthdate = qa('li').find(el => /Date of Birth/.test(el.textContent)); - if (birthdate) profile.birthdate = qd(birthdate, 'span', 'MMMM Do, YYYY'); + const birthdate = query.all('li').find(el => /Date of Birth/.test(el.textContent)); + if (birthdate) profile.birthdate = query.date(birthdate, 'span', 'MMMM Do, YYYY'); profile.releases = releases.map(release => scrapeScene(release, null, null, networkName)); @@ -222,7 +221,7 @@ async function fetchScene(url, site) { return null; } -async function fetchProfile(actorName, networkSlug, actorPath = 'model') { +async function fetchProfile({ name: actorName }, networkSlug, actorPath = 'model') { const url = `https://www.${networkSlug}.com`; const { session, instanceToken } = await getSession(url); diff --git a/src/scrapers/mofos.js b/src/scrapers/mofos.js index 83cfd1a7..7a4930c6 100644 --- a/src/scrapers/mofos.js +++ b/src/scrapers/mofos.js @@ -2,8 +2,8 @@ const { fetchScene, fetchLatest, fetchProfile } = require('./mindgeek'); -async function networkFetchProfile(actorName) { - return fetchProfile(actorName, 'mofos'); +async function networkFetchProfile({ name: actorName }) { + return fetchProfile({ name: actorName }, 'mofos'); } module.exports = { diff --git a/src/scrapers/naughtyamerica.js b/src/scrapers/naughtyamerica.js index fd2c3551..702aacbc 100644 --- a/src/scrapers/naughtyamerica.js +++ b/src/scrapers/naughtyamerica.js @@ -139,7 +139,7 @@ async function fetchScene(url, site) { return scrapeScene(res.body.toString(), url, site); } -async function fetchProfile(actorName) { +async function fetchProfile({ name: actorName }) { const actorSlug = slugify(actorName); const res = await 
bhttp.get(`https://www.naughtyamerica.com/pornstar/${actorSlug}`); diff --git a/src/scrapers/nubiles.js b/src/scrapers/nubiles.js index e78f128b..226e8965 100644 --- a/src/scrapers/nubiles.js +++ b/src/scrapers/nubiles.js @@ -136,7 +136,7 @@ async function fetchScene(url, site) { return res.ok ? scrapeScene(res.item, url, site) : res.status; } -async function fetchProfile(actorName, { site }) { +async function fetchProfile({ name: actorName }, { site }) { const firstLetter = actorName.charAt(0).toLowerCase(); const origin = slugUrlMap[site.slug] || site.url; diff --git a/src/scrapers/pervcity.js b/src/scrapers/pervcity.js index 46a5431a..c1db0ef3 100644 --- a/src/scrapers/pervcity.js +++ b/src/scrapers/pervcity.js @@ -100,7 +100,7 @@ async function fetchScene(url, entity) { return res.ok ? scrapeScene(res.item, entity) : res.status; } -async function fetchProfile(actorName) { +async function fetchProfile({ name: actorName }) { const url = `https://pervcity.com/models/${slugify(actorName)}.html`; const res = await qu.get(url); diff --git a/src/scrapers/porndoe.js b/src/scrapers/porndoe.js index 13d85cfd..c70e7e37 100644 --- a/src/scrapers/porndoe.js +++ b/src/scrapers/porndoe.js @@ -100,8 +100,6 @@ async function scrapeProfile({ query }, url, include) { profile.releases = await fetchActorReleases({ query }, url); } - console.log(profile); - return profile; } @@ -117,7 +115,7 @@ async function fetchScene(url, channel) { return res.ok ? 
scrapeScene(res.item, url, channel) : res.status; } -async function fetchProfile(actorName, entity, include) { +async function fetchProfile({ name: actorName }, entity, include) { const url = `http://letsdoeit.com/models/${slugify(actorName)}.en.html`; const res = await qu.get(url); diff --git a/src/scrapers/pornhub.js b/src/scrapers/pornhub.js index a546fc98..c50f746d 100644 --- a/src/scrapers/pornhub.js +++ b/src/scrapers/pornhub.js @@ -51,7 +51,7 @@ async function scrapeProfile(html, _url, actorName) { return profile; } -async function fetchProfile(actorName) { +async function fetchProfile({ name: actorName }) { const actorSlug = actorName.toLowerCase().replace(/\s+/g, '-'); /* Model pages are not reliably associated with actual porn stars diff --git a/src/scrapers/private.js b/src/scrapers/private.js index b6944376..f862002f 100644 --- a/src/scrapers/private.js +++ b/src/scrapers/private.js @@ -175,7 +175,7 @@ async function fetchScene(url, site) { return scrapeScene(res.body.toString(), url, site); } -async function fetchProfile(actorName) { +async function fetchProfile({ name: actorName }) { const actorSearchSlug = slugify(actorName, '+'); const url = `https://www.private.com/search.php?query=${actorSearchSlug}`; const modelRes = await geta(url, '.model h3 a'); diff --git a/src/scrapers/realitykings.js b/src/scrapers/realitykings.js index fe40d895..720ce156 100644 --- a/src/scrapers/realitykings.js +++ b/src/scrapers/realitykings.js @@ -41,8 +41,8 @@ async function fetchLatestWrap(site, page = 1) { return fetchLatest(site, page); } -async function networkFetchProfile(actorName) { - return fetchProfile(actorName, 'realitykings'); +async function networkFetchProfile({ name: actorName }) { + return fetchProfile({ name: actorName }, 'realitykings'); } module.exports = { diff --git a/src/scrapers/score.js b/src/scrapers/score.js index 7ce8527e..7128b9d2 100644 --- a/src/scrapers/score.js +++ b/src/scrapers/score.js @@ -217,7 +217,7 @@ async function 
fetchScene(url, site) { return null; } -async function fetchProfile(actorName, context, include, page = 1, source = 0) { +async function fetchProfile({ name: actorName }, context, include, page = 1, source = 0) { const letter = actorName.charAt(0).toUpperCase(); const sources = [ @@ -244,11 +244,11 @@ async function fetchProfile(actorName, context, include, page = 1, source = 0) { return null; } - return fetchProfile(actorName, context, include, page + 1, source); + return fetchProfile({ name: actorName }, context, include, page + 1, source); } if (sources[source + 1]) { - return fetchProfile(actorName, context, include, 1, source + 1); + return fetchProfile({ name: actorName }, context, include, 1, source + 1); } return null; diff --git a/src/scrapers/teencoreclub.js b/src/scrapers/teencoreclub.js index 3169ea4a..17cc86e5 100644 --- a/src/scrapers/teencoreclub.js +++ b/src/scrapers/teencoreclub.js @@ -134,7 +134,7 @@ async function fetchScene(url, entity) { return res.status; } -async function fetchProfile(actorName, { entity }, include) { +async function fetchProfile({ name: actorName }, { entity }, include) { const res = await http.get(`https://teencoreclub.com/api/actors?query=${actorName}`); if (res.ok) { diff --git a/src/scrapers/template.js b/src/scrapers/template.js index 0416e6c4..dfb93a50 100644 --- a/src/scrapers/template.js +++ b/src/scrapers/template.js @@ -79,7 +79,7 @@ async function fetchScene(url, channel) { return res.status; } -async function fetchProfile(actorName, entity, include) { +async function fetchProfile({ name: actorName }, entity, include) { const url = `${entity.url}/actors/${slugify(actorName, '_')}`; const res = await qu.get(url); diff --git a/src/scrapers/transangels.js b/src/scrapers/transangels.js index 560dfccd..bd0b01d0 100644 --- a/src/scrapers/transangels.js +++ b/src/scrapers/transangels.js @@ -2,8 +2,8 @@ const { fetchProfile } = require('./mindgeek'); -async function networkFetchProfile(actorName) { - return 
fetchProfile(actorName, 'transangels'); +async function networkFetchProfile({ name: actorName }) { + return fetchProfile({ name: actorName }, 'transangels'); } module.exports = { diff --git a/src/scrapers/twistys.js b/src/scrapers/twistys.js index e753ac8c..4425d75d 100644 --- a/src/scrapers/twistys.js +++ b/src/scrapers/twistys.js @@ -2,8 +2,8 @@ const { fetchScene, fetchLatest, fetchProfile } = require('./mindgeek'); -async function networkFetchProfile(actorName) { - return fetchProfile(actorName, 'twistys'); +async function networkFetchProfile({ name: actorName }) { + return fetchProfile({ name: actorName }, 'twistys'); } module.exports = { diff --git a/src/scrapers/vixen.js b/src/scrapers/vixen.js index f9d2a736..cc940f11 100644 --- a/src/scrapers/vixen.js +++ b/src/scrapers/vixen.js @@ -233,7 +233,7 @@ async function fetchScene(url, site, baseRelease) { return res.code; } -async function fetchProfile(actorName, { site }, include) { +async function fetchProfile({ name: actorName }, { site }, include) { const origin = site.url; const actorSlug = slugify(actorName); const url = `${origin}/api/${actorSlug}`; diff --git a/src/scrapers/xempire.js b/src/scrapers/xempire.js index e976754f..1995bc37 100644 --- a/src/scrapers/xempire.js +++ b/src/scrapers/xempire.js @@ -23,8 +23,8 @@ function getActorReleasesUrl(actorPath, page = 1) { return `https://www.xempire.com/en/videos/xempire/latest/${page}/All-Categories/0${actorPath}`; } -async function networkFetchProfile(actorName, context, include) { - return fetchProfile(actorName, context, null, getActorReleasesUrl, include); +async function networkFetchProfile(baseActor, context, include) { + return fetchProfile(baseActor, context, null, getActorReleasesUrl, include); } module.exports = { diff --git a/src/utils/qu.js b/src/utils/qu.js index 4ad709a0..340edf7a 100644 --- a/src/utils/qu.js +++ b/src/utils/qu.js @@ -326,7 +326,7 @@ function init(element, window) { const quContextFuncs = Object.entries(quFuncs) // 
dynamically attach methods with context .reduce((acc, [key, func]) => ({ ...acc, - [key]: (...args) => (args[0].nodeType === undefined // allow for different context + [key]: (...args) => (args[0]?.nodeType === undefined // allow for different context ? func(element, ...args) : func(...args)), }), {});