diff --git a/src/scrapers/dogfart.js b/src/scrapers/dogfart.js
index 0362182fd..1b21df730 100644
--- a/src/scrapers/dogfart.js
+++ b/src/scrapers/dogfart.js
@@ -106,8 +106,9 @@ async function fetchProfile(baseActor, entity) {
   const res = await qu.getAll(url, '.recent-updates');

   if (res.ok) {
-    const scenes = scrapeLatest(res.items, entity, false);
+    const { scenes } = scrapeLatest(res.items, entity, false);

+    // no bio available
     return { scenes };
   }

diff --git a/src/scrapers/kellymadison.js b/src/scrapers/kellymadison.js
index ccd0f8861..5d2b9672e 100644
--- a/src/scrapers/kellymadison.js
+++ b/src/scrapers/kellymadison.js
@@ -16,7 +16,7 @@ const siteMapByKey = {
 const siteMapBySlug = Object.entries(siteMapByKey).reduce((acc, [key, value]) => ({ ...acc, [value]: key }), {});

 function scrapeLatest(scenes, site) {
-  return scenes.map(({ query }) => {
+  return scenes.reduce((acc, { query }) => {
     const release = {};

     release.shootId = query.q('.card-meta .text-right, .row .text-right, .card-footer-item:last-child', true);
@@ -24,11 +24,6 @@ function scrapeLatest(scenes, site) {
     const siteId = release.shootId.match(/\d?\w{2}/)[0];
     const siteSlug = siteMapByKey[siteId];

-    if (site.slug !== siteSlug) {
-      // using generic network overview, scene is not from the site we want
-      return null;
-    }
-
     const { pathname } = new URL(query.url('h5 a, .ep-title a, .title a'));
     [release.entryId] = pathname.match(/\d+$/);
     release.url = `${site.url}${pathname}`;
@@ -52,8 +47,16 @@ function scrapeLatest(scenes, site) {
       };
     }

-    return release;
-  }).filter((scene) => scene);
+    if (site.slug !== siteSlug) {
+      // using generic network overview, scene is not from the site we want
+      return { ...acc, unextracted: [...acc.unextracted, release] };
+    }
+
+    return { ...acc, scenes: [...acc.scenes, release] };
+  }, {
+    scenes: [],
+    unextracted: [],
+  });
 }

 async function scrapeScene({ query, html }, url, baseRelease, channel, session) {
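
A minimal sketch of how a caller might consume the { scenes, unextracted } shape that the reworked scrapeLatest returns. The wrapper name, listing URL, and '.card' selector below are assumptions for illustration; only qu.getAll and scrapeLatest's return shape come from the diff above.

// Consumption sketch, not code from this repository. fetchNetworkLatest,
// channel.parent.url and the '.card' selector are hypothetical; only qu.getAll
// and scrapeLatest's { scenes, unextracted } shape appear in the diff above.
async function fetchNetworkLatest(channel, page = 1) {
  // hypothetical shared network overview listing
  const res = await qu.getAll(`${channel.parent.url}/episodes?page=${page}`, '.card');

  if (!res.ok) {
    return null;
  }

  // scenes matched the requested channel's slug; unextracted scenes were listed
  // on the network overview but resolve to a sibling site
  const { scenes, unextracted } = scrapeLatest(res.items, channel);

  return { scenes, unextracted };
}

Keeping the non-matching releases as unextracted instead of filtering them out presumably lets the caller decide whether to attribute them to sibling channels or discard them.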