From 278b74e78c498f9e7071e05d8a73fc72f24e93b2 Mon Sep 17 00:00:00 2001
From: DebaucheryLibrarian
Date: Sun, 23 Aug 2020 02:43:10 +0200
Subject: [PATCH] Providing duplicate releases in predata. Using duplicates for
 filtering scenes without channel in Hush scraper.

---
 src/releases.js      |  1 +
 src/scrapers/hush.js | 10 ++++------
 src/updates.js       | 45 +++++++++++++++++++++++++++++----------------
 3 files changed, 34 insertions(+), 22 deletions(-)

diff --git a/src/releases.js b/src/releases.js
index 471328f2..6d899326 100644
--- a/src/releases.js
+++ b/src/releases.js
@@ -122,6 +122,7 @@ async function searchReleases(query, limit = 100) {
 }
 
 module.exports = {
+	curateRelease,
 	fetchRelease,
 	fetchReleases,
 	searchReleases,
diff --git a/src/scrapers/hush.js b/src/scrapers/hush.js
index 41f71c89..cdb8c750 100644
--- a/src/scrapers/hush.js
+++ b/src/scrapers/hush.js
@@ -107,8 +107,6 @@ function scrapeAllT1(scenes, site, accNetworkReleases) {
 		// release.entryId = q('.img-div img', 'id')?.match(/set-target-(\d+)/)[1];
 		release.entryId = deriveEntryId(release);
-		console.log(site.name, accNetworkReleases.map(accRelease => accRelease.entryId), release.entryId, accNetworkReleases?.map(accRelease => accRelease.entryId).includes(release.entryId));
-
 		if (site.parameters?.accFilter && accNetworkReleases?.map(accRelease => accRelease.entryId).includes(release.entryId)) {
 			// filter out releases that were already scraped from a categorized site, requires sequential site scraping
 			return null;
 		}
@@ -360,7 +358,7 @@ function scrapeProfileTour({ el, qu }, site) {
 	return profile;
 }
 
-async function fetchLatest(site, page = 1, include, { accNetworkReleases }) {
+async function fetchLatest(site, page = 1, include, { uniqueReleases, duplicateReleases }) {
 	const url = (site.parameters?.latest && util.format(site.parameters.latest, page))
 		|| (site.parameters?.t1 && `${site.url}/t1/categories/movies_${page}_d.html`)
 		|| `${site.url}/categories/movies_${page}_d.html`;
@@ -368,10 +366,10 @@
 	const res = await geta(url, '.modelfeature, .item-video, .updateItem');
 
 	if (!res.ok) return res.status;
-	if (site.parameters?.t1) return scrapeAllT1(res.items, site, accNetworkReleases);
-	if (site.parameters?.tour) return scrapeAllTour(res.items, site, accNetworkReleases);
+	if (site.parameters?.t1) return scrapeAllT1(res.items, site, [...uniqueReleases, ...duplicateReleases]);
+	if (site.parameters?.tour) return scrapeAllTour(res.items, site);
 
-	return scrapeAll(res.items, site, accNetworkReleases);
+	return scrapeAll(res.items, site, uniqueReleases);
 }
 
 async function fetchScene(url, site, baseRelease, include, beforeFetchLatest) {
diff --git a/src/updates.js b/src/updates.js
index e5333aa0..9127c5d6 100644
--- a/src/updates.js
+++ b/src/updates.js
@@ -6,6 +6,7 @@ const moment = require('moment');
 const argv = require('./argv');
 const logger = require('./logger')(__filename);
 const knex = require('./knex');
+const { curateRelease } = require('./releases');
 const include = require('./utils/argv-include')(argv);
 const scrapers = require('./scrapers/scrapers');
 const { fetchIncludedEntities } = require('./entities');
@@ -16,16 +17,20 @@ async function filterUniqueReleases(latestReleases, accReleases) {
 	const latestReleaseIdentifiers = latestReleases
 		.map(release => [release.entity.id, release.entryId]);
 
-	const duplicateReleases = await knex('releases')
+	const duplicateReleaseEntries = await knex('releases')
+		.select(knex.raw('releases.*, row_to_json(entities) as entity'))
+		.leftJoin('entities', 'entities.id', 'releases.entity_id')
 		.whereIn(['entity_id', 'entry_id'], latestReleaseIdentifiers);
 
+	const duplicateReleases = duplicateReleaseEntries.map(release => curateRelease(release));
+
 	// add entry IDs of accumulated releases to prevent an infinite scrape loop
 	// when one page contains the same release as the previous
 	const duplicateReleasesSiteIdAndEntryIds = duplicateReleases
 		.concat(accReleases)
 		.reduce((acc, release) => {
-			const entityId = release.entity_id || release.entity.id;
-			const entryId = release.entry_id || release.entryId;
+			const entityId = release.entityId || release.entity.id;
+			const entryId = release.entryId;
 
 			if (!acc[entityId]) acc[entityId] = {};
 			acc[entityId][entryId] = true;
@@ -163,11 +168,12 @@
 		scrapeMovies(scraper, channelEntity, preData),
 	]);
 
-	console.log(latestReleases);
+	logger.info(`Fetching ${latestReleases.uniqueReleases.length} latest and ${upcomingReleases.uniqueReleases.length} upcoming updates for '${channelEntity.name}' (${channelEntity.parent?.name})`);
 
-	logger.info(`Fetching ${latestReleases.length} latest and ${upcomingReleases.length} upcoming updates for '${channelEntity.name}' (${channelEntity.parent?.name})`);
-
-	return [...latestReleases.uniqueReleases, ...upcomingReleases.uniqueReleases];
+	return {
+		uniqueReleases: [...latestReleases.uniqueReleases, ...upcomingReleases.uniqueReleases],
+		duplicateReleases: [...latestReleases.duplicateReleases, ...upcomingReleases.duplicateReleases],
+	};
 }
 
 async function scrapeChannel(channelEntity, accNetworkReleases) {
@@ -183,12 +189,10 @@
 	try {
 		const beforeFetchLatest = await scraper.beforeFetchLatest?.(channelEntity);
 
-		const channelEntityReleases = await scrapeChannelReleases(scraper, channelEntity, {
-			accNetworkReleases,
+		return await scrapeChannelReleases(scraper, channelEntity, {
+			...accNetworkReleases,
 			beforeFetchLatest,
 		});
-
-		return channelEntityReleases.map(release => ({ ...release, channelEntity }));
 	} catch (error) {
 		logger.error(`Failed to scrape releases from ${channelEntity.name} using ${scraper.slug}: ${error.message}`);
 
@@ -197,22 +201,31 @@
 }
 
 async function scrapeNetworkSequential(networkEntity) {
-	return Promise.reduce(
+	const releases = await Promise.reduce(
 		networkEntity.children,
 		async (chain, channelEntity) => {
 			const accNetworkReleases = await chain;
-			const channelReleases = await scrapeChannel(channelEntity, accNetworkReleases);
+			const { uniqueReleases, duplicateReleases } = await scrapeChannel(channelEntity, accNetworkReleases);
 
-			return accNetworkReleases.concat(channelReleases);
+			return {
+				uniqueReleases: accNetworkReleases.uniqueReleases.concat(uniqueReleases),
+				duplicateReleases: accNetworkReleases.duplicateReleases.concat(duplicateReleases),
+			};
 		},
-		Promise.resolve([]),
+		Promise.resolve(emptyReleases),
 	);
+
+	return releases.uniqueReleases;
 }
 
 async function scrapeNetworkParallel(networkEntity) {
 	return Promise.map(
 		networkEntity.children,
-		async channelEntity => scrapeChannel(channelEntity, networkEntity),
+		async (channelEntity) => {
+			const { uniqueReleases } = await scrapeChannel(channelEntity, networkEntity);
+
+			return uniqueReleases;
+		},
 		{ concurrency: 3 },
 	);
}
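
A minimal sketch of the duplicate-lookup map that filterUniqueReleases builds above, runnable with plain Node.js. buildDuplicateMap and the sample releases are hypothetical stand-ins for illustration, not part of the patch:

// build a nested lookup map (entity ID -> entry ID -> true) so each
// duplicate check is O(1) instead of an array scan per release
function buildDuplicateMap(releases) {
	return releases.reduce((acc, release) => {
		// assumed shapes: scraped releases may carry a bare entityId,
		// curated releases carry an entity object
		const entityId = release.entityId || release.entity.id;

		if (!acc[entityId]) acc[entityId] = {};
		acc[entityId][release.entryId] = true;

		return acc;
	}, {});
}

const seen = buildDuplicateMap([
	{ entityId: 12, entryId: 'scene-100' },
	{ entity: { id: 12 }, entryId: 'scene-101' },
]);

console.log(Boolean(seen[12]?.['scene-100'])); // true: already known, filter out
console.log(Boolean(seen[12]?.['scene-999'])); // false: new release, keep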
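The sequential accumulation in scrapeNetworkSequential follows the same pattern as this sketch, assuming bluebird's Promise.reduce (which the Promise.reduce/Promise.map calls imply). Note that emptyReleases is referenced by the patch but not defined in the visible hunks, so its shape here is an assumption, and scrapeChannelSketch is a hypothetical stand-in for scrapeChannel:

const Promise = require('bluebird');

const emptyReleases = { uniqueReleases: [], duplicateReleases: [] };

// stand-in for scrapeChannel: a real scraper would receive the accumulated
// unique and duplicate releases as preData and filter against them
async function scrapeChannelSketch(channel, accReleases) {
	return { uniqueReleases: [`${channel}-scene`], duplicateReleases: [] };
}

async function scrapeSequential(channels) {
	// channels are scraped one at a time, so each scrape sees every release
	// accumulated by the channels before it
	const releases = await Promise.reduce(channels, async (acc, channel) => {
		const { uniqueReleases, duplicateReleases } = await scrapeChannelSketch(channel, acc);

		return {
			uniqueReleases: acc.uniqueReleases.concat(uniqueReleases),
			duplicateReleases: acc.duplicateReleases.concat(duplicateReleases),
		};
	}, emptyReleases);

	return releases.uniqueReleases;
}

scrapeSequential(['channel-a', 'channel-b']).then(console.log); // ['channel-a-scene', 'channel-b-scene']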