Selecting included networks with infinite parent depth to facilitate scraper resolve.

This commit is contained in:
DebaucheryLibrarian 2021-02-02 01:31:12 +01:00
parent 46c0b269c3
commit d5cdfb36a9
4 changed files with 101 additions and 80 deletions

View File

@ -5,10 +5,10 @@ const merge = require('object-merge-advanced');
const argv = require('./argv'); const argv = require('./argv');
const include = require('./utils/argv-include')(argv); const include = require('./utils/argv-include')(argv);
const { resolveScraper, resolveLayoutScraper } = require('./scrapers/resolve');
const { fetchReleaseEntities, urlToSiteSlug } = require('./entities'); const { fetchReleaseEntities, urlToSiteSlug } = require('./entities');
const logger = require('./logger')(__filename); const logger = require('./logger')(__filename);
const qu = require('./utils/qu'); const qu = require('./utils/qu');
const scrapers = require('./scrapers/scrapers');
function toBaseReleases(baseReleasesOrUrls, entity = null) { function toBaseReleases(baseReleasesOrUrls, entity = null) {
if (!baseReleasesOrUrls) { if (!baseReleasesOrUrls) {
@ -68,30 +68,6 @@ async function fetchScene(scraper, url, entity, baseRelease, options) {
return null; return null;
} }
// Resolve the release scraper for an entity by walking up its parent chain
// until a slug with a registered scraper is found; null when none matches.
function findScraper(entity) {
	let node = entity;

	do {
		if (scrapers.releases[node.slug]) {
			return scrapers.releases[node.slug];
		}

		node = node.parent;
	} while (node);

	return null;
}
// Pick the layout-specific variant of a scraper, consulting the entity and
// then each ancestor's `parameters.layout`; falls back to the base scraper.
function findLayoutScraper(entity, scraper) {
	let node = entity;

	while (true) {
		const variant = scraper?.[node.parameters?.layout];

		if (variant) {
			return variant;
		}

		if (!node.parent) {
			// No layout variant anywhere in the ancestor chain.
			return scraper;
		}

		node = node.parent;
	}
}
async function scrapeRelease(baseRelease, entitiesBySlug, type = 'scene') { async function scrapeRelease(baseRelease, entitiesBySlug, type = 'scene') {
const entity = baseRelease.entity || entitiesBySlug[urlToSiteSlug(baseRelease.url)]; const entity = baseRelease.entity || entitiesBySlug[urlToSiteSlug(baseRelease.url)];
@ -107,8 +83,8 @@ async function scrapeRelease(baseRelease, entitiesBySlug, type = 'scene') {
}; };
} }
const scraper = findScraper(entity); const scraper = resolveScraper(entity);
const layoutScraper = findLayoutScraper(entity, scraper); const layoutScraper = resolveLayoutScraper(entity, scraper);
if (!layoutScraper) { if (!layoutScraper) {
logger.warn(`Could not find scraper for ${baseRelease.url}`); logger.warn(`Could not find scraper for ${baseRelease.url}`);

View File

@ -128,7 +128,7 @@ async function fetchIncludedEntities() {
AND entities.type = 'network') AND entities.type = 'network')
OR (entities.slug = ANY(:excludedChannels) OR (entities.slug = ANY(:excludedChannels)
AND entities.type = 'channel')) AND entities.type = 'channel'))
) ), included_per_network AS (
/* select recursive channels as children of networks */ /* select recursive channels as children of networks */
SELECT SELECT
parents.*, parents.*,
@ -146,10 +146,28 @@ async function fetchIncludedEntities() {
WHERE WHERE
included_entities.type = 'channel' included_entities.type = 'channel'
GROUP BY GROUP BY
parents.id, grandparents.id; parents.id, grandparents.id
), entity_tree as (
/* get recursive parents of networks (necessary for scraper resolve) */
SELECT to_jsonb(included_per_network) as entity,
parent_id,
array['parent'] as parent_path,
0 as depth
FROM included_per_network
UNION ALL
SELECT jsonb_set(entity_tree.entity, entity_tree.parent_path, to_jsonb(entities)),
entities.parent_id,
entity_tree.parent_path || array['parent'],
depth + 1
FROM entity_tree
JOIN entities ON entity_tree.parent_id = entities.id
)
SELECT entity FROM entity_tree WHERE parent_id is null;
`, include); `, include);
const curatedNetworks = rawNetworks.rows.map(entity => curateEntity(entity, true)); const curatedNetworks = rawNetworks.rows.map(({ entity }) => curateEntity(entity, true));
return curatedNetworks; return curatedNetworks;
} }
@ -164,7 +182,7 @@ async function fetchReleaseEntities(baseReleases) {
)); ));
const entities = await knex.raw(` const entities = await knex.raw(`
WITH RECURSIVE tree as ( WITH RECURSIVE entity_tree as (
SELECT to_jsonb(entities) as entity, SELECT to_jsonb(entities) as entity,
parent_id, parent_id,
array['parent'] as parent_path, array['parent'] as parent_path,
@ -174,14 +192,14 @@ async function fetchReleaseEntities(baseReleases) {
UNION ALL UNION ALL
SELECT jsonb_set(tree.entity, tree.parent_path, to_jsonb(entities)), SELECT jsonb_set(entity_tree.entity, entity_tree.parent_path, to_jsonb(entities)),
entities.parent_id, entities.parent_id,
tree.parent_path || array['parent'], entity_tree.parent_path || array['parent'],
depth + 1 depth + 1
FROM tree FROM entity_tree
JOIN entities ON tree.parent_id = entities.id JOIN entities ON entity_tree.parent_id = entities.id
) )
SELECT entity FROM tree WHERE parent_id is null SELECT entity FROM entity_tree WHERE parent_id is null
ORDER BY entity->'type' ASC; ORDER BY entity->'type' ASC;
`, { entitySlugs }); `, { entitySlugs });

32
src/scrapers/resolve.js Normal file
View File

@ -0,0 +1,32 @@
'use strict';
const scrapers = require('./scrapers');
// Resolve the release scraper for an entity: check the entity's own slug
// first, then each ancestor's, returning null when no scraper is registered.
function resolveScraper(entity) {
	for (let node = entity; ; node = node.parent) {
		const releaseScraper = scrapers.releases[node.slug];

		if (releaseScraper) {
			return releaseScraper;
		}

		if (!node.parent) {
			return null;
		}
	}
}
// Select the layout-specific sub-scraper for an entity, searching the
// entity's own `parameters.layout` and then each ancestor's; when no layout
// variant exists anywhere in the chain, the base scraper is returned as-is.
function resolveLayoutScraper(entity, scraper) {
	for (let node = entity; ; node = node.parent) {
		const layoutScraper = scraper?.[node.parameters?.layout];

		if (layoutScraper) {
			return layoutScraper;
		}

		if (!node.parent) {
			return scraper;
		}
	}
}
// Expose the resolver helpers so other modules can share scraper resolution.
module.exports = {
	resolveScraper,
	resolveLayoutScraper,
};

View File

@ -9,7 +9,7 @@ const logger = require('./logger')(__filename);
const knex = require('./knex'); const knex = require('./knex');
const { curateRelease } = require('./releases'); const { curateRelease } = require('./releases');
const include = require('./utils/argv-include')(argv); const include = require('./utils/argv-include')(argv);
const scrapers = require('./scrapers/scrapers'); const { resolveScraper, resolveLayoutScraper } = require('./scrapers/resolve');
const { fetchIncludedEntities } = require('./entities'); const { fetchIncludedEntities } = require('./entities');
const emptyReleases = { uniqueReleases: [], duplicateReleases: [] }; const emptyReleases = { uniqueReleases: [], duplicateReleases: [] };
@ -205,13 +205,8 @@ async function scrapeChannelReleases(scraper, channelEntity, preData) {
} }
async function scrapeChannel(channelEntity, accNetworkReleases) { async function scrapeChannel(channelEntity, accNetworkReleases) {
console.log(channelEntity); const scraper = resolveScraper(channelEntity);
const layoutScraper = resolveLayoutScraper(channelEntity, scraper);
const scraper = scrapers.releases[channelEntity.slug]
|| scrapers.releases[channelEntity.parent?.slug]
|| scrapers.releases[channelEntity.parent?.parent?.slug];
const layoutScraper = scraper?.[channelEntity.parameters?.layout] || scraper?.[channelEntity.parent?.parameters?.layout] || scraper?.[channelEntity.parent?.parent?.parameters?.layout] || scraper;
if (!layoutScraper) { if (!layoutScraper) {
logger.warn(`No scraper found for '${channelEntity.name}' (${channelEntity.parent?.name})`); logger.warn(`No scraper found for '${channelEntity.name}' (${channelEntity.parent?.name})`);