Selecting included networks with infinite parent depth to facilitate scraper resolve.
parent 46c0b269c3
commit d5cdfb36a9

src/deep.js | 30
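In short: scraper lookup previously hardcoded two levels of parents (channel, parent, grandparent), so a channel nested any deeper could fail to resolve a scraper even though an ancestor network had one registered. The included-entities query below now returns each entity with its full ancestor chain, and a new shared module walks that chain recursively. A minimal sketch of the failure mode this commit fixes; the sample slugs and nesting are hypothetical:

const { resolveScraper } = require('./scrapers/resolve');

// Hypothetical channel nested four levels deep; the scraper is registered
// under the root network's slug. The old lookup only tried entity.slug,
// entity.parent?.slug and entity.parent?.parent?.slug, stopping one level short.
const channel = {
	slug: 'deep-channel',
	parent: {
		slug: 'sub-network',
		parent: {
			slug: 'mid-network',
			parent: { slug: 'top-network', parent: null },
		},
	},
};

// The recursive resolver keeps climbing until a registered slug is found
// (or returns null): 'deep-channel' -> 'sub-network' -> 'mid-network' -> 'top-network'.
const scraper = resolveScraper(channel);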
src/deep.js
@@ -5,10 +5,10 @@ const merge = require('object-merge-advanced');
 const argv = require('./argv');
 const include = require('./utils/argv-include')(argv);
+const { resolveScraper, resolveLayoutScraper } = require('./scrapers/resolve');
 const { fetchReleaseEntities, urlToSiteSlug } = require('./entities');
 const logger = require('./logger')(__filename);
 const qu = require('./utils/qu');
-const scrapers = require('./scrapers/scrapers');
 
 function toBaseReleases(baseReleasesOrUrls, entity = null) {
 	if (!baseReleasesOrUrls) {
@@ -68,30 +68,6 @@ async function fetchScene(scraper, url, entity, baseRelease, options) {
 	return null;
 }
 
-function findScraper(entity) {
-	if (scrapers.releases[entity.slug]) {
-		return scrapers.releases[entity.slug];
-	}
-
-	if (entity.parent) {
-		return findScraper(entity.parent);
-	}
-
-	return null;
-}
-
-function findLayoutScraper(entity, scraper) {
-	if (scraper?.[entity.parameters?.layout]) {
-		return scraper[entity.parameters.layout];
-	}
-
-	if (entity.parent) {
-		return findLayoutScraper(entity.parent, scraper);
-	}
-
-	return scraper;
-}
-
 async function scrapeRelease(baseRelease, entitiesBySlug, type = 'scene') {
 	const entity = baseRelease.entity || entitiesBySlug[urlToSiteSlug(baseRelease.url)];
@@ -107,8 +83,8 @@ async function scrapeRelease(baseRelease, entitiesBySlug, type = 'scene') {
 		};
 	}
 
-	const scraper = findScraper(entity);
-	const layoutScraper = findLayoutScraper(entity, scraper);
+	const scraper = resolveScraper(entity);
+	const layoutScraper = resolveLayoutScraper(entity, scraper);
 
 	if (!layoutScraper) {
 		logger.warn(`Could not find scraper for ${baseRelease.url}`);
src/entities.js
@@ -128,7 +128,7 @@ async function fetchIncludedEntities() {
 				AND entities.type = 'network')
 			OR (entities.slug = ANY(:excludedChannels)
 				AND entities.type = 'channel'))
 		)
 	), included_per_network AS (
 		/* select recursive channels as children of networks */
 		SELECT
 			parents.*,
@@ -146,10 +146,28 @@ async function fetchIncludedEntities() {
 		WHERE
 			included_entities.type = 'channel'
 		GROUP BY
-			parents.id, grandparents.id;
+			parents.id, grandparents.id
+	), entity_tree as (
+		/* get recursive parents of networks (necessary for scraper resolve) */
+		SELECT to_jsonb(included_per_network) as entity,
+			parent_id,
+			array['parent'] as parent_path,
+			0 as depth
+		FROM included_per_network
+
+		UNION ALL
+
+		SELECT jsonb_set(entity_tree.entity, entity_tree.parent_path, to_jsonb(entities)),
+			entities.parent_id,
+			entity_tree.parent_path || array['parent'],
+			depth + 1
+		FROM entity_tree
+		JOIN entities ON entity_tree.parent_id = entities.id
+	)
+	SELECT entity FROM entity_tree WHERE parent_id is null;
 	`, include);
 
-	const curatedNetworks = rawNetworks.rows.map(entity => curateEntity(entity, true));
+	const curatedNetworks = rawNetworks.rows.map(({ entity }) => curateEntity(entity, true));
 
 	return curatedNetworks;
 }
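How the new entity_tree CTE works: the non-recursive branch seeds each included network with parent_path = array['parent'] and depth 0; each recursive step joins the current parent_id to entities, writes that ancestor into the JSONB document at the accumulated path via jsonb_set, then appends another 'parent' segment for the next step. Only rows whose walk reached a root (parent_id is null) survive the final SELECT, so each row carries one complete chain, which is also why curateEntity now destructures { entity } from each row. A simplified sketch of the resulting document (slugs hypothetical):

// depth 0: to_jsonb(included_per_network)       -> { slug: 'network-a', parent_id: 7, ... }
// depth 1: jsonb_set(entity, '{parent}', ...)   -> first ancestor nested one level down
// depth 2: jsonb_set(entity, '{parent,parent}', ...), and so on until parent_id is null
const row = {
	entity: {
		slug: 'network-a',
		parent: {
			slug: 'parent-network-b',
			parent: { slug: 'root-network-c', parent_id: null },
		},
	},
};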
@@ -164,7 +182,7 @@ async function fetchReleaseEntities(baseReleases) {
 	));
 
 	const entities = await knex.raw(`
-		WITH RECURSIVE tree as (
+		WITH RECURSIVE entity_tree as (
 			SELECT to_jsonb(entities) as entity,
 				parent_id,
 				array['parent'] as parent_path,
@@ -174,14 +192,14 @@ async function fetchReleaseEntities(baseReleases) {
 
 		UNION ALL
 
-		SELECT jsonb_set(tree.entity, tree.parent_path, to_jsonb(entities)),
+		SELECT jsonb_set(entity_tree.entity, entity_tree.parent_path, to_jsonb(entities)),
 			entities.parent_id,
-			tree.parent_path || array['parent'],
+			entity_tree.parent_path || array['parent'],
 			depth + 1
-		FROM tree
-		JOIN entities ON tree.parent_id = entities.id
+		FROM entity_tree
+		JOIN entities ON entity_tree.parent_id = entities.id
 		)
-		SELECT entity FROM tree WHERE parent_id is null
+		SELECT entity FROM entity_tree WHERE parent_id is null
 		ORDER BY entity->'type' ASC;
 	`, { entitySlugs });
 
src/scrapers/resolve.js (new file)
@@ -0,0 +1,32 @@
+'use strict';
+
+const scrapers = require('./scrapers');
+
+function resolveScraper(entity) {
+	if (scrapers.releases[entity.slug]) {
+		return scrapers.releases[entity.slug];
+	}
+
+	if (entity.parent) {
+		return resolveScraper(entity.parent);
+	}
+
+	return null;
+}
+
+function resolveLayoutScraper(entity, scraper) {
+	if (scraper?.[entity.parameters?.layout]) {
+		return scraper[entity.parameters.layout];
+	}
+
+	if (entity.parent) {
+		return resolveLayoutScraper(entity.parent, scraper);
+	}
+
+	return scraper;
+}
+
+module.exports = {
+	resolveScraper,
+	resolveLayoutScraper,
+};
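For reference, a hedged usage sketch of the new module, mirroring how deep.js and updates.js call it; the entity data and layout key are invented for illustration:

const { resolveScraper, resolveLayoutScraper } = require('./scrapers/resolve');

// Hypothetical channel whose scraper and layout parameter both live on its parent.
const channel = {
	slug: 'some-channel',
	parameters: null,
	parent: {
		slug: 'some-network',
		parameters: { layout: 'b' },
		parent: null,
	},
};

// Climbs the chain until scrapers.releases['some-network'] matches (null if none does).
const scraper = resolveScraper(channel);

// Returns scraper.b if the scraper defines a 'b' layout for the inherited
// parameters; otherwise falls back to the scraper itself.
const layoutScraper = resolveLayoutScraper(channel, scraper);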
src/updates.js
@@ -9,7 +9,7 @@ const logger = require('./logger')(__filename);
 const knex = require('./knex');
 const { curateRelease } = require('./releases');
 const include = require('./utils/argv-include')(argv);
-const scrapers = require('./scrapers/scrapers');
+const { resolveScraper, resolveLayoutScraper } = require('./scrapers/resolve');
 const { fetchIncludedEntities } = require('./entities');
 
 const emptyReleases = { uniqueReleases: [], duplicateReleases: [] };
@@ -205,13 +205,8 @@ async function scrapeChannelReleases(scraper, channelEntity, preData) {
 }
 
 async function scrapeChannel(channelEntity, accNetworkReleases) {
-	console.log(channelEntity);
-
-	const scraper = scrapers.releases[channelEntity.slug]
-		|| scrapers.releases[channelEntity.parent?.slug]
-		|| scrapers.releases[channelEntity.parent?.parent?.slug];
-
-	const layoutScraper = scraper?.[channelEntity.parameters?.layout] || scraper?.[channelEntity.parent?.parameters?.layout] || scraper?.[channelEntity.parent?.parent?.parameters?.layout] || scraper;
+	const scraper = resolveScraper(channelEntity);
+	const layoutScraper = resolveLayoutScraper(channelEntity, scraper);
 
 	if (!layoutScraper) {
 		logger.warn(`No scraper found for '${channelEntity.name}' (${channelEntity.parent?.name})`);