forked from DebaucheryLibrarian/traxxx
Include all children of included networks; separate included children into a dedicated property.
This commit is contained in:
parent
1b407254a7
commit
4e559f63e3
|
@ -156,7 +156,8 @@ exports.up = knex => Promise.resolve()
|
|||
|
||||
table.integer('parent_id', 12)
|
||||
.references('id')
|
||||
.inTable('entities');
|
||||
.inTable('entities')
|
||||
.index();
|
||||
|
||||
table.text('name');
|
||||
table.text('slug', 32);
|
||||
|
|
|
@ -39,14 +39,11 @@ function curateEntity(entity, includeParameters = false) {
|
|||
}, includeParameters));
|
||||
}
|
||||
|
||||
if (entity.siblings) {
|
||||
curatedEntity.parent = {
|
||||
...curatedEntity.parent,
|
||||
children: entity.siblings.map(sibling => curateEntity({
|
||||
...sibling,
|
||||
parent: curatedEntity.parent,
|
||||
}, includeParameters)),
|
||||
};
|
||||
if (entity.included_children) {
|
||||
curatedEntity.includedChildren = entity.included_children.map(child => curateEntity({
|
||||
...child,
|
||||
parent: curatedEntity.id ? curatedEntity : null,
|
||||
}, includeParameters));
|
||||
}
|
||||
|
||||
if (entity.tags) {
|
||||
|
@ -119,7 +116,11 @@ async function fetchIncludedEntities() {
|
|||
)
|
||||
/* select recursive channels as children of networks */
|
||||
SELECT
|
||||
parents.*, json_agg(included_entities ORDER BY included_entities.id) as children
|
||||
parents.*,
|
||||
json_agg(included_entities ORDER BY included_entities.id) included_children,
|
||||
(SELECT json_agg(children)
|
||||
FROM entities AS children
|
||||
WHERE children.parent_id = parents.id) children
|
||||
FROM
|
||||
included_entities
|
||||
LEFT JOIN
|
||||
|
@ -130,9 +131,6 @@ async function fetchIncludedEntities() {
|
|||
parents.id;
|
||||
`, include);
|
||||
|
||||
// console.log(rawNetworks.rows[0]);
|
||||
// console.log(rawNetworks.toString());
|
||||
|
||||
const curatedNetworks = rawNetworks.rows.map(entity => curateEntity(entity, true));
|
||||
|
||||
return curatedNetworks;
|
||||
|
|
|
@ -6,8 +6,6 @@ const slugify = require('../utils/slugify');
|
|||
function matchChannel(release, channel) {
|
||||
const series = channel.children || channel.parent.children;
|
||||
|
||||
// console.log(series?.length, release.url, channel.name);
|
||||
|
||||
const serieNames = series.reduce((acc, serie) => ({
|
||||
...acc,
|
||||
[serie.name]: serie,
|
||||
|
@ -19,7 +17,8 @@ function matchChannel(release, channel) {
|
|||
const serieName = release.title.match(new RegExp(Object.keys(serieNames).join('|'), 'i'))?.[0];
|
||||
const serie = serieName && serieNames[slugify(serieName, '')];
|
||||
|
||||
console.log(release.title, serieName);
|
||||
console.log(release.title);
|
||||
console.log(serieName);
|
||||
|
||||
if (serie) {
|
||||
return {
|
||||
|
|
|
@ -230,7 +230,7 @@ async function scrapeChannel(channelEntity, accNetworkReleases) {
|
|||
|
||||
async function scrapeNetworkSequential(networkEntity) {
|
||||
const releases = await Promise.reduce(
|
||||
networkEntity.children,
|
||||
networkEntity.includedChildren,
|
||||
async (chain, channelEntity) => {
|
||||
const accNetworkReleases = await chain;
|
||||
const { uniqueReleases, duplicateReleases } = await scrapeChannel(channelEntity, accNetworkReleases);
|
||||
|
@ -248,7 +248,7 @@ async function scrapeNetworkSequential(networkEntity) {
|
|||
|
||||
async function scrapeNetworkParallel(networkEntity) {
|
||||
return Promise.map(
|
||||
networkEntity.children,
|
||||
networkEntity.includedChildren,
|
||||
async (channelEntity) => {
|
||||
const { uniqueReleases } = await scrapeChannel(channelEntity, networkEntity);
|
||||
|
||||
|
|
Loading…
Reference in New Issue