Compare commits

No commits in common. "dfeb0c08cf3f7d7d1dcf0707b1f29aba0fd819df" and "2539e88f47675096a9782b63bf074fe15b46796a" have entirely different histories.

dfeb0c08cf...2539e88f47
@@ -272,7 +272,7 @@ import Pagination from '../pagination/pagination.vue';
 
 const toggleValues = [true, null, false];
 const boobSizes = 'ABCDEFGHIJKZ'.split('');
-const topCountries = ['AU', 'BR', 'CZ', 'DE', 'JP', 'RU', 'GB', 'US'];
+const topCountries = ['AU', 'BR', 'DE', 'JP', 'RU', 'GB', 'US'];
 
 function updateFilters() {
   this.$router.push({
@@ -1,12 +1,12 @@
 {
   "name": "traxxx",
-  "version": "1.202.0",
+  "version": "1.201.6",
   "lockfileVersion": 2,
   "requires": true,
   "packages": {
     "": {
       "name": "traxxx",
-      "version": "1.202.0",
+      "version": "1.201.6",
       "license": "ISC",
       "dependencies": {
         "@casl/ability": "^5.2.2",
@@ -1,6 +1,6 @@
 {
   "name": "traxxx",
-  "version": "1.202.0",
+  "version": "1.201.6",
   "description": "All the latest porn releases in one place",
   "main": "src/app.js",
   "scripts": {
[24 binary image files deleted; sizes range from 1.5 KiB to 24 KiB]
@@ -52,12 +52,6 @@ const parentNetworks = [
     },
     parent: 'gamma',
   },
-  {
-    slug: 'radical',
-    alias: ['kb productions'],
-    name: 'Radical Entertainment',
-    url: 'https://radicalcash.com',
-  },
 ];
 
 const networks = [
@@ -556,7 +550,6 @@ const networks = [
     slug: 'topwebmodels',
     name: 'Top Web Models',
     url: 'https://tour.topwebmodels.com',
-    parent: 'radical',
     parameters: {
       apiKey: '5b637cd8c4bc59cd13686f1c38dcb780',
     },
@@ -631,12 +624,12 @@ const networks = [
   },
 ];
 
-exports.seed = (knex) => Promise.resolve()
+exports.seed = knex => Promise.resolve()
   .then(async () => {
-    const grandParentNetworkEntries = await upsert('entities', grandParentNetworks.map((network) => ({ ...network, type: 'network' })), ['slug', 'type'], knex);
+    const grandParentNetworkEntries = await upsert('entities', grandParentNetworks.map(network => ({ ...network, type: 'network' })), ['slug', 'type'], knex);
     const grandParentNetworksBySlug = [].concat(grandParentNetworkEntries.inserted, grandParentNetworkEntries.updated).reduce((acc, network) => ({ ...acc, [network.slug]: network.id }), {});
 
-    const parentNetworksWithGrandParent = parentNetworks.map((network) => ({
+    const parentNetworksWithGrandParent = parentNetworks.map(network => ({
       slug: network.slug,
       name: network.name,
       type: network.type || 'network',
@@ -650,7 +643,7 @@ exports.seed = (knex) => Promise.resolve()
     const parentNetworkEntries = await upsert('entities', parentNetworksWithGrandParent, ['slug', 'type'], knex);
     const parentNetworksBySlug = [].concat(parentNetworkEntries.inserted, parentNetworkEntries.updated).reduce((acc, network) => ({ ...acc, [network.slug]: network.id }), {});
 
-    const networksWithParent = networks.map((network) => ({
+    const networksWithParent = networks.map(network => ({
       slug: network.slug,
       name: network.name,
       type: network.type || 'network',
@@ -672,14 +665,14 @@ exports.seed = (knex) => Promise.resolve()
       networkEntries.updated,
     ).reduce((acc, network) => ({ ...acc, [network.slug]: network.id }), {});
 
-    const tagSlugs = networks.map((network) => network.tags).flat().filter(Boolean);
+    const tagSlugs = networks.map(network => network.tags).flat().filter(Boolean);
 
     const tagEntries = await knex('tags').whereIn('slug', tagSlugs);
     const tagIdsBySlug = tagEntries.reduce((acc, tag) => ({ ...acc, [tag.slug]: tag.id }), {});
 
     const tagAssociations = networks
-      .map((network) => (network.tags
-        ? network.tags.map((tagSlug) => ({
+      .map(network => (network.tags
+        ? network.tags.map(tagSlug => ({
           entity_id: networkIdsBySlug[network.slug],
           tag_id: tagIdsBySlug[tagSlug],
           inherit: true,
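Aside: the seed code above calls an `upsert` helper with the shape `upsert(table, rows, conflictKeys, knex)` and reads back `{ inserted, updated }`. The helper's implementation is not part of this compare; a minimal sketch of a compatible version, assuming PostgreSQL and a Knex release with `onConflict` support, could look like this:

// Hypothetical sketch only; not the project's actual utility.
async function upsert(table, rows, conflictKeys, knex) {
  const results = await knex(table)
    .insert(rows)
    .onConflict([].concat(conflictKeys)) // e.g. ['slug', 'type']
    .merge() // update the existing row when the unique key already exists
    .returning(knex.raw('*, (xmax = 0) as _inserted')); // PostgreSQL: xmax = 0 marks freshly inserted rows

  return {
    inserted: results.filter((row) => row._inserted),
    updated: results.filter((row) => !row._inserted),
  };
}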
@@ -7235,14 +7235,6 @@ const sites = [
     parent: 'puretaboo',
   },
   */
-  // RADICAL ENTERTAINMENT
-  {
-    name: 'PurgatoryX',
-    slug: 'purgatoryx',
-    url: 'https://tour.purgatoryx.com',
-    independent: true,
-    parent: 'radical',
-  },
   // REALITY KINGS
   {
     name: 'Look At Her Now',
@@ -10714,7 +10706,7 @@ const sites = [
 ];
 
 /* eslint-disable max-len */
-exports.seed = (knex) => Promise.resolve()
+exports.seed = knex => Promise.resolve()
   .then(async () => {
     const networks = await knex('entities')
       .where('type', 'network')
@@ -10725,7 +10717,7 @@ exports.seed = (knex) => Promise.resolve()
     const tags = await knex('tags').select('*').whereNull('alias_for');
     const tagsMap = tags.reduce((acc, { id, slug }) => ({ ...acc, [slug]: id }), {});
 
-    const sitesWithNetworks = sites.map((site) => ({
+    const sitesWithNetworks = sites.map(site => ({
       slug: site.slug,
       name: site.name,
       type: site.type || 'channel',
@@ -10743,8 +10735,8 @@ exports.seed = (knex) => Promise.resolve()
     const { inserted, updated } = await upsert('entities', sitesWithNetworks, ['slug', 'type'], knex);
     const sitesMap = [].concat(inserted, updated).reduce((acc, { id, slug }) => ({ ...acc, [slug]: id }), {});
 
-    const tagAssociations = sites.map((site) => (site.tags
-      ? site.tags.map((tagSlug) => ({
+    const tagAssociations = sites.map(site => (site.tags
+      ? site.tags.map(tagSlug => ({
         entity_id: sitesMap[site.slug],
         tag_id: tagsMap[tagSlug],
         inherit: true,
@@ -178,7 +178,7 @@ async function scrapeReleases(baseReleases, entitiesBySlug, type) {
       return [slug, { ...entity, preData }];
     }
 
-    return [slug, entity];
+    return null;
  }));
 
   const entitiesWithBeforeDataBySlug = Object.fromEntries(entitiesWithBeforeDataEntries.filter(Boolean));
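Aside: the null entries produced above are dropped by `filter(Boolean)` before `Object.fromEntries` builds the lookup map. A standalone illustration with hypothetical values:

// Entries are [slug, entity] pairs, or null for skipped entities (sample data).
const entitiesWithBeforeDataEntries = [
  ['examplechannel', { preData: { token: 'abc' } }],
  null,
];

const entitiesWithBeforeDataBySlug = Object.fromEntries(entitiesWithBeforeDataEntries.filter(Boolean));
// -> { examplechannel: { preData: { token: 'abc' } } }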
@@ -179,8 +179,6 @@ async function getSession(site, parameters) {
   const cookieString = await cookieJar.getCookieStringAsync(sessionUrl);
   const { instance_token: instanceToken } = cookie.parse(cookieString);
 
-  console.log(site.name, instanceToken);
-
   return { session, instanceToken };
 }
 
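Aside: `getCookieStringAsync` is the promisified form of tough-cookie's `CookieJar#getCookieString`, and `cookie.parse` turns the raw cookie header string into an object. A standalone sketch of the pattern, assuming Bluebird's `promisifyAll` and the tough-cookie and cookie packages:

// Illustrative only; readInstanceToken is a hypothetical helper name.
const bluebird = require('bluebird');
const { CookieJar } = require('tough-cookie');
const cookie = require('cookie');

const cookieJar = bluebird.promisifyAll(new CookieJar());

async function readInstanceToken(sessionUrl) {
  const cookieString = await cookieJar.getCookieStringAsync(sessionUrl); // e.g. 'foo=1; instance_token=abc'
  const { instance_token: instanceToken } = cookie.parse(cookieString); // -> { foo: '1', instance_token: 'abc' }

  return instanceToken;
}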
@@ -242,8 +240,6 @@ async function fetchLatest(site, page = 1, options) {
 
   const { session, instanceToken } = options.beforeNetwork?.headers?.Instance ? options.beforeNetwork : await getSession(site, options.parameters);
 
-  console.log('fetch', instanceToken);
-
   const beforeDate = moment().add('1', 'day').format('YYYY-MM-DD');
   const limit = 24;
   const apiUrl = site.parameters?.native || site.parameters?.extract
@@ -1,152 +0,0 @@
-'use strict';
-
-const qu = require('../utils/qu');
-const http = require('../utils/http');
-const slugify = require('../utils/slugify');
-const { feetInchesToCm, lbsToKg } = require('../utils/convert');
-
-function scrapeAll(scenes) {
-  return scenes.map(({ query }) => {
-    const release = {};
-
-    release.title = query.cnt('.title');
-    release.url = query.url('.title a');
-    release.entryId = new URL(release.url).pathname.match(/\/view\/(\d+)/)[1];
-
-    release.date = query.date('.pub-date', 'MMM DD, YYYY');
-    release.duration = query.duration('.video-duration');
-
-    release.actors = query.all('.models a').map((el) => ({
-      name: query.cnt(el),
-      url: query.url(el, null),
-    }));
-
-    if (query.exists('.thumb-big')) { // updates page
-      release.poster = query.img('.thumb-big', 'data-image') || JSON.parse(query.el('.thumbnails-wrap a', 'data-images'));
-      release.photos = [query.img('.thumb-top', 'data-image'), query.img('.thumb-bottom', 'data-image')];
-    }
-
-    if (query.exists('.thumbnails-wrap')) { // actor page
-      try {
-        const images = JSON.parse(query.el('.thumbnails-wrap a', 'data-images'));
-
-        release.poster = images.slice(0, 1)[0];
-        release.photos = images.slice(1);
-      } catch (error) {
-        // images probably not available
-      }
-    }
-
-    console.log(release.photos);
-
-    return release;
-  });
-}
-
-function scrapeScene({ query }, url) {
-  const release = {};
-
-  release.title = query.cnt('.title');
-  release.entryId = new URL(url).pathname.match(/\/view\/(\d+)/)[1];
-  release.date = query.date('.date', 'MMMM DD, YYYY', /\w+ \d{1,2}, \d{4}/);
-
-  release.description = query.cnt('.description p');
-  release.duration = query.duration('.total-time');
-
-  release.actors = query.all('.model-wrap li').map((el) => ({
-    name: query.cnt(el, 'h5'),
-    url: query.url(el, 'a'),
-    avatar: query.img(el),
-  }));
-
-  release.poster = query.poster();
-  release.photos = query.urls('.photos-slider a');
-  release.trailer = query.video();
-
-  release.comment = query.cnt('.series');
-
-  console.log(release);
-
-  return release;
-}
-
-async function fetchLatest(channel, page) {
-  const res = await qu.getAll(`${channel.url}/episodes?page=${page}`, '.content-item');
-
-  if (res.ok) {
-    return scrapeAll(res.items, channel);
-  }
-
-  return res.status;
-}
-
-function scrapeProfile({ query }, url) {
-  const profile = { url };
-
-  const bio = Object.fromEntries(query.all('.model-desc li').map((el) => [slugify(query.cnt(el, 'span'), '_'), query.text(el)]));
-
-  profile.description = bio.bio;
-
-  profile.dateOfBirth = qu.extractDate(bio.birthdate, 'YYYY-MM-DD');
-  profile.birthPlace = bio.birthplace;
-
-  profile.hairColor = bio.hair_color;
-  profile.eyes = bio.eye_color;
-
-  profile.height = feetInchesToCm(bio.height);
-  profile.weight = lbsToKg(bio.weight);
-  profile.measurements = bio.measurements;
-
-  profile.avatar = query.img('.model-pic img');
-
-  profile.scenes = scrapeAll(qu.initAll(query.all('.content-item')));
-
-  console.log(bio);
-  console.log(profile);
-
-  return profile;
-}
-
-async function searchActor(baseActor, channel) {
-  const searchRes = await http.post(`${channel.url}/search-preview`, { q: slugify(baseActor.name, ' ') }, {
-    encodeJSON: false,
-    headers: {
-      'Accept-Language': 'en-US,en;',
-    },
-  });
-
-  if (searchRes.ok) {
-    const actorUrl = searchRes.body.find((item) => item.type === 'model' && slugify(item.title) === baseActor.slug)?.url;
-
-    return actorUrl || null;
-  }
-
-  return null;
-}
-
-async function fetchProfile(baseActor, context, include, retry = false) {
-  const actorUrl = (!retry && baseActor.url) || await searchActor(baseActor, context.entity);
-
-  if (!actorUrl) {
-    return null;
-  }
-
-  const res = await qu.get(actorUrl);
-
-  if (res.ok) {
-    return scrapeProfile(res.item, actorUrl);
-  }
-
-  if (baseActor.url) {
-    return fetchProfile(baseActor, context, include, true);
-  }
-
-  return res.status;
-}
-
-module.exports = {
-  fetchLatest,
-  fetchProfile,
-  scrapeAll,
-  scrapeScene,
-};
@@ -51,7 +51,6 @@ const pascalssubsluts = require('./pascalssubsluts'); // reserved keyword
 const pierrewoodman = require('./pierrewoodman');
 const pinkyxxx = require('./pinkyxxx');
 const privateNetwork = require('./private'); // reserved keyword
-const purgatoryx = require('./purgatoryx'); // reserved keyword
 const score = require('./score');
 const spizoo = require('./spizoo');
 const teamskeet = require('./teamskeet');
@@ -137,7 +136,6 @@ const scrapers = {
   porncz,
   pornpros: whalemember,
   private: privateNetwork,
-  purgatoryx,
   score,
   sexyhub: mindgeek,
   spizoo,
@@ -257,7 +255,6 @@ const scrapers = {
   povperverts: fullpornnetwork,
   povpornstars: hush,
   private: privateNetwork,
-  purgatoryx,
   realitykings: mindgeek,
   realvr: badoink,
   roccosiffredi: famedigital,
@@ -39,11 +39,9 @@ function filterLocalUniqueReleases(releases, accReleases) {
 
 async function filterUniqueReleases(releases) {
   const releaseIdentifiers = releases
-    .map((release) => [release.entity.id, release.entryId.toString()]);
+    .map((release) => [release.entity.id, release.entryId]);
 
-  console.log(releaseIdentifiers.length);
-
-  const duplicateReleaseEntriesQuery = knex('releases')
+  const duplicateReleaseEntries = await knex('releases')
     .select(knex.raw('releases.*, row_to_json(entities) as entity'))
     .leftJoin('entities', 'entities.id', 'releases.entity_id')
     .whereIn(['entity_id', 'entry_id'], releaseIdentifiers)
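Aside: the composite `whereIn` above passes an array of columns together with an array of value tuples; on PostgreSQL, Knex compiles this to a tuple IN clause. A standalone sketch with hypothetical identifiers:

// Illustrative only; sample [entity_id, entry_id] pairs and a hypothetical helper name.
async function findDuplicateReleases(knex) {
  const releaseIdentifiers = [
    [5, '1234'],
    [7, '5678'],
  ];

  // Compiles to roughly:
  // select * from "releases" where ("entity_id", "entry_id") in ((5, '1234'), (7, '5678'))
  return knex('releases')
    .whereIn(['entity_id', 'entry_id'], releaseIdentifiers);
}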
@@ -57,10 +55,6 @@ async function filterUniqueReleases(releases)
       .orWhere(knex.raw('updated_at - date > INTERVAL \'1 day\'')); // scene was updated after the release date, no updates expected
     });
 
-  console.log(duplicateReleaseEntriesQuery.toString());
-
-  const duplicateReleaseEntries = await duplicateReleaseEntriesQuery;
-
   const duplicateReleases = duplicateReleaseEntries.map((release) => curateRelease(release));
   const duplicateReleasesByEntityIdAndEntryId = duplicateReleases.reduce(mapReleasesToEntityIdAndEntryId, {});
 
@@ -268,7 +262,7 @@ async function scrapeNetworkSequential(networkEntity) {
 }
 
 async function scrapeNetworkParallel(networkEntity) {
-  const beforeNetwork = await networkEntity.scraper?.beforeNetwork?.(networkEntity);
+  const beforeNetwork = await networkEntity.scraper.beforeNetwork?.(networkEntity);
 
   return Promise.map(
     networkEntity.includedChildren,
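Aside: the two spellings in the last hunk guard different things. `networkEntity.scraper?.beforeNetwork?.(...)` also tolerates a missing `scraper`, while `networkEntity.scraper.beforeNetwork?.(...)` throws if `scraper` itself is undefined. A standalone illustration with hypothetical objects:

// Hypothetical entities for illustration.
const withScraper = { scraper: {} };
const withoutScraper = {};

withScraper.scraper.beforeNetwork?.(withScraper); // undefined: a missing method is tolerated
withoutScraper.scraper?.beforeNetwork?.(withoutScraper); // undefined: a missing scraper is tolerated

// Without the first ?., a missing scraper throws:
// withoutScraper.scraper.beforeNetwork?.(withoutScraper) -> TypeError: Cannot read properties of undefined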