Skipped Babel, updated Node version. Improved deep scrape array merge.

This commit is contained in:
DebaucheryLibrarian 2021-01-13 16:08:19 +01:00
parent ef1d34e4de
commit ae0efccb04
4 changed files with 7 additions and 4 deletions

2
.nvmrc
View File

@ -1 +1 @@
14.15.1
14.15.4

View File

@ -4,7 +4,7 @@
"description": "All the latest porn releases in one place",
"main": "src/app.js",
"scripts": {
"start": "node -r source-map-support/register dist/init.js",
"start": "node -r source-map-support/register src/init.js",
"webpack": "webpack --env=production --mode=production",
"webpack-dev": "webpack --env=development --mode=development",
"webpack-watch": "webpack --progress --colors --watch --env=development --mode=development",

View File

@ -5729,7 +5729,6 @@ const sites = [
name: 'Pinky XXX',
slug: 'pinkyxxx',
url: 'https://pinkyxxx.com',
independent: true,
},
// PORN CZ
{

View File

@ -123,11 +123,15 @@ async function scrapeRelease(baseRelease, entities, type = 'scene') {
: await layoutScraper.fetchMovie(baseRelease.url, entity, baseRelease, include, null);
const mergedRelease = {
...merge(baseRelease, scrapedRelease),
...merge(baseRelease, scrapedRelease, {
dedupeStringsInArrayValues: true,
}),
deep: !!scrapedRelease,
entity,
};
console.log(baseRelease, scrapedRelease, mergedRelease);
if (!mergedRelease.entryId) {
throw Object.assign(new Error('No entry ID supplied'), { code: 'NO_ENTRY_ID' });
}