Updated dependencies. Added periodic memory logger.

DebaucheryLibrarian
2021-11-20 23:59:15 +01:00
parent a867817dc1
commit 26539b74a5
109 changed files with 10238 additions and 10833 deletions
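
The periodic memory logger named in the commit message is not among the hunks shown below. As a rough sketch only, such a logger in a Node.js codebase like this one could combine process.memoryUsage() with the logger factory pattern (require('../logger')(__filename)) that appears elsewhere in this diff; the file location, interval, log level and message format here are assumptions, not the actual implementation.

// Hypothetical sketch; the real memory logger added by this commit is not shown in these hunks.
const logger = require('./logger')(__filename); // logger factory pattern used throughout the repo

function startMemoryLogger(intervalMs = 60 * 1000) {
  setInterval(() => {
    const { rss, heapUsed, heapTotal } = process.memoryUsage(); // values are in bytes

    // log level and message format are assumptions
    logger.info(`memory: rss ${(rss / 1048576).toFixed(1)} MB, heap ${(heapUsed / 1048576).toFixed(1)} / ${(heapTotal / 1048576).toFixed(1)} MB`);
  }, intervalMs).unref(); // unref() so the timer does not keep the process alive on its own
}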

View File

@@ -22,13 +22,13 @@ async function bulkUpsert(table, items, conflict, update = true, chunkSize) {
const chunked = chunk(items, chunkSize);
const queries = chunked
- .map(chunkItems => knex.raw(updated || ':query RETURNING *;', {
+ .map((chunkItems) => knex.raw(updated || ':query RETURNING *;', {
query: knex(table).insert(chunkItems),
}).transacting(transaction));
const responses = await Promise.all(queries);
- return responses.flat().map(response => response.rows).flat();
+ return responses.flat().map((response) => response.rows).flat();
});
}
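
Every change in this hunk, and in nearly every hunk that follows, is the same mechanical edit: single arrow-function parameters gain wrapping parentheses. That is what ESLint's auto-fix produces when the arrow-parens rule is set to 'always' (the rule's default option, and what newer versions of common shareable configs such as airbnb-base enforce), so the style sweep presumably fell out of the dependency update mentioned in the commit message. A sketch of the corresponding rule entry, as an assumption; the actual lint config change is not part of the hunks shown here:

// .eslintrc.js sketch (assumption; the real config diff is not included in this view)
module.exports = {
  rules: {
    'arrow-parens': ['error', 'always'], // require parens around single parameters: (x) => x
  },
};

// Running `npx eslint --fix .` with this rule active rewrites `x => x` to `(x) => x`,
// which matches the before/after pairs throughout this commit.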

View File

@@ -7,7 +7,7 @@ function capitalize(string, { trim = true, uncapitalize = false } = {}) {
const capitalized = string
.split(/\s+/)
- .map(component => `${component.charAt(0).toUpperCase()}${uncapitalize ? component.slice(1).toLowerCase() : component.slice(1)}`)
+ .map((component) => `${component.charAt(0).toUpperCase()}${uncapitalize ? component.slice(1).toLowerCase() : component.slice(1)}`)
.join(' ');
return trim ? capitalized.trim() : capitalized;
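
For reference, the behaviour of capitalize as shown in this hunk, with inputs that are illustrative assumptions rather than values from the repo:

// Usage sketch; inputs and expected outputs are illustrative.
capitalize('jane DOE', { uncapitalize: true }); // => 'Jane Doe'  (rest of each word lowercased)
capitalize('jane DOE');                         // => 'Jane DOE'  (existing capitals left alone)
capitalize('  jane   doe  ');                   // => 'Jane Doe'  (split on whitespace, trimmed by default)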

View File

@@ -62,7 +62,7 @@ function convertManyApi(input, to) {
const curatedInput = input
.replace('\'', 'ft')
.replace(/"|''/, 'in')
- .replace(/\d+ft\s*\d+\s*$/, match => `${match}in`); // height without any inch symbol
+ .replace(/\d+ft\s*\d+\s*$/, (match) => `${match}in`); // height without any inch symbol
return Math.round(convertMany(curatedInput).to(to)) || null;
}
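
The hunk above normalizes imperial height strings before handing them to the convertMany helper (whose import sits outside the visible hunk). A usage sketch with assumed inputs:

// Usage sketch; inputs are illustrative.
convertManyApi('5\'7"', 'cm'); // normalized to '5ft7in'; 5 ft 7 in is about 170.18 cm, so the rounded result is 170
convertManyApi("5'7", 'cm');   // no inch symbol, so the trailing regex appends 'in' => '5ft7in' => 170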

View File

@@ -8,7 +8,7 @@ async function getFileEntries(location) {
}
const file = await fs.promises.readFile(location, 'utf-8');
- const entries = file.split(/\n/).map(entry => entry.trim()).filter(Boolean);
+ const entries = file.split(/\n/).map((entry) => entry.trim()).filter(Boolean);
return entries;
}
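
getFileEntries, shown above, turns a plain-text list file into trimmed, non-empty lines. A usage sketch with a hypothetical file:

// Usage sketch; the file name and contents are hypothetical.
// include.txt contains:
//   first-site
//   second-site
//
//     third-site
const entries = await getFileEntries('./include.txt');
// => ['first-site', 'second-site', 'third-site']  (lines trimmed, blank lines dropped)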

View File

@@ -27,7 +27,7 @@ async function fetchSource(link) {
const tempFileStream = fs.createWriteStream(tempFilePath);
const hashStream = new PassThrough();
- hashStream.on('data', chunk => hasher.write(chunk));
+ hashStream.on('data', (chunk) => hasher.write(chunk));
try {
const res = await http.get(link, null, {
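
The hunk above mirrors downloaded chunks into a hash stream while writing them to a temp file. A standalone sketch of that pattern, not the project's actual fetchSource, assuming only Node's built-in crypto and stream modules:

// Standalone sketch of the hash-while-downloading pattern; names are illustrative.
const crypto = require('crypto');
const fs = require('fs');
const { PassThrough, pipeline } = require('stream');

function writeAndHash(sourceStream, tempFilePath, done) {
  const hasher = crypto.createHash('md5');
  const hashStream = new PassThrough();

  hashStream.on('data', (chunk) => hasher.update(chunk)); // every chunk also feeds the hash

  pipeline(sourceStream, hashStream, fs.createWriteStream(tempFilePath), (error) => {
    if (error) return done(error);
    return done(null, hasher.digest('hex')); // file is on disk and its hash is known
  });
}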

View File

@@ -102,7 +102,7 @@ function all(context, selector, attrArg, applyTrim = true) {
const attr = attrArg === true ? 'textContent' : attrArg;
if (attr) {
- return Array.from(context.querySelectorAll(selector), el => q(el, null, attr, applyTrim));
+ return Array.from(context.querySelectorAll(selector), (el) => q(el, null, attr, applyTrim));
}
return Array.from(context.querySelectorAll(selector));
@@ -155,7 +155,7 @@ function jsons(context, selector) {
function htmls(context, selector) {
const els = all(context, selector, null, true);
- return els.map(el => el.innerHTML);
+ return els.map((el) => el.innerHTML);
}
function texts(context, selector, applyTrim = true, filter = true) {
@@ -163,8 +163,8 @@ function texts(context, selector, applyTrim = true, filter = true) {
if (!el) return null;
const nodes = Array.from(el.childNodes)
- .filter(node => node.nodeName === '#text')
- .map(node => (applyTrim ? trim(node.textContent) : node.textContent));
+ .filter((node) => node.nodeName === '#text')
+ .map((node) => (applyTrim ? trim(node.textContent) : node.textContent));
return filter ? nodes.filter(Boolean) : nodes;
}
@@ -272,7 +272,7 @@ function images(context, selector = 'img', attr, { origin, protocol = 'https' }
const imageEls = all(context, selector, attribute);
- return imageEls.map(imageEl => prefixUrl(imageEl, origin, protocol));
+ return imageEls.map((imageEl) => prefixUrl(imageEl, origin, protocol));
}
function url(context, selector = 'a', attr = 'href', { origin, protocol = 'https', object = false } = {}) {
@@ -289,7 +289,7 @@ function url(context, selector = 'a', attr = 'href', { origin, protocol = 'https
function urls(context, selector = 'a', attr = 'href', { origin, protocol = 'https' } = {}) {
const urlEls = all(context, selector, attr);
- return attr ? urlEls.map(urlEl => prefixUrl(urlEl, origin, protocol)) : urlEls;
+ return attr ? urlEls.map((urlEl) => prefixUrl(urlEl, origin, protocol)) : urlEls;
}
function sourceSet(context, selector, attr = 'srcset', options = {}) {
@@ -330,7 +330,7 @@ function sourceSet(context, selector, attr = 'srcset', options = {}) {
return sources;
}
- return sources.map(source => source.url);
+ return sources.map((source) => source.url);
}
function poster(context, selector = 'video', attr = 'poster', { origin, protocol = 'https' } = {}) {
@@ -348,7 +348,7 @@ function video(context, selector = 'source', attr = 'src', { origin, protocol =
function videos(context, selector = 'source', attr = 'src', { origin, protocol = 'https' } = {}) {
const trailerEls = all(context, selector, attr);
- return attr ? trailerEls.map(trailerEl => prefixUrl(trailerEl, origin, protocol)) : trailerEls;
+ return attr ? trailerEls.map((trailerEl) => prefixUrl(trailerEl, origin, protocol)) : trailerEls;
}
function duration(context, selector, match, attr = 'textContent') {
@@ -499,11 +499,11 @@ function init(context, selector, window) {
function initAll(context, selector, window) {
if (Array.isArray(context)) {
- return context.map(element => init(element, null, window));
+ return context.map((element) => init(element, null, window));
}
return Array.from(context.querySelectorAll(selector))
- .map(element => init(element, null, window));
+ .map((element) => init(element, null, window));
}
function extract(htmlValue, selector, options) {
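
The helpers touched in this file (urls, images, videos, sourceSet) all route element attributes through a prefixUrl step driven by the origin and protocol options in their signatures. The real prefixUrl is outside the visible hunks; as a standalone illustration of the behaviour those signatures imply:

// Illustrative sketch only; not the module's actual prefixUrl implementation.
function prefixUrlSketch(value, origin, protocol = 'https') {
  if (/^http/.test(value)) return value;                  // already absolute
  if (/^\/\//.test(value)) return `${protocol}:${value}`; // protocol-relative
  return `${origin}${value}`;                             // site-relative, needs the origin
}

prefixUrlSketch('/video/123', 'https://example.com'); // => 'https://example.com/video/123'
prefixUrlSketch('//cdn.example.com/a.jpg', null);     // => 'https://cdn.example.com/a.jpg'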

View File

@@ -61,7 +61,7 @@ function slugify(string, delimiter = '-', {
if (accSlug.length < limit) {
if (removeAccents) {
- return accSlug.replace(/[à-ÿ]/g, match => substitutes[match] || '');
+ return accSlug.replace(/[à-ÿ]/g, (match) => substitutes[match] || '');
}
return accSlug;
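
The slugify hunk above shows the accent-stripping branch; a usage sketch with assumed inputs, assuming the function also lowercases and that removeAccents and the '-' delimiter are the defaults (none of which is visible in this hunk):

// Usage sketch; expected outputs are assumptions based on the options shown above.
slugify('Crème Brûlée');      // => 'creme-brulee' (accented characters mapped via the substitutes table)
slugify('Crème Brûlée', '_'); // => 'creme_brulee'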

View File

@@ -6,15 +6,15 @@ const logger = require('../logger')(__filename);
async function upsert(table, items, identifier = ['id'], _knex) {
const identifiers = Array.isArray(identifier) ? identifier : [identifier];
- const duplicates = await knex(table).whereIn(identifiers, items.map(item => identifiers.map(identifierX => item[identifierX])));
+ const duplicates = await knex(table).whereIn(identifiers, items.map((item) => identifiers.map((identifierX) => item[identifierX])));
const duplicatesByIdentifiers = duplicates.reduce((acc, duplicate) => {
- const duplicateIdentifier = identifiers.map(identifierX => duplicate[identifierX]).toString();
+ const duplicateIdentifier = identifiers.map((identifierX) => duplicate[identifierX]).toString();
return { ...acc, [duplicateIdentifier]: duplicate };
}, {});
const { insert, update } = items.reduce((acc, item) => {
- const itemIdentifier = identifiers.map(identifierX => item[identifierX]).toString();
+ const itemIdentifier = identifiers.map((identifierX) => item[identifierX]).toString();
if (duplicatesByIdentifiers[itemIdentifier]) {
acc.update.push(item);
@@ -34,7 +34,7 @@ async function upsert(table, items, identifier = ['id'], _knex) {
const [inserted, updated] = await Promise.all([
knex(table).returning('*').insert(insert),
- knex.transaction(async trx => Promise.all(update.map((item) => {
+ knex.transaction(async (trx) => Promise.all(update.map((item) => {
const clause = identifiers.reduce((acc, identifierX) => ({ ...acc, [identifierX]: item[identifierX] }), {});
return trx
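
upsert, above, splits items into inserts and updates by matching the identifier columns against existing rows, then runs the updates inside a transaction. A usage sketch with a hypothetical table:

// Usage sketch; the table and columns are hypothetical.
await upsert('actors', [
  { slug: 'new-name', height: 170 },      // slug not in the table yet => part of the insert batch
  { slug: 'existing-name', height: 175 }, // slug already present => updated inside the transaction
], 'slug');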

View File

@@ -9,8 +9,8 @@ function getVirtualConsole(filepath) {
const virtualConsole = new VirtualConsole();
const context = path.basename(filepath).replace(path.extname(filepath), '');
- virtualConsole.on('error', message => logger.warn(`Error from JSDOM in ${context}: ${message}`));
- virtualConsole.on('jsdomError', message => logger.warn(`Error from JSDOM in ${context}: ${message}`));
+ virtualConsole.on('error', (message) => logger.warn(`Error from JSDOM in ${context}: ${message}`));
+ virtualConsole.on('jsdomError', (message) => logger.warn(`Error from JSDOM in ${context}: ${message}`));
return virtualConsole;
}
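
getVirtualConsole, above, builds a jsdom VirtualConsole whose errors are rerouted to the project logger, labelled with the calling file's basename. A usage sketch, assuming jsdom's JSDOM constructor; the HTML and file path are illustrative:

// Usage sketch; the HTML and file path are illustrative.
const { JSDOM } = require('jsdom');

const virtualConsole = getVirtualConsole('/src/scrapers/example.js'); // context label becomes 'example'
const { window } = new JSDOM('<main>hello</main>', { virtualConsole }); // page errors now surface as logger.warn lines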