Added periodic memory logger.

Author: DebaucheryLibrarian
Date:   2021-11-20 23:59:15 +01:00
parent a867817dc1
commit ccb99e278c
109 changed files with 10238 additions and 10833 deletions
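
The hunks excerpted below only show a mechanical style change (parentheses added around single arrow-function parameters); the periodic memory logger named in the commit message is not part of this excerpt. As a rough, hypothetical sketch of what such a logger usually looks like in Node.js (the interval length, log format, and use of console.log are assumptions, not code from this commit):

// Hypothetical sketch only, not the commit's actual implementation.
function startMemoryLogger(intervalMs = 60 * 1000) {
  const interval = setInterval(() => {
    // process.memoryUsage() reports resident set size and V8 heap usage in bytes
    const { rss, heapUsed, heapTotal } = process.memoryUsage();

    console.log(`memory: rss ${(rss / 1e6).toFixed(1)} MB, heap ${(heapUsed / 1e6).toFixed(1)}/${(heapTotal / 1e6).toFixed(1)} MB`);
  }, intervalMs);

  // don't let the timer alone keep the process alive
  interval.unref();

  return interval;
}

startMemoryLogger();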


@@ -53,7 +53,7 @@ function getImageWithFallbacks(q, selector, site, el) {
q(selector, 'src0_1x'),
];
-  return sources.filter(Boolean).map(src => `${site.parameters?.media || site.url}${src}`);
+  return sources.filter(Boolean).map((src) => `${site.parameters?.media || site.url}${src}`);
}
function scrapeAllClassic(scenes, channel) {
@@ -107,7 +107,7 @@ function scrapeAllTubular(scenes, channel, accNetworkReleases) {
// release.entryId = q('.img-div img', 'id')?.match(/set-target-(\d+)/)[1];
release.entryId = deriveEntryId(release);
-    if (channel.parameters?.accFilter && accNetworkReleases?.map(accRelease => accRelease.entryId).includes(release.entryId)) {
+    if (channel.parameters?.accFilter && accNetworkReleases?.map((accRelease) => accRelease.entryId).includes(release.entryId)) {
      // filter out releases that were already scraped from a categorized site, requires sequential site scraping
return null;
}
@@ -143,7 +143,7 @@ function scrapeSceneTubular({ query, html }, entity, url, baseRelease) {
release.date = query.date('.update-info-row', 'MMM D, YYYY', /\w+ \d{1,2}, \d{4}/);
release.duration = query.dur('.update-info-row:nth-child(2)');
-  release.actors = query.all('.models-list-thumbs a').map(el => ({
+  release.actors = query.all('.models-list-thumbs a').map((el) => ({
name: query.cnt(el, 'span'),
avatar: getImageWithFallbacks(query.q, 'img', entity, el),
url: query.url(el, null),
@@ -164,8 +164,8 @@ function scrapeSceneTubular({ query, html }, entity, url, baseRelease) {
if (stars) release.stars = Number(stars);
if (entity.type === 'network') {
-    const channelRegExp = new RegExp(entity.children.map(channel => channel.parameters?.match || channel.name).join('|'), 'i');
-    const channel = release.tags.find(tag => channelRegExp.test(tag));
+    const channelRegExp = new RegExp(entity.children.map((channel) => channel.parameters?.match || channel.name).join('|'), 'i');
+    const channel = release.tags.find((tag) => channelRegExp.test(tag));
if (channel) {
release.channel = slugify(channel, '');
@@ -199,8 +199,8 @@ async function scrapeProfile({ query }, entity, parameters) {
avatarEl.getAttribute('src0'),
avatarEl.getAttribute('src'),
]
-      .filter(avatar => avatar && !/p\d+.jpe?g/.test(avatar)) // remove non-existing attributes and placeholder images
-      .map(avatar => qu.prefixUrl(avatar, entity.url));
+      .filter((avatar) => avatar && !/p\d+.jpe?g/.test(avatar)) // remove non-existing attributes and placeholder images
+      .map((avatar) => qu.prefixUrl(avatar, entity.url));
if (avatarSources.length) profile.avatar = avatarSources;
}
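
Every hunk in this excerpt applies the same transformation: single-parameter arrow functions gain explicit parentheses, so src => … becomes (src) => …. That is the behaviour of ESLint's arrow-parens rule set to 'always' when run with --fix; a minimal configuration along those lines might look like the following (an illustrative assumption, the project's actual lint setup is not shown here):

// .eslintrc.js (illustrative sketch only)
module.exports = {
  rules: {
    // require parentheses around arrow function parameters, even when there is only one
    'arrow-parens': ['error', 'always'],
  },
};

Running something like npx eslint --fix . under that rule rewrites src => … to (src) => … across the codebase, which matches the pattern of every changed line above.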