Added periodic memory logger.

DebaucheryLibrarian
2021-11-20 23:59:15 +01:00
parent a867817dc1
commit ccb99e278c
109 changed files with 10238 additions and 10833 deletions
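
The hunks below show only the mechanical lint pass that rode along with this commit (single arrow-function parameters wrapped in parentheses); the memory logger named in the commit message lives in one of the other changed files not shown here. For orientation, a periodic memory logger in Node.js usually amounts to a timer around process.memoryUsage() -- the sketch below is an illustration under that assumption, not the code from this commit; the function name, interval, and log format are all hypothetical.

// Hypothetical sketch only -- not the implementation from this commit.
// Logs resident set size and heap usage at a fixed interval.
function startMemoryLogger(intervalMs = 60000) { // interval is an assumption
	const toMb = (bytes) => (bytes / 1024 / 1024).toFixed(2);

	const timer = setInterval(() => {
		const { rss, heapUsed, heapTotal } = process.memoryUsage();

		console.log(`memory: rss ${toMb(rss)} MB, heap ${toMb(heapUsed)}/${toMb(heapTotal)} MB`);
	}, intervalMs);

	timer.unref(); // don't let the logger keep the process alive on its own

	return () => clearInterval(timer); // call to stop logging
}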


@@ -53,7 +53,7 @@ function getImageWithFallbacks(q, selector, site, el) {
 		q(selector, 'src0_1x'),
 	];
 
-	return sources.filter(Boolean).map(src => `${site.parameters?.media || site.url}${src}`);
+	return sources.filter(Boolean).map((src) => `${site.parameters?.media || site.url}${src}`);
 }
 
 function scrapeAll(scenes, channel) {
@@ -108,7 +108,7 @@ function scrapeAllT1(scenes, site, accNetworkReleases) {
 		// release.entryId = q('.img-div img', 'id')?.match(/set-target-(\d+)/)[1];
 		release.entryId = deriveEntryId(release);
 
-		if (site.parameters?.accFilter && accNetworkReleases?.map(accRelease => accRelease.entryId).includes(release.entryId)) {
+		if (site.parameters?.accFilter && accNetworkReleases?.map((accRelease) => accRelease.entryId).includes(release.entryId)) {
 			// filter out releases that were already scraped from a categorized site, requires sequential site scraping
 			return null;
 		}
@@ -132,7 +132,7 @@ function scrapeScene({ html, query }, channel, url) {
 	const poster = qu.prefixUrl(posterPath, channel.url) || query.img('.update_thumb', 'src0_1x', { origin: channel.url }); // latter used when trailer requires signup
 
 	[release.poster, ...release.photos] = [poster, ...query.imgs('.item-thumb img', 'src0_1x', { origin: channel.url })]
-		.map(src => src && [
+		.map((src) => src && [
 			src.replace('-1x', '-3x'),
 			src.replace('-1x', '-2x'),
 			src,
@@ -161,7 +161,7 @@ function scrapeSceneT1({ html, query }, site, url, baseRelease) {
 	release.date = query.date('.update-info-row', 'MMM D, YYYY', /\w+ \d{1,2}, \d{4}/);
 	release.duration = query.dur('.update-info-row:nth-child(2)');
 
-	release.actors = query.all('.models-list-thumbs a').map(el => ({
+	release.actors = query.all('.models-list-thumbs a').map((el) => ({
 		name: query.q(el, 'span', true),
 		avatar: getImageWithFallbacks(query.q, 'img', site, el),
 	}));
@@ -180,8 +180,8 @@ function scrapeSceneT1({ html, query }, site, url, baseRelease) {
 	if (stars) release.stars = Number(stars);
 
 	if (site.type === 'network') {
-		const channelRegExp = new RegExp(site.children.map(channel => channel.parameters?.match || channel.name).join('|'), 'i');
-		const channel = release.tags.find(tag => channelRegExp.test(tag));
+		const channelRegExp = new RegExp(site.children.map((channel) => channel.parameters?.match || channel.name).join('|'), 'i');
+		const channel = release.tags.find((tag) => channelRegExp.test(tag));
 
 		if (channel) {
 			release.channel = slugify(channel, '');
@@ -290,7 +290,7 @@ async function scrapeProfile({ query, el }, channel, options) {
 	if (bio.piercings && /yes/i.test(bio.piercings)) profile.hasPiercings = true;
 	if (bio.piercings && /no/i.test(bio.piercings)) profile.hasPiercings = false;
 
-	if (bio.aliases) profile.aliases = bio.aliases.split(',').map(alias => alias.trim());
+	if (bio.aliases) profile.aliases = bio.aliases.split(',').map((alias) => alias.trim());
 
 	profile.social = [bio.onlyfans, bio.twitter, bio.instagram].filter(Boolean);
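
Every hunk in this file makes the same change: a single arrow-function parameter gains parentheses (src => ... becomes (src) => ...). That pattern is consistent with ESLint's core arrow-parens rule applied with --fix; the diff doesn't include the repo's lint configuration, so the fragment below is an assumed illustration, not the project's actual config.

// Hypothetical .eslintrc.js fragment -- the real lint config is not part of this diff.
module.exports = {
	rules: {
		// 'always' requires parentheses around every arrow-function parameter list
		'arrow-parens': ['error', 'always'],
	},
};

A sweep like this one is typically produced by enabling the rule and running npx eslint --fix . across the codebase.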