Tracking media fetch and storage durations. Enabled log symlink.

DebaucheryLibrarian 2025-03-29 02:08:34 +01:00
parent 89da72d3d8
commit f198929983
5 changed files with 50 additions and 1920 deletions

File diff suppressed because it is too large


@@ -5976,6 +5976,16 @@ const sites = [
studioId: 29,
},
},
{
name: 'Anal Teens',
slug: 'analteens',
url: 'https://www.analvids.com/studios/anal_teens',
parent: 'analvids',
hasLogo: false,
parameters: {
studioId: 2251,
},
},
{
name: 'Gonzo.com',
slug: 'gonzocom',
@@ -6016,7 +6026,7 @@ const sites = [
},
},
{
name: 'NRX-Studio',
name: 'NRX Studio',
slug: 'nrxstudio',
url: 'https://www.analvids.com/studios/nrx-studio',
parent: 'analvids',
@@ -6030,6 +6040,16 @@ const sites = [
studioId: 94,
},
},
{
name: 'NRX The Dark Side',
slug: 'nrxdarkside',
url: 'https://www.analvids.com/studios/nrx_dark_side',
parent: 'analvids',
hasLogo: false,
parameters: {
studioId: 3022,
},
},
{
name: "Giorgio's Lab",
slug: 'giorgioslab',
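
Both new entries follow the shape of the existing AnalVids child studios: a name, a slug, the studio page URL, parent: 'analvids', and a numeric parameters.studioId. A minimal sketch of that shape in isolation, using only the data added in this diff; the filter and map are purely illustrative and not part of the traxxx seed code:

// Illustrative only: the two studios added above, and how a consumer might
// collect the AnalVids studio IDs from entries of this shape.
const sites = [
  { name: 'Anal Teens', slug: 'analteens', url: 'https://www.analvids.com/studios/anal_teens', parent: 'analvids', hasLogo: false, parameters: { studioId: 2251 } },
  { name: 'NRX The Dark Side', slug: 'nrxdarkside', url: 'https://www.analvids.com/studios/nrx_dark_side', parent: 'analvids', hasLogo: false, parameters: { studioId: 3022 } },
];

const analvidsStudioIds = sites
  .filter((site) => site.parent === 'analvids')
  .map((site) => site.parameters.studioId);

console.log(analvidsStudioIds); // [ 2251, 3022 ]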


@@ -1034,6 +1034,7 @@ const tagMedia = [
['teen', 0, 'Alexa Flexy', 'sensualgirl'],
['teen', 1, 'Stalfra aka Precious', 'nubiles'],
['trainbang', 'monika_fox_analvids', 'Monika Fox', 'analvids'],
['trainbang', 'sasha_paige_analvids', 'Sasha Paige', 'analvids'],
['trainbang', 1, 'Ria Sunn', 'private'],
['trainbang', 0, 'Nicole Black in GIO971', 'analvids'],
['transsexual', 'kelly_silva_mel_almeida_brazilliantranssexuals', 'Kelly Silva and Mel Almeida', 'brazilliantranssexuals'],


@@ -28,14 +28,20 @@ function logger(filepath) {
timestamp: true,
}),
new winston.transports.DailyRotateFile({
datePattern: 'YYYY-MM-DD',
filename: path.join('log', 'combined_%DATE%.log'),
level: 'silly',
datePattern: 'YYYY-MM-DD',
dirname: 'log',
filename: 'combined_%DATE%.log',
symlinkName: 'combined.log',
createSymlink: true,
}),
new winston.transports.DailyRotateFile({
datePattern: 'YYYY-MM-DD',
filename: path.join('log', 'error_%DATE%.log'),
level: 'error',
datePattern: 'YYYY-MM-DD',
dirname: 'log',
filename: 'error_%DATE%.log',
symlinkName: 'error.log',
createSymlink: true,
}),
],
});
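
For reference, the new transport options in isolation: dirname replaces the old path.join('log', ...) filename, and createSymlink plus symlinkName keep a stable combined.log (and error.log) pointing at the current rotated file. A minimal standalone sketch, assuming only the winston and winston-daily-rotate-file packages this logger already depends on:

const winston = require('winston');
require('winston-daily-rotate-file'); // registers winston.transports.DailyRotateFile

const logger = winston.createLogger({
  transports: [
    new winston.transports.DailyRotateFile({
      level: 'silly',
      datePattern: 'YYYY-MM-DD',
      dirname: 'log',                  // all log files go to ./log
      filename: 'combined_%DATE%.log', // e.g. combined_2025-03-29.log
      symlinkName: 'combined.log',     // stable name, handy for tail -f
      createSymlink: true,             // keep the symlink pointing at the current file
    }),
  ],
});

logger.info('hello from the symlinked rotating log');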


@@ -463,11 +463,15 @@ async function writeLazy(image, lazypath) {
.toFile(path.join(config.media.path, lazypath));
}
const nsPerSec = 1e9; // nanoseconds per second
async function storeImageFile(media, hashDir, hashSubDir, filename, filedir, filepath, options) {
logger.silly(`Storing permanent media files for ${media.id} from ${media.src} at ${filepath}`);
logger.debug(`Memory usage at image storage: ${process.memoryUsage.rss() / 1000000} MB (${media.src})`);
try {
const startTime = process.hrtime();
const thumbdir = config.s3.enabled ? path.join(media.role, 'thumbs') : path.join(media.role, 'thumbs', hashDir, hashSubDir);
const thumbpath = path.join(thumbdir, filename);
@@ -531,7 +535,10 @@ async function storeImageFile(media, hashDir, hashSubDir, filename, filedir, filepath, options) {
]);
}
const diffTime = process.hrtime(startTime);
logger.silly(`Stored thumbnail, lazy and permanent media file for ${media.id} from ${media.src} at ${filepath}`);
logger.debug(`Media storage took ${(diffTime[0] * nsPerSec + diffTime[1]) / 1e6} ms (${media.role} ${media.id})`);
return {
...media,
@@ -584,6 +591,8 @@ async function storeFile(media, options) {
return storeImageFile(media, hashDir, hashSubDir, filename, filedir, filepath, options);
}
const startTime = process.hrtime();
if (['posters', 'photos', 'caps', 'covers'].includes(media.role)) {
throw new Error(`Media for '${media.role}' must be an image, but '${media.meta.mimetype}' was detected`);
}
@@ -605,7 +614,10 @@ async function storeFile(media, options) {
await storeS3Object(filepath, media);
}
const diffTime = process.hrtime(startTime);
logger.silly(`Stored permanent media file for ${media.id} from ${media.src} at ${filepath}`);
logger.debug(`Media storage took ${(diffTime[0] * nsPerSec + diffTime[1]) / 1e6} ms (${media.role} ${media.id})`);
return {
...media,
@@ -792,6 +804,8 @@ async function trySource(baseSource, existingMedias, baseMedia) {
async function fetchMedia(baseMedia, existingMedias) {
try {
const startTime = process.hrtime();
const source = await baseMedia.sources.reduce(
// try each source until success
(result, baseSource, baseSourceIndex) => result.catch(async (error) => {
@@ -804,6 +818,10 @@ async function fetchMedia(baseMedia, existingMedias) {
Promise.reject(new Error()),
);
const diffTime = process.hrtime(startTime);
logger.debug(`Media fetch took ${(diffTime[0] * nsPerSec + diffTime[1]) / 1e6} ms (${baseMedia.role} ${baseMedia.id})`);
return {
...baseMedia,
...source,
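
The timing added to storeImageFile, storeFile and fetchMedia follows the same pattern in each place: capture process.hrtime() before the work, pass it back to process.hrtime() afterwards, and convert the resulting [seconds, nanoseconds] pair to milliseconds via nsPerSec. A self-contained sketch of that pattern; timed and the setTimeout task are illustrative stand-ins, not functions from this codebase:

// Sketch of the duration logging added in this commit: process.hrtime() returns
// [seconds, nanoseconds]; the diff converts the pair to milliseconds for the log line.
const nsPerSec = 1e9; // nanoseconds per second

async function timed(label, task) {
  const startTime = process.hrtime();

  const result = await task();

  const diffTime = process.hrtime(startTime); // elapsed [seconds, nanoseconds]
  console.log(`${label} took ${(diffTime[0] * nsPerSec + diffTime[1]) / 1e6} ms`);

  return result;
}

// Usage with a stand-in task instead of the project's fetchMedia or storeFile:
timed('media fetch', () => new Promise((resolve) => setTimeout(resolve, 250)));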