Added sharpness and re-added entropy to avatar media metadata; low-entropy photos are now excluded both from selection as the main avatar and from the profile photo list.

This commit is contained in:
DebaucheryLibrarian
2020-12-20 04:21:28 +01:00
parent cbcac0725d
commit 5f4039c5d4
7 changed files with 128 additions and 87 deletions

View File

@@ -526,7 +526,9 @@ async function interpolateProfiles(actorIds) {
profile.tattoos = getLongest(valuesByProperty.tattoos);
profile.piercings = getLongest(valuesByProperty.piercings);
profile.avatar_media_id = avatars.sort((avatarA, avatarB) => avatarB.height - avatarA.height)[0]?.id || null;
profile.avatar_media_id = avatars
.filter(avatar => avatar.entropy > 6)
.sort((avatarA, avatarB) => avatarB.height - avatarA.height)[0]?.id || null;
return profile;
});

View File

@@ -126,7 +126,7 @@ function toBaseSource(rawSource) {
return null;
}
function baseSourceToBaseMedia(baseSource, role, metadata, options) {
function baseSourceToBaseMedia(baseSource, role, metadata) {
if (Array.isArray(baseSource)) {
if (baseSource.length > 0) {
return {
@@ -134,7 +134,6 @@ function baseSourceToBaseMedia(baseSource, role, metadata, options) {
id: nanoid(),
role,
sources: baseSource,
...options,
};
}
@@ -147,7 +146,6 @@ function baseSourceToBaseMedia(baseSource, role, metadata, options) {
id: nanoid(),
role,
sources: [baseSource],
...options,
};
}
@@ -281,7 +279,7 @@ async function extractSource(baseSource, { existingExtractMediaByUrl }) {
throw new Error(`Could not extract source from ${baseSource.url}: ${res.status}`);
}
async function storeImageFile(media, hashDir, hashSubDir, filename, filedir, filepath) {
async function storeImageFile(media, hashDir, hashSubDir, filename, filedir, filepath, options) {
logger.silly(`Storing permanent media files for ${media.id} from ${media.src} at ${filepath}`);
try {
@@ -298,20 +296,22 @@ async function storeImageFile(media, hashDir, hashSubDir, filename, filedir, fil
]);
const image = sharp(media.file.path);
const info = await image.metadata();
const isProcessed = media.meta.subtype !== 'jpeg' || media.process;
console.log(media);
const [info, stats] = await Promise.all([
image.metadata(),
options?.stats && image.stats(),
]);
if (media.process) {
Object.entries(media.process).forEach(([operation, options]) => {
Object.entries(media.process).forEach(([operation, processOptions]) => {
if (image[operation]) {
image[operation](...(Array.isArray(options) ? options : [options]));
image[operation](...(Array.isArray(processOptions) ? processOptions : [processOptions]));
return;
}
if (operation === 'crop') {
image.extract(...(Array.isArray(options) ? options : [options]));
image.extract(...(Array.isArray(processOptions) ? processOptions : [processOptions]));
return;
}
@@ -365,6 +365,8 @@ async function storeImageFile(media, hashDir, hashSubDir, filename, filedir, fil
...media.meta,
width: info.width,
height: info.height,
entropy: stats?.entropy || null,
sharpness: stats?.sharpness || null,
},
};
} catch (error) {
@@ -376,7 +378,7 @@ async function storeImageFile(media, hashDir, hashSubDir, filename, filedir, fil
}
}
async function storeFile(media) {
async function storeFile(media, options) {
try {
const hashDir = media.meta.hash.slice(0, 2);
const hashSubDir = media.meta.hash.slice(2, 4);
@@ -399,7 +401,7 @@ async function storeFile(media) {
}
if (media.meta.type === 'image') {
return storeImageFile(media, hashDir, hashSubDir, filename, filedir, filepath);
return storeImageFile(media, hashDir, hashSubDir, filename, filedir, filepath, options);
}
const [stat] = await Promise.all([
@@ -609,6 +611,7 @@ function curateMediaEntry(media, index) {
width: media.meta.width,
height: media.meta.height,
entropy: media.meta.entropy,
sharpness: media.meta.sharpness,
source: media.src,
source_page: media.url,
scraper: media.scraper,
@@ -623,7 +626,7 @@ function curateMediaEntry(media, index) {
};
}
async function storeMedias(baseMedias) {
async function storeMedias(baseMedias, options) {
await fsPromises.mkdir(path.join(config.media.path, 'temp'), { recursive: true });
const [existingSourceMediaByUrl, existingExtractMediaByUrl] = await findSourceDuplicates(baseMedias);
@@ -638,7 +641,7 @@ async function storeMedias(baseMedias) {
const savedMedias = await Promise.map(
uniqueHashMedias,
async baseMedia => storeFile(baseMedia),
async baseMedia => storeFile(baseMedia, options),
{ concurrency: 100 }, // don't overload disk
);
@@ -646,7 +649,7 @@ async function storeMedias(baseMedias) {
// overwrite files in case image processing was changed
await Promise.map(
existingHashMedias,
async baseMedia => storeFile(baseMedia),
async baseMedia => storeFile(baseMedia, options),
{ concurrency: 100 }, // don't overload disk
);
}
@@ -733,14 +736,14 @@ async function associateAvatars(profiles) {
avatarBaseMedia: toBaseMedias([profile.avatar], 'avatars', {
credit: profile.credit || profile.entity?.name || null,
scraper: profile.scraper || null,
}, { stats: true })[0],
})[0],
}
: profile
));
const baseMedias = profilesWithBaseMedias.map(profile => profile.avatarBaseMedia).filter(Boolean);
const storedMedias = await storeMedias(baseMedias);
const storedMedias = await storeMedias(baseMedias, { stats: true });
const storedMediasById = itemsByKey(storedMedias, 'id');
const profilesWithAvatarIds = profilesWithBaseMedias.map((profile) => {