Simplified tag media seed file. Renamed tags. Media refactor experiment.

2020-01-08 23:33:24 +01:00
parent 4c28c8b640
commit 863df38b29
28 changed files with 342 additions and 313 deletions


@@ -9,6 +9,7 @@ const mime = require('mime');
const sharp = require('sharp');
const blake2 = require('blake2');
const logger = require('./logger');
const knex = require('./knex');
const upsert = require('./utils/upsert');
@@ -75,11 +76,11 @@ async function findDuplicates(photos, identifier, prop = null, label) {
      : !duplicateLookup.has(prop ? source[prop] : source)));
  if (duplicates.length > 0) {
    console.log(`${duplicates.length} media items already present by ${identifier} for ${label}`);
    logger.info(`${duplicates.length} media items already present by ${identifier} for ${label}`);
  }
  if (originals.length > 0) {
    console.log(`Fetching ${originals.length} new media items for ${label}`);
    logger.info(`Fetching ${originals.length} new media items for ${label}`);
  }
  return [duplicates, originals];
@@ -181,8 +182,6 @@ async function storePhotos(photos, {
    concurrency: 10,
  }).filter(photo => photo);
  return metaFiles;
  const [hashDuplicates, hashOriginals] = await findDuplicates(metaFiles, 'hash', 'hash', label);
  const savedPhotos = await savePhotos(hashOriginals, {
@@ -230,6 +229,39 @@ async function storePhotos(photos, {
  await upsert(`${domain}s_${role}s`, photoAssociations, [`${domain}_id`, 'media_id']);
}
/*
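// Experimental bulk variant of storePhotos: dedupe photo URLs across all
// releases, fetch only new sources, dedupe the results again by file hash,
// and associate each release with its photos via a source-to-hash map.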
async function storeReleasePhotos(releases, label) {
  const sources = releases.map(release => pluckPhotos(release.photos)).flat();
  const uniqueSources = Array.from(new Set(sources));
  const [sourceDuplicates, sourceOriginals] = await findDuplicates(uniqueSources, 'source', null, label);
  const metaFiles = await Promise.map(
    sourceOriginals,
    async (photoUrl, index) => fetchPhoto(photoUrl, index, label),
    { concurrency: 10 },
  )
    .filter(photo => photo);
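  // Keep a single fetched file per unique content hash.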
  const hashUniques = Object.values(metaFiles.reduce((acc, file) => {
    if (!acc[file.hash]) acc[file.hash] = file;
    return acc;
  }, {}));
  const [hashDuplicates, hashOriginals] = await findDuplicates(hashUniques, 'hash', 'hash', label);
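  // Map each source URL, newly fetched or already stored, to its file hash.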
  const sourceHashes = metaFiles.concat(sourceDuplicates).reduce((acc, file) => {
    acc[file.source] = file.hash;
    return acc;
  }, {});
  const associations = releases.map(release => release.photos.map(source => [release.id, sourceHashes[source]])).flat();
  console.log(associations);
}
*/
async function storeTrailer(trailers, {
  domain = 'releases',
  targetId,
@@ -292,5 +324,6 @@ async function storeTrailer(trailers, {
module.exports = {
  createMediaDirectory,
  storePhotos,
  // storeReleasePhotos,
  storeTrailer,
};