Extended and improved transfer tool. Moved scenes up on movie page.

This commit is contained in:
DebaucheryLibrarian 2023-06-03 02:51:42 +02:00
parent 62617ec6bf
commit 4b9a0e6bab
10 changed files with 314 additions and 102 deletions

View File

@@ -62,6 +62,11 @@
/>
</div>
<Releases
v-if="release.scenes && release.scenes.length > 0"
:releases="release.scenes"
/>
<div class="row associations">
<ul
ref="actors"
@@ -109,11 +114,6 @@
</div>
</div>
<Releases
v-if="release.scenes && release.scenes.length > 0"
:releases="release.scenes"
/>
<div
v-if="release.directors && release.directors.length > 0"
class="row"

View File

@@ -337,6 +337,7 @@ module.exports = {
},
media: {
path: './media',
transferSource: 'http://localhost:5000/media',
maxSize: 1000,
quality: 80,
thumbnailSize: 320, // width for 16:9 will be exactly 576px
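The new media.transferSource setting is the base URL the transfer tool resolves stored media paths against (see transferMedia later in this commit). A minimal sketch of that resolution, with a hypothetical stored path:

const config = require('config');

// Sketch: resolving a stored media path against the configured transfer
// source. The trailing slash matters: without it, URL() would replace the
// base's last path segment instead of appending to it.
const mediaPath = 'photos/ab/abcd1234.jpeg'; // hypothetical stored path
const url = new URL(mediaPath, `${config.media.transferSource}/`).href;
// -> http://localhost:5000/media/photos/ab/abcd1234.jpeg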

View File

@@ -272,9 +272,15 @@ const { argv } = yargs
})
.option('flush-orphaned-media', {
describe: 'Remove all orphaned media items from database and disk.',
type: 'array',
type: 'boolean',
alias: 'flush-media',
})
.option('flush-media-files', {
describe: 'Remove files from storage when flushing media.',
type: 'boolean',
alias: 'flush-files',
default: true,
})
.option('flush-channels', {
describe: 'Delete all scenes and movies from channels.',
type: 'array',
@@ -339,6 +345,11 @@ const { argv } = yargs
alias: ['timeout'],
default: 60000,
})
.option('media-source', {
describe: 'Traxxx host to use for media transfers.',
type: 'string',
default: config.media.transferSource,
})
.coerce('after', interpretAfter)
.coerce('actors-update', (after) => interpretAfter(after, true));
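Taken together, the new options make file removal and the transfer host tunable from the command line. Hypothetical invocations (yargs negates boolean flags with a no- prefix; the entry point path is assumed):

// node src/app.js --flush-orphaned-media                   # delete rows and files (flush-media-files defaults to true)
// node src/app.js --flush-orphaned-media --no-flush-files  # delete rows, keep files in storage
// node src/app.js --media-source http://10.0.0.2:5000/media  # override config.media.transferSource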

View File

@@ -393,7 +393,9 @@ async function flushEntities(networkSlugs = [], channelSlugs = []) {
logger.info(`Removed ${deletedScenesCount} scenes, ${deletedMoviesCount} movies and ${deletedSeriesCount} series for ${entitySlugs}`);
if (argv.flushOrphanedMedia !== false) {
await flushOrphanedMedia();
}
}
module.exports = {

View File

@@ -997,18 +997,22 @@ async function flushOrphanedMedia() {
.returning(['media.id', 'media.is_s3', 'media.path', 'media.thumbnail', 'media.lazy'])
.delete();
if (argv.flushMediaFiles) {
await Promise.all(orphanedMedia.filter((media) => !media.is_s3).map((media) => Promise.all([
media.path && fsPromises.unlink(path.join(config.media.path, media.path)).catch(() => { /* probably file not found */ }),
media.thumbnail && fsPromises.unlink(path.join(config.media.path, media.thumbnail)).catch(() => { /* probably file not found */ }),
media.lazy && fsPromises.unlink(path.join(config.media.path, media.lazy)).catch(() => { /* probably file not found */ }),
])));
logger.info(`Removed ${orphanedMedia.length} media files from database and storage`);
if (config.s3.enabled) {
await deleteS3Objects(orphanedMedia.filter((media) => media.is_s3));
}
logger.info(`Removed ${orphanedMedia.length} media files from database and storage`);
} else {
logger.info(`Removed ${orphanedMedia.length} media files from database, but not from storage`);
}
try {
await fsPromises.rm(path.join(config.media.path, 'temp'), { recursive: true });
logger.info('Cleared temporary media directory');
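The unlink calls above swallow every error with an empty catch, as the inline comments note. A stricter sketch that only tolerates missing files (the helper name is ours, not the tool's):

const fsPromises = require('fs').promises;

// Sketch: unlink that ignores only ENOENT, so permission and I/O errors
// still surface instead of being silently dropped.
async function unlinkIfExists(filepath) {
    try {
        await fsPromises.unlink(filepath);
    } catch (error) {
        if (error.code !== 'ENOENT') {
            throw error;
        }
    }
}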

View File

@@ -4,6 +4,7 @@ const inquirer = require('inquirer');
const logger = require('./logger')(__filename);
const knex = require('./knex');
const argv = require('./argv');
const { flushOrphanedMedia } = require('./media');
const { graphql } = require('./web/graphql');
@@ -359,7 +360,9 @@ async function flushScenes() {
}
const deleteCount = await deleteScenes(sceneIds);
if (argv.flushOrphanedMedia !== false) {
await flushOrphanedMedia();
}
logger.info(`Removed ${deleteCount}/${sceneIds.length} scenes`);
}
@@ -380,7 +383,9 @@ async function flushMovies() {
}
const deleteCount = await deleteMovies(movieIds);
if (argv.flushOrphanedMedia !== false) {
await flushOrphanedMedia();
}
logger.info(`Removed ${deleteCount}/${movieIds.length} movies`);
}
@@ -401,7 +406,9 @@ async function flushSeries() {
}
const deleteCount = await deleteSeries(serieIds);
if (argv.flushOrphanedMedia !== false) {
await flushOrphanedMedia();
}
logger.info(`Removed ${deleteCount}/${serieIds.length} series`);
}
@@ -437,7 +444,9 @@ async function flushBatches(batchIds) {
logger.info(`Removed ${deletedScenesCount} scenes and ${deletedMoviesCount} movies for batches ${batchIds}`);
if (argv.flushOrphanedMedia !== false) {
await flushOrphanedMedia();
}
}
module.exports = {
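The same argv.flushOrphanedMedia !== false guard now appears in flushScenes, flushMovies, flushSeries, flushEntities and flushBatches; a hypothetical shared helper could keep the default-on behavior in one place:

// Sketch: centralizing the repeated guard. Flushing orphaned media stays
// the default and is skipped only when explicitly disabled on the CLI.
async function maybeFlushOrphanedMedia() {
    if (argv.flushOrphanedMedia !== false) {
        await flushOrphanedMedia();
    }
}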

View File

@@ -1,9 +1,11 @@
'use strict';
const config = require('config');
const fs = require('fs').promises;
const fs = require('fs');
const path = require('path');
const moment = require('moment');
const Promise = require('bluebird');
const bhttp = require('bhttp');
const { nanoid } = require('nanoid/non-secure');
const AWS = require('aws-sdk');
@@ -22,11 +24,9 @@ const s3 = new AWS.S3({
},
});
console.log(Object.keys(s3));
// NOT TRANSFERRED, unutilized on old server: production location, available qualities, actor alias for, actor entry id, chapter posters, chapter photos
const releaseFields = `
const sceneFields = `
entryId
shootId
title
@@ -43,6 +43,16 @@ const releaseFields = `
studio {
slug
}
movies: moviesScenesBySceneId {
movie {
title
entryId
entity {
slug
type
}
}
}
actors: releasesActors {
actor {
name
@@ -56,6 +66,7 @@ const releaseFields = `
}
directors: releasesDirectors {
director {
name
slug
entryId
entity {
@@ -164,12 +175,57 @@ const releaseFields = `
createdAt
`;
const movieFields = `
entryId
title
url
date
datePrecision
entity {
slug
type
}
poster: moviesPoster {
media {
hash
path
thumbnail
lazy
s3: isS3
mime
index
width
height
size
source
sourcePage
}
}
covers: moviesCovers {
media {
hash
path
thumbnail
lazy
s3: isS3
mime
index
width
height
size
source
sourcePage
}
}
createdAt
`;
async function save() {
const limit = args.limit || 1000;
const offset = args.offset || 0;
const { releases } = await graphql(`
query SearchReleases(
query SearchScenes(
$limit: Int = 20
$offset: Int = 0
) {
@@ -178,20 +234,40 @@ async function save() {
offset: $offset
orderBy: DATE_DESC
) {
${releaseFields}
${sceneFields}
}
}
`, {
limit,
offset,
});
}, 'owner');
const curatedReleases = releases.map((release) => ({
const { movies } = await graphql(`
query SearchScenes(
$limit: Int = 20
$offset: Int = 0
) {
movies(
first: $limit
offset: $offset
orderBy: DATE_DESC
) {
${movieFields}
}
}
`, {
limit,
offset,
}, 'owner');
const curatedScenes = releases.map((release) => ({
...release,
type: 'release',
actors: release.actors.filter(Boolean).map(({ actor }) => actor),
directors: release.directors.filter(Boolean).map(({ director }) => director),
studio: release.studio?.slug,
tags: release.tags.map(({ tag }) => tag?.slug).filter(Boolean),
movies: release.movies?.map(({ movie }) => movie) || [],
chapters: release.chapters.filter(Boolean).map((chapter) => ({
...chapter,
tags: chapter.tags.map(({ tag }) => tag?.slug).filter(Boolean),
@@ -203,10 +279,17 @@ async function save() {
covers: release.covers.filter(Boolean).map(({ media }) => media),
}));
const filename = `export-${offset}-${offset + limit}-${moment().format('YYYY-MM-DD_hh_mm_ss')}.json`;
const serializedData = JSON.stringify(curatedReleases, null, 4);
const curatedMovies = movies.map((movie) => ({
...movie,
type: 'movie',
poster: movie.poster?.media,
covers: movie.covers.filter(Boolean).map(({ media }) => media),
}));
await fs.writeFile(filename, serializedData);
const filename = `export-${offset}-${offset + limit}-${moment().format('YYYY-MM-DD_hh_mm_ss')}.json`;
const serializedData = JSON.stringify([...curatedScenes, ...curatedMovies], null, 4);
await fs.promises.writeFile(filename, serializedData);
console.log(`Saved ${releases.length} releases to ${filename}`);
@@ -300,18 +383,73 @@ async function addReleaseChapters(release, context) {
}, Promise.resolve());
}
const dirs = {
path: '',
thumbnail: 'thumbs',
lazy: 'lazy',
};
async function transferMedia(media, target) {
return ['path', 'thumbnail', 'lazy'].reduce(async (chain, type) => {
await chain;
const filename = `${media.hash}${path.extname(media[type])}`;
const filepath = path.join(target, dirs[type], filename);
const temp = path.join('media/temp', filepath);
const url = new URL(media[type], `${args.mediaSource}/`).href;
console.log('Transferring media', url);
const res = await bhttp.get(url, { stream: true });
if (res.statusCode !== 200) {
console.warn(`Missing ${target} ${url}`);
return;
}
await fs.promises.mkdir(path.dirname(temp), { recursive: true });
await new Promise((resolve, reject) => {
const fileStream = fs.createWriteStream(temp);
res.pipe(fileStream);
res.on('error', () => { reject(); });
fileStream.on('finish', () => { resolve(); });
fileStream.on('error', () => { reject(); });
});
if (args.s3) {
await s3.upload({
Bucket: config.s3.bucket,
Body: fs.createReadStream(temp),
Key: filepath,
ContentType: media.mime,
}).promise();
await fs.promises.unlink(temp);
}
}, Promise.resolve());
}
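The manual Promise around res.pipe(fileStream) resolves on 'finish' and rejects on either stream's 'error' event. Node's stream.pipeline does the same wiring in one call; a sketch of that alternative download step (the promises API requires Node 15+; the helper name is ours):

const fs = require('fs');
const path = require('path');
const { pipeline } = require('stream/promises');

// Sketch: write an HTTP response stream to a temp file, with errors from
// both streams forwarded and the destination cleaned up on failure.
async function downloadTo(res, temp) {
    await fs.promises.mkdir(path.dirname(temp), { recursive: true });
    await pipeline(res, fs.createWriteStream(temp));
}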
async function addReleaseMedia(medias, release, target) {
return Promise.all(medias.filter(Boolean).map(async (media) => {
try {
const id = nanoid();
const existingMedia = await knex('media')
.where('hash', media.hash)
.first();
const id = existingMedia?.id || nanoid();
if (!existingMedia) {
await knex('media').insert({
id,
hash: media.hash,
path: media.path,
thumbnail: media.thumbnail,
lazy: media.lazy,
is_s3: media.s3,
path: path.join(target, '', `${media.hash}${path.extname(media.path)}`),
thumbnail: path.join(target, 'thumbs', `${media.hash}${path.extname(media.thumbnail)}`),
lazy: path.join(target, 'lazy', `${media.hash}${path.extname(media.lazy)}`),
// is_s3: media.s3,
is_s3: true,
index: media.index,
mime: media.mime,
size: media.size,
@@ -321,19 +459,42 @@ async function addReleaseMedia(medias, release, target) {
source_page: media.sourcePage,
});
await knex(`releases_${target}`).insert({
release_id: release.id,
if (args.mediaSource) {
await transferMedia(media, target);
}
}
await knex(`${release.type}s_${target}`).insert({
[`${release.type}_id`]: release.id,
media_id: id,
});
} catch (error) {
console.log(`Skipped existing media ${media.hash} from ${media.url}: ${error.message}`);
}
}));
}
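The select-then-insert keyed on hash deduplicates media rows, but the surrounding Promise.all processes entries in parallel, so two identical hashes could still race. If media.hash carries a unique constraint, knex's PostgreSQL upsert support could express the same check atomically; a sketch with the column list abbreviated:

// Sketch: insert-or-skip keyed on the media hash (assumes a unique
// constraint on media.hash; remaining columns elided).
await knex('media')
    .insert({ id, hash: media.hash /* , path, thumbnail, lazy, ... */ })
    .onConflict('hash')
    .ignore();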
async function linkMovieScenes(release, context) {
await release.movies.reduce(async (chain, linkedMovie) => {
await chain;
const movie = context.movies.find((storedMovie) => storedMovie.entryId === linkedMovie.entryId
&& storedMovie.entity.slug === linkedMovie.entity.slug
&& storedMovie.entity.type === linkedMovie.entity.type);
console.log('movie', linkedMovie, movie);
if (!movie) {
throw new Error(`Missing ${linkedMovie.entity.slug} movie '${linkedMovie.title}' in '${release.title}'`);
}
await knex('movies_scenes').insert({
movie_id: movie.id,
scene_id: release.id,
});
}, Promise.resolve());
}
async function addRelease(release, context) {
const existingRelease = await knex('releases')
.leftJoin('entities', 'entities.id', 'releases.entity_id')
const existingRelease = await knex(`${release.type}s`)
.leftJoin('entities', 'entities.id', `${release.type}s.entity_id`)
.where('entry_id', release.entryId)
.where('entities.slug', release.entity.slug)
.where('entities.type', release.entity.type)
@@ -351,47 +512,58 @@ async function addRelease(release, context) {
throw new Error(`Release contains non-existent ${release.entity.type} '${release.entity.slug}'`);
}
const [releaseId] = await knex('releases')
const [releaseEntry] = await knex(`${release.type}s`)
.insert({
entry_id: release.entryId,
entity_id: entity.id,
studio_id: context.studioIdsBySlug[release.studio],
shoot_id: release.shootId,
url: release.url,
title: release.title,
slug: release.slug,
date: release.date,
date_precision: release.datePrecision,
created_batch_id: context.batchId,
updated_batch_id: context.batchId,
...(release.type === 'scene' && {
shoot_id: release.shootId,
studio_id: context.studioIdsBySlug[release.studio],
production_date: release.productionDate,
description: release.description,
duration: release.duration,
created_batch_id: context.batchId,
updated_batch_id: context.batchId,
}),
})
.returning('id');
.returning(['id', 'entry_id']);
const releaseWithId = { ...release, id: releaseId };
const releaseWithId = {
...release,
id: releaseEntry.id,
entityId: entity.id,
};
await Promise.all([
addReleaseMedia([releaseWithId.poster], releaseWithId, 'posters', context),
...(release.type === 'release' ? [
addReleaseTags(releaseWithId, context),
addReleaseActors(releaseWithId, context),
addReleaseDirectors(releaseWithId, context),
addReleaseChapters(releaseWithId, context),
addReleaseMedia([releaseWithId.poster], releaseWithId, 'posters', context),
addReleaseMedia(releaseWithId.photos, releaseWithId, 'photos', context),
// addReleaseMedia(releaseWithId.covers, releaseWithId, 'covers', context),
linkMovieScenes(releaseWithId, context),
] : []),
...(release.type === 'movie' ? [
addReleaseMedia(releaseWithId.covers, releaseWithId, 'covers', context),
] : []),
]);
return true;
return releaseWithId;
}
async function load() {
const file = await fs.readFile(args.file, 'utf8');
const releases = JSON.parse(file);
const file = await fs.promises.readFile(args.file, 'utf8');
const releases = JSON.parse(file).slice(0, args.limit || Infinity);
const [batchId] = await knex('batches').insert({ comment: `import ${args.file}` }).returning('id');
const aggTags = Array.from(new Set(releases.flatMap((release) => [...release.tags, ...release.chapters.flatMap((chapter) => chapter.tags)]).filter(Boolean)));
const aggTags = Array.from(new Set(releases.filter((release) => release.type === 'release').flatMap((release) => [...release.tags, ...release.chapters.flatMap((chapter) => chapter.tags)]).filter(Boolean)));
const aggStudios = Array.from(new Set(releases.map((release) => release.studio).filter(Boolean)));
const tags = await knex('tags')
@@ -406,14 +578,22 @@ async function load() {
const tagIdsBySlug = Object.fromEntries(tags.map((tag) => [tag.slug, tag.id]));
const studioIdsBySlug = Object.fromEntries(studios.map((studio) => [studio.slug, studio.id]));
const added = await releases.reduce(async (chain, release) => {
const addedMovies = await releases.filter((release) => release.type === 'movie').reduce(async (chain, release) => {
const acc = await chain;
const isAdded = await addRelease(release, { batchId, tagIdsBySlug, studioIdsBySlug });
const movie = await addRelease(release, { batchId, tagIdsBySlug, studioIdsBySlug });
return acc.concat(isAdded);
return acc.concat(movie);
}, Promise.resolve([]));
console.log(`Loaded ${added.filter(Boolean).length}/${releases.length} scenes in batch ${batchId}`);
const addedScenes = await releases.filter((release) => release.type === 'release').reduce(async (chain, release) => {
const acc = await chain;
const scene = await addRelease(release, { batchId, movies: addedMovies, tagIdsBySlug, studioIdsBySlug });
return acc.concat(scene);
}, Promise.resolve([]));
console.log(`Loaded ${addedMovies.filter(Boolean).length}/${releases.filter((release) => release.type === 'movie').length} movies in batch ${batchId}`);
console.log(`Loaded ${addedScenes.filter(Boolean).length}/${releases.filter((release) => release.type === 'release').length} scenes in batch ${batchId}`);
process.exit();
}
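load() now inserts all movies before any scenes, so linkMovieScenes can resolve each scene's movie references against rows created in the same run. The match is on the entryId/entity-slug/entity-type triple; a sketch of that predicate (the helper name is hypothetical):

// Sketch: two movie references denote the same movie when entryId and the
// owning entity's slug and type all agree, as in linkMovieScenes above.
const sameMovie = (a, b) => a.entryId === b.entryId
    && a.entity.slug === b.entity.slug
    && a.entity.type === b.entity.type;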

View File

@@ -1,12 +1,15 @@
'use strict';
const config = require('config');
const { withPostGraphileContext } = require('postgraphile');
const { graphql } = require('graphql');
const pg = require('./postgraphile');
const initPg = require('./postgraphile');
const logger = require('../logger')(__filename);
async function query(graphqlQuery, params) {
async function query(graphqlQuery, params, role = 'query') {
const pg = initPg(config.database[role]);
return withPostGraphileContext(pg, async (context) => {
const schema = await pg.getGraphQLSchema();
const result = await graphql(schema, graphqlQuery, null, context, params);

View File

@ -1,8 +1,6 @@
'use strict';
/* eslint-disable arrow-body-style */
const config = require('config');
const { postgraphile } = require('postgraphile');
const PgConnectionFilterPlugin = require('postgraphile-plugin-connection-filter');
@@ -11,15 +9,16 @@ const PgOrderByRelatedPlugin = require('@graphile-contrib/pg-order-by-related');
const { ActorPlugins, SitePlugins, ReleasePlugins, MediaPlugins } = require('./plugins/plugins');
const connectionString = `postgres://${config.database.query.user}:${config.database.query.password}@${config.database.query.host}:5432/${config.database.query.database}`;
async function pgSettings(req) {
return {
'user.id': req.session.user?.id || null, // undefined is passed as an empty string, avoid
};
}
module.exports = postgraphile(
function initPostgraphile(credentials) {
const connectionString = `postgres://${credentials.user}:${credentials.password}@${credentials.host}:5432/${credentials.database}`;
return postgraphile(
connectionString,
'public',
{
@@ -48,4 +47,7 @@ module.exports = postgraphile(
pgSettings,
},
pgSettings,
);
);
}
module.exports = initPostgraphile;
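postgraphile is now built by a factory instead of a module-level singleton, so each database role gets its own middleware instance. A usage sketch matching the call sites in this commit (config shape taken from the config.database[role] lookup above):

const config = require('config');
const initPostgraphile = require('./postgraphile');

// Sketch: one instance per role. 'query' backs the web router below;
// 'owner' backs the transfer tool's GraphQL calls.
const queryPg = initPostgraphile(config.database.query);
const ownerPg = initPostgraphile(config.database.owner);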

View File

@@ -13,7 +13,7 @@ const logger = require('../logger')(__filename);
const knex = require('../knex');
const errorHandler = require('./error');
const pg = require('./postgraphile');
const initPg = require('./postgraphile');
const {
login,
@ -83,7 +83,7 @@ async function initServer() {
router.use(bodyParser.json({ strict: false }));
router.use(session({ ...config.web.session, store }));
router.use(pg);
router.use(initPg(config.database.query));
router.use((req, res, next) => {
req.session.safeId = req.session.safeId || nanoid();