// traxxx/src/tools/transfer.js
'use strict';
const config = require('config');
const fs = require('fs');
const path = require('path');
const moment = require('moment');
const Promise = require('bluebird');
const bhttp = require('bhttp');
const { nanoid } = require('nanoid/non-secure');
const AWS = require('aws-sdk');
const { graphql } = require('../web/graphql');
const knex = require('../knex');
const args = require('../argv');
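
// Media is uploaded to a Wasabi S3 bucket in eu-central-1; credentials and the
// bucket name come from config.s3.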
const endpoint = new AWS.Endpoint('s3.eu-central-1.wasabisys.com');
const s3 = new AWS.S3({
  // region: 'eu-central-1',
  endpoint,
  credentials: {
    accessKeyId: config.s3.accessKey,
    secretAccessKey: config.s3.secretKey,
  },
});
// NOT TRANSFERRED, unutilized on old server: production location, available qualities, actor alias for, actor entry id, chapter posters, chapter photos
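
// GraphQL selections used for the export; the media sub-selection below is shared
// by every poster/photo/cover/trailer/teaser connection.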
const mediaFields = `
  media {
    hash
    path
    thumbnail
    lazy
    s3: isS3
    mime
    index
    width
    height
    size
    source
    sourcePage
  }
`;

const sceneFields = `
  entryId
  shootId
  title
  url
  date
  datePrecision
  productionDate
  description
  duration
  entity {
    slug
    type
  }
  studio {
    slug
  }
  movies: moviesScenesBySceneId {
    movie {
      title
      entryId
      entity {
        slug
        type
      }
    }
  }
  actors: releasesActors {
    actor {
      name
      slug
      entryId
      entity {
        slug
        type
      }
    }
  }
  directors: releasesDirectors {
    director {
      name
      slug
      entryId
      entity {
        slug
        type
      }
    }
  }
  tags: releasesTags {
    tag {
      slug
    }
  }
  chapters(orderBy: TIME_ASC) {
    index
    time
    duration
    title
    description
    tags: chaptersTags {
      tag {
        slug
      }
    }
  }
  poster: releasesPoster {
    ${mediaFields}
  }
  photos: releasesPhotos {
    ${mediaFields}
  }
  covers: releasesCovers {
    ${mediaFields}
  }
  trailer: releasesTrailer {
    ${mediaFields}
  }
  teaser: releasesTeaser {
    ${mediaFields}
  }
  createdAt
`;
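
// Movies only carry basic metadata plus poster and cover media.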
const movieFields = `
  entryId
  title
  url
  date
  datePrecision
  entity {
    slug
    type
  }
  poster: moviesPoster {
    ${mediaFields}
  }
  covers: moviesCovers {
    ${mediaFields}
  }
  createdAt
`;
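
// save: fetch scenes ("releases") and movies from the GraphQL API, paged with
// args.start/args.limit, and append each record as newline-delimited JSON to a
// timestamped export file.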
async function save() {
  const limit = args.limit || 1000;
  const offset = args.start || 0;

  const { releases } = await graphql(`
    query SearchScenes(
      $limit: Int = 20
      $offset: Int = 0
    ) {
      releases(
        first: $limit
        offset: $offset
        orderBy: DATE_DESC
      ) {
        ${sceneFields}
      }
    }
  `, {
    limit,
    offset,
  }, 'owner');

  const { movies } = await graphql(`
    query SearchMovies(
      $limit: Int = 20
      $offset: Int = 0
    ) {
      movies(
        first: $limit
        offset: $offset
        orderBy: DATE_DESC
      ) {
        ${movieFields}
      }
    }
  `, {
    limit,
    offset,
  }, 'owner');

  const filename = `export-${offset}-${offset + limit}-${moment().format('YYYY-MM-DD_hh_mm_ss')}.json`;

  let savedScenes = 0;
  let savedMovies = 0;

  await releases.reduce(async (chain, release) => {
    await chain;

    const entry = JSON.stringify({
      ...release,
      type: 'release',
      // optional chaining in case a scene has no description
      description: release.description?.replace(/\n/g, ' '),
      actors: release.actors.filter(Boolean).map(({ actor }) => actor),
      directors: release.directors.filter(Boolean).map(({ director }) => director),
      studio: release.studio?.slug,
      tags: release.tags.map(({ tag }) => tag?.slug).filter(Boolean),
      movies: release.movies?.map(({ movie }) => movie) || [],
      chapters: release.chapters.filter(Boolean).map((chapter) => ({
        ...chapter,
        tags: chapter.tags.map(({ tag }) => tag?.slug).filter(Boolean),
      })),
      poster: release.poster?.media,
      trailer: release.trailer?.media,
      teaser: release.teaser?.media,
      photos: release.photos.filter(Boolean).map(({ media }) => media),
      covers: release.covers.filter(Boolean).map(({ media }) => media),
    });

    await fs.promises.appendFile(filename, `${entry}\n`);

    savedScenes += 1;
  }, Promise.resolve());

  await movies.reduce(async (chain, movie) => {
    await chain;

    const entry = JSON.stringify({
      ...movie,
      type: 'movie',
      poster: movie.poster?.media,
      covers: movie.covers.filter(Boolean).map(({ media }) => media),
    });

    await fs.promises.appendFile(filename, `${entry}\n`);

    savedMovies += 1;
  }, Promise.resolve());

  console.log(`Saved ${savedScenes} scenes and ${savedMovies} movies to ${filename}`);

  process.exit();
}
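
// Link an imported release to its tags, resolving slugs via the map built in load().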
async function addReleaseTags(release, context) {
  if (release.tags.length === 0) {
    return;
  }

  await knex('releases_tags').insert(release.tags.map((tag) => ({
    tag_id: context.tagIdsBySlug[tag],
    release_id: release.id,
    original_tag: tag,
  })));
}
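
// Insert an actor that does not yet exist on this server and return its id.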
async function addNewActor(actor, entity, context) {
  const [actorId] = await knex('actors')
    .insert({
      name: actor.name,
      slug: actor.slug,
      entity_id: entity?.id,
      batch_id: context.batchId,
    })
    .returning('id');

  return actorId;
}
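
// Attach actors (or directors, via the target parameter) to a release, creating
// any that are missing for the given entity.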
async function addReleaseActors(release, context, target = 'actor') {
  await release[`${target}s`].reduce(async (chain, actor) => {
    await chain;

    const entity = actor.entity
      ? await knex('entities').where(actor.entity).first()
      : null;

    if (actor.entity && !entity) {
      throw new Error(`Actor ${actor.slug} contains non-existent ${actor.entity.type} '${actor.entity.slug}'`);
    }

    const existingActor = await knex('actors')
      .where('slug', actor.slug)
      .where((builder) => {
        if (entity) {
          builder.where('entity_id', entity.id);
          return;
        }

        builder.whereNull('entity_id');
      })
      .first();

    const actorId = existingActor?.id
      || await addNewActor(actor, entity, context);

    await knex(`releases_${target}s`).insert({
      release_id: release.id,
      [`${target}_id`]: actorId,
    });
  }, Promise.resolve());
}
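
// Directors reuse the actor logic but live in their own tables.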
async function addReleaseDirectors(release, context) {
  return addReleaseActors(release, context, 'director');
}
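
// Recreate a scene's chapters along with their tags.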
async function addReleaseChapters(release, context) {
  await release.chapters.reduce(async (chain, chapter) => {
    await chain;

    const [chapterId] = await knex('chapters')
      .insert({
        release_id: release.id,
        index: chapter.index,
        time: chapter.time,
        duration: chapter.duration,
        title: chapter.title, // fetched by sceneFields; without this, chapter titles would be dropped
        description: chapter.description,
      })
      .returning('id');

    if (chapter.tags.length > 0) {
      await knex('chapters_tags').insert(chapter.tags.map((tag) => ({
        tag_id: context.tagIdsBySlug[tag],
        chapter_id: chapterId,
        original_tag: tag,
      })));
    }
  }, Promise.resolve());
}
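
// Subdirectory per media variant, mirroring how media paths are stored.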
const dirs = {
  path: '',
  thumbnail: 'thumbs',
  lazy: 'lazy',
};
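
// Download each variant (full size, thumbnail, lazy) of a media item from the
// old server or its S3 bucket, stage it under media/temp and upload it to the
// new S3 bucket.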
async function transferMedia(media, target) {
  return ['path', 'thumbnail', 'lazy'].reduce(async (chain, type) => {
    await chain;

    if (!media[type]) {
      // not every media item necessarily has every variant; skip missing ones
      return;
    }

    const filename = `${media.hash}${path.extname(media[type])}`;
    const filepath = path.join(target, dirs[type], filename);
    const temp = path.join('media/temp', filepath);

    const url = new URL(media[type], `${media.s3 ? config.media.transferSources.s3 : config.media.transferSources.local}/`).href;

    console.log('Transferring media', url);

    const res = await bhttp.get(url, { stream: true });

    if (res.statusCode !== 200) {
      console.warn(`Missing ${target} ${url}`);
      return;
    }

    await fs.promises.mkdir(path.dirname(temp), { recursive: true });

    await new Promise((resolve, reject) => {
      const fileStream = fs.createWriteStream(temp);

      res.pipe(fileStream);

      res.on('error', (error) => { reject(error); });
      fileStream.on('finish', () => { resolve(); });
      fileStream.on('error', (error) => { reject(error); });
    });

    await s3.upload({
      Bucket: config.s3.bucket,
      Body: fs.createReadStream(temp),
      Key: filepath,
      ContentType: media.mime,
    }).promise();

    await fs.promises.unlink(temp);
  }, Promise.resolve());
}
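
// Insert media rows (deduplicated by hash), transfer the files for new rows and
// link the media to the release in the appropriate association table.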
async function addReleaseMedia(medias, release, target) {
  await medias.filter(Boolean).reduce(async (chain, media) => {
    await chain;

    const existingMedia = await knex('media')
      .where('hash', media.hash)
      .first();

    const id = existingMedia?.id || nanoid();

    if (!existingMedia) {
      await knex('media').insert({
        id,
        hash: media.hash,
        path: path.join(target, '', `${media.hash}${path.extname(media.path)}`),
        thumbnail: path.join(target, 'thumbs', `${media.hash}${path.extname(media.thumbnail)}`),
        lazy: path.join(target, 'lazy', `${media.hash}${path.extname(media.lazy)}`),
        // is_s3: media.s3,
        is_s3: true,
        index: media.index,
        mime: media.mime,
        size: media.size,
        width: media.width,
        height: media.height,
        source: media.source,
        source_page: media.sourcePage,
      });

      await transferMedia(media, target);
    }

    await knex(`${release.type}s_${target}`).insert({
      [`${release.type}_id`]: release.id,
      media_id: id,
    });
  }, Promise.resolve());
}
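
// Attach a scene to its movies; the movies must have been imported earlier in
// the same batch (see load()).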
async function linkMovieScenes(release, context) {
  await release.movies.reduce(async (chain, linkedMovie) => {
    await chain;

    const movie = context.movies.find((storedMovie) => storedMovie.entryId === linkedMovie.entryId
      && storedMovie.entity.slug === linkedMovie.entity.slug
      && storedMovie.entity.type === linkedMovie.entity.type);

    console.log('movie', linkedMovie, movie);

    if (!movie) {
      throw new Error(`Missing ${linkedMovie.entity.slug} movie '${linkedMovie.title}' in '${release.title}'`);
    }

    await knex('movies_scenes').insert({
      movie_id: movie.id,
      scene_id: release.id,
    });
  }, Promise.resolve());
}
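
// Import a single scene or movie unless an entry with the same entry id already
// exists for its entity, then import all of its related data.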
async function addRelease(release, context) {
  const existingRelease = await knex(`${release.type}s`)
    .leftJoin('entities', 'entities.id', `${release.type}s.entity_id`)
    .where('entry_id', release.entryId)
    .where('entities.slug', release.entity.slug)
    .where('entities.type', release.entity.type)
    .first();

  if (existingRelease) {
    console.log(`Skipping ${release.entity.slug} release "${release.title}", already in database`);
    return false;
  }

  const entity = await knex('entities')
    .select(['id', 'name'])
    .where(release.entity)
    .first();

  if (!entity) {
    throw new Error(`Release contains non-existent ${release.entity.type} '${release.entity.slug}'`);
  }

  const [releaseEntry] = await knex(`${release.type}s`)
    .insert({
      entry_id: release.entryId,
      entity_id: entity.id,
      url: release.url,
      title: release.title,
      slug: release.slug,
      date: release.date,
      date_precision: release.datePrecision,
      created_batch_id: context.batchId,
      updated_batch_id: context.batchId,
      // scenes are exported with type 'release' (see save()), so match on that
      // rather than 'scene' to include the scene-only columns
      ...(release.type === 'release' && {
        shoot_id: release.shootId,
        studio_id: context.studioIdsBySlug[release.studio],
        production_date: release.productionDate,
        description: release.description,
        duration: release.duration,
      }),
    })
    .returning(['id', 'entry_id']);

  const releaseWithId = {
    ...release,
    id: releaseEntry.id,
    entityId: entity.id,
    entityName: entity.name,
  };

  await Promise.all([
    addReleaseMedia([releaseWithId.poster], releaseWithId, 'posters', context),
    ...(release.type === 'release' ? [
      addReleaseTags(releaseWithId, context),
      addReleaseActors(releaseWithId, context),
      addReleaseDirectors(releaseWithId, context),
      addReleaseChapters(releaseWithId, context),
      addReleaseMedia(releaseWithId.photos, releaseWithId, 'photos', context),
      linkMovieScenes(releaseWithId, context),
    ] : []),
    ...(release.type === 'movie' ? [
      addReleaseMedia(releaseWithId.covers, releaseWithId, 'covers', context),
    ] : []),
  ]);

  return releaseWithId;
}
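
// load: read an export file produced by save(), optionally filter by entity and
// slice with args.start/args.limit, then import movies before scenes so that
// scenes can be linked to them.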
async function load() {
  const file = await fs.promises.readFile(args.file, 'utf8');

  const start = args.start || 0;
  const end = args.limit ? start + args.limit : Infinity;

  const releases = file.split('\n')
    .filter(Boolean)
    .map((data) => JSON.parse(data))
    .filter((release) => (args.entity ? release.entity.slug === args.entity : true))
    .slice(start, end);

  if (releases.length === 0) {
    console.log('Nothing to load');
    return;
  }

  const [batchId] = await knex('batches').insert({ comment: `import ${args.file}` }).returning('id');

  const aggTags = Array.from(new Set(releases
    .filter((release) => release.type === 'release')
    .flatMap((release) => [...release.tags, ...release.chapters.flatMap((chapter) => chapter.tags)])
    .filter(Boolean)));

  const aggStudios = Array.from(new Set(releases.map((release) => release.studio).filter(Boolean)));

  const tags = await knex('tags')
    .select('id', 'slug')
    .whereIn('slug', aggTags);

  const studios = await knex('entities')
    .select('id', 'slug')
    .where('type', 'studio')
    .whereIn('slug', aggStudios);

  const tagIdsBySlug = Object.fromEntries(tags.map((tag) => [tag.slug, tag.id]));
  const studioIdsBySlug = Object.fromEntries(studios.map((studio) => [studio.slug, studio.id]));

  const addedMovies = await releases
    .filter((release) => release.type === 'movie')
    .reduce(async (chain, release) => {
      const acc = await chain;
      const movie = await addRelease(release, { batchId, tagIdsBySlug, studioIdsBySlug });

      // addRelease returns false for skipped duplicates; only log actual imports
      if (movie) {
        console.log(`Loaded '${movie.entityName}' movie "${movie.title}"`);
      }

      return acc.concat(movie);
    }, Promise.resolve([]));

  const addedScenes = await releases
    .filter((release) => release.type === 'release')
    .reduce(async (chain, release) => {
      const acc = await chain;
      const scene = await addRelease(release, { batchId, movies: addedMovies, tagIdsBySlug, studioIdsBySlug });

      if (scene) {
        console.log(`Loaded '${scene.entityName}' scene "${scene.title}"`);
      }

      return acc.concat(!!scene);
    }, Promise.resolve([]));

  console.log(`Loaded ${addedMovies.filter(Boolean).length}/${releases.filter((release) => release.type === 'movie').length} movies in batch ${batchId}`);
  console.log(`Loaded ${addedScenes.filter(Boolean).length}/${releases.filter((release) => release.type === 'release').length} scenes in batch ${batchId}`);

  process.exit();
}
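
// Run the subcommand given as the positional CLI argument: save or load.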
({
  save,
  load,
})[args._]();