diff --git a/assets/components/releases/release.vue b/assets/components/releases/release.vue
index 96c9a628d..cc8be867e 100755
--- a/assets/components/releases/release.vue
+++ b/assets/components/releases/release.vue
@@ -62,6 +62,11 @@
/>
+
+
-
-
interpretAfter(after, true));
diff --git a/src/entities.js b/src/entities.js
index 9abb4044d..7ea37c115 100755
--- a/src/entities.js
+++ b/src/entities.js
@@ -393,7 +393,9 @@ async function flushEntities(networkSlugs = [], channelSlugs = []) {
logger.info(`Removed ${deletedScenesCount} scenes, ${deletedMoviesCount} movies and ${deletedSeriesCount} series for ${entitySlugs}`);
- await flushOrphanedMedia();
+ if (argv.flushOrphanedMedia !== false) {
+ await flushOrphanedMedia();
+ }
}
module.exports = {
diff --git a/src/media.js b/src/media.js
index c978031de..2444bfd50 100755
--- a/src/media.js
+++ b/src/media.js
@@ -997,16 +997,20 @@ async function flushOrphanedMedia() {
.returning(['media.id', 'media.is_s3', 'media.path', 'media.thumbnail', 'media.lazy'])
.delete();
- await Promise.all(orphanedMedia.filter((media) => !media.is_s3).map((media) => Promise.all([
- media.path && fsPromises.unlink(path.join(config.media.path, media.path)).catch(() => { /* probably file not found */ }),
- media.thumbnail && fsPromises.unlink(path.join(config.media.path, media.thumbnail)).catch(() => { /* probably file not found */ }),
- media.lazy && fsPromises.unlink(path.join(config.media.path, media.lazy)).catch(() => { /* probably file not found */ }),
- ])));
+ if (argv.flushMediaFiles) {
+ await Promise.all(orphanedMedia.filter((media) => !media.is_s3).map((media) => Promise.all([
+ media.path && fsPromises.unlink(path.join(config.media.path, media.path)).catch(() => { /* probably file not found */ }),
+ media.thumbnail && fsPromises.unlink(path.join(config.media.path, media.thumbnail)).catch(() => { /* probably file not found */ }),
+ media.lazy && fsPromises.unlink(path.join(config.media.path, media.lazy)).catch(() => { /* probably file not found */ }),
+ ])));
- logger.info(`Removed ${orphanedMedia.length} media files from database and storage`);
+ if (config.s3.enabled) {
+ await deleteS3Objects(orphanedMedia.filter((media) => media.is_s3));
+ }
- if (config.s3.enabled) {
- await deleteS3Objects(orphanedMedia.filter((media) => media.is_s3));
+ logger.info(`Removed ${orphanedMedia.length} media files from database and storage`);
+ } else {
+ logger.info(`Removed ${orphanedMedia.length} media files from database, but not from storage`);
}
try {
diff --git a/src/releases.js b/src/releases.js
index 29b4ece1e..1ebf78bf1 100755
--- a/src/releases.js
+++ b/src/releases.js
@@ -4,6 +4,7 @@ const inquirer = require('inquirer');
const logger = require('./logger')(__filename);
const knex = require('./knex');
+const argv = require('./argv');
const { flushOrphanedMedia } = require('./media');
const { graphql } = require('./web/graphql');
@@ -359,7 +360,9 @@ async function flushScenes() {
}
const deleteCount = await deleteScenes(sceneIds);
- await flushOrphanedMedia();
+ if (argv.flushOrphanedMedia !== false) {
+ await flushOrphanedMedia();
+ }
logger.info(`Removed ${deleteCount}/${sceneIds.length} scenes`);
}
@@ -380,7 +383,9 @@ async function flushMovies() {
}
const deleteCount = await deleteMovies(movieIds);
- await flushOrphanedMedia();
+ if (argv.flushOrphanedMedia !== false) {
+ await flushOrphanedMedia();
+ }
logger.info(`Removed ${deleteCount}/${movieIds.length} movies`);
}
@@ -401,7 +406,9 @@ async function flushSeries() {
}
const deleteCount = await deleteSeries(serieIds);
- await flushOrphanedMedia();
+ if (argv.flushOrphanedMedia !== false) {
+ await flushOrphanedMedia();
+ }
logger.info(`Removed ${deleteCount}/${serieIds.length} series`);
}
@@ -437,7 +444,9 @@ async function flushBatches(batchIds) {
logger.info(`Removed ${deletedScenesCount} scenes and ${deletedMoviesCount} movies for batches ${batchIds}`);
- await flushOrphanedMedia();
+ if (argv.flushOrphanedMedia !== false) {
+ await flushOrphanedMedia();
+ }
}
module.exports = {
diff --git a/src/tools/transfer.js b/src/tools/transfer.js
index ba22d10aa..81f496fb8 100644
--- a/src/tools/transfer.js
+++ b/src/tools/transfer.js
@@ -1,9 +1,11 @@
'use strict';
const config = require('config');
-const fs = require('fs').promises;
+const fs = require('fs');
+const path = require('path');
const moment = require('moment');
const Promise = require('bluebird');
+const bhttp = require('bhttp');
const { nanoid } = require('nanoid/non-secure');
const AWS = require('aws-sdk');
@@ -22,11 +24,9 @@ const s3 = new AWS.S3({
},
});
-console.log(Object.keys(s3));
-
// NOT TRANSFERRED, unutilized on old server: production location, availabile qualities, actor alias for, actor entry id, chapter posters, chapter photos
-const releaseFields = `
+const sceneFields = `
entryId
shootId
title
@@ -43,6 +43,16 @@ const releaseFields = `
studio {
slug
}
+ movies: moviesScenesBySceneId {
+ movie {
+ title
+ entryId
+ entity {
+ slug
+ type
+ }
+ }
+ }
actors: releasesActors {
actor {
name
@@ -56,6 +66,7 @@ const releaseFields = `
}
directors: releasesDirectors {
director {
+ name
slug
entryId
entity {
@@ -164,12 +175,57 @@ const releaseFields = `
createdAt
`;
+const movieFields = `
+ entryId
+ title
+ url
+ date
+ datePrecision
+ entity {
+ slug
+ type
+ }
+ poster: moviesPoster {
+ media {
+ hash
+ path
+ thumbnail
+ lazy
+ s3: isS3
+ mime
+ index
+ width
+ height
+ size
+ source
+ sourcePage
+ }
+ }
+ covers: moviesCovers {
+ media {
+ hash
+ path
+ thumbnail
+ lazy
+ s3: isS3
+ mime
+ index
+ width
+ height
+ size
+ source
+ sourcePage
+ }
+ }
+ createdAt
+`;
+
async function save() {
const limit = args.limit || 1000;
const offset = args.offset || 0;
const { releases } = await graphql(`
- query SearchReleases(
+ query SearchScenes(
$limit: Int = 20
$offset: Int = 0
) {
@@ -178,20 +234,40 @@ async function save() {
offset: $offset
orderBy: DATE_DESC
) {
- ${releaseFields}
+ ${sceneFields}
}
}
`, {
limit,
offset,
- });
+ }, 'owner');
- const curatedReleases = releases.map((release) => ({
+ const { movies } = await graphql(`
+ query SearchScenes(
+ $limit: Int = 20
+ $offset: Int = 0
+ ) {
+ movies(
+ first: $limit
+ offset: $offset
+ orderBy: DATE_DESC
+ ) {
+ ${movieFields}
+ }
+ }
+ `, {
+ limit,
+ offset,
+ }, 'owner');
+
+ const curatedScenes = releases.map((release) => ({
...release,
+ type: 'release',
actors: release.actors.filter(Boolean).map(({ actor }) => actor),
directors: release.directors.filter(Boolean).map(({ director }) => director),
studio: release.studio?.slug,
tags: release.tags.map(({ tag }) => tag?.slug).filter(Boolean),
+ movies: release.movies?.map(({ movie }) => movie) || [],
chapters: release.chapters.filter(Boolean).map((chapter) => ({
...chapter,
tags: chapter.tags.map(({ tag }) => tag?.slug).filter(Boolean),
@@ -203,10 +279,17 @@ async function save() {
covers: release.covers.filter(Boolean).map(({ media }) => media),
}));
- const filename = `export-${offset}-${offset + limit}-${moment().format('YYYY-MM-DD_hh_mm_ss')}.json`;
- const serializedData = JSON.stringify(curatedReleases, null, 4);
+ const curatedMovies = movies.map((movie) => ({
+ ...movie,
+ type: 'movie',
+ poster: movie.poster?.media,
+ covers: movie.covers.filter(Boolean).map(({ media }) => media),
+ }));
- await fs.writeFile(filename, serializedData);
+ const filename = `export-${offset}-${offset + limit}-${moment().format('YYYY-MM-DD_hh_mm_ss')}.json`;
+ const serializedData = JSON.stringify([...curatedScenes, ...curatedMovies], null, 4);
+
+ await fs.promises.writeFile(filename, serializedData);
console.log(`Saved ${releases.length} releases to ${filename}`);
@@ -300,18 +383,73 @@ async function addReleaseChapters(release, context) {
}, Promise.resolve());
}
+const dirs = {
+ path: '',
+ thumbnail: 'thumbs',
+ lazy: 'lazy',
+};
+
+async function transferMedia(media, target) {
+ return ['path', 'thumbnail', 'lazy'].reduce(async (chain, type) => {
+ await chain;
+
+ const filename = `${media.hash}${path.extname(media[type])}`;
+ const filepath = path.join(target, dirs[type], filename);
+ const temp = path.join('media/temp', filepath);
+ const url = new URL(media[type], `${args.mediaSource}/`).href;
+
+ console.log('Transferring media', url);
+
+ const res = await bhttp.get(url, { stream: true });
+
+ if (res.statusCode !== 200) {
+ console.warn(`Missing ${target} ${url}`);
+ return;
+ }
+
+ await fs.promises.mkdir(path.dirname(temp), { recursive: true });
+
+ await new Promise((resolve, reject) => {
+ const fileStream = fs.createWriteStream(temp);
+
+ res.pipe(fileStream);
+
+ res.on('error', () => { reject(); });
+
+ fileStream.on('finish', () => { resolve(); });
+ fileStream.on('error', () => { reject(); });
+ });
+
+ if (args.s3) {
+ await s3.upload({
+ Bucket: config.s3.bucket,
+ Body: fs.createReadStream(temp),
+ Key: filepath,
+ ContentType: media.mime,
+ }).promise();
+
+ await fs.promises.unlink(temp);
+ }
+ }, Promise.resolve());
+}
+
async function addReleaseMedia(medias, release, target) {
return Promise.all(medias.filter(Boolean).map(async (media) => {
- try {
- const id = nanoid();
+ const existingMedia = await knex('media')
+ .where('hash', media.hash)
+ .first();
+ const id = existingMedia?.id || nanoid();
+
+ if (!existingMedia) {
await knex('media').insert({
id,
hash: media.hash,
- path: media.path,
- thumbnail: media.thumbnail,
- lazy: media.lazy,
- is_s3: media.s3,
+ path: path.join(target, '', `${media.hash}${path.extname(media.path)}`),
+ thumbnail: path.join(target, 'thumbs', `${media.hash}${path.extname(media.thumbnail)}`),
+ lazy: path.join(target, 'lazy', `${media.hash}${path.extname(media.lazy)}`),
+ // is_s3: media.s3,
+ is_s3: true,
index: media.index,
mime: media.mime,
size: media.size,
@@ -321,19 +459,42 @@ async function addReleaseMedia(medias, release, target) {
source_page: media.sourcePage,
});
- await knex(`releases_${target}`).insert({
- release_id: release.id,
- media_id: id,
- });
- } catch (error) {
- console.log(`Skipped existing media ${media.hash} from ${media.url}: ${error.message}`);
+ if (args.mediaSource) {
+ await transferMedia(media, target);
+ }
}
+
+ await knex(`${release.type}s_${target}`).insert({
+ [`${release.type}_id`]: release.id,
+ media_id: id,
+ });
}));
}
+async function linkMovieScenes(release, context) {
+ await release.movies.reduce(async (chain, linkedMovie) => {
+ await chain;
+
+ const movie = context.movies.find((storedMovie) => storedMovie.entryId === linkedMovie.entryId
+ && storedMovie.entity.slug === linkedMovie.entity.slug
+ && storedMovie.entity.type === linkedMovie.entity.type);
+
+ console.log('movie', linkedMovie, movie);
+
+ if (!movie) {
+ throw new Error(`Missing ${linkedMovie.entity.slug} movie '${linkedMovie.title}' in '${release.title}'`);
+ }
+
+ await knex('movies_scenes').insert({
+ movie_id: movie.id,
+ scene_id: release.id,
+ });
+ }, Promise.resolve());
+}
+
async function addRelease(release, context) {
- const existingRelease = await knex('releases')
- .leftJoin('entities', 'entities.id', 'releases.entity_id')
+ const existingRelease = await knex(`${release.type}s`)
+ .leftJoin('entities', 'entities.id', `${release.type}s.entity_id`)
.where('entry_id', release.entryId)
.where('entities.slug', release.entity.slug)
.where('entities.type', release.entity.type)
@@ -351,47 +512,58 @@ async function addRelease(release, context) {
throw new Error(`Release contains non-existent ${release.entity.type} '${release.entity.slug}'`);
}
- const [releaseId] = await knex('releases')
+ const [releaseEntry] = await knex(`${release.type}s`)
.insert({
entry_id: release.entryId,
entity_id: entity.id,
- studio_id: context.studioIdsBySlug[release.studio],
- shoot_id: release.shootId,
url: release.url,
title: release.title,
slug: release.slug,
date: release.date,
date_precision: release.datePrecision,
- production_date: release.productionDate,
- description: release.description,
- duration: release.duration,
created_batch_id: context.batchId,
updated_batch_id: context.batchId,
+ ...(release.type === 'scene' && {
+ shoot_id: release.shootId,
+ studio_id: context.studioIdsBySlug[release.studio],
+ production_date: release.productionDate,
+ description: release.description,
+ duration: release.duration,
+ }),
})
- .returning('id');
+ .returning(['id', 'entry_id']);
- const releaseWithId = { ...release, id: releaseId };
+ const releaseWithId = {
+ ...release,
+ id: releaseEntry.id,
+ entityId: entity.id,
+ };
await Promise.all([
- addReleaseTags(releaseWithId, context),
- addReleaseActors(releaseWithId, context),
- addReleaseDirectors(releaseWithId, context),
- addReleaseChapters(releaseWithId, context),
addReleaseMedia([releaseWithId.poster], releaseWithId, 'posters', context),
- addReleaseMedia(releaseWithId.photos, releaseWithId, 'photos', context),
- // addReleaseMedia(releaseWithId.covers, releaseWithId, 'covers', context),
+ ...(release.type === 'release' ? [
+ addReleaseTags(releaseWithId, context),
+ addReleaseActors(releaseWithId, context),
+ addReleaseDirectors(releaseWithId, context),
+ addReleaseChapters(releaseWithId, context),
+ addReleaseMedia(releaseWithId.photos, releaseWithId, 'photos', context),
+ linkMovieScenes(releaseWithId, context),
+ ] : []),
+ ...(release.type === 'movie' ? [
+ addReleaseMedia(releaseWithId.covers, releaseWithId, 'covers', context),
+ ] : []),
]);
- return true;
+ return releaseWithId;
}
async function load() {
- const file = await fs.readFile(args.file, 'utf8');
- const releases = JSON.parse(file);
+ const file = await fs.promises.readFile(args.file, 'utf8');
+ const releases = JSON.parse(file).slice(0, args.limit || Infinity);
const [batchId] = await knex('batches').insert({ comment: `import ${args.file}` }).returning('id');
- const aggTags = Array.from(new Set(releases.flatMap((release) => [...release.tags, ...release.chapters.flatMap((chapter) => chapter.tags)]).filter(Boolean)));
+ const aggTags = Array.from(new Set(releases.filter((release) => release.type === 'release').flatMap((release) => [...release.tags, ...release.chapters.flatMap((chapter) => chapter.tags)]).filter(Boolean)));
const aggStudios = Array.from(new Set(releases.map((release) => release.studio).filter(Boolean)));
const tags = await knex('tags')
@@ -406,14 +578,22 @@ async function load() {
const tagIdsBySlug = Object.fromEntries(tags.map((tag) => [tag.slug, tag.id]));
const studioIdsBySlug = Object.fromEntries(studios.map((studio) => [studio.slug, studio.id]));
- const added = await releases.reduce(async (chain, release) => {
+ const addedMovies = await releases.filter((release) => release.type === 'movie').reduce(async (chain, release) => {
const acc = await chain;
- const isAdded = await addRelease(release, { batchId, tagIdsBySlug, studioIdsBySlug });
+ const movie = await addRelease(release, { batchId, tagIdsBySlug, studioIdsBySlug });
- return acc.concat(isAdded);
+ return acc.concat(movie);
}, Promise.resolve([]));
- console.log(`Loaded ${added.filter(Boolean).length}/${releases.length} scenes in batch ${batchId}`);
+ const addedScenes = await releases.filter((release) => release.type === 'release').reduce(async (chain, release) => {
+ const acc = await chain;
+ const scene = await addRelease(release, { batchId, movies: addedMovies, tagIdsBySlug, studioIdsBySlug });
+
+ return acc.concat(scene);
+ }, Promise.resolve([]));
+
+ console.log(`Loaded ${addedMovies.filter(Boolean).length}/${releases.filter((release) => release.type === 'movie').length} movies in batch ${batchId}`);
+ console.log(`Loaded ${addedScenes.filter(Boolean).length}/${releases.filter((release) => release.type === 'release').length} scenes in batch ${batchId}`);
process.exit();
}
diff --git a/src/web/graphql.js b/src/web/graphql.js
index 2c048bd2e..32e5be192 100755
--- a/src/web/graphql.js
+++ b/src/web/graphql.js
@@ -1,12 +1,15 @@
'use strict';
+const config = require('config');
const { withPostGraphileContext } = require('postgraphile');
const { graphql } = require('graphql');
-const pg = require('./postgraphile');
+const initPg = require('./postgraphile');
const logger = require('../logger')(__filename);
-async function query(graphqlQuery, params) {
+async function query(graphqlQuery, params, role = 'query') {
+ const pg = initPg(config.database[role]);
+
return withPostGraphileContext(pg, async (context) => {
const schema = await pg.getGraphQLSchema();
const result = await graphql(schema, graphqlQuery, null, context, params);
diff --git a/src/web/postgraphile.js b/src/web/postgraphile.js
index ae2134938..900b7b1cd 100755
--- a/src/web/postgraphile.js
+++ b/src/web/postgraphile.js
@@ -1,8 +1,6 @@
'use strict';
/* eslint-disable arrow-body-style */
-const config = require('config');
-
const { postgraphile } = require('postgraphile');
const PgConnectionFilterPlugin = require('postgraphile-plugin-connection-filter');
@@ -11,41 +9,45 @@ const PgOrderByRelatedPlugin = require('@graphile-contrib/pg-order-by-related');
const { ActorPlugins, SitePlugins, ReleasePlugins, MediaPlugins } = require('./plugins/plugins');
-const connectionString = `postgres://${config.database.query.user}:${config.database.query.password}@${config.database.query.host}:5432/${config.database.query.database}`;
-
async function pgSettings(req) {
return {
'user.id': req.session.user?.id || null, // undefined is passed as an empty string, avoid
};
}
-module.exports = postgraphile(
- connectionString,
- 'public',
- {
- // watchPg: true,
- disableDefaultMutations: true,
- dynamicJson: true,
- graphiql: true,
- enhanceGraphiql: true,
- allowExplain: () => true,
- // simpleCollections: 'only',
- simpleCollections: 'both',
- graphileBuildOptions: {
- pgOmitListSuffix: true,
- connectionFilterRelations: true,
- connectionFilterAllowNullInput: true,
+function initPostgraphile(credentials) {
+ const connectionString = `postgres://${credentials.user}:${credentials.password}@${credentials.host}:5432/${credentials.database}`;
+
+ return postgraphile(
+ connectionString,
+ 'public',
+ {
+ // watchPg: true,
+ disableDefaultMutations: true,
+ dynamicJson: true,
+ graphiql: true,
+ enhanceGraphiql: true,
+ allowExplain: () => true,
+ // simpleCollections: 'only',
+ simpleCollections: 'both',
+ graphileBuildOptions: {
+ pgOmitListSuffix: true,
+ connectionFilterRelations: true,
+ connectionFilterAllowNullInput: true,
+ },
+ appendPlugins: [
+ PgSimplifyInflectorPlugin,
+ PgConnectionFilterPlugin,
+ PgOrderByRelatedPlugin,
+ ...ActorPlugins,
+ ...SitePlugins,
+ ...ReleasePlugins,
+ ...MediaPlugins,
+ ],
+ pgSettings,
},
- appendPlugins: [
- PgSimplifyInflectorPlugin,
- PgConnectionFilterPlugin,
- PgOrderByRelatedPlugin,
- ...ActorPlugins,
- ...SitePlugins,
- ...ReleasePlugins,
- ...MediaPlugins,
- ],
pgSettings,
- },
- pgSettings,
-);
+ );
+}
+
+module.exports = initPostgraphile;
diff --git a/src/web/server.js b/src/web/server.js
index 3b1682a66..ce8cf9897 100755
--- a/src/web/server.js
+++ b/src/web/server.js
@@ -13,7 +13,7 @@ const logger = require('../logger')(__filename);
const knex = require('../knex');
const errorHandler = require('./error');
-const pg = require('./postgraphile');
+const initPg = require('./postgraphile');
const {
login,
@@ -83,7 +83,7 @@ async function initServer() {
router.use(bodyParser.json({ strict: false }));
router.use(session({ ...config.web.session, store }));
- router.use(pg);
+ router.use(initPg(config.database.query));
router.use((req, res, next) => {
req.session.safeId = req.session.safeId || nanoid();