Extended and improved transfer tool. Moved scenes up on movie page.

DebaucheryLibrarian 2023-06-03 02:51:42 +02:00
parent 62617ec6bf
commit 4b9a0e6bab
10 changed files with 314 additions and 102 deletions

View File

@@ -62,6 +62,11 @@
       />
     </div>
 
+    <Releases
+      v-if="release.scenes && release.scenes.length > 0"
+      :releases="release.scenes"
+    />
+
     <div class="row associations">
       <ul
         ref="actors"
@@ -109,11 +114,6 @@
         </div>
       </div>
 
-      <Releases
-        v-if="release.scenes && release.scenes.length > 0"
-        :releases="release.scenes"
-      />
-
       <div
         v-if="release.directors && release.directors.length > 0"
         class="row"

View File

@@ -337,6 +337,7 @@ module.exports = {
   },
   media: {
     path: './media',
+    transferSource: 'http://localhost:5000/media',
     maxSize: 1000,
     quality: 80,
     thumbnailSize: 320, // width for 16:9 will be exactly 576px

View File

@@ -272,9 +272,15 @@ const { argv } = yargs
   })
   .option('flush-orphaned-media', {
     describe: 'Remove all orphaned media items from database and disk.',
-    type: 'array',
+    type: 'boolean',
     alias: 'flush-media',
   })
+  .option('flush-media-files', {
+    describe: 'Remove files from storage when flushing media.',
+    type: 'boolean',
+    alias: 'flush-files',
+    default: true,
+  })
   .option('flush-channels', {
     describe: 'Delete all scenes and movies from channels.',
     type: 'array',
@@ -339,6 +345,11 @@ const { argv } = yargs
     alias: ['timeout'],
     default: 60000,
   })
+  .option('media-source', {
+    describe: 'Traxxx host to use for media transfers.',
+    type: 'string',
+    default: config.media.transferSource,
+  })
   .coerce('after', interpretAfter)
   .coerce('actors-update', (after) => interpretAfter(after, true));
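The flush guards added in the diffs below (`argv.flushOrphanedMedia !== false`) lean on a yargs subtlety: a boolean option without a `default` stays `undefined` when the flag is absent, and only the explicit `--no-` negation sets it to `false`. A minimal standalone sketch of that behaviour — only the option name is taken from the diff above, everything else is illustrative:

```js
const yargs = require('yargs/yargs');

// Parse a given argv array with the same option shape as above.
const parse = (args) => yargs(args)
  .option('flush-orphaned-media', { type: 'boolean', alias: 'flush-media' })
  .parse();

console.log(parse([]).flushOrphanedMedia);                            // undefined → guard passes, flush runs
console.log(parse(['--flush-media']).flushOrphanedMedia);             // true → flush runs
console.log(parse(['--no-flush-orphaned-media']).flushOrphanedMedia); // false → guard skips the flush
```

Hence the callers below compare against `false` rather than testing truthiness. The new `flush-media-files` option takes the opposite approach: with `default: true`, passing `--no-flush-files` is the way to keep files on disk while still deleting the database rows.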

View File

@@ -393,7 +393,9 @@ async function flushEntities(networkSlugs = [], channelSlugs = []) {
   logger.info(`Removed ${deletedScenesCount} scenes, ${deletedMoviesCount} movies and ${deletedSeriesCount} series for ${entitySlugs}`);
 
-  await flushOrphanedMedia();
+  if (argv.flushOrphanedMedia !== false) {
+    await flushOrphanedMedia();
+  }
 }
 
 module.exports = {

View File

@@ -997,16 +997,20 @@ async function flushOrphanedMedia() {
     .returning(['media.id', 'media.is_s3', 'media.path', 'media.thumbnail', 'media.lazy'])
     .delete();
 
-  await Promise.all(orphanedMedia.filter((media) => !media.is_s3).map((media) => Promise.all([
-    media.path && fsPromises.unlink(path.join(config.media.path, media.path)).catch(() => { /* probably file not found */ }),
-    media.thumbnail && fsPromises.unlink(path.join(config.media.path, media.thumbnail)).catch(() => { /* probably file not found */ }),
-    media.lazy && fsPromises.unlink(path.join(config.media.path, media.lazy)).catch(() => { /* probably file not found */ }),
-  ])));
-
-  logger.info(`Removed ${orphanedMedia.length} media files from database and storage`);
-
-  if (config.s3.enabled) {
-    await deleteS3Objects(orphanedMedia.filter((media) => media.is_s3));
-  }
+  if (argv.flushMediaFiles) {
+    await Promise.all(orphanedMedia.filter((media) => !media.is_s3).map((media) => Promise.all([
+      media.path && fsPromises.unlink(path.join(config.media.path, media.path)).catch(() => { /* probably file not found */ }),
+      media.thumbnail && fsPromises.unlink(path.join(config.media.path, media.thumbnail)).catch(() => { /* probably file not found */ }),
+      media.lazy && fsPromises.unlink(path.join(config.media.path, media.lazy)).catch(() => { /* probably file not found */ }),
+    ])));
+
+    if (config.s3.enabled) {
+      await deleteS3Objects(orphanedMedia.filter((media) => media.is_s3));
+    }
+
+    logger.info(`Removed ${orphanedMedia.length} media files from database and storage`);
+  } else {
+    logger.info(`Removed ${orphanedMedia.length} media files from database, but not from storage`);
+  }
 
   try {
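The branch above keeps the two concerns separate: database rows always go, files only when `argv.flushMediaFiles` is set. The unlink calls deliberately swallow errors so that an already-missing file cannot abort the batch. The same pattern isolated as a sketch — `unlinkQuietly` and the hard-coded media root are inventions of this example, not names from the codebase:

```js
const fsPromises = require('fs').promises;
const path = require('path');

// Best-effort deletion: resolve the stored relative path against the media
// root and ignore failures, mirroring the flush loop above.
async function unlinkQuietly(mediaRoot, relativePath) {
  if (!relativePath) {
    return; // thumbnail/lazy variants may be null
  }

  await fsPromises.unlink(path.join(mediaRoot, relativePath))
    .catch(() => { /* probably file not found */ });
}

// e.g. await unlinkQuietly('./media', media.thumbnail);
```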

View File

@@ -4,6 +4,7 @@ const inquirer = require('inquirer');
 
 const logger = require('./logger')(__filename);
 const knex = require('./knex');
+const argv = require('./argv');
 const { flushOrphanedMedia } = require('./media');
 const { graphql } = require('./web/graphql');
@@ -359,7 +360,9 @@ async function flushScenes() {
   }
 
   const deleteCount = await deleteScenes(sceneIds);
 
-  await flushOrphanedMedia();
+  if (argv.flushOrphanedMedia !== false) {
+    await flushOrphanedMedia();
+  }
 
   logger.info(`Removed ${deleteCount}/${sceneIds.length} scenes`);
 }
@@ -380,7 +383,9 @@ async function flushMovies() {
   }
 
   const deleteCount = await deleteMovies(movieIds);
 
-  await flushOrphanedMedia();
+  if (argv.flushOrphanedMedia !== false) {
+    await flushOrphanedMedia();
+  }
 
   logger.info(`Removed ${deleteCount}/${movieIds.length} movies`);
 }
@@ -401,7 +406,9 @@ async function flushSeries() {
   }
 
   const deleteCount = await deleteSeries(serieIds);
 
-  await flushOrphanedMedia();
+  if (argv.flushOrphanedMedia !== false) {
+    await flushOrphanedMedia();
+  }
 
   logger.info(`Removed ${deleteCount}/${serieIds.length} series`);
 }
@@ -437,7 +444,9 @@ async function flushBatches(batchIds) {
   logger.info(`Removed ${deletedScenesCount} scenes and ${deletedMoviesCount} movies for batches ${batchIds}`);
 
-  await flushOrphanedMedia();
+  if (argv.flushOrphanedMedia !== false) {
+    await flushOrphanedMedia();
+  }
 }
 
 module.exports = {

View File

@@ -1,9 +1,11 @@
 'use strict';
 
 const config = require('config');
-const fs = require('fs').promises;
+const fs = require('fs');
+const path = require('path');
 const moment = require('moment');
 const Promise = require('bluebird');
+const bhttp = require('bhttp');
 const { nanoid } = require('nanoid/non-secure');
 const AWS = require('aws-sdk');
@@ -22,11 +24,9 @@ const s3 = new AWS.S3({
   },
 });
 
-console.log(Object.keys(s3));
-
 // NOT TRANSFERRED, unutilized on old server: production location, availabile qualities, actor alias for, actor entry id, chapter posters, chapter photos
 
-const releaseFields = `
+const sceneFields = `
   entryId
   shootId
   title
@@ -43,6 +43,16 @@ const releaseFields = `
   studio {
     slug
   }
+  movies: moviesScenesBySceneId {
+    movie {
+      title
+      entryId
+      entity {
+        slug
+        type
+      }
+    }
+  }
   actors: releasesActors {
     actor {
       name
@@ -56,6 +66,7 @@ const releaseFields = `
   }
   directors: releasesDirectors {
     director {
+      name
       slug
       entryId
       entity {
@@ -164,12 +175,57 @@ const releaseFields = `
   createdAt
 `;
 
+const movieFields = `
+  entryId
+  title
+  url
+  date
+  datePrecision
+  entity {
+    slug
+    type
+  }
+  poster: moviesPoster {
+    media {
+      hash
+      path
+      thumbnail
+      lazy
+      s3: isS3
+      mime
+      index
+      width
+      height
+      size
+      source
+      sourcePage
+    }
+  }
+  covers: moviesCovers {
+    media {
+      hash
+      path
+      thumbnail
+      lazy
+      s3: isS3
+      mime
+      index
+      width
+      height
+      size
+      source
+      sourcePage
+    }
+  }
+  createdAt
+`;
+
 async function save() {
   const limit = args.limit || 1000;
   const offset = args.offset || 0;
 
   const { releases } = await graphql(`
-    query SearchReleases(
+    query SearchScenes(
       $limit: Int = 20
       $offset: Int = 0
     ) {
@@ -178,20 +234,40 @@ async function save() {
       offset: $offset
       orderBy: DATE_DESC
     ) {
-      ${releaseFields}
+      ${sceneFields}
     }
   }
   `, {
     limit,
     offset,
-  });
+  }, 'owner');
 
+  const { movies } = await graphql(`
+    query SearchScenes(
+      $limit: Int = 20
+      $offset: Int = 0
+    ) {
+      movies(
+        first: $limit
+        offset: $offset
+        orderBy: DATE_DESC
+      ) {
+        ${movieFields}
+      }
+    }
+  `, {
+    limit,
+    offset,
+  }, 'owner');
 
-  const curatedReleases = releases.map((release) => ({
+  const curatedScenes = releases.map((release) => ({
     ...release,
+    type: 'release',
     actors: release.actors.filter(Boolean).map(({ actor }) => actor),
     directors: release.directors.filter(Boolean).map(({ director }) => director),
     studio: release.studio?.slug,
     tags: release.tags.map(({ tag }) => tag?.slug).filter(Boolean),
+    movies: release.movies?.map(({ movie }) => movie) || [],
     chapters: release.chapters.filter(Boolean).map((chapter) => ({
       ...chapter,
       tags: chapter.tags.map(({ tag }) => tag?.slug).filter(Boolean),
@@ -203,10 +279,17 @@ async function save() {
     covers: release.covers.filter(Boolean).map(({ media }) => media),
   }));
 
-  const filename = `export-${offset}-${offset + limit}-${moment().format('YYYY-MM-DD_hh_mm_ss')}.json`;
-  const serializedData = JSON.stringify(curatedReleases, null, 4);
+  const curatedMovies = movies.map((movie) => ({
+    ...movie,
+    type: 'movie',
+    poster: movie.poster?.media,
+    covers: movie.covers.filter(Boolean).map(({ media }) => media),
+  }));
 
-  await fs.writeFile(filename, serializedData);
+  const filename = `export-${offset}-${offset + limit}-${moment().format('YYYY-MM-DD_hh_mm_ss')}.json`;
+  const serializedData = JSON.stringify([...curatedScenes, ...curatedMovies], null, 4);
+
+  await fs.promises.writeFile(filename, serializedData);
 
   console.log(`Saved ${releases.length} releases to ${filename}`);
@@ -300,18 +383,73 @@ async function addReleaseChapters(release, context) {
   }, Promise.resolve());
 }
 
+const dirs = {
+  path: '',
+  thumbnail: 'thumbs',
+  lazy: 'lazy',
+};
+
+async function transferMedia(media, target) {
+  return ['path', 'thumbnail', 'lazy'].reduce(async (chain, type) => {
+    await chain;
+
+    const filename = `${media.hash}${path.extname(media[type])}`;
+    const filepath = path.join(target, dirs[type], filename);
+    const temp = path.join('media/temp', filepath);
+
+    const url = new URL(media[type], `${args.mediaSource}/`).href;
+
+    console.log('Transferring media', url);
+
+    const res = await bhttp.get(url, { stream: true });
+
+    if (res.statusCode !== 200) {
+      console.warn(`Missing ${target} ${url}`);
+      return;
+    }
+
+    await fs.promises.mkdir(path.dirname(temp), { recursive: true });
+
+    await new Promise((resolve, reject) => {
+      const fileStream = fs.createWriteStream(temp);
+
+      res.pipe(fileStream);
+
+      res.on('error', () => { reject(); });
+      fileStream.on('finish', () => { resolve(); });
+      fileStream.on('error', () => { reject(); });
+    });
+
+    if (args.s3) {
+      await s3.upload({
+        Bucket: config.s3.bucket,
+        Body: fs.createReadStream(temp),
+        Key: filepath,
+        ContentType: media.mime,
+      }).promise();
+
+      await fs.promises.unlink(temp);
+    }
+  }, Promise.resolve());
+}
+
 async function addReleaseMedia(medias, release, target) {
   return Promise.all(medias.filter(Boolean).map(async (media) => {
-    try {
-      const id = nanoid();
+    const existingMedia = await knex('media')
+      .where('hash', media.hash)
+      .first();
 
+    const id = existingMedia?.id || nanoid();
+
+    if (!existingMedia) {
       await knex('media').insert({
         id,
         hash: media.hash,
-        path: media.path,
-        thumbnail: media.thumbnail,
-        lazy: media.lazy,
-        is_s3: media.s3,
+        path: path.join(target, '', `${media.hash}${path.extname(media.path)}`),
+        thumbnail: path.join(target, 'thumbs', `${media.hash}${path.extname(media.thumbnail)}`),
+        lazy: path.join(target, 'lazy', `${media.hash}${path.extname(media.lazy)}`),
+        // is_s3: media.s3,
+        is_s3: true,
         index: media.index,
         mime: media.mime,
         size: media.size,
@@ -321,19 +459,42 @@ async function addReleaseMedia(medias, release, target) {
         source_page: media.sourcePage,
       });
 
-      await knex(`releases_${target}`).insert({
-        release_id: release.id,
-        media_id: id,
-      });
-    } catch (error) {
-      console.log(`Skipped existing media ${media.hash} from ${media.url}: ${error.message}`);
+      if (args.mediaSource) {
+        await transferMedia(media, target);
+      }
     }
+
+    await knex(`${release.type}s_${target}`).insert({
+      [`${release.type}_id`]: release.id,
+      media_id: id,
+    });
   }));
 }
 
+async function linkMovieScenes(release, context) {
+  await release.movies.reduce(async (chain, linkedMovie) => {
+    await chain;
+
+    const movie = context.movies.find((storedMovie) => storedMovie.entryId === linkedMovie.entryId
+      && storedMovie.entity.slug === linkedMovie.entity.slug
+      && storedMovie.entity.type === linkedMovie.entity.type);
+
+    console.log('movie', linkedMovie, movie);
+
+    if (!movie) {
+      throw new Error(`Missing ${linkedMovie.entity.slug} movie '${linkedMovie.title}' in '${release.title}'`);
+    }
+
+    await knex('movies_scenes').insert({
+      movie_id: movie.id,
+      scene_id: release.id,
+    });
+  }, Promise.resolve());
+}
+
 async function addRelease(release, context) {
-  const existingRelease = await knex('releases')
-    .leftJoin('entities', 'entities.id', 'releases.entity_id')
+  const existingRelease = await knex(`${release.type}s`)
+    .leftJoin('entities', 'entities.id', `${release.type}s.entity_id`)
     .where('entry_id', release.entryId)
     .where('entities.slug', release.entity.slug)
     .where('entities.type', release.entity.type)
@@ -351,47 +512,58 @@ async function addRelease(release, context) {
     throw new Error(`Release contains non-existent ${release.entity.type} '${release.entity.slug}'`);
   }
 
-  const [releaseId] = await knex('releases')
+  const [releaseEntry] = await knex(`${release.type}s`)
     .insert({
       entry_id: release.entryId,
       entity_id: entity.id,
-      studio_id: context.studioIdsBySlug[release.studio],
-      shoot_id: release.shootId,
       url: release.url,
       title: release.title,
       slug: release.slug,
      date: release.date,
       date_precision: release.datePrecision,
-      production_date: release.productionDate,
-      description: release.description,
-      duration: release.duration,
       created_batch_id: context.batchId,
       updated_batch_id: context.batchId,
+      ...(release.type === 'scene' && {
+        shoot_id: release.shootId,
+        studio_id: context.studioIdsBySlug[release.studio],
+        production_date: release.productionDate,
+        description: release.description,
+        duration: release.duration,
+      }),
     })
-    .returning('id');
+    .returning(['id', 'entry_id']);
 
-  const releaseWithId = { ...release, id: releaseId };
+  const releaseWithId = {
+    ...release,
+    id: releaseEntry.id,
+    entityId: entity.id,
+  };
 
   await Promise.all([
-    addReleaseTags(releaseWithId, context),
-    addReleaseActors(releaseWithId, context),
-    addReleaseDirectors(releaseWithId, context),
-    addReleaseChapters(releaseWithId, context),
     addReleaseMedia([releaseWithId.poster], releaseWithId, 'posters', context),
-    addReleaseMedia(releaseWithId.photos, releaseWithId, 'photos', context),
-    // addReleaseMedia(releaseWithId.covers, releaseWithId, 'covers', context),
+    ...(release.type === 'release' ? [
+      addReleaseTags(releaseWithId, context),
+      addReleaseActors(releaseWithId, context),
+      addReleaseDirectors(releaseWithId, context),
+      addReleaseChapters(releaseWithId, context),
+      addReleaseMedia(releaseWithId.photos, releaseWithId, 'photos', context),
+      linkMovieScenes(releaseWithId, context),
+    ] : []),
+    ...(release.type === 'movie' ? [
+      addReleaseMedia(releaseWithId.covers, releaseWithId, 'covers', context),
+    ] : []),
   ]);
 
-  return true;
+  return releaseWithId;
 }
 
 async function load() {
-  const file = await fs.readFile(args.file, 'utf8');
-  const releases = JSON.parse(file);
+  const file = await fs.promises.readFile(args.file, 'utf8');
+  const releases = JSON.parse(file).slice(0, args.limit || Infinity);
 
   const [batchId] = await knex('batches').insert({ comment: `import ${args.file}` }).returning('id');
 
-  const aggTags = Array.from(new Set(releases.flatMap((release) => [...release.tags, ...release.chapters.flatMap((chapter) => chapter.tags)]).filter(Boolean)));
+  const aggTags = Array.from(new Set(releases.filter((release) => release.type === 'release').flatMap((release) => [...release.tags, ...release.chapters.flatMap((chapter) => chapter.tags)]).filter(Boolean)));
   const aggStudios = Array.from(new Set(releases.map((release) => release.studio).filter(Boolean)));
 
   const tags = await knex('tags')
@@ -406,14 +578,22 @@ async function load() {
   const tagIdsBySlug = Object.fromEntries(tags.map((tag) => [tag.slug, tag.id]));
   const studioIdsBySlug = Object.fromEntries(studios.map((studio) => [studio.slug, studio.id]));
 
-  const added = await releases.reduce(async (chain, release) => {
+  const addedMovies = await releases.filter((release) => release.type === 'movie').reduce(async (chain, release) => {
     const acc = await chain;
-    const isAdded = await addRelease(release, { batchId, tagIdsBySlug, studioIdsBySlug });
+    const movie = await addRelease(release, { batchId, tagIdsBySlug, studioIdsBySlug });
 
-    return acc.concat(isAdded);
+    return acc.concat(movie);
   }, Promise.resolve([]));
 
-  console.log(`Loaded ${added.filter(Boolean).length}/${releases.length} scenes in batch ${batchId}`);
+  const addedScenes = await releases.filter((release) => release.type === 'release').reduce(async (chain, release) => {
+    const acc = await chain;
+    const scene = await addRelease(release, { batchId, movies: addedMovies, tagIdsBySlug, studioIdsBySlug });
+
+    return acc.concat(scene);
+  }, Promise.resolve([]));
+
+  console.log(`Loaded ${addedMovies.filter(Boolean).length}/${releases.filter((release) => release.type === 'movie').length} movies in batch ${batchId}`);
+  console.log(`Loaded ${addedScenes.filter(Boolean).length}/${releases.filter((release) => release.type === 'release').length} scenes in batch ${batchId}`);
 
   process.exit();
 }
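Two details of the transfer flow above are easy to miss. First, `transferMedia` resolves each stored relative path against `args.mediaSource` with the WHATWG `URL` constructor, and the trailing slash appended to the base is load-bearing: without it, resolution replaces the base's last path segment instead of appending to it. A runnable sketch (the relative path is a made-up example; the base matches the new config default):

```js
const base = 'http://localhost:5000/media';
const relative = 'photos/ab/abc123.jpeg';

// With the trailing slash, the relative path is appended under /media/.
console.log(new URL(relative, `${base}/`).href);
// http://localhost:5000/media/photos/ab/abc123.jpeg

// Without it, the 'media' segment would be replaced and the fetch would miss.
console.log(new URL(relative, base).href);
// http://localhost:5000/photos/ab/abc123.jpeg
```

Second, ordering now matters in `load()`: movies are imported before scenes so that `linkMovieScenes` can find every referenced movie in `context.movies`, and media is deduplicated by hash, so a file shared between a movie and its scenes is inserted and transferred only once.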

View File

@@ -1,12 +1,15 @@
 'use strict';
 
+const config = require('config');
 const { withPostGraphileContext } = require('postgraphile');
 const { graphql } = require('graphql');
 
-const pg = require('./postgraphile');
+const initPg = require('./postgraphile');
 const logger = require('../logger')(__filename);
 
-async function query(graphqlQuery, params) {
+async function query(graphqlQuery, params, role = 'query') {
+  const pg = initPg(config.database[role]);
+
   return withPostGraphileContext(pg, async (context) => {
     const schema = await pg.getGraphQLSchema();
     const result = await graphql(schema, graphqlQuery, null, context, params);

View File

@@ -1,8 +1,6 @@
 'use strict';
 
 /* eslint-disable arrow-body-style */
 
-const config = require('config');
-
 const { postgraphile } = require('postgraphile');
 const PgConnectionFilterPlugin = require('postgraphile-plugin-connection-filter');
@@ -11,41 +9,45 @@ const PgOrderByRelatedPlugin = require('@graphile-contrib/pg-order-by-related');
 const { ActorPlugins, SitePlugins, ReleasePlugins, MediaPlugins } = require('./plugins/plugins');
 
-const connectionString = `postgres://${config.database.query.user}:${config.database.query.password}@${config.database.query.host}:5432/${config.database.query.database}`;
-
 async function pgSettings(req) {
   return {
     'user.id': req.session.user?.id || null, // undefined is passed as an empty string, avoid
   };
 }
 
-module.exports = postgraphile(
-  connectionString,
-  'public',
-  {
-    // watchPg: true,
-    disableDefaultMutations: true,
-    dynamicJson: true,
-    graphiql: true,
-    enhanceGraphiql: true,
-    allowExplain: () => true,
-    // simpleCollections: 'only',
-    simpleCollections: 'both',
-    graphileBuildOptions: {
-      pgOmitListSuffix: true,
-      connectionFilterRelations: true,
-      connectionFilterAllowNullInput: true,
-    },
-    appendPlugins: [
-      PgSimplifyInflectorPlugin,
-      PgConnectionFilterPlugin,
-      PgOrderByRelatedPlugin,
-      ...ActorPlugins,
-      ...SitePlugins,
-      ...ReleasePlugins,
-      ...MediaPlugins,
-    ],
-    pgSettings,
-  },
-  pgSettings,
-);
+function initPostgraphile(credentials) {
+  const connectionString = `postgres://${credentials.user}:${credentials.password}@${credentials.host}:5432/${credentials.database}`;
+
+  return postgraphile(
+    connectionString,
+    'public',
+    {
+      // watchPg: true,
+      disableDefaultMutations: true,
+      dynamicJson: true,
+      graphiql: true,
+      enhanceGraphiql: true,
+      allowExplain: () => true,
+      // simpleCollections: 'only',
+      simpleCollections: 'both',
+      graphileBuildOptions: {
+        pgOmitListSuffix: true,
+        connectionFilterRelations: true,
+        connectionFilterAllowNullInput: true,
+      },
+      appendPlugins: [
+        PgSimplifyInflectorPlugin,
+        PgConnectionFilterPlugin,
+        PgOrderByRelatedPlugin,
+        ...ActorPlugins,
+        ...SitePlugins,
+        ...ReleasePlugins,
+        ...MediaPlugins,
+      ],
+      pgSettings,
+    },
+    pgSettings,
+  );
+}
+
+module.exports = initPostgraphile;
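Turning the postgraphile module into a factory is what allows role-specific credentials: the web server keeps its restricted query role, while the transfer tool's GraphQL helper can request an elevated one. A sketch of both call sites — only `config.database.query` is visible in these diffs; a sibling `config.database.owner` entry is an assumption, implied by the `graphql(..., 'owner')` calls in the transfer tool:

```js
const config = require('config');
const initPg = require('./postgraphile'); // path as in the diffs above

// Web server: restricted credentials, mounted as Express middleware.
const webMiddleware = initPg(config.database.query);

// Transfer tool (via the graphql helper): elevated credentials.
const ownerInstance = initPg(config.database.owner); // assumed config key

// Each call builds its own connection string, so the two instances hold
// separate connection pools against the same database.
```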

View File

@@ -13,7 +13,7 @@ const logger = require('../logger')(__filename);
 const knex = require('../knex');
 const errorHandler = require('./error');
 
-const pg = require('./postgraphile');
+const initPg = require('./postgraphile');
 
 const {
   login,
@@ -83,7 +83,7 @@ async function initServer() {
   router.use(bodyParser.json({ strict: false }));
   router.use(session({ ...config.web.session, store }));
 
-  router.use(pg);
+  router.use(initPg(config.database.query));
 
   router.use((req, res, next) => {
     req.session.safeId = req.session.safeId || nanoid();