Added WIP media module. Release search database function now returns full releases instead of search documents. Fixed page loop in update module.

This commit is contained in:
ThePendulum 2020-03-29 04:00:46 +02:00
parent 6d9f96c5d5
commit 93d4f0ff1a
19 changed files with 316 additions and 239 deletions

View File

@ -72,6 +72,7 @@ async function mounted() {
roleplay: [
'family',
'schoolgirl',
'nurse',
'maid',
],
tricks: [

View File

@ -37,7 +37,6 @@ function initReleasesActions(store, _router) {
query: $query
first: $limit
) {
release {
id
title
slug
@ -85,7 +84,6 @@ function initReleasesActions(store, _router) {
}
}
}
}
`, {
query,
limit,
@ -93,7 +91,7 @@ function initReleasesActions(store, _router) {
if (!res) return [];
return res.releases.map(release => curateRelease(release.release));
return res.releases.map(release => curateRelease(release));
}
async function fetchReleaseById({ _commit }, releaseId) {

View File

@ -18,7 +18,8 @@ exports.up = knex => Promise.resolve()
.defaultTo(0);
}))
.then(() => knex.schema.createTable('media', (table) => {
table.increments('id', 16);
table.string('id', 21)
.primary();
table.string('path');
table.string('thumbnail');
@ -35,7 +36,9 @@ exports.up = knex => Promise.resolve()
table.string('scraper', 32);
table.string('copyright', 100);
table.string('source', 1000);
table.string('source', 2100);
table.string('source_page', 2100);
table.text('comment');
table.string('group');
@ -47,23 +50,23 @@ exports.up = knex => Promise.resolve()
.defaultTo(knex.fn.now());
}))
.then(() => knex.schema.createTable('media_sfw', (table) => {
table.increments('id', 12);
table.string('id', 21)
.primary();
table.integer('media_id', 16)
table.string('media_id', 21)
.references('id')
.inTable('media');
table.unique('media_id');
.inTable('media')
.unique();
}))
.then(() => knex.raw(`
CREATE FUNCTION get_random_sfw_media_id() RETURNS int AS $$
CREATE FUNCTION get_random_sfw_media_id() RETURNS varchar AS $$
SELECT media_id FROM media_sfw
ORDER BY random()
LIMIT 1;
$$ LANGUAGE sql STABLE;
`))
.then(() => knex.schema.alterTable('media', (table) => {
table.integer('sfw_media_id', 16)
table.string('sfw_media_id', 21)
.references('id')
.inTable('media')
.defaultTo(knex.raw('get_random_sfw_media_id()'));
@ -112,7 +115,7 @@ exports.up = knex => Promise.resolve()
.references('id')
.inTable('tags');
table.integer('media_id', 16)
table.string('media_id', 21)
.notNullable()
.references('id')
.inTable('media');
@ -125,7 +128,7 @@ exports.up = knex => Promise.resolve()
.references('id')
.inTable('tags');
table.integer('media_id', 16)
table.string('media_id', 21)
.notNullable()
.references('id')
.inTable('media');
@ -472,7 +475,7 @@ exports.up = knex => Promise.resolve()
.references('id')
.inTable('actors');
table.integer('media_id', 16)
table.string('media_id', 21)
.notNullable()
.references('id')
.inTable('media');
@ -485,7 +488,7 @@ exports.up = knex => Promise.resolve()
.references('id')
.inTable('actors');
table.integer('media_id', 16)
table.string('media_id', 21)
.notNullable()
.references('id')
.inTable('media');
@ -622,7 +625,7 @@ exports.up = knex => Promise.resolve()
.references('id')
.inTable('releases');
table.integer('media_id', 16)
table.string('media_id', 21)
.notNullable()
.references('id')
.inTable('media');
@ -635,7 +638,7 @@ exports.up = knex => Promise.resolve()
.references('id')
.inTable('releases');
table.integer('media_id', 16)
table.string('media_id', 21)
.notNullable()
.references('id')
.inTable('media');
@ -648,7 +651,7 @@ exports.up = knex => Promise.resolve()
.references('id')
.inTable('releases');
table.integer('media_id', 16)
table.string('media_id', 21)
.notNullable()
.references('id')
.inTable('media');
@ -661,7 +664,7 @@ exports.up = knex => Promise.resolve()
.references('id')
.inTable('releases');
table.integer('media_id', 16)
table.string('media_id', 21)
.notNullable()
.references('id')
.inTable('media');
@ -674,7 +677,7 @@ exports.up = knex => Promise.resolve()
.references('id')
.inTable('releases');
table.integer('media_id', 16)
table.string('media_id', 21)
.notNullable()
.references('id')
.inTable('media');
@ -718,10 +721,12 @@ exports.up = knex => Promise.resolve()
CREATE UNIQUE INDEX releases_search_unique ON releases_search (release_id);
CREATE INDEX releases_search_index ON releases_search USING GIN (document);
CREATE FUNCTION search_releases(query text) RETURNS SETOF releases_search AS $$
SELECT * FROM releases_search AS search
CREATE FUNCTION search_releases(query text) RETURNS SETOF releases AS $$
SELECT * FROM releases WHERE releases.id IN (
SELECT release_id FROM releases_search AS search
WHERE search.document @@ plainto_tsquery('traxxx', regexp_replace(query, '\\.|-|(XXX\\.[\\d+|hd|sd].*$)', ' ', 'ig'))
ORDER BY ts_rank(search.document, plainto_tsquery('traxxx', regexp_replace(query, '\\.|-|(XXX\\.[\\d+|hd|sd].*$)', ' ', 'ig'))) DESC;
ORDER BY ts_rank(search.document, plainto_tsquery('traxxx', regexp_replace(query, '\\.|-|(XXX\\.[\\d+|hd|sd].*$)', ' ', 'ig'))) DESC
);
$$ LANGUAGE SQL STABLE;
CREATE FUNCTION search_sites(search text) RETURNS SETOF sites AS $$

Binary file not shown (before: 788 B, after: 1.1 KiB).

Binary file not shown (after: 1.2 MiB).

Binary file not shown (after: 80 KiB).

View File

@ -1,21 +0,0 @@
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width,height=device-height,initial-scale=1,maximum-scale=1,user-scalable=no">
<meta name="theme-color" content="#ff6c88">
<title>traxxx</title>
<link rel="stylesheet" href="/css/style.css">
<link rel="icon" href="/img/favicon/favicon-16x16.ico" sizes="16x16">
<link rel="icon" href="/img/favicon/favicon-32x32.ico" sizes="32x32">
<link rel="icon" href="/img/favicon/favicon-62x62.ico" sizes="64x64">
<script src="/js/bundle.js" defer></script>
</head>
<body>
<div id="container"></div>
</body>
</html>

View File

@ -565,6 +565,11 @@ const tags = [
name: 'oil',
slug: 'oil',
},
{
name: 'nurse',
slug: 'nurse',
group: 'roleplay',
},
{
name: 'cum in mouth',
slug: 'cum-in-mouth',

View File

@ -1,3 +1,4 @@
const nanoid = require('nanoid/non-secure');
const upsert = require('../src/utils/upsert');
const tagPosters = [
@ -16,6 +17,7 @@ const tagPosters = [
['bukkake', 'poster', 'Mia Malkova in "Facialized 2" for HardX'],
['caucasian', 0, 'Remy Lacroix for HardX'],
['creampie', 'poster', 'ALina Lopez in "Making Yourself Unforgettable" for Blacked'],
['cum-in-mouth', 1, 'Keisha Grey in Brazzers House'],
['da-tp', 0, 'Natasha Teen in LegalPorno SZ2164'],
['deepthroat', 0, 'Chanel Grey in "Deepthroating Is Fun" for Throated'],
['double-anal', 7, 'Adriana Chechik in "DP Masters 6" for Jules Jordan'],
@ -35,7 +37,7 @@ const tagPosters = [
['milf', 0, 'Olivia Austin in "Dredd 3" for Jules Jordan'],
['mff', 0, 'Madison Ivy, Adriana Chechik and Keiran Lee in "Day With A Pornstar" for Brazzers'],
['mfm', 5, 'Vina Sky in "Slut Puppies 15" for Jules Jordan'],
['cum-in-mouth', 1, 'Keisha Grey in Brazzers House'],
['nurse', 0, 'Sarah Vandella in "Cum For Nurse Sarah" for Brazzers'],
['orgy', 1, 'Megan Rain (DP), Morgan Lee (anal), Jessa Rhodes, Melissa Moore and Kimmy Granger in "Orgy Masters 8" for Jules Jordan'],
['pussy-eating', 0, 'Elena Kosha and Ivy Wolfe in "Bare" for Jules Jordan'],
['redhead', 0, 'Penny Pax in "The Submission of Emma Marx: Boundaries" for New Sensations'],
@ -47,6 +49,7 @@ const tagPosters = [
['triple-anal', 'poster', 'Kristy Black in SZ1986 for LegalPorno'],
]
.map(([slug, filename, comment], index) => ({
id: nanoid(),
tagSlug: slug,
path: `tags/${slug}/${filename}.jpeg`,
thumbnail: `tags/${slug}/${filename}_thumb.jpeg`,
@ -103,6 +106,7 @@ const tagPhotos = [
['cum-in-mouth', 'poster', 'Khloe Kapri'],
]
.map(([slug, fileIndex, comment], index) => ({
id: nanoid(),
tagSlug: slug,
path: `tags/${slug}/${fileIndex}.jpeg`,
thumbnail: `tags/${slug}/${fileIndex}_thumb.jpeg`,
@ -715,6 +719,7 @@ const sfw = Object.entries({
],
})
.map(([category, photos]) => photos.map(([photo, copyright], index) => ({
id: nanoid(),
path: `sfw/${category}/${photo}.jpeg`,
thumbnail: `sfw/${category}/thumbs/${photo}.jpeg`,
mime: 'image/jpeg',
@ -730,7 +735,11 @@ const sfw = Object.entries({
exports.seed = knex => Promise.resolve()
.then(async () => {
const { inserted } = await upsert('media', sfw, 'path');
const sfwMediaIds = inserted.map(mediaEntry => ({ media_id: mediaEntry.id }));
const sfwMediaIds = inserted.map(mediaEntry => ({
id: nanoid(),
media_id: mediaEntry.id,
}));
await upsert('media_sfw', sfwMediaIds, 'media_id');
})
@ -740,9 +749,9 @@ exports.seed = knex => Promise.resolve()
const tags = await knex('tags').whereIn('slug', tagMedia.map(item => item.tagSlug));
const { inserted, updated } = await upsert('media', tagMedia.map(({
path, thumbnail, mime, index, comment,
id, path, thumbnail, mime, index, comment,
}) => ({
path, thumbnail, mime, index, comment,
id, path, thumbnail, mime, index, comment,
})), 'path', knex);
const tagIdsBySlug = tags.reduce((acc, tag) => ({ ...acc, [tag.slug]: tag.id }), {});
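Note on the new string ids used above: the migration switches the media and media_sfw primary keys from auto-increment integers to string('id', 21) columns, and 21 characters is nanoid's default id length, so the seeds now generate their keys client-side. A minimal sketch of what one seeded row looks like (the path and the sample id are illustrative only):

const nanoid = require('nanoid/non-secure');

// Illustrative seed entry; a 21-character id (e.g. 'V1StGXR8_Z5jdHi6B-myT')
// fits the string('id', 21) column defined in the migration.
const mediaEntry = {
  id: nanoid(),
  path: 'tags/nurse/0.jpeg',
  thumbnail: 'tags/nurse/0_thumb.jpeg',
  mime: 'image/jpeg',
};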

View File

@ -6,8 +6,7 @@ const initServer = require('./web/server');
const knex = require('./knex');
const fetchUpdates = require('./updates');
const { fetchScenes, fetchMovies } = require('./deep');
const { storeReleases } = require('./store-releases');
const { updateReleasesSearch } = require('./releases');
const { storeReleases, updateReleasesSearch } = require('./store-releases');
const { scrapeActors } = require('./actors-legacy');
async function init() {
@ -38,8 +37,6 @@ async function init() {
...(deepScenes || []),
...(deepMovies || []),
]);
// await storeReleaseActors(updateReleases);
}
knex.destroy();

View File

@ -329,8 +329,6 @@ async function storeMedia(sources, domain, role, { entropyFilter = 2.5 } = {}) {
return {};
}
console.log(presentSources, presentSources.length);
// split up source list to prevent excessive RAM usage
const itemChunksBySource = await Promise.all(chunk(presentSources, 50).map(async (sourceChunk, index) => {
try {

View File

@ -1,9 +1,170 @@
'use strict';
function toBaseAvatars() {
const Promise = require('bluebird');
const nanoid = require('nanoid/non-secure');
const logger = require('./logger')(__filename);
const argv = require('./argv');
const knex = require('./knex');
const { get } = require('./utils/qu');
function itemsByKey(items, key) {
return items.reduce((acc, item) => ({ ...acc, [item[key]]: item }), {});
}
function toBaseSource(rawSource) {
if (rawSource.src || (rawSource.extract && rawSource.url)) {
const baseSource = {};
if (rawSource.src) baseSource.src = rawSource.src;
if (rawSource.quality) baseSource.quality = rawSource.quality;
if (rawSource.type) baseSource.type = rawSource.type;
if (rawSource.url) baseSource.url = rawSource.url;
if (rawSource.extract) baseSource.extract = rawSource.extract;
return baseSource;
}
if (typeof rawSource === 'string') {
return {
src: rawSource,
};
}
return null;
}
function baseSourceToBaseMedia(baseSource) {
if (Array.isArray(baseSource)) {
if (baseSource.length > 0) {
return {
id: nanoid(),
sources: baseSource,
};
}
return null;
}
if (baseSource) {
return {
id: nanoid(),
sources: [baseSource],
};
}
return null;
}
function fallbackMediaToBaseMedia(rawMedia) {
const baseSources = rawMedia
.map(source => toBaseSource(source))
.filter(Boolean);
return baseSourceToBaseMedia(baseSources);
}
function toBaseMedias(rawMedias) {
if (!rawMedias || rawMedias.length === 0) {
return [];
}
return rawMedias.map((rawMedia) => {
if (!rawMedia) {
return null;
}
if (Array.isArray(rawMedia)) {
// fallback sources provided
return fallbackMediaToBaseMedia(rawMedia);
}
const baseSource = toBaseSource(rawMedia);
return baseSourceToBaseMedia(baseSource);
}).filter(Boolean);
}
async function findSourceDuplicates(baseMedias) {
const sourceUrls = baseMedias
.map(baseMedia => baseMedia.sources.map(source => source.src))
.flat()
.filter(Boolean);
const extractUrls = baseMedias
.map(baseMedia => baseMedia.sources.map(source => source.url))
.flat()
.filter(Boolean);
const [existingSourceMedia, existingExtractMedia] = await Promise.all([
knex('media').whereIn('source', sourceUrls),
knex('media').whereIn('source_page', extractUrls),
]);
const existingSourceMediaByUrl = itemsByKey(existingSourceMedia, 'source');
const existingExtractMediaByUrl = itemsByKey(existingExtractMedia, 'source_page');
return {
existingSourceMediaByUrl,
existingExtractMediaByUrl,
};
}
async function extractSource(baseSource) {
if (!baseSource.extract || !baseSource.url) {
return baseSource;
}
const res = await get(baseSource.url);
console.log(res);
return baseSource;
}
async function fetchSource(baseSource, { existingSourceMediaByUrl, existingExtractMediaByUrl }) {
// attempts
// extract
const extractedSource = await extractSource(baseSource, existingExtractMediaByUrl);
console.log(extractedSource);
}
async function fetchMedia(baseMedia, existingMedias) {
await baseMedia.sources.reduce((result, baseSource, _baseSourceIndex) => result.catch(async () => {
await fetchSource(baseSource, existingMedias);
}), Promise.reject(new Error()));
}
async function storeMedias(baseMedias) {
const { existingSourceMediaByUrl, existingExtractMediaByUrl } = await findSourceDuplicates(baseMedias);
await Promise.map(baseMedias, async baseMedia => fetchMedia(baseMedia, { existingSourceMediaByUrl, existingExtractMediaByUrl }));
console.log(existingSourceMediaByUrl, existingExtractMediaByUrl);
}
async function associateReleaseMedia(releases) {
if (!argv.media) {
return;
}
const baseMediasByReleaseId = releases.reduce((acc, release) => ({
...acc,
[release.id]: {
poster: argv.images && argv.poster && toBaseMedias([release.poster]),
photos: argv.images && argv.photos && toBaseMedias(release.photos),
trailer: argv.videos && argv.trailer && toBaseMedias([release.trailer]),
teaser: argv.videos && argv.teaser && toBaseMedias([release.teaser]),
},
}), {});
const baseMedias = Object.values(baseMediasByReleaseId).map(releaseMedia => Object.values(releaseMedia)).flat(2);
await storeMedias(baseMedias);
}
module.exports = {
associateReleaseMedia,
};
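For context on the WIP module above, a rough sketch of the release media shapes it is written to normalize; the URLs, selector and release id below are made up. Plain strings and { src, quality } objects become direct sources, { url, extract } pairs are fetched and scraped, and an array stands for fallback sources for a single media item. storeReleases passes releases of this shape into associateReleaseMedia.

// Illustrative input only; nothing here comes from a real scraper.
const release = {
  id: 1234,
  poster: 'https://example.com/scenes/1234/poster.jpg', // plain src string
  photos: [
    { src: 'https://example.com/scenes/1234/01.jpg', quality: 1080 },
    { url: 'https://example.com/scenes/1234/photos/2', extract: q => q('.photo img', 'src') },
  ],
  trailer: [
    // array = fallback sources for one trailer, tried in order by fetchMedia
    { src: 'https://example.com/trailers/1234_1080p.mp4', quality: 1080 },
    { src: 'https://example.com/trailers/1234_720p.mp4', quality: 720 },
  ],
};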

View File

@ -1,48 +1,20 @@
'use strict';
const logger = require('./logger')(__filename);
const knex = require('./knex');
async function updateReleasesSearch(releaseIds) {
logger.info(`Updating search documents for ${releaseIds ? releaseIds.length : 'all' } releases`);
async function fetchReleases(limit = 100) {
const releases = await knex('releases').limit(limit);
const documents = await knex.raw(`
SELECT
releases.id AS release_id,
TO_TSVECTOR(
'traxxx',
releases.title || ' ' ||
networks.name || ' ' ||
networks.slug || ' ' ||
networks.url || ' ' ||
sites.name || ' ' ||
sites.slug || ' ' ||
COALESCE(sites.url, '') || ' ' ||
COALESCE(sites.alias, '') || ' ' ||
COALESCE(releases.shoot_id, '') || ' ' ||
COALESCE(TO_CHAR(releases.date, 'YYYY YY MM FMMM FMmonth mon DD FMDD'), '') || ' ' ||
STRING_AGG(COALESCE(actors.name, ''), ' ') || ' ' ||
STRING_AGG(COALESCE(tags.name, ''), ' ') || ' ' ||
STRING_AGG(COALESCE(tags_aliases.name, ''), ' ')
) as document
FROM releases
LEFT JOIN sites ON releases.site_id = sites.id
LEFT JOIN networks ON sites.network_id = networks.id
LEFT JOIN releases_actors AS local_actors ON local_actors.release_id = releases.id
LEFT JOIN releases_tags AS local_tags ON local_tags.release_id = releases.id
LEFT JOIN actors ON local_actors.actor_id = actors.id
LEFT JOIN tags ON local_tags.tag_id = tags.id
LEFT JOIN tags as tags_aliases ON local_tags.tag_id = tags_aliases.alias_for
${releaseIds ? 'WHERE releases.id = ANY(?)' : ''}
GROUP BY releases.id, sites.name, sites.slug, sites.alias, sites.url, networks.name, networks.slug, networks.url;
`, releaseIds && [releaseIds]);
return releases;
}
if (documents.rows?.length > 0) {
const query = knex('releases_search').insert(documents.rows).toString();
await knex.raw(`${query} ON CONFLICT (release_id) DO UPDATE SET document = EXCLUDED.document`);
}
async function searchReleases(query, limit = 100) {
const releases = await knex.raw('SELECT * FROM search_releases(?) LIMIT ?;', [query, limit]);
return releases.rows;
}
module.exports = {
updateReleasesSearch,
fetchReleases,
searchReleases,
};
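A hedged usage sketch of the two helpers this slimmed-down module now exports; the query string and limits are invented:

const { fetchReleases, searchReleases } = require('./releases');

(async () => {
  const latest = await fetchReleases(50); // plain SELECT with a LIMIT
  const matches = await searchReleases('nurse brazzers', 25); // full release rows via search_releases()
  console.log(latest.length, matches.length);
})();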

View File

@ -18,7 +18,7 @@ async function getPhotos(albumUrl) {
const pageUrl = `https://blacksonblondes.com${lastPhotoPage.replace(/\d+.jpg/, `${(index + 1).toString().padStart(3, '0')}.jpg`)}`;
return {
src: pageUrl,
url: pageUrl,
extract: q => q('.scenes-module img', 'src'),
};
});

View File

@ -5,8 +5,9 @@ const config = require('config');
const logger = require('./logger')(__filename);
const knex = require('./knex');
const slugify = require('./utils/slugify');
const { associateTags } = require('./tags');
const { associateActors } = require('./actors');
const { associateReleaseTags } = require('./tags');
const { associateReleaseMedia } = require('./media');
function curateReleaseEntry(release, batchId, existingRelease) {
const slug = slugify(release.title, '-', {
@ -149,6 +150,46 @@ async function filterDuplicateReleases(releases) {
};
}
async function updateReleasesSearch(releaseIds) {
logger.info(`Updating search documents for ${releaseIds ? releaseIds.length : 'all' } releases`);
const documents = await knex.raw(`
SELECT
releases.id AS release_id,
TO_TSVECTOR(
'traxxx',
releases.title || ' ' ||
networks.name || ' ' ||
networks.slug || ' ' ||
networks.url || ' ' ||
sites.name || ' ' ||
sites.slug || ' ' ||
COALESCE(sites.url, '') || ' ' ||
COALESCE(sites.alias, '') || ' ' ||
COALESCE(releases.shoot_id, '') || ' ' ||
COALESCE(TO_CHAR(releases.date, 'YYYY YY MM FMMM FMmonth mon DD FMDD'), '') || ' ' ||
STRING_AGG(COALESCE(actors.name, ''), ' ') || ' ' ||
STRING_AGG(COALESCE(tags.name, ''), ' ') || ' ' ||
STRING_AGG(COALESCE(tags_aliases.name, ''), ' ')
) as document
FROM releases
LEFT JOIN sites ON releases.site_id = sites.id
LEFT JOIN networks ON sites.network_id = networks.id
LEFT JOIN releases_actors AS local_actors ON local_actors.release_id = releases.id
LEFT JOIN releases_tags AS local_tags ON local_tags.release_id = releases.id
LEFT JOIN actors ON local_actors.actor_id = actors.id
LEFT JOIN tags ON local_tags.tag_id = tags.id AND tags.priority >= 7
LEFT JOIN tags as tags_aliases ON local_tags.tag_id = tags_aliases.alias_for AND tags_aliases.secondary = true
${releaseIds ? 'WHERE releases.id = ANY(?)' : ''}
GROUP BY releases.id, sites.name, sites.slug, sites.alias, sites.url, networks.name, networks.slug, networks.url;
`, releaseIds && [releaseIds]);
if (documents.rows?.length > 0) {
const query = knex('releases_search').insert(documents.rows).toString();
await knex.raw(`${query} ON CONFLICT (release_id) DO UPDATE SET document = EXCLUDED.document`);
}
}
async function storeReleases(releases) {
const [batchId] = await knex('batches').insert({ comment: null }).returning('id');
@ -169,15 +210,19 @@ async function storeReleases(releases) {
const releasesWithId = attachReleaseIds([].concat(uniqueReleases, duplicateReleases), [].concat(storedReleaseEntries, duplicateReleaseEntries));
await Promise.all([
associateTags(releasesWithId),
associateActors(releasesWithId),
associateReleaseTags(releasesWithId),
associateReleaseMedia(releasesWithId),
]);
logger.info(`Stored ${storedReleaseEntries.length} releases`);
await updateReleasesSearch(releasesWithId.map(release => release.id));
return releasesWithId;
}
module.exports = {
storeReleases,
updateReleasesSearch,
};

View File

@ -92,7 +92,7 @@ async function filterUniqueAssociations(tagAssociations) {
return uniqueAssociations;
}
async function associateTags(releases) {
async function associateReleaseTags(releases) {
const tagIdsBySlug = await matchReleaseTags(releases);
const siteTagIdsBySiteId = await getSiteTags(releases);
@ -103,5 +103,5 @@ async function associateTags(releases) {
}
module.exports = {
associateTags,
associateReleaseTags,
};

View File

@ -62,14 +62,16 @@ function needNextPage(uniqueReleases, pageAccReleases) {
return true;
}
if (uniqueReleases.every(release => !!release.date)) {
const oldestReleaseOnPage = uniqueReleases
.sort((releaseA, releaseB) => releaseB.date - releaseA.date)
.slice(-1)[0];
if (oldestReleaseOnPage && moment(oldestReleaseOnPage.date).isAfter(afterDate)) {
if (moment(oldestReleaseOnPage.date).isAfter(afterDate)) {
// oldest release on page is newer than the specified date cut-off
return true;
}
}
// dates missing, and limit for scenes without dates not yet reached
return pageAccReleases.length <= argv.nullDateLimit;

View File

@ -1,80 +1,17 @@
'use strict';
const {
fetchReleases,
fetchActorReleases,
fetchNetworkReleases,
fetchSiteReleases,
fetchTagReleases,
} = require('../releases');
const { fetchReleases, searchReleases } = require('../releases');
async function fetchReleasesApi(req, res) {
const releases = await fetchReleases({}, req.query);
const query = req.query.query || req.query.q;
res.send(releases);
}
async function fetchReleaseByIdApi(req, res) {
const [release] = await fetchReleases({
id: req.params.releaseId,
});
res.send(release);
}
async function fetchActorReleasesApi(req, res) {
const actorId = Number.isInteger(Number(req.params.actorId)) ? Number(req.params.actorId) : null;
const actorSlug = typeof req.params.actorId === 'string' ? req.params.actorId : null;
const releases = await fetchActorReleases({
id: actorId,
slug: actorSlug,
}, req.query);
res.send(releases);
}
async function fetchNetworkReleasesApi(req, res) {
const networkId = typeof req.params.networkId === 'number' ? req.params.networkId : null;
const networkSlug = typeof req.params.networkId === 'string' ? req.params.networkId : null;
const releases = await fetchNetworkReleases({
id: networkId,
slug: networkSlug,
}, req.query);
res.send(releases);
}
async function fetchSiteReleasesApi(req, res) {
const siteId = typeof req.params.siteId === 'number' ? req.params.siteId : null;
const siteSlug = typeof req.params.siteId === 'string' ? req.params.siteId : null;
const releases = await fetchSiteReleases({
id: siteId,
slug: siteSlug,
}, req.query);
res.send(releases);
}
async function fetchTagReleasesApi(req, res) {
const tagId = typeof req.params.tagId === 'number' ? req.params.tagId : null;
const tagSlug = typeof req.params.tagId === 'string' ? req.params.tagId : null;
const releases = await fetchTagReleases({
id: tagId,
slug: tagSlug,
}, req.query);
const releases = query
? await searchReleases(query, req.query.limit)
: await fetchReleases(req.query.limit);
res.send(releases);
}
module.exports = {
fetchReleases: fetchReleasesApi,
fetchReleaseById: fetchReleaseByIdApi,
fetchActorReleases: fetchActorReleasesApi,
fetchNetworkReleases: fetchNetworkReleasesApi,
fetchSiteReleases: fetchSiteReleasesApi,
fetchTagReleases: fetchTagReleasesApi,
};
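A hedged sketch of hitting the consolidated endpoint (the base URL and query values are hypothetical); a q or query parameter routes to searchReleases, and the handler falls back to fetchReleases otherwise:

// Browser or Node 18+ (global fetch assumed).
fetch('http://localhost:5000/api/releases?q=nurse&limit=10')
  .then(res => res.json())
  .then(releases => console.log(releases.map(release => release.title)));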

View File

@ -16,22 +16,8 @@ const { ActorPlugins, SitePlugins, ReleasePlugins } = require('./plugins/plugins
const {
fetchReleases,
fetchReleaseById,
fetchActorReleases,
fetchNetworkReleases,
fetchSiteReleases,
fetchTagReleases,
} = require('./releases');
const {
fetchNetworks,
fetchNetworksFromReleases,
} = require('./networks');
const { fetchActors } = require('./actors');
const { fetchSites } = require('./sites');
const { fetchTags } = require('./tags');
function initServer() {
const app = express();
const router = Router();
@ -75,24 +61,6 @@ function initServer() {
router.use(bodyParser.json({ strict: false }));
router.get('/api/releases', fetchReleases);
router.get('/api/releases/:releaseId', fetchReleaseById);
router.get('/api/releases/networks', fetchNetworksFromReleases);
router.get('/api/actors', fetchActors);
router.get('/api/actors/:actorId', fetchActors);
router.get('/api/actors/:actorId/releases', fetchActorReleases);
router.get('/api/networks', fetchNetworks);
router.get('/api/networks/:networkId', fetchNetworks);
router.get('/api/networks/:networkId/releases', fetchNetworkReleases);
router.get('/api/sites', fetchSites);
router.get('/api/sites/:siteId', fetchSites);
router.get('/api/sites/:siteId/releases', fetchSiteReleases);
router.get('/api/tags', fetchTags);
router.get('/api/tags/:tagId', fetchTags);
router.get('/api/tags/:tagId/releases', fetchTagReleases);
router.get('*', (req, res) => {
res.render(path.join(__dirname, '../../assets/index.ejs'), {