forked from DebaucheryLibrarian/traxxx
Added a separate task queue for video streams to prevent ffmpeg from overstressing the CPU. Fixed entity parent in the scene REST API.
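The core of the change is a bounded task queue around the ffmpeg stream downloads. Below is a minimal sketch of that pattern, using the same promise-task-queue define/push calls and concurrency option that appear in the src/media.js diff; the task body, the fixed concurrency value, and the fetchAllStreams helper here are placeholders, not the actual implementation.

// Sketch only: promise-task-queue lets callers register a named task with a
// concurrency cap, then push work onto it; pushes beyond the cap wait their turn.
const taskQueue = require('promise-task-queue');

const streamQueue = taskQueue();

// Task name and options mirror the diff; the body is a stand-in for the real
// ffmpeg download performed by fetchStreamSource.
streamQueue.define('fetchStreamSource', async ({ source }) => {
	return { mimetype: 'video/mp4', stream: source.stream };
}, {
	concurrency: 2, // the real code reads config.media.streamConcurrency
});

// Callers push onto the queue and await the result, so only a bounded number
// of stream downloads run at any moment.
async function fetchAllStreams(sources) {
	return Promise.all(sources.map(source => streamQueue.push('fetchStreamSource', { source })));
}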
@@ -30,11 +30,6 @@ const { argv } = yargs
		type: 'boolean',
		alias: 'web',
	})
	.option('all', {
		describe: 'Scrape channels and networks defined in configuration',
		type: 'boolean',
		alias: 'scrape',
	})
	.option('networks', {
		describe: 'Network to scrape all channels from (overrides configuration)',
		type: 'array',
src/media.js
@@ -13,6 +13,7 @@ const mime = require('mime');
const ffmpeg = require('fluent-ffmpeg');
const sharp = require('sharp');
const blake2 = require('blake2');
const taskQueue = require('promise-task-queue');

const logger = require('./logger')(__filename);
const argv = require('./argv');
@@ -22,6 +23,7 @@ const bulkInsert = require('./utils/bulk-insert');
const { get } = require('./utils/qu');

const pipeline = util.promisify(stream.pipeline);
const streamQueue = taskQueue();

function sampleMedias(medias, limit = config.media.limit, preferLast = true) {
	// limit media sets, use extras as fallbacks
@@ -419,7 +421,7 @@ async function fetchHttpSource(source, tempFileTarget, hashStream) {
	};
}

async function fetchStreamSource(source, tempFileTarget, tempFilePath, hashStream) {
streamQueue.define('fetchStreamSource', async ({ source, tempFileTarget, hashStream }) => {
	const meta = { mimetype: 'video/mp4' };

	const video = ffmpeg(source.stream)
@@ -434,7 +436,9 @@ async function fetchStreamSource(source, tempFileTarget, tempFilePath, hashStream) {
	logger.verbose(`Finished fetching stream from ${source.stream}`);

	return meta;
}
}, {
	concurrency: config.media.streamConcurrency,
});

async function fetchSource(source, baseMedia) {
	logger.silly(`Fetching media from ${source.src}`);
@@ -457,7 +461,7 @@ async function fetchSource(source, baseMedia) {
	});

	const { mimetype } = source.stream
		? await fetchStreamSource(source, tempFileTarget, tempFilePath, hashStream)
		? await streamQueue.push('fetchStreamSource', { source, tempFileTarget, hashStream })
		: await fetchHttpSource(source, tempFileTarget, hashStream);

	hasher.end();
@@ -20,10 +20,10 @@ function curateRelease(release, withMedia = false) {
			id: release.entity.id,
			name: release.entity.name,
			slug: release.entity.slug,
			parent: release.entity.parent && {
				id: release.entity.parent.id,
				name: release.entity.parent.name,
				slug: release.entity.parent.slug,
			parent: release.parent && {
				id: release.parent.id,
				name: release.parent.name,
				slug: release.parent.slug,
			},
		},
		actors: (release.actors || []).map(actor => ({
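For the scene REST API fix, the commit message indicates the channel's parent network should be read from release.entity.parent rather than release.parent. A hypothetical example of the curated entity block after the fix, with made-up ids, names, and slugs:

// Illustrative only: shape of the curated entity returned by the scene API,
// assuming the release.entity.parent variant is the corrected one.
const curatedEntityExample = {
	id: 123,
	name: 'Example Channel',
	slug: 'example-channel',
	parent: {
		id: 45,
		name: 'Example Network',
		slug: 'example-network',
	},
};

console.log(curatedEntityExample.parent.slug); // 'example-network'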
@@ -74,13 +74,6 @@ async function scrapeReleases(scraper, entity, preData, upcoming = false) {
		return accReleases;
	}

	/* prevents argv.last from being satisfied
	if (latestReleases.length === 0) {
		// scraper successfully requested releases, but found none
		return accReleases;
	}
	*/

	const latestReleasesWithEntity = latestReleases.map(release => ({
		...release,
		entity: release.entity || entity, // allow override