Added separate task queue for video streams to prevent ffmpeg from overstressing the CPU. Fixed entity parent in scene REST API.
parent b3435c97c3
commit e896d52968
@@ -88,6 +88,8 @@ export default {
  flex-grow: 1;
  overflow-y: auto;
  overflow-x: hidden;
  box-shadow: 0 0 3px var(--shadow-weak);
  z-index: 1;
}

.slide-enter-active,
@@ -11,10 +11,8 @@

<style lang="scss" scoped>
.footer {
  margin: 2rem 0 0 0;
  background: var(--background);
  background: var(--background-dim);
  color: var(--shadow);
  box-shadow: inset 0 1px 3px var(--darken-hint);
  font-size: .8rem;
  font-weight: bold;
  text-align: center;
@@ -30,5 +28,9 @@

 .footer-link {
   text-decoration: underline;
+
+  &:hover {
+    color: var(--primary);
+  }
 }
 </style>
@@ -1,5 +1,5 @@
 <template>
-  <div class="content">
+  <div class="home">
     <div
       ref="content"
       class="content-inner"
@@ -20,10 +20,10 @@
           :items-per-page="limit"
           class="pagination-bottom"
         />
       </div>
-
+      <Footer />
     </div>
   </div>
 </template>

 <script>
@@ -79,3 +79,11 @@ export default {
   },
 };
 </script>
+
+<style lang="scss" scoped>
+.home {
+  flex-grow: 1;
+  display: flex;
+  flex-direction: column;
+}
+</style>
@@ -197,6 +197,7 @@ module.exports = {
     lazyQuality: 90,
     videoQuality: [480, 360, 320, 540, 720, 1080, 2160, 270, 240, 180],
     limit: 25, // max number of photos per release
+    streamConcurrency: 2, // max number of video streams (m3u8 etc.) to fetch and process at once
   },
   titleSlugLength: 50,
 };
@@ -4968,7 +4968,7 @@ const sites = [
     },
   },
   {
-    name: 'Horny doctor',
+    name: 'Horny Doctor',
     slug: 'hornydoctor',
     url: 'https://www.hornydoctor.com',
     parent: 'porncz',
@@ -30,11 +30,6 @@ const { argv } = yargs
     type: 'boolean',
     alias: 'web',
   })
-  .option('all', {
-    describe: 'Scrape channels and networks defined in configuration',
-    type: 'boolean',
-    alias: 'scrape',
-  })
   .option('networks', {
     describe: 'Network to scrape all channels from (overrides configuration)',
     type: 'array',
src/media.js
@@ -13,6 +13,7 @@ const mime = require('mime');
 const ffmpeg = require('fluent-ffmpeg');
 const sharp = require('sharp');
 const blake2 = require('blake2');
+const taskQueue = require('promise-task-queue');

 const logger = require('./logger')(__filename);
 const argv = require('./argv');
@@ -22,6 +23,7 @@ const bulkInsert = require('./utils/bulk-insert');
 const { get } = require('./utils/qu');

 const pipeline = util.promisify(stream.pipeline);
+const streamQueue = taskQueue();

 function sampleMedias(medias, limit = config.media.limit, preferLast = true) {
   // limit media sets, use extras as fallbacks
@@ -419,7 +421,7 @@ async function fetchHttpSource(source, tempFileTarget, hashStream) {
   };
 }

-async function fetchStreamSource(source, tempFileTarget, tempFilePath, hashStream) {
+streamQueue.define('fetchStreamSource', async ({ source, tempFileTarget, hashStream }) => {
   const meta = { mimetype: 'video/mp4' };

   const video = ffmpeg(source.stream)
@@ -434,7 +436,9 @@ async function fetchStreamSource(source, tempFileTarget, tempFilePath, hashStream) {
   logger.verbose(`Finished fetching stream from ${source.stream}`);

   return meta;
-}
+}, {
+  concurrency: config.media.streamConcurrency,
+});

 async function fetchSource(source, baseMedia) {
   logger.silly(`Fetching media from ${source.src}`);
@@ -457,7 +461,7 @@ async function fetchSource(source, baseMedia) {
   });

   const { mimetype } = source.stream
-    ? await fetchStreamSource(source, tempFileTarget, tempFilePath, hashStream)
+    ? await streamQueue.push('fetchStreamSource', { source, tempFileTarget, hashStream })
     : await fetchHttpSource(source, tempFileTarget, hashStream);

   hasher.end();
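The media.js hunks above route every stream fetch through a single promise-task-queue instance, so at most config.media.streamConcurrency ffmpeg jobs run at the same time. Below is a minimal stand-alone sketch of that define/push pattern, not code from the commit: the task name transcodeStream, the simulated work, and the sample URLs are invented for illustration.

// Stand-alone sketch of the queueing pattern above (not part of the commit).
// 'transcodeStream', the simulated work and the sample URLs are hypothetical.
const taskQueue = require('promise-task-queue');

const queue = taskQueue();

// Jobs pushed under one task name share a single concurrency limit,
// so no more than two of these handlers run at the same time.
queue.define('transcodeStream', async ({ url }) => {
  // stand-in for the ffmpeg work done by fetchStreamSource()
  await new Promise(resolve => setTimeout(resolve, 1000));
  return { url, mimetype: 'video/mp4' };
}, {
  concurrency: 2, // the commit reads this limit from config.media.streamConcurrency
});

async function main() {
  const urls = ['a.m3u8', 'b.m3u8', 'c.m3u8', 'd.m3u8'];

  // The extra pushes wait in the queue instead of spawning more ffmpeg processes.
  const results = await Promise.all(urls.map(url => queue.push('transcodeStream', { url })));

  console.log(results);
}

main();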
@@ -20,10 +20,10 @@ function curateRelease(release, withMedia = false) {
     id: release.entity.id,
     name: release.entity.name,
     slug: release.entity.slug,
-    parent: release.entity.parent && {
-      id: release.entity.parent.id,
-      name: release.entity.parent.name,
-      slug: release.entity.parent.slug,
+    parent: release.parent && {
+      id: release.parent.id,
+      name: release.parent.name,
+      slug: release.parent.slug,
     },
   },
   actors: (release.actors || []).map(actor => ({
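The hunk above is the entity-parent fix from the commit message: the curated scene appears to take the parent channel/network from release.parent rather than release.entity.parent. A condensed, hypothetical sketch of that mapping follows; the sample release data is invented.

// Condensed illustration of the curated entity block above; only the mapping
// mirrors the diff, the sample release and its ids are made up.
function curateEntity(release) {
  return {
    id: release.entity.id,
    name: release.entity.name,
    slug: release.entity.slug,
    parent: release.parent && {
      id: release.parent.id,
      name: release.parent.name,
      slug: release.parent.slug,
    },
  };
}

const sampleRelease = {
  entity: { id: 2, name: 'Horny Doctor', slug: 'hornydoctor' },
  parent: { id: 1, name: 'PornCZ', slug: 'porncz' },
};

console.log(curateEntity(sampleRelease));
// -> { id: 2, name: 'Horny Doctor', slug: 'hornydoctor',
//      parent: { id: 1, name: 'PornCZ', slug: 'porncz' } }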
@@ -74,13 +74,6 @@ async function scrapeReleases(scraper, entity, preData, upcoming = false) {
     return accReleases;
   }

-  /* prevents argv.last from being satisfied
-  if (latestReleases.length === 0) {
-    // scraper successfully requested releases, but found none
-    return accReleases;
-  }
-  */
-
   const latestReleasesWithEntity = latestReleases.map(release => ({
     ...release,
     entity: release.entity || entity, // allow override