Merge branch 'experimental' into master

DebaucheryLibrarian 2020-08-19 21:49:04 +02:00
commit 5767bfb5a2
12 changed files with 61 additions and 41 deletions

View File

@@ -88,6 +88,8 @@ export default {
   flex-grow: 1;
   overflow-y: auto;
   overflow-x: hidden;
+  box-shadow: 0 0 3px var(--shadow-weak);
+  z-index: 1;
 }

 .slide-enter-active,

View File

@@ -228,6 +228,15 @@ export default {
   height: 1rem;
 }

+.content-inner {
+  display: flex;
+  flex-direction: column;
+}
+
+.releases {
+  flex-grow: 1;
+}
+
 .name {
   color: var(--text-light);
   display: flex;

View File

@@ -11,10 +11,8 @@
 <style lang="scss" scoped>
 .footer {
-  margin: 2rem 0 0 0;
-  background: var(--background);
+  background: var(--background-dim);
   color: var(--shadow);
-  box-shadow: inset 0 1px 3px var(--darken-hint);
   font-size: .8rem;
   font-weight: bold;
   text-align: center;
@@ -30,5 +28,9 @@
 .footer-link {
   text-decoration: underline;
+
+  &:hover {
+    color: var(--primary);
+  }
 }
 </style>

View File

@@ -1,5 +1,5 @@
 <template>
-  <div class="content">
+  <div class="home">
     <div
       ref="content"
       class="content-inner"
@@ -20,9 +20,9 @@
       :items-per-page="limit"
       class="pagination-bottom"
     />
+
+      <Footer />
     </div>
-
-    <Footer />
   </div>
 </template>
@@ -79,3 +79,11 @@ export default {
   },
 };
 </script>
+
+<style lang="scss" scoped>
+.home {
+  flex-grow: 1;
+  display: flex;
+  flex-direction: column;
+}
+</style>

View File

@@ -2,23 +2,23 @@ import dayjs from 'dayjs';

 const dateRanges = {
   latest: () => ({
-    after: '1900-01-01',
-    before: dayjs(new Date()).add(1, 'day').format('YYYY-MM-DD'),
+    after: '1900-01-01 00:00:00',
+    before: dayjs().format('YYYY-MM-DD HH:mm:ss'),
     orderBy: 'DATE_DESC',
   }),
   upcoming: () => ({
-    after: dayjs(new Date()).format('YYYY-MM-DD'),
-    before: '2100-01-01',
+    after: dayjs().format('YYYY-MM-DD HH:mm:ss'),
+    before: '2100-01-01 00:00:00',
     orderBy: 'DATE_ASC',
   }),
   new: () => ({
-    after: '1900-01-01',
-    before: '2100-01-01',
-    orderBy: 'CREATED_AT_DESC',
+    after: '1900-01-01 00:00:00',
+    before: '2100-01-01 00:00:00',
+    orderBy: ['CREATED_AT_DESC', 'DATE_ASC'],
   }),
   all: () => ({
-    after: '1900-01-01',
-    before: '2100-01-01',
+    after: '1900-01-01 00:00:00',
+    before: '2100-01-01 00:00:00',
     orderBy: 'DATE_DESC',
   }),
 };
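Note: all four ranges now emit second-precision datetimes, and the `new` range sorts on two columns. A quick sketch of what a resolved range looks like, assuming `getDateRange(range)` simply invokes `dateRanges[range]()` as the store action below suggests:

// Hypothetical usage; values taken from the diff above.
const { after, before, orderBy } = dateRanges.new();
// after   => '1900-01-01 00:00:00'
// before  => '2100-01-01 00:00:00'
// orderBy => ['CREATED_AT_DESC', 'DATE_ASC'] (creation time first, release date as tie-breaker)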

View File

@@ -7,13 +7,15 @@ function initReleasesActions(store, _router) {
   async function fetchReleases({ _commit }, { limit = 10, pageNumber = 1, range = 'latest' }) {
     const { before, after, orderBy } = getDateRange(range);

+    console.log(after, before, orderBy);
+
     const { connection: { releases, totalCount } } = await graphql(`
       query Releases(
         $limit:Int = 1000,
         $offset:Int = 0,
-        $after:Datetime = "1900-01-01",
-        $before:Datetime = "2100-01-01",
-        $orderBy:[ReleasesOrderBy!],
+        $after:Datetime = "1900-01-01 00:00:00",
+        $before:Datetime = "2100-01-01 00:00:00",
+        $orderBy: [ReleasesOrderBy!],
         $exclude: [String!]
       ) {
         ${releasesFragment}
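Note: because `$orderBy` is declared as `[ReleasesOrderBy!]`, both the scalar and the array forms produced by the date-range helpers are valid: GraphQL input coercion wraps a single enum value into a one-element list. A sketch, assuming `graphql(query, variables)` is the helper this store module imports:

// Both calls satisfy $orderBy: [ReleasesOrderBy!].
graphql(query, { orderBy: 'DATE_DESC' });                     // coerced to ['DATE_DESC']
graphql(query, { orderBy: ['CREATED_AT_DESC', 'DATE_ASC'] }); // multi-column sort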

View File

@@ -197,6 +197,7 @@ module.exports = {
     lazyQuality: 90,
     videoQuality: [480, 360, 320, 540, 720, 1080, 2160, 270, 240, 180],
     limit: 25, // max number of photos per release
+    streamConcurrency: 2, // max number of video streams (m3u8 etc.) to fetch and process at once
   },
   titleSlugLength: 50,
 };

View File

@@ -4968,7 +4968,7 @@ const sites = [
     },
   },
   {
-    name: 'Horny doctor',
+    name: 'Horny Doctor',
     slug: 'hornydoctor',
     url: 'https://www.hornydoctor.com',
     parent: 'porncz',

View File

@@ -30,11 +30,6 @@ const { argv } = yargs
     type: 'boolean',
     alias: 'web',
   })
-  .option('all', {
-    describe: 'Scrape channels and networks defined in configuration',
-    type: 'boolean',
-    alias: 'scrape',
-  })
   .option('networks', {
     describe: 'Network to scrape all channels from (overrides configuration)',
     type: 'array',

View File

@@ -13,6 +13,7 @@ const mime = require('mime');
 const ffmpeg = require('fluent-ffmpeg');
 const sharp = require('sharp');
 const blake2 = require('blake2');
+const taskQueue = require('promise-task-queue');

 const logger = require('./logger')(__filename);
 const argv = require('./argv');
@@ -22,6 +23,7 @@ const bulkInsert = require('./utils/bulk-insert');
 const { get } = require('./utils/qu');

 const pipeline = util.promisify(stream.pipeline);
+const streamQueue = taskQueue();

 function sampleMedias(medias, limit = config.media.limit, preferLast = true) {
   // limit media sets, use extras as fallbacks
@@ -419,7 +421,7 @@ async function fetchHttpSource(source, tempFileTarget, hashStream) {
   };
 }

-async function fetchStreamSource(source, tempFileTarget, tempFilePath, hashStream) {
+streamQueue.define('fetchStreamSource', async ({ source, tempFileTarget, hashStream }) => {
   const meta = { mimetype: 'video/mp4' };

   const video = ffmpeg(source.stream)
@@ -434,7 +436,9 @@ async function fetchStreamSource(source, tempFileTarget, tempFilePath, hashStrea
   logger.verbose(`Finished fetching stream from ${source.stream}`);

   return meta;
-}
+}, {
+  concurrency: config.media.streamConcurrency,
+});

 async function fetchSource(source, baseMedia) {
   logger.silly(`Fetching media from ${source.src}`);
@@ -457,7 +461,7 @@ async function fetchSource(source, baseMedia) {
   });

   const { mimetype } = source.stream
-    ? await fetchStreamSource(source, tempFileTarget, tempFilePath, hashStream)
+    ? await streamQueue.push('fetchStreamSource', { source, tempFileTarget, hashStream })
     : await fetchHttpSource(source, tempFileTarget, hashStream);

   hasher.end();
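Note: the queue wrapper above follows promise-task-queue's define/push API: `define(name, handler, options)` registers the handler with a concurrency cap, and `push(name, data)` returns a promise for the handler's result, so callers can await queued work like a direct call. A minimal self-contained sketch with a stand-in task and hypothetical URLs (the real handler runs ffmpeg against `source.stream`):

const taskQueue = require('promise-task-queue');

const queue = taskQueue();

queue.define('download', async ({ url }) => `fetched ${url}`, {
  concurrency: 2, // at most two handlers run at once; further pushes wait in the queue
});

Promise.all([
  queue.push('download', { url: 'https://example.com/a.m3u8' }),
  queue.push('download', { url: 'https://example.com/b.m3u8' }),
  queue.push('download', { url: 'https://example.com/c.m3u8' }),
]).then(results => console.log(results)); // resolves once all queued tasks finish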

View File

@@ -20,10 +20,10 @@ function curateRelease(release, withMedia = false) {
       id: release.entity.id,
       name: release.entity.name,
       slug: release.entity.slug,
-      parent: release.entity.parent && {
-        id: release.entity.parent.id,
-        name: release.entity.parent.name,
-        slug: release.entity.parent.slug,
+      parent: release.parent && {
+        id: release.parent.id,
+        name: release.parent.name,
+        slug: release.parent.slug,
       },
     },
     actors: (release.actors || []).map(actor => ({
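Note: the parent network is now read from the release object itself rather than from the nested entity, i.e. curation expects data shaped roughly like this (ids hypothetical; names borrowed from the seed diff above):

// Hypothetical release shape after the change.
const release = {
  entity: { id: 123, name: 'Horny Doctor', slug: 'hornydoctor' },
  parent: { id: 45, name: 'PornCZ', slug: 'porncz' },
};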

View File

@@ -37,7 +37,7 @@ async function filterUniqueReleases(latestReleases, accReleases) {
   return uniqueReleases;
 }

-function needNextPage(uniqueReleases, pageAccReleases) {
+function needNextPage(uniqueReleases, pageAccReleases, hasDates) {
   if (uniqueReleases.length === 0) {
     return false;
   }
@@ -47,7 +47,7 @@ function needNextPage(uniqueReleases, pageAccReleases) {
     return true;
   }

-  if (uniqueReleases.every(release => !!release.date)) {
+  if (hasDates) {
     const oldestReleaseOnPage = uniqueReleases
       .sort((releaseA, releaseB) => releaseB.date - releaseA.date)
       .slice(-1)[0];
@@ -74,18 +74,15 @@ async function scrapeReleases(scraper, entity, preData, upcoming = false) {
     return accReleases;
   }

-  /* prevents argv.last from being satisfied
-  if (latestReleases.length === 0) {
-    // scraper successfully requested releases, but found none
-    return accReleases;
-  }
-  */
-
   const latestReleasesWithEntity = latestReleases.map(release => ({
     ...release,
     entity: release.entity || entity, // allow override
   })); // attach entity the release is assigned to when stored

+  const hasDates = latestReleasesWithEntity.every(release => !!release.date);
+
+  console.log(hasDates);
+
   const uniqueReleases = argv.redownload
     ? latestReleasesWithEntity
     : await filterUniqueReleases(latestReleasesWithEntity, accReleases);
@@ -94,7 +91,7 @@ async function scrapeReleases(scraper, entity, preData, upcoming = false) {

     logger.verbose(`Scraped '${entity.name}' (${entity.parent?.name}) ${upcoming ? 'upcoming' : 'latest'} page ${page}, found ${uniqueReleases.length} unique updates`);

-    if (needNextPage(uniqueReleases, pageAccReleases)) {
+    if (needNextPage(uniqueReleases, pageAccReleases, hasDates)) {
       return scrapePage(page + 1, pageAccReleases);
     }
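Note: one subtle behavioural shift in this file: needNextPage() previously derived the date check from the deduplicated page, while the hoisted hasDates flag covers every release the scraper returned. The two can disagree when an undated release is filtered out as a duplicate, e.g. (hypothetical data):

const latestReleasesWithEntity = [
  { title: 'A', date: new Date('2020-08-01') },
  { title: 'B', date: null }, // already stored, so absent from uniqueReleases
];
const uniqueReleases = [latestReleasesWithEntity[0]];

uniqueReleases.every(release => !!release.date);           // true  (old check)
latestReleasesWithEntity.every(release => !!release.date); // false (new hasDates)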