Merge branch 'experimental' into master

DebaucheryLibrarian 2020-08-19 21:49:04 +02:00
commit 5767bfb5a2
12 changed files with 61 additions and 41 deletions

View File

@@ -88,6 +88,8 @@ export default {
flex-grow: 1;
overflow-y: auto;
overflow-x: hidden;
box-shadow: 0 0 3px var(--shadow-weak);
z-index: 1;
}
.slide-enter-active,

View File

@@ -228,6 +228,15 @@ export default {
height: 1rem;
}
.content-inner {
display: flex;
flex-direction: column;
}
.releases {
flex-grow: 1;
}
.name {
color: var(--text-light);
display: flex;

View File

@@ -11,10 +11,8 @@
<style lang="scss" scoped>
.footer {
margin: 2rem 0 0 0;
background: var(--background);
background: var(--background-dim);
color: var(--shadow);
box-shadow: inset 0 1px 3px var(--darken-hint);
font-size: .8rem;
font-weight: bold;
text-align: center;
@@ -30,5 +28,9 @@
.footer-link {
text-decoration: underline;
&:hover {
color: var(--primary);
}
}
</style>

View File

@@ -1,5 +1,5 @@
<template>
<div class="content">
<div class="home">
<div
ref="content"
class="content-inner"
@@ -20,9 +20,9 @@
:items-per-page="limit"
class="pagination-bottom"
/>
<Footer />
</div>
<Footer />
</div>
</template>
@@ -79,3 +79,11 @@ export default {
},
};
</script>
<style lang="scss" scoped>
.home {
flex-grow: 1;
display: flex;
flex-direction: column;
}
</style>

View File

@@ -2,23 +2,23 @@ import dayjs from 'dayjs';
const dateRanges = {
latest: () => ({
after: '1900-01-01',
before: dayjs(new Date()).add(1, 'day').format('YYYY-MM-DD'),
after: '1900-01-01 00:00:00',
before: dayjs().format('YYYY-MM-DD HH:mm:ss'),
orderBy: 'DATE_DESC',
}),
upcoming: () => ({
after: dayjs(new Date()).format('YYYY-MM-DD'),
before: '2100-01-01',
after: dayjs().format('YYYY-MM-DD HH:mm:ss'),
before: '2100-01-01 00:00:00',
orderBy: 'DATE_ASC',
}),
new: () => ({
after: '1900-01-01',
before: '2100-01-01',
orderBy: 'CREATED_AT_DESC',
after: '1900-01-01 00:00:00',
before: '2100-01-01 00:00:00',
orderBy: ['CREATED_AT_DESC', 'DATE_ASC'],
}),
all: () => ({
after: '1900-01-01',
before: '2100-01-01',
after: '1900-01-01 00:00:00',
before: '2100-01-01 00:00:00',
orderBy: 'DATE_DESC',
}),
};
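
Note (not part of the diff): a minimal sketch, assuming only that dayjs is installed, of the two call styles swapped above. dayjs() with no argument already defaults to "now", so the new Date() wrapper is redundant, and the 'YYYY-MM-DD HH:mm:ss' pattern produces a full timestamp instead of a date-only string.

const dayjs = require('dayjs');

// old style: explicit Date wrapper, date-only output, e.g. '2020-08-19'
console.log(dayjs(new Date()).format('YYYY-MM-DD'));

// new style: implicit current time, timestamp output matching the Datetime bounds,
// e.g. '2020-08-19 21:49:04'
console.log(dayjs().format('YYYY-MM-DD HH:mm:ss'));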

View File

@@ -7,13 +7,15 @@ function initReleasesActions(store, _router) {
async function fetchReleases({ _commit }, { limit = 10, pageNumber = 1, range = 'latest' }) {
const { before, after, orderBy } = getDateRange(range);
console.log(after, before, orderBy);
const { connection: { releases, totalCount } } = await graphql(`
query Releases(
$limit:Int = 1000,
$offset:Int = 0,
$after:Datetime = "1900-01-01",
$before:Datetime = "2100-01-01",
$orderBy:[ReleasesOrderBy!],
$after:Datetime = "1900-01-01 00:00:00",
$before:Datetime = "2100-01-01 00:00:00",
$orderBy: [ReleasesOrderBy!],
$exclude: [String!]
) {
${releasesFragment}

View File

@@ -197,6 +197,7 @@ module.exports = {
lazyQuality: 90,
videoQuality: [480, 360, 320, 540, 720, 1080, 2160, 270, 240, 180],
limit: 25, // max number of photos per release
streamConcurrency: 2, // max number of video streams (m3u8 etc.) to fetch and process at once
},
titleSlugLength: 50,
};

View File

@@ -4968,7 +4968,7 @@ const sites = [
},
},
{
name: 'Horny doctor',
name: 'Horny Doctor',
slug: 'hornydoctor',
url: 'https://www.hornydoctor.com',
parent: 'porncz',

View File

@@ -30,11 +30,6 @@ const { argv } = yargs
type: 'boolean',
alias: 'web',
})
.option('all', {
describe: 'Scrape channels and networks defined in configuration',
type: 'boolean',
alias: 'scrape',
})
.option('networks', {
describe: 'Network to scrape all channels from (overrides configuration)',
type: 'array',

View File

@@ -13,6 +13,7 @@ const mime = require('mime');
const ffmpeg = require('fluent-ffmpeg');
const sharp = require('sharp');
const blake2 = require('blake2');
const taskQueue = require('promise-task-queue');
const logger = require('./logger')(__filename);
const argv = require('./argv');
@@ -22,6 +23,7 @@ const bulkInsert = require('./utils/bulk-insert');
const { get } = require('./utils/qu');
const pipeline = util.promisify(stream.pipeline);
const streamQueue = taskQueue();
function sampleMedias(medias, limit = config.media.limit, preferLast = true) {
// limit media sets, use extras as fallbacks
@@ -419,7 +421,7 @@ async function fetchHttpSource(source, tempFileTarget, hashStream) {
};
}
async function fetchStreamSource(source, tempFileTarget, tempFilePath, hashStream) {
streamQueue.define('fetchStreamSource', async ({ source, tempFileTarget, hashStream }) => {
const meta = { mimetype: 'video/mp4' };
const video = ffmpeg(source.stream)
@@ -434,7 +436,7 @@ async function fetchStreamSource(source, tempFileTarget, tempFilePath, hashStream) {
logger.verbose(`Finished fetching stream from ${source.stream}`);
return meta;
}
}, {
concurrency: config.media.streamConcurrency,
});
async function fetchSource(source, baseMedia) {
logger.silly(`Fetching media from ${source.src}`);
@@ -457,7 +461,7 @@ async function fetchSource(source, baseMedia) {
});
const { mimetype } = source.stream
? await fetchStreamSource(source, tempFileTarget, tempFilePath, hashStream)
? await streamQueue.push('fetchStreamSource', { source, tempFileTarget, hashStream })
: await fetchHttpSource(source, tempFileTarget, hashStream);
hasher.end();
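
Note (not part of the diff; the URL and timing are invented): a minimal standalone sketch of the promise-task-queue pattern introduced above. The stream fetch is defined once with a concurrency cap (config.media.streamConcurrency in the real code), and each call site pushes a payload and awaits the returned promise, just like the old direct function call.

const taskQueue = require('promise-task-queue');

const streamQueue = taskQueue();

// define the task once; at most `concurrency` handlers run at the same time
streamQueue.define('fetchStreamSource', async ({ source }) => {
    // stand-in for the ffmpeg download of source.stream
    await new Promise(resolve => setTimeout(resolve, 1000));
    return { mimetype: 'video/mp4' };
}, {
    concurrency: 2, // mirrors config.media.streamConcurrency
});

// push() queues a run and resolves with the handler's return value
streamQueue.push('fetchStreamSource', { source: { stream: 'https://example.com/video.m3u8' } })
    .then(meta => console.log(meta.mimetype)); // video/mp4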

View File

@@ -20,10 +20,10 @@ function curateRelease(release, withMedia = false) {
id: release.entity.id,
name: release.entity.name,
slug: release.entity.slug,
parent: release.entity.parent && {
id: release.entity.parent.id,
name: release.entity.parent.name,
slug: release.entity.parent.slug,
parent: release.parent && {
id: release.parent.id,
name: release.parent.name,
slug: release.parent.slug,
},
},
actors: (release.actors || []).map(actor => ({

View File

@@ -37,7 +37,7 @@ async function filterUniqueReleases(latestReleases, accReleases) {
return uniqueReleases;
}
function needNextPage(uniqueReleases, pageAccReleases) {
function needNextPage(uniqueReleases, pageAccReleases, hasDates) {
if (uniqueReleases.length === 0) {
return false;
}
@@ -47,7 +47,7 @@ function needNextPage(uniqueReleases, pageAccReleases) {
return true;
}
if (uniqueReleases.every(release => !!release.date)) {
if (hasDates) {
const oldestReleaseOnPage = uniqueReleases
.sort((releaseA, releaseB) => releaseB.date - releaseA.date)
.slice(-1)[0];
@@ -74,18 +74,15 @@ async function scrapeReleases(scraper, entity, preData, upcoming = false) {
return accReleases;
}
/* prevents argv.last from being satisfied
if (latestReleases.length === 0) {
// scraper successfully requested releases, but found none
return accReleases;
}
*/
const latestReleasesWithEntity = latestReleases.map(release => ({
...release,
entity: release.entity || entity, // allow override
})); // attach entity the release is assigned to when stored
const hasDates = latestReleasesWithEntity.every(release => !!release.date);
console.log(hasDates);
const uniqueReleases = argv.redownload
? latestReleasesWithEntity
: await filterUniqueReleases(latestReleasesWithEntity, accReleases);
@@ -94,7 +91,7 @@ async function scrapeReleases(scraper, entity, preData, upcoming = false) {
logger.verbose(`Scraped '${entity.name}' (${entity.parent?.name}) ${upcoming ? 'upcoming' : 'latest'} page ${page}, found ${uniqueReleases.length} unique updates`);
if (needNextPage(uniqueReleases, pageAccReleases)) {
if (needNextPage(uniqueReleases, pageAccReleases, hasDates)) {
return scrapePage(page + 1, pageAccReleases);
}
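
Note (not part of the diff; the sample releases are invented): a minimal sketch of the hasDates hand-off above. The flag is computed once over the full scraped page and passed into needNextPage, rather than re-derived from uniqueReleases, which may already have been filtered down.

const latestReleasesWithEntity = [
    { title: 'Scene A', date: new Date('2020-08-18') },
    { title: 'Scene B', date: new Date('2020-08-19') },
    { title: 'Scene C', date: null }, // a single undated release flips the flag
];

// computed per page, before unique-filtering
const hasDates = latestReleasesWithEntity.every(release => !!release.date);

console.log(hasDates); // false: date-based pagination can't be trusted for this page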