diff --git a/assets/components/container/container.vue b/assets/components/container/container.vue
index e778e2b7..27e53252 100644
--- a/assets/components/container/container.vue
+++ b/assets/components/container/container.vue
@@ -88,6 +88,8 @@ export default {
   flex-grow: 1;
   overflow-y: auto;
   overflow-x: hidden;
+  box-shadow: 0 0 3px var(--shadow-weak);
+  z-index: 1;
 }
 
 .slide-enter-active,
diff --git a/assets/components/entities/entity.vue b/assets/components/entities/entity.vue
index 17a4fe20..3aef60be 100644
--- a/assets/components/entities/entity.vue
+++ b/assets/components/entities/entity.vue
@@ -228,6 +228,15 @@ export default {
   height: 1rem;
 }
 
+.content-inner {
+  display: flex;
+  flex-direction: column;
+}
+
+.releases {
+  flex-grow: 1;
+}
+
 .name {
   color: var(--text-light);
   display: flex;
diff --git a/assets/components/footer/footer.vue b/assets/components/footer/footer.vue
index 2a5d78c6..dc736090 100644
--- a/assets/components/footer/footer.vue
+++ b/assets/components/footer/footer.vue
@@ -11,10 +11,8 @@
diff --git a/assets/components/home/home.vue b/assets/components/home/home.vue
index 6eb16349..3aee84d6 100644
--- a/assets/components/home/home.vue
+++ b/assets/components/home/home.vue
@@ -1,5 +1,5 @@
@@ -79,3 +79,11 @@ export default {
   },
 };
+
+
diff --git a/assets/js/get-date-range.js b/assets/js/get-date-range.js
index caf43c66..b30e3aae 100644
--- a/assets/js/get-date-range.js
+++ b/assets/js/get-date-range.js
@@ -2,23 +2,23 @@ import dayjs from 'dayjs';
 
 const dateRanges = {
   latest: () => ({
-    after: '1900-01-01',
-    before: dayjs(new Date()).add(1, 'day').format('YYYY-MM-DD'),
+    after: '1900-01-01 00:00:00',
+    before: dayjs().format('YYYY-MM-DD HH:mm:ss'),
     orderBy: 'DATE_DESC',
   }),
   upcoming: () => ({
-    after: dayjs(new Date()).format('YYYY-MM-DD'),
-    before: '2100-01-01',
+    after: dayjs().format('YYYY-MM-DD HH:mm:ss'),
+    before: '2100-01-01 00:00:00',
     orderBy: 'DATE_ASC',
   }),
   new: () => ({
-    after: '1900-01-01',
-    before: '2100-01-01',
-    orderBy: 'CREATED_AT_DESC',
+    after: '1900-01-01 00:00:00',
+    before: '2100-01-01 00:00:00',
+    orderBy: ['CREATED_AT_DESC', 'DATE_ASC'],
   }),
   all: () => ({
-    after: '1900-01-01',
-    before: '2100-01-01',
+    after: '1900-01-01 00:00:00',
+    before: '2100-01-01 00:00:00',
     orderBy: 'DATE_DESC',
   }),
 };
diff --git a/assets/js/releases/actions.js b/assets/js/releases/actions.js
index 59319cf3..6a9d63e6 100644
--- a/assets/js/releases/actions.js
+++ b/assets/js/releases/actions.js
@@ -7,13 +7,15 @@ function initReleasesActions(store, _router) {
   async function fetchReleases({ _commit }, { limit = 10, pageNumber = 1, range = 'latest' }) {
     const { before, after, orderBy } = getDateRange(range);
 
+    console.log(after, before, orderBy);
+
     const { connection: { releases, totalCount } } = await graphql(`
       query Releases(
        $limit:Int = 1000,
        $offset:Int = 0,
-       $after:Datetime = "1900-01-01",
-       $before:Datetime = "2100-01-01",
-       $orderBy:[ReleasesOrderBy!],
+       $after:Datetime = "1900-01-01 00:00:00",
+       $before:Datetime = "2100-01-01 00:00:00",
+       $orderBy: [ReleasesOrderBy!],
        $exclude: [String!]
       ) {
        ${releasesFragment}
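Aside: a rough usage sketch, not part of the diff, of what the reworked ranges hand to the query above. It assumes get-date-range.js exposes a small helper that looks up and invokes the matching entry in dateRanges; only the returned values come from the change itself.

// Assumed wrapper around the dateRanges map shown above.
const getDateRange = range => dateRanges[range]();

const { after, before, orderBy } = getDateRange('new');
// after:   '1900-01-01 00:00:00'
// before:  '2100-01-01 00:00:00'
// orderBy: ['CREATED_AT_DESC', 'DATE_ASC']

Because $orderBy is declared with the list type [ReleasesOrderBy!], the query accepts either a single value such as 'DATE_DESC' (coerced to a one-item list) or an array; with the array, the newest additions come first and the release date presumably acts as a tie-breaker.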
diff --git a/config/default.js b/config/default.js
index c858c413..c0abb69d 100644
--- a/config/default.js
+++ b/config/default.js
@@ -197,6 +197,7 @@ module.exports = {
     lazyQuality: 90,
     videoQuality: [480, 360, 320, 540, 720, 1080, 2160, 270, 240, 180],
     limit: 25, // max number of photos per release
+    streamConcurrency: 2, // max number of video streams (m3u8 etc.) to fetch and process at once
   },
   titleSlugLength: 50,
 };
diff --git a/seeds/02_sites.js b/seeds/02_sites.js
index ed6dcf75..8aeef7ef 100644
--- a/seeds/02_sites.js
+++ b/seeds/02_sites.js
@@ -4968,7 +4968,7 @@ const sites = [
     },
   },
   {
-    name: 'Horny doctor',
+    name: 'Horny Doctor',
     slug: 'hornydoctor',
     url: 'https://www.hornydoctor.com',
     parent: 'porncz',
diff --git a/src/argv.js b/src/argv.js
index 413e87e5..d7560590 100644
--- a/src/argv.js
+++ b/src/argv.js
@@ -30,11 +30,6 @@ const { argv } = yargs
     type: 'boolean',
     alias: 'web',
   })
-  .option('all', {
-    describe: 'Scrape channels and networks defined in configuration',
-    type: 'boolean',
-    alias: 'scrape',
-  })
   .option('networks', {
     describe: 'Network to scrape all channels from (overrides configuration)',
     type: 'array',
diff --git a/src/media.js b/src/media.js
index 5cd19f2f..f044fbc0 100644
--- a/src/media.js
+++ b/src/media.js
@@ -13,6 +13,7 @@ const mime = require('mime');
 const ffmpeg = require('fluent-ffmpeg');
 const sharp = require('sharp');
 const blake2 = require('blake2');
+const taskQueue = require('promise-task-queue');
 
 const logger = require('./logger')(__filename);
 const argv = require('./argv');
@@ -22,6 +23,7 @@ const bulkInsert = require('./utils/bulk-insert');
 const { get } = require('./utils/qu');
 
 const pipeline = util.promisify(stream.pipeline);
+const streamQueue = taskQueue();
 
 function sampleMedias(medias, limit = config.media.limit, preferLast = true) {
   // limit media sets, use extras as fallbacks
@@ -419,7 +421,7 @@ async function fetchHttpSource(source, tempFileTarget, hashStream) {
   };
 }
 
-async function fetchStreamSource(source, tempFileTarget, tempFilePath, hashStream) {
+streamQueue.define('fetchStreamSource', async ({ source, tempFileTarget, hashStream }) => {
   const meta = { mimetype: 'video/mp4' };
 
   const video = ffmpeg(source.stream)
@@ -434,7 +436,9 @@ async function fetchStreamSource(source, tempFileTarget, tempFilePath, hashStrea
   logger.verbose(`Finished fetching stream from ${source.stream}`);
 
   return meta;
-}
+}, {
+  concurrency: config.media.streamConcurrency,
+});
 
 async function fetchSource(source, baseMedia) {
   logger.silly(`Fetching media from ${source.src}`);
@@ -457,7 +461,7 @@ async function fetchSource(source, baseMedia) {
   });
 
   const { mimetype } = source.stream
-    ? await fetchStreamSource(source, tempFileTarget, tempFilePath, hashStream)
+    ? await streamQueue.push('fetchStreamSource', { source, tempFileTarget, hashStream })
     : await fetchHttpSource(source, tempFileTarget, hashStream);
 
   hasher.end();
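Aside: a minimal, self-contained sketch of the promise-task-queue pattern the media changes rely on. The task name 'transcode' and the handler body are made up for illustration; define(), push(), and the concurrency option are as used in the diff.

const taskQueue = require('promise-task-queue');

const queue = taskQueue();

// At most two handlers run at the same time; further pushes wait in line.
queue.define('transcode', async ({ url }) => {
  // stand-in for the ffmpeg work done in fetchStreamSource
  return `processed ${url}`;
}, {
  concurrency: 2,
});

(async () => {
  // Each push returns a promise that resolves with the handler's return value,
  // so the call site can keep awaiting it like a plain function call.
  const results = await Promise.all([
    queue.push('transcode', { url: 'https://example.com/a.m3u8' }),
    queue.push('transcode', { url: 'https://example.com/b.m3u8' }),
    queue.push('transcode', { url: 'https://example.com/c.m3u8' }),
  ]);

  console.log(results);
})();

This is why fetchSource can simply await streamQueue.push('fetchStreamSource', …) where it previously awaited fetchStreamSource(…) directly.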
diff --git a/src/releases.js b/src/releases.js
index ed40e52b..471328f2 100644
--- a/src/releases.js
+++ b/src/releases.js
@@ -20,10 +20,10 @@ function curateRelease(release, withMedia = false) {
       id: release.entity.id,
       name: release.entity.name,
       slug: release.entity.slug,
-      parent: release.entity.parent && {
-        id: release.entity.parent.id,
-        name: release.entity.parent.name,
-        slug: release.entity.parent.slug,
+      parent: release.parent && {
+        id: release.parent.id,
+        name: release.parent.name,
+        slug: release.parent.slug,
       },
     },
     actors: (release.actors || []).map(actor => ({
diff --git a/src/updates.js b/src/updates.js
index ed50bf7e..d4536309 100644
--- a/src/updates.js
+++ b/src/updates.js
@@ -37,7 +37,7 @@ async function filterUniqueReleases(latestReleases, accReleases) {
   return uniqueReleases;
 }
 
-function needNextPage(uniqueReleases, pageAccReleases) {
+function needNextPage(uniqueReleases, pageAccReleases, hasDates) {
   if (uniqueReleases.length === 0) {
     return false;
   }
@@ -47,7 +47,7 @@ function needNextPage(uniqueReleases, pageAccReleases) {
     return true;
   }
 
-  if (uniqueReleases.every(release => !!release.date)) {
+  if (hasDates) {
     const oldestReleaseOnPage = uniqueReleases
       .sort((releaseA, releaseB) => releaseB.date - releaseA.date)
       .slice(-1)[0];
@@ -74,18 +74,15 @@ async function scrapeReleases(scraper, entity, preData, upcoming = false) {
       return accReleases;
     }
 
-    /* prevents argv.last from being satisfied
-    if (latestReleases.length === 0) {
-      // scraper successfully requested releases, but found none
-      return accReleases;
-    }
-    */
-
     const latestReleasesWithEntity = latestReleases.map(release => ({
       ...release,
       entity: release.entity || entity, // allow override
     })); // attach entity the release is assigned to when stored
 
+    const hasDates = latestReleasesWithEntity.every(release => !!release.date);
+
+    console.log(hasDates);
+
     const uniqueReleases = argv.redownload
       ? latestReleasesWithEntity
       : await filterUniqueReleases(latestReleasesWithEntity, accReleases);
@@ -94,7 +91,7 @@ async function scrapeReleases(scraper, entity, preData, upcoming = false) {
     logger.verbose(`Scraped '${entity.name}' (${entity.parent?.name}) ${upcoming ? 'upcoming' : 'latest'} page ${page}, found ${uniqueReleases.length} unique updates`);
 
-    if (needNextPage(uniqueReleases, pageAccReleases)) {
+    if (needNextPage(uniqueReleases, pageAccReleases, hasDates)) {
      return scrapePage(page + 1, pageAccReleases);
     }
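Aside: a small illustration, not taken from the codebase, of what the new hasDates flag captures. It is evaluated over the full scraped page before deduplication and passed down, so needNextPage no longer re-checks dates on the already-filtered set.

const page = [
  { title: 'Scene A', date: new Date('2021-03-01') },
  { title: 'Scene B', date: null }, // e.g. an upcoming scene without a date yet
];

const hasDates = page.every(release => !!release.date); // false

// With hasDates false, needNextPage() skips the oldest-release date comparison
// and falls back to its other stopping conditions, such as the empty-page check.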