forked from DebaucheryLibrarian/traxxx

Added version to stats page.

This commit is contained in:
parent 566c20ea7e
commit 3f843cc0fc

@@ -3,6 +3,8 @@
   <div class="content-inner">
     <h1 class="heading">Stats</h1>
 
+    <span class="version">Version {{ version }}</span>
+
     <dl class="stat-table">
       <div class="stat-row">
         <dt class="stat-label">Networks</dt>

@@ -44,6 +46,8 @@ async function mounted() {
   this.totalActors = stats.totalActors;
   this.totalNetworks = stats.totalNetworks;
   this.totalChannels = stats.totalChannels;
+
+  this.version = VERSION; // eslint-disable-line no-undef
 }
 
 export default {
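
The VERSION constant is not defined anywhere in the component; it is a global substituted at build time by webpack's DefinePlugin (added further down in this commit), which is why the assignment carries the eslint-disable no-undef comment. A minimal sketch of how the template and script lines wire together, with the data property and surrounding component structure assumed rather than copied from the repository:

<template>
  <div class="content-inner">
    <h1 class="heading">Stats</h1>
    <span class="version">Version {{ version }}</span>
  </div>
</template>

<script>
async function mounted() {
  // VERSION is inlined by webpack's DefinePlugin at build time, so it is a
  // bare global here and eslint must be told it is not actually undefined
  this.version = VERSION; // eslint-disable-line no-undef
}

export default {
  data() {
    return {
      version: null, // assumed initial value; replaced once mounted() runs
    };
  },
  mounted,
};
</script>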

@@ -701,6 +701,13 @@ const tags = [
     slug: 'pussy-eating',
     group: 'oral',
   },
+  {
+    name: 'pussy to mouth',
+    slug: 'pussy-to-mouth',
+    priority: 5,
+    description: 'Sucking off a cock right fresh out of your pussy.',
+    group: 'oral',
+  },
   {
     name: 'pyjamas',
     slug: 'pyjamas',

@@ -1277,6 +1284,10 @@ const aliases = [
     name: 'cum on tits',
     for: 'cum-on-boobs',
   },
+  {
+    name: 'cum swallow',
+    for: 'swallowing',
+  },
   {
     name: 'cum swallowing',
     for: 'swallowing',
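
Each alias entry pairs a display name with the slug of the canonical tag through its 'for' property, so the new 'cum swallow' alias resolves to the existing 'swallowing' tag. A hypothetical lookup helper, only to illustrate that relationship; the actual resolution logic in the repository may differ:

// hypothetical helper, not part of the commit: resolves a name to a tag,
// following an alias's `for` property when one matches
function resolveTag(name, tagsBySlug, aliases) {
  const alias = aliases.find(entry => entry.name === name);
  const slug = alias ? alias.for : name;

  return tagsBySlug[slug] || null;
}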

@@ -37,10 +37,14 @@ async function filterUniqueReleases(latestReleases, accReleases) {
   // add entry IDs of accumulated releases to prevent an infinite scrape loop
   // when one page contains the same release as the previous
   const duplicateReleasesBySiteIdAndEntryId = duplicateReleases
-    .concat(accReleases)
+    .concat(accReleases.uniqueReleases)
     .reduce(mapReleasesToSiteIdAndEntryId, {});
 
-  const localDuplicateReleasesBySiteIdAndEntryId = accReleases.reduce(mapReleasesToSiteIdAndEntryId, {});
+  const localDuplicateReleasesBySiteIdAndEntryId = accReleases.uniqueReleases
+    .concat(accReleases.duplicateReleases)
+    .reduce(mapReleasesToSiteIdAndEntryId, {});
+
+  console.log(localDuplicateReleasesBySiteIdAndEntryId);
 
   const uniqueReleases = latestReleases.filter(release => !duplicateReleasesBySiteIdAndEntryId[release.entity.id]?.[release.entryId]);
   const localUniqueReleases = latestReleases.filter(release => !localDuplicateReleasesBySiteIdAndEntryId[release.entity.id]?.[release.entryId]);
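
The comments explain the intent: releases seen before, whether already stored or accumulated from earlier pages of the same scrape, are indexed by entity ID and entry ID, and anything found in that index is dropped from the latest page. The reducer itself is not part of this hunk; a minimal sketch of what it could look like, with the behaviour inferred from the calls above rather than taken from the repository:

// hypothetical sketch of the reducer passed to .reduce(..., {}) above; the
// real mapReleasesToSiteIdAndEntryId may differ in detail
function mapReleasesToSiteIdAndEntryId(acc, release) {
  const entityId = release.entity.id;

  if (!acc[entityId]) {
    acc[entityId] = {};
  }

  // builds a nested index { [entityId]: { [entryId]: true } }, so membership
  // can be tested with index[release.entity.id]?.[release.entryId]
  acc[entityId][release.entryId] = true;

  return acc;
}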

@@ -113,8 +117,8 @@ async function scrapeReleases(scraper, entity, preData, upcoming = false, page =
     || releasesWithEntity.slice(0, Math.max(argv.nullDateLimit - totalReleases, 0));
 
   const { uniqueReleases, localUniqueReleases, duplicateReleases } = argv.force
-    ? { uniqueReleases: limitedReleases, localUniqueReleases: limitedReleases, duplicateReleases: [] }
-    : await filterUniqueReleases(limitedReleases, acc.uniqueReleases);
+    ? { uniqueReleases: limitedReleases, localUniqueReleases: releases, duplicateReleases: [] }
+    : await filterUniqueReleases(limitedReleases, acc);
 
   const accReleases = {
     uniqueReleases: acc.uniqueReleases.concat(uniqueReleases),
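
The second argument to filterUniqueReleases changes from acc.uniqueReleases to the whole accumulator, so the filter can consult both the unique and the duplicate releases collected on earlier pages. The shape implied by the two hunks above, noting that only these two properties are actually referenced in this commit:

// accumulator threaded through the paginated scrape, as implied by the calls
// above; any further properties are unknown from this diff
const acc = {
  uniqueReleases: [],    // releases kept so far across pages
  duplicateReleases: [], // releases already known, used to break repeat loops
};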

@@ -1,3 +1,4 @@
+const webpack = require('webpack');
 const path = require('path');
 const VueLoaderPlugin = require('vue-loader/lib/plugin');
 const MiniCssExtractPlugin = require('mini-css-extract-plugin');

@@ -63,6 +64,9 @@ module.exports = {
     new MiniCssExtractPlugin({
       filename: '../css/style.css',
     }),
+    new webpack.DefinePlugin({
+      VERSION: JSON.stringify(process.env.npm_package_version),
+    }),
   ],
   resolve: {
     alias: {
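
DefinePlugin performs a compile-time text substitution rather than defining a runtime variable, which is why the value is wrapped in JSON.stringify (so it lands in the bundle as a string literal) and why the stats component needs the eslint no-undef exception. npm exposes the version field of package.json to its scripts as process.env.npm_package_version, so the bundle carries the version it was built from. Roughly, assuming the package version happened to be 1.0.0 when webpack ran:

// what the component source says
this.version = VERSION; // eslint-disable-line no-undef

// what the built bundle effectively contains after substitution, assuming
// process.env.npm_package_version was "1.0.0" at build time
this.version = "1.0.0";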