Added support for Family Strokes.

This commit is contained in:
ThePendulum 2020-01-13 23:45:09 +01:00
parent 48b37a509e
commit 859cb7e1f3
58 changed files with 2130 additions and 33 deletions

View File

@@ -119,7 +119,7 @@
title="bust-waist-hip"
class="bio-item"
>
-<dfn class="bio-label"><Icon icon="ruler" />Sizes</dfn>
+<dfn class="bio-label"><Icon icon="ruler" />Figure</dfn>
<span>
<Icon
v-if="actor.naturalBoobs === false"

View File

@@ -21,6 +21,14 @@
>
</a>
<ul class="tags nolist">
<li
v-for="tag in site.tags"
:key="`tag-${tag.slug}`"
class="tag"
>{{ tag.name }}</li>
</ul>
<a
v-tooltip.bottom="`Go to ${site.network.name} overview`"
:href="`/network/${site.network.slug}`"
@@ -115,6 +123,12 @@ export default {
filter: $logo-highlight;
}
.tag {
background: $shadow;
padding: .5rem;
margin: 0 .5rem .5rem 0;
}
@media(max-width: $breakpoint) {
.link {
padding: .5rem 1rem;
@@ -123,5 +137,9 @@ export default {
.logo {
max-height: 2.5rem;
}
.tags {
display: none;
}
}
</style>

View File

@@ -44,6 +44,7 @@ function curateSite(site, network) {
if (site.releases) curatedSite.releases = site.releases.map(release => curateRelease(release));
if (site.network || network) curatedSite.network = site.network || network;
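// sitesTags join rows arrive as { tag: { ... } } objects (see the GraphQL fragment in the next file), so unwrap them to plain tags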
if (site.tags) curatedSite.tags = site.tags.map(({ tag }) => tag);
return curatedSite;
}

View File

@@ -16,6 +16,13 @@ function initSitesActions(store, _router) {
name
slug
url
tags: sitesTags {
tag {
id
slug
name
}
}
network {
id
name
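
For reference, a minimal sketch of the curated shape this query yields once curateSite unwraps the sitesTags rows (values illustrative, not from the actual database):

const site = {
  name: 'Family Strokes',
  slug: 'familystrokes',
  tags: [{ id: 1, slug: 'family', name: 'family taboo' }],
};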

log/2020-01-13.log (new file, 1893 lines)

File diff suppressed because it is too large.

View File

@@ -159,6 +159,9 @@ exports.up = knex => Promise.resolve()
.references('id')
.inTable('sites');
table.boolean('inherit')
.defaultTo(false);
table.unique(['tag_id', 'site_id']);
}))
.then(() => knex.schema.createTable('sites_social', (table) => {
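
A minimal sketch of how the new sites_tags columns are meant to be used (table and column names from the migration above; familyTagId and siteId are hypothetical):

await knex('sites_tags').insert({
  tag_id: familyTagId, // hypothetical id of the 'family' tag
  site_id: siteId, // hypothetical id of the 'familystrokes' site
  inherit: true, // releases from this site inherit the tag
});

The unique constraint on (tag_id, site_id) rejects duplicate associations for the same site and tag.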

package-lock.json (generated, 26 lines changed)
View File

@@ -4875,6 +4875,14 @@
"flat-cache": "^2.0.1"
}
},
"file-stream-rotator": {
"version": "0.5.5",
"resolved": "https://registry.npmjs.org/file-stream-rotator/-/file-stream-rotator-0.5.5.tgz",
"integrity": "sha512-XzvE1ogpxUbARtZPZLICaDRAeWxoQLFMKS3ZwADoCQmurKEwuDD2jEfDVPm/R1HeKYsRYEl9PzVIezjQ3VTTPQ==",
"requires": {
"moment": "^2.11.2"
}
},
"fill-range": {
"version": "4.0.0",
"resolved": "https://registry.npmjs.org/fill-range/-/fill-range-4.0.0.tgz",
@@ -12274,6 +12282,24 @@
}
}
},
"winston-daily-rotate-file": {
"version": "4.4.1",
"resolved": "https://registry.npmjs.org/winston-daily-rotate-file/-/winston-daily-rotate-file-4.4.1.tgz",
"integrity": "sha512-516bL4IDjgX5mPEsTPXNVNzZtJkrUFY2IvPhj8n5xSKyy804xadp4TUlhxEZLL/Jbs8CF+rESfq95QXFLFTzKA==",
"requires": {
"file-stream-rotator": "^0.5.5",
"object-hash": "^2.0.1",
"triple-beam": "^1.3.0",
"winston-transport": "^4.2.0"
},
"dependencies": {
"object-hash": {
"version": "2.0.1",
"resolved": "https://registry.npmjs.org/object-hash/-/object-hash-2.0.1.tgz",
"integrity": "sha512-HgcGMooY4JC2PBt9sdUdJ6PMzpin+YtY3r/7wg0uTifP+HJWW8rammseSEHuyt0UeShI183UGssCJqm1bJR7QA=="
}
}
},
"winston-transport": {
"version": "4.3.0",
"resolved": "https://registry.npmjs.org/winston-transport/-/winston-transport-4.3.0.tgz",

View File

@@ -109,6 +109,7 @@
"vue-router": "^3.1.3",
"vuex": "^3.1.2",
"winston": "^3.2.1",
"winston-daily-rotate-file": "^4.4.1",
"yargs": "^13.3.0"
}
}

View File

@@ -597,6 +597,11 @@
-webkit-filter: drop-shadow(0 0 1px rgba(255, 255, 255, 0.5));
filter: drop-shadow(0 0 1px rgba(255, 255, 255, 0.5));
}
.tag[data-v-194630f6] {
background: rgba(0, 0, 0, 0.5);
padding: .5rem;
margin: 0 .5rem .5rem 0;
}
@media (max-width: 720px) {
.link[data-v-194630f6] {
padding: .5rem 1rem;
@@ -604,6 +609,9 @@
.logo[data-v-194630f6] {
max-height: 2.5rem;
}
.tags[data-v-194630f6] {
display: none;
}
}
/* $primary: #ff886c; */

[39 binary image files changed (3 added, 36 updated in place) — previews not shown]

View File

@@ -2704,6 +2704,22 @@ function getSites(networksMap) {
parameters: JSON.stringify({ id: 'sss' }),
network_id: networksMap.teamskeet,
},
{
slug: 'submissived',
name: 'Submissived',
description: '',
url: 'https://www.submissived.com',
parameters: JSON.stringify({ scraper: 'A' }),
network_id: networksMap.teamskeet,
},
{
slug: 'familystrokes',
name: 'Family Strokes',
description: '',
url: 'https://www.familystrokes.com',
parameters: JSON.stringify({ scraper: 'A' }),
network_id: networksMap.teamskeet,
},
// VIXEN
{
slug: 'vixen',

View File

@@ -404,6 +404,11 @@ function getTags(groupsMap) {
alias_for: null,
group_id: groupsMap.finish,
},
{
name: 'family taboo',
slug: 'family',
alias_for: null,
},
{
name: 'feet',
slug: 'feet',
@@ -1322,6 +1327,14 @@ function getTagAliases(tagsMap) {
name: 'huge toys',
alias_for: tagsMap.toys,
},
{
name: 'incest',
alias_for: tagsMap.family,
},
{
name: 'incest fantasy',
alias_for: tagsMap.family,
},
{
name: 'innie',
alias_for: tagsMap['innie-pussy'],
@@ -1553,6 +1566,21 @@
];
}
function getSiteTags() {
return {
allanal: ['anal', 'mff'],
boundgods: ['gay'],
buttmachineboys: ['gay'],
divinebitches: ['femdom'],
familystrokes: ['family'],
menonedge: ['gay'],
submissived: ['bdsm'],
swallowed: ['blowjob', 'deepthroat', 'facefucking'],
trueanal: ['anal'],
tspussyhunters: ['transsexual'],
};
}
exports.seed = knex => Promise.resolve()
.then(async () => upsert('tags_groups', groups, 'slug', knex))
.then(async () => {
@@ -1561,7 +1589,7 @@ exports.seed = knex => Promise.resolve()
const tags = getTags(groupsMap);
-return upsert('tags', tags, 'slug', knex);
+return upsert('tags', tags, 'slug');
})
.then(async () => {
const tags = await knex('tags').select('*').where({ alias_for: null });
@@ -1569,5 +1597,22 @@ exports.seed = knex => Promise.resolve()
const tagAliases = getTagAliases(tagsMap);
-return upsert('tags', tagAliases, 'name', knex);
+return upsert('tags', tagAliases, 'name');
})
.then(async () => {
const siteTags = getSiteTags();
const sites = await knex('sites').whereIn('slug', Object.keys(siteTags));
const tags = await knex('tags').whereIn('slug', Object.values(siteTags).flat());
const tagsMap = tags.reduce((acc, tag) => ({ ...acc, [tag.slug]: tag.id }), {});
const tagAssociations = sites
.map(site => siteTags[site.slug].map(tagSlug => ({
tag_id: tagsMap[tagSlug],
site_id: site.id,
inherit: true,
})))
.flat();
return upsert('sites_tags', tagAssociations, ['tag_id', 'site_id']);
});
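
Illustratively, given the getSiteTags map above, this pipeline produces one sites_tags row per (site, tag) pair, e.g. for familystrokes (IDs hypothetical):

// { tag_id: 17 /* 'family' */, site_id: 42 /* 'familystrokes' */, inherit: true }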

View File

@@ -3,6 +3,7 @@
const util = require('util');
const winston = require('winston');
const args = require('./argv');
require('winston-daily-rotate-file');
const logger = winston.createLogger({
format: winston.format.combine(
@@ -19,6 +20,11 @@ const logger = winston.createLogger({
),
timestamp: true,
}),
new winston.transports.DailyRotateFile({
datePattern: 'YYYY-MM-DD',
filename: 'log/%DATE%.log',
level: 'silly',
}),
],
});
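
With datePattern 'YYYY-MM-DD' and filename 'log/%DATE%.log', winston-daily-rotate-file writes one file per day and accepts everything up to 'silly', winston's most verbose level — which is where the log/2020-01-13.log file added in this commit comes from. A minimal usage sketch (the message is illustrative):

logger.info(`Storing ${releases.length} releases`); // also written to log/<today>.log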

View File

@@ -320,7 +320,6 @@ async function storeReleaseAssets(releases) {
await createMediaDirectory('releases', subpath);
-try {
// don't use Promise.all to prevent concurrency issues with duplicate detection
if (release.poster) {
await storePhotos([release.poster], {
@@ -346,9 +345,6 @@
targetId: release.id,
subpath,
}, identifier);
-} catch (error) {
-console.log(release.url, error);
-}
}, {
concurrency: 10,
});
@@ -409,7 +405,7 @@ async function storeReleases(releases) {
...releaseWithChannelSite,
};
} catch (error) {
-logger.error(error);
+logger.error(error.message);
return null;
}

View File

@@ -79,7 +79,7 @@ async function deepFetchReleases(baseReleases) {
deep: true,
};
} catch (error) {
-logger.error(error);
+logger.error(error.message);
return {
...release,

View File

@@ -6,15 +6,6 @@ const { JSDOM } = require('jsdom');
const cheerio = require('cheerio');
const moment = require('moment');
-const { matchTags } = require('../tags');
-const defaultTags = {
-swallowed: ['blowjob', 'deepthroat', 'facefuck'],
-trueanal: ['anal'],
-allanal: ['anal', 'fmf'],
-nympho: [],
-};
const descriptionTags = {
'anal cream pie': 'anal creampie',
'ass to mouth': 'ass to mouth',
@@ -55,7 +46,7 @@ async function scrapeLatestA(html, site) {
const actors = Array.from(element.querySelectorAll('h4.models a'), actorElement => actorElement.textContent);
const durationString = element.querySelector('.total-time').textContent.trim();
-// timestamp is somethines 00:00, sometimes 0:00:00
+// timestamp is sometimes 00:00, sometimes 0:00:00
const duration = durationString.split(':').length === 3
? moment.duration(durationString).asSeconds()
: moment.duration(`00:${durationString}`).asSeconds();
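// e.g. '12:34' is prefixed to '00:12:34' (754 s), while '1:02:03' parses directly (3723 s) — values illustrative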
@@ -70,7 +61,7 @@ async function scrapeLatestA(html, site) {
.map(photoUrl => photoUrl.slice(photoUrl.indexOf('http'), photoUrl.indexOf('.jpg') + 4));
const photos = [...primaryPhotos, ...secondaryPhotos];
-const tags = await matchTags([...defaultTags[site.slug], ...deriveTagsFromDescription(description)]);
+const tags = deriveTagsFromDescription(description);
const scene = {
url,
@@ -124,7 +115,7 @@ async function scrapeLatestB(html, site) {
.map(photoUrl => photoUrl.slice(photoUrl.indexOf('http'), photoUrl.indexOf('.jpg') + 4));
const photos = [...primaryPhotos, ...secondaryPhotos];
-const tags = await matchTags([...defaultTags[site.slug], ...deriveTagsFromDescription(description)]);
+const tags = deriveTagsFromDescription(description);
return {
url,
@@ -155,7 +146,7 @@ async function scrapeSceneA(html, url, site) {
const actors = Array.from(element.querySelectorAll('.models a'), actorElement => actorElement.textContent);
const durationString = element.querySelector('.total-time').textContent.trim();
-// timestamp is somethines 00:00, sometimes 0:00:00
+// timestamp is sometimes 00:00, sometimes 0:00:00
const duration = durationString.split(':').length === 3
? moment.duration(durationString).asSeconds()
: moment.duration(`00:${durationString}`).asSeconds();
@@ -163,7 +154,7 @@ async function scrapeSceneA(html, url, site) {
const { poster } = document.querySelector('.content-page-header video');
const { src, type } = document.querySelector('.content-page-header source');
-const tags = await matchTags([...defaultTags[site.slug], ...deriveTagsFromDescription(description)]);
+const tags = deriveTagsFromDescription(description);
return {
url,
@@ -204,7 +195,7 @@ async function scrapeSceneB(html, url, site) {
const { poster } = document.querySelector('.content-page-header-inner video');
const { src, type } = document.querySelector('.content-page-header-inner source');
-const tags = await matchTags([...defaultTags[site.slug], ...deriveTagsFromDescription(description)]);
+const tags = deriveTagsFromDescription(description);
const scene = {
url,

View File

@@ -30,7 +30,7 @@ async function scrapeProfile(html, _url, actorName) {
if (descriptionString) profile.description = descriptionString.textContent;
-if (bio.Birthday) profile.birthdate = moment.utc(bio.Birthday, 'MMM D, YYYY').toDate();
+if (bio.Birthday && !/-0001/.test(bio.Birthday)) profile.birthdate = moment.utc(bio.Birthday, 'MMM D, YYYY').toDate(); // birthyear sometimes -0001, see Spencer Bradley as of january 2020
if (bio.Born) profile.birthdate = moment.utc(bio.Born, 'YYYY-MM-DD').toDate();
profile.birthPlace = bio['Birth Place'] || bio.Birthplace;

View File

@@ -15,7 +15,7 @@ function extractTitle(pathname) {
function extractActors(str) {
return str
-.split(/,|\band/)
+.split(/,|\band\b/ig)
.filter(actor => !/\.{3}/.test(actor))
.map(actor => actor.trim())
.filter(actor => actor.length > 0);
@@ -81,7 +81,54 @@ function scrapeScene(html, site) {
return release;
}
-async function fetchLatest(site, page = 1) {
+function scrapeSceneA(html, site, sceneX, url) {
const scene = sceneX || new JSDOM(html).window.document;
const release = { site };
release.description = scene.querySelector('.scene-story').textContent.replace('...read more', '...').trim();
release.date = moment.utc(scene.querySelector('.scene-date').textContent, 'MM/DD/YYYY').toDate();
release.actors = Array.from(scene.querySelectorAll('.starring span'), el => extractActors(el.textContent)).flat();
const durationString = scene.querySelector('.time').textContent.trim();
const duration = ['00'].concat(durationString.split(':')).slice(-3).join(':'); // ensure hh:mm:ss
release.duration = moment.duration(duration).asSeconds();
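// e.g. '25:13' becomes ['00', '25', '13'] → '00:25:13' (1513 s); a three-part '1:02:03' passes through unchanged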
if (sceneX) {
const titleEl = scene.querySelector(':scope > a');
release.url = titleEl.href;
release.entryId = titleEl.id;
release.title = titleEl.title;
const [poster, ...photos] = Array.from(scene.querySelectorAll('.scene img'), el => el.src);
release.poster = [poster.replace('bio_big', 'video'), poster];
release.photos = photos;
}
if (!sceneX) {
release.title = scene.querySelector('.title span').textContent;
release.url = url;
release.poster = scene.querySelector('video').poster;
release.photos = [release.poster.replace('video', 'bio_small'), release.poster.replace('video', 'bio_small2')];
}
const [, entryIdA, entryIdB] = new URL(release.url).pathname.split('/');
release.entryId = entryIdA === 'scenes' ? entryIdB : entryIdA;
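// e.g. a pathname like /scenes/12345 yields entryId '12345', while /12345/some-title yields '12345' directly (URL shapes assumed from the ternary above)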
return release;
}
function scrapeLatestA(html, site) {
const { document } = new JSDOM(html).window;
const scenes = Array.from(document.querySelectorAll('.scenewrapper'));
return scenes.map(scene => scrapeSceneA(null, site, scene));
}
async function fetchLatestTeamSkeet(site, page = 1) {
const url = `https://www.teamskeet.com/t1/updates/load?fltrs[site]=${site.parameters.id}&page=${page}&view=newest&fltrs[time]=ALL&order=DESC`;
const res = await bhttp.get(url);
@@ -92,10 +139,37 @@ async function fetchLatest(site, page = 1) {
return null;
}
async function fetchLatestA(site) {
const url = `${site.url}/scenes`;
const res = await bhttp.get(url);
if (res.statusCode === 200) {
return scrapeLatestA(res.body.toString(), site);
}
return null;
}
async function fetchLatest(site, page = 1) {
if (site.parameters.id) {
return fetchLatestTeamSkeet(site, page);
}
if (site.parameters.scraper === 'A') {
return fetchLatestA(site, page);
}
return null;
}
async function fetchScene(url, site) {
const session = bhttp.session(); // resolve redirects
const res = await session.get(url);
if (site.parameters.scraper === 'A') {
return scrapeSceneA(res.body.toString(), site, null, url);
}
return scrapeScene(res.body.toString(), site);
}

View File

@@ -8,6 +8,11 @@ const knex = require('./knex');
const whereOr = require('./utils/where-or');
async function curateSite(site, includeParameters = false) {
const tags = await knex('sites_tags')
.select('tags.*', 'sites_tags.inherit')
.where('site_id', site.id)
.join('tags', 'tags.id', 'sites_tags.tag_id');
const parameters = JSON.parse(site.parameters);
return {
@@ -16,6 +21,7 @@ async function curateSite(site, includeParameters = false) {
url: site.url,
description: site.description,
slug: site.slug,
tags,
independent: !!parameters && parameters.independent,
parameters: includeParameters ? parameters : null,
network: {
@@ -55,7 +61,7 @@ function destructConfigNetworks(networks) {
async function findSiteByUrl(url) {
const { hostname } = new URL(url);
-const domain = hostname.replace(/^www./, '');
+const domain = hostname.replace(/www.|tour./, '');
const site = await knex('sites')
.leftJoin('networks', 'sites.network_id', 'networks.id')

View File

@@ -42,6 +42,7 @@ async function matchTags(rawTags) {
const tagEntries = await knex('tags')
.pluck('aliases.id')
.whereIn('tags.name', tags)
.orWhereIn('tags.slug', tags)
.where(function where() {
this
.whereNull('tags.alias_for')
@@ -58,15 +59,20 @@
}
async function associateTags(release, releaseId) {
-if (!release.tags || release.tags.length === 0) {
+const siteTags = release.site.tags.filter(tag => tag.inherit === true).map(tag => tag.id);
+const rawReleaseTags = release.tags || [];
+const releaseTags = rawReleaseTags.some(tag => typeof tag === 'string')
+? await matchTags(release.tags) // scraper returned raw tags
+: rawReleaseTags; // tags already matched by (outdated) scraper
+const tags = releaseTags.concat(siteTags);
+if (tags.length === 0) {
logger.info(`No tags available for (${release.site.name}, ${releaseId}) "${release.title}"`);
return;
}
-const tags = release.tags.some(tag => typeof tag === 'string')
-? await matchTags(release.tags) // scraper returned raw tags
-: release.tags; // tags already matched by (outdated) scraper
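// net effect: a Family Strokes release with no scraped tags still receives the site's inherited 'family' tag via siteTags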
const associationEntries = await knex('releases_tags')
.where('release_id', releaseId)
.whereIn('tag_id', tags);