Compare commits

4 commits: f4b1fb4831 ... 4af7597441

Author | SHA1 | Date
---|---|---
 | 4af7597441 |
 | c37d4ad01f |
 | 2801732f57 |
 | b188bc5744 |

@@ -1,6 +1,6 @@
 {
   "name": "traxxx",
-  "version": "1.138.7",
+  "version": "1.138.9",
   "lockfileVersion": 1,
   "requires": true,
   "dependencies": {

@@ -1,6 +1,6 @@
 {
   "name": "traxxx",
-  "version": "1.138.7",
+  "version": "1.138.9",
   "description": "All the latest porn releases in one place",
   "main": "src/app.js",
   "scripts": {

Binary file not shown. Before: 20 KiB, after: 20 KiB.
File diff suppressed because one or more lines are too long. After: 5.1 KiB.
Binary file not shown. Before: 20 KiB, after: 20 KiB.

@@ -3415,21 +3415,21 @@ const sites = [
   {
     slug: 'paintoy',
     name: 'Paintoy',
-    url: 'https://www.paintoy.com',
+    url: 'http://www.paintoy.com',
     tags: ['bdsm'],
     parent: 'insex',
   },
   {
     slug: 'aganmedon',
     name: 'Agan Medon',
-    url: 'https://www.aganmedon.com',
+    url: 'http://www.aganmedon.com',
     tags: ['bdsm', 'animated'],
     parent: 'insex',
   },
   {
     slug: 'sensualpain',
     name: 'Sensual Pain',
-    url: 'https://www.sensualpain.com',
+    url: 'http://www.sensualpain.com',
     tags: ['bdsm'],
     parent: 'insex',
   },

@@ -114,7 +114,13 @@ function getAverage(items) {
 }
 
 function toBaseActors(actorsOrNames, release) {
-  return actorsOrNames.map((actorOrName) => {
+  if (!actorsOrNames) {
+    return [];
+  }
+
+  const baseActors = actorsOrNames
+    .filter(actorOrName => actorOrName && (typeof actorOrName === 'string' || actorOrName.name))
+    .map((actorOrName) => {
     const [baseName, entryId] = (actorOrName.name || actorOrName).split(':');
 
     const name = capitalize(baseName);

@@ -137,6 +143,8 @@ function toBaseActors(actorsOrNames, release) {
 
     return baseActor;
   });
+
+  return baseActors;
 }
 
 function curateActor(actor, withDetails = false, isProfile = false) {

@@ -832,4 +840,5 @@ module.exports = {
   fetchActor,
   scrapeActors,
   searchActors,
+  toBaseActors,
 };

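The guard and filter added to toBaseActors make it safe to call with a missing or partly malformed actor list. A minimal sketch of the intended behavior, using a simplified stand-in (the repository's capitalize helper and the rest of the actor curation are omitted):

```js
// Simplified stand-in for toBaseActors; shows only the new guard and filter.
function toBaseActorsSketch(actorsOrNames) {
  if (!actorsOrNames) {
    return []; // null or undefined input no longer throws
  }

  return actorsOrNames
    // drop null entries and objects without a usable name
    .filter(actorOrName => actorOrName && (typeof actorOrName === 'string' || actorOrName.name))
    .map((actorOrName) => {
      const [baseName, entryId] = (actorOrName.name || actorOrName).split(':');
      return { name: baseName, entryId: entryId || null };
    });
}

// Mixed input: the null and the name-less object are dropped instead of crashing.
console.log(toBaseActorsSketch([null, 'charles darwin', { id: 7 }, { name: 'jane doe:jd1' }]));
// [ { name: 'charles darwin', entryId: null }, { name: 'jane doe', entryId: 'jd1' } ]
```
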
@@ -30,20 +30,20 @@ const { argv } = yargs
     type: 'boolean',
     alias: 'web',
   })
-  .option('networks', {
+  .option('include-networks', {
     describe: 'Network to scrape all channels from (overrides configuration)',
     type: 'array',
-    alias: 'network',
+    alias: ['include-network', 'networks', 'network'],
   })
   .option('exclude-networks', {
     describe: 'Network not to scrape any channels from (overrides configuration)',
     type: 'array',
     alias: 'exclude-network',
   })
-  .option('channels', {
+  .option('include-channels', {
     describe: 'Channel to scrape (overrides configuration)',
     type: 'array',
-    alias: 'channel',
+    alias: ['include-channel', 'channels', 'channel'],
   })
   .option('exclude-channels', {
     describe: 'Channel not to scrape (overrides configuration)',

@@ -64,8 +64,8 @@ async function fetchIncludedEntities() {
     includeAll: !argv.networks && !argv.channels && !config.include?.networks && !config.include?.channels,
     includedNetworks: argv.networks || (!argv.channels && config.include?.networks) || [],
     includedChannels: argv.channels || (!argv.networks && config.include?.channels) || [],
-    excludedNetworks: argv.excludeNetworks || config.exclude?.networks || [],
-    excludedChannels: argv.excludeChannels || config.exclude?.channels || [],
+    excludedNetworks: argv.excludeNetworks || config.exclude?.networks.filter(network => !argv.networks?.includes(network)) || [], // ignore explicitly included networks
+    excludedChannels: argv.excludeChannels || config.exclude?.channels.filter(channel => !argv.channels?.includes(channel)) || [], // ignore explicitly included channels
   };
 
   const rawNetworks = await knex.raw(`

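Because yargs accepts alias arrays, the renamed options stay backwards compatible: the old --networks and --channels spellings map onto the same parsed keys as the new --include-networks and --include-channels. A minimal sketch, assuming only standard yargs alias resolution (everything outside this one option is omitted):

```js
const yargs = require('yargs');

const { argv } = yargs
  .option('include-networks', {
    describe: 'Network to scrape all channels from (overrides configuration)',
    type: 'array',
    alias: ['include-network', 'networks', 'network'],
  });

// `node app --network kink` and `node app --include-networks kink` both yield
// argv.networks === ['kink'] and argv.includeNetworks === ['kink'], which is
// why fetchIncludedEntities above can keep reading argv.networks unchanged.
console.log(argv.networks, argv.includeNetworks);
```
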
@@ -1,23 +1,35 @@
 'use strict';
 
-const { get, initAll } = require('../utils/qu');
+const { get, initAll, formatDate } = require('../utils/qu');
 
 function scrapeLatest(scenes, dates, site) {
   return scenes.map(({ qu }, index) => {
     const release = {};
+    const path = qu.url('a[href*="videos/"]');
 
-    const path = qu.url('a');
-    release.url = `${site.url}/visitors/${path}`;
-    release.entryId = path.match(/videos\/([a-zA-Z0-9]+)(?:_hd)?_trailer/)?.[1];
+    if (path) {
+      release.url = `${site.url}/visitors/${path}`;
+    }
 
     if (dates && dates[index]) {
       release.date = dates[index].qu.date(null, 'MM/DD/YYYY');
     }
 
+    const entryId = path?.match(/videos\/([a-zA-Z0-9]+)(?:_hd)?_trailer/)?.[1]
+      || qu.img('img[src*="graphics/fft"]')?.match(/fft_(\w+).gif/)?.[1];
+
+    if (!entryId) {
+      return null;
+    }
+
+    release.entryId = release.date ? `${formatDate(release.date, 'YYYY-MM-DD')}-${entryId}` : entryId;
     release.description = qu.q('tbody tr:nth-child(3) font', true);
 
     const infoLine = qu.q('font[color="#663366"]', true);
-    if (infoLine) release.duration = Number(infoLine.match(/(\d+) min/)[1]) * 60;
+
+    if (infoLine) {
+      release.duration = Number(infoLine.match(/(\d+) min/i)?.[1] || infoLine.match(/video: (\d+)/i)?.[1]) * 60 || null;
+    }
 
     const poster = qu.img('img[src*="photos/"][width="400"]');
     release.poster = `${site.url}/visitors/${poster}`;

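The rewritten scrapeLatest no longer assumes every scene row links to a trailer: the entry ID falls back from the trailer path to the fft_* preview graphic, and a scene with neither is returned as null rather than scraped with a broken ID. A small sketch of just the fallback chain, with made-up input strings:

```js
// Hypothetical attribute values; the real ones come from qu.url() and qu.img().
const path = 'videos/abc123_hd_trailer.wmv';
const img = 'graphics/fft_xyz9.gif';

const entryId = path?.match(/videos\/([a-zA-Z0-9]+)(?:_hd)?_trailer/)?.[1]
  || img?.match(/fft_(\w+).gif/)?.[1];

console.log(entryId); // 'abc123'; with path = null it falls back to 'xyz9'
```

Prefixing the entry ID with the formatted date, when one is available, presumably keeps IDs unique if a site reuses the same slug across updates.
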
@@ -258,7 +258,8 @@ async function fetchLatest(entity, page, options) {
     .limit(faker.random.number({ min: 2, max: 15 }))
     .pluck('name');
 
-  release.actors = actors(release);
+  // release.actors = actors(release);
+  release.actors = [null, 'Charles Darwin'];
   release.title = title(release);
 
   return release;

@@ -9,7 +9,7 @@ const slugify = require('./utils/slugify');
 const bulkInsert = require('./utils/bulk-insert');
 const resolvePlace = require('./utils/resolve-place');
 const { formatDate } = require('./utils/qu');
-const { associateActors, scrapeActors } = require('./actors');
+const { associateActors, scrapeActors, toBaseActors } = require('./actors');
 const { associateReleaseTags } = require('./tags');
 const { curateEntity } = require('./entities');
 const { associateReleaseMedia } = require('./media');

@@ -291,7 +291,8 @@ async function storeScenes(releases) {
   const [batchId] = await knex('batches').insert({ comment: null }).returning('id');
 
   const releasesWithChannels = await attachChannelEntities(releases);
-  const releasesWithStudios = await attachStudios(releasesWithChannels);
+  const releasesWithBaseActors = releasesWithChannels.map(release => ({ ...release, actors: toBaseActors(release.actors) }));
+  const releasesWithStudios = await attachStudios(releasesWithBaseActors);
 
   // uniqueness is entity ID + entry ID, filter uniques after adding entities
   const { uniqueReleases, duplicateReleases, duplicateReleaseEntries } = await filterDuplicateReleases(releasesWithStudios);

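Mapping toBaseActors over every release at store time means the downstream steps (attachStudios, duplicate filtering, association) always see normalized actor objects instead of raw strings or nulls. A sketch of that normalization pass, with a one-line stand-in for toBaseActors and placeholder release data:

```js
// Stand-in normalizer; the real code imports toBaseActors from ./actors.
const toBaseActors = actors => (actors || []).filter(Boolean).map(a => ({ name: a.name || a }));

const releasesWithChannels = [
  { entryId: 'abc123', actors: ['jane doe', null] },
  { entryId: 'def456' }, // no actors at all
];

const releasesWithBaseActors = releasesWithChannels.map(release => ({
  ...release,
  actors: toBaseActors(release.actors),
}));

console.log(releasesWithBaseActors[1].actors); // [] (missing lists become empty arrays)
```
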
@@ -109,7 +109,12 @@ async function scrapeReleases(scraper, entity, preData, isUpcoming) {
       return accReleases;
     }
 
-    const pageReleasesWithEntity = pageReleases.map(release => ({ ...release, entity: release.entity || entity }));
+    const validPageReleases = pageReleases.filter(release => release?.entryId); // filter out empty and unidentified releases
+    const pageReleasesWithEntity = validPageReleases.map(release => ({ ...release, entity: release.entity || entity }));
+
+    if (pageReleases.length > validPageReleases.length) {
+      logger.warn(`Found ${pageReleases.length - validPageReleases.length} empty or unidentified releases on page ${page} for '${entity.name}'`);
+    }
 
     if (needNextPage(pageReleasesWithEntity, accReleases, isUpcoming)) {
       return scrapeReleasesPage(page + 1, accReleases.concat(pageReleasesWithEntity), isUpcoming);

@@ -119,6 +124,7 @@ async function scrapeReleases(scraper, entity, preData, isUpcoming) {
   }
 
   const releases = await scrapeReleasesPage(argv.page || 1, []);
+
   const hasDates = releases.every(release => !!release.date);
 
   const limitedReleases = (argv.last && releases.slice(0, Math.max(argv.last, 0)))

@@ -133,7 +139,7 @@ async function scrapeReleases(scraper, entity, preData, isUpcoming) {
 }
 
 async function scrapeLatestReleases(scraper, entity, preData) {
-  if ((!argv.latest && !argv.last && !argv.after) || !scraper.fetchLatest) {
+  if ((!argv.latest && !argv.last) || !scraper.fetchLatest) {
     return emptyReleases;
   }
 
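The new guard in scrapeReleases pairs with scrapers that now return null for unidentified scenes: both null entries and releases lacking an entryId are filtered out before pagination continues, with a warning so the drop stays visible. A minimal sketch of the filter:

```js
// Hypothetical page result: one null (unidentified scene) and one release without an ID.
const pageReleases = [null, { entryId: 'abc123', title: 'Scene A' }, { title: 'Scene B' }];

const validPageReleases = pageReleases.filter(release => release?.entryId);

console.log(validPageReleases); // [ { entryId: 'abc123', title: 'Scene A' } ]
console.log(`Found ${pageReleases.length - validPageReleases.length} empty or unidentified releases`); // Found 2 ...
```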