Replaced bhttp with a patched fork (@thependulum/bhttp). Improved Jesse Loads Monster Facials scraper reliability (WIP). Added various tag photos.
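Note: the fork is treated as a drop-in replacement, so call sites only change their require path. A minimal sketch of the intended usage, assuming the fork keeps upstream bhttp's promise-based API; the fetchHtml helper is illustrative and not part of this changeset:

const bhttp = require('@thependulum/bhttp'); // patched fork, same call signatures as upstream bhttp

// hypothetical helper, for illustration only
async function fetchHtml(url) {
  const res = await bhttp.get(url); // resolves with a response object; body is a Buffer for HTML
  return res.body.toString();
}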
@@ -103,6 +103,7 @@ async function mounted() {
       'toy-dp',
       'double-dildo',
       'double-dildo-blowjob',
+      'double-dildo-kiss',
       'double-dildo-anal',
     ],
     roleplay: [
@@ -1066,6 +1066,56 @@
         "tar": "^4.4.6"
       }
     },
+    "@thependulum/bhttp": {
+      "version": "1.2.6",
+      "resolved": "https://registry.npmjs.org/@thependulum/bhttp/-/bhttp-1.2.6.tgz",
+      "integrity": "sha512-jqYVj99upU9vfMq4cjMEi87lbmf381e2A8dM91IPgU7TVF9ryN0rYHDK/IcIWbPBJwa8q9l0GzazN2xC2R9TgA==",
+      "requires": {
+        "bluebird": "^2.8.2",
+        "concat-stream": "^1.4.7",
+        "debug": "^2.1.1",
+        "dev-null": "^0.1.1",
+        "errors": "^0.2.0",
+        "extend": "^2.0.0",
+        "form-data2": "^1.0.0",
+        "form-fix-array": "^1.0.0",
+        "lodash.clonedeep": "^4.5.0",
+        "lodash.merge": "^4.6.2",
+        "stream-length": "^1.0.2",
+        "through2-sink": "^1.0.0",
+        "through2-spy": "^1.2.0",
+        "tough-cookie": "^2.3.1"
+      },
+      "dependencies": {
+        "bluebird": {
+          "version": "2.11.0",
+          "resolved": "https://registry.npmjs.org/bluebird/-/bluebird-2.11.0.tgz",
+          "integrity": "sha1-U0uQM8AiyVecVro7Plpcqvu2UOE="
+        },
+        "debug": {
+          "version": "2.6.9",
+          "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz",
+          "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==",
+          "requires": {
+            "ms": "2.0.0"
+          }
+        },
+        "ms": {
+          "version": "2.0.0",
+          "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz",
+          "integrity": "sha1-VgiurfwAvmwpAd9fmGF4jeDVl8g="
+        },
+        "tough-cookie": {
+          "version": "2.5.0",
+          "resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-2.5.0.tgz",
+          "integrity": "sha512-nlLsUzgm1kfLXSXfRZMc1KLAugd4hqJHDTvc2hDIwS3mZAfMEuMbc03SujMF+GEcpaX/qboeycw6iO8JwVv2+g==",
+          "requires": {
+            "psl": "^1.1.28",
+            "punycode": "^2.1.1"
+          }
+        }
+      }
+    },
     "@tokenizer/token": {
       "version": "0.1.1",
       "resolved": "https://registry.npmjs.org/@tokenizer/token/-/token-0.1.1.tgz",
@@ -72,6 +72,7 @@
     "@graphile-contrib/pg-order-by-related": "^1.0.0-beta.6",
     "@graphile-contrib/pg-simplify-inflector": "^5.0.0-beta.1",
     "@tensorflow/tfjs-node": "^1.5.2",
+    "@thependulum/bhttp": "^1.2.6",
    "babel-polyfill": "^6.26.0",
    "bhttp": "^1.2.6",
    "blake2": "^4.0.0",
[Binary image changes: 18 new tag photos added and 2 existing images replaced, ranging from 7.0 KiB to 2.0 MiB.]
@@ -328,7 +328,7 @@ const tags = [
   {
     name: 'double dildo',
     slug: 'double-dildo',
-    description: 'Two girls fucking eachother using either end of a double-sided dildo.',
+    description: 'Two girls fucking eachother using either end of a double-sided dildo. They can suck it for a [double dildo blowjob](/tag/double-dildo-blowjob), deepthroat it for a [double dildo kiss](/tag/double-dildo-kiss), or put it up their ass for [double dildo anal](/tag/double-dildo-anal).',
     priority: 4,
   },
   {
@@ -1372,6 +1372,10 @@ const aliases = [
     name: 'dp',
     for: 'dp',
   },
+  {
+    name: 'double dildo deepthroat',
+    for: 'double-dildo-kiss',
+  },
   {
     name: 'double penetration (dp)',
     for: 'dp',
@@ -617,6 +617,7 @@ const tagPosters = [
   ['double-dildo', 0, 'Kali Roses in "Double Dildo Party" for KaliRoses.com'],
   ['double-dildo-anal', 0, 'Vina Sky and Kenzie Reeves in "Vina Sky\'s 1st Lesbian Anal" for HardX'],
   ['double-dildo-blowjob', 0, 'Adriana Chechik and Vicki Chase in "Anal Savages 1" for Jules Jordan'],
+  ['double-dildo-kiss', 0, 'Giselle Palmer and Romi Rain in "Punishable Behavior" for Brazzers'],
   ['dp', 3, 'Hime Marie in LegalPorno AA047'],
   ['dvp', 'poster', 'Riley Reid in "Pizza That Ass" for Reid My Lips'],
   ['dv-tp', 'poster', 'Juelz Ventura in "Gangbanged 5" for Elegant Angel'],
@@ -721,8 +722,11 @@ const tagPhotos = [
   ['deepthroat', 1, 'Jynx Maze in "Slutty and Sluttier 13" for Evil Angel'],
   ['deepthroat', 0, 'Chanel Grey in "Deepthroating Is Fun" for Throated'],
   ['double-blowjob', 0, 'Kira Noir and Kali Roses for Brazzers'],
-  ['double-dildo-anal', 2, 'Adria Rae and Megan Rain in "Best Friends Anal" for Holed'],
   ['double-dildo-anal', 1, 'Sammie Rhodes and Ainsley Addision in "Tickle Me Pink" for We Live Together (Reality Kings)'],
+  ['double-dildo-anal', 3, 'Amber Rayne, Phoenix Marie and Roxy Raye in "Deep Anal Abyss 4" for Evil Angel'],
+  ['double-dildo-anal', 2, 'Adria Rae and Megan Rain in "Best Friends Anal" for Holed'],
+  ['double-dildo-blowjob', 3, 'Angela White and Madison Ivy in "Sunbathing Babes" for Brazzers'],
+  ['double-dildo-blowjob', 2, 'Giselle Palmer and Romi Rain in "Punishable Behavior" for Brazzers'],
   ['double-dildo-blowjob', 1, 'Aidra Fox and Reena Sky in "Reena\'s Got A Staring Problem" for Brazzers'],
   ['double-dildo-dp', 0, 'u/LacyCrow "Sometimes you have to do it yourself"'],
   ['dp', 5, 'Lana Rhoades in "Gangbang Me 3" for HardX'],
@@ -741,6 +745,7 @@ const tagPhotos = [
   ['facefucking', 2, 'Jynx Maze for Throated'],
   ['facefucking', 4, 'Brooklyn Gray in "Throats Fucks 6" for Evil Angel'],
   ['facefucking', 3, 'Adriana Chechik in "Performing Magic Butt Tricks With Jules Jordan. What Will Disappear In Her Ass?" for Jules Jordan'],
+  ['fake-boobs', 14, 'Rikki Six for Dream Dolls'],
   ['fake-boobs', 13, 'Kitana Lure for Asshole Fever'],
   ['fake-boobs', 11, 'Jessa Rhodes and Cali Carter in "Busty Anal Workout" for LesbianX'],
   ['fake-boobs', 10, 'Tia Cyrus in "Titty-Fucked Yoga Goddess" for Latina Sex Tapes'],
@@ -788,6 +793,7 @@ const tagPhotos = [
   ['trainbang', 0, 'Nicole Black in GIO971 for LegalPorno'],
   ['tap', 1, 'Natasha Teen in SZ2098 for LegalPorno'],
   ['tap', 2, 'Kira Thorn in GIO1018 for LegalPorno'],
+  ['toy-anal', 2, 'Denise, Irina and Laki in "Sexy Slumber" for Lez Cuties'],
   ['toy-anal', 0, 'Kira Noir in 1225 for InTheCrack'],
   ['toy-dp', 0, 'Marley Brinx, Ivy Lebelle and Lyra Law in "Marley Brinx First GGDP" for LesbianX'],
 ]
@@ -792,6 +792,8 @@ async function associateActors(releases, batchId) {
 
   await bulkInsert('releases_actors', releaseActorAssociations, false);
 
+  logger.verbose(`Associated ${releaseActorAssociations.length} actors to ${releases.length} scenes`);
+
   return actors;
 }
 
@@ -262,11 +262,13 @@ async function flushEntities(networkSlugs = [], channelSlugs = []) {
     return;
   }
 
-  await Promise.all([
+  const [deletedScenesCount, deletedMoviesCount] = await Promise.all([
    deleteScenes(sceneIds),
    deleteMovies(movieIds),
  ]);
 
+  logger.info(`Removed ${deletedScenesCount} scenes and ${deletedMoviesCount} movies for ${entitySlugs}`);
+
   await flushOrphanedMedia();
 }
 
@@ -279,6 +279,8 @@ async function extractSource(baseSource, { existingExtractMediaByUrl }) {
 }
 
 async function storeImageFile(media, hashDir, hashSubDir, filename, filedir, filepath) {
+  logger.silly(`Storing permanent media files for ${media.id} from ${media.src} at ${filepath}`);
+
   try {
     const thumbdir = path.join(media.role, 'thumbs', hashDir, hashSubDir);
     const thumbpath = path.join(thumbdir, filename);
@@ -620,6 +622,7 @@ async function storeMedias(baseMedias) {
   const fetchedMedias = await Promise.map(
     baseMedias,
     async baseMedia => fetchMedia(baseMedia, { existingSourceMediaByUrl, existingExtractMediaByUrl }),
+    { concurrency: 100 }, // don't overload disk (or network, although this has its own throttling)
   );
 
   const [uniqueHashMedias, existingHashMedias] = await findHashDuplicates(fetchedMedias);
@@ -627,6 +630,7 @@ async function storeMedias(baseMedias) {
   const savedMedias = await Promise.map(
     uniqueHashMedias,
     async baseMedia => storeFile(baseMedia),
+    { concurrency: 100 }, // don't overload disk
   );
 
   if (argv.force) {
@@ -634,6 +638,7 @@ async function storeMedias(baseMedias) {
     await Promise.map(
       existingHashMedias,
       async baseMedia => storeFile(baseMedia),
+      { concurrency: 100 }, // don't overload disk
     );
   }
 
@@ -784,7 +789,7 @@ async function flushOrphanedMedia() {
 
   await fsPromises.rmdir(path.join(config.media.path, 'temp'), { recursive: true });
 
-  logger.info('Removed temporary media directory');
+  logger.info('Cleared temporary media directory');
 }
 
 module.exports = {
@@ -126,15 +126,31 @@ async function searchReleases(query, limit = 100) {
 }
 
 async function deleteScenes(sceneIds) {
-  await knex('releases')
+  if (sceneIds.length === 0) {
+    return 0;
+  }
+
+  const deleteCount = await knex('releases')
     .whereIn('id', sceneIds)
     .delete();
+
+  logger.info(`Removed ${deleteCount}/${sceneIds.length} scenes`);
+
+  return deleteCount;
 }
 
 async function deleteMovies(movieIds) {
-  await knex('movies')
+  if (movieIds.length === 0) {
+    return 0;
+  }
+
+  const deleteCount = await knex('movies')
     .whereIn('id', movieIds)
     .delete();
+
+  logger.info(`Removed ${deleteCount}/${movieIds.length} movies`);
+
+  return deleteCount;
 }
 
 async function flushBatches(batchIds) {
@@ -161,11 +177,13 @@ async function flushBatches(batchIds) {
     return;
   }
 
-  await Promise.all([
+  const [deletedScenesCount, deletedMoviesCount] = await Promise.all([
    deleteScenes(sceneIds),
    deleteMovies(movieIds),
  ]);
 
+  logger.info(`Removed ${deletedScenesCount} scenes and ${deletedMoviesCount} movies for batches ${batchIds}`);
+
   await flushOrphanedMedia();
 }
 
@@ -1,6 +1,6 @@
 'use strict';
 
-const bhttp = require('bhttp');
+const bhttp = require('@thependulum/bhttp');
 
 const { post } = require('../utils/http');
 const { extractDate } = require('../utils/qu');
@@ -1,7 +1,7 @@
 'use strict';
 
 /* eslint-disable newline-per-chained-call */
-const bhttp = require('bhttp');
+const bhttp = require('@thependulum/bhttp');
 const cheerio = require('cheerio');
 const moment = require('moment');
 
@@ -2,7 +2,7 @@
 
 /* eslint-disable newline-per-chained-call */
 // const Promise = require('bluebird');
-const bhttp = require('bhttp');
+const bhttp = require('@thependulum/bhttp');
 const { JSDOM } = require('jsdom');
 const moment = require('moment');
 
@@ -1,6 +1,6 @@
 'use strict';
 
-const { get, initAll, formatDate } = require('../utils/qu');
+const { get, initAll } = require('../utils/qu');
 
 function scrapeLatest(scenes, dates, site) {
   return scenes.map(({ qu }, index) => {
@@ -8,21 +8,23 @@ function scrapeLatest(scenes, dates, site) {
     const path = qu.url('a[href*="videos/"]');
 
     if (path) {
-      release.url = `${site.url}/visitors/${path}`;
+      if (/\.wmv$/.test(path)) {
+        release.trailer = `${site.url}/visitors/${path}`;
+      } else {
+        release.url = `${site.url}/visitors/${path}`;
+      }
     }
 
+    console.log(dates, dates[index], path);
+
     if (dates && dates[index]) {
       release.date = dates[index].qu.date(null, 'MM/DD/YYYY');
     }
 
-    const entryId = path?.match(/videos\/([a-zA-Z0-9]+)(?:_hd)?_trailer/)?.[1]
+    // release.entryId = release.date ? `${formatDate(release.date, 'YYYY-MM-DD')}-${entryId}` : entryId;
+    release.entryId = path?.match(/videos\/([a-zA-Z0-9]+)(?:_hd)?_trailer/)?.[1]
       || qu.img('img[src*="graphics/fft"]')?.match(/fft_(\w+).gif/)?.[1];
 
-    if (!entryId) {
-      return null;
-    }
-
-    release.entryId = release.date ? `${formatDate(release.date, 'YYYY-MM-DD')}-${entryId}` : entryId;
-
     release.description = qu.q('tbody tr:nth-child(3) font', true);
 
     const infoLine = qu.q('font[color="#663366"]', true);
@@ -43,7 +45,14 @@ function scrapeScene({ qu }, url, site) {
   const release = { url };
 
   const { pathname } = new URL(url);
-  release.entryId = pathname.match(/videos\/(\w+)_hd_trailer/)[1];
+
+  release.entryId = pathname?.match(/videos\/([a-zA-Z0-9]+)(?:_hd)?_trailer/)?.[1];
+
+  if (/\.wmv$/.test(pathname)) {
+    release.trailer = url;
+
+    return release;
+  }
 
   const actor = qu.q('font[color="#990033"] strong', true);
   release.actors = [actor];
|
@ -2,7 +2,7 @@
|
||||||
|
|
||||||
const util = require('util');
|
const util = require('util');
|
||||||
const Promise = require('bluebird');
|
const Promise = require('bluebird');
|
||||||
const bhttp = require('bhttp');
|
const bhttp = require('@thependulum/bhttp');
|
||||||
const cheerio = require('cheerio');
|
const cheerio = require('cheerio');
|
||||||
const { JSDOM } = require('jsdom');
|
const { JSDOM } = require('jsdom');
|
||||||
const moment = require('moment');
|
const moment = require('moment');
|
||||||
|
|
|
@@ -2,7 +2,7 @@
 
 /* eslint-disable newline-per-chained-call */
 const Promise = require('bluebird');
-const bhttp = require('bhttp');
+const bhttp = require('@thependulum/bhttp');
 const { CookieJar } = Promise.promisifyAll(require('tough-cookie'));
 const moment = require('moment');
 
@@ -1,6 +1,6 @@
 'use strict';
 
-const bhttp = require('bhttp');
+const bhttp = require('@thependulum/bhttp');
 const { JSDOM } = require('jsdom');
 const moment = require('moment');
 
@@ -1,7 +1,7 @@
 'use strict';
 
 /* eslint-disable newline-per-chained-call */
-const bhttp = require('bhttp');
+const bhttp = require('@thependulum/bhttp');
 const cheerio = require('cheerio');
 const moment = require('moment');
 
@@ -1,6 +1,6 @@
 'use strict';
 
-const bhttp = require('bhttp');
+const bhttp = require('@thependulum/bhttp');
 const cheerio = require('cheerio');
 
 const {
@@ -258,8 +258,7 @@ async function fetchLatest(entity, page, options) {
     .limit(faker.random.number({ min: 2, max: 15 }))
     .pluck('name');
 
-  // release.actors = actors(release);
-  release.actors = [null, 'Charles Darwin'];
+  release.actors = [...actors(release), null]; // include empty actor to ensure proper handling
   release.title = title(release);
 
   return release;
@@ -1,7 +1,7 @@
 'use strict';
 
 /* eslint-disable no-unused-vars */
-const bhttp = require('bhttp');
+const bhttp = require('@thependulum/bhttp');
 
 const { get, ed } = require('../utils/q');
 const { fetchApiLatest, fetchApiUpcoming, fetchScene, fetchApiProfile } = require('./gamma');
@@ -1,6 +1,6 @@
 'use strict';
 
-const bhttp = require('bhttp');
+const bhttp = require('@thependulum/bhttp');
 
 const { fetchLatest, fetchUpcoming, scrapeScene, fetchProfile } = require('./gamma');
 
@@ -21,12 +21,10 @@ async function bulkUpsert(table, items, conflict, update = true, chunkSize) {
   return knex.transaction(async (transaction) => {
     const chunked = chunk(items, chunkSize);
 
-    // console.log(items.length, chunkSize, chunked.length, chunked[0]?.length);
-
     const queries = chunked
       .map(chunkItems => knex.raw(updated || ':query RETURNING *;', {
-        query: knex(table).insert(chunkItems).transacting(transaction),
-      }));
+        query: knex(table).insert(chunkItems),
+      }).transacting(transaction));
 
     const responses = await Promise.all(queries);
 
@@ -4,7 +4,7 @@ const util = require('util');
 const stream = require('stream');
 const config = require('config');
 const tunnel = require('tunnel');
-const bhttp = require('bhttp');
+const bhttp = require('@thependulum/bhttp');
 const taskQueue = require('promise-task-queue');
 
 const pipeline = util.promisify(stream.pipeline);