Compare commits
No commits in common. "68db552e3768c71fa29da3443dc42659179dda00" and "4fdb4ffec39e635145b051ac318d533b522b9477" have entirely different histories.
68db552e37 ... 4fdb4ffec3
@@ -1,36 +0,0 @@
-## Distributed by Jules Jordan Video
-* The Ass Factory
-* Kink.com
-* Amateur Allure
-* Vince Voyeur Unleashed
-* Morally Corrupt
-* Trench Coat X
-* Live Gonzo
-* Sins
-
-## Manuel Ferrara exclusives
-These DVDs supposedly do not appear on the Jules Jordan main website
-
-Anal Nymphos Anal Legends http://manuelferrara.com/members/dvds/anal-nymphos-anal-legends.html
-Anal Nymphos Anal Legends #2 http://manuelferrara.com/members/dvds/anal-nymphos-anal-legends-2.html
-Anal Nymphos Anal Legends #3 http://manuelferrara.com/members/dvds/anal-nymphos-anal-legends-3.html
-Anal Nymphos Anal Legends #4 http://manuelferrara.com/members/dvds/anal-nymphos-anal-legends-4.html
-Big Ass Tits http://manuelferrara.com/members/dvds/big-ass-tits.html
-Bounce #1 http://manuelferrara.com/members/dvds/bounce.html
-Bounce #2 http://manuelferrara.com/members/dvds/bounce-2.html
-Bounce #3 http://manuelferrara.com/members/dvds/bounce-3.html
-Bounce #4 http://manuelferrara.com/members/dvds/bounce-4.html
-Dirty Talk #6 http://manuelferrara.com/members/dvds/dirty-talk-6.html
-Dirty Talk #7 http://manuelferrara.com/members/dvds/dirty-talk-7.html
-Pretty Little Sluts http://manuelferrara.com/members/dvds/pretty-little-sluts.html
-Pretty Little Sluts #2 http://manuelferrara.com/members/dvds/pretty-little-sluts-2.html
-Raw #30 http://manuelferrara.com/members/dvds/raw-30.html
-Raw #31 http://manuelferrara.com/members/dvds/raw31.html
-Raw #32 http://manuelferrara.com/members/dvds/raw-32.html
-Raw #33 http://manuelferrara.com/members/dvds/raw-33.html
-Raw #34 http://manuelferrara.com/members/dvds/raw-34.html
-Raw #35 http://manuelferrara.com/members/dvds/raw-35.html
-Raw #36 http://manuelferrara.com/members/dvds/raw-36.html
-Raw #37 http://manuelferrara.com/members/dvds/raw-37.html
-Raw #38 http://manuelferrara.com/members/dvds/raw-38.html
-Sex Machines http://manuelferrara.com/members/dvds/sex-machines.html
@@ -1,6 +1,6 @@
 {
   "name": "traxxx",
-  "version": "1.49.6",
+  "version": "1.49.5",
   "lockfileVersion": 1,
   "requires": true,
   "dependencies": {
@@ -1,6 +1,6 @@
 {
   "name": "traxxx",
-  "version": "1.49.6",
+  "version": "1.49.5",
   "description": "All the latest porn releases in one place",
   "main": "src/app.js",
   "scripts": {
Binary file not shown (image removed; size before: 9.9 KiB).
@@ -4,7 +4,6 @@ const Promise = require('bluebird');
 const UrlPattern = require('url-pattern');
 const moment = require('moment');

-const logger = require('./logger');
 const knex = require('./knex');
 const argv = require('./argv');
 const scrapers = require('./scrapers/scrapers');
@@ -253,7 +252,7 @@ async function storeActor(actor, scraped = false, scrapeSuccess = false) {

   await storeSocialLinks(actor.social, actorEntry.id);

-  logger.info(`Added new entry for actor '${actor.name}'`);
+  console.log(`Added new entry for actor '${actor.name}'`);

   return actorEntry;
 }
@@ -268,7 +267,7 @@ async function updateActor(actor, scraped = false, scrapeSuccess = false) {

   await storeSocialLinks(actor.social, actor.id);

-  logger.info(`Updated entry for actor '${actor.name}'`);
+  console.log(`Updated entry for actor '${actor.name}'`);

   return actorEntry;
 }
@@ -339,7 +338,7 @@ async function scrapeActors(actorNames) {
       const profile = await mergeProfiles(profiles, actorEntry);

       if (profile === null) {
-        logger.warn(`Could not find profile for actor '${actorName}'`);
+        console.log(`Could not find profile for actor '${actorName}'`);

         if (argv.save) {
           await updateActor(profile, true, false);
@@ -381,7 +380,7 @@ async function scrapeActors(actorNames) {
         }, newActorEntry.name);
       }
     } catch (error) {
-      logger.warn(actorName, error);
+      console.warn(actorName, error);
     }
   }, {
     concurrency: 3,
@@ -405,7 +404,7 @@ async function associateActors(mappedActors, releases) {
     knex('releases_actors').whereIn('release_id', releases.map(release => release.id)),
   ]);

-  console.log(mappedActors);
+  console.log(actorNames, actorSlugs, existingActorEntries.map(actor => actor.name));

   const associations = await Promise.map(Object.entries(mappedActors), async ([actorName, releaseIds]) => {
     try {
@@ -422,7 +421,7 @@ async function associateActors(mappedActors, releases) {
         .some(associationEntry => associationEntry.actor_id === association.actor_id
           && associationEntry.release_id === association.release_id));
     } catch (error) {
-      logger.error(actorName, error);
+      console.error(actorName, error);
       return null;
     }
   });
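Throughout these hunks, the left-hand commit logs through a shared `./logger` module while the right-hand commit uses plain `console.*` calls. The logger module itself is not part of this comparison; the sketch below is only an assumption of the minimal surface it would need (the `debug`, `info`, `warn`, and `error` methods used above), not the project's actual implementation.

```js
// Hypothetical minimal ./logger — the real module is not shown in this diff.
// It only needs the four level methods the calling code above relies on.
const levels = ['debug', 'info', 'warn', 'error'];

const logger = levels.reduce((acc, level) => ({
  ...acc,
  // console implements all four levels; fall back to console.log just in case.
  [level]: (...args) => (console[level] || console.log)(`[${level}]`, ...args),
}), {});

module.exports = logger;
```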
@@ -3,7 +3,6 @@
 const Promise = require('bluebird');
 const moment = require('moment');

-const logger = require('./logger');
 const knex = require('./knex');
 const argv = require('./argv');
 const whereOr = require('./utils/where-or');
@@ -320,6 +319,8 @@ async function storeReleaseAssets(releases) {

     await createMediaDirectory('releases', subpath);

+    console.log(release.id);
+
     try {
       // don't use Promise.all to prevent concurrency issues with duplicate detection
       if (release.poster) {
@@ -378,7 +379,7 @@ async function storeRelease(release) {

     if (updatedRelease) {
       await associateTags(release, updatedRelease.id);
-      logger.info(`Updated release "${release.title}" (${existingRelease.id}, ${release.site.name})`);
+      console.log(`Updated release "${release.title}" (${existingRelease.id}, ${release.site.name})`);
     }

     await associateTags(release, existingRelease.id);
@@ -392,7 +393,7 @@ async function storeRelease(release) {

   await associateTags(release, releaseEntry.id);

-  logger.info(`Stored release "${release.title}" (${releaseEntry.id}, ${release.site.name})`);
+  console.log(`Stored release "${release.title}" (${releaseEntry.id}, ${release.site.name})`);

   return releaseEntry.id;
 }
@@ -409,7 +410,7 @@ async function storeReleases(releases) {
         ...releaseWithChannelSite,
       };
     } catch (error) {
-      logger.error(error);
+      console.error(error);

       return null;
     }
@@ -4,7 +4,6 @@ const Promise = require('bluebird');
 const moment = require('moment');

 const argv = require('./argv');
-const logger = require('./logger');
 const knex = require('./knex');
 const { fetchIncludedSites } = require('./sites');
 const scrapers = require('./scrapers/scrapers');
@@ -43,7 +42,7 @@ async function scrapeUniqueReleases(scraper, site, afterDate = getAfterDate(), a
     .filter(release => !duplicateReleaseIds.has(String(release.entryId)) // release is already in database
       && moment(release.date).isAfter(afterDate)); // release is older than specified date limit

-  logger.info(`\x1b[90m${site.name}: Scraped page ${page}, ${uniqueReleases.length} unique recent releases\x1b[0m`);
+  console.log(`\x1b[90m${site.name}: Scraped page ${page}, ${uniqueReleases.length} unique recent releases\x1b[0m`);

   if (
     uniqueReleases.length > 0
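For clarity: despite the trailing comment, the filter above keeps releases whose date is after `afterDate` (newer than the cutoff) and whose `entryId` is not already in the database. A small standalone illustration, with sample release objects that are not from this diff:

```js
// Standalone illustration of the duplicate/date filter; sample data is invented.
const moment = require('moment');

const afterDate = moment().subtract(30, 'days').toDate();
const duplicateReleaseIds = new Set(['1001']);

const releases = [
  { entryId: 1001, date: new Date() },                             // dropped: already in database
  { entryId: 1002, date: moment().subtract(90, 'days').toDate() }, // dropped: older than the cutoff
  { entryId: 1003, date: new Date() },                             // kept
];

const uniqueReleases = releases
  .filter(release => !duplicateReleaseIds.has(String(release.entryId))
    && moment(release.date).isAfter(afterDate));

console.log(uniqueReleases.map(release => release.entryId)); // [ 1003 ]
```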
@@ -79,7 +78,7 @@ async function deepFetchReleases(baseReleases) {
         deep: true,
       };
     } catch (error) {
-      logger.error(error);
+      console.error(error);

       return {
         ...release,
@@ -100,7 +99,7 @@ async function scrapeSiteReleases(scraper, site) {
     scrapeUpcomingReleases(scraper, site), // fetch basic release info from upcoming overview
   ]);

-  logger.info(`${site.name}: Found ${newReleases.length} recent releases, ${upcomingReleases.length} upcoming releases`);
+  console.log(`${site.name}: Found ${newReleases.length} recent releases, ${upcomingReleases.length} upcoming releases`);

   const baseReleases = [...newReleases, ...upcomingReleases];

@@ -119,7 +118,7 @@ async function scrapeSites() {
     const scraper = scrapers.releases[site.slug] || scrapers.releases[site.network.slug];

     if (!scraper) {
-      logger.warn(`No scraper found for '${site.name}' (${site.slug})`);
+      console.warn(`No scraper found for '${site.name}' (${site.slug})`);
       return [];
     }

@@ -127,10 +126,10 @@ async function scrapeSites() {
       return await scrapeSiteReleases(scraper, site);
     } catch (error) {
       if (argv.debug) {
-        logger.error(`${site.name}: Failed to scrape releases`, error);
+        console.error(`${site.name}: Failed to scrape releases`, error);
       }

-      logger.warn(`${site.id}: Failed to scrape releases`);
+      console.warn(`${site.id}: Failed to scrape releases`);

       return [];
     }
@@ -112,7 +112,7 @@ async function fetchLatest(site, page = 1, upcoming = false) {
     requests: [
       {
         indexName: 'all_scenes',
-        params: `query=&hitsPerPage=36&maxValuesPerFacet=100&page=${page - 1}&facetFilters=[["lesbian:"],["bisex:"],["shemale:"],["upcoming:${upcoming ? 1 : 0}"]]`,
+        params: `query=&hitsPerPage=36&maxValuesPerFacet=100&page=${page - 1}&facetFilters=[["lesbian:"],["bisex:"],["shemale:"],["upcoming:${upcoming ? '' : 0}"]]`,
       },
     ],
   }, {
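The only functional change in this hunk is the final facet filter. A quick illustration of the string each side produces for the `upcoming` facet (the rest of the Algolia request is assumed unchanged):

```js
// Illustration of the differing facetFilters fragment; not part of the diff itself.
const upcoming = true;

const leftSide = `["upcoming:${upcoming ? 1 : 0}"]`;   // '["upcoming:1"]'
const rightSide = `["upcoming:${upcoming ? '' : 0}"]`; // '["upcoming:"]' — empty facet value

console.log(leftSide, rightSide);
```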
@@ -1,7 +1,6 @@
 'use strict';

 const knex = require('../knex');
-const logger = require('../logger');

 async function upsert(table, items, identifier = ['id'], _knex) {
   const identifiers = Array.isArray(identifier) ? identifier : [identifier];
@@ -29,8 +28,8 @@ async function upsert(table, items, identifier = ['id'], _knex) {
   });

   if (knex) {
-    logger.debug(`${table}: Inserting ${insert.length}`);
-    logger.debug(`${table}: Updating ${update.length}`);
+    console.log(`${table}: Inserting ${insert.length}`);
+    console.log(`${table}: Updating ${update.length}`);

     const [inserted, updated] = await Promise.all([
       knex(table).returning('*').insert(insert),
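Based only on the signature visible above, `upsert(table, items, identifier, _knex)`, and the insert/update split it logs, a caller might look like the sketch below. The require path, table name, and identifier column are illustrative assumptions, not values taken from this comparison.

```js
// Hypothetical caller of the upsert helper; path, table, and identifier column are assumed.
const upsert = require('./utils/upsert');

async function saveActors(profiles) {
  // Rows whose 'slug' already exists go into the update batch; the rest are inserted.
  return upsert('actors', profiles, 'slug');
}
```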