diff --git a/docs/julesjordan.md b/docs/julesjordan.md
new file mode 100644
index 00000000..9839761d
--- /dev/null
+++ b/docs/julesjordan.md
@@ -0,0 +1,36 @@
+## Distributed by Jules Jordan Video
+* The Ass Factory
+* Kink.com
+* Amateur Allure
+* Vince Voyeur Unleashed
+* Morally Corrupt
+* Trench Coat X
+* Live Gonzo
+* Sins
+
+## Manuel Ferrara exclusives
+These DVDs supposedly do not appear on the Jules Jordan main website
+
+Anal Nymphos Anal Legends http://manuelferrara.com/members/dvds/anal-nymphos-anal-legends.html
+Anal Nymphos Anal Legends #2 http://manuelferrara.com/members/dvds/anal-nymphos-anal-legends-2.html
+Anal Nymphos Anal Legends #3 http://manuelferrara.com/members/dvds/anal-nymphos-anal-legends-3.html
+Anal Nymphos Anal Legends #4 http://manuelferrara.com/members/dvds/anal-nymphos-anal-legends-4.html
+Big Ass Tits http://manuelferrara.com/members/dvds/big-ass-tits.html
+Bounce #1 http://manuelferrara.com/members/dvds/bounce.html
+Bounce #2 http://manuelferrara.com/members/dvds/bounce-2.html
+Bounce #3 http://manuelferrara.com/members/dvds/bounce-3.html
+Bounce #4 http://manuelferrara.com/members/dvds/bounce-4.html
+Dirty Talk #6 http://manuelferrara.com/members/dvds/dirty-talk-6.html
+Dirty Talk #7 http://manuelferrara.com/members/dvds/dirty-talk-7.html
+Pretty Little Sluts http://manuelferrara.com/members/dvds/pretty-little-sluts.html
+Pretty Little Sluts #2 http://manuelferrara.com/members/dvds/pretty-little-sluts-2.html
+Raw #30 http://manuelferrara.com/members/dvds/raw-30.html
+Raw #31 http://manuelferrara.com/members/dvds/raw31.html
+Raw #32 http://manuelferrara.com/members/dvds/raw-32.html
+Raw #33 http://manuelferrara.com/members/dvds/raw-33.html
+Raw #34 http://manuelferrara.com/members/dvds/raw-34.html
+Raw #35 http://manuelferrara.com/members/dvds/raw-35.html
+Raw #36 http://manuelferrara.com/members/dvds/raw-36.html
+Raw #37 http://manuelferrara.com/members/dvds/raw-37.html
+Raw #38 http://manuelferrara.com/members/dvds/raw-38.html
+Sex Machines http://manuelferrara.com/members/dvds/sex-machines.html
diff --git a/public/img/logos/kink/kink-dvd.png b/public/img/logos/kink/kink-dvd.png
new file mode 100644
index 00000000..bfec6502
Binary files /dev/null and b/public/img/logos/kink/kink-dvd.png differ
diff --git a/src/actors.js b/src/actors.js
index 45f8e2b6..b85efde7 100644
--- a/src/actors.js
+++ b/src/actors.js
@@ -4,6 +4,7 @@ const Promise = require('bluebird');
 const UrlPattern = require('url-pattern');
 const moment = require('moment');
 
+const logger = require('./logger');
 const knex = require('./knex');
 const argv = require('./argv');
 const scrapers = require('./scrapers/scrapers');
@@ -252,7 +253,7 @@ async function storeActor(actor, scraped = false, scrapeSuccess = false) {
 
   await storeSocialLinks(actor.social, actorEntry.id);
 
-  console.log(`Added new entry for actor '${actor.name}'`);
+  logger.info(`Added new entry for actor '${actor.name}'`);
 
   return actorEntry;
 }
@@ -267,7 +268,7 @@ async function updateActor(actor, scraped = false, scrapeSuccess = false) {
 
   await storeSocialLinks(actor.social, actor.id);
 
-  console.log(`Updated entry for actor '${actor.name}'`);
+  logger.info(`Updated entry for actor '${actor.name}'`);
 
   return actorEntry;
 }
@@ -338,7 +339,7 @@ async function scrapeActors(actorNames) {
       const profile = await mergeProfiles(profiles, actorEntry);
 
       if (profile === null) {
-        console.log(`Could not find profile for actor '${actorName}'`);
+        logger.warn(`Could not find profile for actor '${actorName}'`);
 
         if (argv.save) {
           await updateActor(profile, true, false);
@@ -380,7 +381,7 @@ async function scrapeActors(actorNames) {
         }, newActorEntry.name);
       }
     } catch (error) {
-      console.warn(actorName, error);
+      logger.warn(actorName, error);
     }
   }, {
     concurrency: 3,
@@ -404,7 +405,7 @@ async function associateActors(mappedActors, releases) {
     knex('releases_actors').whereIn('release_id', releases.map(release => release.id)),
   ]);
 
-  console.log(actorNames, actorSlugs, existingActorEntries.map(actor => actor.name));
+  console.log(mappedActors);
 
   const associations = await Promise.map(Object.entries(mappedActors), async ([actorName, releaseIds]) => {
     try {
@@ -421,7 +422,7 @@
         .some(associationEntry => associationEntry.actor_id === association.actor_id && associationEntry.release_id === association.release_id));
     } catch (error) {
-      console.error(actorName, error);
+      logger.error(actorName, error);
 
      return null;
    }
  });
diff --git a/src/releases.js b/src/releases.js
index 0ca6dcaa..adbf2a79 100644
--- a/src/releases.js
+++ b/src/releases.js
@@ -3,6 +3,7 @@
 const Promise = require('bluebird');
 const moment = require('moment');
 
+const logger = require('./logger');
 const knex = require('./knex');
 const argv = require('./argv');
 const whereOr = require('./utils/where-or');
@@ -319,8 +320,6 @@ async function storeReleaseAssets(releases) {
 
     await createMediaDirectory('releases', subpath);
 
-    console.log(release.id);
-
     try {
       // don't use Promise.all to prevent concurrency issues with duplicate detection
       if (release.poster) {
@@ -379,7 +378,7 @@ async function storeRelease(release) {
     if (updatedRelease) {
       await associateTags(release, updatedRelease.id);
 
-      console.log(`Updated release "${release.title}" (${existingRelease.id}, ${release.site.name})`);
+      logger.info(`Updated release "${release.title}" (${existingRelease.id}, ${release.site.name})`);
     }
 
     await associateTags(release, existingRelease.id);
@@ -393,7 +392,7 @@
 
   await associateTags(release, releaseEntry.id);
 
-  console.log(`Stored release "${release.title}" (${releaseEntry.id}, ${release.site.name})`);
+  logger.info(`Stored release "${release.title}" (${releaseEntry.id}, ${release.site.name})`);
 
   return releaseEntry.id;
 }
@@ -410,7 +409,7 @@
         ...releaseWithChannelSite,
       };
     } catch (error) {
-      console.error(error);
+      logger.error(error);
 
       return null;
     }
diff --git a/src/scrape-sites.js b/src/scrape-sites.js
index f7cdf66d..9070346b 100644
--- a/src/scrape-sites.js
+++ b/src/scrape-sites.js
@@ -4,6 +4,7 @@ const Promise = require('bluebird');
 const moment = require('moment');
 
 const argv = require('./argv');
+const logger = require('./logger');
 const knex = require('./knex');
 const { fetchIncludedSites } = require('./sites');
 const scrapers = require('./scrapers/scrapers');
@@ -42,7 +43,7 @@ async function scrapeUniqueReleases(scraper, site, afterDate = getAfterDate(), a
     .filter(release => !duplicateReleaseIds.has(String(release.entryId)) // release is already in database
       && moment(release.date).isAfter(afterDate)); // release is older than specified date limit
 
-  console.log(`\x1b[90m${site.name}: Scraped page ${page}, ${uniqueReleases.length} unique recent releases\x1b[0m`);
+  logger.info(`\x1b[90m${site.name}: Scraped page ${page}, ${uniqueReleases.length} unique recent releases\x1b[0m`);
 
   if (
     uniqueReleases.length > 0
@@ -78,7 +79,7 @@ async function deepFetchReleases(baseReleases) {
         deep: true,
       };
     } catch (error) {
-      console.error(error);
+      logger.error(error);
 
       return {
         ...release,
@@ -99,7 +100,7 @@ async function scrapeSiteReleases(scraper, site) {
     scrapeUpcomingReleases(scraper, site), // fetch basic release info from upcoming overview
   ]);
 
-  console.log(`${site.name}: Found ${newReleases.length} recent releases, ${upcomingReleases.length} upcoming releases`);
+  logger.info(`${site.name}: Found ${newReleases.length} recent releases, ${upcomingReleases.length} upcoming releases`);
 
   const baseReleases = [...newReleases, ...upcomingReleases];
 
@@ -118,7 +119,7 @@ async function scrapeSites() {
     const scraper = scrapers.releases[site.slug] || scrapers.releases[site.network.slug];
 
     if (!scraper) {
-      console.warn(`No scraper found for '${site.name}' (${site.slug})`);
+      logger.warn(`No scraper found for '${site.name}' (${site.slug})`);
 
       return [];
     }
@@ -126,10 +127,10 @@
       return await scrapeSiteReleases(scraper, site);
     } catch (error) {
       if (argv.debug) {
-        console.error(`${site.name}: Failed to scrape releases`, error);
+        logger.error(`${site.name}: Failed to scrape releases`, error);
       }
 
-      console.warn(`${site.id}: Failed to scrape releases`);
+      logger.warn(`${site.id}: Failed to scrape releases`);
 
       return [];
     }
diff --git a/src/scrapers/evilangel.js b/src/scrapers/evilangel.js
index e9a1461c..27374c48 100644
--- a/src/scrapers/evilangel.js
+++ b/src/scrapers/evilangel.js
@@ -112,7 +112,7 @@ async function fetchLatest(site, page = 1, upcoming = false) {
     requests: [
       {
         indexName: 'all_scenes',
-        params: `query=&hitsPerPage=36&maxValuesPerFacet=100&page=${page - 1}&facetFilters=[["lesbian:"],["bisex:"],["shemale:"],["upcoming:${upcoming ? '' : 0}"]]`,
+        params: `query=&hitsPerPage=36&maxValuesPerFacet=100&page=${page - 1}&facetFilters=[["lesbian:"],["bisex:"],["shemale:"],["upcoming:${upcoming ? 1 : 0}"]]`,
       },
     ],
   }, {
diff --git a/src/utils/upsert.js b/src/utils/upsert.js
index e8230697..eca86605 100644
--- a/src/utils/upsert.js
+++ b/src/utils/upsert.js
@@ -1,6 +1,7 @@
 'use strict';
 
 const knex = require('../knex');
+const logger = require('../logger');
 
 async function upsert(table, items, identifier = ['id'], _knex) {
   const identifiers = Array.isArray(identifier) ? identifier : [identifier];
@@ -28,8 +29,8 @@ async function upsert(table, items, identifier = ['id'], _knex) {
   });
 
   if (knex) {
-    console.log(`${table}: Inserting ${insert.length}`);
-    console.log(`${table}: Updating ${update.length}`);
+    logger.debug(`${table}: Inserting ${insert.length}`);
+    logger.debug(`${table}: Updating ${update.length}`);
 
     const [inserted, updated] = await Promise.all([
      knex(table).returning('*').insert(insert),
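
Note on the logging change: every file touched above now requires a shared src/logger.js module, but that module itself is not part of this diff. Below is a minimal sketch of what it might look like, assuming a plain console-backed logger with a LOG_LEVEL threshold. This is a hypothetical illustration, not the project's actual implementation; all the patch tells us is that the module exposes debug, info, warn and error methods.

// src/logger.js -- hypothetical sketch, NOT included in this diff
'use strict';

// Levels in ascending severity; calls below the configured threshold are dropped.
const LEVELS = ['debug', 'info', 'warn', 'error'];
const threshold = process.env.LOG_LEVEL || 'info';

function log(level, ...args) {
  if (LEVELS.indexOf(level) < LEVELS.indexOf(threshold)) return;

  // warn and error keep their native console channels (stderr); the rest go to stdout
  const method = (level === 'warn' || level === 'error') ? level : 'log';
  console[method](`[${new Date().toISOString()}] ${level.toUpperCase()}`, ...args);
}

// Expose logger.debug(...), logger.info(...), logger.warn(...), logger.error(...)
module.exports = LEVELS.reduce((logger, level) => ({
  ...logger,
  [level]: (...args) => log(level, ...args),
}), {});

Under a sketch like this, the logger.debug calls introduced in src/utils/upsert.js stay silent at the default threshold, which fits the apparent intent of demoting the per-table insert/update counts from regular output to debug detail.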
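Note on the src/scrapers/evilangel.js change: with the old expression upcoming ? '' : 0, a request for upcoming scenes built the Algolia facet filter ["upcoming:"] with an empty value, which presumably never matched anything; upcoming ? 1 : 0 builds ["upcoming:1"] instead, while the non-upcoming case still sends ["upcoming:0"] as before.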