forked from DebaucheryLibrarian/traxxx

Added Interracial Pass sites. Fixed Hush removing poster from base release.

parent f16251dc78
commit 1407700511
(Binary file not shown: added image, 1.4 KiB.)
(Binary file not shown: added image, 931 B.)
@@ -2189,13 +2189,14 @@ const sites = [
         },
     },
     {
-        slug: 'buttnakedinstreets',
+        slug: 'buttnakedinthestreets',
         name: 'Butt Naked In The Streets',
         url: 'https://buttnakedinthestreets.com',
         network: 'hushpass',
         parameters: {
             latest: 'https://hushpass.com/t1/categories/ButtNakedInStreets_%d_d.html',
             media: 'https://hushpass.com',
+            match: 'Butt Naked In Streets',
             t1: true,
         },
     },
@@ -2264,6 +2265,89 @@ const sites = [
             t1: true,
         },
     },
+    {
+        slug: '2bigtobetrue',
+        name: '2 Big To Be True',
+        url: 'https://www.2bigtobetrue.com/',
+        tags: ['interracial'],
+        network: 'interracialpass',
+        parameters: {
+            latest: 'https://www.interracialpass.com/t1/categories/2-big-to-be-true_%d_d.html',
+            media: 'https://www.interracialpass.com',
+            t1: true,
+        },
+    },
+    {
+        slug: 'abominableblackman',
+        name: 'Abominable Black Man',
+        url: 'https://www.abominableblackman.com/',
+        tags: ['interracial'],
+        network: 'interracialpass',
+        parameters: {
+            latest: 'https://www.interracialpass.com/t1/categories/abominable-black-man_%d_d.html',
+            media: 'https://www.interracialpass.com',
+            t1: true,
+        },
+    },
+    {
+        slug: 'bootyannihilation',
+        name: 'Booty Annihilation',
+        tags: ['interracial'],
+        network: 'interracialpass',
+        parameters: {
+            latest: 'https://www.interracialpass.com/t1/categories/BootyAnnihilation_%d_d.html',
+            media: 'https://www.interracialpass.com',
+            t1: true,
+        },
+    },
+    {
+        slug: 'daddysworstnightmare',
+        name: 'Daddy\'s Worst Nightmare',
+        url: 'https://www.daddysworstnightmare.com/',
+        tags: ['interracial'],
+        network: 'interracialpass',
+        parameters: {
+            latest: 'https://www.interracialpass.com/t1/categories/daddys-worst-nightmare_%d_d.html',
+            media: 'https://www.interracialpass.com',
+            t1: true,
+        },
+    },
+    {
+        slug: 'monstercockfuckfest',
+        name: 'Monster Cock Fuck Fest',
+        url: 'https://www.monstercockfuckfest.com/',
+        tags: ['interracial'],
+        network: 'interracialpass',
+        parameters: {
+            latest: 'https://www.interracialpass.com/t1/categories/monster-cock-fuck-fest_%d_d.html',
+            media: 'https://www.interracialpass.com',
+            t1: true,
+        },
+    },
+    {
+        slug: 'mymomsfuckingblackzilla',
+        name: 'My Mom\'s Fucking Blackzilla',
+        url: 'https://www.mymomsfuckingblackzilla.com/',
+        tags: ['interracial'],
+        network: 'interracialpass',
+        parameters: {
+            latest: 'https://www.interracialpass.com/t1/categories/my-moms-fucking-blackzilla_%d_d.html',
+            media: 'https://www.interracialpass.com',
+            t1: true,
+        },
+    },
+    {
+        slug: 'mywifesfirstmonstercock',
+        name: 'My Wife\'s First Monster Cock',
+        url: 'https://www.mywifesfirstmonstercock.com/',
+        tags: ['interracial'],
+        network: 'interracialpass',
+        parameters: {
+            latest: 'https://www.interracialpass.com/t1/categories/my-wifes-first-monster-cock_%d_d.html',
+            media: 'https://www.interracialpass.com',
+            t1: true,
+        },
+    },
     // INSEX
     {
         slug: 'sexuallybroken',
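
For context on the new entries: each `latest` value is a printf-style URL template, and the T1 scraper expands `%d` with the page number via `util.format` (see `fetchLatest` in the scraper diff further down). A minimal sketch of that expansion, using one of the site objects added above as input:

const util = require('util');

// One of the Interracial Pass entries added in this commit.
const site = {
    url: 'https://www.2bigtobetrue.com/',
    parameters: {
        latest: 'https://www.interracialpass.com/t1/categories/2-big-to-be-true_%d_d.html',
        t1: true,
    },
};

// Mirrors the URL derivation in the scraper's fetchLatest: use the explicit
// template when present, otherwise fall back to the generic T1 listing path.
function latestUrl(site, page = 1) {
    return (site.parameters?.latest && util.format(site.parameters.latest, page))
        || (site.parameters?.t1 && `${site.url}/t1/categories/movies_${page}_d.html`);
}

console.log(latestUrl(site, 3));
// -> https://www.interracialpass.com/t1/categories/2-big-to-be-true_3_d.html
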
@@ -37,12 +37,12 @@ async function findDuplicateReleaseIds(latestReleases, accReleases) {
         .concat(accReleases.map(release => String(release.entryId))));
 }
 
-async function scrapeUniqueReleases(scraper, site, preflight, afterDate = getAfterDate(), accReleases = [], page = argv.page) {
+async function scrapeUniqueReleases(scraper, site, beforeFetchLatest, afterDate = getAfterDate(), accReleases = [], page = argv.page) {
     if (!argv.latest || !scraper.fetchLatest) {
         return [];
     }
 
-    const latestReleases = await scraper.fetchLatest(site, page, preflight);
+    const latestReleases = await scraper.fetchLatest(site, page, beforeFetchLatest);
 
     if (!Array.isArray(latestReleases)) {
         logger.warn(`Scraper returned ${latestReleases || 'null'} when fetching latest from '${site.name}' on '${site.network.name}'`);
@@ -73,7 +73,7 @@ async function scrapeUniqueReleases(scraper, site, preflight, afterDate = getAft
             || (argv.last && accReleases.length + uniqueReleases.length < argv.last))
     ) {
         // oldest release on page is newer that specified date range, or latest count has not yet been met, fetch next page
-        return scrapeUniqueReleases(scraper, site, preflight, afterDate, accReleases.concat(uniqueReleases), page + 1);
+        return scrapeUniqueReleases(scraper, site, beforeFetchLatest, afterDate, accReleases.concat(uniqueReleases), page + 1);
     }
 
     if (argv.last && uniqueReleases.length >= argv.last) {
@@ -87,9 +87,9 @@ async function scrapeUniqueReleases(scraper, site, preflight, afterDate = getAft
     return accReleases.concat(uniqueReleases).slice(0, argv.nullDateLimit);
 }
 
-async function scrapeUpcomingReleases(scraper, site, preflight) {
+async function scrapeUpcomingReleases(scraper, site, beforeFetchLatest) {
     if (argv.upcoming && scraper.fetchUpcoming) {
-        const upcomingReleases = await scraper.fetchUpcoming(site, 1, preflight);
+        const upcomingReleases = await scraper.fetchUpcoming(site, 1, beforeFetchLatest);
 
         return upcomingReleases
             ? upcomingReleases.map(release => ({ ...release, site, upcoming: true }))
@@ -99,11 +99,11 @@ async function scrapeUpcomingReleases(scraper, site, preflight) {
     return [];
 }
 
-async function deepFetchReleases(baseReleases, preflight) {
+async function deepFetchReleases(baseReleases, beforeFetchLatest) {
     return Promise.map(baseReleases, async (release) => {
         if (release.url || (release.path && release.site)) {
             try {
-                const fullRelease = await scrapeRelease(release.url, release, 'scene', preflight);
+                const fullRelease = await scrapeRelease(release.url, release, 'scene', beforeFetchLatest);
 
                 if (fullRelease) {
                     return {
@@ -133,11 +133,11 @@ async function deepFetchReleases(baseReleases, preflight) {
 }
 
 async function scrapeSiteReleases(scraper, site) {
-    const preflight = await scraper.preflight?.(site);
+    const beforeFetchLatest = await scraper.beforeFetchLatest?.(site);
 
     const [newReleases, upcomingReleases] = await Promise.all([
-        scrapeUniqueReleases(scraper, site, preflight), // fetch basic release info from scene overview
-        scrapeUpcomingReleases(scraper, site, preflight), // fetch basic release info from upcoming overview
+        scrapeUniqueReleases(scraper, site, beforeFetchLatest), // fetch basic release info from scene overview
+        scrapeUpcomingReleases(scraper, site, beforeFetchLatest), // fetch basic release info from upcoming overview
     ]);
 
     if (argv.upcoming) {
@@ -148,7 +148,7 @@ async function scrapeSiteReleases(scraper, site) {
 
     if (argv.deep) {
         // follow URL for every release
-        return deepFetchReleases(baseReleases, preflight);
+        return deepFetchReleases(baseReleases, beforeFetchLatest);
     }
 
     return baseReleases;
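
The `preflight` to `beforeFetchLatest` rename makes the hook's contract explicit: `scrapeSiteReleases` awaits the scraper's optional `beforeFetchLatest(site)` once per site, then threads the resolved value into `fetchLatest`, `fetchUpcoming`, and deep scene fetches. A minimal sketch of a scraper opting into the hook; the names and payload here are illustrative, not from this commit:

// Hypothetical scraper module demonstrating the beforeFetchLatest contract.
async function beforeFetchLatest(site) {
    // Runs once per site before any fetching; the resolved value is handed
    // back to every fetchLatest/fetchUpcoming/fetchScene call for that site.
    return { greeting: `hello from ${site.name}` }; // e.g. shared lookup data
}

async function fetchLatest(site, page = 1, context) {
    // context is whatever beforeFetchLatest(site) resolved to.
    console.log(`page ${page}:`, context?.greeting);
    return []; // release stubs would go here
}

module.exports = { beforeFetchLatest, fetchLatest };
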
@@ -117,8 +117,8 @@ async function fetchLatest(site, page = 1, models) {
     return qLatest && scrapeLatest(qLatest, site, models);
 }
 
-async function fetchScene(url, site, release, preflight) {
-    const models = preflight || await fetchModels(site);
+async function fetchScene(url, site, release, beforeFetchLatest) {
+    const models = beforeFetchLatest || await fetchModels(site);
     const qScene = await get(url);
 
     return qScene && scrapeScene(qScene, url, site, models);
@@ -127,5 +127,5 @@ async function fetchScene(url, site, release, preflight) {
 module.exports = {
     fetchLatest,
     fetchScene,
-    preflight: fetchModels,
+    beforeFetchLatest: fetchModels,
 };
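
Design note: exporting `fetchModels` as `beforeFetchLatest` means the model list is fetched once per site by `scrapeSiteReleases` and reused across every scene of that site, while the `beforeFetchLatest || await fetchModels(site)` fallback keeps `fetchScene` working when it is invoked without the precomputed value.
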
@@ -2,6 +2,7 @@
 
 const util = require('util');
 
+const knex = require('../knex');
 const { get, geta, fd } = require('../utils/q');
 const slugify = require('../utils/slugify');
 
@@ -22,7 +23,7 @@ function extractPoster(posterPath, site, baseRelease) {
         return [posterSources, []];
     }
 
-    return [null, []];
+    return [baseRelease?.poster || null, []];
 }
 
 function scrapeLatest(scenes, site) {
@@ -59,8 +60,8 @@ function scrapeLatestT1(scenes, site) {
         release.date = qd('.more-info-div', 'MMM D, YYYY');
         release.duration = ql('.more-info-div');
 
-        release.entryId = q('.img-div img', 'id')?.match(/set-target-(\d+)/)[1]
-            || `${slugify(fd(release.date, 'YYYY-MM-DD'))}-${slugify(release.title)}`;
+        // release.entryId = q('.img-div img', 'id')?.match(/set-target-(\d+)/)[1];
+        release.entryId = `${slugify(fd(release.date, 'YYYY-MM-DD'))}-${slugify(release.title)}`;
 
         const posterPath = q('.img-div img', 'src0_1x') || qi('img.video_placeholder');
 
@@ -102,7 +103,7 @@ function scrapeScene({ html, q, qa, qd, ql }, site, url, baseRelease) {
     return release;
 }
 
-function scrapeSceneT1({ html, q, qa, qd, ql, qtx }, site, url, baseRelease) {
+function scrapeSceneT1({ html, q, qa, qd, ql, qtx }, site, url, baseRelease, channelRegExp) {
     const release = { url };
 
     release.title = q('.trailer-section-head .section-title', true);
@@ -111,8 +112,8 @@ function scrapeSceneT1({ html, q, qa, qd, ql, qtx }, site, url, baseRelease) {
     release.date = qd('.update-info-row', 'MMM D, YYYY', /\w+ \d{1,2}, \d{4}/);
     release.duration = ql('.update-info-row:nth-child(2)');
 
-    release.entryId = q('.player-thumb img', 'id').match(/set-target-(\d+)/)[1]
-        || `${slugify(fd(release.date, 'YYYY-MM-DD'))}-${slugify(release.title)}`;
+    // release.entryId = q('.player-thumb img', 'id')?.match(/set-target-(\d+)/)[1];
+    release.entryId = `${slugify(fd(release.date, 'YYYY-MM-DD'))}-${slugify(release.title)}`;
 
     release.actors = qa('.models-list-thumbs a').map(el => ({
         name: q(el, 'span', true),
@@ -136,8 +137,8 @@ function scrapeSceneT1({ html, q, qa, qd, ql, qtx }, site, url, baseRelease) {
     const stars = q('.update-rating', true).match(/\d.\d/)?.[0];
     if (stars) release.stars = Number(stars);
 
-    if (site.slug === 'hushpass') {
-        const channel = release.tags.find(tag => /Shot Her First|WhiteZilla|Frat House Fuck Fest|Freaky First Timers|MILF Invaders|Housewives Need Cash|Bubble Butt Bonanza|Suburban Sex Party|Butt Naked In Streets/i.test(tag));
+    if (channelRegExp) {
+        const channel = release.tags.find(tag => channelRegExp.test(tag));
 
         if (channel) {
             release.channel = {
@@ -150,6 +151,14 @@ function scrapeSceneT1({ html, q, qa, qd, ql, qtx }, site, url, baseRelease) {
     return release;
 }
 
+async function getChannelRegExp(site) {
+    if (!['hushpass', 'interracialpass'].includes(site.network.slug)) return null;
+
+    const sites = await knex('sites').where('network_id', site.network.id);
+
+    return new RegExp(sites.map(channel => channel.parameters?.match || channel.name).join('|'), 'i');
+}
+
 async function fetchLatest(site, page = 1) {
     const url = (site.parameters?.latest && util.format(site.parameters.latest, page))
         || (site.parameters?.t1 && `${site.url}/t1/categories/movies_${page}_d.html`)
@@ -162,16 +171,17 @@ async function fetchLatest(site, page = 1) {
     return site.parameters?.t1 ? scrapeLatestT1(qLatest, site) : scrapeLatest(qLatest, site);
 }
 
-async function fetchScene(url, site, baseRelease) {
+async function fetchScene(url, site, baseRelease, beforeFetchLatest) {
+    const channelRegExp = beforeFetchLatest || await getChannelRegExp(site);
     const qScene = await get(url);
 
     if (!qScene) return null;
 
-    return site.parameters?.t1 ? scrapeSceneT1(qScene, site, url, baseRelease) : scrapeScene(qScene, site, url, baseRelease);
+    return site.parameters?.t1 ? scrapeSceneT1(qScene, site, url, baseRelease, channelRegExp) : scrapeScene(qScene, site, url, baseRelease);
 }
 
 module.exports = {
-    // preflight,
+    beforeFetchLatest: getChannelRegExp,
     fetchLatest,
     fetchScene,
 };
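
The new `getChannelRegExp` replaces the channel list previously hardcoded for Hush: it reads every site in the scene's network from the database and builds one case-insensitive alternation from each channel's `parameters.match` override (such as the 'Butt Naked In Streets' entry added in the seed above) or, failing that, its name. A sketch of the resulting matching, with an assumed two-row query result:

// Assumed rows as knex('sites') might return them for the hushpass network;
// only the fields the builder reads are shown.
const rows = [
    { name: 'WhiteZilla', parameters: null },
    { name: 'Butt Naked In The Streets', parameters: { match: 'Butt Naked In Streets' } },
];

// Same construction as getChannelRegExp: prefer the match override.
const channelRegExp = new RegExp(rows.map(channel => channel.parameters?.match || channel.name).join('|'), 'i');
// -> /WhiteZilla|Butt Naked In Streets/i

// scrapeSceneT1 then attributes the scene to a channel via its tag list.
const tags = ['Interracial', 'Butt Naked In Streets'];
const channel = tags.find(tag => channelRegExp.test(tag));
console.log(channel); // 'Butt Naked In Streets'
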