forked from DebaucheryLibrarian/traxxx
				
Added Interracial Pass sites. Fixed the Hush scraper removing the poster from the base release.

parent f16251dc78
commit 1407700511
										
Binary file not shown (added, 1.4 KiB).
Binary file not shown (added, 931 B).
@@ -2189,13 +2189,14 @@ const sites = [
         },
     },
     {
-        slug: 'buttnakedinstreets',
+        slug: 'buttnakedinthestreets',
         name: 'Butt Naked In The Streets',
         url: 'https://buttnakedinthestreets.com',
         network: 'hushpass',
         parameters: {
             latest: 'https://hushpass.com/t1/categories/ButtNakedInStreets_%d_d.html',
             media: 'https://hushpass.com',
+            match: 'Butt Naked In Streets',
             t1: true,
         },
     },
@@ -2264,6 +2265,89 @@ const sites = [
             t1: true,
         },
     },
+    {
+        slug: '2bigtobetrue',
+        name: '2 Big To Be True',
+        url: 'https://www.2bigtobetrue.com/',
+        tags: ['interracial'],
+        network: 'interracialpass',
+        parameters: {
+            latest: 'https://www.interracialpass.com/t1/categories/2-big-to-be-true_%d_d.html',
+            media: 'https://www.interracialpass.com',
+            t1: true,
+        },
+    },
+    {
+        slug: 'abominableblackman',
+        name: 'Abominable Black Man',
+        url: 'https://www.abominableblackman.com/',
+        tags: ['interracial'],
+        network: 'interracialpass',
+        parameters: {
+            latest: 'https://www.interracialpass.com/t1/categories/abominable-black-man_%d_d.html',
+            media: 'https://www.interracialpass.com',
+            t1: true,
+        },
+    },
+    {
+        slug: 'bootyannihilation',
+        name: 'Booty Annihilation',
+        tags: ['interracial'],
+        network: 'interracialpass',
+        parameters: {
+            latest: 'https://www.interracialpass.com/t1/categories/BootyAnnihilation_%d_d.html',
+            media: 'https://www.interracialpass.com',
+            t1: true,
+        },
+    },
+    {
+        slug: 'daddysworstnightmare',
+        name: 'Daddy\'s Worst Nightmare',
+        url: 'https://www.daddysworstnightmare.com/',
+        tags: ['interracial'],
+        network: 'interracialpass',
+        parameters: {
+            latest: 'https://www.interracialpass.com/t1/categories/daddys-worst-nightmare_%d_d.html',
+            media: 'https://www.interracialpass.com',
+            t1: true,
+        },
+    },
+    {
+        slug: 'monstercockfuckfest',
+        name: 'Monster Cock Fuck Fest',
+        url: 'https://www.monstercockfuckfest.com/',
+        tags: ['interracial'],
+        network: 'interracialpass',
+        parameters: {
+            latest: 'https://www.interracialpass.com/t1/categories/monster-cock-fuck-fest_%d_d.html',
+            media: 'https://www.interracialpass.com',
+            t1: true,
+        },
+    },
+    {
+        slug: 'mymomsfuckingblackzilla',
+        name: 'My Mom\'s Fucking Blackzilla',
+        url: 'https://www.mymomsfuckingblackzilla.com/',
+        tags: ['interracial'],
+        network: 'interracialpass',
+        parameters: {
+            latest: 'https://www.interracialpass.com/t1/categories/my-moms-fucking-blackzilla_%d_d.html',
+            media: 'https://www.interracialpass.com',
+            t1: true,
+        },
+    },
+    {
+        slug: 'mywifesfirstmonstercock',
+        name: 'My Wife\'s First Monster Cock',
+        url: 'https://www.mywifesfirstmonstercock.com/',
+        tags: ['interracial'],
+        network: 'interracialpass',
+        parameters: {
+            latest: 'https://www.interracialpass.com/t1/categories/my-wifes-first-monster-cock_%d_d.html',
+            media: 'https://www.interracialpass.com',
+            t1: true,
+        },
+    },
     // INSEX
     {
         slug: 'sexuallybroken',
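The seed changes above fix the Butt Naked In The Streets slug to match its domain, add a `match` parameter for channel-tag matching (the on-site tag omits "The"), and register seven Interracial Pass channels. Each `parameters.latest` value is a printf-style URL template: the scraper expands `%d` to the page number with `util.format`, as the T1 scraper's `fetchLatest` further below shows. A minimal sketch:

const util = require('util');

// parameters.latest from the '2 Big To Be True' entry above.
const latestTemplate = 'https://www.interracialpass.com/t1/categories/2-big-to-be-true_%d_d.html';

// util.format substitutes the page number for the %d placeholder.
const url = util.format(latestTemplate, 3);
// => 'https://www.interracialpass.com/t1/categories/2-big-to-be-true_3_d.html'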
@@ -37,12 +37,12 @@ async function findDuplicateReleaseIds(latestReleases, accReleases) {
         .concat(accReleases.map(release => String(release.entryId))));
 }
 
-async function scrapeUniqueReleases(scraper, site, preflight, afterDate = getAfterDate(), accReleases = [], page = argv.page) {
+async function scrapeUniqueReleases(scraper, site, beforeFetchLatest, afterDate = getAfterDate(), accReleases = [], page = argv.page) {
     if (!argv.latest || !scraper.fetchLatest) {
         return [];
     }
 
-    const latestReleases = await scraper.fetchLatest(site, page, preflight);
+    const latestReleases = await scraper.fetchLatest(site, page, beforeFetchLatest);
 
     if (!Array.isArray(latestReleases)) {
         logger.warn(`Scraper returned ${latestReleases || 'null'} when fetching latest from '${site.name}' on '${site.network.name}'`);
@@ -73,7 +73,7 @@ async function scrapeUniqueReleases(scraper, site, preflight, afterDate = getAft
             || (argv.last && accReleases.length + uniqueReleases.length < argv.last))
     ) {
         // oldest release on page is newer than specified date range, or latest count has not yet been met, fetch next page
-        return scrapeUniqueReleases(scraper, site, preflight, afterDate, accReleases.concat(uniqueReleases), page + 1);
+        return scrapeUniqueReleases(scraper, site, beforeFetchLatest, afterDate, accReleases.concat(uniqueReleases), page + 1);
     }
 
     if (argv.last && uniqueReleases.length >= argv.last) {
@@ -87,9 +87,9 @@ async function scrapeUniqueReleases(scraper, site, preflight, afterDate = getAft
     return accReleases.concat(uniqueReleases).slice(0, argv.nullDateLimit);
 }
 
-async function scrapeUpcomingReleases(scraper, site, preflight) {
+async function scrapeUpcomingReleases(scraper, site, beforeFetchLatest) {
     if (argv.upcoming && scraper.fetchUpcoming) {
-        const upcomingReleases = await scraper.fetchUpcoming(site, 1, preflight);
+        const upcomingReleases = await scraper.fetchUpcoming(site, 1, beforeFetchLatest);
 
         return upcomingReleases
             ? upcomingReleases.map(release => ({ ...release, site, upcoming: true }))
@@ -99,11 +99,11 @@ async function scrapeUpcomingReleases(scraper, site, preflight) {
     return [];
 }
 
-async function deepFetchReleases(baseReleases, preflight) {
+async function deepFetchReleases(baseReleases, beforeFetchLatest) {
     return Promise.map(baseReleases, async (release) => {
         if (release.url || (release.path && release.site)) {
             try {
-                const fullRelease = await scrapeRelease(release.url, release, 'scene', preflight);
+                const fullRelease = await scrapeRelease(release.url, release, 'scene', beforeFetchLatest);
 
                 if (fullRelease) {
                     return {
@@ -133,11 +133,11 @@ async function deepFetchReleases(baseReleases, preflight) {
 }
 
 async function scrapeSiteReleases(scraper, site) {
-    const preflight = await scraper.preflight?.(site);
+    const beforeFetchLatest = await scraper.beforeFetchLatest?.(site);
 
     const [newReleases, upcomingReleases] = await Promise.all([
-        scrapeUniqueReleases(scraper, site, preflight), // fetch basic release info from scene overview
-        scrapeUpcomingReleases(scraper, site, preflight), // fetch basic release info from upcoming overview
+        scrapeUniqueReleases(scraper, site, beforeFetchLatest), // fetch basic release info from scene overview
+        scrapeUpcomingReleases(scraper, site, beforeFetchLatest), // fetch basic release info from upcoming overview
     ]);
 
     if (argv.upcoming) {
@@ -148,7 +148,7 @@ async function scrapeSiteReleases(scraper, site) {
 
     if (argv.deep) {
         // follow URL for every release
-        return deepFetchReleases(baseReleases, preflight);
+        return deepFetchReleases(baseReleases, beforeFetchLatest);
     }
 
     return baseReleases;
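This file only renames the hook: `scraper.preflight` becomes `scraper.beforeFetchLatest`. The harness still resolves it once per site and threads the result through the latest, upcoming, and deep fetches, so per-site setup (a session, a model list, a channel regexp) happens a single time. A sketch of the contract, using a hypothetical scraper:

// Hypothetical scraper illustrating the renamed hook contract.
const exampleScraper = {
    async beforeFetchLatest(site) {
        return { token: `session-for-${site.slug}` }; // any per-site setup value
    },
    async fetchLatest(site, page, beforeFetchLatest) {
        console.log(beforeFetchLatest.token, page); // setup value arrives here
        return [];
    },
};

// scrapeSiteReleases, reduced to its essence: resolve the hook once,
// then hand the result to every fetch for this site.
async function run(scraper, site) {
    const beforeFetchLatest = await scraper.beforeFetchLatest?.(site);
    return scraper.fetchLatest(site, 1, beforeFetchLatest);
}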
@@ -117,8 +117,8 @@ async function fetchLatest(site, page = 1, models) {
     return qLatest && scrapeLatest(qLatest, site, models);
 }
 
-async function fetchScene(url, site, release, preflight) {
-    const models = preflight || await fetchModels(site);
+async function fetchScene(url, site, release, beforeFetchLatest) {
+    const models = beforeFetchLatest || await fetchModels(site);
     const qScene = await get(url);
 
     return qScene && scrapeScene(qScene, url, site, models);
@@ -127,5 +127,5 @@ async function fetchScene(url, site, release, preflight) {
 module.exports = {
     fetchLatest,
     fetchScene,
-    preflight: fetchModels,
+    beforeFetchLatest: fetchModels,
 };
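Here the hook simply points at `fetchModels`, so a deep fetch resolves the site's model list once and reuses it for every scene, while the `beforeFetchLatest || await fetchModels(site)` fallback keeps `fetchScene` usable on its own. Roughly, under those assumptions:

// Sketch: the harness resolves the model list once via the hook and shares
// it across deep-fetched scenes; a direct call falls back to fetching it.
async function deepFetchTwo(site, releaseA, releaseB) {
    const models = await fetchModels(site); // what beforeFetchLatest now exports
    const sceneA = await fetchScene(releaseA.url, site, releaseA, models);
    const sceneB = await fetchScene(releaseB.url, site, releaseB, models); // models not refetched
    return [sceneA, sceneB];
}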
@@ -2,6 +2,7 @@
 
 const util = require('util');
 
+const knex = require('../knex');
 const { get, geta, fd } = require('../utils/q');
 const slugify = require('../utils/slugify');
 
@@ -22,7 +23,7 @@ function extractPoster(posterPath, site, baseRelease) {
         return [posterSources, []];
     }
 
-    return [null, []];
+    return [baseRelease?.poster || null, []];
 }
 
 function scrapeLatest(scenes, site) {
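This is the poster fix from the commit message: when a scene page yields no poster, `extractPoster` used to return `[null, []]` and thereby wipe the poster already scraped from the overview page; it now falls back to the base release's poster. Reduced to its essence, with a made-up poster URL:

// Reduced sketch of the fallback (not the full extractPoster).
function extractPosterSketch(posterSources, baseRelease) {
    if (posterSources) return [posterSources, []];
    return [baseRelease?.poster || null, []]; // keep the overview poster
}

extractPosterSketch(null, { poster: 'https://hushpass.com/posters/scene.jpg' });
// => ['https://hushpass.com/posters/scene.jpg', []]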
@@ -59,8 +60,8 @@ function scrapeLatestT1(scenes, site) {
         release.date = qd('.more-info-div', 'MMM D, YYYY');
         release.duration = ql('.more-info-div');
 
-        release.entryId = q('.img-div img', 'id')?.match(/set-target-(\d+)/)[1]
-            || `${slugify(fd(release.date, 'YYYY-MM-DD'))}-${slugify(release.title)}`;
+        // release.entryId = q('.img-div img', 'id')?.match(/set-target-(\d+)/)[1];
+        release.entryId = `${slugify(fd(release.date, 'YYYY-MM-DD'))}-${slugify(release.title)}`;
 
         const posterPath = q('.img-div img', 'src0_1x') || qi('img.video_placeholder');
 
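The `set-target-<id>` image attribute is no longer used as the entry ID (the lookup is commented out rather than kept as a fallback); the ID is now derived deterministically from date and title. Assuming `slugify` lowercases and hyphenates and `fd` formats the date, a scene released Nov 21, 2019 as "Example Scene Title" would get:

// Hypothetical values for illustration.
const entryId = `${slugify(fd(release.date, 'YYYY-MM-DD'))}-${slugify(release.title)}`;
// => '2019-11-21-example-scene-title'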
@@ -102,7 +103,7 @@ function scrapeScene({ html, q, qa, qd, ql }, site, url, baseRelease) {
     return release;
 }
 
-function scrapeSceneT1({ html, q, qa, qd, ql, qtx }, site, url, baseRelease) {
+function scrapeSceneT1({ html, q, qa, qd, ql, qtx }, site, url, baseRelease, channelRegExp) {
     const release = { url };
 
     release.title = q('.trailer-section-head .section-title', true);
@@ -111,8 +112,8 @@ function scrapeSceneT1({ html, q, qa, qd, ql, qtx }, site, url, baseRelease) {
     release.date = qd('.update-info-row', 'MMM D, YYYY', /\w+ \d{1,2}, \d{4}/);
     release.duration = ql('.update-info-row:nth-child(2)');
 
-    release.entryId = q('.player-thumb img', 'id').match(/set-target-(\d+)/)[1]
-        || `${slugify(fd(release.date, 'YYYY-MM-DD'))}-${slugify(release.title)}`;
+    // release.entryId = q('.player-thumb img', 'id')?.match(/set-target-(\d+)/)[1];
+    release.entryId = `${slugify(fd(release.date, 'YYYY-MM-DD'))}-${slugify(release.title)}`;
 
     release.actors = qa('.models-list-thumbs a').map(el => ({
         name: q(el, 'span', true),
@@ -136,8 +137,8 @@ function scrapeSceneT1({ html, q, qa, qd, ql, qtx }, site, url, baseRelease) {
     const stars = q('.update-rating', true).match(/\d.\d/)?.[0];
     if (stars) release.stars = Number(stars);
 
-    if (site.slug === 'hushpass') {
-        const channel = release.tags.find(tag => /Shot Her First|WhiteZilla|Frat House Fuck Fest|Freaky First Timers|MILF Invaders|Housewives Need Cash|Bubble Butt Bonanza|Suburban Sex Party|Butt Naked In Streets/i.test(tag));
+    if (channelRegExp) {
+        const channel = release.tags.find(tag => channelRegExp.test(tag));
 
         if (channel) {
             release.channel = {
@@ -150,6 +151,14 @@ function scrapeSceneT1({ html, q, qa, qd, ql, qtx }, site, url, baseRelease) {
     return release;
 }
 
+async function getChannelRegExp(site) {
+    if (!['hushpass', 'interracialpass'].includes(site.network.slug)) return null;
+
+    const sites = await knex('sites').where('network_id', site.network.id);
+
+    return new RegExp(sites.map(channel => channel.parameters?.match || channel.name).join('|'), 'i');
+}
+
 async function fetchLatest(site, page = 1) {
     const url = (site.parameters?.latest && util.format(site.parameters.latest, page))
         || (site.parameters?.t1 && `${site.url}/t1/categories/movies_${page}_d.html`)
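`getChannelRegExp` replaces the hardcoded, Hush-only channel alternation with one built from the database, which is what lets the same T1 scene scraper attribute channels for Interracial Pass as well. Each channel row contributes its `parameters.match` override where the on-site tag differs from the display name (e.g. 'Butt Naked In Streets' from the seed change above), otherwise its name. A sketch with in-memory rows standing in for the knex query:

// In-memory stand-ins for rows returned by knex('sites').
const channels = [
    { name: 'Butt Naked In The Streets', parameters: { match: 'Butt Naked In Streets' } },
    { name: 'My Wife\'s First Monster Cock', parameters: { t1: true } },
];

const channelRegExp = new RegExp(
    channels.map(channel => channel.parameters?.match || channel.name).join('|'),
    'i',
);

['Butt Naked In Streets', 'Interracial'].filter(tag => channelRegExp.test(tag));
// => ['Butt Naked In Streets']

Note that the names are joined into the pattern unescaped; that holds up here because none of these channel names contain regex metacharacters.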
@@ -162,16 +171,17 @@ async function fetchLatest(site, page = 1) {
     return site.parameters?.t1 ? scrapeLatestT1(qLatest, site) : scrapeLatest(qLatest, site);
 }
 
-async function fetchScene(url, site, baseRelease) {
+async function fetchScene(url, site, baseRelease, beforeFetchLatest) {
+    const channelRegExp = beforeFetchLatest || await getChannelRegExp(site);
     const qScene = await get(url);
 
     if (!qScene) return null;
 
-    return site.parameters?.t1 ? scrapeSceneT1(qScene, site, url, baseRelease) : scrapeScene(qScene, site, url, baseRelease);
+    return site.parameters?.t1 ? scrapeSceneT1(qScene, site, url, baseRelease, channelRegExp) : scrapeScene(qScene, site, url, baseRelease);
 }
 
 module.exports = {
-    // preflight,
+    beforeFetchLatest: getChannelRegExp,
     fetchLatest,
     fetchScene,
 };