Switched to UTC for querying date ranges. Removed stray console log from MindGeek scraper.
This commit is contained in:
parent 52f66e7982
commit 8611d738b0
```diff
@@ -1,13 +1,16 @@
+import utc from 'dayjs/plugin/utc';
 import dayjs from 'dayjs';
 
+dayjs.extend(utc);
+
 const dateRanges = {
 	latest: () => ({
 		after: '1900-01-01 00:00:00',
-		before: dayjs().format('YYYY-MM-DD HH:mm:ss'),
+		before: dayjs.utc().format('YYYY-MM-DD HH:mm:ss'),
 		orderBy: 'DATE_DESC',
 	}),
 	upcoming: () => ({
-		after: dayjs().format('YYYY-MM-DD HH:mm:ss'),
+		after: dayjs.utc().format('YYYY-MM-DD HH:mm:ss'),
 		before: '2100-01-01 00:00:00',
 		orderBy: 'DATE_ASC',
 	}),
```
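Why the switch matters: `dayjs()` formats in the server's local timezone, so the same instant produces different query bounds depending on where the scraper runs, while `dayjs.utc()` makes the bounds deterministic. A minimal sketch of the difference (the printed values are illustrative):

```js
import dayjs from 'dayjs';
import utc from 'dayjs/plugin/utc';

dayjs.extend(utc);

// Local time: on a server in UTC+2 this runs two hours ahead of UTC,
// so the 'upcoming' bound would wrongly exclude scenes in that window.
console.log(dayjs().format('YYYY-MM-DD HH:mm:ss')); // e.g. 2020-03-12 14:00:00

// UTC: the same instant always formats to the same string, matching
// the timezone the API presumably stores its dates in.
console.log(dayjs.utc().format('YYYY-MM-DD HH:mm:ss')); // e.g. 2020-03-12 12:00:00
```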
										
Binary file not shown. (Size: 525 KiB → 458 KiB)
Binary file not shown. (Size: 7.9 KiB → 6.3 KiB)
Binary file not shown. (Size: 36 KiB → 28 KiB)
```diff
@@ -685,7 +685,7 @@ const tagPhotos = [
 	['da-tp', 0, 'Natasha Teen in LegalPorno SZ2164'],
 	['da-tp', 1, 'Francys Belle in SZ1702 for LegalPorno'],
 	['dap', 6, 'Sheena Shaw in "Ass Worship 14" for Jules Jordan'],
-	['dap', 9, 'Polly Pons in LegalPorno GIO1520'],
+	['dap', 9, 'Vicky Sol in GIO1547 for LegalPorno'],
 	['dap', 1, 'Ria Sunn in SZ1801 for LegalPorno'],
 	['dap', 2, 'Lana Rhoades in "Lana Rhoades Unleashed" for HardX'],
 	['dap', 5, 'Riley Reid in "The Gangbang of Riley Reid" for Jules Jordan'],
```
```diff
@@ -146,12 +146,16 @@ async function getSession(url) {
 	const cookieJar = new CookieJar();
 	const session = bhttp.session({ cookieJar });
 
-	await session.get(url);
+	const res = await session.get(url);
 
-	const cookieString = await cookieJar.getCookieStringAsync(url);
-	const { instance_token: instanceToken } = cookieToData(cookieString);
+	if (res.statusCode === 200) {
+		const cookieString = await cookieJar.getCookieStringAsync(url);
+		const { instance_token: instanceToken } = cookieToData(cookieString);
 
-	return { session, instanceToken };
+		return { session, instanceToken };
+	}
+
+	throw new Error(`Failed to acquire MindGeek session: ${res.statusCode}`);
 }
 
 function scrapeProfile(data, html, releases = [], networkName) {
```
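With this change, `getSession` only parses the cookie jar on a 200 response and otherwise throws with the upstream status code, so a dead or blocking endpoint fails loudly instead of surfacing later as a missing `instance_token`. A hedged sketch of how a caller might consume the stricter version (the caller-side handling is illustrative, not part of the commit):

```js
// Illustrative caller: getSession is the function from this commit;
// how failures are handled downstream is an assumption.
async function fetchReleasePage(url) {
	try {
		const { session, instanceToken } = await getSession(url);

		// The Instance header pattern mirrors the fetchScene hunk below.
		return await session.get(url, {
			headers: { Instance: instanceToken },
		});
	} catch (error) {
		// e.g. "Failed to acquire MindGeek session: 403"
		console.warn(error.message);
		return null;
	}
}
```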
```diff
@@ -242,11 +246,14 @@ async function fetchUpcoming(site) {
 }
 
 async function fetchScene(url, site, baseScene) {
+	if (baseScene?.entryId) {
+		// overview and deep data is the same, don't hit server unnecessarily
+		return baseScene;
+	}
+
 	const entryId = url.match(/\d+/)[0];
 	const { session, instanceToken } = await getSession(url);
 
-	console.log(baseScene);
-
 	const res = await session.get(`https://site-api.project1service.com/v2/releases/${entryId}`, {
 		headers: {
 			Instance: instanceToken,
```
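The new early return uses optional chaining: when the overview scrape already produced deep data, `fetchScene` hands back `baseScene` without opening a session or hitting the releases API, and the stray `console.log(baseScene)` goes away with it. A small sketch of the guard's behavior (names and values are illustrative):

```js
// Optional chaining short-circuits safely whether baseScene is
// undefined, null, or an object without an entryId.
const shouldReuse = (baseScene) => Boolean(baseScene?.entryId);

shouldReuse(undefined);            // false — no base scene passed
shouldReuse({ title: 'Scene' });   // false — overview data lacks an ID
shouldReuse({ entryId: 12345 });   // true — skip the network round trip
```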