Using UTC to query date ranges. Removed stray console log from MindGeek scraper.

This commit is contained in:
DebaucheryLibrarian 2020-08-26 02:01:38 +02:00
parent 52f66e7982
commit 8611d738b0
6 changed files with 19 additions and 9 deletions

View File

@@ -1,13 +1,16 @@
import utc from 'dayjs/plugin/utc';
import dayjs from 'dayjs'; import dayjs from 'dayjs';
dayjs.extend(utc);
const dateRanges = { const dateRanges = {
latest: () => ({ latest: () => ({
after: '1900-01-01 00:00:00', after: '1900-01-01 00:00:00',
before: dayjs().format('YYYY-MM-DD HH:mm:ss'), before: dayjs.utc().format('YYYY-MM-DD HH:mm:ss'),
orderBy: 'DATE_DESC', orderBy: 'DATE_DESC',
}), }),
upcoming: () => ({ upcoming: () => ({
after: dayjs().format('YYYY-MM-DD HH:mm:ss'), after: dayjs.utc().format('YYYY-MM-DD HH:mm:ss'),
before: '2100-01-01 00:00:00', before: '2100-01-01 00:00:00',
orderBy: 'DATE_ASC', orderBy: 'DATE_ASC',
}), }),

Binary file not shown.

Before

Width:  |  Height:  |  Size: 525 KiB

After

Width:  |  Height:  |  Size: 458 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 7.9 KiB

After

Width:  |  Height:  |  Size: 6.3 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 36 KiB

After

Width:  |  Height:  |  Size: 28 KiB

View File

@@ -685,7 +685,7 @@ const tagPhotos = [
['da-tp', 0, 'Natasha Teen in LegalPorno SZ2164'], ['da-tp', 0, 'Natasha Teen in LegalPorno SZ2164'],
['da-tp', 1, 'Francys Belle in SZ1702 for LegalPorno'], ['da-tp', 1, 'Francys Belle in SZ1702 for LegalPorno'],
['dap', 6, 'Sheena Shaw in "Ass Worship 14" for Jules Jordan'], ['dap', 6, 'Sheena Shaw in "Ass Worship 14" for Jules Jordan'],
['dap', 9, 'Polly Pons in LegalPorno GIO1520'], ['dap', 9, 'Vicky Sol in GIO1547 for LegalPorno'],
['dap', 1, 'Ria Sunn in SZ1801 for LegalPorno'], ['dap', 1, 'Ria Sunn in SZ1801 for LegalPorno'],
['dap', 2, 'Lana Rhoades in "Lana Rhoades Unleashed" for HardX'], ['dap', 2, 'Lana Rhoades in "Lana Rhoades Unleashed" for HardX'],
['dap', 5, 'Riley Reid in "The Gangbang of Riley Reid" for Jules Jordan'], ['dap', 5, 'Riley Reid in "The Gangbang of Riley Reid" for Jules Jordan'],

View File

@@ -146,12 +146,16 @@ async function getSession(url) {
const cookieJar = new CookieJar(); const cookieJar = new CookieJar();
const session = bhttp.session({ cookieJar }); const session = bhttp.session({ cookieJar });
await session.get(url); const res = await session.get(url);
if (res.statusCode === 200) {
const cookieString = await cookieJar.getCookieStringAsync(url); const cookieString = await cookieJar.getCookieStringAsync(url);
const { instance_token: instanceToken } = cookieToData(cookieString); const { instance_token: instanceToken } = cookieToData(cookieString);
return { session, instanceToken }; return { session, instanceToken };
}
throw new Error(`Failed to acquire MindGeek session: ${res.statusCode}`);
} }
function scrapeProfile(data, html, releases = [], networkName) { function scrapeProfile(data, html, releases = [], networkName) {
@@ -242,11 +246,14 @@ async function fetchUpcoming(site) {
} }
async function fetchScene(url, site, baseScene) { async function fetchScene(url, site, baseScene) {
if (baseScene?.entryId) {
// overview and deep data is the same, don't hit server unnecessarily
return baseScene;
}
const entryId = url.match(/\d+/)[0]; const entryId = url.match(/\d+/)[0];
const { session, instanceToken } = await getSession(url); const { session, instanceToken } = await getSession(url);
console.log(baseScene);
const res = await session.get(`https://site-api.project1service.com/v2/releases/${entryId}`, { const res = await session.get(`https://site-api.project1service.com/v2/releases/${entryId}`, {
headers: { headers: {
Instance: instanceToken, Instance: instanceToken,