Using channel URL instead of composed URL for session retrieval, should fix Brazzers.

This commit is contained in:
DebaucheryLibrarian 2020-09-18 02:54:05 +02:00
parent 53e8495d06
commit a4929819df
1 changed file with 12 additions and 10 deletions

View File

@@ -121,9 +121,10 @@ function scrapeScene(data, url, _site, networkName) {
} }
function getUrl(site) { function getUrl(site) {
const { search } = new URL(site.url); const { searchParams } = new URL(site.url);
if (search.match(/\?site=\d+/)) { // if (search.match(/\?site=\d+/)) {
if (searchParams.has('site')) {
return site.url; return site.url;
} }
@@ -142,11 +143,12 @@ function getUrl(site) {
throw new Error(`Mind Geek site '${site.name}' (${site.url}) not supported`); throw new Error(`Mind Geek site '${site.name}' (${site.url}) not supported`);
} }
async function getSession(url) { async function getSession(url, site) {
const cookieJar = new CookieJar(); const cookieJar = new CookieJar();
const session = bhttp.session({ cookieJar }); const session = bhttp.session({ cookieJar });
const res = await session.get(url); // const res = await session.get(url);
const res = await session.get(site.parameters?.siteId ? site.parent.url : site.url);
if (res.statusCode === 200) { if (res.statusCode === 200) {
const cookieString = await cookieJar.getCookieStringAsync(url); const cookieString = await cookieJar.getCookieStringAsync(url);
@@ -198,10 +200,10 @@ function scrapeProfile(data, html, releases = [], networkName) {
async function fetchLatest(site, page = 1) { async function fetchLatest(site, page = 1) {
const url = getUrl(site); const url = getUrl(site);
const { search } = new URL(url); const { searchParams } = new URL(url);
const siteId = new URLSearchParams(search).get('site'); const siteId = searchParams.get('site');
const { session, instanceToken } = await getSession(url); const { session, instanceToken } = await getSession(url, site);
const beforeDate = moment().add('1', 'day').format('YYYY-MM-DD'); const beforeDate = moment().add('1', 'day').format('YYYY-MM-DD');
const limit = 10; const limit = 10;
@@ -226,7 +228,7 @@ async function fetchLatest(site, page = 1) {
async function fetchUpcoming(site) { async function fetchUpcoming(site) {
const url = getUrl(site); const url = getUrl(site);
const { session, instanceToken } = await getSession(url); const { session, instanceToken } = await getSession(url, site);
const apiUrl = 'https://site-api.project1service.com/v2/upcoming-releases'; const apiUrl = 'https://site-api.project1service.com/v2/upcoming-releases';
@@ -252,7 +254,7 @@ async function fetchScene(url, site, baseScene) {
} }
const entryId = url.match(/\d+/)[0]; const entryId = url.match(/\d+/)[0];
const { session, instanceToken } = await getSession(url); const { session, instanceToken } = await getSession(url, site);
const res = await session.get(`https://site-api.project1service.com/v2/releases/${entryId}`, { const res = await session.get(`https://site-api.project1service.com/v2/releases/${entryId}`, {
headers: { headers: {
@@ -269,7 +271,7 @@ async function fetchScene(url, site, baseScene) {
async function fetchProfile({ name: actorName }, networkOrNetworkSlug, actorPath = 'model') { async function fetchProfile({ name: actorName }, networkOrNetworkSlug, actorPath = 'model') {
const url = `https://www.${networkOrNetworkSlug.slug || networkOrNetworkSlug}.com`; const url = `https://www.${networkOrNetworkSlug.slug || networkOrNetworkSlug}.com`;
const { session, instanceToken } = await getSession(url); const { session, instanceToken } = await getSession(url, networkOrNetworkSlug);
const res = await session.get(`https://site-api.project1service.com/v1/actors/?search=${encodeURI(actorName)}`, { const res = await session.get(`https://site-api.project1service.com/v1/actors/?search=${encodeURI(actorName)}`, {
headers: { headers: {