Adapted Score scraper for 18eighteen, 40 Something Mag and 50 Plus Milfs. Updated Score network logo and favicon.

ThePendulum 2020-02-03 02:57:53 +01:00
parent dfa0183669
commit 5bae5b6e5f
9 changed files with 42 additions and 21 deletions


@@ -19,7 +19,12 @@ function initNetworksActions(store, _router) {
slug
url
sites(
orderBy: PRIORITY_DESC
orderBy: PRIORITY_DESC,
filter: {
enabled: {
equalTo: true,
},
},
) {
id
name


@@ -142,6 +142,11 @@ exports.up = knex => Promise.resolve()
table.text('description');
table.string('parameters');
table.integer('priority', 3)
.defaultTo(0);
table.boolean('enabled')
.defaultTo(true);
table.string('slug', 32)
.unique();


@@ -1,10 +0,0 @@
exports.up = async knex => Promise.resolve()
.then(() => knex.schema.table('sites', (table) => {
table.integer('priority', 3)
.defaultTo(0);
}));
exports.down = async knex => Promise.resolve()
.then(() => knex.schema.table('sites', (table) => {
table.dropColumn('priority');
}));
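
Note: the standalone priority migration deleted above appears to have been folded into the base table definition, which now adds both the priority column and the new enabled flag. A database created before this commit would still need an alter migration for the new column; a minimal sketch of what that could look like with the same knex pattern (hypothetical, not part of this commit):

// Hypothetical follow-up migration: add the new 'enabled' flag to an existing
// 'sites' table. Illustrative only; the column name is taken from the hunks above.
exports.up = async knex => knex.schema.table('sites', (table) => {
    table.boolean('enabled')
        .defaultTo(true);
});

exports.down = async knex => knex.schema.table('sites', (table) => {
    table.dropColumn('enabled');
});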

Binary file not shown (image replaced: 3.5 KiB before, 1.2 KiB after).

Binary file not shown (image replaced: 2.4 KiB before, 6.3 KiB after).


@@ -3054,23 +3054,27 @@ const sites = [
slug: '18eighteen',
url: 'https://www.18eighteen.com',
network: 'score',
parameters: { path: '/xxx-teen-videos' },
},
{
name: '40 Something Mag',
slug: '40somethingmag',
url: 'https://www.40somethingmag.com',
parameters: { path: '/xxx-mature-videos' },
network: 'score',
},
{
name: '50 Plus MILFs',
slug: '50plusmilfs',
url: 'https://www.50plusmilfs.com',
parameters: { path: '/xxx-milf-videos' },
network: 'score',
},
{
name: '60 Plus MILFs',
slug: '60plusmilfs',
url: 'https://www.60plusmilfs.com',
parameters: { path: '/xxx-granny-videos' },
network: 'score',
},
{
@@ -3481,6 +3485,7 @@ const sites = [
url: 'https://www.scorepass.com/scorelandtv',
network: 'score',
priority: 1,
enabled: false, // appears to be streaming service for other sites
},
{
name: 'ScoreTV',
@@ -3488,6 +3493,7 @@ const sites = [
url: 'https://www.scoretv.tv',
network: 'score',
priority: 1,
enabled: false, // similar to or same as Scoreland TV
},
{
name: 'Score Videos',
@@ -3928,6 +3934,7 @@ exports.seed = knex => Promise.resolve()
parameters: JSON.stringify(site.parameters),
network_id: networksMap[site.network],
priority: site.priority,
enabled: site.enabled,
}));
return upsert('sites', sitesWithNetworks, 'slug', knex);
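
The upsert helper called here is not included in this diff. As a rough sketch only (the project's actual implementation may differ), a PostgreSQL-flavoured upsert keyed on the conflict column could be assembled from knex like this:

// Hypothetical upsert(table, rows, conflictKey, knex) helper: insert rows and,
// when the conflict key (e.g. slug) already exists, update the remaining columns.
// PostgreSQL-specific; shown purely to illustrate the seeding call above.
async function upsert(table, rows, conflictKey, knex) {
    const insert = knex(table).insert(rows).toString();
    const update = Object.keys(rows[0])
        .filter(column => column !== conflictKey)
        .map(column => `"${column}" = EXCLUDED."${column}"`)
        .join(', ');

    return knex.raw(`${insert} ON CONFLICT ("${conflictKey}") DO UPDATE SET ${update}`);
}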


@@ -362,10 +362,12 @@ async function scrapeActors(actorNames) {
}
logger.verbose(`No profile for '${actorName}' available on ${scraperSlug}`);
throw new Error(`Profile for ${actorName} not available on ${scraperSlug}`);
throw Object.assign(new Error(`Profile for ${actorName} not available on ${scraperSlug}`), { warn: false });
}), Promise.reject(new Error()));
} catch (error) {
logger.warn(`Error in scraper ${source}: ${error.message}`);
if (error.warn !== false) {
logger.warn(`Error in scraper ${source}: ${error.message}`);
}
}
return null;
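
The change above tags the expected "profile not available" rejection with warn: false so that the surrounding catch block only warns about genuine scraper failures. A self-contained sketch of the pattern using generic names (nothing here is taken from the codebase beyond the flag itself):

// Attach metadata to an Error so the catch site can tell an expected
// "not found" outcome apart from a real failure and skip the warning.
async function findProfile(actorName, lookup) {
    const profile = await lookup(actorName);

    if (!profile) {
        throw Object.assign(new Error(`No profile for ${actorName}`), { warn: false });
    }

    return profile;
}

async function main() {
    try {
        await findProfile('Jane Doe', async () => null); // lookup finds nothing
    } catch (error) {
        if (error.warn !== false) {
            console.warn(error.message); // only unexpected errors get logged
        }
    }
}

main();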


@@ -29,7 +29,7 @@ async function fetchPhotos(url) {
}
function scrapeAll(html) {
return exa(html, '.container .video').map(({ q, qa, qd, ql }) => {
return exa(html, '.container .video, .container-fluid .video').map(({ q, qa, qd, ql }) => {
const release = {};
release.title = q('.title, .i-title', true);
@@ -70,10 +70,10 @@ async function scrapeScene(html, url) {
[release.entryId] = new URL(url).pathname.split('/').slice(-2);
release.title = q('h2.text-uppercase, h2.title', true);
release.title = q('h2.text-uppercase, h2.title, #breadcrumb-top + h1', true);
release.description = qtext('.p-desc, .desc');
release.actors = qa('.value a[href*=models], .value a[href*=performer]', true);
release.actors = qa('.value a[href*=models], .value a[href*=performer], .value a[href*=teen-babes]', true);
release.tags = qa('a[href*=tag]', true);
const dateEl = qa('.value').find(el => /\w+ \d+\w+, \d{4}/.test(el.textContent));
@@ -161,8 +161,8 @@ function scrapeProfile(html) {
const avatar = qi('img');
if (avatar) profile.avatar = avatar;
const releases = ex(html, '#model-page + .container');
profile.releases = scrapeAll(releases.document.outerHTML);
const releases = ex(html, '#model-page + .container, #model-page + .container-fluid');
if (releases) profile.releases = scrapeAll(releases.document.outerHTML);
return profile;
}
@@ -189,10 +189,16 @@ async function fetchScene(url, site) {
return null;
}
async function fetchProfile(actorName, scraperSlug, page = 1) {
async function fetchProfile(actorName, scraperSlug, page = 1, source = 0) {
const letter = actorName.charAt(0).toUpperCase();
const url = `https://www.scoreland.com/big-boob-models/browse/${letter}/?page=${page}`;
const sources = [
`https://www.scoreland.com/big-boob-models/browse/${letter}/?page=${page}`,
`https://www.50plusmilfs.com/xxx-milf-models/browse/${letter}/?page=${page}`,
];
const url = sources[source];
const res = await bhttp.get(url, {
followRedirects: false,
});
@@ -210,7 +216,11 @@ async function fetchProfile(actorName, scraperSlug, page = 1) {
return null;
}
return fetchProfile(actorName, scraperSlug, page + 1);
return fetchProfile(actorName, scraperSlug, page + 1, source);
}
if (sources[source + 1]) {
return fetchProfile(actorName, scraperSlug, 1, source + 1);
}
return null;
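
With this change, fetchProfile pages through one model index and, once that listing runs out, restarts at page 1 of the next source URL before returning null. A stripped-down sketch of that control flow with hypothetical callbacks (the real function also fetches and scrapes each page):

// Generic pagination-then-fallback recursion mirroring the change above.
async function findOnSources(sources, findEntry, hasNextPage, page = 1, source = 0) {
    const url = `${sources[source]}?page=${page}`;
    const entry = await findEntry(url);

    if (entry) {
        return entry;
    }

    if (await hasNextPage(url)) {
        // keep paging through the current source
        return findOnSources(sources, findEntry, hasNextPage, page + 1, source);
    }

    if (sources[source + 1]) {
        // current source is exhausted, start over on the next one
        return findOnSources(sources, findEntry, hasNextPage, 1, source + 1);
    }

    return null;
}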


@@ -148,6 +148,8 @@ const funcs = {
};
function ctx(element, window) {
if (!element) return null;
const contextFuncs = Object.entries(funcs) // dynamically attach methods with context
.reduce((acc, [key, func]) => ({
...acc,
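
The hunk above is cut off mid-expression; the pattern it begins wraps every helper in funcs so the current element is passed in automatically (the real ctx also receives a window argument, omitted here). A minimal, runnable sketch with placeholder helpers rather than the project's actual funcs:

// Each helper takes `element` explicitly; ctx() returns an object whose
// methods have `element` pre-bound, so callers never pass it themselves.
const funcs = {
    text: (element, key) => String(element[key]),
    has: (element, key) => key in element,
};

function ctx(element) {
    if (!element) return null;

    return Object.entries(funcs).reduce((acc, [key, func]) => ({
        ...acc,
        [key]: (...args) => func(element, ...args),
    }), { element });
}

const q = ctx({ title: 'Example' });
console.log(q.text('title')); // 'Example'
console.log(q.has('missing')); // false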