forked from DebaucheryLibrarian/traxxx
Added DDFNetwork scraper. Modified the tag matching query to be case-insensitive.
@@ -121,7 +121,6 @@ async function fetchNewReleases(scraper, site, afterDate, accReleases = [], page
 async function fetchReleases() {
     const sites = await accumulateIncludedSites();
     // const releases = await getExistingReleases();

     const scenesPerSite = await Promise.all(sites.map(async (site) => {
         const scraper = scrapers[site.id] || scrapers[site.networkId];

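For context, the scraper lookup on the last line above prefers a site-specific module and falls back to the network-level one, which is how the new ddfnetwork scraper picks up DDF channel sites that have no dedicated module. A minimal sketch (the site ids are hypothetical):

```js
const scrapers = require('./scrapers');

// Hypothetical site produced by accumulateIncludedSites()
const site = { id: 'handsonhardcore', networkId: 'ddfnetwork' };

// No dedicated scraper is registered under site.id,
// so the network-level module handles the site
const scraper = scrapers[site.id] || scrapers[site.networkId]; // => scrapers.ddfnetwork
```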
@@ -13,19 +13,19 @@ async function findSite(url)
         .where({ url: `${protocol}//www.${hostname}` })
         .orWhere({ url: `${protocol}//${hostname}` })
         .first()
-        // scenes might be listed on network site, let network scraper find channel site
+        // scene might use generic network URL, let network scraper determine channel site
         || await knex('networks')
             .where({ url: `${protocol}//www.${hostname}` })
             .orWhere({ url: `${protocol}//${hostname}` })
             .first();


     return {
         id: site.id,
         name: site.name,
         description: site.description,
         url: site.url,
         networkId: site.network_id || site.id,
         isFallback: site.network_id === undefined,
     };
 }

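The isFallback flag returned here is what lets a scene with a generic network URL be scraped at all: when only the networks table matches, the network-level scraper runs and resolves the concrete channel site itself. A hedged sketch of the behaviour, assuming a DDF row exists only in the networks table (the URL and values are illustrative):

```js
async function demo() {
    const site = await findSite('https://ddfnetwork.com/videos/example-scene');

    console.log(site.networkId);  // 'ddfnetwork' — a network row has no network_id, so it falls back to its own id
    console.log(site.isFallback); // true — tells the scraper to determine the channel site from the scene page
}
```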
src/scrapers/ddfnetwork.js (new file, 91 additions)
@@ -0,0 +1,91 @@
+'use strict';
+
+/* eslint-disable */
+const bhttp = require('bhttp');
+const cheerio = require('cheerio');
+const moment = require('moment');
+
+const knex = require('../knex');
+const { matchTags } = require('../tags');
+
+function scrapeLatest(html, site) {
+    const $ = cheerio.load(html, { normalizeWhitespace: true });
+    const sceneElements = $('.card.m-1').toArray();
+
+    return sceneElements.map((element) => {
+        const sceneLinkElement = $(element).find('a').first();
+        const title = sceneLinkElement.attr('title');
+        const url = `${site.url}${sceneLinkElement.attr('href')}`;
+        const entryId = url.split('/').slice(-1)[0];
+
+        const date = moment.utc($(element).find('.card-footer .text-muted').text(), 'MMMM DD, YYYY').toDate();
+        const actors = $(element).find('.card-subtitle a').map((actorIndex, actorElement) => $(actorElement).text().trim()).toArray().filter(actor => actor);
+
+        const duration = Number($(element).find('.card-info div:nth-child(2) .card-text').text().slice(0, -4)) * 60;
+
+        return {
+            url,
+            entryId,
+            title,
+            actors,
+            date,
+            duration,
+            rating: null,
+            site,
+        };
+    });
+}
+
+async function scrapeScene(html, url, site) {
+    const $ = cheerio.load(html, { normalizeWhitespace: true });
+
+    const entryId = url.split('/').slice(-1)[0];
+    const title = $('.video-title h1').text();
+    const description = $('.description-box .box-container').text();
+
+    const date = moment.utc($('.video-title .remain time').text(), 'MMMM DD, YYYY').toDate();
+    const actors = $('.pornstars-box .pornstar-card .card-title a').map((actorIndex, actorElement) => $(actorElement).text()).toArray();
+
+    const likes = Number($('.info-panel.likes .likes').text());
+    const duration = Number($('.info-panel.duration .duration').text().slice(0, -4)) * 60;
+
+    const { origin } = new URL($('.pornstar-card meta[itemprop="url"]').first().attr('content'));
+    const rawTags = $('#tagsBox .tags a').map((tagIndex, tagElement) => $(tagElement).text()).toArray();
+
+    const [channelSite, tags] = await Promise.all([
+        // don't find site if original is already specific
+        site.isFallback ? knex('sites').where({ url: origin }).first() : site,
+        matchTags(rawTags),
+    ]);
+
+    return {
+        url: channelSite ? `${channelSite.url}${new URL(url).pathname}` : url,
+        entryId,
+        title,
+        actors,
+        date,
+        duration,
+        tags,
+        rating: {
+            likes,
+        },
+        site: channelSite || site,
+    };
+}
+
+async function fetchLatest(site, page = 1) {
+    const res = await bhttp.get(`https://ddfnetwork.com/videos/search/latest/ever/${new URL(site.url).hostname}/-/${page}`);
+
+    return scrapeLatest(res.body.toString(), site);
+}
+
+async function fetchScene(url, site) {
+    const res = await bhttp.get(`https://ddfnetwork.com${new URL(url).pathname}`);
+
+    return scrapeScene(res.body.toString(), url, site);
+}
+
+module.exports = {
+    fetchLatest,
+    fetchScene,
+};

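A hypothetical usage sketch of the new module (not part of the commit; the site object is illustrative). fetchLatest only needs a site with a url, since the listing endpoint is keyed on the hostname:

```js
const { fetchLatest } = require('./scrapers/ddfnetwork');

// Illustrative site record; in the app this comes from the sites table
const site = { name: 'DDF Network', url: 'https://ddfnetwork.com', isFallback: true };

fetchLatest(site)
    .then(scenes => scenes.forEach(scene => console.log(scene.date, scene.title)))
    .catch(console.error);
```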
@@ -2,6 +2,7 @@

 const blowpass = require('./blowpass');
 const brazzers = require('./brazzers');
+const ddfnetwork = require('./ddfnetwork');
 const julesjordan = require('./julesjordan');
 const kink = require('./kink');
 const legalporno = require('./legalporno');
@@ -14,6 +15,7 @@ const xempire = require('./xempire');
 module.exports = {
     blowpass,
     brazzers,
+    ddfnetwork,
     julesjordan,
     kink,
     legalporno,

@@ -49,8 +49,6 @@ async function scrapeScene(html, url, shootId, ratingRes, site) {
     const title = $('h1.shoot-title span.favorite-button').attr('data-title');
     const actorsRaw = $('.shoot-info p.starring');

-    const sitename = $('.shoot-logo a').attr('href').split('/')[2];
-
     const date = moment.utc($(actorsRaw)
         .prev()
         .text()
@@ -64,6 +62,7 @@ async function scrapeScene(html, url, shootId, ratingRes, site) {

     const { average: stars } = ratingRes.body;

+    const sitename = $('.shoot-logo a').attr('href').split('/')[2];
     const rawTags = $('.tag-list > a[href*="/tag"]').map((tagIndex, tagElement) => $(tagElement).text()).toArray();

     const [channelSite, tags] = await Promise.all([

@@ -12,6 +12,8 @@ function scrapeLatest(html, site) {
     const sceneElements = $('.scenes-latest').toArray();

     return sceneElements.map((element) => {
+        const actors = $('.actors a').map((actorIndex, actorElement) => $(actorElement).text()).toArray();
+
         return {
             url,
             shootId,
@@ -33,6 +35,8 @@ function scrapeUpcoming(html, site) {
     const sceneElements = $('.scenes-upcoming').toArray();

     return sceneElements.map((element) => {
+        const actors = $('.actors a').map((actorIndex, actorElement) => $(actorElement).text()).toArray();
+
         return {
             url,
             shootId,
@@ -55,6 +59,11 @@ async function scrapeScene(html, url, site) {
-    const rawTags = [];
-    const tags = await matchTags(rawTags);
+    const actors = $('.actors a').map((actorIndex, actorElement) => $(actorElement).text()).toArray();
+
+    const rawTags = $('.tags a').map((tagIndex, tagElement) => $(tagElement).text()).toArray();
+    const tags = await matchTags(rawTags);

     return {
         url,
         shootId,

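The actor extraction added in all three functions uses the same cheerio idiom: .map() over the matched elements, then .toArray() to unwrap the collection into a plain array of strings. A standalone sketch with hypothetical markup:

```js
const cheerio = require('cheerio');

const $ = cheerio.load('<div class="actors"><a>Alpha</a> <a>Beta</a></div>');
const actors = $('.actors a').map((actorIndex, actorElement) => $(actorElement).text()).toArray();

console.log(actors); // [ 'Alpha', 'Beta' ]
```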
@@ -3,11 +3,14 @@
 const knex = require('./knex');

 async function matchTags(rawTags) {
-    const tagEntries = await knex('tags')
+    const tagQuery = knex('tags')
         .select(knex.raw('ifnull(original.tag, tags.tag) as tag'))
         .whereIn('tags.tag', rawTags)
         .orWhereIn('tags.tag', rawTags.map(tag => tag.toLowerCase()))
-        .leftJoin('tags as original', 'tags.alias_for', 'original.tag');
+        .leftJoin('tags as original', 'tags.alias_for', 'original.tag')
+        .toString()
+        .replace('where `tags`.`tag` in', 'where `tags`.`tag` collate NOCASE in');
+
+    const tagEntries = await knex.raw(tagQuery);

     return Array.from(new Set(tagEntries.map(({ tag }) => tag))).sort(); // reduce to tag name and filter duplicates
 }

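To make the NOCASE rewrite concrete, here is a self-contained sketch of the same trick (table contents and search terms are illustrative; it assumes the sqlite3 knex client, whose backtick identifier quoting the .replace() string relies on). knex offers no builder-level hook for a collation inside WHERE ... IN, so the query is serialized with .toString(), the collation is spliced into the SQL, and the result runs through knex.raw():

```js
const knex = require('knex')({
    client: 'sqlite3',
    connection: { filename: ':memory:' },
    useNullAsDefault: true,
});

async function demo() {
    await knex.schema.createTable('tags', (table) => {
        table.string('tag');
        table.string('alias_for');
    });

    await knex('tags').insert([{ tag: 'Anal' }, { tag: 'Facial' }]);

    // Serialize, splice in the collation, then execute as raw SQL
    const tagQuery = knex('tags')
        .select('tags.tag')
        .whereIn('tags.tag', ['ANAL', 'facial'])
        .toString()
        .replace('where `tags`.`tag` in', 'where `tags`.`tag` collate NOCASE in');

    console.log(await knex.raw(tagQuery)); // [ { tag: 'Anal' }, { tag: 'Facial' } ]

    await knex.destroy();
}

demo().catch(console.error);
```

Note that .toString() inlines the bindings, so the rewritten query is no longer parameterized; that is the trade-off this workaround makes for the collation override.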