// traxxx/src/tools/dogfart.js
'use strict';
const fs = require('fs').promises;
const Promise = require('bluebird');
const qu = require('../utils/qu');
// Maps the quality suffix embedded in trailer filenames (e.g. "_big.mp4")
// to the corresponding vertical resolution. Frozen: shared lookup constant.
const qualities = Object.freeze({
	sm: 360,
	med: 480,
	big: 720,
});
/* NOTE(review): disabled one-off helpers (scrape/compare) kept for reference — delete once no longer needed.
async function scrape() {
const urlsByPage = await Promise.map(Array.from({ length: 140 }), async (value, index) => {
const res = await qu.get(`https://www.dogfartnetwork.com/tour/scenes/?p=${index + 1}`);
if (res.ok) {
return res.item.query.urls('.recent-updates > a', 'href', { origin: 'https://www.dogfartnetwork.com' });
}
return [];
}, { concurrency: 1 });
const urls = urlsByPage.flat();
await fs.writeFile('./dogfart-links', urls.join('\n'));
console.log(`Saved ${urls.length} URLs to file`);
}
async function compare() {
const newLinksFile = await fs.readFile('./dogfart-links', 'utf8');
const oldLinksFile = await fs.readFile('./dogfart-got', 'utf8');
const newLinks = newLinksFile.split('\n').filter(Boolean);
const oldLinks = new Set(oldLinksFile.split('\n').filter(Boolean));
const getLinks = newLinks.filter((link) => !oldLinks.has(link)).map((link) => `https://dogfartnetwork.com/tour/sites${link}`);
await fs.writeFile('./dogfart-new', getLinks.join('\n'));
console.log(getLinks);
}
*/
/**
 * Scrapes the Blacks On Boys members-area listing pages for scene data
 * (tour URL, actor names, trailer sources with quality) and writes one
 * JSON line per unique scene to ./dogfart-blacksonboys.
 *
 * @param {number} [pages=1] - Number of listing pages to fetch (1-based paging).
 * @returns {Promise<void>}
 */
async function scrapeMembers(pages = 1) {
	const titlesByPage = await Promise.map(Array.from({ length: pages }), async (value, index) => {
		const res = await qu.get(`https://sbj1e2bdv33.dogfartnetwork.com/members/blacksonboys/index.php?page=${index + 1}`);

		if (!res.ok) {
			return [];
		}

		return qu.initAll(res.item.query.all('.scene-container')).map(({ query }) => {
			// The scene slug is only exposed via the thumbnail filename, e.g. ".../<slug>.jpg".
			// Guard the match: a missing image or unexpected filename must not crash the whole run.
			const slugMatch = query.img('.video-container img')?.match(/\/(\w+).jpg/);

			return {
				url: slugMatch ? `https://www.dogfartnetwork.com/tour/sites/BlacksOnBoys/${slugMatch[1]}/` : null,
				actors: query.contents('a[href*="model.php"]'),
				trailer: query.urls('.trailer-link a').map((url) => {
					// Trailer filenames end in _sm/_med/_big.mp4, mapped to 360/480/720 via `qualities`.
					const qualityMatch = url.match(/_([a-z]+).mp4/);

					return {
						src: url,
						quality: qualityMatch ? qualities[qualityMatch[1]] : null,
					};
				}),
			};
		});
	}, { concurrency: 1 });

	// Dedupe before counting so the log message matches what is actually written.
	const lines = Array.from(new Set(titlesByPage.flat().map((data) => JSON.stringify(data))));

	console.log(lines);
	await fs.writeFile('./dogfart-blacksonboys', lines.join('\n'));
	console.log(`Saved ${lines.length} URLs to file`);
}
// Entry point: surface failures instead of leaving an unhandled rejection,
// and signal failure to the shell via the exit code.
scrapeMembers().catch((error) => {
	console.error(error);
	process.exitCode = 1;
});