forked from DebaucheryLibrarian/traxxx
Removed upsert from store releases, observing effects; fixes '?' characters being inserted as '.'. Removed query string from stored Dogfart URLs.
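The store-releases change itself is not shown in this diff; per the commit message, removing the upsert is what fixes '?' being stored as '.'. As a rough sketch only, assuming a PostgreSQL backend and a hypothetical releases table (connection, table and column names are illustrative, not the project's actual schema), going from an upsert to a plain knex insert could look like:

    // sketch only; the knex connection, table and columns are assumptions
    const knex = require('knex')({ client: 'pg', connection: process.env.DATABASE_URL });

    async function storeRelease(release) {
        // before (upsert-style, now removed): merge into the existing row on conflict
        // await knex.raw(
        //     `INSERT INTO releases (entry_id, url, title)
        //      VALUES (:entry_id, :url, :title)
        //      ON CONFLICT (entry_id) DO UPDATE SET url = excluded.url, title = excluded.title`,
        //     release,
        // );

        // after: plain insert; duplicates now surface as errors instead of being merged
        return knex('releases').insert(release).returning('*');
    }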
@@ -8,6 +8,7 @@ const moment = require('moment');
 const knex = require('knex');
 
 const { matchTags } = require('../tags');
+const pluckPhotos = require('../utils/pluck-photos');
 
 async function getPhoto(url) {
 	const res = await bhttp.get(url);
@@ -29,7 +30,7 @@ async function getPhotos(albumUrl, site, siteUrl) {
 
 	// dogfart has massive albums, pick 25 or specified number of photos: first, last and evenly inbetween
 	const photoLimit = (site.network.parameters && site.network.parameters.photoLimit) || 25;
-	const photoIndexes = [1].concat(Array.from({ length: photoLimit - 1 }, (value, index) => Math.floor((index + 1) * (lastPhotoIndex / (photoLimit - 1)))));
+	const photoIndexes = pluckPhotos(lastPhotoIndex, photoLimit);
 
 	if (photoLimit > 25) {
 		console.log(`${site.name}: Scraping ${photoLimit} album photos from ${siteUrl}, this may take some time...`);
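The removed inline expression documents what the new ../utils/pluck-photos helper is expected to do: take photo 1 plus evenly spaced indexes up to the last photo. A minimal sketch based on that expression (the actual module may differ, e.g. by de-duplicating indexes):

    // utils/pluck-photos.js (sketch, not necessarily the actual module)
    // Returns index 1 plus (limit - 1) indexes spread evenly up to lastPhotoIndex.
    function pluckPhotos(lastPhotoIndex, limit = 25) {
        return [1].concat(Array.from(
            { length: limit - 1 },
            (value, index) => Math.floor((index + 1) * (lastPhotoIndex / (limit - 1))),
        ));
    }

    module.exports = pluckPhotos;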
@@ -117,7 +118,7 @@ async function scrapeScene(html, url, site) {
 	const { origin, pathname } = new URL(url);
 	const photos = await getPhotos(`${origin}${pathname}${lastPhotosUrl}`, site, url);
 
-	const stars = Number(document.querySelector('span[itemprop="average"]').textContent) / 2;
+	const stars = Math.floor(Number(document.querySelector('span[itemprop="average"]').textContent) / 2);
 	const rawTags = Array.from(document.querySelectorAll('.scene-details .categories a')).map(({ textContent }) => textContent);
 
 	const [channelSite, tags] = await Promise.all([
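The rating change only affects rounding: the span[itemprop="average"] value appears to be on a 10-point scale (hence the division by 2), and Math.floor now stores a whole number of stars instead of a fraction. For example, assuming a hypothetical average of 8.7:

    const average = 8.7;                   // hypothetical value read from span[itemprop="average"]
    const before = average / 2;            // 4.35
    const after = Math.floor(average / 2); // 4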
@@ -131,7 +132,7 @@ async function scrapeScene(html, url, site) {
 	]);
 
 	return {
-		url,
+		url: `${origin}${pathname}`,
 		title,
 		description,
 		actors,
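Returning origin + pathname instead of the raw url is the "removed query from stored Dogfart URLs" part of the commit message: those WHATWG URL parts exclude any query string or hash. For example, with a made-up URL:

    const url = 'https://www.example.com/tour/scenes/some-scene/?nats=affiliate-id#player'; // hypothetical
    const { origin, pathname } = new URL(url);
    console.log(`${origin}${pathname}`); // https://www.example.com/tour/scenes/some-scene/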