From e4399380d3e84ad469e16e40d53b990a9999a37d Mon Sep 17 00:00:00 2001
From: Niels Simenon
Date: Mon, 4 Nov 2019 06:04:43 +0100
Subject: [PATCH] Fixed photo plucker to prevent duplicate at end.

---
 src/scrapers/dogfart.js | 6 ++----
 1 file changed, 2 insertions(+), 4 deletions(-)

diff --git a/src/scrapers/dogfart.js b/src/scrapers/dogfart.js
index c21ccbb1..3870bbd8 100644
--- a/src/scrapers/dogfart.js
+++ b/src/scrapers/dogfart.js
@@ -27,11 +27,9 @@ async function getPhotos(albumUrl, site, siteUrl) {
   const lastPhotoPage = Array.from(document.querySelectorAll('.preview-image-container a')).slice(-1)[0].href;
   const lastPhotoIndex = parseInt(lastPhotoPage.match(/\d+.jpg/)[0], 10);
 
-  // dogfart has massive albums, pick 20 or specified photos: first, last and evenly inbetween
+  // dogfart has massive albums, pick 25 or specified number of photos: first, last and evenly inbetween
   const photoLimit = (site.network.parameters && site.network.parameters.photoLimit) || 25;
-  const photoIndexes = [1]
-    .concat(Array.from({ length: photoLimit - 2 }, (value, index) => Math.floor((index + 1) * (lastPhotoIndex / (photoLimit - 2)))))
-    .concat(lastPhotoIndex);
+  const photoIndexes = [1].concat(Array.from({ length: photoLimit - 1 }, (value, index) => Math.floor((index + 1) * (lastPhotoIndex / (photoLimit - 1)))));
 
   if (photoLimit > 25) {
     console.log(`${site.name}: Scraping ${photoLimit} album photos from ${siteUrl}, this may take some time...`);
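
Note: the sketch below illustrates the duplicate this patch removes. The old code generated a sequence whose last element was already lastPhotoIndex and then appended lastPhotoIndex again; the new code generates photoLimit - 1 evenly spaced indexes that end exactly at lastPhotoIndex. The photoLimit and lastPhotoIndex values here are illustrative stand-ins for what getPhotos() derives from the album page, not output from the scraper itself.

    // Illustrative values, not taken from a real album.
    const photoLimit = 25;
    const lastPhotoIndex = 240;

    // Old behaviour: the generated sequence already ends at lastPhotoIndex,
    // so the trailing .concat(lastPhotoIndex) duplicates the final index.
    const oldIndexes = [1]
      .concat(Array.from({ length: photoLimit - 2 }, (value, index) => Math.floor((index + 1) * (lastPhotoIndex / (photoLimit - 2)))))
      .concat(lastPhotoIndex);

    // New behaviour: photoLimit - 1 evenly spaced indexes whose last element
    // is exactly lastPhotoIndex, so no explicit concat of the last index is needed.
    const newIndexes = [1]
      .concat(Array.from({ length: photoLimit - 1 }, (value, index) => Math.floor((index + 1) * (lastPhotoIndex / (photoLimit - 1)))));

    console.log(oldIndexes.slice(-2)); // [ 240, 240 ] -> duplicate at the end
    console.log(newIndexes.slice(-2)); // [ 230, 240 ] -> ends once at the last photo
    console.log(newIndexes.length);    // 25, i.e. photoLimit photos in total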