Added unextracted property to keep paginating when extracting scenes.

DebaucheryLibrarian
2021-10-28 01:59:53 +02:00
parent 53357d4bd2
commit 0864154a0e
3 changed files with 23 additions and 7 deletions


@@ -95,8 +95,12 @@ function scrapeLatestX(data, site, filterChannel) {
 
 async function scrapeLatest(items, site, filterChannel) {
   const latestReleases = items.map(data => scrapeLatestX(data, site, filterChannel));
+  const extractedScenes = latestReleases.filter(Boolean);
 
-  return latestReleases.filter(Boolean);
+  return {
+    scenes: extractedScenes,
+    unextracted: latestReleases.length - extractedScenes.length,
+  };
 }
 
 function scrapeScene(data, url, _site, networkName) {
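Not part of the diff: a minimal sketch of how a caller could use the new return shape to keep paginating. It assumes fetchLatest (changed in the hunk below) passes scrapeLatest's { scenes, unextracted } result through unchanged; the fetchUntil helper and its target parameter are hypothetical and not taken from the other changed files.

// Hypothetical helper (not in this commit): keep requesting pages while extraction
// filters scenes out, using `unextracted` to tell a genuinely empty page (stop)
// apart from a page whose scenes were all filtered away (continue paginating).
async function fetchUntil(site, target, options) {
  const scenes = [];

  for (let page = 1; scenes.length < target; page += 1) {
    const { scenes: pageScenes, unextracted } = await fetchLatest(site, page, options);

    scenes.push(...pageScenes);

    if (pageScenes.length === 0 && unextracted === 0) {
      break; // the source has no more releases at all
    }
  }

  return scenes.slice(0, target);
}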
@@ -240,7 +244,7 @@ async function fetchLatest(site, page = 1, options) {
   const { session, instanceToken } = options.beforeNetwork || await getSession(site, options.parameters);
 
   const beforeDate = moment().add('1', 'day').format('YYYY-MM-DD');
-  const limit = 10;
+  const limit = 24;
   const apiUrl = site.parameters?.native || site.parameters?.extract
     ? `https://site-api.project1service.com/v2/releases?dateReleased=<${beforeDate}&limit=${limit}&offset=${limit * (page - 1)}&orderBy=-dateReleased&type=scene`
     : `https://site-api.project1service.com/v2/releases?collectionId=${siteId}&dateReleased=<${beforeDate}&limit=${limit}&offset=${limit * (page - 1)}&orderBy=-dateReleased&type=scene`;
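For reference (not part of the diff), the offset in the query string above is derived from the page number, so raising the limit from 10 to 24 also widens each pagination step:

// Offset arithmetic used in the apiUrl template literals above.
const limit = 24;
const offsetFor = page => limit * (page - 1); // page 1 -> 0, page 2 -> 24, page 3 -> 48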