Improved duplicate filtering, which now also applies to upcoming updates. Updated Gamma fetchLatest method parameters. Added shortcut for SFW mode.

2020-03-28 04:37:04 +01:00
parent 95d115b585
commit 6d9f96c5d5
10 changed files with 112 additions and 40 deletions

View File

@@ -112,6 +112,8 @@ async function scrapeRelease(baseRelease, sites, type = 'scene') {
}
try {
logger.verbose(`Fetching ${type} ${baseRelease.url}`);
+ const scrapedRelease = type === 'scene'
+ ? await scraper.fetchScene(baseRelease.url, site, baseRelease, null, include)
+ : await scraper.fetchMovie(baseRelease.url, site, baseRelease, null, include);
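The deep-scrape dispatch above assumes every scraper module exposes fetchScene and fetchMovie with the same argument order. A minimal sketch of a conforming scraper; the parameter names are illustrative, only the argument order is taken from the calls above:

// Hypothetical scraper module satisfying the interface used by scrapeRelease.
const exampleScraper = {
  async fetchScene(url, site, baseRelease, preData, include) {
    // Fetch and parse the scene page, merging over the pre-scraped data.
    return { ...baseRelease, url, deep: true };
  },
  async fetchMovie(url, site, baseRelease, preData, include) {
    // Same contract for movie pages.
    return { ...baseRelease, url, deep: true };
  },
};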

View File

@@ -136,7 +136,7 @@ async function scrapeApiReleases(json, site) {
];
}
- release.movie = `${site.url}/en/movie/${scene.url_movie_title}/${scene.movie_id}`;
+ // release.movie = `${site.url}/en/movie/${scene.url_movie_title}/${scene.movie_id}`;
return release;
}).filter(Boolean);
@@ -408,7 +408,7 @@ async function fetchApiCredentials(referer, site) {
return getApiUrl(appId, apiKey);
}
- async function fetchApiLatest(site, page = 1, upcoming = false) {
+ async function fetchApiLatest(site, page = 1, preData, include, upcoming = false) {
const referer = site.parameters?.referer || `${site.parameters?.networkReferer ? site.network.url : site.url}/en/videos`;
const { apiUrl } = await fetchApiCredentials(referer, site);
@@ -433,8 +433,8 @@ async function fetchApiLatest(site, page = 1, upcoming = false) {
return [];
}
- async function fetchApiUpcoming(site) {
- return fetchApiLatest(site, 1, true);
+ async function fetchApiUpcoming(site, page = 1, preData, include) {
+ return fetchApiLatest(site, page, preData, include, true);
}
function getLatestUrl(site, page) {
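The widened signature brings fetchApiLatest in line with the generic fetchLatest(site, page, preData, include) contract used elsewhere in this commit, so fetchApiUpcoming can forward paging and context instead of being pinned to page 1, with the trailing flag selecting the upcoming feed. A hedged usage sketch; preData and include are simply whatever the caller threads through:

// Both entry points now share one code path; only the trailing flag differs.
const latest = await fetchApiLatest(site, 2, preData, include);
const upcoming = await fetchApiUpcoming(site, 1, preData, include); // delegates with upcoming = true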

View File

@@ -110,9 +110,27 @@ function attachReleaseIds(releases, storedReleases) {
return releasesWithId;
}
- async function extractUniqueReleases(releases) {
+ function filterInternalDuplicateReleases(releases) {
+ const releasesBySiteIdAndEntryId = releases.reduce((acc, release) => {
+ if (!acc[release.site.id]) {
+ acc[release.site.id] = {};
+ }
+ acc[release.site.id][release.entryId] = release;
+ return acc;
+ }, {});
+ return Object.values(releasesBySiteIdAndEntryId)
+ .map(siteReleases => Object.values(siteReleases))
+ .flat();
+ }
+ async function filterDuplicateReleases(releases) {
+ const internalUniqueReleases = filterInternalDuplicateReleases(releases);
const duplicateReleaseEntries = await knex('releases')
- .whereIn(['entry_id', 'site_id'], releases.map(release => [release.entryId, release.site.id]));
+ .whereIn(['entry_id', 'site_id'], internalUniqueReleases.map(release => [release.entryId, release.site.id]));
const duplicateReleasesBySiteIdAndEntryId = duplicateReleaseEntries.reduce((acc, release) => {
if (!acc[release.site_id]) acc[release.site_id] = {};
@@ -121,8 +139,8 @@ async function extractUniqueReleases(releases) {
return acc;
}, {});
- const duplicateReleases = releases.filter(release => duplicateReleasesBySiteIdAndEntryId[release.site.id]?.[release.entryId]);
- const uniqueReleases = releases.filter(release => !duplicateReleasesBySiteIdAndEntryId[release.site.id]?.[release.entryId]);
+ const duplicateReleases = internalUniqueReleases.filter(release => duplicateReleasesBySiteIdAndEntryId[release.site.id]?.[release.entryId]);
+ const uniqueReleases = internalUniqueReleases.filter(release => !duplicateReleasesBySiteIdAndEntryId[release.site.id]?.[release.entryId]);
return {
uniqueReleases,
@@ -138,21 +156,25 @@ async function storeReleases(releases) {
const releasesWithStudios = await attachStudios(releasesWithSites);
// uniqueness is site ID + entry ID, filter uniques after adding sites
- const { uniqueReleases, duplicateReleaseEntries } = await extractUniqueReleases(releasesWithStudios);
+ const { uniqueReleases, duplicateReleases, duplicateReleaseEntries } = await filterDuplicateReleases(releasesWithStudios);
const curatedNewReleaseEntries = uniqueReleases.map(release => curateReleaseEntry(release, batchId));
+ // console.log(curatedNewReleaseEntries);
const storedReleases = await knex('releases').insert(curatedNewReleaseEntries).returning('*');
+ // TODO: update duplicate releases
const storedReleaseEntries = Array.isArray(storedReleases) ? storedReleases : [];
- const releasesWithId = attachReleaseIds(releases, [].concat(storedReleaseEntries, duplicateReleaseEntries));
+ const releasesWithId = attachReleaseIds([].concat(uniqueReleases, duplicateReleases), [].concat(storedReleaseEntries, duplicateReleaseEntries));
await Promise.all([
associateTags(releasesWithId),
associateActors(releasesWithId),
]);
logger.info(`Stored ${storedReleaseEntries.length} releases`);
return releasesWithId;
}
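Deduplication now runs in two stages: filterInternalDuplicateReleases collapses duplicates within the scraped batch itself (keyed by site ID, then entry ID), and only the survivors are checked against rows already stored via the composite whereIn query. A self-contained sketch of stage one, using a single flattened key instead of the nested map; the data shapes are illustrative:

// Runnable illustration of in-batch deduplication by site ID + entry ID.
const batch = [
  { entryId: 'a', site: { id: 1 } },
  { entryId: 'a', site: { id: 1 } }, // in-batch duplicate, dropped in stage one
  { entryId: 'b', site: { id: 1 } },
];

const uniqueInBatch = Object.values(batch.reduce((acc, release) => {
  acc[`${release.site.id}:${release.entryId}`] = release; // later entries win, as in the reduce above
  return acc;
}, {}));

console.log(uniqueInBatch.length); // 2; stage two checks these against the releases table

Filtering the batch first also keeps the whereIn tuple list small, and attachReleaseIds now receives uniqueReleases plus duplicateReleases rather than the raw batch, so tag and actor associations are only built for releases that resolve to a database row.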

View File

@@ -54,7 +54,7 @@ function buildReleaseTagAssociations(releases, tagIdsBySlug, siteTagIdsBySiteId)
? releaseTags // obsolete scraper returned pre-matched tags
: releaseTags.map(tag => tagIdsBySlug[slugify(tag)]);
- return [...new Set(
+ const tags = [...new Set(
// filter duplicates and empties
releaseTagIds
.concat(siteTagIds)
@@ -64,14 +64,17 @@ function buildReleaseTagAssociations(releases, tagIdsBySlug, siteTagIdsBySiteId)
release_id: release.id,
tag_id: tagId,
}));
+ return tags;
})
.flat();
return tagAssociations;
}
- async function extractUniqueAssociations(tagAssociations) {
- const duplicateAssociations = await knex('releases_tags').whereIn(['release_id', 'tag_id'], tagAssociations.map(association => [association.release_id, association.tag_id]));
+ async function filterUniqueAssociations(tagAssociations) {
+ const duplicateAssociations = await knex('releases_tags')
+ .whereIn(['release_id', 'tag_id'], tagAssociations.map(association => [association.release_id, association.tag_id]));
const duplicateAssociationsByReleaseIdAndTagId = duplicateAssociations.reduce((acc, association) => {
if (!acc[association.release_id]) {
@@ -94,7 +97,7 @@ async function associateTags(releases) {
const siteTagIdsBySiteId = await getSiteTags(releases);
const tagAssociations = buildReleaseTagAssociations(releases, tagIdsBySlug, siteTagIdsBySiteId);
- const uniqueAssociations = await extractUniqueAssociations(tagAssociations);
+ const uniqueAssociations = await filterUniqueAssociations(tagAssociations);
await knex('releases_tags').insert(uniqueAssociations);
}
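The tuple form of whereIn compiles to a row-value IN list, roughly WHERE (release_id, tag_id) IN ((10, 3), (10, 7), ...), which PostgreSQL supports. The hunk ends before filterUniqueAssociations returns; a plausible completion under that assumption, mirroring the duplicate-release filter earlier in this commit but simplified to a flat Set where the real code appears to build a nested map:

// Sketch only: assumes the file's knex instance is in scope.
async function filterUniqueAssociations(tagAssociations) {
  const duplicateAssociations = await knex('releases_tags')
    .whereIn(['release_id', 'tag_id'], tagAssociations.map(association => [association.release_id, association.tag_id]));

  // Index existing rows by a composite key, then keep only unseen pairs.
  const seen = new Set(duplicateAssociations.map(association => `${association.release_id}:${association.tag_id}`));

  return tagAssociations.filter(association => !seen.has(`${association.release_id}:${association.tag_id}`));
}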

View File

@@ -25,7 +25,7 @@ const afterDate = (() => {
.toDate();
})();
- async function extractUniqueReleases(latestReleases, accReleases) {
+ async function filterUniqueReleases(latestReleases, accReleases) {
const latestReleaseIdentifiers = latestReleases
.map(release => [release.site.id, release.entryId]);
@@ -75,13 +75,11 @@ function needNextPage(uniqueReleases, pageAccReleases) {
return pageAccReleases.length <= argv.nullDateLimit;
}
- async function scrapeLatestReleases(scraper, site, preData) {
- if (!scraper.fetchLatest) {
- return [];
- }
+ async function scrapeReleases(scraper, site, preData, upcoming = false) {
const scrapePage = async (page = 1, accReleases = []) => {
- const latestReleases = await scraper.fetchLatest(site, page, preData, include);
+ const latestReleases = upcoming
+ ? await scraper.fetchUpcoming(site, page, preData, include)
+ : await scraper.fetchLatest(site, page, preData, include);
if (!Array.isArray(latestReleases)) {
// scraper is unable to fetch the releases and returned a HTTP code or null
@@ -98,11 +96,11 @@ async function scrapeLatestReleases(scraper, site, preData) {
const uniqueReleases = argv.redownload
? latestReleasesWithSite
- : await extractUniqueReleases(latestReleasesWithSite, accReleases);
+ : await filterUniqueReleases(latestReleasesWithSite, accReleases);
const pageAccReleases = accReleases.concat(uniqueReleases);
- logger.verbose(`Scraped '${site.name}' (${site.network.name}) page ${page}, found ${uniqueReleases.length} unique releases`);
+ logger.verbose(`Scraped '${site.name}' (${site.network.name}) ${upcoming ? 'upcoming' : 'latest'} page ${page}, found ${uniqueReleases.length} unique updates`);
if (needNextPage(uniqueReleases, pageAccReleases)) {
return scrapePage(page + 1, pageAccReleases);
@@ -111,37 +109,45 @@ async function scrapeLatestReleases(scraper, site, preData) {
return pageAccReleases;
};
- const releases = await scrapePage(1, []);
+ const rawReleases = await scrapePage(1, []);
+ const releases = upcoming
+ ? rawReleases.map(rawRelease => ({ ...rawRelease, upcoming: true }))
+ : rawReleases;
if (argv.last) {
return releases.slice(0, argv.last);
}
if (releases.every(release => release.date)) {
- return releases
- .filter(release => moment(release.date).isAfter(afterDate));
+ return releases.filter(release => moment(release.date).isAfter(afterDate));
}
return releases.slice(0, argv.nullDateLimit);
}
+ async function scrapeLatestReleases(scraper, site, preData) {
+ if (!scraper.fetchLatest) {
+ return [];
+ }
+ try {
+ return await scrapeReleases(scraper, site, preData, false);
+ } catch (error) {
+ logger.warn(`Failed to scrape latest updates for '${site.slug}' (${site.network.slug})`);
+ }
+ return [];
+ }
async function scrapeUpcomingReleases(scraper, site, preData) {
if (!scraper.fetchUpcoming) {
return [];
}
try {
- const upcomingReleases = await scraper.fetchUpcoming(site, 1, preData, include);
- if (upcomingReleases) {
- return upcomingReleases.map(release => ({
- ...release,
- site,
- upcoming: true,
- }));
- }
+ return await scrapeReleases(scraper, site, preData, true);
} catch (error) {
- logger.warn(`Failed to scrape upcoming releases for '${site.slug}' (${site.network.slug})`);
+ logger.warn(`Failed to scrape upcoming updates for '${site.slug}' (${site.network.slug})`);
}
return [];
@@ -157,6 +163,8 @@ async function scrapeSiteReleases(scraper, site, preData) {
: [],
]);
+ logger.info(`Fetching ${latestReleases.length} latest and ${upcomingReleases.length} upcoming updates for '${site.name}' (${site.network.name})`);
return [...latestReleases, ...upcomingReleases];
}
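Both passes now run through the shared scrapeReleases, differing only in the upcoming flag and which fetch method they call, and the per-release upcoming: true marking moved out of scrapeUpcomingReleases into the shared pagination path. A hedged sketch of the surrounding Promise.all implied by the `: [],` branches above; the argv flag names are assumptions:

// Latest and upcoming passes run concurrently; either can be switched off.
const [latestReleases, upcomingReleases] = await Promise.all([
  argv.latest ? scrapeLatestReleases(scraper, site, preData) : [],
  argv.upcoming ? scrapeUpcomingReleases(scraper, site, preData) : [],
]);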