(Temporarily) removed studio filter from entity query for performance reasons.

DebaucheryLibrarian 2020-09-18 22:43:45 +02:00
parent 88c16e096a
commit 3789ef51f2
4 changed files with 29 additions and 35 deletions


@@ -79,8 +79,6 @@ function initEntitiesActions(store, router) {
           orderBy: $orderBy
           filter: {
             and: [
-              {
-                or: [
                   {
                     entity: {
                       or: [
@@ -104,13 +102,6 @@ function initEntitiesActions(store, router) {
                       ]
                     }
                   }
-                  {
-                    studio: {
-                      slug: { equalTo: $entitySlug },
-                    }
-                  }
-                ]
-              }
               {
                 or: [
                   {
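
Taken together with the first hunk, only the entity branch of the filter survives this commit. Reconstructed from the context lines above (the inner conditions are elided in this diff), the and block now has roughly this shape:

filter: {
  and: [
    {
      entity: {
        or: [
          # entity conditions elided in this diff
        ]
      }
    }
    {
      or: [
        # remaining conditions elided in this diff
      ]
    }
  ]
}

The commit message marks the removal as temporary, so the deleted studio branch presumably returns once the underlying query performs acceptably.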


@@ -1,10 +1,10 @@
 'use strict';

-const bhttp = require('bhttp');
 const { JSDOM } = require('jsdom');
 const cheerio = require('cheerio');
 const moment = require('moment');

+const http = require('../utils/http');
 const slugify = require('../utils/slugify');

 function extractTitle(originalTitle) {
@@ -164,7 +164,7 @@ async function scrapeProfile(html, _url, actorName) {
 }

 async function fetchLatest(site, page = 1) {
-  const res = await bhttp.get(`${site.url}/new-videos/${page}`);
+  const res = await http.get(`${site.url}/new-videos/${page}`);

   return scrapeAll(res.body.toString(), site);
 }
@@ -174,20 +174,20 @@ async function fetchScene(url, site) {
   // TODO: fall back on screenshots when gallery is not available
   const res = useGallery
-    ? await bhttp.get(`${url}/gallery#gallery`)
-    : await bhttp.get(`${url}/screenshots#screenshots`);
+    ? await http.get(`${url}/gallery#gallery`)
+    : await http.get(`${url}/screenshots#screenshots`);

   return scrapeScene(res.body.toString(), url, site, useGallery);
 }

 async function fetchProfile({ name: actorName }) {
-  const res = await bhttp.get(`https://www.legalporno.com/api/autocomplete/search?q=${actorName.replace(' ', '+')}`);
+  const res = await http.get(`https://www.legalporno.com/api/autocomplete/search?q=${actorName.replace(' ', '+')}`);
   const data = res.body;

   const result = data.terms.find(item => item.type === 'model');

   if (result) {
-    const bioRes = await bhttp.get(result.url);
+    const bioRes = await http.get(result.url);
     const html = bioRes.body.toString();

     return scrapeProfile(html, result.url, actorName);
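
This commit only touches the call sites; the new ../utils/http module itself is outside the diff. A minimal sketch of what it could export to stay drop-in compatible with the bhttp calls it replaces (the wrapper body is an assumption; the real module may add session handling, rate limiting, or logging):

// utils/http.js (hypothetical sketch, not the module from this commit)
'use strict';

const bhttp = require('bhttp');

async function get(url, options = {}) {
  // delegate to bhttp so callers can keep reading res.body unchanged
  return bhttp.get(url, options);
}

module.exports = { get };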


@@ -299,7 +299,6 @@ async function storeScenes(releases) {
   const curatedNewReleaseEntries = await Promise.all(uniqueReleases.map(release => curateReleaseEntry(release, batchId)));
   const storedReleases = await bulkInsert('releases', curatedNewReleaseEntries);

-  // TODO: update duplicate releases
   const storedReleaseEntries = Array.isArray(storedReleases) ? storedReleases : [];

   const releasesWithId = attachReleaseIds([].concat(uniqueReleases, duplicateReleases), [].concat(storedReleaseEntries, duplicateReleaseEntries));
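
Neither bulkInsert nor attachReleaseIds appears in this diff. A hypothetical sketch of the matching step, assuming stored rows and releases can be paired by entity and entry IDs (the field names are assumptions, not taken from this commit):

// hypothetical sketch of attachReleaseIds; the real implementation is not shown here
function attachReleaseIds(releases, storedReleases) {
  return releases
    .map((release) => {
      const stored = storedReleases.find(entry => entry.entity_id === release.entity.id && entry.entry_id === release.entryId);

      return stored ? { ...release, id: stored.id } : release;
    })
    .filter(release => release.id);
}

The Array.isArray guard above suggests bulkInsert can resolve to something other than an array of rows, for example when the batch is empty, hence the fallback to [].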


@@ -19,7 +19,11 @@ async function bulkUpsert(table, items, conflict, update = true, chunkSize) {
   `);

   return knex.transaction(async (transaction) => {
-    const queries = chunk(items, chunkSize)
+    const chunked = chunk(items, chunkSize);
+
+    // console.log(items.length, chunkSize, chunked.length, chunked[0]?.length);
+
+    const queries = chunked
       .map(chunkItems => knex.raw(updated || ':query RETURNING *;', {
         query: knex(table).insert(chunkItems).transacting(transaction),
       }));
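
The chunk helper is imported outside this hunk. A lodash-style stand-in, with the default batch size as an assumption (the project may simply use lodash's chunk):

// hypothetical stand-in for chunk(); the import is not visible in this diff
function chunk(items, size = 1000) {
  return items.reduce((batches, item, index) => {
    if (index % size === 0) {
      batches.push([]);
    }

    batches[batches.length - 1].push(item);

    return batches;
  }, []);
}

Each batch then becomes its own INSERT ... RETURNING * statement inside the shared transaction, and the commented-out log line can be re-enabled to verify the batch math.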