(Temporarily) removed studio filter from entity query for performance reasons.

This commit is contained in:
DebaucheryLibrarian 2020-09-18 22:43:45 +02:00
parent 88c16e096a
commit 3789ef51f2
4 changed files with 29 additions and 35 deletions

View File

@@ -80,36 +80,27 @@ function initEntitiesActions(store, router) {
filter: {
and: [
{
or: [
{
entity: {
or: [
{
entity: {
or: [
{
slug: { equalTo: $entitySlug }
},
{
parent: {
slug: { equalTo: $entitySlug }
type: { equalTo: $entityType }
}
},
{
parent: {
parent: {
slug: { equalTo: $entitySlug }
},
{
parent: {
slug: { equalTo: $entitySlug }
type: { equalTo: $entityType }
}
},
{
parent: {
parent: {
slug: { equalTo: $entitySlug }
type: { equalTo: $entityType }
}
}
type: { equalTo: $entityType }
}
]
}
}
}
{
studio: {
slug: { equalTo: $entitySlug },
}
}
]
]
}
}
{
or: [

View File

@@ -1,10 +1,10 @@
'use strict';
const bhttp = require('bhttp');
const { JSDOM } = require('jsdom');
const cheerio = require('cheerio');
const moment = require('moment');
const http = require('../utils/http');
const slugify = require('../utils/slugify');
function extractTitle(originalTitle) {
@@ -164,7 +164,7 @@ async function scrapeProfile(html, _url, actorName) {
}
async function fetchLatest(site, page = 1) {
const res = await bhttp.get(`${site.url}/new-videos/${page}`);
const res = await http.get(`${site.url}/new-videos/${page}`);
return scrapeAll(res.body.toString(), site);
}
@@ -174,20 +174,20 @@ async function fetchScene(url, site) {
// TODO: fall back on screenshots when gallery is not available
const res = useGallery
? await bhttp.get(`${url}/gallery#gallery`)
: await bhttp.get(`${url}/screenshots#screenshots`);
? await http.get(`${url}/gallery#gallery`)
: await http.get(`${url}/screenshots#screenshots`);
return scrapeScene(res.body.toString(), url, site, useGallery);
}
async function fetchProfile({ name: actorName }) {
const res = await bhttp.get(`https://www.legalporno.com/api/autocomplete/search?q=${actorName.replace(' ', '+')}`);
const res = await http.get(`https://www.legalporno.com/api/autocomplete/search?q=${actorName.replace(' ', '+')}`);
const data = res.body;
const result = data.terms.find(item => item.type === 'model');
if (result) {
const bioRes = await bhttp.get(result.url);
const bioRes = await http.get(result.url);
const html = bioRes.body.toString();
return scrapeProfile(html, result.url, actorName);

View File

@@ -299,7 +299,6 @@ async function storeScenes(releases) {
const curatedNewReleaseEntries = await Promise.all(uniqueReleases.map(release => curateReleaseEntry(release, batchId)));
const storedReleases = await bulkInsert('releases', curatedNewReleaseEntries);
// TODO: update duplicate releases
const storedReleaseEntries = Array.isArray(storedReleases) ? storedReleases : [];
const releasesWithId = attachReleaseIds([].concat(uniqueReleases, duplicateReleases), [].concat(storedReleaseEntries, duplicateReleaseEntries));

View File

@@ -19,7 +19,11 @@ async function bulkUpsert(table, items, conflict, update = true, chunkSize) {
`);
return knex.transaction(async (transaction) => {
const queries = chunk(items, chunkSize)
const chunked = chunk(items, chunkSize);
// console.log(items.length, chunkSize, chunked.length, chunked[0]?.length);
const queries = chunked
.map(chunkItems => knex.raw(updated || ':query RETURNING *;', {
query: knex(table).insert(chunkItems).transacting(transaction),
}));