forked from DebaucheryLibrarian/traxxx

commit 3789ef51f2 (parent 88c16e096a)

(Temporarily) removed studio filter from entity query for performance reasons.
@@ -80,36 +80,27 @@ function initEntitiesActions(store, router) {
 filter: {
     and: [
         {
-            or: [
-                {
-                    entity: {
-                        or: [
-                            {
-                                slug: { equalTo: $entitySlug }
-                            },
-                            {
-                                parent: {
-                                    slug: { equalTo: $entitySlug }
-                                    type: { equalTo: $entityType }
-                                }
-                            },
-                            {
-                                parent: {
-                                    parent: {
-                                        slug: { equalTo: $entitySlug }
-                                        type: { equalTo: $entityType }
-                                    }
-                                }
-                            }
-                        ]
-                    }
-                }
-                {
-                    studio: {
-                        slug: { equalTo: $entitySlug },
-                    }
-                }
-            ]
+            entity: {
+                or: [
+                    {
+                        slug: { equalTo: $entitySlug }
+                    },
+                    {
+                        parent: {
+                            slug: { equalTo: $entitySlug }
+                            type: { equalTo: $entityType }
+                        }
+                    },
+                    {
+                        parent: {
+                            parent: {
+                                slug: { equalTo: $entitySlug }
+                                type: { equalTo: $entityType }
+                            }
+                        }
+                    }
+                ]
+            }
         }
         {
             or: [
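In effect, the old filter's top-level or matched a release either through the entity hierarchy (own slug, parent, or grandparent) or through its studio slug; the new filter keeps only the entity branch, so the query no longer unions across the entity and studio relations. As a reading aid, the same before/after shape as plain JavaScript objects, with example values standing in for the GraphQL variables $entitySlug and $entityType (a sketch, not code from the repository):

'use strict';

// Example values standing in for the query variables.
const entitySlug = 'some-entity';
const entityType = 'channel';

// Before: releases match the entity hierarchy OR the studio.
const filterBefore = {
    and: [{
        or: [
            { entity: { or: [{ slug: { equalTo: entitySlug } }] } }, // parent branches elided
            { studio: { slug: { equalTo: entitySlug } } },
        ],
    }],
};

// After: only the entity hierarchy is considered; the studio
// branch (and the or wrapping it) is gone.
const filterAfter = {
    and: [{
        entity: {
            or: [
                { slug: { equalTo: entitySlug } },
                { parent: { slug: { equalTo: entitySlug }, type: { equalTo: entityType } } },
                { parent: { parent: { slug: { equalTo: entitySlug }, type: { equalTo: entityType } } } },
            ],
        },
    }],
};

console.log(JSON.stringify(filterBefore, null, 2), JSON.stringify(filterAfter, null, 2));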
@@ -1,10 +1,10 @@
 'use strict';
 
-const bhttp = require('bhttp');
 const { JSDOM } = require('jsdom');
 const cheerio = require('cheerio');
 const moment = require('moment');
 
+const http = require('../utils/http');
 const slugify = require('../utils/slugify');
 
 function extractTitle(originalTitle) {
@@ -164,7 +164,7 @@ async function scrapeProfile(html, _url, actorName) {
 }
 
 async function fetchLatest(site, page = 1) {
-    const res = await bhttp.get(`${site.url}/new-videos/${page}`);
+    const res = await http.get(`${site.url}/new-videos/${page}`);
 
     return scrapeAll(res.body.toString(), site);
 }
@@ -174,20 +174,20 @@ async function fetchScene(url, site) {
 
     // TODO: fall back on screenshots when gallery is not available
     const res = useGallery
-        ? await bhttp.get(`${url}/gallery#gallery`)
-        : await bhttp.get(`${url}/screenshots#screenshots`);
+        ? await http.get(`${url}/gallery#gallery`)
+        : await http.get(`${url}/screenshots#screenshots`);
 
     return scrapeScene(res.body.toString(), url, site, useGallery);
 }
 
 async function fetchProfile({ name: actorName }) {
-    const res = await bhttp.get(`https://www.legalporno.com/api/autocomplete/search?q=${actorName.replace(' ', '+')}`);
+    const res = await http.get(`https://www.legalporno.com/api/autocomplete/search?q=${actorName.replace(' ', '+')}`);
     const data = res.body;
 
     const result = data.terms.find(item => item.type === 'model');
 
     if (result) {
-        const bioRes = await bhttp.get(result.url);
+        const bioRes = await http.get(result.url);
         const html = bioRes.body.toString();
 
         return scrapeProfile(html, result.url, actorName);
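These hunks route all of the scraper's outbound requests through the shared ../utils/http module instead of calling bhttp directly. The wrapper itself is not part of this commit; the call sites above only depend on an async get(url) that resolves to a bhttp-style response exposing body (a Buffer for HTML pages, a parsed object for the JSON autocomplete endpoint). A minimal sketch of that interface, assuming plain delegation to bhttp — whatever the real module adds on top (rate limiting, shared headers) is not visible in this diff:

'use strict';

const bhttp = require('bhttp');

// Stand-in for ../utils/http as the scraper uses it: async get(url)
// resolving to a bhttp-style response whose `body` is a Buffer for
// HTML pages and a parsed object for JSON endpoints. Assumed shape
// only; the real wrapper is not shown in this commit.
async function get(url, options = {}) {
    const res = await bhttp.get(url, options);

    return res;
}

module.exports = { get };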
@@ -299,7 +299,6 @@ async function storeScenes(releases) {
     const curatedNewReleaseEntries = await Promise.all(uniqueReleases.map(release => curateReleaseEntry(release, batchId)));
 
     const storedReleases = await bulkInsert('releases', curatedNewReleaseEntries);
-    // TODO: update duplicate releases
 
     const storedReleaseEntries = Array.isArray(storedReleases) ? storedReleases : [];
     const releasesWithId = attachReleaseIds([].concat(uniqueReleases, duplicateReleases), [].concat(storedReleaseEntries, duplicateReleaseEntries));
@@ -19,7 +19,11 @@ async function bulkUpsert(table, items, conflict, update = true, chunkSize) {
     `);
 
     return knex.transaction(async (transaction) => {
-        const queries = chunk(items, chunkSize)
+        const chunked = chunk(items, chunkSize);
+
+        // console.log(items.length, chunkSize, chunked.length, chunked[0]?.length);
+
+        const queries = chunked
             .map(chunkItems => knex.raw(updated || ':query RETURNING *;', {
                 query: knex(table).insert(chunkItems).transacting(transaction),
             }));
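For context on the refactor above: chunk(items, chunkSize) splits the rows into fixed-size batches, each of which becomes a single INSERT ... RETURNING * statement inside the transaction, keeping every statement within the per-query parameter limits databases impose. The helper is imported elsewhere in the file; a minimal lodash-style equivalent, to show what the commented-out log line would inspect (the default size here is illustrative):

// Minimal equivalent of the chunk() helper used above (the real one
// is imported elsewhere in the file): splits items into arrays of at
// most `size` elements, preserving order. Default size is illustrative.
function chunk(items, size = 1000) {
    const chunks = [];

    for (let i = 0; i < items.length; i += size) {
        chunks.push(items.slice(i, i + size));
    }

    return chunks;
}

// chunk([1, 2, 3, 4, 5], 2) => [[1, 2], [3, 4], [5]]
// chunked.length is then the number of batches, and chunked[0]?.length
// the size of the first batch, as in the commented console.log.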