forked from DebaucheryLibrarian/traxxx
Removed upsert from storeReleases, observing effects; fixes '?' being inserted as '.'. Removed query string from stored Dogfart URLs.
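The core of the change: the hand-rolled upsert (a compiled INSERT string re-run through knex.raw() with an ON CONFLICT clause) is swapped for knex's parameterized insert with .returning('*'). Below is a minimal before/after sketch, assuming a Postgres-backed knex instance; the connection config and the insertRelease wrapper are illustrative, not part of the commit.

'use strict';

// Illustrative connection only; the real config lives elsewhere in the project.
const knex = require('knex')({ client: 'pg', connection: process.env.DATABASE_URL });

async function insertRelease(curatedRelease) {
  // Old approach (removed by this commit): compile the INSERT to a string and
  // re-run it through knex.raw() with an appended ON CONFLICT clause. knex.raw()
  // treats "?" as a binding placeholder, which the commit message links to
  // literal "?" characters ending up stored as ".".
  // const releaseQuery = `${knex('releases').insert(curatedRelease).toString()} ON CONFLICT DO NOTHING RETURNING *`;
  // const releaseEntry = await knex.raw(releaseQuery);
  // return releaseEntry.rows[0];

  // New approach: a plain parameterized insert. On Postgres, .returning('*')
  // resolves to an array of inserted rows rather than a pg result with .rows.
  const [releaseEntry] = await knex('releases')
    .insert(curatedRelease)
    .returning('*');

  return releaseEntry;
}

module.exports = insertRelease;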
@@ -206,34 +206,41 @@ async function storeReleases(releases = []) {
       description: release.description,
       // director: release.director,
       duration: release.duration,
       // photos: release.photos ? release.photos.length : 0,
       likes: release.rating && release.rating.likes,
       dislikes: release.rating && release.rating.dislikes,
-      rating: release.rating && release.rating.stars,
+      rating: release.rating && Math.floor(release.rating.stars),
       deep: argv.deep,
     };

+    /*
     const releaseQuery = `${knex('releases').insert(curatedRelease).toString()} ON CONFLICT DO NOTHING RETURNING *`;
     const releaseEntry = await knex.raw(releaseQuery);
+    */

-    if (releaseEntry.rows.length > 0) {
-      console.log(`Stored (${release.site.name}, ${releaseEntry.rows[0].id}) "${release.title}"`);
+    const releaseEntries = await knex('releases')
+      .insert(curatedRelease)
+      .returning('*');
+
+    if (releaseEntries.length) {
+      const releaseEntry = releaseEntries[0];
+
+      console.log(`Stored (${release.site.name}, ${releaseEntry.id}) "${release.title}"`);

       if (release.poster || (release.photos && release.photos.length)) {
-        await fs.mkdir(path.join(config.photoPath, release.site.slug, releaseEntry.rows[0].id.toString()), { recursive: true });
+        await fs.mkdir(path.join(config.photoPath, release.site.slug, releaseEntry.id.toString()), { recursive: true });
       }

       await Promise.all([
         release.actors && release.actors.length > 0
-          ? storeActors(release, releaseEntry.rows[0]) : Promise.resolve(),
+          ? storeActors(release, releaseEntry) : Promise.resolve(),
         release.tags && release.tags.length > 0
-          ? storeTags(release, releaseEntry.rows[0]) : Promise.resolve(),
+          ? storeTags(release, releaseEntry) : Promise.resolve(),
         release.photos && release.photos.length > 0
-          ? storePhotos(release, releaseEntry.rows[0]) : Promise.resolve(),
+          ? storePhotos(release, releaseEntry) : Promise.resolve(),
         release.poster
-          ? storePoster(release, releaseEntry.rows[0]) : Promise.resolve(),
+          ? storePoster(release, releaseEntry) : Promise.resolve(),
         release.trailer
-          ? storeTrailer(release, releaseEntry.rows[0]) : Promise.resolve(),
+          ? storeTrailer(release, releaseEntry) : Promise.resolve(),
       ]);

       return;
@@ -8,6 +8,7 @@ const moment = require('moment');
 const knex = require('knex');

 const { matchTags } = require('../tags');
+const pluckPhotos = require('../utils/pluck-photos');

 async function getPhoto(url) {
   const res = await bhttp.get(url);
@@ -29,7 +30,7 @@ async function getPhotos(albumUrl, site, siteUrl) {

   // dogfart has massive albums, pick 25 or specified number of photos: first, last and evenly inbetween
   const photoLimit = (site.network.parameters && site.network.parameters.photoLimit) || 25;
-  const photoIndexes = [1].concat(Array.from({ length: photoLimit - 1 }, (value, index) => Math.floor((index + 1) * (lastPhotoIndex / (photoLimit - 1)))));
+  const photoIndexes = pluckPhotos(lastPhotoIndex, photoLimit);

   if (photoLimit > 25) {
     console.log(`${site.name}: Scraping ${photoLimit} album photos from ${siteUrl}, this may take some time...`);
@@ -117,7 +118,7 @@ async function scrapeScene(html, url, site) {
   const { origin, pathname } = new URL(url);
   const photos = await getPhotos(`${origin}${pathname}${lastPhotosUrl}`, site, url);

-  const stars = Number(document.querySelector('span[itemprop="average"]').textContent) / 2;
+  const stars = Math.floor(Number(document.querySelector('span[itemprop="average"]').textContent) / 2);
   const rawTags = Array.from(document.querySelectorAll('.scene-details .categories a')).map(({ textContent }) => textContent);

   const [channelSite, tags] = await Promise.all([
@@ -131,7 +132,7 @@ async function scrapeScene(html, url, site) {
   ]);

   return {
-    url,
+    url: `${origin}${pathname}`,
     title,
     description,
     actors,
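The url change above is the "removed query string from stored Dogfart URLs" part of the commit message: only origin plus pathname are kept. A small sketch of the effect, using a made-up scene URL:

'use strict';

const { URL } = require('url');

// Hypothetical scene URL for illustration; any tracking query string is dropped.
const sceneUrl = 'https://dogfartnetwork.com/tour/scenes/example-scene/?nats=abc123';
const { origin, pathname } = new URL(sceneUrl);

console.log(`${origin}${pathname}`);
// => https://dogfartnetwork.com/tour/scenes/example-scene/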
src/utils/pluck-photos.js (new file, 8 lines)
@@ -0,0 +1,8 @@
+'use strict';
+
+// pick {photoLimit} photos evenly distributed photos from a set with {photoTotal} photos, return array of indexes starting at 1
+function pluckPhotos(photoTotal, photoLimit) {
+  return [1].concat(Array.from({ length: photoLimit - 1 }, (value, index) => Math.floor((index + 1) * (photoTotal / (photoLimit - 1)))));
+}
+
+module.exports = pluckPhotos;
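A quick usage sketch of the new helper, assuming it is run from the repository root; the printed values are simply what the formula yields:

'use strict';

const pluckPhotos = require('./src/utils/pluck-photos');

// 5 of 10 photos: the first index, the last index, and roughly even steps in between.
console.log(pluckPhotos(10, 5)); // [ 1, 2, 5, 7, 10 ]

// The Dogfart scraper passes the album's last photo index and a limit of 25,
// unless site.network.parameters.photoLimit overrides it.
console.log(pluckPhotos(200, 25).length); // 25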