Fixed photo query in Aziana scraper.

DebaucheryLibrarian 2023-08-03 23:24:23 +02:00
parent 2ebc2d441f
commit b287f5c2db
4 changed files with 15 additions and 3 deletions

View File

@@ -5,7 +5,8 @@ exports.up = async (knex) => {
     table.integer('release_id')
       .notNullable()
       .references('id')
-      .inTable('releases');
+      .inTable('releases')
+      .onDelete('cascade');
 
     table.text('media_id')
       .notNullable()
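
For context, a minimal self-contained sketch of a migration using this pattern; the table name below is an illustrative placeholder (the file name is not shown in this view), and only the onDelete('cascade') addition comes from the commit itself:

// Hypothetical example migration (names are placeholders, not from this commit).
exports.up = async (knex) => {
  await knex.schema.createTable('releases_media_example', (table) => {
    table.integer('release_id')
      .notNullable()
      .references('id')
      .inTable('releases')
      .onDelete('cascade'); // rows here are removed when the parent release is deleted

    table.text('media_id')
      .notNullable();
  });
};

exports.down = async (knex) => {
  await knex.schema.dropTable('releases_media_example');
};

Without the cascade, deleting a release would fail (or leave orphaned rows) while associated rows still reference it; with it, Postgres removes the dependent rows automatically.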

View File

@@ -2,10 +2,18 @@ exports.up = async (knex) => {
   await knex.schema.alterTable('releases', (table) => {
     table.specificType('alt_titles', 'text ARRAY');
   });
+
+  await knex.schema.alterTable('movies', (table) => {
+    table.specificType('alt_titles', 'text ARRAY');
+  });
 };
 
 exports.down = async (knex) => {
   await knex.schema.alterTable('releases', (table) => {
     table.dropColumn('alt_titles');
   });
+
+  await knex.schema.alterTable('movies', (table) => {
+    table.dropColumn('alt_titles');
+  });
 };
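
As a usage note, a small hedged sketch of how a text ARRAY column like alt_titles can be written and filtered with knex on Postgres; the helper and the title values are made up for illustration:

const knex = require('./knex'); // same style of knex instance the app uses

// Hypothetical helper, not part of this commit.
async function exampleAltTitles(movieId) {
  // node-postgres serializes JavaScript arrays into Postgres arrays for text[] columns.
  await knex('movies')
    .where('id', movieId)
    .update({ alt_titles: ['Alternate Title', 'Working Title'] });

  // Array membership check: movies listing a given alternate title.
  return knex('movies').whereRaw('? = ANY (alt_titles)', ['Alternate Title']);
}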

View File

@@ -14,6 +14,7 @@ const http = require('./utils/http');
 const logger = require('./logger')(__filename);
 const knex = require('./knex');
 const redis = require('./redis');
+const fetchUpdates = require('./updates');
 
 const { fetchScenes, fetchMovies } = require('./deep');
 const { storeScenes, storeMovies, associateMovieScenes } = require('./store-releases');
@@ -212,6 +213,8 @@ async function init() {
   await http.destroyBrowserSessions();
 
   knex.destroy();
   redis.disconnect();
+
+  done = true;
 }
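
The purpose of the new done flag is not visible in this diff, so the following is only a guess at the pattern: a module-level boolean set after cleanup, which a caller or watchdog can poll before letting the process exit. Everything in this sketch except done = true is hypothetical:

// Hypothetical sketch of a completion flag; not taken from this commit.
let done = false;

async function init() {
  // ... scraping work and teardown (browser sessions, knex, redis) would go here ...
  done = true; // signal that the run has finished cleaning up
}

// A caller could wait on the flag before forcing an exit.
async function waitUntilDone(intervalMs = 100) {
  while (!done) {
    await new Promise((resolve) => { setTimeout(resolve, intervalMs); });
  }
}

init();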

View File

@@ -36,7 +36,7 @@ function scrapeAll(scenes, site) {
   });
 }
 
-function scrapeScene({ html, qu }, url) {
+function scrapeScene({ html, qu }, url, channel) {
   const release = { url };
 
   release.entryId = qu.q('.stdimage', 'id', true).match(/set-target-(\d+)/)[1];
@@ -58,7 +58,7 @@ function scrapeScene({ html, qu }, url) {
   const poster = qu.img('a img');
   release.poster = getFallbacks(poster);
 
-  release.photos = qu.imgs('.featured-video img', 'src0_1x').map((source) => getFallbacks(source));
+  release.caps = qu.imgs('.featured-video img', 'src0_1x', { origin: channel.url }).map((source) => getFallbacks(source));
 
   return release;
 }
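
The change itself suggests the page's image sources are relative paths, so they have to be resolved against the channel's own origin (and the images are now stored as caps rather than photos). A minimal sketch of that resolution using Node's built-in URL class, with example values; the internal behaviour of the qu.imgs origin option may differ:

// Hypothetical illustration of origin-based URL resolution; values are examples only.
const channel = { url: 'https://www.example-channel.com/tour/' };
const relativeSrc = '/content/sets/1234/cap_1.jpg'; // e.g. a scraped src0_1x attribute

// new URL(path, base) resolves the relative path against the base URL's origin.
const resolved = new URL(relativeSrc, channel.url).href;

console.log(resolved); // https://www.example-channel.com/content/sets/1234/cap_1.jpg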