Compare commits

...

2 Commits

Author SHA1 Message Date
ThePendulum f684923a8a Actor tiles maintain aspect ratio. 2020-04-27 02:37:30 +02:00
ThePendulum a223f933ce Added 2 minute timeout to media fetch. 2020-04-26 04:21:57 +02:00
5 changed files with 91 additions and 60 deletions

View File

@@ -136,7 +136,7 @@ export default {
.tiles {
display: grid;
grid-template-columns: repeat(auto-fit, 10rem);
grid-template-columns: repeat(auto-fill, minmax(10rem, 1fr));
grid-gap: 0 .5rem;
padding: 1rem;
flex-grow: 1;
@@ -209,7 +209,7 @@ export default {
@media(max-width: $breakpoint) {
.tiles {
grid-template-columns: repeat(auto-fit, minmax(8rem, 1fr));
grid-template-columns: repeat(auto-fill, minmax(8rem, 1fr));
}
.genders {

View File

@@ -400,13 +400,14 @@ export default {
}
.actors {
display: flex;
display: grid;
grid-template-columns: repeat(auto-fill, minmax(10rem, 1fr));
grid-gap: 1rem;
flex-grow: 1;
flex-wrap: wrap;
}
.actor {
width: 10rem;
margin: 0 1rem .5rem 0;
}
@@ -477,5 +478,9 @@ export default {
width: 15rem;
max-width: 100%;
}
.actors {
grid-template-columns: repeat(auto-fill, minmax(8rem, 1fr));
}
}
</style>

View File

@@ -22,8 +22,10 @@
<span
v-else
v-tooltip.top="actor.name"
class="handle name"
>{{ actor.name }}</span>
class="handle"
>
<span class="name">{{ actor.name }}</span>
</span>
<div class="avatar-container">
<img
@@ -103,12 +105,28 @@ export default {
.actor {
width: 100%;
display: inline-block;
position: relative;
margin: 0 .5rem .5rem 0;
box-shadow: 0 0 3px var(--darken-weak);
background: var(--profile);
&::before {
content: '';
display: inline-block;
width: 1px;
height: 0;
padding-bottom: 150%;
}
}
.link {
width: 100%;
height: 100%;
display: flex;
flex-direction: column;
position: absolute;
top: 0;
left: 0;
color: var(--text-light);
text-decoration: none;
@@ -144,15 +162,18 @@ export default {
}
.avatar-container {
display: flex;
flex-grow: 1;
position: relative;
}
.avatar {
color: var(--darken-weak);
background: var(--darken-hint);
height: 13rem;
width: 100%;
height: 100%;
display: flex;
flex-direction: column;
align-items: center;
justify-content: center;
object-fit: cover;

View File

@@ -334,7 +334,7 @@ async function fetchSource(source, baseMedia) {
}
const { pathname } = new URL(source.src);
const mimetype = mime.getType(pathname);
const mimetype = res.headers['content-type'] || mime.getType(pathname);
const extension = mime.getExtension(mimetype);
const type = mimetype?.split('/')[0] || 'image';
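Note on the hunk above: the server-reported Content-Type header now takes precedence over guessing the type from the URL's file extension, presumably to handle source URLs that carry no useful extension. A minimal sketch of that resolution order, assuming the mime package this file already uses and a response object exposing a lowercased headers map:

const mime = require('mime');

// Prefer the server-reported Content-Type; fall back to guessing from the
// URL path. Extensionless URLs would otherwise resolve to a null mimetype.
function resolveMediaType(res, src) {
  const { pathname } = new URL(src);
  const mimetype = res.headers['content-type'] || mime.getType(pathname);
  const extension = mime.getExtension(mimetype);
  const type = mimetype?.split('/')[0] || 'image';

  return { mimetype, extension, type };
}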
@@ -342,7 +342,9 @@ async function fetchSource(source, baseMedia) {
hasher.setEncoding('hex');
const hashStream = new PassThrough();
const metaStream = type === 'image' ? sharp() : new PassThrough();
const metaStream = type === 'image'
? sharp()
: new PassThrough();
const tempFilePath = path.join(config.media.path, 'temp', `${baseMedia.id}.${extension}`);
const tempThumbPath = path.join(config.media.path, 'temp', `${baseMedia.id}_thumb.${extension}`);
@@ -353,6 +355,8 @@ async function fetchSource(source, baseMedia) {
hashStream.on('data', chunk => hasher.write(chunk));
if (type === 'image') {
// generate thumbnail
/*
metaStream
.clone()
.resize({
@@ -362,34 +366,21 @@ async function fetchSource(source, baseMedia) {
.jpeg({ quality: config.media.thumbnailQuality })
.pipe(tempThumbTarget)
.on('error', error => logger.error(error));
*/
}
// pipeline destroys streams
// pipeline destroys streams, so attach info event first
const infoPromise = type === 'image' ? once(metaStream, 'info') : Promise.resolve([{}]);
const metaPromise = type === 'image' ? metaStream.stats() : Promise.resolve();
await pipeline(
res.originalRes,
metaStream,
// metaStream,
hashStream,
tempFileTarget,
);
/*
res.originalRes
.pipe(metaStream)
.pipe(hashStream)
.pipe(tempFileTarget);
*/
logger.silly(`Temporarily saved media from ${source.src}`);
const [stats, info] = await Promise.all([
metaPromise,
infoPromise,
]);
logger.silly(`Ended pipeline for ${source.src}`);
const [stats, info] = await Promise.all([metaPromise, infoPromise]);
hasher.end();
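The rewritten comment above ("pipeline destroys streams, so attach info event first") points at an ordering constraint: since pipeline() tears the streams down when it settles, the listener for sharp's 'info' event and the stats() call must be set up before the pipeline is awaited. This hunk also comments the sharp stream out of the pipeline and disables thumbnailing for now; the sketch below shows only the listener-first pattern, roughly as the previous revision wired it, with illustrative stream names:

const { once } = require('events');
const util = require('util');
const stream = require('stream');
const sharp = require('sharp');

const pipeline = util.promisify(stream.pipeline);

async function saveAndProbeImage(sourceStream, targetStream) {
  const metaStream = sharp();

  // Register interest in metadata before running the pipeline; once the
  // pipeline settles, a late listener would never fire.
  const infoPromise = once(metaStream, 'info'); // resolves to [info]
  const statsPromise = metaStream.stats();      // pixel statistics (entropy etc.)

  await pipeline(sourceStream, metaStream, targetStream);

  const [[info], stats] = await Promise.all([infoPromise, statsPromise]);

  return { width: info.width, height: info.height, entropy: stats.entropy };
}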
@@ -398,7 +389,7 @@ async function fetchSource(source, baseMedia) {
peakMemoryUsage = Math.max(getMemoryUsage(), peakMemoryUsage);
logger.silly(`Retrieved metadata from ${source.src}`);
logger.silly(`Fetched media from ${source.src}, memory usage ${peakMemoryUsage.toFixed(2)} MB`);
return {
...source,
@@ -422,14 +413,21 @@ async function fetchSource(source, baseMedia) {
if (attempts < 3) {
await Promise.delay(1000);
return attempt(attempts + 1);
return Promise.race([
attempt(attempts + 1),
Promise.delay(120 * 1000).then(() => { throw new Error(`Media fetch attempt ${attempts}/3 timed out, aborting ${source.src}`); }),
]);
}
throw new Error(`Failed to fetch ${source.src}: ${error.message}`);
}
}
return attempt(1);
return Promise.race([
attempt(1),
Promise.delay(120 * 1000).then(() => { throw new Error(`Media fetch timed out, aborting ${source.src}`); }),
]);
}
async function trySource(baseSource, existingMedias, baseMedia) {
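This is the second commit's headline change: both the initial fetch attempt and each retry now race a two-minute Promise.delay that throws, so a stalled download rejects instead of hanging the batch indefinitely. A reusable sketch of the same pattern, assuming Bluebird's Promise.delay as the module already uses; note that Promise.race does not cancel the losing promise, so the underlying request keeps running in the background:

const Promise = require('bluebird');

// Reject if `task` has not settled within `ms` milliseconds.
function withTimeout(task, ms, label) {
  return Promise.race([
    task,
    Promise.delay(ms).then(() => {
      throw new Error(`${label} timed out after ${ms / 1000} seconds`);
    }),
  ]);
}

// usage mirroring the hunk above:
// return withTimeout(attempt(1), 120 * 1000, `media fetch for ${source.src}`);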
@@ -437,14 +435,14 @@ async function trySource(baseSource, existingMedias, baseMedia) {
const extractedSource = await extractSource(baseSource, existingMedias);
const existingSourceMedia = existingMedias.existingSourceMediaByUrl[extractedSource.src];
if (extractedSource.entry) {
if (!argv.force && extractedSource.entry) {
logger.silly(`Media page URL already in database, not extracting ${baseSource.url}`);
// media entry found during extraction, don't fetch
return extractedSource;
}
if (existingSourceMedia) {
if (!argv.force && existingSourceMedia) {
logger.silly(`Media source URL already in database, skipping ${baseSource.src}`);
// media entry found by source URL, don't fetch
@@ -461,7 +459,13 @@ async function fetchMedia(baseMedia, existingMedias) {
try {
const source = await baseMedia.sources.reduce(
// try each source until success
(result, baseSource, baseSourceIndex) => result.catch(async () => trySource(baseSource, existingMedias, baseMedia, baseSourceIndex)),
(result, baseSource, baseSourceIndex) => result.catch(async (error) => {
if (error.message) {
logger.warn(error.message);
}
return trySource(baseSource, existingMedias, baseMedia, baseSourceIndex);
}),
Promise.reject(new Error()),
);
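fetchMedia walks its candidate sources by folding them into a promise chain seeded with a rejection: each .catch tries the next source, and the change above additionally logs why the previous source failed before moving on. Condensed, the pattern looks like this (the parameters mirror the module's own trySource and logger):

// Try each source in order; fall through to the next one on failure.
// Seeding with a rejected promise makes the first .catch run the first source;
// the seed error has no message, so that initial rejection is never logged.
function trySourcesInOrder(sources, trySource, logger) {
  return sources.reduce(
    (chain, source) => chain.catch((error) => {
      if (error.message) {
        logger.warn(error.message);
      }

      return trySource(source);
    }),
    Promise.reject(new Error()),
  );
}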
@@ -529,7 +533,6 @@ async function storeMedias(baseMedias) {
const savedMedias = await Promise.map(
baseMedias,
async baseMedia => fetchMedia(baseMedia, { existingSourceMediaByUrl, existingExtractMediaByUrl }),
{ concurrency: 10 },
);
const [uniqueHashMedias, existingHashMedias] = await findHashDuplicates(savedMedias);
@@ -599,7 +602,9 @@ async function associateReleaseMedia(releases) {
}, [])
.filter(Boolean);
await knex.raw(`${knex(`releases_${role}`).insert(associations)} ON CONFLICT DO NOTHING`);
if (associations.length > 0) {
await knex.raw(`${knex(`releases_${role}`).insert(associations)} ON CONFLICT DO NOTHING`);
}
}, Promise.resolve());
logger.debug(`Peak media fetching memory usage: ${peakMemoryUsage.toFixed(2)} MB`);
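The last hunk above guards the association insert: when a release produced no associations, knex is no longer asked to build an INSERT from an empty array. The raw-query trick of interpolating a query builder and appending ON CONFLICT DO NOTHING (which older knex releases do not expose as a builder method) stays the same. A hedged sketch of that helper shape:

// Bulk-insert rows, ignoring conflicts with existing rows.
// The builder is interpolated into knex.raw(), which stringifies it with its
// bindings inlined, letting us append ON CONFLICT DO NOTHING manually.
async function insertIgnoringConflicts(knex, table, rows) {
  if (rows.length === 0) {
    return; // an empty array would not produce a meaningful INSERT, so skip it
  }

  await knex.raw(`${knex(table).insert(rows)} ON CONFLICT DO NOTHING`);
}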

View File

@@ -42,6 +42,32 @@ function getAvatarFallbacks(avatar) {
.flat();
}
async function getTrailer(scene, site, url) {
const qualities = [360, 480, 720, 1080, 2160];
const tokenRes = await post(`${site.url}/api/__record_tknreq`, {
file: scene.previewVideoUrl1080P,
sizes: qualities.join('+'),
type: 'trailer',
}, { referer: url });
if (!tokenRes.ok) {
return null;
}
const trailerUrl = `${site.url}/api${tokenRes.body.data.url}`;
const trailersRes = await post(trailerUrl, null, { referer: url });
if (trailersRes.ok) {
return qualities.map(quality => (trailersRes.body[quality] ? {
src: trailersRes.body[quality].token,
quality,
} : null)).filter(Boolean);
}
return null;
}
function scrapeAll(scenes, site, origin) {
return scenes.map((scene) => {
const release = {};
@@ -90,32 +116,6 @@ function scrapeUpcoming(scene, site) {
return [release];
}
async function getTrailer(scene, site, url) {
const qualities = [360, 480, 720, 1080, 2160];
const tokenRes = await post(`${site.url}/api/__record_tknreq`, {
file: scene.previewVideoUrl1080P,
sizes: qualities.join('+'),
type: 'trailer',
}, { referer: url });
if (!tokenRes.ok) {
return null;
}
const trailerUrl = `${site.url}/api${tokenRes.body.data.url}`;
const trailersRes = await post(trailerUrl, null, { referer: url });
if (trailersRes.ok) {
return qualities.map(quality => (trailersRes.body[quality] ? {
src: trailersRes.body[quality].token,
quality,
} : null)).filter(Boolean);
}
return null;
}
async function scrapeScene(data, url, site, baseRelease) {
const scene = data.video;
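The only change in this last file is moving getTrailer earlier in the module; the function body is unchanged. It performs a two-step handshake: POST the desired qualities to /api/__record_tknreq with the scene page as referer, then POST to the URL returned in the token response to receive a per-quality list of tokenized trailer sources. Purely to illustrate the return shape, a hypothetical caller might keep the best quality like this (the release.trailer property and the quality preference are assumptions, not taken from this diff):

// Hypothetical consumer of getTrailer's [{ src, quality }] result.
async function attachTrailer(release, scene, site, url) {
  const trailers = await getTrailer(scene, site, url);

  if (trailers && trailers.length > 0) {
    // keep the highest quality on offer (an assumption for this sketch)
    release.trailer = trailers.reduce((best, candidate) => (candidate.quality > best.quality ? candidate : best));
  }

  return release;
}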