forked from DebaucheryLibrarian/traxxx
Merge branch 'master' into experimental
src/media.js · 156 lines changed
@@ -14,6 +14,7 @@ const ffmpeg = require('fluent-ffmpeg');
 const sharp = require('sharp');
 const blake2 = require('blake2');
 const taskQueue = require('promise-task-queue');
+const AWS = require('aws-sdk');
 
 const logger = require('./logger')(__filename);
 const argv = require('./argv');
@@ -25,6 +26,17 @@ const { get } = require('./utils/qu');
 const pipeline = util.promisify(stream.pipeline);
 const streamQueue = taskQueue();
 
+const endpoint = new AWS.Endpoint('s3.eu-central-1.wasabisys.com');
+
+const s3 = new AWS.S3({
+	// region: 'eu-central-1',
+	endpoint,
+	credentials: {
+		accessKeyId: config.s3.accessKey,
+		secretAccessKey: config.s3.secretKey,
+	},
+});
+
 function sampleMedias(medias, limit = argv.mediaLimit, preferLast = true) {
 	// limit media sets, use extras as fallbacks
 	if (medias.length <= limit) {
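The client above targets Wasabi's S3-compatible API through a hard-coded eu-central-1 endpoint, while the credentials already come from config. A minimal sketch of reading the endpoint from config as well, assuming a hypothetical config.s3.endpoint property that is not part of this commit:

// sketch only: config.s3.endpoint is an assumed property, not in the committed config schema
const endpoint = new AWS.Endpoint(config.s3.endpoint || 's3.eu-central-1.wasabisys.com');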
@@ -303,14 +315,68 @@ async function extractSource(baseSource, { existingExtractMediaByUrl }) {
 		throw new Error(`Could not extract source from ${baseSource.url}: ${res.status}`);
 }
 
+async function storeS3Object(filepath, media) {
+	const fullFilepath = path.join(config.media.path, filepath);
+	const file = fs.createReadStream(fullFilepath);
+
+	const status = await s3.upload({
+		Bucket: config.s3.bucket,
+		Body: file,
+		Key: filepath,
+		ContentType: media.meta.mimetype,
+	}).promise();
+
+	await fsPromises.unlink(fullFilepath);
+
+	logger.silly(`Uploaded '${media.id}' from ${media.src} to S3 bucket '${status.Bucket}' at ${status.Location}`);
+
+	return status;
+}
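storeS3Object streams the local file to the bucket, reuses the relative filepath as the object key, and only unlinks the local copy after the upload resolves, so a failed upload leaves the file on disk. A hypothetical call site that takes advantage of that to retry later (a sketch, not part of the commit):

// sketch: tolerate a failed upload and keep the local copy for a later retry
try {
	await storeS3Object(filepath, media);
} catch (error) {
	logger.warn(`S3 upload failed for '${media.id}', keeping local copy: ${error.message}`);
}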
+
+async function writeImage(image, media, info, filepath, isProcessed) {
+	if (isProcessed && info.pages) {
+		// convert animated image to WebP and write to permanent location
+		await image
+			.webp()
+			.toFile(path.join(config.media.path, filepath));
+		// animated image handled, don't fall through and overwrite it with a JPEG
+		return;
+	}
+
+	if (isProcessed) {
+		// convert to JPEG and write to permanent location
+		await image
+			.jpeg()
+			.toFile(path.join(config.media.path, filepath));
+	}
+}
+
+async function writeThumbnail(image, thumbpath) {
+	return image
+		.resize({
+			height: config.media.thumbnailSize,
+			withoutEnlargement: true,
+		})
+		.jpeg({ quality: config.media.thumbnailQuality })
+		.toFile(path.join(config.media.path, thumbpath));
+}
+
+async function writeLazy(image, lazypath) {
+	return image
+		.resize({
+			height: config.media.lazySize,
+			withoutEnlargement: true,
+		})
+		.jpeg({ quality: config.media.lazyQuality })
+		.toFile(path.join(config.media.path, lazypath));
+}
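writeThumbnail and writeLazy are identical apart from the size and quality settings they read from config. If more variants appear, they could collapse into one parameterized helper; a sketch under that assumption (writeResized is a hypothetical name, not part of the commit):

async function writeResized(image, targetPath, height, quality) {
	// resize to the given height without upscaling, encode as JPEG, write below the media root
	return image
		.resize({ height, withoutEnlargement: true })
		.jpeg({ quality })
		.toFile(path.join(config.media.path, targetPath));
}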
+
 async function storeImageFile(media, hashDir, hashSubDir, filename, filedir, filepath, options) {
 	logger.silly(`Storing permanent media files for ${media.id} from ${media.src} at ${filepath}`);
 
 	try {
-		const thumbdir = path.join(media.role, 'thumbs', hashDir, hashSubDir);
+		const thumbdir = config.s3.enabled ? path.join(media.role, 'thumbs') : path.join(media.role, 'thumbs', hashDir, hashSubDir);
 		const thumbpath = path.join(thumbdir, filename);
 
-		const lazydir = path.join(media.role, 'lazy', hashDir, hashSubDir);
+		const lazydir = config.s3.enabled ? path.join(media.role, 'lazy') : path.join(media.role, 'lazy', hashDir, hashSubDir);
 		const lazypath = path.join(lazydir, filename);
 
 		await Promise.all([
@@ -343,46 +409,28 @@ async function storeImageFile(media, hashDir, hashSubDir, filename, filedir, fil
 		});
 	}
 
-	if (isProcessed) {
-		if (info.pages) {
-			// convert animated image to WebP and write to permanent location
-			await image
-				.webp()
-				.toFile(path.join(config.media.path, filepath));
-		} else {
-			// convert to JPEG and write to permanent location
-			await image
-				.jpeg()
-				.toFile(path.join(config.media.path, filepath));
-		}
-	}
-
-	// generate thumbnail and lazy
 	await Promise.all([
-		image
-			.resize({
-				height: config.media.thumbnailSize,
-				withoutEnlargement: true,
-			})
-			.jpeg({ quality: config.media.thumbnailQuality })
-			.toFile(path.join(config.media.path, thumbpath)),
-		image
-			.resize({
-				height: config.media.lazySize,
-				withoutEnlargement: true,
-			})
-			.jpeg({ quality: config.media.lazyQuality })
-			.toFile(path.join(config.media.path, lazypath)),
+		writeImage(image, media, info, filepath, isProcessed),
+		writeThumbnail(image, thumbpath),
+		writeLazy(image, lazypath),
 	]);
 
 	if (isProcessed) {
-		// remove temp file
+		// file already stored, remove temporary file
 		await fsPromises.unlink(media.file.path);
 	} else {
-		// move temp file to permanent location
+		// image not processed, simply move temporary file to final location
 		await fsPromises.rename(media.file.path, path.join(config.media.path, filepath));
 	}
 
+	if (config.s3.enabled) {
+		await Promise.all([
+			storeS3Object(filepath, media),
+			storeS3Object(thumbpath, media),
+			storeS3Object(lazypath, media),
+		]);
+	}
+
 	logger.silly(`Stored thumbnail, lazy and permanent media file for ${media.id} from ${media.src} at ${filepath}`);
 
 	return {
@@ -413,13 +461,13 @@ async function storeFile(media, options) {
 	try {
 		const hashDir = media.meta.hash.slice(0, 2);
 		const hashSubDir = media.meta.hash.slice(2, 4);
-		const hashFilename = media.meta.hash.slice(4);
+		const hashFilename = config.s3.enabled ? media.meta.hash : media.meta.hash.slice(4);
 
 		const filename = media.quality
 			? `${hashFilename}_${media.quality}.${media.meta.extension}`
 			: `${hashFilename}.${media.meta.extension}`;
 
-		const filedir = path.join(media.role, hashDir, hashSubDir);
+		const filedir = config.s3.enabled ? media.role : path.join(media.role, hashDir, hashSubDir);
 		const filepath = path.join(filedir, filename);
 
 		if (argv.force) {
@@ -447,6 +495,11 @@ async function storeFile(media, options) {
 	// move temp file to permanent location
 	await fsPromises.rename(media.file.path, path.join(config.media.path, filepath));
 
+	if (config.s3.enabled) {
+		// upload the file to S3 storage, will remove original
+		await storeS3Object(filepath, media);
+	}
+
 	logger.silly(`Stored permanent media file for ${media.id} from ${media.src} at ${filepath}`);
 
 	return {
@@ -521,7 +574,6 @@ async function fetchSource(source, baseMedia) {
 
 	try {
 		const tempFilePath = path.join(config.media.path, 'temp', `${baseMedia.id}`);
-
 		const tempFileTarget = fs.createWriteStream(tempFilePath);
 		const hashStream = new stream.PassThrough();
 		let size = 0;
@@ -648,6 +700,7 @@ function curateMediaEntry(media, index) {
 		path: media.file.path,
 		thumbnail: media.file.thumbnail,
 		lazy: media.file.lazy,
+		is_s3: config.s3.enabled,
 		index,
 		mime: media.meta.mimetype,
 		hash: media.meta.hash,
@@ -816,6 +869,29 @@ async function associateAvatars(profiles) {
 	return profilesWithAvatarIds;
 }
 
+async function deleteS3Objects(media) {
+	const objects = media
+		.map(item => [
+			{ Key: item.path },
+			{ Key: item.thumbnail },
+			{ Key: item.lazy },
+		])
+		.flat()
+		.filter(item => item.Key);
+
+	const status = await s3.deleteObjects({
+		Bucket: config.s3.bucket,
+		Delete: {
+			Objects: objects,
+			Quiet: false,
+		},
+	}).promise();
+
+	logger.info(`Removed ${status.Deleted.length} media files from S3 bucket '${config.s3.bucket}', ${status.Errors.length} errors`);
+
+	return status;
+}
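One caveat: the S3 DeleteObjects API accepts at most 1000 keys per request, so a large orphan sweep would eventually need batching. A sketch of chunking the key list before the deleteObjects call (not part of the commit):

// split the key list into batches of 1000, the DeleteObjects per-request maximum;
// each batch would then go through s3.deleteObjects as above
const batches = [];
for (let i = 0; i < objects.length; i += 1000) {
	batches.push(objects.slice(i, i + 1000));
}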
+
 async function flushOrphanedMedia() {
 	const orphanedMedia = await knex('media')
 		.where('is_sfw', false)
@@ -843,10 +919,10 @@ async function flushOrphanedMedia() {
 		)
 			.whereRaw('associations.media_id = media.id'),
 		)
-		.returning(['media.path', 'media.thumbnail', 'media.lazy'])
+		.returning(['media.id', 'media.is_s3', 'media.path', 'media.thumbnail', 'media.lazy'])
 		.delete();
 
-	await Promise.all(orphanedMedia.map(media => Promise.all([
+	await Promise.all(orphanedMedia.filter(media => !media.is_s3).map(media => Promise.all([
 		media.path && fsPromises.unlink(path.join(config.media.path, media.path)).catch(() => { /* probably file not found */ }),
 		media.thumbnail && fsPromises.unlink(path.join(config.media.path, media.thumbnail)).catch(() => { /* probably file not found */ }),
 		media.lazy && fsPromises.unlink(path.join(config.media.path, media.lazy)).catch(() => { /* probably file not found */ }),
@@ -854,6 +930,10 @@ async function flushOrphanedMedia() {
 	logger.info(`Removed ${orphanedMedia.length} media files from database and storage`);
 
+	if (config.s3.enabled) {
+		await deleteS3Objects(orphanedMedia.filter(media => media.is_s3));
+	}
+
 	await fsPromises.rmdir(path.join(config.media.path, 'temp'), { recursive: true });
 
 	logger.info('Cleared temporary media directory');
@@ -200,8 +200,6 @@ function scrapeAll(html, site, networkUrl, hasTeaser = true) {
 			];
 		}
 
-		console.log(release);
-
 		return release;
 	});
 }
@@ -13,7 +13,9 @@ const { cookieToData } = require('../utils/cookies');
 
 function getThumbs(scene) {
 	if (scene.images.poster) {
-		return scene.images.poster.map(image => image.xl.url);
+		return Object.values(scene.images.poster) // can be { 0: {}, 1: {}, ... } instead of array
+			.filter(img => typeof img === 'object') // remove alternateText property
+			.map(image => image.xl.url);
 	}
 
 	if (scene.images.card_main_rect) {
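To illustrate the case the rewritten getThumbs handles: when the API returns the poster set as a plain object with numeric keys plus an alternateText string, Object.values plus the typeof filter recovers just the image entries. For example (the URLs are hypothetical):

const poster = {
	0: { xl: { url: 'https://example.com/poster-0.jpg' } },
	1: { xl: { url: 'https://example.com/poster-1.jpg' } },
	alternateText: 'Scene poster',
};

Object.values(poster)
	.filter(img => typeof img === 'object') // drops the alternateText string
	.map(image => image.xl.url);
// => ['https://example.com/poster-0.jpg', 'https://example.com/poster-1.jpg']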
src/utils/s3.js · 40 lines · new file
@@ -0,0 +1,40 @@
+'use strict';
+
+const config = require('config');
+const AWS = require('aws-sdk');
+const fs = require('fs');
+const nanoid = require('nanoid');
+
+async function init() {
+	const filepath = './public/img/sfw/animals/j0iiByCxGfA.jpeg';
+	const endpoint = new AWS.Endpoint('s3.wasabisys.com');
+
+	const s3 = new AWS.S3({
+		// region: 'eu-central-1',
+		endpoint,
+		credentials: {
+			accessKeyId: config.s3.accessKey,
+			secretAccessKey: config.s3.secretKey,
+		},
+	});
+
+	try {
+		const data = await s3.listBuckets().promise();
+		const file = fs.createReadStream(filepath);
+		const key = `img/${nanoid()}.jpg`;
+
+		const status = await s3.upload({
+			Bucket: config.s3.bucket,
+			Body: file,
+			Key: key,
+			ContentType: 'image/jpeg',
+		}).promise();
+
+		console.log(data);
+		console.log(status);
+	} catch (error) {
+		console.log(error);
+	}
+}
+
+init();
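This file reads as a standalone smoke test: it lists the buckets and uploads one sample image under a random key, so it can presumably be run directly (node src/utils/s3.js) to verify the Wasabi credentials and bucket config. One assumption worth flagging: calling require('nanoid') as a function matches nanoid v2; from v3 on the package exports a named function instead:

// nanoid v3+ equivalent, if the dependency were upgraded (assumption about the installed version):
const { nanoid } = require('nanoid');
const key = `img/${nanoid()}.jpg`;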
@@ -1,14 +1,14 @@
 'use strict';
 
-const bhttp = require('bhttp');
+const http = require('./http');
 
 const sleep = 5000;
 const timeout = 1000;
 
 async function init() {
 	try {
-		const res = await bhttp.get(`https://httpstat.us/200?sleep=${sleep}`, {
-			responseTimeout: timeout,
+		const res = await http.get(`https://httpstat.us/200?sleep=${sleep}`, {
+			timeout,
 		});
 
 		console.log(res.statusCode);
@@ -17,15 +17,4 @@ async function init() {
 	}
 }
 
-/*
-/home/pendulum/projectx/node_modules/bhttp/lib/bhttp.js:159
-      err.response = response;
-                   ^
-
-TypeError: Cannot assign to read only property 'response' of object '[object Object]'
-    at addErrorData (/home/pendulum/projectx/node_modules/bhttp/lib/bhttp.js:159:16)
-    at Timeout.timeoutHandler [as _onTimeout] (/home/pendulum/projectx/node_modules/bhttp/lib/bhttp.js:525:27)
-*/
-
-
 init();
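The removed comment block preserved the crash this test originally reproduced: bhttp's responseTimeout handler threw "Cannot assign to read only property 'response'", which is presumably why the script now goes through the local ./http wrapper and its timeout option instead. As a generic illustration of the same guard without library support, a timer can be raced against the request (a sketch, not the wrapper's actual implementation):

// race a promise against a timer; whichever settles first wins
function withTimeout(promise, ms) {
	return Promise.race([
		promise,
		new Promise((resolve, reject) => {
			setTimeout(() => reject(new Error(`Timed out after ${ms}ms`)), ms);
		}),
	]);
}

// hypothetical usage with the same test URL:
// const res = await withTimeout(http.get(`https://httpstat.us/200?sleep=${sleep}`), timeout);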