Fixed pagination for Killergram, added pagination to actor profiles, added Killergram Platinum. Added experimental m3u8 stream support.
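
Of these changes, only the experimental m3u8 stream support touches src/media.js below: sources gain a `stream` property carrying a playlist URL, which is fetched through youtube-dl instead of the plain HTTP pipeline. A sketch of what a scraper might emit to use it; the URLs are hypothetical, only the property names appear in the diff:

    const trailer = {
      stream: 'https://cdn.example.com/scene123/playlist.m3u8', // hypothetical HLS playlist
      referer: 'https://www.example.com/scene/123',             // optional, used by the HTTP fetcher
      attempts: 2,                                              // optional retry override
    };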
--- a/src/media.js
+++ b/src/media.js
@@ -1,14 +1,16 @@
 'use strict';
 
 const config = require('config');
+const util = require('util');
 const Promise = require('bluebird');
 const fs = require('fs');
 const fsPromises = require('fs').promises;
 const path = require('path');
-const { PassThrough } = require('stream');
+const stream = require('stream');
 const nanoid = require('nanoid/non-secure');
 const mime = require('mime');
 // const fileType = require('file-type');
+const youtubeDl = require('youtube-dl');
 const sharp = require('sharp');
 const blake2 = require('blake2');
 
@@ -18,6 +20,8 @@ const knex = require('./knex');
 const http = require('./utils/http');
 const { get } = require('./utils/qu');
 
+const pipeline = util.promisify(stream.pipeline);
+
 function sampleMedias(medias, limit = config.media.limit, preferLast = true) {
   // limit media sets, use extras as fallbacks
   if (medias.length <= limit) {
@@ -77,7 +81,7 @@ function itemsByKey(items, key) {
 }
 
 function toBaseSource(rawSource) {
-  if (rawSource && (rawSource.src || (rawSource.extract && rawSource.url))) {
+  if (rawSource && (rawSource.src || (rawSource.extract && rawSource.url) || rawSource.stream)) {
     const baseSource = {};
 
     if (rawSource.src) baseSource.src = rawSource.src;
@@ -87,6 +91,11 @@ function toBaseSource(rawSource) {
     if (rawSource.url) baseSource.url = rawSource.url;
     if (rawSource.extract) baseSource.extract = rawSource.extract;
 
+    if (rawSource.stream) {
+      baseSource.src = rawSource.stream;
+      baseSource.stream = rawSource.stream;
+    }
+
     if (rawSource.referer) baseSource.referer = rawSource.referer;
     if (rawSource.host) baseSource.host = rawSource.host;
     if (rawSource.attempts) baseSource.attempts = rawSource.attempts;
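
Note that toBaseSource mirrors the stream URL into `src` as well as `stream`: downstream code uses `source.src` for logging and error messages, so the copy keeps those paths working for stream sources. Roughly (hypothetical URL):

    toBaseSource({ stream: 'https://cdn.example.com/playlist.m3u8' });
    // => { src: 'https://cdn.example.com/playlist.m3u8', stream: 'https://cdn.example.com/playlist.m3u8' }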
@@ -368,19 +377,57 @@ async function storeFile(media) {
   }
 }
 
+async function fetchHttpSource(source, tempFileTarget, hashStream) {
+  const res = await http.get(source.src, {
+    ...(source.referer && { referer: source.referer }),
+    ...(source.host && { host: source.host }),
+  }, {
+    stream: true, // sources are fetched in parallel, don't gobble up memory
+    transforms: [hashStream],
+    destination: tempFileTarget,
+    queueMethod: source.queueMethod || null, // use http module's default
+  });
+
+  if (!res.ok) {
+    throw new Error(`Response ${res.status} not OK`);
+  }
+
+  return {
+    mimetype: res.headers['content-type'] || mime.getType(new URL(source.src).pathname),
+  };
+}
+
+async function fetchStreamSource(source, tempFileTarget, hashStream) {
+  const video = youtubeDl(source.stream);
+
+  video.on('info', (info) => {
+    console.log(info);
+    logger.verbose(`Starting fetching stream from ${source.stream}`);
+  });
+
+  video.on('end', (info) => {
+    console.log(info);
+    logger.verbose(`Finished fetching stream from ${source.stream}`);
+  });
+
+  await pipeline(video, hashStream, tempFileTarget);
+
+  return { mimetype: null };
+}
+
 async function fetchSource(source, baseMedia) {
   logger.silly(`Fetching media from ${source.src}`);
   // attempts
 
   async function attempt(attempts = 1) {
+    const hasher = new blake2.Hash('blake2b', { digestLength: 24 });
+    hasher.setEncoding('hex');
+
     try {
       const tempFilePath = path.join(config.media.path, 'temp', `${baseMedia.id}`);
 
-      const hasher = new blake2.Hash('blake2b', { digestLength: 24 });
-      hasher.setEncoding('hex');
-
       const tempFileTarget = fs.createWriteStream(tempFilePath);
-      const hashStream = new PassThrough();
+      const hashStream = new stream.PassThrough();
       let size = 0;
 
       hashStream.on('data', (chunk) => {
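
Both fetchers share one hashing path: the payload is piped through a PassThrough that feeds a BLAKE2b hasher on its way to the temp file, so the digest is computed without buffering the whole file in memory. A self-contained sketch of that pattern, using the same npm blake2 API as the code above (function and path names hypothetical):

    'use strict';

    const fs = require('fs');
    const util = require('util');
    const stream = require('stream');
    const blake2 = require('blake2');

    const pipeline = util.promisify(stream.pipeline);

    async function hashToFile(sourceStream, targetPath) {
      const hasher = new blake2.Hash('blake2b', { digestLength: 24 });
      hasher.setEncoding('hex');

      // tap each chunk for hashing while the bytes continue on to disk
      const hashStream = new stream.PassThrough();
      hashStream.on('data', (chunk) => hasher.write(chunk));

      await pipeline(sourceStream, hashStream, fs.createWriteStream(targetPath));

      hasher.end();
      return hasher.read(); // hex digest of the written file
    }

The same shape is why fetchStreamSource slots in so easily: the npm youtube-dl wrapper returns a readable stream (emitting info and end events, as the handlers above show), so it feeds the identical pipeline(video, hashStream, tempFileTarget) call.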
@@ -388,28 +435,18 @@ async function fetchSource(source, baseMedia) {
         hasher.write(chunk);
       });
 
-      const res = await http.get(source.src, {
-        ...(source.referer && { referer: source.referer }),
-        ...(source.host && { host: source.host }),
-      }, {
-        stream: true, // sources are fetched in parallel, don't gobble up memory
-        transforms: [hashStream],
-        destination: tempFileTarget,
-        queueMethod: source.queueMethod || null, // use http module's default
-      });
+      console.log(source);
+
+      const { mimetype } = source.stream
+        ? await fetchStreamSource(source, tempFileTarget, hashStream)
+        : await fetchHttpSource(source, tempFileTarget, hashStream);
 
       hasher.end();
 
       const hash = hasher.read();
-      const { pathname } = new URL(source.src);
-      const mimetype = res.headers['content-type'] || mime.getType(pathname);
       const [type, subtype] = mimetype.split('/');
       const extension = mime.getExtension(mimetype);
 
-      if (!res.ok) {
-        throw new Error(`Response ${res.status} not OK`);
-      }
-
       return {
         ...source,
         file: {
@@ -425,6 +462,7 @@ async function fetchSource(source, baseMedia) {
         },
       };
     } catch (error) {
+      hasher.end();
       const maxAttempts = source.attempts || 3;
 
       logger.warn(`Failed attempt ${attempts}/${maxAttempts} to fetch ${source.src}: ${error.message}`);
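
Two details in the retry path are easy to miss: the hasher is now created before the try block and ended in the catch, so a failed attempt no longer leaks a half-fed hash stream across retries; and fetchStreamSource returns { mimetype: null }, so for stream sources the later mimetype.split('/') throws a TypeError and lands in this same catch, in line with the commit message calling m3u8 support experimental. Condensed (a sketch, not the full function):

    // inside attempt(); tempFileTarget and hashStream as set up above
    const { mimetype } = source.stream
      ? await fetchStreamSource(source, tempFileTarget, hashStream) // { mimetype: null } for now
      : await fetchHttpSource(source, tempFileTarget, hashStream);

    const [type, subtype] = mimetype.split('/'); // throws for stream sources until a type is derived another way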