Fixed extension getting cut off at filename limit. Cleaned up save module.

DebaucheryLibrarian 2024-09-11 05:16:56 +02:00
parent 99c7d143f7
commit dcf7fdd274
1 changed file with 41 additions and 31 deletions

@@ -3,38 +3,37 @@
 const config = require('config');
 const fs = require('fs-extra');
 const path = require('path');
-const ffmpeg = require('fluent-ffmpeg');
 
-function save(requestedFilepath, streams, item, post) {
-    const filepath = requestedFilepath.split('/').map(component => {
-        console.log(component);
-
-        if(config.library.truncate && component.length > config.library.truncate.limit) {
-            return component.slice(0, config.library.truncate.limit - config.library.truncate.truncator.length) + config.library.truncate.truncator;
+function limitPathElement(element, limit) {
+    return element.split('/').map((component) => {
+        if (config.library.truncate && component.length > limit) {
+            return component.slice(0, limit - config.library.truncate.truncator.length) + config.library.truncate.truncator;
         }
 
         return component;
     }).join(path.sep);
+}
 
-    console.log(filepath);
+function getPathElements(requestedFilepath) {
+    const originalPathElements = path.parse(requestedFilepath);
 
-    const pathComponents = path.parse(filepath);
+    return {
+        root: originalPathElements.root,
+        dir: limitPathElement(originalPathElements.dir, config.library.truncate.limit),
+        name: limitPathElement(originalPathElements.name, config.library.truncate.limit - originalPathElements.ext.length),
+        ext: originalPathElements.ext,
+    };
+}
 
-    // allow for single stream argument
-    streams = [].concat(streams);
-
-    return Promise.resolve().then(() => {
-        return fs.ensureDir(pathComponents.dir);
-    }).then(() => {
-        return Promise.all(streams.map((stream, index) => {
-            const target = streams.length > 1 ? path.join(pathComponents.dir, `${pathComponents.name}-${index}${pathComponents.ext}`) : filepath;
-            const file = fs.createWriteStream(target);
-
-            return new Promise((resolve, reject) => {
-                stream.pipe(file).on('error', error => {
-                    reject(error);
-                }).on('finish', () => {
-                    if(item && item.mux) {
-                        console.log(`Temporarily saved '${target}', queued for muxing`);
-                    } else {
-                        console.log('\x1b[32m%s\x1b[0m', `Saved '${target}'`);
+function pipeStreamToFile(target, stream, item) {
+    const file = fs.createWriteStream(target);
+
+    return new Promise((resolve, reject) => {
+        stream.pipe(file);
+
+        stream.on('error', reject);
+        stream.on('finish', () => {
+            if (item && item.mux) {
+                console.log(`Temporarily saved '${target}', queued for muxing`);
+            } else {
+                console.log('\x1b[32m%s\x1b[0m', `Saved '${target}'`);
@@ -43,8 +42,19 @@ function save(requestedFilepath, streams, item, post) {
-                    resolve(target);
-                });
-            });
-        }));
-    });
-};
+            resolve(target);
+        });
+    });
+}
+
+async function save(requestedFilepath, streamOrStreams, item) {
+    const pathElements = getPathElements(requestedFilepath);
+    const streams = [].concat(streamOrStreams); // allow for single stream argument
+
+    await fs.ensureDir(pathElements.dir);
+
+    return Promise.all(streams.map((stream, index) => {
+        const target = path.join(pathElements.root, pathElements.dir, `${pathElements.name}${streams.length > 1 ? `-${index}` : ''}${pathElements.ext}`);
+
+        return pipeStreamToFile(target, stream, item);
+    }));
+}
 
 module.exports = save;
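
For context on the fix in the commit message: the old code truncated each whole path component, extension included, to config.library.truncate.limit, so a long filename could lose its extension; the new getPathElements truncates only the name and reserves room for the extension before re-attaching it. A rough before/after sketch of that difference, with truncate values invented purely for illustration (not taken from the repository's config):

// Illustration only – these truncate settings are made up for the example.
const truncate = { limit: 20, truncator: '...' };

const name = 'a-very-long-video-title';
const ext = '.mp4';

// Old behaviour: the full filename component was sliced to the limit,
// so the extension got cut off.
const before = `${name}${ext}`.slice(0, truncate.limit - truncate.truncator.length) + truncate.truncator;
// => 'a-very-long-video...'

// New behaviour: the limit passed for the name is reduced by the extension
// length, and the untouched extension is appended afterwards.
const nameLimit = truncate.limit - ext.length;
const after = name.slice(0, nameLimit - truncate.truncator.length) + truncate.truncator + ext;
// => 'a-very-long-v....mp4'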