// Entry point: fetches Reddit posts for the configured users/post IDs
// and saves their content to disk, optionally on a cron schedule.
'use strict';
|
|
|
|
const config = require('config');
|
|
const Snoowrap = require('snoowrap');
|
|
const fs = require('fs-extra');
|
|
const Promise = require('bluebird');
|
|
const exiftool = require('node-exiftool');
|
|
const exiftoolBin = require('dist-exiftool');
|
|
const cron = require('node-cron');
|
|
const { format } = require('date-fns');
|
|
|
|
require('array.prototype.flatten').shim();
|
|
|
|
const reddit = new Snoowrap(config.reddit.api);
|
|
const args = require('./cli.js')();
|
|
|
|
const curatePosts = require('./curate/posts.js');
|
|
|
|
const attachContentInfo = require('./fetch/info.js');
|
|
const fetchSaveContent = require('./fetch/content.js');
|
|
|
|
const getPosts = require('./sources/getPosts.js')(reddit, args);
|
|
const getUserPosts = require('./sources/getUserPosts.js')(reddit, args);
|
|
|
|
/**
 * Read a newline-separated list file and return its non-empty entries.
 *
 * @param {string} location - path of the list file to read
 * @param {string} label - human-readable description used in the error message
 * @returns {Promise<string[]>} trimmed, non-empty lines; [] when the file
 *   cannot be read (the error is logged, not thrown — missing list files
 *   are treated as best-effort input)
 */
async function getFileContents(location, label) {
    try {
        const fileContents = await fs.readFile(location, 'utf8');

        // Split on \n or \r\n so files written on Windows don't produce
        // entries with a trailing '\r'; trim stray whitespace and drop
        // blank lines.
        return fileContents
            .split(/\r?\n/)
            .map(entry => entry.trim())
            .filter(entry => entry);
    } catch (error) {
        console.log('\x1b[31m%s\x1b[0m', `Could not read ${label} file '${location}': ${error}.`);

        return [];
    }
}
|
|
|
|
/**
 * Gather usernames and post IDs from CLI flags and optional list files,
 * fetch the matching posts from Reddit, curate them against the ignore
 * list, and attach content info.
 *
 * @returns {Promise<Object>} curated user posts with content info attached
 * @throws {Error} when no users or post IDs were supplied at all
 */
async function getCompleteUserPosts() {
    // Start from the CLI-supplied lists (copied so we never mutate args).
    const usernames = [...(args.users || [])];
    const postIds = [...(args.posts || [])];

    if (args.fileUsers) {
        usernames.push(...await getFileContents(args.fileUsers, 'username'));
    }

    if (args.filePosts) {
        postIds.push(...await getFileContents(args.filePosts, 'post ID'));
    }

    if (!usernames.length && !postIds.length) {
        throw new Error('Could not retrieve any posts. Did you supply --users, --posts, --file-users or --file-posts?');
    }

    let userPosts = {};

    if (usernames.length) {
        userPosts = await getUserPosts(usernames);
    }

    // Individual post IDs are merged into whatever the user fetch produced.
    if (postIds.length) {
        userPosts = await getPosts(postIds, userPosts);
    }

    const ignoreIds = args.fileIgnore
        ? await getFileContents(args.fileIgnore, 'ignore')
        : [];

    return attachContentInfo(curatePosts(userPosts, ignoreIds, args));
}
|
|
|
|
/**
 * Download and save content for each user's posts, one user at a time.
 *
 * Deliberately sequential (no .map/Promise.all) to limit concurrency and
 * reduce network stress.
 *
 * @param {Object} userPosts - curated posts keyed by user
 * @param {Object} ep - open exiftool process used for metadata tagging
 * @returns {Promise<*>} result of the last fetch, or null for empty input
 */
async function fetchSavePosts(userPosts, ep) {
    let lastResult = null;

    for (const user of Object.values(userPosts)) {
        lastResult = await fetchSaveContent(user, ep, args);
    }

    return lastResult;
}
|
|
|
|
/**
 * Run one full fetch cycle: collect posts, open exiftool, download and
 * save everything, then clean up.
 *
 * Never rejects — all errors are caught and logged (full stack with
 * --debug, message only otherwise) so a failed cycle does not kill
 * watch mode.
 *
 * @returns {Promise<boolean>} always true
 */
async function initApp() {
    try {
        const userPosts = await getCompleteUserPosts();
        const ep = new exiftool.ExiftoolProcess(exiftoolBin);

        await ep.open();

        try {
            await fetchSavePosts(userPosts, ep);
        } finally {
            // Always shut the exiftool child process down, even when a
            // fetch fails — otherwise each failed cycle in watch mode
            // leaks an orphaned exiftool process.
            await ep.close();
        }

        if (args.watch) {
            console.log(`[${format(new Date(), 'YYYY-MM-DD HH:mm:ss')}] Watch-mode enabled, checking again for new posts according to crontab '${config.fetch.watch.schedule}'.`);
        }
    } catch (error) {
        if (args.debug) {
            console.log('\x1b[31m%s\x1b[0m', error.stack);
        } else {
            console.log('\x1b[31m%s\x1b[0m', error.message);
        }
    }

    return true;
}
|
|
|
|
// Kick off the first fetch cycle immediately. The returned promise is
// intentionally not awaited: initApp catches and logs all errors
// internally and always resolves, so it cannot reject unhandled.
initApp();

// In watch mode, re-run a full cycle on the configured crontab schedule.
if (args.watch) {
    cron.schedule(config.fetch.watch.schedule, initApp);
}
|