Allow usernames and post IDs to be read from file.

ThePendulum 2018-07-02 00:25:48 +02:00
parent 5b023adf30
commit 7335db1593
4 changed files with 43 additions and 13 deletions

.gitignore

@@ -3,3 +3,5 @@ config/*.js
 !config/default.js
 output/
 dist/
+users
+posts

View File

@@ -2,6 +2,7 @@
 const config = require('config');
 const Snoowrap = require('snoowrap');
+const fs = require('fs-extra');
 const exiftool = require('node-exiftool');
 const exiftoolBin = require('dist-exiftool');
 const cron = require('node-cron');
@@ -20,15 +21,41 @@ const fetchSaveContent = require('./fetch/content.js');
 const getPosts = require('./sources/getPosts.js')(reddit, args);
 const getUserPosts = require('./sources/getUserPosts.js')(reddit, args);

+async function getFileContents(location, label) {
+  try {
+    const fileContents = await fs.readFile(location, 'utf8');
+
+    return fileContents.split('\n').filter(entry => entry);
+  } catch (error) {
+    console.log('\x1b[31m%s\x1b[0m', `Could not read ${label} file '${location}': ${error}.`);
+
+    return [];
+  }
+}
+
 async function getCompleteUserPosts() {
   let userPosts = {};
+  let usernames = args.users || [];
+  let postIds = args.posts || [];

-  if (args.users) {
-    userPosts = await getUserPosts(args.users);
+  if (args.fileUsers) {
+    usernames = usernames.concat(await getFileContents(args.fileUsers, 'username'));
   }

-  if (args.posts) {
-    userPosts = await getPosts(args.posts, userPosts);
+  if (args.filePosts) {
+    postIds = postIds.concat(await getFileContents(args.filePosts, 'post ID'));
+  }
+
+  if (!usernames.length && !postIds.length) {
+    throw new Error('Could not retrieve any posts. Did you supply --users, --posts, --file-users or --file-posts?');
+  }
+
+  if (usernames.length) {
+    userPosts = await getUserPosts(usernames);
+  }
+
+  if (postIds.length) {
+    userPosts = await getPosts(postIds, userPosts);
   }

   const curatedUserPosts = curatePosts(userPosts, args);
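
A note for readers, not part of the commit: getFileContents expects a plain-text file with one entry per line; blank lines are dropped, values from the CLI and from the file are concatenated, and an unreadable file only logs a warning and contributes nothing. Below is a minimal stand-alone sketch of that behaviour, assuming a hypothetical 'users' file in the working directory (the name merely mirrors the new .gitignore entries).

const fs = require('fs-extra');

// Hypothetical 'users' file contents:
//   some_user
//
//   another_user
async function readList(location) {
  try {
    const contents = await fs.readFile(location, 'utf8');

    return contents.split('\n').filter(entry => entry); // drop blank lines
  } catch (error) {
    return []; // an unreadable file contributes nothing, mirroring getFileContents
  }
}

(async () => {
  const usernames = ['cli_user'].concat(await readList('users')); // --users values + file entries
  console.log(usernames); // ['cli_user', 'some_user', 'another_user']
})();
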
@@ -41,13 +68,6 @@ function fetchSavePosts(userPosts, ep) {
 }

 async function initApp() {
-  const usersProvided = args.users && args.users.length;
-  const postIdsProvided = args.posts && args.posts.length;
-
-  if (!usersProvided && !postIdsProvided) {
-    return console.log('\x1b[31m%s\x1b[0m', 'Please supply at least one user or post ID. See --help for more details.');
-  }
-
   try {
     const userPosts = await getCompleteUserPosts();
     const ep = new exiftool.ExiftoolProcess(exiftoolBin);
@@ -60,7 +80,7 @@ async function initApp() {
       console.log(`[${format(new Date(), 'YYYY-MM-DD HH:mm:ss')}] Watch-mode enabled, checking again for new posts according to crontab '${config.fetch.watch.schedule}'.`);
     }
   } catch (error) {
-    console.error(error);
+    console.log('\x1b[31m%s\x1b[0m', error.message);
   }

   return true;
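
Side note on the error output used above and in getFileContents: '\x1b[31m' is the ANSI escape for a red foreground and '\x1b[0m' resets it, so only the interpolated %s argument is printed in red. A tiny stand-alone example:

// Prints the message in red on ANSI-capable terminals; %s is replaced by the
// second argument and the colour is reset afterwards.
console.log('\x1b[31m%s\x1b[0m', 'Could not retrieve any posts.');
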

View File

@@ -11,11 +11,19 @@ function getArgs() {
       describe: 'Reddit usernames to fetch posts from',
       type: 'array',
     })
+    .option('file-users', {
+      describe: 'Load reddit usernames from file',
+      type: 'string',
+    })
     .option('posts', {
       alias: 'post',
       describe: 'Reddit post IDs to fetch',
       type: 'array',
     })
+    .option('file-posts', {
+      describe: 'Load reddit post IDs from file',
+      type: 'string',
+    })
     .option('limit', {
       describe: 'Maximum amount of posts to fetch per supplied user (!), after filtering out ignored, cross- and reposts',
       type: 'number',
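
A small sketch, not part of the commit, of why the application code can read args.fileUsers and args.filePosts even though the options are declared as 'file-users' and 'file-posts': yargs exposes dashed flags under camel-cased keys as well by default (camel-case expansion), so both spellings refer to the same parsed value.

const yargs = require('yargs');

// Default camel-case expansion: '--file-users' is available both as
// argv['file-users'] and argv.fileUsers on the parsed object.
const argv = yargs
  .option('file-users', { describe: 'Load reddit usernames from file', type: 'string' })
  .option('file-posts', { describe: 'Load reddit post IDs from file', type: 'string' })
  .parse(['--file-users', 'users', '--file-posts', 'posts']);

console.log(argv['file-users'], argv.fileUsers); // 'users' 'users'
console.log(argv['file-posts'], argv.filePosts); // 'posts' 'posts'
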

View File

@@ -38,7 +38,7 @@ const getPostsWrap = reddit => function getPosts(postIds, userPosts = {}) {
       }

       const user = await getUser(post.author.name, reddit);
-      const { profile, posts: indexed } = await getIndexedPosts(user);
+      const { profile, posts: indexed } = await getIndex(user);

       return { ...accUserPosts, [post.author.name]: { ...user, posts: [post], indexed: { profile, original: indexed, updated: [] } } };
     }), userPosts);