Added watch mode. Wait for the profile detail writes to finalize before starting a new watch cycle, and capture the detail paths in the index file.
This commit is contained in:
parent f9f9c8a0bd
commit e7eef86b23
@@ -45,6 +45,10 @@ module.exports = {
     limit: 1000,
     avoidDuplicates: true,
     retries: 3,
+    watch: {
+      interval: 30,
+      ignoreErrors: true,
+    },
     archives: {
       search: false,
       preview: true,
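Note: after this change, the fetch section of the config presumably looks like the sketch below. The nesting under fetch is inferred from the config.fetch.watch.* lookups in src/app.js; the surrounding values are taken from the diff context above.

module.exports = {
  fetch: {
    limit: 1000,
    avoidDuplicates: true,
    retries: 3,
    watch: {
      interval: 30,        // minutes between watch cycles
      ignoreErrors: true,  // schedule the next cycle even after an error
    },
    archives: {
      search: false,
      preview: true,
    },
  },
};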
|
14
src/app.js
14
src/app.js
|
@@ -39,6 +39,12 @@ function fetchSavePosts(userPosts, ep) {
 }
 
 async function initApp() {
+  function watch() {
+    console.log(`Watch-mode enabled, checking for new posts ${config.fetch.watch.interval} minutes from now.`);
+
+    setTimeout(initApp, Math.ceil(config.fetch.watch.interval) * 1000 * 60);
+  }
+
   const usersProvided = args.users && args.users.length;
   const postIdsProvided = args.posts && args.posts.length;
 
@@ -53,8 +59,16 @@ async function initApp() {
     await ep.open();
     await fetchSavePosts(userPosts, ep);
     await ep.close();
+
+    if (args.watch) {
+      watch();
+    }
   } catch (error) {
     console.error(error);
+
+    if (args.watch && config.fetch.watch.ignoreErrors) {
+      watch();
+    }
   }
 
   return true;
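Note: the delay passed to setTimeout is the configured interval converted from minutes to milliseconds, with the interval first rounded up to whole minutes. A quick sanity check with the interval: 30 default from the config:

// Math.ceil(config.fetch.watch.interval) * 1000 * 60 with interval = 30:
Math.ceil(30) * 1000 * 60; // => 1800000 ms, i.e. 30 minutes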
@@ -57,6 +57,10 @@ function getArgs() {
       describe: 'Ignore index file and force a redownload of everything in the selection. Does not affect [before|after]-indexed',
       type: 'boolean',
     })
+    .option('watch', {
+      describe: 'Keep the process running and periodically check for new posts',
+      type: 'boolean',
+    })
     .option('archives', {
       describe: 'Search archives for deleted posts',
       type: 'boolean',
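Note: with the new yargs option registered, watch mode would be enabled from the command line roughly as follows. The entry point and the user selection are illustrative; only the --watch flag itself comes from this diff:

node src/app.js --users someuser --watch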
@@ -48,8 +48,7 @@ function getFilepath(item, post, user) {
 }
 
 async function fetchSaveContent(user, ep) {
-  // async, nothing depends on its success so don't await
-  saveProfileDetails(user);
+  const profilePaths = await saveProfileDetails(user);
 
   const posts = await Promise.map(user.posts, async (post) => {
     await Promise.reduce(post.content.items, async (accItems, originalItem, index) => {

@@ -76,7 +75,7 @@ async function fetchSaveContent(user, ep) {
     return post;
   });
 
-  return writeToIndex(posts, user);
+  return writeToIndex(posts, profilePaths, user);
 }
 
 module.exports = fetchSaveContent;
@@ -1,45 +1,71 @@
 'use strict';
 
 const config = require('config');
-const urlPattern = require('url-pattern');
+const Promise = require('bluebird');
+const UrlPattern = require('url-pattern');
 
 const interpolate = require('../interpolate.js');
 const fetchItem = require('../fetch/item.js');
 const textToStream = require('./textToStream.js');
 const save = require('./save.js');
 
-function saveProfileDetails(user) {
-  if(config.library.profile.image && !user.fallback && !user.deleted) {
+async function saveProfileImage(user) {
+  if (config.library.profile.image && !user.fallback && !user.deleted) {
     const image = user.profile ? user.profile.image : user.image;
 
-    if(config.library.profile.avoidAvatar && new urlPattern('http(s)\\://(www.)redditstatic.com/avatars/:id(.:ext)(?:query)').match(image)) {
+    if (config.library.profile.avoidAvatar && new UrlPattern('http(s)\\://(www.)redditstatic.com/avatars/:id(.:ext)(?:query)').match(image)) {
       console.log('\x1b[33m%s\x1b[0m', `Ignoring standard avatar profile image for '${user.name}'`);
-    } else {
-      const filepath = interpolate(config.library.profile.image, user, null, {
-        // pass profile image as item to interpolate extension variable
-        url: image
-      });
 
-      fetchItem(image, 0, {permalink: `https://reddit.com/user/${user.name}`}).then(stream => save(filepath, stream)).catch(error => {
-        console.log('\x1b[33m%s\x1b[0m', `Could not save profile image for '${user.name}': ${error}`);
-      });
+      return null;
+    }
+
+    const filepath = interpolate(config.library.profile.image, user, null, {
+      // pass profile image as item to interpolate extension variable
+      url: image,
+    });
+
+    try {
+      const stream = await fetchItem(image, 0, { permalink: `https://reddit.com/user/${user.name}` });
+      const targets = await save(filepath, stream);
+
+      return targets[0];
+    } catch (error) {
+      console.log('\x1b[33m%s\x1b[0m', `Could not save profile image for '${user.name}': ${error}`);
+
+      return null;
     }
   }
+}
 
-  if(config.library.profile.description && !user.fallback && !user.deleted) {
-    if(user.profile && user.profile.description) {
+async function saveProfileDescription(user) {
+  if (config.library.profile.description && !user.fallback && !user.deleted) {
+    if (user.profile && user.profile.description) {
       const filepath = interpolate(config.library.profile.description, user);
       const stream = textToStream(user.profile.description);
 
-      save(filepath, stream).catch(error => {
-        console.log('\x1b[33m%s\x1b[0m', `Could not save profile description for '${user.name}': ${error}`);
-      });
-    } else {
-      console.log('\x1b[33m%s\x1b[0m', `No profile description for '${user.name}'`);
-    }
-  }
-
-  return user;
-};
+      console.log(filepath);
+      try {
+        const targets = await save(filepath, stream);
+
+        return targets[0];
+      } catch (error) {
+        console.log('\x1b[33m%s\x1b[0m', `Could not save profile description for '${user.name}': ${error}`);
+
+        return null;
+      }
+    }
+
+    console.log('\x1b[33m%s\x1b[0m', `No profile description for '${user.name}'`);
+
+    return null;
+  }
+}
+
+async function saveProfileDetails(user) {
+  const [image, description] = await Promise.all([saveProfileImage(user), saveProfileDescription(user)]);
+
+  return { image, description };
+}
 
 module.exports = saveProfileDetails;
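Note: saveProfileDetails now resolves only after both profile writes have settled, which is what lets the watch cycle wait for the profile detail write to finalize. A minimal sketch of the caller's view (the example paths are hypothetical; real targets come from interpolate() and the library config):

// Both saves run concurrently via Promise.all; the await blocks until both settle.
const profilePaths = await saveProfileDetails(user);
// => { image: 'library/someuser/profile.jpg', description: 'library/someuser/profile.txt' }
// Either field is null when the corresponding save was skipped or failed.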
@@ -32,7 +32,7 @@ function pipeStreamToFile(target, stream, item) {
     stream.pipe(file);
 
     stream.on('error', reject);
-    stream.on('finish', () => {
+    stream.on('end', () => {
       if (item && item.mux) {
         console.log(`Temporarily saved '${target}', queued for muxing`);
       } else {
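Note: this fixes the event name. 'finish' is emitted by writable streams, while readable streams emit 'end', and the listener here is attached to the source stream rather than to file. A minimal standalone repro of the difference:

const { Readable } = require('stream');

const source = Readable.from(['some data']);
source.resume(); // consume the stream so it can end
source.on('finish', () => console.log('never fires on a readable stream'));
source.on('end', () => console.log("'end' fires once the readable is fully consumed"));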
@@ -6,10 +6,11 @@ const yaml = require('js-yaml');
 
 const interpolate = require('../interpolate.js');
 
-async function writeToIndex(posts, user) {
+async function writeToIndex(posts, profilePaths, user) {
   const filename = interpolate(config.library.index.file, user, null, false);
   const now = new Date();
 
+  /*
   // Individual posts are wrapped in [] to get a YAML array value for each individual item, allowing them to be joined manually with a newline
   // between each entry to improve human readability of the index while maintaining a valid YAML list
   const originalEntries = user.indexed.original.map(entry => yaml.safeDump([entry]));
@@ -25,8 +26,28 @@ async function writeToIndex(posts, user) {
   }]));
 
   const entries = newAndUpdatedEntries.concat(originalEntries).join('\n');
+  */
 
-  return fs.writeFile(filename, entries);
+  const newAndUpdatedEntries = posts.concat(user.indexed.updated).map(post => ({
+    id: post.id,
+    subreddit: post.subreddit,
+    permalink: post.permalink,
+    url: post.url,
+    hostId: post.host.id,
+    date: post.datetime,
+    indexed: now,
+    title: post.title,
+  }));
+
+  const data = {
+    profile: {
+      image: profilePaths.image,
+      description: profilePaths.description,
+    },
+    posts: newAndUpdatedEntries.concat(user.indexed.original),
+  };
+
+  return fs.writeFile(filename, yaml.safeDump(data));
 }
 
 module.exports = writeToIndex;
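Note: the index file is now one YAML document with a profile section and a posts list, rather than manually joined per-post dumps. Given the fields collected above, a written index would presumably look something like this (all values illustrative):

profile:
  image: library/someuser/profile.jpg
  description: library/someuser/profile.txt
posts:
  - id: abc123
    subreddit: pics
    permalink: /r/pics/comments/abc123/example_title/
    url: https://i.redd.it/example.jpg
    hostId: example
    date: 2019-06-01T12:00:00.000Z
    indexed: 2019-06-02T08:30:00.000Z
    title: Example title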
@@ -16,7 +16,7 @@ async function getIndexedPosts(user) {
   } catch (error) {
     console.log('\x1b[33m%s\x1b[0m', `Could not load index file for '${user.name}' at '${indexFilePath}': ${error}`);
 
-    return [];
+    return { profile: { image: null, description: null }, posts: [] };
   }
 }
@@ -54,7 +54,7 @@ function getUserPostsWrap(reddit, args) {
     getPosts(username, reddit, args),
   ]);
 
-  const indexed = await getIndexedPosts(user);
+  const { posts: indexed } = await getIndexedPosts(user);
 
   if (args.archives) {
     posts.push(...await getArchivedPosts(username, posts, reddit));