Refactored post fetching. Cleaner code that will now fetch archived posts even if the user no longer exists on reddit. Expanded gfycat URL detection.

This commit is contained in:
DebaucheryLibrarian 2024-09-11 05:16:55 +02:00
parent e0ca4a0822
commit c2cf26dde6
7 changed files with 49 additions and 42 deletions

View File

@@ -46,4 +46,3 @@ Promise.resolve().then(() => {
}).catch(error => {
    return console.error(error);
});

View File

@@ -31,5 +31,6 @@ module.exports = yargs.command('npm start -- --user <username>').option('users', {
    type: 'array'
}).option('archives', {
    describe: 'Search archives for deleted posts',
-    type: 'boolean'
+    type: 'boolean',
+    default: config.fetch.archives.search
}).argv;

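A minimal sketch (not part of the commit) of what the new default means for callers: since the archives option now defaults to config.fetch.archives.search, code receiving the parsed arguments only needs to check args.archives instead of re-reading the config, which is presumably why the getUserPosts refactor below drops its explicit config.fetch.archives.search check. The ./args.js path is assumed.

// Sketch only: ./args.js is assumed to export the yargs result shown above.
const args = require('./args.js');

if(args.archives) {
    // true when --archives was passed or config.fetch.archives.search is enabled
    console.log('Searching archives for deleted posts');
}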
View File

@@ -11,6 +11,7 @@ function curateUser(user) {
        gold: user.is_gold,
        verified: user.verified,
        verifiedEmail: user.has_verified_email,
+        fallback: false
    };
    if(user.subreddit) {

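For illustration (not part of the commit), the fallback flag gives every user object a consistent shape to branch on; the field values below are invented, and the exact curated fields come from curate/user.js.

// Illustration only: the two user shapes in play after this commit.
const curatedUser = {
    name: 'someuser',      // values invented for the example
    gold: false,
    verified: true,
    verifiedEmail: true,
    fallback: false
};

const fallbackUser = {     // shape built when the reddit account no longer exists
    name: 'someuser',
    fallback: true
};

// Downstream code can test the flag to skip profile assets for deleted accounts:
[curatedUser, fallbackUser].forEach(user => {
    if(!user.fallback) {
        console.log(`would save profile details for ${user.name}`);
    }
});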
View File

@@ -37,7 +37,7 @@ const hosts = [{
}, {
    method: 'gfycat',
    label: 'gfycat',
-    pattern: new urlPattern('http(s)\\://(:server.)gfycat.com/:id(.:ext)')
+    pattern: new urlPattern('http(s)\\://(:server.)gfycat.com/(gifs/detail/):id(.:ext)')
}, {
    method: 'eroshareAlbum',
    label: 'eroshare',

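A quick sanity check for the expanded pattern (not part of the commit), assuming the urlPattern constructor above is the url-pattern npm package; the example IDs are made up. match() returns the captured parameters, or null for URLs the pattern does not cover.

const UrlPattern = require('url-pattern');

const gfycat = new UrlPattern('http(s)\\://(:server.)gfycat.com/(gifs/detail/):id(.:ext)');

console.log(gfycat.match('https://gfycat.com/ExampleGifName'));              // plain link, matched before and after the change
console.log(gfycat.match('https://gfycat.com/gifs/detail/ExampleGifName'));  // newly covered /gifs/detail/ form
console.log(gfycat.match('https://giant.gfycat.com/ExampleGifName.webm'));   // subdomain and extension captured via :server and :ext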
View File

@@ -38,7 +38,7 @@ function interpolate(pattern, user, post, item) {
    Object.assign(vars, {
        $postId: post.id,
        $postTitle: (post.title || '').slice(0, config.library.titleLength),
-        $postUser: post.user || user.user,
+        $postUser: post.user,
        $postDate: dateFns.format(post.datetime, dateFormat),
        $postIndex: post.index + config.library.indexOffset,
        $subreddit: post.subreddit,

View File

@@ -9,7 +9,7 @@ const textToStream = require('./textToStream.js');
const save = require('./save.js');
function saveProfileDetails(user) {
-    if(config.library.profile.image) {
+    if(config.library.profile.image && !user.fallback) {
        const image = user.profile ? user.profile.image : user.image;
        if(config.library.profile.avoidAvatar && new urlPattern('http(s)\\://(www.)redditstatic.com/avatars/:id(.:ext)(?:query)').match(image)) {
@@ -20,13 +20,13 @@ function saveProfileDetails(user) {
                url: image
            });
-            fetchItem(image).then(stream => save(filepath, stream)).catch(error => {
+            fetchItem(image, 0, {permalink: `https://reddit.com/user/${user.name}`}).then(stream => save(filepath, stream)).catch(error => {
                console.log('\x1b[33m%s\x1b[0m', `Could not save profile image for '${user.name}': ${error}`);
            });
        }
    }
-    if(config.library.profile.description) {
+    if(config.library.profile.description && !user.fallback) {
        if(user.profile && user.profile.description) {
            const filepath = interpolate(config.library.profile.description, user);
            const stream = textToStream(user.profile.description);

View File

@@ -6,42 +6,48 @@ const getArchivePostIds = require('../archives/getArchivePostIds.js');
const curateUser = require('../curate/user.js');
const saveProfileDetails = require('../save/profileDetails.js');
-function getUserPostsWrap(reddit, args) {
-    return function getUserPosts(usernames) {
-        return usernames.reduce((chain, username) => {
-            return chain.then(accPosts => {
-                return reddit.getUser(username).fetch().then(curateUser).then(saveProfileDetails).then(user => ({
-                    user,
-                    accPosts
-                }));
-            }).then(({user, accPosts}) => {
-                return reddit.getUser(username).getSubmissions({
-                    sort: args.sort,
-                    limit: Infinity
-                }).then(posts => ({
-                    user,
-                    accPosts: accPosts.concat(posts)
-                }));
-            }).then(({user, accPosts}) => {
-                if(args.archives || config.fetch.archives.search) {
-                    return getArchivePostIds(username, accPosts.map(post => post.id)).then(postIds => {
-                        return Promise.all(postIds.map(postId => {
-                            return reddit.getSubmission(postId).fetch();
-                        }));
-                    }).then(archivedPosts => {
-                        return {
-                            user,
-                            accPosts: accPosts.concat(archivedPosts)
-                        };
-                    });
-                }
+const getUser = (username, reddit) => {
+    return reddit.getUser(username).fetch().then(user => curateUser(user)).catch(error => {
+        console.log('\x1b[31m%s\x1b[0m', `Failed to fetch reddit user '${username}': ${error.message} (https://reddit.com/user/${username})`);
-                return {user, accPosts};
-            }).then(({user, accPosts}) => {
-                return accPosts.map(post => Object.assign(post, {user}));
-            });
-        }, Promise.resolve([]));
-    };
+        return {
+            name: username,
+            fallback: true
+        };
+    });
+};
+const getPosts = (username, reddit, args) => {
+    return reddit.getUser(username).getSubmissions({
+        sort: args.sort,
+        limit: Infinity
+    }).catch(error => {
+        console.log('\x1b[31m%s\x1b[0m', `Failed to fetch posts from reddit user '${username}': ${error.message} (https://reddit.com/user/${username})`);
+        return [];
+    });
+};
+const getUserPostsWrap = (reddit, args) => usernames => Promise.all(
+    usernames.map(async username => {
+        const [user, posts] = await Promise.all([
+            getUser(username, reddit),
+            getPosts(username, reddit, args)
+        ]);
+        if(user) {
+            saveProfileDetails(user);
+        }
+        if(args.archives) {
+            const postIds = await getArchivePostIds(username, posts.map(post => post.id));
+            const archivedPosts = await Promise.all(postIds.map(postId => reddit.getSubmission(postId).fetch()));
+            posts.push(...archivedPosts);
+        }
+        return posts.map(post => Object.assign(post, {user}));
+    })
+).then(posts => posts.flatten());
module.exports = getUserPostsWrap;
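For orientation (not part of the commit), a sketch of how the refactored wrapper might be driven. It assumes the reddit client is snoowrap (whose getUser/getSubmissions/getSubmission calls the code above matches), that this module is required as ./getUserPosts.js, and that args mirrors the yargs options; credentials and the username are placeholders.

const snoowrap = require('snoowrap');
const getUserPostsWrap = require('./getUserPosts.js'); // path assumed

const reddit = new snoowrap({
    userAgent: 'example-agent',   // placeholder credentials
    clientId: '...',
    clientSecret: '...',
    refreshToken: '...'
});

const args = {sort: 'new', archives: true};

getUserPostsWrap(reddit, args)(['some_username'])
    .then(posts => {
        // Each post carries either the curated user or the {name, fallback: true}
        // placeholder, so archived posts are kept even when the account is gone.
        posts.forEach(post => console.log(post.id, post.user.name));
    })
    .catch(console.error);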