Using new HTTP module with a dynamic rate limiter.

This commit is contained in:
DebaucheryLibrarian
2020-11-22 04:07:09 +01:00
parent 5d0fe44130
commit b9b777c621
27 changed files with 358 additions and 175 deletions
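
The diff below does not include the new HTTP module itself, so purely as a hedged sketch: a "dynamic" per-host rate limiter wrapping an HTTP client could be built on bottleneck roughly as follows. The bhttp client, the default interval, and all names here are assumptions for illustration, not the actual module.

const bhttp = require('bhttp'); // assumed underlying client
const Bottleneck = require('bottleneck');

const limiters = {}; // one limiter per hostname, created on demand

function getLimiter(url, interval = 100) {
	const { hostname } = new URL(url);

	if (!limiters[hostname]) {
		// "dynamic": limiters are created lazily and can differ per host
		limiters[hostname] = new Bottleneck({ maxConcurrent: 1, minTime: interval });
	}

	return limiters[hostname];
}

async function get(url, options = {}) {
	// schedule() queues the call so no single host is hit faster than minTime
	return getLimiter(url, options.interval).schedule(() => bhttp.get(url, { headers: options.headers }));
}

module.exports = { get };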


@@ -420,15 +420,18 @@ async function storeFile(media) {
 	} catch (error) {
 		logger.warn(`Failed to store ${media.src}: ${error.message}`);
 		await fsPromises.unlink(media.file.path);
 		return null;
 	}
 }
 async function fetchHttpSource(source, tempFileTarget, hashStream) {
 	const res = await http.get(source.src, {
-		...(source.referer && { referer: source.referer }),
-		...(source.host && { host: source.host }),
-	}, {
+		headers: {
+			...(source.referer && { referer: source.referer }),
+			...(source.host && { host: source.host }),
+		},
 		stream: true, // sources are fetched in parallel, don't gobble up memory
 		transforms: [hashStream],
 		destination: tempFileTarget,
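
The stream, transforms and destination options suggest the module pipes each response through the supplied transform streams (here the hash stream) directly into the temp file target, which is what keeps parallel fetches from buffering whole files in memory. A rough sketch of that plumbing under those assumptions, with hypothetical names:

const { promisify } = require('util');
const stream = require('stream');

const pipeline = promisify(stream.pipeline);

// Hypothetical internals for options.stream: chain the response through each
// transform into the destination so the body is never held in memory.
async function drainToDestination(res, { transforms = [], destination }) {
	await pipeline(res, ...transforms, destination);
	return res;
}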
@@ -642,7 +645,7 @@ async function storeMedias(baseMedias) {
 		);
 	}
-	const newMediaWithEntries = savedMedias.map((media, index) => curateMediaEntry(media, index));
+	const newMediaWithEntries = savedMedias.filter(Boolean).map((media, index) => curateMediaEntry(media, index));
 	const newMediaEntries = newMediaWithEntries.filter(media => media.newEntry).map(media => media.entry);
 	await bulkInsert('media', newMediaEntries);
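
Since storeFile() resolves to null when a download fails (the catch block in the first hunk), savedMedias can contain gaps; the added filter(Boolean) drops those before curateMediaEntry runs. A trivial illustration with made-up values:

const savedMedias = [{ id: 'a' }, null, { id: 'c' }]; // middle download failed
const stored = savedMedias.filter(Boolean);
// stored: [{ id: 'a' }, { id: 'c' }], so curateMediaEntry never receives null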