Fixed Vixen scraper, using new token URL for trailers.
src/utils/buffer.js  Normal file  +118
@@ -0,0 +1,118 @@
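// Standalone memory-usage experiment: downloads a test file n times, either fully
// buffered in memory or streamed straight to disk, hashes each payload with BLAKE2b
// and reports peak RSS alongside per-download progress.
// Presumed invocation (inferred from the yargs handling in init() below):
//   node src/utils/buffer.js stream --n 4
//   node src/utils/buffer.js buffer --n 4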
'use strict';

const bhttp = require('bhttp');
const Promise = require('bluebird');
const fsPromises = require('fs').promises;
const fs = require('fs');
const { PassThrough } = require('stream');
const blake2 = require('blake2');
const argv = require('yargs').argv;

const file = 'https://speed.hetzner.de/100MB.bin';
// const file = 'https://speed.hetzner.de/1GB.bin';
// const file = 'https://speed.hetzner.de/10GB.bin';

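// Current resident set size in megabytes.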
function getMemoryUsage() {
  return process.memoryUsage().rss / (10 ** 6);
}

const stats = {
  peakMemoryUsage: getMemoryUsage(),
  done: false,
  downloads: {},
};

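// Redraws the progress lines in place and reschedules itself every second
// until every download reports a hash string instead of a percentage.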
function render() {
  const downloads = Object.entries(stats.downloads);

  process.stdout.clearScreenDown();

  process.stdout.write(`peak memory: ${stats.peakMemoryUsage.toFixed(2)} MB\n`);

  downloads.forEach(([download, progress]) => {
    process.stdout.write(`${download}: ${progress}${typeof progress === 'string' ? '' : '%'}\n`);
  });

  process.stdout.moveCursor(0, -(downloads.length + 1));
  process.stdout.cursorTo(0);

  if (downloads.length === 0 || !downloads.every(([_label, download]) => typeof download === 'string')) {
    setTimeout(() => render(), 1000);
    return;
  }

  process.stdout.moveCursor(0, downloads.length + 1);
}

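// Records peak memory and either a completion percentage or, once finished, the hex digest.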
function setProgress(label, completedBytes, totalBytes, hash) {
  const memory = getMemoryUsage();

  stats.peakMemoryUsage = Math.max(memory, stats.peakMemoryUsage);
  stats.downloads[label] = hash || Math.round((completedBytes / totalBytes) * 100);
}

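// Buffered mode: bhttp holds the entire response body in memory before it is
// hashed and written out, so RSS should grow with the payload size.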
async function buffered(label) {
  const hash = new blake2.Hash('blake2b');

  const imageRes = await bhttp.get(file, {
    onDownloadProgress(completedBytes, totalBytes) {
      setProgress(label, completedBytes, totalBytes);
    },
  });

  hash.update(imageRes.body);
  setProgress(label, null, null, hash.digest('hex'));

  await fsPromises.writeFile(`/mnt/stor/Pictures/traxxx/temp/buffered-${label}.bin`, imageRes.body);
}

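// Streamed mode: the response is piped through a PassThrough (hashed chunk by chunk)
// into a file write stream, so memory use should stay roughly flat.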
async function streamed(label) {
  const hash = new blake2.Hash('blake2b');
  hash.setEncoding('hex');

  const hashStream = new PassThrough();
  const targetStream = fs.createWriteStream(`/mnt/stor/Pictures/traxxx/temp/streamed-${label}.bin`);

  const imageRes = await bhttp.get(file, {
    stream: true,
  });

  const stream = imageRes
    .pipe(hashStream)
    .pipe(targetStream);

  imageRes.on('progress', (completedBytes, totalBytes) => {
    setProgress(label, completedBytes, totalBytes);
  });

  hashStream.on('data', (chunk) => {
    hash.write(chunk);
  });

  stream.on('finish', () => {
    hash.end();
    setProgress(label, null, null, hash.read());
  });
}

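// Mode ('stream' or 'buffer') comes from the positional argument, concurrency from --n.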
async function init() {
  const n = argv.n || 1;

  if (argv._.includes('stream')) {
    console.log('using streams');
    render();

    await Promise.map(Array.from({ length: n }), async (value, index) => streamed(index + 1));

    return;
  }

  if (argv._.includes('buffer')) {
    console.log('using buffers');
    render();

    await Promise.map(Array.from({ length: n }), async (value, index) => buffered(index + 1));
  }
}

init();