Completed Erome module. Added content URL list fetching.
parent aecf79fa25
commit 443f0fc028
```diff
@@ -15,6 +15,8 @@ Most features are optional and can easily be disabled!
 * Reddit text/self, images and videos[\*](#reddit-videos)
 * Imgur (requires API key as of late 2019)
 * Gfycat
+* Erome
+* Vidble
 * Eroshare archive
 
 ### Plans and ideas
```
src/app.js (46 changes)
```diff
@@ -70,33 +70,27 @@ async function getCompletePosts() {
   return attachContentInfo(curatedUserPosts);
 }
 
-async function getDirectContent(link, ep) {
-  const host = dissectLink(link);
-  const info = await getInfo(host);
-
-  console.log(info);
-
-  return fetchSaveDirectContent(info, host, ep);
-
-  /*
-  console.log(info);
-
-  await Promise.all(info.items.map(async (item, index) => {
-    const stream = await fetchItem(item.url);
-
-    if (info.album) {
-      const filepath = `./output/${host.label}/${host.id} - ${info.album.title}/${index + 1} - ${item.id}${path.extname(url.parse(item.url).pathname)}`;
-      console.log(filepath);
-
-      return saveItem(filepath, stream, item);
-    }
-
-    const filepath = `./output/${host.label}/${item.id}${path.extname(url.parse(item.url).pathname)}`;
-    console.log(filepath);
-
-    return saveItem(filepath, stream, item);
-  }));
-  */
+async function getDirectContent(links, ep) {
+  return Promise.map(links, async (link) => {
+    const host = dissectLink(link);
+    const info = await getInfo(host);
+
+    return fetchSaveDirectContent(info, host, ep);
+  }, {
+    concurrency: 5,
+  });
+}
+
+async function getCompleteContents(ep) {
+  if (args.fetch) {
+    return getDirectContent([args.fetch], ep);
+  }
+
+  if (args.fileDirect) {
+    return getDirectContent(await getFileContents(args.fileDirect, 'direct'), ep);
+  }
+
+  return null;
 }
 
 function fetchSavePosts(userPosts, ep) {
```
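The rewritten `getDirectContent` now takes a list of URLs and processes them with a concurrency limit of 5, while the new `getCompleteContents` feeds it either the single `--fetch` URL or the URLs loaded from a file. Native promises have no `Promise.map`, so this presumably relies on bluebird (its import sits outside this hunk). A minimal sketch of the pattern, under that assumption and with made-up URLs:

```js
// Concurrency-limited mapping, assuming `Promise` is bluebird rather than
// the native Promise (which has no Promise.map).
const Promise = require('bluebird');

const links = [
  'https://example.com/a/1', // hypothetical URLs for illustration
  'https://example.com/a/2',
  'https://example.com/a/3',
];

Promise.map(links, async (link) => {
  // Each link is handled independently; one rejection fails the whole map.
  return `fetched ${link}`;
}, {
  concurrency: 5, // at most five links are in flight at any time
}).then((results) => console.log(results));
```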
```diff
@@ -107,15 +101,15 @@ function fetchSavePosts(userPosts, ep) {
 async function initApp() {
   try {
     const ep = new exiftool.ExiftoolProcess(exiftoolBin);
-    if (args.fetch) {
-      await getDirectContent(args.fetch, ep);
+    await ep.open();
+
+    if (args.fetch || args.fileDirect) {
+      await getCompleteContents(ep);
       return;
     }
 
     const userPosts = await getCompletePosts();
 
-    await ep.open();
-
     await fetchSavePosts(userPosts, ep);
     await ep.close();
 
```
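With `ep.open()` moved ahead of the branch, the exiftool process is ready for both the direct-content path and the regular user-post path. A rough sketch of that lifecycle, assuming `exiftool` and `exiftoolBin` come from the node-exiftool and dist-exiftool packages (their requires sit outside this hunk) and using a hypothetical `withExiftool` helper:

```js
const exiftool = require('node-exiftool');    // assumed package
const exiftoolBin = require('dist-exiftool'); // assumed binary package

// Hypothetical helper showing the open/use/close order only.
async function withExiftool(work) {
  const ep = new exiftool.ExiftoolProcess(exiftoolBin);
  await ep.open();           // open once, before either code path runs
  try {
    return await work(ep);   // e.g. fetch content and tag files through ep
  } finally {
    await ep.close();        // always shut the exiftool process down
  }
}
```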
```diff
@@ -29,6 +29,11 @@ function getArgs() {
       type: 'string',
       alias: 'fetch',
     })
+    .option('file-direct', {
+      describe: 'Load direct content URLs from file',
+      type: 'string',
+      alias: 'file-fetch',
+    })
     .option('limit', {
       describe: 'Maximum amount of posts to fetch per supplied user (!), after filtering out ignored, cross- and reposts',
       type: 'number',
```
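The option is registered as `file-direct`, while `app.js` checks `args.fileDirect`; that works because yargs also exposes dashed options under a camel-case key. A small sketch with a fabricated argv (`urls.txt` is made up):

```js
const yargs = require('yargs');

const argv = yargs
  .option('file-direct', {
    describe: 'Load direct content URLs from file',
    type: 'string',
    alias: 'file-fetch',
  })
  .parse(['--file-direct', 'urls.txt']);

console.log(argv['file-direct']); // 'urls.txt'
console.log(argv.fileDirect);     // 'urls.txt', camel-case key added by yargs
```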
```diff
@@ -42,6 +42,10 @@ const hosts = [{
   method: 'gfycat',
   label: 'gfycat',
   pattern: new UrlPattern('http(s)\\://(:server.)gfycat.com/(gifs/detail/)(:id-mobile)(:id-size_restricted)(:id)(.:ext)(?*)'),
+}, {
+  method: 'erome',
+  label: 'erome',
+  pattern: new UrlPattern('http(s)\\://(www.)erome.com/a/:id(?*)'),
 }, {
   method: 'eroshareAlbum',
   label: 'eroshare',
```
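A quick sketch of what the new pattern captures, assuming the table is built on the url-pattern package (the album URL below is made up):

```js
const UrlPattern = require('url-pattern');

const pattern = new UrlPattern('http(s)\\://(www.)erome.com/a/:id(?*)');

// A hypothetical album URL; match() should yield the captured album id.
console.log(pattern.match('https://www.erome.com/a/abc123')); // { id: 'abc123' }
```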
```diff
@@ -1,30 +1,76 @@
 'use strict';
 
+const config = require('config');
 const fetch = require('node-fetch');
 const cheerio = require('cheerio');
 
 const base = 'https://www.erome.com/';
 
-async function erome(host) {
-  const res = await fetch(`${base}a/${host.id}`);
+async function erome(host, post) {
+  const url = `${base}a/${host.id}`;
+  const res = await fetch(url);
 
-  if (res.ok) {
+  if (!res.ok) {
     throw new Error(`Unable to retrieve info for Erome album '${host.id}' :(`);
   }
 
   const html = await res.text();
 
   const $ = cheerio.load(html);
-  const videoUrls = $('video').toArray().map((videoEl) => {
-    const sourceHd = $(videoEl).find('source[label="HD"]');
-    const sourceSd = $(videoEl).find('source[label="SD"]');
-
-    console.log(sourceHd.attr('src'));
+  const title = $('meta[property="og:title"]').attr('content') || $('meta[property="twitter:title"]').attr('content');
 
-    return sourceHd ? base + sourceHd.attr('src') : base + sourceSd.attr('src');
+  const items = $('.media-group').toArray().map((mediaItem) => {
+    const mediaElement = $(mediaItem);
+    const videoElement = mediaElement.find('.video video');
+
+    const id = mediaElement.attr('id');
+    const itemTitle = mediaElement.find('h2.media-title').text();
+
+    if (videoElement.length) {
+      const sourceHd = videoElement.find('source[label="HD"]');
+      const sourceSd = videoElement.find('source[label="SD"]');
+
+      return {
+        id,
+        title: itemTitle,
+        url: sourceHd.length ? sourceHd.attr('src') : sourceSd.attr('src'),
+        type: (sourceHd.length ? sourceHd.attr('type') : sourceSd.attr('type')) || 'video/mp4',
+      };
+    }
+
+    const img = mediaElement.find('.img-front').attr('data-src');
+
+    return {
+      id,
+      title: itemTitle,
+      url: img,
+    };
   });
 
-  console.log(videoUrls);
+  const extract = config.library.extractSingleAlbumItem && (items.length === 1);
+
+  if (extract) {
+    console.log('\x1b[36m%s\x1b[0m', `Extracting single item from album '${url}' (${post ? post.url : 'no post'})`);
+  }
+
+  return {
+    album: extract ? null : {
+      id: host.id,
+      url,
+      title,
+      description: null,
+      datetime: null,
+    },
+    items: items.map(item => ({
+      extracted: extract,
+      id: item.id,
+      url: item.url,
+      title: item.title || title || null,
+      description: null,
+      type: item.type || 'image/jpeg',
+      datetime: null,
+    })),
+  };
 }
 
 module.exports = erome;
```
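For context, a hypothetical caller of the finished module (the relative path and the hand-built `host` object are illustrative; in the project the host object comes from `dissectLink`):

```js
const erome = require('./erome'); // path is illustrative

(async () => {
  const info = await erome({ id: 'abc123', label: 'erome' }, null);

  // `album` is null when a single item is extracted, otherwise it carries the
  // album metadata; `items` always lists the resolved media URLs.
  console.log(info.album);
  console.log(info.items.map(item => item.url));
})();
```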