forked from DebaucheryLibrarian/traxxx

Added configurable proxy to HTTP module (also used by qu). Added network and site URL to search documents.

parent e4b269956e
commit 4b310e9dfa
@@ -139,6 +139,19 @@ module.exports = {
     'freeones',
     'freeonesLegacy',
   ],
+  proxy: {
+    enable: false,
+    host: '',
+    port: 8888,
+    hostnames: [
+      'www.vixen.com',
+      'www.blacked.com',
+      'www.blackedraw.com',
+      'www.tushy.com',
+      'www.tushyraw.com',
+      'www.deeper.com',
+    ],
+  },
   fetchAfter: [1, 'week'],
   nullDateLimit: 3,
   media: {
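Note: the proxy block above ships disabled. Because the project loads settings through the config package (see the require('config') added to the HTTP module below), a deployment could enable it with a local override. A minimal sketch, assuming node-config's usual config/local.js layering; the host and port values are placeholders, not part of this commit:

// config/local.js — hypothetical node-config override, for illustration only
module.exports = {
  proxy: {
    enable: true,
    host: '127.0.0.1', // placeholder: address of an HTTP CONNECT proxy
    port: 8888,
    // hostnames from the default config are kept unless overridden here
  },
};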
@@ -11050,6 +11050,11 @@
         }
       }
     },
+    "tunnel": {
+      "version": "0.0.6",
+      "resolved": "https://registry.npmjs.org/tunnel/-/tunnel-0.0.6.tgz",
+      "integrity": "sha512-1h/Lnq9yajKY2PEbBadPXj3VxsDDu844OnaAo52UVmIzIvwwtBPIuNvkjuzBlTWpfJyUbG3ez0KSBibQkj4ojg=="
+    },
     "tunnel-agent": {
       "version": "0.6.0",
       "resolved": "https://registry.npmjs.org/tunnel-agent/-/tunnel-agent-0.6.0.tgz",
@@ -110,6 +110,7 @@
     "template-format": "^1.2.5",
     "tough-cookie": "^3.0.1",
     "tty-table": "^2.8.12",
+    "tunnel": "0.0.6",
     "url-pattern": "^1.0.3",
     "v-tooltip": "^2.0.3",
     "vue": "^2.6.11",
Binary file not shown (new image, 12 KiB).
@@ -8,6 +8,7 @@ const knex = require('./knex');
 const fetchUpdates = require('./updates');
 const fetchDeep = require('./deep');
 const { storeReleases } = require('./store-releases');
+const { updateReleasesSearch } = require('./releases');
 // const { storeReleaseActors } = require('./actors');
 
 async function init() {
@@ -16,6 +17,12 @@ async function init() {
     return;
   }
 
+  if (argv.updateSearch) {
+    await updateReleasesSearch();
+    knex.destroy();
+    return;
+  }
+
   const updateBaseReleases = (argv.scrape || argv.sites || argv.networks) && await fetchUpdates();
 
   const updateDeepReleases = updateBaseReleases && await fetchDeep(updateBaseReleases);
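For reference, the same rebuild can be sketched programmatically with the modules required above; the relative paths come from the requires in this diff, but running it as a standalone script rather than through the new argv.updateSearch branch is an assumption:

// rebuild-search.js — hedged standalone sketch mirroring the new CLI branch
const knex = require('./knex');
const { updateReleasesSearch } = require('./releases');

async function rebuildSearch() {
  // Called without release IDs, so documents are regenerated for every release
  // (the optional WHERE clause in updateReleasesSearch below is skipped in that case).
  await updateReleasesSearch();
  knex.destroy();
}

rebuildSearch();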
@@ -368,8 +368,10 @@ async function updateReleasesSearch(releaseIds) {
         releases.title || ' ' ||
         networks.name || ' ' ||
         networks.slug || ' ' ||
+        networks.url || ' ' ||
         sites.name || ' ' ||
         sites.slug || ' ' ||
+        COALESCE(sites.url, '') || ' ' ||
         COALESCE(sites.alias, '') || ' ' ||
         COALESCE(releases.shoot_id, '') || ' ' ||
         COALESCE(TO_CHAR(releases.date, 'YYYY YY MM FMMM FMmonth mon DD FMDD'), '') || ' ' ||
@@ -386,7 +388,7 @@ async function updateReleasesSearch(releaseIds) {
         LEFT JOIN tags ON local_tags.tag_id = tags.id
         LEFT JOIN tags as tags_aliases ON local_tags.tag_id = tags_aliases.alias_for
         ${releaseIds ? 'WHERE releases.id = ANY(?)' : ''}
-        GROUP BY releases.id, sites.name, sites.slug, sites.alias, networks.name, networks.slug;
+        GROUP BY releases.id, sites.name, sites.slug, sites.alias, sites.url, networks.name, networks.slug, networks.url;
     `, releaseIds && [releaseIds]);
 
     if (documents.rows?.length > 0) {
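With networks.url and COALESCE(sites.url, '') folded into the document text, a full-text query containing a site or network URL can now match a release. A hedged sketch of such a lookup; the releases_search table and document column names are hypothetical illustrations, and only the Postgres full-text operators are standard:

// Hedged sketch — 'releases_search' and 'document' are assumed names, not taken from this diff.
const knex = require('./knex');

async function searchReleases(query) {
  const result = await knex.raw(`
    SELECT releases.*
    FROM releases_search
      JOIN releases ON releases.id = releases_search.release_id
    WHERE releases_search.document @@ plainto_tsquery('english', :query);
  `, { query });

  return result.rows;
}

// A query containing the site URL can now match:
searchReleases('www.blacked.com').then(console.log);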
@@ -1,10 +1,28 @@
 'use strict';
 
+const config = require('config');
+const tunnel = require('tunnel');
 const bhttp = require('bhttp');
 const taskQueue = require('promise-task-queue');
 
 const logger = require('../logger')(__filename);
 
+const proxyAgent = tunnel.httpsOverHttp({
+  proxy: {
+    host: config.proxy.host,
+    port: config.proxy.port,
+  },
+});
+
+function useProxy(url) {
+  if (!config.proxy.enable) {
+    return false;
+  }
+
+  const { hostname } = new URL(url);
+  return config.proxy.hostnames.includes(hostname);
+}
+
 const queue = taskQueue();
 
 queue.on('concurrencyReached:httpGet', () => {
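A standalone illustration of the whitelist check performed by useProxy() above; the config object is inlined here instead of being loaded via require('config'), and enable is flipped to true purely for the demonstration:

// Self-contained sketch of the hostname check; values mirror the defaults added in this commit.
const proxyConfig = {
  enable: true, // the shipped default is enable: false
  hostnames: ['www.vixen.com', 'www.blacked.com'],
};

function shouldProxy(url) {
  if (!proxyConfig.enable) {
    return false;
  }

  return proxyConfig.hostnames.includes(new URL(url).hostname);
}

console.log(shouldProxy('https://www.vixen.com/videos'));   // true, hostname is whitelisted
console.log(shouldProxy('https://www.freeones.com/babes')); // false, hostname is not listed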
@@ -15,47 +33,42 @@ queue.on('concurrencyReached:httpPost', () => {
   logger.silly('Queueing POST requests');
 });
 
-queue.define('httpGet', async ({
-  url,
-  timeout = 30000,
-  options = {},
-}) => {
-  logger.silly(`GET ${url}`);
-
-  const res = await bhttp.get(url, {
-    responseTimeout: timeout,
-    ...options,
-  });
-
-  res.code = res.statusCode;
-
-  return res;
-}, {
-  concurrency: 20,
-});
-
-queue.define('httpPost', async ({
+queue.define('http', async ({
   url,
+  method = 'GET',
   body,
   timeout = 30000,
   options = {},
 }) => {
-  logger.silly(`POST ${url} with ${body}`);
+  if (body) {
+    logger.silly(`${method.toUpperCase()} ${url} with ${body}`);
+  } else {
+    logger.silly(`${method.toUpperCase()} ${url}`);
+  }
 
-  const res = await bhttp.post(url, body, {
+  const reqOptions = {
     responseTimeout: timeout,
     ...options,
-  });
+  };
 
-  res.code = res.statusCode;
+  if (useProxy(url)) {
+    reqOptions.agent = proxyAgent;
+  }
 
-  return res;
+  const res = ['POST', 'PUT', 'PATCH'].includes(method.toUpperCase())
+    ? await bhttp[method.toLowerCase()](url, body, reqOptions)
+    : await bhttp[method.toLowerCase()](url, reqOptions);
+
+  return {
+    ...res,
+    code: res.statusCode,
+  };
 }, {
   concurrency: 20,
 });
 
 async function get(url, options) {
-  return queue.push('httpGet', {
+  return queue.push('http', {
+    method: 'get',
     url,
     options,
@@ -63,7 +76,7 @@ async function get(url, options) {
 }
 
 async function post(url, body, options) {
-  return queue.push('httpPost', {
+  return queue.push('http', {
     url,
     body,
     options,
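Taken together, callers keep the same get() and post() helpers while proxying is applied transparently per hostname. A hedged sketch of a call site, assuming this file is the project's shared HTTP utility; the module path, URL, and headers are illustrative only:

// Hedged usage sketch; './http' refers to the module patched above.
const http = require('./http');

async function fetchLatest() {
  // Routed through the tunnel agent only when config.proxy.enable is true
  // and 'www.vixen.com' is listed in config.proxy.hostnames.
  const res = await http.get('https://www.vixen.com/api/videos?page=1', {
    headers: { Accept: 'application/json' },
  });

  if (res.code === 200) {
    return res.body;
  }

  return null;
}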
@@ -287,7 +287,7 @@ function extractAll(htmlValue, selector) {
   return initAll(window.document, selector, window);
 }
 
-async function get(urlValue, selector, headers, queryAll = false) {
+async function get(urlValue, selector, headers, options, queryAll = false) {
   const res = await http.get(urlValue, {
     headers,
   });
@@ -315,8 +315,8 @@ async function get(urlValue, selector, headers, queryAll = false) {
   };
 }
 
-async function getAll(urlValue, selector, headers) {
-  return get(urlValue, selector, headers, true);
+async function getAll(urlValue, selector, headers, options) {
+  return get(urlValue, selector, headers, options, true);
 }
 
 module.exports = {
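The qu helpers now accept an options argument between headers and the queryAll flag. A hedged call-site sketch; the module path, URL, selector, and option values are invented for illustration, and how options is consumed inside get() is not visible in this hunk:

// Hedged sketch of a scraper call site after the signature change.
const qu = require('./qu');

async function fetchLatestScenes() {
  const items = await qu.getAll(
    'https://www.deeper.com/videos', // URL (illustrative)
    '.scene-card',                   // selector (illustrative)
    null,                            // headers
    { timeout: 60000 },              // options (new fourth argument)
  );

  return items;
}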