109 Commits

Author SHA1 Message Date
DebaucheryLibrarian
818669011e 1.252.7 2026-04-16 04:01:34 +02:00
DebaucheryLibrarian
f962d71d10 Fixed Aylo session acquire, migrated to unprint. Fixed Jules Jordan profile test. 2026-04-16 04:01:31 +02:00
DebaucheryLibrarian
04278284af 1.252.6 2026-04-16 01:16:02 +02:00
DebaucheryLibrarian
946afcb229 Updated Jules Jordan scraper for new site, changed entryId to always use URLs (upcoming WIP). 2026-04-16 01:15:58 +02:00
DebaucheryLibrarian
f96e938417 1.252.5 2026-04-04 00:19:10 +02:00
DebaucheryLibrarian
56169f00d6 Added feeds db table. Using undici agent for proxy. 2026-04-04 00:19:08 +02:00
DebaucheryLibrarian
dc5e510151 1.252.4 2026-04-03 22:45:42 +02:00
DebaucheryLibrarian
3d1b6a7cd6 Using channel origin instead of url for Teen Core Club scene links. 2026-04-03 22:45:39 +02:00
DebaucheryLibrarian
c2f784cc91 1.252.3 2026-04-03 01:16:54 +02:00
DebaucheryLibrarian
bb168969ae Fixed undici in http module breaking on headers duplicated due to casing. 2026-04-03 01:16:52 +02:00
DebaucheryLibrarian
2d1897413e 1.252.2 2026-04-03 00:40:48 +02:00
DebaucheryLibrarian
802fd44260 Enabled gay results in Teen Core Club query. 2026-04-03 00:40:45 +02:00
DebaucheryLibrarian
d0ae7040f6 1.252.1 2026-04-03 00:26:16 +02:00
DebaucheryLibrarian
a492401db0 Storing scene language and production date precision. Refactored Teen Core Club. 2026-04-03 00:26:13 +02:00
DebaucheryLibrarian
ed07a6c249 1.252.0 2026-04-02 04:45:37 +02:00
DebaucheryLibrarian
a96ec64d61 Refactored media fetch with undici for http/2 support. 2026-04-02 04:45:31 +02:00
DebaucheryLibrarian
6cabfc3090 1.251.0 2026-03-31 06:29:33 +02:00
DebaucheryLibrarian
ebb8832096 Added DarkkoTV scraper. Removed some obsolete web components. 2026-03-31 06:29:30 +02:00
DebaucheryLibrarian
64cdba6f6d 1.250.54 2026-03-28 16:22:56 +01:00
DebaucheryLibrarian
a00707338c Added Hentaied Men affiliate link. 2026-03-28 16:22:53 +01:00
DebaucheryLibrarian
568f8aad45 1.250.53 2026-03-27 03:32:29 +01:00
DebaucheryLibrarian
01c4b61f03 No longer using Bang @id field for entryId, this is now the full URL for some reason. 2026-03-27 03:32:26 +01:00
DebaucheryLibrarian
f5da798cd2 Fixed expect type associated to wrong object in Bang scraper. 2026-03-27 03:22:51 +01:00
DebaucheryLibrarian
69d410666d Fixed expect instead of expectType property in Bang scraper. 2026-03-27 03:12:07 +01:00
DebaucheryLibrarian
d60cf83f84 1.250.52 2026-03-27 03:11:03 +01:00
DebaucheryLibrarian
ae3a181503 Added octet expectation to Bang photos. 2026-03-27 03:11:00 +01:00
DebaucheryLibrarian
b5e969b7b0 1.250.51 2026-03-27 00:55:55 +01:00
DebaucheryLibrarian
bd92f56729 Added Hentaied Men. 2026-03-27 00:55:52 +01:00
DebaucheryLibrarian
2ef25f0b14 1.250.50 2026-03-22 04:46:43 +01:00
DebaucheryLibrarian
ba2a5c2225 Fixed unmatched censored tags. 2026-03-22 04:46:40 +01:00
DebaucheryLibrarian
ea289c2b49 1.250.49 2026-03-21 00:32:12 +01:00
DebaucheryLibrarian
8600bece93 Updated unprint for better timeout handling. 2026-03-21 00:32:10 +01:00
DebaucheryLibrarian
aabd0a5790 1.250.48 2026-03-20 23:43:02 +01:00
DebaucheryLibrarian
bbacaed5a5 Updated unprint with browser clean-up fix. 2026-03-20 23:42:59 +01:00
DebaucheryLibrarian
35dbe29861 1.250.47 2026-03-20 23:38:10 +01:00
DebaucheryLibrarian
4b4aeaf447 Updated unprint with better browser error handling. 2026-03-20 23:38:08 +01:00
DebaucheryLibrarian
be6ad83a4c 1.250.46 2026-03-20 23:10:18 +01:00
DebaucheryLibrarian
1c6a28247a Moved remote browser config into bypass browser section. 2026-03-20 23:10:01 +01:00
DebaucheryLibrarian
63bda10c7d 1.250.45 2026-03-20 23:01:59 +01:00
DebaucheryLibrarian
f46b56f606 Updated unprint, integrated remote browser. Updated Kink affiliate URLs. 2026-03-20 23:01:57 +01:00
DebaucheryLibrarian
283c44d2fc 1.250.44 2026-03-20 01:59:25 +01:00
DebaucheryLibrarian
a7df43eb55 Added Pascal's Subsluts affiliate. 2026-03-20 01:59:22 +01:00
DebaucheryLibrarian
7e227a4ea5 1.250.43 2026-03-17 23:54:35 +01:00
DebaucheryLibrarian
1c8df6415d Added Jawked to Karups, set up affiliate links. 2026-03-17 23:54:32 +01:00
DebaucheryLibrarian
0beb54312a Updating Manticore scenes after tag reassociation. 2026-03-16 04:58:46 +01:00
DebaucheryLibrarian
95d68fa966 1.250.42 2026-03-16 04:43:04 +01:00
DebaucheryLibrarian
50e7b1a437 Separated spitroast from MFM tag, added Hardwerk DP tags. 2026-03-16 04:43:02 +01:00
DebaucheryLibrarian
6cad394e88 Removed stray console log. 2026-03-16 02:50:31 +01:00
DebaucheryLibrarian
186f9660c3 Increased dangerous query bindings threshold. 2026-03-15 22:30:55 +01:00
DebaucheryLibrarian
ef7c24ab47 1.250.41 2026-03-15 21:33:16 +01:00
DebaucheryLibrarian
1b6aaafe10 Using batch insert module for media, calculating chunk size based on item size. 2026-03-15 21:33:14 +01:00
DebaucheryLibrarian
31aa1118e7 1.250.40 2026-03-15 20:07:53 +01:00
DebaucheryLibrarian
74d03b7483 Using browser for Nubiles, added She's Breeding Material. 2026-03-15 20:07:51 +01:00
DebaucheryLibrarian
40ea7eb80a 1.250.39 2026-03-15 17:42:08 +01:00
DebaucheryLibrarian
0d30115ad5 Refreshing entity slug cache in seeds. Added Hardwerk to Radical. 2026-03-15 17:42:07 +01:00
DebaucheryLibrarian
0560fac1ff 1.250.38 2026-03-13 05:14:45 +01:00
DebaucheryLibrarian
108bf3b168 Integrated manticore stash sync tool. 2026-03-13 05:14:42 +01:00
DebaucheryLibrarian
155e235246 Fixed Aylo specifying wrong host for media. 2026-03-10 05:54:00 +01:00
DebaucheryLibrarian
bff665c6ec 1.250.37 2026-03-10 04:41:36 +01:00
DebaucheryLibrarian
c7111329dc Improved knex error reporting. 2026-03-10 04:41:30 +01:00
DebaucheryLibrarian
d7c1c0ae5c 1.250.36 2026-03-09 05:36:21 +01:00
DebaucheryLibrarian
ea298d7edb Fixed Aylo scraper ignoring session configuration. 2026-03-09 05:36:17 +01:00
DebaucheryLibrarian
99dfcae920 1.250.35 2026-03-08 04:03:57 +01:00
DebaucheryLibrarian
24cba1e1fa Deleting flushed scenes from manticore. 2026-03-08 04:03:55 +01:00
DebaucheryLibrarian
076bdad310 1.250.34 2026-03-06 04:25:55 +01:00
DebaucheryLibrarian
d432d291dd Added See Him Solo to Hussie Pass, added HP affiliates. 2026-03-06 04:25:51 +01:00
DebaucheryLibrarian
220f7e787d 1.250.33 2026-03-05 02:00:53 +01:00
DebaucheryLibrarian
f1caa77e4b Added scene tags table to manticore scenes tool. 2026-03-05 02:00:43 +01:00
DebaucheryLibrarian
ff633436cb 1.250.32 2026-03-04 02:53:21 +01:00
DebaucheryLibrarian
6860072a51 Added database support for actor-specific scene tags. 2026-03-04 02:53:17 +01:00
DebaucheryLibrarian
2c7b4cfc22 1.250.31 2026-03-04 01:57:39 +01:00
DebaucheryLibrarian
7d9e1be8d4 Added Lesbian Factor. 2026-03-04 01:57:33 +01:00
DebaucheryLibrarian
00db4b1b5b 1.250.30 2026-03-03 23:47:50 +01:00
DebaucheryLibrarian
9f1cf1575a Added ASG Max channel parameters. 2026-03-03 23:47:48 +01:00
DebaucheryLibrarian
4f13e4ed28 1.250.29 2026-03-03 23:11:33 +01:00
DebaucheryLibrarian
9805aa7b5b Added Deep Inside to Disruptive Films. Added Sodomy Squad affiliate. 2026-03-03 23:11:32 +01:00
DebaucheryLibrarian
0cc6ebc305 1.250.28 2026-03-03 22:40:52 +01:00
DebaucheryLibrarian
016c24af28 Added channel filter option to Gamma scraper, re-added Disruptive Films channel. 2026-03-03 22:40:49 +01:00
DebaucheryLibrarian
2158550091 1.250.27 2026-03-03 01:21:00 +01:00
DebaucheryLibrarian
68ddc8cb78 Added Wicked affiliate. Improved Gamma banner tool filename composition. 2026-03-03 01:20:58 +01:00
DebaucheryLibrarian
bc5693e44a 1.250.26 2026-03-02 23:55:11 +01:00
DebaucheryLibrarian
7276d90629 Disabled tags by default in Gamma banner tool filenames. Added Gangbang Creampie, Gloryhole Secrets and Taboo Heat affiliates. 2026-03-02 23:55:09 +01:00
DebaucheryLibrarian
1a1af95a10 1.250.25 2026-03-02 22:36:08 +01:00
DebaucheryLibrarian
bcb7a56588 Added alt descriptions and attributes columns to series. 2026-03-02 22:36:05 +01:00
DebaucheryLibrarian
16648d50f6 Re-enabled filename actors and tags in Gamma banner tool, improved disable argument. 2026-03-02 06:24:54 +01:00
DebaucheryLibrarian
062dc0e75e 1.250.24 2026-03-02 06:21:30 +01:00
DebaucheryLibrarian
42effd53fc Added Diabolic affiliate. Disabled filename actors and tags in Gamma banner tool, unreliable. 2026-03-02 06:21:27 +01:00
DebaucheryLibrarian
3a3403bb1f 1.250.23 2026-03-02 06:07:49 +01:00
DebaucheryLibrarian
6fb4989256 Added Chaos Men affiliate. 2026-03-02 06:07:45 +01:00
DebaucheryLibrarian
9750ca4b79 1.250.22 2026-03-02 05:52:48 +01:00
DebaucheryLibrarian
0500f7eda8 Added Burning Angel affiliate. Fixed Gamma banner tool breaking on invalid URL. 2026-03-02 05:52:46 +01:00
DebaucheryLibrarian
19beff7dbc 1.250.21 2026-03-02 05:38:52 +01:00
DebaucheryLibrarian
dfe1b84992 Explicitly unsetting channel parent in seed. 2026-03-02 05:38:50 +01:00
DebaucheryLibrarian
3d3b544cb4 1.250.20 2026-03-02 05:07:26 +01:00
DebaucheryLibrarian
65fa6027ee Prioritized pissing tag. 2026-03-02 05:07:23 +01:00
DebaucheryLibrarian
b3a0ba72eb 1.250.19 2026-03-02 04:01:41 +01:00
DebaucheryLibrarian
f3e2143b45 Fixed wrong date parse function call in Gamma scraper. Added Biphoria affiliate link. 2026-03-02 04:01:39 +01:00
DebaucheryLibrarian
d289f95d3d 1.250.18 2026-03-02 03:46:48 +01:00
DebaucheryLibrarian
d8b41ec9b5 Use request interface for Vixen deep fetch, seemingly less chance of a 403. 2026-03-02 03:46:46 +01:00
DebaucheryLibrarian
05f7d8b814 1.250.17 2026-03-02 03:27:27 +01:00
DebaucheryLibrarian
c2fc09fdaa Removed redundant program filter from Gamma banner tool. 2026-03-02 03:27:24 +01:00
DebaucheryLibrarian
8a7210a3b9 1.250.16 2026-03-02 03:08:28 +01:00
DebaucheryLibrarian
e029ca7fd0 Added Gamma banner downloader. 2026-03-02 03:08:26 +01:00
DebaucheryLibrarian
ffcfae69d5 1.250.15 2026-03-02 03:07:12 +01:00
DebaucheryLibrarian
dcaee01ce8 Using channel origin instead of URL for Gamma referer URL composition. 2026-03-02 03:07:10 +01:00
DebaucheryLibrarian
7561a4577e 1.250.14 2026-03-02 01:41:38 +01:00
DebaucheryLibrarian
98b735dbae Added Vivid and Zero Tolerance affiliate links. Restored BAM Visions profile scraper, site is back online. 2026-03-02 01:41:36 +01:00
DebaucheryLibrarian
d2daed788c 1.250.13 2026-03-02 01:14:27 +01:00
DebaucheryLibrarian
23257745a7 Fixed profile updated_at timestamp not updating. 2026-03-02 01:14:23 +01:00
61 changed files with 2889 additions and 3070 deletions

View File

@@ -27,7 +27,7 @@
"require-await": "off",
"no-param-reassign": ["error", {
"props": true,
"ignorePropertyModificationsFor": ["state", "acc", "req"]
"ignorePropertyModificationsFor": ["state", "acc", "req", "error"]
}]
},
"globals": {

View File

@@ -1,32 +0,0 @@
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width,height=device-height,initial-scale=1,maximum-scale=1,user-scalable=no">
<meta name="theme-color" content="#ff2288">
<title>traxxx</title>
<link rel="icon" href="/img/favicon/favicon-32.ico">
<link rel="icon" href="/img/favicon/favicon.svg" type="image/svg+xml">
<link rel="apple-touch-icon" href="/img/favicon/favicon-180.png">
<link rel="manifest" href="/img/favicon/manifest.webmanifest">
<meta name="msapplication-TileColor" content="#aa2c66">
<meta name="msapplication-config" content="/img/favicon/browserconfig.xml">
<link rel="stylesheet" href="/css/style.css">
<script src="/js/bundle.js" defer></script>
<% if (analytics.enabled) { %>
<script async src="<%- analytics.address %>" data-website-id="<%- analytics.siteId %>"></script>
<% } %>
</head>
<body>
<div id="container"></div>
<script>window.env = <%- env %>;</script>
<!-- flag icons by https://www.flaticon.com/authors/freepik -->
</body>
</html>

View File

@@ -27,21 +27,6 @@ module.exports = {
destroyTimeoutMillis: 300000,
},
},
web: {
host: '0.0.0.0',
port: 5000,
sfwHost: '0.0.0.0',
sfwPort: 5001,
session: {
secret: '12345678abcdefghij',
resave: false,
saveUninitialized: false,
cookie: {
secure: true,
maxAge: 2629800000, // 1 month
},
},
},
redis: {
host: 'localhost',
port: 6379,
@@ -188,6 +173,8 @@ module.exports = {
'wishescumtrue',
// hentaied
'somegore',
// digital playground
'digitalplayground', // no longer updates, produces a bunch of garbage for some reason
],
networks: [
// dummy network for testing
@@ -227,6 +214,12 @@ module.exports = {
'www.kink.com',
'store2.psmcdn.net', // Team Skeet API
],
remote: {
enable: false,
use: false,
address: 'ws://10.0.0.1:3333/browser',
key: null,
},
},
cloudflare: {
enable: false,
@@ -279,7 +272,7 @@ module.exports = {
trailerQuality: [540, 720, 960, 480, 1080, 360, 320, 1440, 1600, 1920, 2160, 270, 240, 180],
limit: 25, // max number of photos per release
attempts: 2,
flushOrphaned: true,
flushOrphaned: false,
flushWindow: 1000,
streams: {
enabled: true, // fetch streams

View File

@@ -0,0 +1,13 @@
exports.up = async function(knex) {
await knex.schema.alterTable('series', (table) => {
table.specificType('alt_descriptions', 'text ARRAY');
table.json('attributes');
});
};
exports.down = async function(knex) {
await knex.schema.alterTable('series', (table) => {
table.dropColumn('alt_descriptions');
table.dropColumn('attributes');
});
};

View File

@@ -0,0 +1,21 @@
// Migration: allow the same tag on a release once per actor by adding an
// optional actor_id to releases_tags and replacing the plain
// (tag_id, release_id) unique constraint with an expression index.
exports.up = async function(knex) {
await knex.schema.alterTable('releases_tags', (table) => {
// Nullable on purpose: a NULL actor_id means the tag applies to the
// release as a whole rather than to a specific actor.
table.integer('actor_id')
.references('id')
.inTable('actors');
// The old two-column uniqueness must go, or per-actor rows would conflict.
table.dropUnique(['tag_id', 'release_id']);
});
// Raw SQL because knex's unique() cannot express an index on an expression.
// COALESCE(actor_id, -1) collapses NULLs to a sentinel so that two
// release-level rows (both NULL actor_id) DO conflict — with a plain unique
// index, NULLs compare as distinct and duplicates would slip through.
await knex.raw('CREATE UNIQUE INDEX releases_tags_tag_id_release_id_actor_id ON releases_tags (tag_id, release_id, COALESCE(actor_id, -1))');
};

exports.down = async function(knex) {
await knex.schema.alterTable('releases_tags', (table) => {
// Dropping the column also drops the expression index that depends on it.
table.dropColumn('actor_id');
// Restore the original two-column uniqueness.
table.unique(['tag_id', 'release_id']);
});
// Usually a no-op (see above), kept as IF EXISTS for safety.
await knex.raw('DROP INDEX IF EXISTS releases_tags_tag_id_release_id_actor_id');
};

View File

@@ -0,0 +1,27 @@
exports.up = async function(knex) {
await knex.schema.createTable('languages', (table) => {
table.string('alpha2')
.primary();
table.text('name');
table.text('name_native');
});
await knex.schema.alterTable('releases', (table) => {
table.enum('production_date_precision', ['year', 'month', 'week', 'day', 'hour', 'minute', 'second'])
.defaultTo('day');
table.string('language_alpha2')
.references('alpha2')
.inTable('languages');
});
};
exports.down = async function(knex) {
await knex.schema.alterTable('releases', (table) => {
table.dropColumn('production_date_precision');
table.dropColumn('language_alpha2');
});
await knex.schema.dropTable('languages');
};

View File

@@ -0,0 +1,100 @@
exports.up = async function(knex) {
await knex.schema.createTable('feeds', (table) => {
table.increments('id');
table.integer('user_id')
.notNullable()
.references('id')
.inTable('users')
.onDelete('cascade');
table.string('name')
.notNullable();
table.string('slug')
.notNullable();
table.boolean('public');
table.boolean('primary');
table.text('comment');
table.json('meta');
table.datetime('created_at')
.notNullable()
.defaultTo(knex.fn.now());
});
await knex.schema.createTable('feeds_entities', (table) => {
table.increments('id');
table.integer('feed_id')
.notNullable()
.references('id')
.inTable('feeds')
.onDelete('cascade');
table.integer('entity_id')
.notNullable()
.references('id')
.inTable('entities')
.onDelete('cascade');
table.text('comment');
table.datetime('created_at')
.notNullable()
.defaultTo(knex.fn.now());
});
await knex.schema.createTable('feeds_actors', (table) => {
table.increments('id');
table.integer('feed_id')
.notNullable()
.references('id')
.inTable('feeds')
.onDelete('cascade');
table.integer('actor_id')
.notNullable()
.references('id')
.inTable('actors')
.onDelete('cascade');
table.text('comment');
table.datetime('created_at')
.notNullable()
.defaultTo(knex.fn.now());
});
await knex.schema.createTable('feeds_tags', (table) => {
table.increments('id');
table.integer('feed_id')
.notNullable()
.references('id')
.inTable('feeds')
.onDelete('cascade');
table.integer('tag_id')
.notNullable()
.references('id')
.inTable('tags')
.onDelete('cascade');
table.text('comment');
table.datetime('created_at')
.notNullable()
.defaultTo(knex.fn.now());
});
};
exports.down = async function(knex) {
await knex.schema.dropTable('feeds_tags');
await knex.schema.dropTable('feeds_actors');
await knex.schema.dropTable('feeds_entities');
await knex.schema.dropTable('feeds');
};

965
package-lock.json generated

File diff suppressed because it is too large Load Diff

View File

@@ -1,6 +1,6 @@
{
"name": "traxxx",
"version": "1.250.12",
"version": "1.252.7",
"description": "All the latest porn releases in one place",
"main": "src/app.js",
"scripts": {
@@ -88,6 +88,7 @@
"casual": "^1.6.2",
"cheerio": "^1.0.0-rc.12",
"cli-confirm": "^1.0.1",
"clipboardy": "^2.3.0",
"cloudscraper": "^4.6.0",
"config": "^3.3.9",
"connect-session-knex": "^4.0.0",
@@ -152,8 +153,8 @@
"tough-cookie": "^4.1.3",
"tunnel": "0.0.6",
"ua-parser-js": "^1.0.37",
"undici": "^5.28.1",
"unprint": "^0.18.35",
"undici": "^7.24.7",
"unprint": "^0.19.13",
"url-pattern": "^1.0.3",
"v-tooltip": "^2.1.3",
"video.js": "^8.6.1",

View File

@@ -140,6 +140,11 @@ const tags = [
slug: 'asian',
group: 'ethnicity',
},
{
name: 'ass',
slug: 'ass',
group: 'body',
},
{
name: 'athletic',
slug: 'athletic',
@@ -276,6 +281,11 @@ const tags = [
description: 'Taking a dick in your mouth, sucking, licking and kissing it, often while giving a [handjob](/tag/handjob). You may slide it all the way [down your throat](/tag/deepthroat), or let them [fuck your face](/tag/facefucking).',
group: 'oral',
},
{
slug: 'oral',
name: 'oral',
group: 'oral',
},
{
name: 'blowbang',
slug: 'blowbang',
@@ -368,6 +378,10 @@ const tags = [
name: 'cuckold',
slug: 'cuckold',
},
{
name: 'cum',
slug: 'cum',
},
{
name: 'cum drunk',
slug: 'cum-drunk',
@@ -815,6 +829,10 @@ const tags = [
description: 'Two men fucking one woman, but not eachother. Typically involves a \'spitroast\', where one guy gets a blowjob and the other fucks her pussy or ass.',
group: 'group',
},
{
name: 'spitroast',
slug: 'spitroast',
},
{
name: 'military',
slug: 'military',
@@ -1164,6 +1182,11 @@ const tags = [
slug: 'threesome',
group: 'group',
},
{
name: 'foursome',
slug: 'foursome',
group: 'group',
},
{
name: 'throatpie',
slug: 'throatpie',
@@ -1893,6 +1916,14 @@ const aliases = [
name: 'double penetration (dp)',
for: 'dp',
},
{
name: 'double penetration ass pussy',
for: 'dp',
},
{
name: 'double penetration mouth pussy',
for: 'spitroast',
},
{
name: 'double penetration - dp',
for: 'dp',
@@ -2355,11 +2386,6 @@ const aliases = [
name: 'spit',
for: 'saliva',
},
{
name: 'spitroast',
for: 'mfm',
secondary: true,
},
{
name: 'spoon',
for: 'spooning',
@@ -2828,7 +2854,7 @@ const aliases = [
},
{
name: 'amateur b--w--bs',
for: 'amateur blowjobs',
for: 'blowjob',
},
{
name: 'a--l',
@@ -2980,7 +3006,7 @@ const aliases = [
},
{
name: 'r----b',
for: 'rimjob',
for: 'ass-eating',
},
{
name: 's--n--r',
@@ -3019,11 +3045,13 @@ const priorities = [ // higher index is higher priority
['facial', 'swallowing', 'creampie', 'anal-creampie', 'oral-creampie', 'cum-in-mouth', 'throatpie'],
['lesbian', 'rough', 'milf', 'male-focus', 'bdsm', 'oil'],
['threesome', 'mfm', 'mff', 'trainbang', 'pissing'],
['anal', 'bukkake'],
['anal', 'bukkake', 'spitroast'],
['dp', 'dap', 'triple-penetration', 'tap', 'dvp', 'tvp', 'airtight'],
['blowbang', 'orgy'],
['gangbang'],
['gay', 'transsexual', 'bisexual', 'hentai'],
['pissing'],
['compilation', 'bts'],
].reduce((acc, slugs, index) => {
slugs.forEach((slug) => { acc[slug] = index; });
@@ -3073,12 +3101,18 @@ exports.seed = (knex) => Promise.resolve()
const tagEntries = await knex('tags').select('*').where({ alias_for: null });
const tagsMap = tagEntries.reduce((acc, { id, slug }) => ({ ...acc, [slug]: id }), {});
const tagAliases = aliases.map((alias) => ({
const tagAliases = aliases.map((alias) => {
if (!tagsMap[alias.for]) {
console.log(`UNMATCHED ALIAS: ${alias.name} -> ${alias.for}`);
}
return {
name: alias.name,
alias_for: tagsMap[alias.for],
implied_tag_ids: alias.implies?.map((slug) => tagsMap[slug]),
secondary: !!alias.secondary,
}));
};
});
return upsert('tags', tagAliases, 'name');
});

View File

@@ -2,6 +2,14 @@
const omit = require('object.omit');
const upsert = require('../src/utils/upsert');
const redis = require('../src/redis');
const entityPrefixes = {
channel: '',
network: '_',
studio: '*',
info: '@',
};
const grandParentNetworks = [
{
@@ -788,6 +796,9 @@ const networks = [
slug: 'teencoreclub',
name: 'Teen Core Club',
url: 'https://teencoreclub.com',
parameters: {
studioId: 1624,
},
},
{
slug: 'teenmegaworld',
@@ -905,8 +916,7 @@ const networks = [
},
];
exports.seed = (knex) => Promise.resolve()
.then(async () => {
exports.seed = async (knex) => {
await Promise.all([].concat(grandParentNetworks, parentNetworks, networks).map(async (network) => {
if (network.rename) {
return knex('entities')
@@ -981,4 +991,13 @@ exports.seed = (knex) => Promise.resolve()
.flat();
await upsert('entities_tags', tagAssociations, ['entity_id', 'tag_id'], knex);
});
const entities = await knex('entities').select('id', 'slug', 'type');
await redis.connect();
await redis.del('traxxx:entities:id_by_slug');
await redis.hSet('traxxx:entities:id_by_slug', entities.map((entity) => [`${entityPrefixes[entity.type]}${entity.slug}`, entity.id]));
await redis.disconnect();
};

File diff suppressed because it is too large Load Diff

View File

@@ -208,6 +208,11 @@ const affiliates = [
url: 'https://www.g2buddy.com/disruptivefilms/go.php?pr=9&su=2&si=119&ad=277470&pa=index&ar=&buffer=',
comment: 'per signup',
},
{
channel: 'sodomysquad',
url: 'https://www.g2buddy.com/sodomysquad/go.php?pr=9&su=2&si=137&ad=277470&pa=index&ar=&buffer=',
comment: 'per signup',
},
// gamma > ags max > next door studios
// excluded affiliate links that link back to main site and don't seem to track properly
{
@@ -494,6 +499,92 @@ const affiliates = [
scene: false, // redirects to Adult Time
},
},
// gamma > vivid
{
network: 'vivid',
url: 'https://www.g2fame.com/vivid/go.php?pr=8&su=2&si=330&ad=277470&pa=index&ar=&buffer=',
comment: 'per signup',
parameters: {
scene: false, // redirects to homepage
},
},
{
channel: 'wheretheboysarent',
url: 'https://www.g2fame.com/wheretheboysarent/go.php?pr=8&su=2&si=368&ad=277470&pa=index&ar=&buffer=',
comment: 'per signup',
},
{
channel: 'thebrats',
url: 'https://www.g2fame.com/thebrats/go.php?pr=8&su=2&si=369&ad=277470&pa=index&ar=&buffer=',
comment: 'per signup',
},
// gamma > zero tolerance
{
network: 'zerotolerance',
url: 'https://www.g2fame.com/zerotolerancefilms/go.php?pr=8&su=2&si=507&ad=277470&pa=index&ar=&buffer=',
comment: 'per signup',
},
{
channel: 'zerotolerancefilms',
url: 'https://www.g2fame.com/zerotolerancefilms/go.php?pr=8&su=2&si=507&ad=277470&pa=index&ar=&buffer=',
comment: 'per signup',
},
{
channel: '3rddegreefilms',
url: 'https://www.g2fame.com/3rddegreefilms/go.php?pr=8&su=2&si=537&ad=277470&pa=index&ar=&buffer=',
comment: 'per signup',
},
{
channel: 'addicted2girls',
url: 'https://www.g2fame.com/addicted2girls/go.php?pr=8&su=2&si=477&ad=277470&pa=index&ar=&buffer=',
comment: 'per signup',
},
{
channel: 'genderxfilms',
url: 'https://www.g2fame.com/genderxfilms/go.php?pr=8&su=2&si=397&ad=277470&pa=index&ar=&buffer=',
comment: 'per signup',
},
{
channel: 'gangbangcreampie',
url: 'https://www.g2fame.com/gangbangcreampie/go.php?pr=8&su=2&si=656&ad=277470&pa=index&ar=&buffer=',
comment: 'per signup',
},
{
channel: 'gloryholesecrets',
url: 'https://www.g2fame.com/gloryholesecrets/go.php?pr=8&su=2&si=655&ad=277470&pa=index&ar=&buffer=',
comment: 'per signup',
},
{
channel: 'tabooheat',
url: 'https://www.g2fame.com/tabooheat/go.php?pr=8&su=2&si=552&ad=277470&pa=index&ar=&buffer=',
comment: 'per signup',
},
{
channel: 'wicked',
url: 'https://www.g2fame.com/wicked/go.php?pr=8&su=2&si=371&ad=277470&pa=index&ar=&buffer=',
comment: 'per signup',
},
// gamma > independent channels
{
channel: 'biphoria',
url: 'https://www.g2fame.com/biphoria/go.php?pr=8&su=2&si=418&ad=277470&pa=index&ar=&buffer=',
comment: 'per signup',
},
{
channel: 'burningangel',
url: 'https://www.g2fame.com/burningangel/go.php?pr=8&su=2&si=174&ad=277470&pa=index&ar=&buffer=',
comment: 'per signup',
},
{
channel: 'chaosmen',
url: 'https://www.g2fame.com/chaosmen/go.php?pr=8&su=2&si=608&ad=277470&pa=index&ar=&buffer=',
comment: 'per signup',
},
{
channel: 'diabolic',
url: 'https://www.g2fame.com/diabolic/go.php?pr=8&su=2&si=523&ad=277470&pa=index&ar=&buffer=',
comment: 'per signup',
},
// kelly madison / 8k
{
network: 'kellymadison',
@@ -628,6 +719,11 @@ const affiliates = [
url: 'https://register.join-toughlovex.com/track/MzAwMDA5NzkuMy43Ni4xOTcuMC4wLjAuMC4w',
comment: 'rev share',
},
{
channel: 'hardwerk',
url: 'https://register.hardwerk.com/track/MzAwMDA5NzkuMy4xNTEuMzM5LjAuMC4wLjAuMA',
comment: 'rev share',
},
// radical > topwebmodels
{
network: 'topwebmodels',
@@ -667,42 +763,42 @@ const affiliates = [
// kink
{
network: 'kink',
url: 'https://www.c4cgmn8trk.com/G483KW96H/225JFQ',
url: 'https://www.dfrs31kg.com/G483KW96H/225JFQ',
comment: '50%',
parameters: {
dynamicScene: 'https://www.c4cgmn8trk.com/G483KW96H/225JFQ/?uid=18&ef_page={scenePath}',
dynamicEntity: 'https://www.c4cgmn8trk.com/G483KW96H/225JFQ/?uid=18&ef_page={entityPath}',
dynamicScene: 'https://www.dfrs31kg.com/G483KW96H/225JFQ/?uid=18&ef_page={scenePath}',
dynamicEntity: 'https://www.dfrs31kg.com/G483KW96H/225JFQ/?uid=18&ef_page={entityPath}',
prefixSlash: false,
},
},
{
channel: 'kinktrans',
url: 'https://www.c4cgmn8trk.com/G483KW96H/XCQZJ/',
url: 'https://www.dfrs31kg.com/G483KW96H/XCQZJ/',
comment: '50%',
parameters: {
dynamicScene: 'https://www.c4cgmn8trk.com/G483KW96H/XCQZJ/?uid=21&ef_page={scenePath}',
dynamicEntity: 'https://www.c4cgmn8trk.com/G483KW96H/XCQZJ/?uid=21&ef_page={entityPath}',
dynamicScene: 'https://www.dfrs31kg.com/G483KW96H/XCQZJ/?uid=21&ef_page={scenePath}',
dynamicEntity: 'https://www.dfrs31kg.com/G483KW96H/XCQZJ/?uid=21&ef_page={entityPath}',
prefixSlash: false,
},
},
{
network: 'kinkmen',
url: 'https://www.c4cgmn8trk.com/G483KW96H/SL1HB/',
url: 'https://www.dfrs31kg.com/G483KW96H/SL1HB/',
comment: '50%',
parameters: {
dynamicScene: 'https://www.c4cgmn8trk.com/G483KW96H/SL1HB/?uid=20&ef_page={scenePath}',
dynamicEntity: 'https://www.c4cgmn8trk.com/G483KW96H/SL1HB/?uid=20&ef_page={entityPath}',
dynamicScene: 'https://www.dfrs31kg.com/G483KW96H/SL1HB/?uid=20&ef_page={scenePath}',
dynamicEntity: 'https://www.dfrs31kg.com/G483KW96H/SL1HB/?uid=20&ef_page={entityPath}',
prefixSlash: false,
},
},
{
channel: 'kinkvr',
url: 'https://www.c4cgmn8trk.com/G483KW96H/PS824/',
url: 'https://www.dfrs31kg.com/G483KW96H/PS824/',
comment: '50%',
parameters: {
// no dedicated scene pages, use Kink main
dynamicScene: 'https://www.c4cgmn8trk.com/G483KW96H/225JFQ/?uid=18&ef_page={scenePath}',
dynamicEntity: 'https://www.c4cgmn8trk.com/G483KW96H/225JFQ/?uid=18&ef_page={entityPath}',
dynamicScene: 'https://www.dfrs31kg.com/G483KW96H/225JFQ/?uid=18&ef_page={scenePath}',
dynamicEntity: 'https://www.dfrs31kg.com/G483KW96H/225JFQ/?uid=18&ef_page={entityPath}',
prefixSlash: false,
},
},
@@ -738,6 +834,15 @@ const affiliates = [
global: false, // banners might be a bit too disturbing for front-page
},
},
{
channel: 'hentaiedmen',
url: 'https://hentaiedmen.com/amember/aff/go/jvonbywnrocdkzvjtf',
parameters: {
query: 'ref=npjyjuekbvehqzpyqukrgqoogmwbkz',
dynamicScene: 'https://hentaiedmen.com{scenePath}?ref=jvonbywnrocdkzvjtf',
global: false, // banners might be a bit too disturbing for front-page
},
},
{
channel: 'defeated',
url: 'https://defeatedsexfight.com/amember/aff/go/jlfowppdazbfucxrgfmgpno',
@@ -854,6 +959,83 @@ const affiliates = [
query: 'ref=4c331ef6',
},
},
// POV Porn Cash / HussiePass
{
network: 'hussiepass',
url: 'https://secure.hussiepass.com/track/MTk0NS4xLjUuNy4wLjAuMC4wLjA',
comment: '50% revshare',
parameters: {
// hussiepass website does not show network scenes
channelScenes: false,
},
},
{
channel: 'povpornstars',
url: 'https://join.povpornstars.com/track/MTk0NS4xLjMuNS4wLjAuMC4wLjA',
comment: '50% revshare',
},
{
channel: 'interracialpovs',
url: 'https://join.interracialpovs.com/track/MTk0NS4xLjYuOC4wLjAuMC4wLjA',
comment: '50% revshare',
},
{
channel: 'ravebunnys',
url: 'https://secure.ravebunnys.com/track/MTk0NS4xLjExLjI5LjAuMC4wLjAuMA',
comment: '50% revshare',
},
{
channel: 'hotandtatted',
url: 'https://join.hotandtatted.com/track/MTk0NS4xLjEwLjEyLjAuMC4wLjAuMA',
comment: '50% revshare',
},
{
channel: 'seehimfuck',
url: 'https://join.seehimfuck.com/track/MTk0NS4xLjcuOS4wLjAuMC4wLjA',
comment: '50% revshare',
},
{
channel: 'seehimsolo',
url: 'https://join.seehimsolo.com/track/MTk0NS4xLjguMTAuMC4wLjAuMC4w',
comment: '50% revshare',
},
// karups
{
network: 'karups',
url: 'https://secure.karups.com/track/MjAwMTAwMS4xLjEuMS4wLjAuMC4wLjA',
comment: 'revshare',
},
{
channel: 'hometownamateurs',
url: 'https://secure.karupsha.com/track/MjAwMTAwMS4xLjMuMy4wLjAuMC4wLjA',
comment: 'revshare',
},
{
channel: 'olderwomen',
url: 'https://secure.karupsow.com/track/MjAwMTAwMS4xLjQuNC4wLjAuMC4wLjA',
comment: 'revshare',
},
{
channel: 'privatecollection',
url: 'https://secure.karupspc.com/track/MjAwMTAwMS4xLjIuMi4wLjAuMC4wLjA',
comment: 'revshare',
},
{
channel: 'boyfun',
url: 'https://secure.boyfun.com/track/MjAwMTAwMS4xLjUuNS4wLjAuMC4wLjA',
comment: 'revshare',
},
{
channel: 'jawked',
url: 'https://secure.jawked.com/track/MjAwMTAwMS4xLjExLjExLjAuMC4wLjAuMA',
comment: 'revshare',
},
// ARL cash
{
channel: 'pascalssubsluts',
url: 'https://join.pascalssubsluts.com/track/MTc2Ny4xOC41LjkuMC4wLjAuMC4w',
comment: 'revshare',
},
// etc
{
network: 'bang',

196
seeds/07_languages.js Executable file
View File

@@ -0,0 +1,196 @@
const upsert = require('../src/utils/upsert');

// ISO 639-1 two-letter language codes mapped to their English name and
// native-script name. Used to seed the `languages` table; codes are
// uppercased before insertion (see the seed function below).
// NOTE(review): fixed two garbled native names present in the widely
// copied ISO 639-1 dataset: `uz` was missing the leading "Oʻ" ('zbek'),
// and `gn` was missing the apostrophe in "Avañeʼẽ".
const languages = {
  ab: { name: 'Abkhaz', nativeName: 'аҧсуа' },
  aa: { name: 'Afar', nativeName: 'Afaraf' },
  af: { name: 'Afrikaans', nativeName: 'Afrikaans' },
  ak: { name: 'Akan', nativeName: 'Akan' },
  sq: { name: 'Albanian', nativeName: 'Shqip' },
  am: { name: 'Amharic', nativeName: 'አማርኛ' },
  ar: { name: 'Arabic', nativeName: 'العربية' },
  an: { name: 'Aragonese', nativeName: 'Aragonés' },
  hy: { name: 'Armenian', nativeName: 'Հայերեն' },
  as: { name: 'Assamese', nativeName: 'অসমীয়া' },
  av: { name: 'Avaric', nativeName: 'авар мацӀ, магӀарул мацӀ' },
  ae: { name: 'Avestan', nativeName: 'avesta' },
  ay: { name: 'Aymara', nativeName: 'aymar aru' },
  az: { name: 'Azerbaijani', nativeName: 'azərbaycan dili' },
  bm: { name: 'Bambara', nativeName: 'bamanankan' },
  ba: { name: 'Bashkir', nativeName: 'башҡорт теле' },
  eu: { name: 'Basque', nativeName: 'euskara, euskera' },
  be: { name: 'Belarusian', nativeName: 'Беларуская' },
  bn: { name: 'Bengali', nativeName: 'বাংলা' },
  bh: { name: 'Bihari', nativeName: 'भोजपुरी' },
  bi: { name: 'Bislama', nativeName: 'Bislama' },
  bs: { name: 'Bosnian', nativeName: 'bosanski jezik' },
  br: { name: 'Breton', nativeName: 'brezhoneg' },
  bg: { name: 'Bulgarian', nativeName: 'български език' },
  my: { name: 'Burmese', nativeName: 'ဗမာစာ' },
  ca: { name: 'Catalan; Valencian', nativeName: 'Català' },
  ch: { name: 'Chamorro', nativeName: 'Chamoru' },
  ce: { name: 'Chechen', nativeName: 'нохчийн мотт' },
  ny: { name: 'Chichewa; Chewa; Nyanja', nativeName: 'chiCheŵa, chinyanja' },
  zh: { name: 'Chinese', nativeName: '中文 (Zhōngwén), 汉语, 漢語' },
  cv: { name: 'Chuvash', nativeName: 'чӑваш чӗлхи' },
  kw: { name: 'Cornish', nativeName: 'Kernewek' },
  co: { name: 'Corsican', nativeName: 'corsu, lingua corsa' },
  cr: { name: 'Cree', nativeName: 'ᓀᐦᐃᔭᐍᐏᐣ' },
  hr: { name: 'Croatian', nativeName: 'hrvatski' },
  cs: { name: 'Czech', nativeName: 'česky, čeština' },
  da: { name: 'Danish', nativeName: 'dansk' },
  dv: { name: 'Divehi; Dhivehi; Maldivian;', nativeName: 'ދިވެހި' },
  nl: { name: 'Dutch', nativeName: 'Nederlands, Vlaams' },
  en: { name: 'English', nativeName: 'English' },
  eo: { name: 'Esperanto', nativeName: 'Esperanto' },
  et: { name: 'Estonian', nativeName: 'eesti, eesti keel' },
  ee: { name: 'Ewe', nativeName: 'Eʋegbe' },
  fo: { name: 'Faroese', nativeName: 'føroyskt' },
  fj: { name: 'Fijian', nativeName: 'vosa Vakaviti' },
  fi: { name: 'Finnish', nativeName: 'suomi, suomen kieli' },
  fr: { name: 'French', nativeName: 'français, langue française' },
  ff: { name: 'Fula; Fulah; Pulaar; Pular', nativeName: 'Fulfulde, Pulaar, Pular' },
  gl: { name: 'Galician', nativeName: 'Galego' },
  ka: { name: 'Georgian', nativeName: 'ქართული' },
  de: { name: 'German', nativeName: 'Deutsch' },
  el: { name: 'Greek, Modern', nativeName: 'Ελληνικά' },
  gn: { name: 'Guaraní', nativeName: 'Avañeʼẽ' },
  gu: { name: 'Gujarati', nativeName: 'ગુજરાતી' },
  ht: { name: 'Haitian; Haitian Creole', nativeName: 'Kreyòl ayisyen' },
  ha: { name: 'Hausa', nativeName: 'Hausa, هَوُسَ' },
  he: { name: 'Hebrew (modern)', nativeName: 'עברית' },
  hz: { name: 'Herero', nativeName: 'Otjiherero' },
  hi: { name: 'Hindi', nativeName: 'हिन्दी, हिंदी' },
  ho: { name: 'Hiri Motu', nativeName: 'Hiri Motu' },
  hu: { name: 'Hungarian', nativeName: 'Magyar' },
  ia: { name: 'Interlingua', nativeName: 'Interlingua' },
  id: { name: 'Indonesian', nativeName: 'Bahasa Indonesia' },
  ie: { name: 'Interlingue', nativeName: 'Originally called Occidental; then Interlingue after WWII' },
  ga: { name: 'Irish', nativeName: 'Gaeilge' },
  ig: { name: 'Igbo', nativeName: 'Asụsụ Igbo' },
  ik: { name: 'Inupiaq', nativeName: 'Iñupiaq, Iñupiatun' },
  io: { name: 'Ido', nativeName: 'Ido' },
  is: { name: 'Icelandic', nativeName: 'Íslenska' },
  it: { name: 'Italian', nativeName: 'Italiano' },
  iu: { name: 'Inuktitut', nativeName: 'ᐃᓄᒃᑎᑐᑦ' },
  ja: { name: 'Japanese', nativeName: '日本語 (にほんご/にっぽんご)' },
  jv: { name: 'Javanese', nativeName: 'basa Jawa' },
  kl: { name: 'Kalaallisut, Greenlandic', nativeName: 'kalaallisut, kalaallit oqaasii' },
  kn: { name: 'Kannada', nativeName: 'ಕನ್ನಡ' },
  kr: { name: 'Kanuri', nativeName: 'Kanuri' },
  ks: { name: 'Kashmiri', nativeName: 'कश्मीरी, كشميري‎' },
  kk: { name: 'Kazakh', nativeName: 'Қазақ тілі' },
  km: { name: 'Khmer', nativeName: 'ភាសាខ្មែរ' },
  ki: { name: 'Kikuyu, Gikuyu', nativeName: 'Gĩkũyũ' },
  rw: { name: 'Kinyarwanda', nativeName: 'Ikinyarwanda' },
  ky: { name: 'Kirghiz, Kyrgyz', nativeName: 'кыргыз тили' },
  kv: { name: 'Komi', nativeName: 'коми кыв' },
  kg: { name: 'Kongo', nativeName: 'KiKongo' },
  ko: { name: 'Korean', nativeName: '한국어 (韓國語), 조선말 (朝鮮語)' },
  ku: { name: 'Kurdish', nativeName: 'Kurdî, كوردی‎' },
  kj: { name: 'Kwanyama, Kuanyama', nativeName: 'Kuanyama' },
  la: { name: 'Latin', nativeName: 'latine, lingua latina' },
  lb: { name: 'Luxembourgish, Letzeburgesch', nativeName: 'Lëtzebuergesch' },
  lg: { name: 'Luganda', nativeName: 'Luganda' },
  li: { name: 'Limburgish, Limburgan, Limburger', nativeName: 'Limburgs' },
  ln: { name: 'Lingala', nativeName: 'Lingála' },
  lo: { name: 'Lao', nativeName: 'ພາສາລາວ' },
  lt: { name: 'Lithuanian', nativeName: 'lietuvių kalba' },
  lu: { name: 'Luba-Katanga', nativeName: '' }, // no native name in the source dataset
  lv: { name: 'Latvian', nativeName: 'latviešu valoda' },
  gv: { name: 'Manx', nativeName: 'Gaelg, Gailck' },
  mk: { name: 'Macedonian', nativeName: 'македонски јазик' },
  mg: { name: 'Malagasy', nativeName: 'Malagasy fiteny' },
  ms: { name: 'Malay', nativeName: 'bahasa Melayu, بهاس ملايو‎' },
  ml: { name: 'Malayalam', nativeName: 'മലയാളം' },
  mt: { name: 'Maltese', nativeName: 'Malti' },
  mi: { name: 'Māori', nativeName: 'te reo Māori' },
  mr: { name: 'Marathi (Marāṭhī)', nativeName: 'मराठी' },
  mh: { name: 'Marshallese', nativeName: 'Kajin M̧ajeļ' },
  mn: { name: 'Mongolian', nativeName: 'монгол' },
  na: { name: 'Nauru', nativeName: 'Ekakairũ Naoero' },
  nv: { name: 'Navajo, Navaho', nativeName: 'Diné bizaad, Dinékʼehǰí' },
  nb: { name: 'Norwegian Bokmål', nativeName: 'Norsk bokmål' },
  nd: { name: 'North Ndebele', nativeName: 'isiNdebele' },
  ne: { name: 'Nepali', nativeName: 'नेपाली' },
  ng: { name: 'Ndonga', nativeName: 'Owambo' },
  nn: { name: 'Norwegian Nynorsk', nativeName: 'Norsk nynorsk' },
  no: { name: 'Norwegian', nativeName: 'Norsk' },
  ii: { name: 'Nuosu', nativeName: 'ꆈꌠ꒿ Nuosuhxop' },
  nr: { name: 'South Ndebele', nativeName: 'isiNdebele' },
  oc: { name: 'Occitan', nativeName: 'Occitan' },
  oj: { name: 'Ojibwe, Ojibwa', nativeName: 'ᐊᓂᔑᓈᐯᒧᐎᓐ' },
  cu: { name: 'Old Church Slavonic, Church Slavic, Church Slavonic, Old Bulgarian, Old Slavonic', nativeName: 'ѩзыкъ словѣньскъ' },
  om: { name: 'Oromo', nativeName: 'Afaan Oromoo' },
  or: { name: 'Oriya', nativeName: 'ଓଡ଼ିଆ' },
  os: { name: 'Ossetian, Ossetic', nativeName: 'ирон æвзаг' },
  pa: { name: 'Panjabi, Punjabi', nativeName: 'ਪੰਜਾਬੀ, پنجابی‎' },
  pi: { name: 'Pāli', nativeName: 'पाऴि' },
  fa: { name: 'Persian', nativeName: 'فارسی' },
  pl: { name: 'Polish', nativeName: 'polski' },
  ps: { name: 'Pashto, Pushto', nativeName: 'پښتو' },
  pt: { name: 'Portuguese', nativeName: 'Português' },
  qu: { name: 'Quechua', nativeName: 'Runa Simi, Kichwa' },
  rm: { name: 'Romansh', nativeName: 'rumantsch grischun' },
  rn: { name: 'Kirundi', nativeName: 'kiRundi' },
  ro: { name: 'Romanian, Moldavian, Moldovan', nativeName: 'română' },
  ru: { name: 'Russian', nativeName: 'русский язык' },
  sa: { name: 'Sanskrit (Saṁskṛta)', nativeName: 'संस्कृतम्' },
  sc: { name: 'Sardinian', nativeName: 'sardu' },
  sd: { name: 'Sindhi', nativeName: 'सिन्धी, سنڌي، سندھی‎' },
  se: { name: 'Northern Sami', nativeName: 'Davvisámegiella' },
  sm: { name: 'Samoan', nativeName: 'gagana faa Samoa' },
  sg: { name: 'Sango', nativeName: 'yângâ tî sängö' },
  sr: { name: 'Serbian', nativeName: 'српски језик' },
  gd: { name: 'Scottish Gaelic; Gaelic', nativeName: 'Gàidhlig' },
  sn: { name: 'Shona', nativeName: 'chiShona' },
  si: { name: 'Sinhala, Sinhalese', nativeName: 'සිංහල' },
  sk: { name: 'Slovak', nativeName: 'slovenčina' },
  sl: { name: 'Slovene', nativeName: 'slovenščina' },
  so: { name: 'Somali', nativeName: 'Soomaaliga, af Soomaali' },
  st: { name: 'Southern Sotho', nativeName: 'Sesotho' },
  es: { name: 'Spanish; Castilian', nativeName: 'español, castellano' },
  su: { name: 'Sundanese', nativeName: 'Basa Sunda' },
  sw: { name: 'Swahili', nativeName: 'Kiswahili' },
  ss: { name: 'Swati', nativeName: 'SiSwati' },
  sv: { name: 'Swedish', nativeName: 'svenska' },
  ta: { name: 'Tamil', nativeName: 'தமிழ்' },
  te: { name: 'Telugu', nativeName: 'తెలుగు' },
  tg: { name: 'Tajik', nativeName: 'тоҷикӣ, toğikī, تاجیکی‎' },
  th: { name: 'Thai', nativeName: 'ไทย' },
  ti: { name: 'Tigrinya', nativeName: 'ትግርኛ' },
  bo: { name: 'Tibetan Standard, Tibetan, Central', nativeName: 'བོད་ཡིག' },
  tk: { name: 'Turkmen', nativeName: 'Türkmen, Түркмен' },
  tl: { name: 'Tagalog', nativeName: 'Wikang Tagalog, ᜏᜒᜃᜅ᜔ ᜆᜄᜎᜓᜄ᜔' },
  tn: { name: 'Tswana', nativeName: 'Setswana' },
  to: { name: 'Tonga (Tonga Islands)', nativeName: 'faka Tonga' },
  tr: { name: 'Turkish', nativeName: 'Türkçe' },
  ts: { name: 'Tsonga', nativeName: 'Xitsonga' },
  tt: { name: 'Tatar', nativeName: 'татарча, tatarça, تاتارچا‎' },
  tw: { name: 'Twi', nativeName: 'Twi' },
  ty: { name: 'Tahitian', nativeName: 'Reo Tahiti' },
  ug: { name: 'Uighur, Uyghur', nativeName: 'Uyƣurqə, ئۇيغۇرچە‎' },
  uk: { name: 'Ukrainian', nativeName: 'українська' },
  ur: { name: 'Urdu', nativeName: 'اردو' },
  uz: { name: 'Uzbek', nativeName: 'Oʻzbek, Ўзбек, أۇزبېك‎' },
  ve: { name: 'Venda', nativeName: 'Tshivenḓa' },
  vi: { name: 'Vietnamese', nativeName: 'Tiếng Việt' },
  vo: { name: 'Volapük', nativeName: 'Volapük' },
  wa: { name: 'Walloon', nativeName: 'Walon' },
  cy: { name: 'Welsh', nativeName: 'Cymraeg' },
  wo: { name: 'Wolof', nativeName: 'Wollof' },
  fy: { name: 'Western Frisian', nativeName: 'Frysk' },
  xh: { name: 'Xhosa', nativeName: 'isiXhosa' },
  yi: { name: 'Yiddish', nativeName: 'ייִדיש' },
  yo: { name: 'Yoruba', nativeName: 'Yorùbá' },
  za: { name: 'Zhuang, Chuang', nativeName: 'Saɯ cueŋƅ, Saw cuengh' },
};
exports.seed = async (knex) => {
const curatedLanguages = Object.entries(languages).map(([alpha2, names]) => ({
alpha2: alpha2.toUpperCase(),
name: names.name,
name_native: names.nativeName,
}));
await upsert('languages', curatedLanguages, 'alpha2', knex);
};

View File

@@ -351,6 +351,7 @@ function curateProfileEntry(profile) {
tattoos: profile.tattoos,
blood_type: profile.bloodType,
avatar_media_id: profile.avatarMediaId || null,
updated_at: knex.raw('DEFAULT'), // default should be NOW(), this will update the column
};
return curatedProfileEntry;

View File

@@ -11,7 +11,6 @@ const fs = require('fs').promises;
const { format, intervalToDuration } = require('date-fns');
const argv = require('./argv');
const initServer = require('./web/server');
const http = require('./utils/http');
const logger = require('./logger')(__filename);
@@ -31,13 +30,14 @@ const getFileEntries = require('./utils/file-entries');
const inspector = new Inspector();
let done = false;
unprint.options({
const unprintOptions = {
logErrors: false,
timeout: argv.requestTimeout,
userAgent: 'traxxx',
browserUserAgent: 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/141.0.0.0 Safari/537.36',
apiUserAgent: 'traxxx',
clientRetirement: config.bypass.browser.clientRetirement,
remote: config.bypass.browser.remote,
limits: {
...config.limits,
default: {
@@ -47,10 +47,12 @@ unprint.options({
browser: config.limits.browser,
},
proxy: config.proxy,
});
};
unprint.on('requestInit', (event) => logger.debug(`Unprint ${event.method} (${event.interval}ms/${event.concurrency}p${event.isProxied ? ' proxied' : ''}${event.isBrowser ? ' browser' : ''}) ${event.url}`));
unprint.on('requestError', (event) => logger.error(`Unprint failed ${event.isProxied ? 'proxied ' : ''}${event.isBrowser ? 'browser ' : ''}${event.method} ${event.url} (${event.status}): ${event.statusText}`));
unprint.options(unprintOptions);
unprint.on('requestInit', (event) => logger.debug(`Unprint ${event.method} (${event.interval}ms/${event.concurrency}p${event.isRemote ? ' remote' : ''}${event.isProxied ? ' proxied' : ''}${event.isBrowser ? ' browser' : ''}) ${event.url}`));
unprint.on('requestError', (event) => logger.error(`Unprint failed ${event.isRemote ? ' remote' : ''}${event.isProxied ? 'proxied ' : ''}${event.isBrowser ? 'browser ' : ''}${event.method} ${event.url} (${event.status}): ${event.statusText}`));
unprint.on('browserOpen', (event) => logger.debug(`Unprint opened browsers ${event.keys} (${event.active}/${config.bypass.browser.clientRetirement} active, ${event.clients} clients)`));
unprint.on('browserClose', (event) => logger.debug(`Unprint closed${event.retired ? ' retired' : ''} browsers ${event.keys} (${event.active}/${config.bypass.browser.clientRetirement} active, ${event.clients} clients)`));
@@ -127,11 +129,6 @@ async function init() {
try {
await redis.connect();
if (argv.server) {
await initServer();
return;
}
if (argv.sampleMemory) {
await startMemorySample(config.memorySampling.snapshotIntervals);
}

View File

@@ -3,7 +3,7 @@
const config = require('config');
const knex = require('knex');
module.exports = knex({
const knexInstance = knex({
client: 'pg',
connection: config.database.owner,
pool: config.database.pool,
@@ -11,3 +11,23 @@ module.exports = knex({
asyncStackTraces: process.env.NODE_ENV === 'development',
// debug: process.env.NODE_ENV === 'development',
});
knexInstance.on('query', function onQuery(query) {
const bindingCount = query.bindings?.length ?? 0;
if (bindingCount > 50000) {
const error = new Error(`[knex] Dangerous query: ${bindingCount} bindings detected: ${query.sql?.slice(0, 200)}${query.sql?.length > 200 ? '...' : ''}`);
Error.captureStackTrace(error, onQuery);
// console.error(error);
throw error; // optionally hard-fail so you get a real stack trace
}
});
knexInstance.on('query-error', (error, query) => {
error.knexSql = `${query.sql?.slice(0, 200)}${query.sql?.length > 200 ? '...' : ''}`;
error.knexBindingCount = query.bindings?.length;
});
module.exports = knexInstance;

View File

@@ -23,7 +23,7 @@ const logger = require('./logger')(__filename);
const argv = require('./argv');
const knex = require('./knex');
const http = require('./utils/http');
const bulkInsert = require('./utils/bulk-insert');
const batchInsert = require('./utils/batch-insert');
const chunk = require('./utils/chunk');
const { get } = require('./utils/qu');
const { fetchEntityReleaseIds } = require('./entity-releases');
@@ -558,6 +558,10 @@ async function storeImageFile(media, hashDir, hashSubDir, filename, filedir, fil
},
};
} catch (error) {
if (argv.debug) {
console.trace(error);
}
logger.error(`Failed to store ${media.id} from ${media.src} at ${filepath}: ${error.message}`);
await fsPromises.unlink(media.file.path);
@@ -647,8 +651,10 @@ async function fetchHttpSource(source, tempFileTarget, hashStream) {
const res = await http.get(source.src, {
limits: 'media',
headers: {
// explicit host not allowed in HTTP/2
// host: new URL(source.src).hostname,
// ...(source.host && { host: source.host }),
...(source.referer && { referer: source.referer }),
...(source.host && { host: source.host }),
},
stream: true, // sources are fetched in parallel, don't gobble up memory
followRedirects: source.followRedirects,
@@ -923,7 +929,7 @@ async function storeMedias(baseMedias, options) {
const newMediaEntries = newMediaWithEntries.filter((media) => media.newEntry).map((media) => media.entry);
try {
await bulkInsert('media', newMediaEntries, false);
await batchInsert('media', newMediaEntries, { confict: false });
return [...newMediaWithEntries, ...existingHashMedias];
} catch (error) {
@@ -992,11 +998,15 @@ async function associateReleaseMedia(releases, type = 'release') {
.filter(Boolean);
if (associations.length > 0) {
await bulkInsert(`${type}s_${role}`, associations, false);
await batchInsert(`${type}s_${role}`, associations, { conflict: false });
}
} catch (error) {
if (error.entries) {
logger.error(util.inspect(error.entries, null, null, { color: true }));
logger.error(util.inspect(error.entries.slice(0, 2), null, null, { color: true }), `${Math.min(error.entries.length, 2)} of ${error.length}`);
}
if (argv.debug) {
console.trace(error);
}
logger.error(`Failed to store ${type} ${role}: ${error.message} (${error.detail || 'no detail'})`);

View File

@@ -8,117 +8,6 @@ const argv = require('./argv');
const { updateSceneSearch } = require('./update-search');
const { flushOrphanedMedia } = require('./media');
const { graphql } = require('./web/graphql');
const releaseFields = `
id
entryId
shootId
title
url
date
description
duration
entity {
id
name
slug
parent {
id
name
slug
}
}
actors: releasesActors {
actor {
id
name
slug
gender
aliasFor
entityId
entryId
}
}
tags: releasesTags {
tag {
id
name
slug
}
}
chapters(orderBy: TIME_ASC) @include(if: $full) {
id
index
time
duration
title
description
tags: chaptersTags {
tag {
id
name
slug
}
}
poster: chaptersPoster {
media {
id
path
thumbnail
s3: isS3
width
height
size
}
}
photos: chaptersPhotos {
media {
id
path
thumbnail
s3: isS3
width
height
size
}
}
}
poster: releasesPoster {
media {
id
path
thumbnail
s3: isS3
width
height
size
}
}
photos: releasesPhotos @include (if: $full) {
media {
id
path
thumbnail
s3: isS3
width
height
size
}
}
trailer: releasesTrailer @include (if: $full) {
media {
id
path
s3: isS3
vr: isVr
quality
size
}
}
createdAt
`;
function curateRelease(release, withMedia = false, withPoster = true) {
if (!release) {
return null;
@@ -193,107 +82,6 @@ function curateRelease(release, withMedia = false, withPoster = true) {
};
}
function curateGraphqlRelease(release) {
if (!release) {
return null;
}
return {
id: release.id,
...(release.relevance && { relevance: release.relevance }),
entryId: release.entryId,
shootId: release.shootId,
title: release.title || null,
url: release.url || null,
date: release.date,
description: release.description || null,
duration: release.duration,
entity: release.entity,
actors: release.actors.map((actor) => actor.actor),
tags: release.tags.map((tag) => tag.tag),
...(release.chapters && { chapters: release.chapters.map((chapter) => ({
...chapter,
tags: chapter.tags.map((tag) => tag.tag),
poster: chapter.poster?.media || null,
photos: chapter.photos.map((photo) => photo.media),
})) }),
poster: release.poster?.media || null,
...(release.photos && { photos: release.photos.map((photo) => photo.media) }),
trailer: release.trailer?.media || null,
createdAt: release.createdAt,
};
}
async function fetchScene(releaseId) {
const { release } = await graphql(`
query Release(
$releaseId: Int!
$full: Boolean = true
) {
release(id: $releaseId) {
${releaseFields}
}
}
`, {
releaseId: Number(releaseId),
});
return curateGraphqlRelease(release);
}
async function fetchScenes(limit = 100) {
const { releases } = await graphql(`
query SearchReleases(
$limit: Int = 20
$full: Boolean = false
) {
releases(
first: $limit
orderBy: DATE_DESC
) {
${releaseFields}
}
}
`, {
limit: Math.min(limit, 10000),
});
return releases.map((release) => curateGraphqlRelease(release));
}
async function searchScenes(query, limit = 100, relevance = 0) {
const { releases } = await graphql(`
query SearchReleases(
$query: String!
$limit: Int = 20
$relevance: Float = 0.025
$full: Boolean = false
) {
releases: searchReleases(
query: $query
first: $limit
orderBy: RANK_DESC
filter: {
rank: {
greaterThan: $relevance
}
}
) {
rank
release {
${releaseFields}
}
}
}
`, {
query,
limit,
relevance,
});
return releases.map((release) => curateGraphqlRelease({ ...release.release, relevance: release.rank }));
}
async function deleteScenes(sceneIds) {
if (sceneIds.length === 0) {
return 0;
@@ -483,13 +271,10 @@ async function flushBatches(batchIds) {
module.exports = {
curateRelease,
fetchScene,
fetchScenes,
flushBatches,
flushMovies,
flushSeries,
flushScenes,
searchScenes,
deleteScenes,
deleteMovies,
deleteSeries,

View File

@@ -5,12 +5,13 @@ const angelogodshackoriginal = require('./angelogodshackoriginal');
// const americanpornstar = require('./americanpornstar'); // offline
const aziani = require('./aziani');
const badoink = require('./badoink');
// const bamvisions = require('./bamvisions');
const bamvisions = require('./bamvisions');
const bang = require('./bang');
const bradmontana = require('./bradmontana');
const cherrypimps = require('./cherrypimps');
const cumlouder = require('./cumlouder');
const modelmedia = require('./modelmedia');
const darkkotv = require('./darkkotv');
const dorcel = require('./dorcel');
// const famedigital = require('./famedigital');
const firstanalquest = require('./firstanalquest');
@@ -140,6 +141,7 @@ module.exports = {
purgatoryx: radical,
topwebmodels: radical,
lucidflix: radical,
hardwerk: radical,
// hush / hussiepass
eyeontheguy: hush,
hushpass: hush,
@@ -215,13 +217,14 @@ module.exports = {
angelogodshackoriginal,
babevr: badoink,
badoinkvr: badoink,
// bamvisions, // DNS error, site offline?
bamvisions,
bang,
meidenvanholland: bluedonkeymedia, // Vurig Vlaanderen uses same database
boobpedia,
bradmontana,
cherrypimps,
cumlouder,
darkkotv,
dorcelclub: dorcel,
freeones,
hitzefrei,

View File

@@ -1,15 +1,10 @@
'use strict';
/* eslint-disable newline-per-chained-call */
const Promise = require('bluebird');
const { CookieJar } = Promise.promisifyAll(require('tough-cookie'));
const cookie = require('cookie');
const moment = require('moment');
// const unprint = require('unprint');
const unprint = require('unprint');
const qu = require('../utils/qu');
const slugify = require('../utils/slugify');
const http = require('../utils/http');
const { inchesToCm, lbsToKg } = require('../utils/convert');
function getBasePath(parameters, channel, path = '/scene') {
@@ -57,7 +52,7 @@ function getCovers(images, target = 'cover') {
}
function getVideos(data) {
const teaserSources = data.videos.mediabook?.files;
const teaserSources = data.videos?.mediabook?.files;
const trailerSources = data.children.find((child) => child.type === 'trailer')?.videos.full?.files;
const teaser = teaserSources && Object.values(teaserSources).map((source) => ({
@@ -84,7 +79,7 @@ function scrapeLatestX(data, site, filterChannel, options) {
release.url = `${basepath}/${data.id}/${slugify(release.title)}`; // spartanId doesn't work in URLs
release.date = new Date(data.dateReleased);
release.duration = data.videos.mediabook?.length > 1 ? data.videos.mediabook.length : null;
release.duration = data.videos?.mediabook?.length > 1 ? data.videos.mediabook.length : null;
release.actors = data.actors.map((actor) => ({ name: actor.name, gender: actor.gender }));
release.tags = data.tags.map((tag) => tag.name);
@@ -126,7 +121,124 @@ async function scrapeLatest(items, site, filterChannel, options) {
};
}
function getUrl(site) {
const { searchParams, pathname } = new URL(site.url);
// if (search.match(/\?site=\d+/)) {
if (searchParams.has('site') || /\/site\/\d+/.test(pathname)) {
return site.url;
}
if (site.parameters?.native) {
return `${site.url}/scenes`;
}
if (site.parameters?.extract) {
return `${site.url}/scenes`;
}
if (site.parameters?.siteId) {
return `${site.parent.url}/scenes?site=${site.parameters.siteId}`;
}
throw new Error(`Aylo site '${site.name}' (${site.url}) not supported`);
}
async function getSession(site, _parameters, url) {
// if (site.slug === 'aylo' || site.parameters?.parentSession === false) {
if (site.slug === 'aylo') {
// most MG sites have a parent network to acquire a session from, don't try to acquire session from mindgeek.com for independent channels
return null;
}
const sessionUrl = site.parameters?.siteId && !(site.parameters?.native || site.parameters?.childSession || site.parent?.parameters?.childSession || site.parameters?.parentSession === false)
? site.parent.url
: (url || site.url);
const res = await unprint.get(sessionUrl, {
headers: {
'Accept-Language': 'en-US,en;', // somehow seems essential for some MG sites
Connection: 'keep-alive',
},
});
if (res.status === 200) {
const instanceToken = res.cookies.instance_token;
if (instanceToken) {
return { instanceToken };
}
}
throw new Error(`Failed to acquire Aylo session (${res.statusCode})`);
}
async function fetchLatest(site, page = 1, options) {
const url = getUrl(site);
const { searchParams, pathname } = new URL(url);
const siteId = searchParams.get('site') || Number(pathname.match(/\/site\/(\d+)\//)?.[1]);
if (!siteId && !site.parameters?.native && !site.parameters?.extract) {
return null;
}
const { instanceToken } = options.beforeNetwork?.instanceToken && !(options.parameters?.native || options.parameters?.childSession || options.parameters?.parentSession === false)
? options.beforeNetwork
: await getSession(site, options.parameters, url);
const beforeDate = moment().add('1', 'day').format('YYYY-MM-DD');
const limit = 24;
const apiUrl = site.parameters?.native || site.parameters?.extract
? `https://site-api.project1service.com/v2/releases?dateReleased=<${beforeDate}&limit=${limit}&offset=${limit * (page - 1)}&orderBy=-dateReleased&type=scene`
: `https://site-api.project1service.com/v2/releases?collectionId=${siteId}&dateReleased=<${beforeDate}&limit=${limit}&offset=${limit * (page - 1)}&orderBy=-dateReleased&type=scene`;
const res = await unprint.get(apiUrl, {
interval: options.parameters.interval,
concurrency: options.parameters.concurrency,
headers: {
Instance: instanceToken,
Origin: site.url,
Referer: url,
'Accept-Language': 'en-US,en;', // somehow seems essential for some MG sites
},
});
if (res.status === 200 && res.data.result) {
return scrapeLatest(res.data.result, site, false, options);
}
return res.status;
}
async function fetchUpcoming(site, _page, options) {
const url = getUrl(site);
const { instanceToken } = await getSession(site, options.parameters);
const apiUrl = 'https://site-api.project1service.com/v2/upcoming-releases';
const res = await unprint.get(apiUrl, {
interval: options.parameters.interval,
concurrency: options.parameters.concurrency,
headers: {
Instance: instanceToken,
Origin: site.url,
Referer: url,
'Accept-Language': 'en-US,en;', // somehow seems essential for some MG sites
},
});
if (res.status === 200 && res.data.result) {
return scrapeLatest(res.data.result, site, true, options);
}
return res.status;
}
function scrapeRelease(data, url, channel, networkName, options) {
if (Array.isArray(data)) {
return null;
}
const release = {};
const { title, description } = data;
@@ -136,7 +248,7 @@ function scrapeRelease(data, url, channel, networkName, options) {
release.description = description;
release.date = new Date(data.dateReleased);
release.duration = data.videos.mediabook?.length > 1 ? data.videos.mediabook.length : null;
release.duration = data.videos?.mediabook?.length > 1 ? data.videos.mediabook.length : null;
release.actors = data.actors.map((actor) => ({ name: actor.name, gender: actor.gender }));
release.tags = data.tags.map((tag) => tag.name);
@@ -144,7 +256,6 @@ function scrapeRelease(data, url, channel, networkName, options) {
[release.poster, ...release.photos] = getThumbs(data).map((src) => ({
src,
referer: url,
host: 'mediavault-private-fl.project1content.com',
}));
const { teaser, trailer } = getVideos(data);
@@ -189,140 +300,6 @@ function scrapeRelease(data, url, channel, networkName, options) {
return release;
}
function getUrl(site) {
const { searchParams, pathname } = new URL(site.url);
// if (search.match(/\?site=\d+/)) {
if (searchParams.has('site') || /\/site\/\d+/.test(pathname)) {
return site.url;
}
if (site.parameters?.native) {
return `${site.url}/scenes`;
}
if (site.parameters?.extract) {
return `${site.url}/scenes`;
}
if (site.parameters?.siteId) {
return `${site.parent.url}/scenes?site=${site.parameters.siteId}`;
}
throw new Error(`Aylo site '${site.name}' (${site.url}) not supported`);
}
async function getSession(site, parameters, url) {
// if (site.slug === 'aylo' || site.parameters?.parentSession === false) {
if (site.slug === 'aylo') {
// most MG sites have a parent network to acquire a session from, don't try to acquire session from mindgeek.com for independent channels
return null;
}
const cookieJar = new CookieJar();
const session = http.session({ cookieJar });
const sessionUrl = site.parameters?.siteId && !(site.parameters?.native || site.parameters?.childSession || site.parent?.parameters?.childSession || site.parameters?.parentSession === false)
? site.parent.url
: (url || site.url);
/*
await unprint.browserRequest(sessionUrl, {
browser: {
headless: false,
},
async control() {
await new Promise((resolve) => { setTimeout(() => resolve(), 10000); });
},
});
*/
const res = await http.get(sessionUrl, {
session,
headers: {
'Accept-Language': 'en-US,en;', // somehow seems essential for some MG sites
Connection: 'keep-alive',
'User-Agent': 'HTTPie/3.2.1',
},
interval: parameters?.interval,
concurrency: parameters?.concurrency,
parse: false,
});
if (res.status === 200) {
const cookieString = await cookieJar.getCookieStringAsync(sessionUrl);
const { instance_token: instanceToken } = cookie.parse(cookieString);
if (instanceToken) {
return { session, instanceToken };
}
}
throw new Error(`Failed to acquire Aylo session (${res.statusCode})`);
}
async function fetchLatest(site, page = 1, options) {
const url = getUrl(site);
const { searchParams, pathname } = new URL(url);
const siteId = searchParams.get('site') || Number(pathname.match(/\/site\/(\d+)\//)?.[1]);
if (!siteId && !site.parameters?.native && !site.parameters?.extract) {
return null;
}
const { instanceToken } = options.beforeNetwork?.instanceToken
? options.beforeNetwork
: await getSession(site, options.parameters, url);
const beforeDate = moment().add('1', 'day').format('YYYY-MM-DD');
const limit = 24;
const apiUrl = site.parameters?.native || site.parameters?.extract
? `https://site-api.project1service.com/v2/releases?dateReleased=<${beforeDate}&limit=${limit}&offset=${limit * (page - 1)}&orderBy=-dateReleased&type=scene`
: `https://site-api.project1service.com/v2/releases?collectionId=${siteId}&dateReleased=<${beforeDate}&limit=${limit}&offset=${limit * (page - 1)}&orderBy=-dateReleased&type=scene`;
const res = await http.get(apiUrl, {
interval: options.parameters.interval,
concurrency: options.parameters.concurrency,
headers: {
Instance: instanceToken,
Origin: site.url,
Referer: url,
'Accept-Language': 'en-US,en;', // somehow seems essential for some MG sites
},
});
if (res.status === 200 && res.body.result) {
return scrapeLatest(res.body.result, site, false, options);
}
return res.statusCode;
}
async function fetchUpcoming(site, page, options) {
const url = getUrl(site);
const { session, instanceToken } = await getSession(site, options.parameters);
const apiUrl = 'https://site-api.project1service.com/v2/upcoming-releases';
const res = await http.get(apiUrl, {
session,
interval: options.parameters.interval,
concurrency: options.parameters.concurrency,
headers: {
Instance: instanceToken,
Origin: site.url,
Referer: url,
'Accept-Language': 'en-US,en;', // somehow seems essential for some MG sites
},
});
if (res.statusCode === 200 && res.body.result) {
return scrapeLatest(res.body.result, site, true, options);
}
return res.statusCode;
}
async function fetchRelease(url, site, baseScene, options) {
if (baseScene?.entryId && !baseScene.shallow && !options.parameters.forceDeep) {
// overview and deep data is the same, don't hit server unnecessarily
@@ -330,10 +307,9 @@ async function fetchRelease(url, site, baseScene, options) {
}
const entryId = new URL(url).pathname.match(/\/(\d+)/)?.[1];
const { session, instanceToken } = options.beforeFetchScenes || await getSession(site, options.parameters);
const { instanceToken } = options.beforeFetchScenes || await getSession(site, options.parameters);
const res = await http.get(`https://site-api.project1service.com/v2/releases/${entryId}`, {
session,
const res = await unprint.get(`https://site-api.project1service.com/v2/releases/${entryId}`, {
interval: options.parameters.interval,
concurrency: options.parameters.concurrency,
headers: {
@@ -342,16 +318,16 @@ async function fetchRelease(url, site, baseScene, options) {
},
});
if (res.status === 200 && res.body.result) {
if (res.status === 200 && res.data.result) {
return {
scene: scrapeRelease(res.body.result, url, site, null, options),
scene: scrapeRelease(res.data.result, url, site, null, options),
};
}
return null;
}
function scrapeProfile(data, networkName, _releases = []) {
function scrapeProfile(data, _networkName, _releases = []) {
const profile = {
description: data.bio,
aliases: data.aliases.filter(Boolean),
@@ -365,7 +341,7 @@ function scrapeProfile(data, networkName, _releases = []) {
profile.measurements = data.measurements;
}
profile.dateOfBirth = qu.parseDate(data.birthday);
profile.dateOfBirth = unprint.extractDate(data.birthday);
profile.birthPlace = data.birthPlace;
profile.height = inchesToCm(data.height);
profile.weight = lbsToKg(data.weight);
@@ -404,10 +380,9 @@ function scrapeProfile(data, networkName, _releases = []) {
async function fetchProfile({ name: actorName }, { entity, parameters }, include) {
// const url = `https://www.${networkOrNetworkSlug.slug || networkOrNetworkSlug}.com`;
const { session, instanceToken } = await getSession(entity, parameters);
const { instanceToken } = await getSession(entity, parameters);
const res = await http.get(`https://site-api.project1service.com/v1/actors/?search=${encodeURI(actorName)}`, {
session,
const res = await unprint.get(`https://site-api.project1service.com/v1/actors/?search=${encodeURI(actorName)}`, {
interval: parameters.interval,
concurrency: parameters.concurrency,
headers: {
@@ -416,14 +391,13 @@ async function fetchProfile({ name: actorName }, { entity, parameters }, include
},
});
if (res.statusCode === 200) {
const actorData = res.body.result.find((actor) => actor.name.toLowerCase() === actorName.toLowerCase());
if (res.status === 200) {
const actorData = res.data.result.find((actor) => actor.name.toLowerCase() === actorName.toLowerCase());
if (actorData) {
const actorReleasesUrl = `https://site-api.project1service.com/v2/releases?actorId=${actorData.id}&limit=100&offset=0&orderBy=-dateReleased&type=scene`;
const actorReleasesRes = include.includeActorScenes && await http.get(actorReleasesUrl, {
session,
const actorReleasesRes = include.includeActorScenes && await unprint.get(actorReleasesUrl, {
interval: parameters.interval,
concurrency: parameters.concurrency,
headers: {
@@ -431,8 +405,8 @@ async function fetchProfile({ name: actorName }, { entity, parameters }, include
},
});
if (actorReleasesRes.statusCode === 200 && actorReleasesRes.body.result) {
return scrapeProfile(actorData, entity.slug, actorReleasesRes.body.result);
if (actorReleasesRes.status === 200 && actorReleasesRes.data.result) {
return scrapeProfile(actorData, entity.slug, actorReleasesRes.data.result);
}
return scrapeProfile(actorData, entity.slug, []);

View File

@@ -76,7 +76,12 @@ function scrapeAll(scenes, entity) {
release.poster = [
`${posterUrl.origin}${posterUrl.pathname}`,
posterUrl.href,
];
].map((src) => ({
src,
expectType: {
'application/octet-stream': 'image/jpeg',
},
}));
}
const videoData = query.json('a', { attribute: 'data-videopreview-sources-value' });
@@ -120,7 +125,9 @@ async function scrapeScene({ query }, { url, entity }) {
const release = {};
const data = query.json('//script[contains(text(), "VideoObject")]');
release.entryId = data?.['@id'] || decodeId(new URL(url).pathname.match(/\/video\/([\w-]+)\//)?.[1]);
// @id can be URL for some reason
// release.entryId = data?.['@id'] || decodeId(new URL(url).pathname.match(/\/video\/([\w-]+)\//)?.[1]);
release.entryId = decodeId(new URL(url).pathname.match(/\/video\/([\w-]+)\//)?.[1]);
// data title is not capitalized, prefer markup
release.title = query.attribute('meta[property="og:title"]', 'content') || query.content('.video-container + div h1') || data?.name;
@@ -147,7 +154,17 @@ async function scrapeScene({ query }, { url, entity }) {
const sourcesData = query.json('.video-container [data-videopreview-sources-value]', { attribute: 'data-videopreview-sources-value' });
release.poster = data?.thumbnailUrl || query.attribute('meta[property="og:image"]', 'content');
const poster = data?.thumbnailUrl || query.attribute('meta[property="og:image"]', 'content');
if (poster) {
release.poster = {
src: poster,
expectType: {
'application/octet-stream': 'image/jpeg',
},
};
}
release.teaser = (sourcesData && [
sourcesData.mp4_large,
sourcesData.webm_large,
@@ -158,7 +175,13 @@ async function scrapeScene({ query }, { url, entity }) {
|| query.attribute('meta[property="og:video"]')
|| query.video('video[data-videocontainer-target] source');
release.photos = query.sourceSets('div[data-controller] a[href^="/photos"] img');
release.photos = query.sourceSets('div[data-controller] a[href^="/photos"] img').map((sourceSet) => sourceSet.map((src) => ({
src,
expectType: {
'application/octet-stream': 'image/jpeg',
},
})));
release.photoCount = query.number('//h2[contains(text(), "Photos")]/following-sibling::span');
const channelName = query.content('.video-container + div a[href*="?in="]')?.trim();

157
src/scrapers/darkkotv.js Executable file
View File

@@ -0,0 +1,157 @@
'use strict';
const unprint = require('unprint');
const slugify = require('../utils/slugify');
const tryUrls = require('../utils/try-urls');
const { convert } = require('../utils/convert');
// Derives a stable entry ID from a scene URL of the form
// /scenes/<slug>.html or /scenes/<slug>_vids.html.
// Returns null when the URL is missing or does not match, instead of
// throwing, mirroring the null-guarded getEntryId in the Jules Jordan scraper.
function getEntryId(url) {
	if (!url) {
		return null;
	}

	// escape the dot so e.g. 'foo_html' in the path cannot satisfy the suffix
	const match = new URL(url).pathname.match(/\/scenes\/(.*?)(_vids)?\.html/);

	return match ? slugify(match[1]) : null;
}
// Curates the scene cards on a listing page into release objects.
function scrapeAll(scenes, channel) {
	return scenes.map(({ query }) => {
		const url = query.url('.videoPic a, h4 a');

		const actors = query.all('a[href*="models/"]').map((actorEl) => ({
			name: unprint.query.content(actorEl),
			url: unprint.query.url(actorEl, null),
		}));

		// thumbnails are exposed at four resolutions, src0_4x (highest) down to src0_1x
		const poster = Array.from({ length: 4 }, (_value, index) => query.img('.videoPic img', { attribute: `src0_${4 - index}x`, origin: channel.origin }));

		return {
			url,
			entryId: getEntryId(url),
			title: query.content('h4 a'),
			date: query.date('.videoInfo li:first-child ', 'MM-DD-YYYY'),
			duration: query.number('.videoInfo li:nth-child(2)') * 60 || null, // minutes to seconds
			actors,
			poster,
		};
	});
}
// Retrieves one page of the channel's latest scene listings.
async function fetchLatest(channel, page = 1) {
	const res = await unprint.get(`${channel.url}/categories/movies_${page}.html`, { selectAll: '.latestUpdateB' });

	if (!res.ok) {
		return res.status;
	}

	return scrapeAll(res.context, channel);
}
// Fetches the capture (screenshot) gallery linked from a scene page.
// Returns null when no URL is available or the request fails.
async function fetchCaps(url) {
	if (!url) {
		return null;
	}

	const res = await unprint.get(url, { select: '.photoDetailsArea' });

	return res.ok
		? res.context.query.imgs('.photoDPic img')
		: null;
}
// Scrapes a scene detail page. The page-level query is used for header and
// player elements; a query scoped to the info box avoids matching unrelated
// markup elsewhere on the page.
async function scrapeScene({ query: pageQuery, html }, { url, entity, include }) {
	const release = {};

	const { query } = unprint.init(pageQuery.element('.latestUpdateBinfo'));

	release.entryId = getEntryId(url);
	release.title = pageQuery.content('.vidImgTitle h4');
	release.description = query.content('.vidImgContent p');

	release.date = query.date('.videoInfo li:first-child ', 'MM-DD-YYYY');
	release.duration = query.number('.videoInfo li:nth-child(2)') * 60 || null; // minutes to seconds

	release.actors = query.all('a[href*="models/"]').map((actorEl) => ({
		name: unprint.query.content(actorEl),
		url: unprint.query.url(actorEl, null),
	}));

	release.tags = query.contents('.blogTags a');

	// the poster is assigned by an inline script rather than present in markup
	const posterPath = html.match(/useimage\s*=\s*"(.*?)"/i)?.[1];
	const capsUrl = pageQuery.url('a[href*="_caps"]');

	if (posterPath) {
		// derive all four resolution variants from the 4x path
		release.poster = Array.from({ length: 4 }, (_value, index) => unprint.prefixUrl(posterPath.replace('-4x', `-${4 - index}x`), entity.url));
	}

	if (include.photos && capsUrl) {
		// fetchCaps accepts only a URL; the stray entity argument was dropped
		release.caps = await fetchCaps(capsUrl);
	}

	release.trailer = pageQuery.video('#download_select option[value*=".mp4"]', { attribute: 'value' });

	return release;
}
// Curates a model bio page into a profile. Vital stats are rendered as
// 'Key: Value' list items and parsed into a slug-keyed object first.
function scrapeProfile({ query }, { url, actorName }) {
	const profile = { url };

	const bio = Object.fromEntries(query.contents('.vitalStats li').map((entry) => {
		const [key, value] = entry.split(':');

		if (!key || !value) {
			return null;
		}

		return [slugify(key, '_'), value?.trim()];
	}).filter(Boolean));

	// strip the boilerplate lead-in from the bio, append awards when present
	profile.description = `${query.content('.modelBioInfo')?.replace(new RegExp(`professional bio of ${actorName}`, 'i'), '')}${bio.awards ? ` Awards: ${bio.awards}` : ''}`;

	profile.dateOfBirth = unprint.extractDate(bio.date_of_birth, 'MMMM D, YYYY');
	profile.birthPlace = bio.birthplace;
	profile.ethnicity = bio.ethnicity;

	profile.height = unprint.extractNumber(bio.height, { match: /(\d+)\s*cm/i, matchIndex: 1 })
		|| convert(bio.height?.match(/\d+\s*ft \d+\s*in/)?.[0], 'cm');

	// guard the lbs match with optional chaining, as the height line does;
	// a weight value with neither a kg nor an lbs unit would otherwise
	// throw a TypeError indexing the null match result
	profile.weight = unprint.extractNumber(bio.weight, { match: /(\d+)\s*kg/i, matchIndex: 1 })
		|| convert(bio.weight?.match(/\d+\s*lbs/)?.[0], 'lb', 'kg');

	profile.measurements = bio.measurements;

	if (/yes/i.test(bio.natural_breasts)) profile.naturalBoobs = true;
	if (/no/i.test(bio.natural_breasts)) profile.naturalBoobs = false;
	if (/yes/i.test(bio.tattoos)) profile.hasTattoos = true;
	if (/no/i.test(bio.tattoos)) profile.hasTattoos = false;
	if (/yes/i.test(bio.piercings)) profile.hasPiercings = true;
	if (/no/i.test(bio.piercings)) profile.hasPiercings = false;

	profile.socials = query.urls('.vitalStats a[href*="onlyfans"], .vitalStats a[href*="twitter"], .vitalStats a[href*="instagram"]');
	profile.avatar = Array.from({ length: 4 }, (_value, index) => query.img('.modelBioPic img', { attribute: `src0_${4 - index}x` }));

	return profile;
}
// Resolves and scrapes a model's profile page, trying the stored URL first
// and falling back to common slug variations of the actor name.
async function fetchProfile({ name: actorName, url: actorUrl }, entity) {
	const candidateUrls = [
		actorUrl,
		`${entity.url}/models/${slugify(actorName, '-')}.html`,
		`${entity.url}/models/${slugify(actorName, '')}.html`,
		`${entity.url}/models/${slugify(actorName, '_')}.html`,
	];

	const { res, url } = await tryUrls(candidateUrls);

	if (!res.ok) {
		return res.status;
	}

	return scrapeProfile(res.context, { url, entity, actorName });
}
// Public scraper interface; note scrapeScene is exported directly,
// with no fetchScene wrapper.
module.exports = {
	fetchLatest,
	fetchProfile,
	scrapeScene,
};

View File

@@ -21,6 +21,10 @@ async function scrapeApiReleases(json, site, options) {
return acc;
}
if (options.parameters?.filterChannel && scene.mainChannel?.id !== options.parameters.filterChannel) {
return acc;
}
const release = {
entryId: scene.clip_id,
description: scene.description,
@@ -128,7 +132,7 @@ async function fetchApiCredentials(referer, site) {
}
async function fetchLatest(site, page = 1, options, _preData, upcoming = false) {
const referer = options.parameters?.referer || `${options.parameters?.networkReferer ? site.parent.url : site.url}/en/videos`;
const referer = options.parameters?.referer || `${options.parameters?.networkReferer ? site.parent.origin : site.origin}/en/videos`;
const { apiUrl } = await fetchApiCredentials(referer, site);
const slug = options.parameters.querySlug || site.slug;
@@ -179,7 +183,7 @@ async function getFullPhotos(entryId, site, parameters) {
}
async function getThumbs(entryId, site, parameters) {
const referer = parameters?.referer || `${parameters?.networkReferer ? site.parent.url : site.url}/en/videos`;
const referer = parameters?.referer || `${parameters?.networkReferer ? site.parent.origin : site.origin}/en/videos`;
const { apiUrl } = await fetchApiCredentials(referer, site);
const res = await unprint.post(apiUrl, {
@@ -237,7 +241,7 @@ async function scrapeScene(data, site, options, movieScenes) {
release.entryId = data.clip_id || data.movie_id;
release.title = curateTitle(data.title, site);
release.duration = data.length;
release.date = (data.date && new Date(data.date * 1000)) || unprint.parseDate(data.release_date || data.last_modified, 'YYYY-MM-DD');
release.date = (data.date && new Date(data.date * 1000)) || unprint.extractDate(data.release_date || data.last_modified, 'YYYY-MM-DD');
release.director = data.directors[0]?.name || null;
release.actors = data.actors.map((actor) => ({
@@ -298,7 +302,7 @@ async function scrapeScene(data, site, options, movieScenes) {
}
async function fetchScene(url, site, baseRelease, options) {
const referer = options.parameters?.referer || `${site.parameters?.networkReferer ? site.parent.url : site.url}/en/videos`;
const referer = options.parameters?.referer || `${site.parameters?.networkReferer ? site.parent.origin : site.origin}/en/videos`;
const { apiUrl } = await fetchApiCredentials(referer, site);
const entryId = (baseRelease?.path || new URL(url).pathname).match(/\/(\d{2,})(\/|$)/)?.[1];

View File

@@ -1,44 +1,38 @@
'use strict';
const util = require('util');
const Promise = require('bluebird');
const unprint = require('unprint');
const argv = require('../argv');
const { heightToCm } = require('../utils/convert');
const slugify = require('../utils/slugify');
const tryUrls = require('../utils/try-urls');
function getEntryId(html) {
const entryId = html.match(/showtagform\((\d+)\)/);
if (entryId) {
return entryId[1];
function getEntryId(url) {
if (!url) {
return null;
}
const setIdIndex = html.indexOf('setid:"');
const entryId = new URL(url).pathname.split('/').at(-1).match('(.*?)_vids.html');
if (setIdIndex) {
return html.slice(setIdIndex, html.indexOf(',', setIdIndex)).match(/\d+/)?.[0];
if (entryId) {
return slugify(entryId[1]);
}
return null;
}
function getEntryIdFromTitle(release) {
// return slugify([release.title, release.date && unprint.formatDate(release.date, 'YYYY-MM-DD')]); // date not shown on updates page
// return slugify(release.title);
return slugify([release.title, ...(release.actors?.map((actor) => actor.name || actor).toSorted() || [])]);
}
function scrapeAll(scenes, site, entryIdFromTitle) {
return scenes.map(({ element, query }) => {
function scrapeAll(scenes, site) {
return scenes.map(({ query }) => {
const release = {};
const title = query.content('.content_img div, .dvd_info > a, a.update_title, .update_title a, a[title] + a[title], .overlay-text')
const title = query.content('.jj-card-title, .content_img div, .dvd_info > a, a.update_title, .update_title a, a[title] + a[title], .overlay-text')
|| query.content('a[title*=" "]');
release.title = title?.slice(0, title.match(/starring:/i)?.index || Infinity).trim();
release.url = query.url('.content_img a, .dvd_info > a, a.update_title, .update_title a, a[title]');
release.date = query.date('.update_date', ['MM/DD/YYYY', 'YYYY-MM-DD']);
release.url = query.url('.jj-card-thumb, .content_img a, .dvd_info > a, a.update_title, .update_title a, a[title]');
release.date = query.date('.jj-card-date, .update_date', ['MM/DD/YYYY', 'YYYY-MM-DD', 'MMMM D, YYYY']);
release.entryId = getEntryId(release.url);
release.actors = query.all('.content_img .update_models a, .update_models a').map((actorEl) => ({
name: unprint.query.content(actorEl),
@@ -70,16 +64,15 @@ function scrapeAll(scenes, site, entryIdFromTitle) {
return null;
}).filter(Boolean);
release.teaser = query.video('.jj-card-video', { attribute: 'data-src' });
if (!release.teaser) {
const teaserScript = query.html('script');
if (teaserScript) {
release.teaser = teaserScript.slice(teaserScript.indexOf('http'), teaserScript.indexOf('.mp4') + 4);
}
release.entryId = (entryIdFromTitle && getEntryIdFromTitle(release))
|| element.dataset.setid
|| query.element('.rating_box')?.dataset.id
|| query.attribute('a img', 'id')?.match(/set-target-(\d+)/)?.[1];
}
return release;
});
@@ -91,7 +84,7 @@ async function fetchLatest(site, page = 1, include, preData, entryIdFromTitle =
: `${site.url}/trial/categories/movies_${page}_d.html`;
// const res = await http.get(url);
const res = await unprint.get(url, { selectAll: '.update_details, .grid-item' });
const res = await unprint.get(url, { selectAll: '.scenes-listing-grid .jj-content-card' });
if (res.ok) {
return scrapeAll(res.context, site, typeof site.parameters?.entryIdFromTitle === 'boolean' ? site.parameters.entryIdFromTitle : entryIdFromTitle);
@@ -100,7 +93,8 @@ async function fetchLatest(site, page = 1, include, preData, entryIdFromTitle =
return res.status;
}
function scrapeUpcoming(scenes, channel) {
/* disable until we have entryId solution
function scrapeUpcoming(scenes, _channel) {
return scenes.map(({ query, html }) => {
const release = {};
@@ -135,6 +129,7 @@ async function fetchUpcoming(site) {
return res.status;
}
*/
function extractLegacyTrailer(html, context) {
const trailerLines = html.split('\n').filter((line) => /movie\["trailer\w*"\]\[/i.test(line));
@@ -194,17 +189,19 @@ function getPhotos(query, release, context) {
async function scrapeScene({ html, query }, context) {
const release = {};
release.title = query.content('.title_bar_hilite, .movie_title');
release.description = query.content('.update_description') || query.text('//div[./span[contains(text(), "Description")]]');
release.entryId = getEntryId(context.url);
release.date = query.date(['.update_date', '//div[./span[contains(text(), "Date")]]'], ['MM/DD/YYYY', 'YYYY-MM-DD']);
release.title = query.content('.scene-title, .title_bar_hilite, .movie_title');
release.description = query.content('.scene-desc, .update_description') || query.text('//div[./span[contains(text(), "Description")]]');
release.actors = query.all('.backgroundcolor_info > .update_models a, .item .update_models a, .player-scene-description .update_models a').map((actorEl) => ({
release.date = query.date(['.meta-item:nth-child(2) .val, .update_date', '//div[./span[contains(text(), "Date")]]'], ['MM/DD/YYYY', 'YYYY-MM-DD', 'MMMM D, YYYY']);
release.actors = query.all('.meta-item .update_models a, .backgroundcolor_info > .update_models a, .item .update_models a, .player-scene-description .update_models a').map((actorEl) => ({
name: unprint.query.content(actorEl),
url: unprint.query.url(actorEl, null),
}));
release.tags = query.contents('.update_tags a, .player-scene-description a[href*="/categories"]');
release.tags = query.contents('.scene-cats a, .update_tags a, .player-scene-description a[href*="/categories"]');
release.director = release.tags?.find((tag) => ['mike john', 'van styles'].includes(tag?.trim().toLowerCase()));
const posterPath = query.poster('#video-player', { forceGetAttribute: true }) // without getAttribute, missing poster is returned as page URL
@@ -245,7 +242,7 @@ async function scrapeScene({ html, query }, context) {
// #images img selects a list of images that is present on every page; the JJ website removes the ones that failed to load with JS (lol)
release.photos = [
...context.baseRelease?.photos?.map((sources) => sources.at(-1).src) || [],
...query.imgs('#images img'),
...query.imgs('.tp-photos-strip img, #images img'),
...query.imgs('img.update_thumb', { attribute: 'src0_1x' }),
].filter(Boolean).map((source) => Array.from(new Set([
source.replace(/.jpg$/, '-full.jpg'),
@@ -270,9 +267,7 @@ async function scrapeScene({ html, query }, context) {
release.stars = query.number('.avg_rating');
release.entryId = context.entity.parameters?.entryIdFromTitle
? getEntryIdFromTitle(release)
: getEntryId(html);
release.qualities = query.contents('.res-item .res-lbl').map((resolution) => Number(resolution.match(/\d+$/)?.[0])).filter(Boolean);
return release;
}
@@ -296,7 +291,7 @@ function scrapeMovie({ query }, { url }) {
scene.date = unprint.query.date(sceneEl, '//span[contains(@class, "dvd-scene-description") and span[contains(text(), "Date")]]', 'MM/DD/YYYY');
scene.actors = unprint.query.contents(sceneEl, '.update_models a');
scene.entryId = getEntryIdFromTitle(scene);
scene.entryId = getEntryId(scene.url);
return scene;
});
@@ -339,48 +334,34 @@ function scrapeProfile({ query }, url, entity) {
verifyType: 'image',
}));
profile.scenes = scrapeAll(unprint.initAll(query.all('.grid-item')), entity, true);
profile.scenes = scrapeAll(unprint.initAll(query.all('.mbp-scenes-grid .jj-content-card, .grid-item')), entity, true);
return profile;
}
async function fetchProfile({ name: actorName, url }, entity) {
async function fetchProfile({ name: actorName, url: actorUrl }, entity) {
const actorSlugA = slugify(actorName, '');
const actorSlugB = slugify(actorName, '-');
const urls = [
url,
actorUrl,
`${entity.parameters?.profile || `${entity.url}/trial/models`}/${actorSlugA}.html`,
`${entity.parameters?.profile || `${entity.url}/trial/models`}/${actorSlugB}.html`,
];
return urls.reduce(async (chain, profileUrl) => {
const profile = await chain;
if (profile) {
return profile;
}
if (!profileUrl) {
return null;
}
const res = await unprint.get(profileUrl, {
followRedirects: false,
});
const { res, url } = await tryUrls(urls);
if (res.ok) {
return scrapeProfile(res.context, profileUrl, entity);
return scrapeProfile(res.context, url, entity);
}
return null;
}, Promise.resolve());
return res.status;
}
module.exports = {
fetchLatest,
fetchProfile,
fetchUpcoming,
// fetchUpcoming,
scrapeScene,
scrapeMovie,
};

View File

@@ -36,7 +36,7 @@ function scrapeAll(scenes) {
}
async function fetchLatest(channel, page) {
const res = await unprint.get(new URL(`./videos/page${page}.html`, channel.url).href, { // some sites require a trailing slash, join paths properly
const res = await unprint.get(new URL(`./videos/page${page}.html`, channel.url).href, { // some sites require a trailing slash, join paths properly; don't use origin in case channel path is used
selectAll: '.listing-videos .item',
cookies: {
warningHidden: 'hide',

View File

@@ -70,8 +70,7 @@ function scrapeAll(scenes, entity) {
async function fetchLatest(site, page = 1) {
const url = `${site.url}/video/gallery/${(page - 1) * 12}`; // /0 redirects back to /
const res = await unprint.get(url, {
interface: 'request',
const res = await unprint.browser(url, {
selectAll: '.content-grid-item',
});
@@ -86,9 +85,8 @@ async function fetchUpcoming(site) {
if (site.parameters?.upcoming) {
const url = `${site.url}/video/upcoming`;
const res = await unprint.get(url, {
const res = await unprint.browser(url, {
selectAll: '.content-grid-item',
interface: 'request',
});
if (res.ok) {
@@ -139,9 +137,7 @@ async function scrapeScene({ query }, { url, entity, include }) {
}
async function fetchScene(url, entity, _baseRelease, include) {
const res = await unprint.get(url, {
interface: 'request',
});
const res = await unprint.browser(url);
if (res.ok) {
return scrapeScene(res.context, { url, entity, include });
@@ -185,9 +181,7 @@ async function findModel(actor, entity) {
const url = `${origin}/model/alpha/${firstLetter}`;
const resModels = await unprint.get(url, {
interface: 'request',
});
const resModels = await unprint.browser(url);
if (!resModels.ok) {
return resModels.status;
@@ -217,9 +211,7 @@ async function fetchProfile(actor, { entity }) {
const model = await findModel(actor, entity);
if (model) {
const resModel = await unprint.get(model.url, {
interface: 'request',
});
const resModel = await unprint.browser(model.url);
if (resModel.ok) {
return scrapeProfile(resModel.context, model.avatar);

View File

@@ -14,8 +14,10 @@ function scrapeAll(months, channel, year) {
return unprint.initAll(scenes).map(({ query }) => {
const release = {};
release.url = query.url('a.video-pop-up', { origin: `${channel.origin}/submissive/` });
release.entryId = new URL(release.url).searchParams.get('id');
const videoUrl = query.url('a.video-pop-up', { origin: `${channel.origin}/submissive/` });
release.entryId = new URL(videoUrl).searchParams.get('id');
release.forceDeep = true;
release.title = query.content('.updates-item-title h4');
@@ -53,20 +55,24 @@ async function fetchLatest(channel, page = 1) {
return res.status;
}
function scrapeScene({ html }, url) {
function scrapeScene({ html }, baseRelease) {
const release = {};
release.entryId = new URL(url).searchParams.get('id');
release.entryId = baseRelease.entryId;
release.trailer = html.match(/file: '(.*)'/)[1];
return release;
}
async function fetchScene(url, channel) {
const res = await unprint.get(url);
async function fetchScene(_url, channel, baseRelease) {
if (!baseRelease.entryId) {
return null;
}
const res = await unprint.get(`${channel.origin}/submissive/player-load.php?id=${baseRelease.entryId}`);
if (res.ok) {
return scrapeScene(res.context, url, channel);
return scrapeScene(res.context, baseRelease);
}
return res.status;

View File

@@ -215,7 +215,7 @@ function scrapeProfile(data, channel, scenes, parameters) {
async function fetchProfile(actor, { channel, parameters }) {
const endpoint = await fetchEndpoint(channel);
const res = await http.get(`${channel.url}/_next/data/${endpoint}/models/${actor.slug}.json?slug=${actor.slug}`);
const res = await http.get(`${channel.url}/_next/data/${endpoint}/${parameters.actors || 'models'}/${actor.slug}.json?slug=${actor.slug}`);
if (res.ok && res.body.pageProps?.model) {
return scrapeProfile(res.body.pageProps.model, channel, res.body.pageProps.model_contents, parameters);

View File

@@ -16,6 +16,7 @@ const cherrypimps = require('./cherrypimps');
const cliffmedia = require('./cliffmedia');
const cumlouder = require('./cumlouder');
const czechav = require('./czechav');
const darkkotv = require('./darkkotv');
const modelmedia = require('./modelmedia');
const dorcel = require('./dorcel');
const fabulouscash = require('./fabulouscash');
@@ -118,6 +119,7 @@ module.exports = {
cumlouder,
czechav,
pornworld,
darkkotv,
delphine: modelmedia,
dorcel,
elegantangel: adultempire,

View File

@@ -1,155 +1,84 @@
'use strict';
const moment = require('moment');
const unprint = require('unprint');
const logger = require('../logger')(__filename);
const http = require('../utils/http');
const qu = require('../utils/qu');
const slugify = require('../utils/slugify');
const { prefixUrl } = require('../utils/qu');
function pickLocale(item) {
if (!item) {
return null;
}
function scrapeAll(scenes, entity) {
return scenes.map((scene) => {
if (item.en) {
return item.en;
}
return Object.values(item)[0];
}
function scrapeScene(scene, channel) {
const release = {};
release.entryId = scene.id;
release.url = `${new URL(entity.url).origin}/video/${scene.id}/${scene.slug}`;
release.url = `${channel.origin}/video/${scene.id}/${scene.slug}`;
if (/bic/i.test(scene.title)) {
release.shootId = scene.title.toUpperCase().replace('-', '_');
release.title = pickLocale(scene.title);
release.description = pickLocale(scene.description);
release.date = new Date(scene.publication_date);
release.duration = scene.meta?.duration_seconds || unprint.extractDuration(scene.meta?.duration);
release.productionDate = scene.meta.year && new Date(Date.UTC(scene.meta.year, 0, 1));
release.productionDatePrecision = 'year';
release.actors = scene.actors?.map((actor) => ({
name: actor.name,
entryId: actor.id,
url: `${channel.url}/videos/browse/cast/${actor.id}`,
}));
const poster = scene.artwork?.original;
const photos = [scene.artwork_f16, scene.cover].map((art) => art.original).filter(Boolean);
if (poster) {
release.poster = poster;
release.photos = photos;
} else {
release.title = scene.title;
// not observed, but artwork_f16 is suitable as poster
release.poster = photos[0];
release.photos = photos.slice(1);
}
release.description = scene.description;
release.date = moment.utc(scene.year, 'YYYY').toDate();
release.datePrecision = 'year';
release.caps = scene.screenshots?.map((src) => unprint.prefixUrl(src, 'https://s02.uni73d.net')) || [];
release.teaser = unprint.prefixUrl(scene.preview?.url, 'https://s02.uni73d.net');
release.actors = scene.actors.map((actor) => ({
name: actor.name.trim(),
avatar: actor.image || null,
})).filter((actor) => actor.name && slugify(actor.name) !== 'amateur-girl');
release.duration = scene.duration;
release.stars = scene.video_rating_score;
[release.poster, ...release.photos] = scene.screenshots.map((url) => prefixUrl(url));
release.tags = scene.display_genres?.map((genre) => pickLocale(genre.title)).filter(Boolean) || [];
if (scene.is_gay) {
release.tags = ['gay'];
release.tags = release.tags.concat('gay');
}
return release;
});
}
async function scrapeScene({ query }, url) {
const release = {};
const { pathname, origin, host } = new URL(url);
const entryId = pathname.match(/\/video\/(\d+)/)[1];
release.entryId = entryId;
const title = query.meta('name=title');
if (/bic/i.test(title)) {
release.shootId = title.toUpperCase().replace('-', '_');
} else {
release.title = title;
}
release.date = query.date('.detail-meta li:nth-child(2)', 'YYYY');
release.datePrecision = 'year';
release.description = query.q('.detail-description', true);
release.duration = query.dur('.detail-meta li:first-child');
const actors = [query.q('.detail-hero-title h1', true)?.trim()].filter((name) => name && slugify(name) !== 'amateur-girl');
if (actors.length > 0) {
release.actors = actors;
}
release.poster = query.q('.detail-hero').style['background-image'].match(/url\((.+)\)/)[1];
release.photos = query.imgs('.detail-grabs img');
const streamData = await http.get(`${origin}/video/source/${entryId}`, {
headers: {
host,
referer: url,
},
}, {
interval: 5000,
concurrency: 1,
});
if (streamData.ok && streamData.body.status === 'success') {
release.trailer = {
stream: streamData.body.link,
};
} else {
logger.warn(`Failed to fetch trailer for ${url}: ${streamData.ok ? streamData.body.status : streamData.status }`);
}
release.language = scene.meta?.language;
return release;
}
async function scrapeProfile(actor, entity, include) {
const profile = {};
async function fetchLatest(channel, page = 1, { parameters }) {
// sg: show gay
const url = `https://api.fundorado.com/api/videos/browse/labels/${parameters.legacySiteId}?page=${page}&sg=true&sort=release&video_type=scene&lang=en`;
const res = await unprint.get(url);
if (actor.image) {
profile.avatar = `https://teencoreclub.com${actor.image}`;
}
if (include.releases) {
const res = await http.get(`https://teencoreclub.com/browsevideos/api/all?actor=${actor.id}`);
if (res.ok) {
profile.releases = scrapeAll(res.body.data, entity);
}
}
return profile;
}
async function fetchLatest(entity, page = 1) {
// console.log(entity, page);
if (entity.parameters?.siteId) {
const res = await http.get(`https://teencoreclub.com/browsevideos/api/all?resType=latest&page=${page}&label=${entity.parameters.siteId}`);
if (res.ok) {
return scrapeAll(res.body.data, entity);
if (res.ok && res.data?.videos?.data) {
return res.data.videos.data.map((scene) => scrapeScene(scene, channel));
}
return res.status;
}
return null;
}
async function fetchScene(url, channel) {
const entryId = new URL(url).pathname.match(/\/video\/(\d+)/)[1];
const res = await unprint.get(`https://api.fundorado.com/api/videodetail/${entryId}`);
async function fetchScene(url, entity) {
const { pathname } = new URL(url);
const res = await qu.get(`https://teencoreclub.com${pathname}`);
if (res.ok) {
return scrapeScene(res.item, url, entity);
}
return res.status;
}
async function fetchProfile({ name: actorName }, { entity }, include) {
const res = await http.get(`https://teencoreclub.com/api/actors?query=${actorName}`);
if (res.ok) {
const actor = res.body.data.find((item) => slugify(item.name) === slugify(actorName));
if (actor) {
return scrapeProfile(actor, entity, include);
}
return null;
if (res.ok && res.data?.video) {
return scrapeScene(res.data.video, channel);
}
return res.status;
@@ -158,5 +87,4 @@ async function fetchProfile({ name: actorName }, { entity }, include) {
module.exports = {
fetchLatest,
fetchScene,
fetchProfile,
};

View File

@@ -30,6 +30,17 @@ function scrapeAll(scenes) {
});
}
async function fetchLatest(channel, page = 1) {
const url = `${channel.url}/${page}`;
const res = await unprint.get(url, { selectAll: '.scene' });
if (res.ok) {
return scrapeAll(res.context, channel);
}
return res.status;
}
function scrapeScene({ query }, { url }) {
const release = {};
@@ -62,17 +73,6 @@ function scrapeProfile({ query }) {
return profile;
}
async function fetchLatest(channel, page = 1) {
const url = `${channel.url}/${page}`;
const res = await unprint.get(url, { selectAll: '.scene' });
if (res.ok) {
return scrapeAll(res.context, channel);
}
return res.status;
}
async function fetchProfile({ name: actorName }, entity) {
const url = `${entity.url}/actors/${slugify(actorName, '_')}`;
const res = await unprint.get(url);

View File

@@ -502,6 +502,7 @@ async function fetchScene(url, channel, baseRelease, options) {
}
const res = await unprint.get(url, {
interface: 'request',
useBrowser: !!options.parameters?.useBrowser,
});

View File

@@ -70,6 +70,7 @@ async function curateReleaseEntry(release, batchId, existingRelease, type = 'sce
if (type === 'scene') {
curatedRelease.shoot_id = release.shootId || null;
curatedRelease.production_date = Number(release.productionDate) ? release.productionDate : null;
curatedRelease.production_date_precision = release.productionDatePrecision;
curatedRelease.duration = Math.round(release.duration) || null; // float may happen if scraper converts duration from milliseconds with a simple / 1000
curatedRelease.qualities = Array.from(new Set(release.qualities?.map(Number).filter(Boolean))).sort((qualityA, qualityB) => qualityB - qualityA);
}
@@ -89,6 +90,20 @@ async function curateReleaseEntry(release, batchId, existingRelease, type = 'sce
}
}
if (release.language) {
const curatedLanguage = release.language.toLowerCase();
const language = await knex('languages')
.where(knex.raw('lower(alpha2)'), curatedLanguage)
.orWhere(knex.raw('lower(name)'), curatedLanguage)
.orWhere(knex.raw('lower(name_native)'), curatedLanguage)
.first();
if (language) {
curatedRelease.language_alpha2 = language.alpha2;
}
}
if (!existingRelease && !release.id) {
curatedRelease.created_batch_id = batchId;
}
@@ -431,6 +446,7 @@ async function storeScenes(releases, useBatchId) {
const uniqueReleasesWithId = attachReleaseIds(uniqueReleases, storedReleaseEntries, batchId);
const duplicateReleasesWithId = attachReleaseIds(duplicateReleases, duplicateReleaseEntries, batchId);
const curatedDuplicateReleases = await Promise.all(duplicateReleasesWithId.map((release) => curateReleaseEntry(release, batchId)));
const releasesWithId = uniqueReleasesWithId.concat(duplicateReleasesWithId);
const updatedChunks = await Promise.all(chunk(curatedDuplicateReleases, 500).map(async (chunkedReleases) => knex.raw(`
@@ -442,13 +458,17 @@ async function storeScenes(releases, useBatchId) {
description = COALESCE(new.description, releases.description),
shoot_id = COALESCE(new.shoot_id, releases.shoot_id),
duration = COALESCE(new.duration, releases.duration),
production_date = COALESCE(new.production_date, releases.production_date),
production_date_precision = COALESCE(new.production_date_precision, releases.production_date_precision),
language_alpha2 = COALESCE(new.language_alpha2, releases.language_alpha2),
comment = COALESCE(new.comment, releases.comment),
attributes = COALESCE(new.attributes::jsonb || releases.attributes::jsonb, new.attributes::jsonb, releases.attributes::jsonb),
deep = new.url IS NOT NULL,
updated_at = NOW()
FROM json_to_recordset(:scenes)
AS new(id int, url text, date timestamptz, entity json, title text, description text, shoot_id text, duration integer, comment text, attributes json, deep boolean)
AS new(id int, url text, date timestamptz, entity json, title text, description text, shoot_id text, duration integer, production_date timestamptz, production_date_precision text, language_alpha2 text, comment text, attributes json, deep boolean)
WHERE releases.id = new.id
RETURNING releases.*
`, {
scenes: JSON.stringify(chunkedReleases),
})));

View File

@@ -4,6 +4,7 @@ const logger = require('./logger')(__filename);
const knex = require('./knex');
const { fetchEntityReleaseIds } = require('./entities');
const { updateSceneSearch } = require('./update-search');
const slugify = require('./utils/slugify');
const batchInsert = require('./utils/batch-insert');
@@ -79,7 +80,7 @@ function withRelations(queryBuilder, withMedia) {
async function matchTags(rawTags) {
const tags = rawTags
.map((tag) => tag?.trim().match(/[a-z0-9()]+/ig)?.join(' ').toLowerCase())
.map((tag) => tag?.trim().match(/[a-z0-9()-]+/ig)?.join(' ').toLowerCase())
.filter(Boolean);
const tagEntries = await knex('tags')
@@ -153,7 +154,7 @@ function buildReleaseTagAssociations(releases, tagIdsBySlug, entityTagIdsByEntit
async function associateReleaseTags(releases, type = 'release') {
if (releases.length === 0) {
return;
return {};
}
const tagIdsBySlug = await matchTags(releases.flatMap((release) => release.tags));
@@ -162,6 +163,18 @@ async function associateReleaseTags(releases, type = 'release') {
const tagAssociations = buildReleaseTagAssociations(releases, tagIdsBySlug, entityTagIdsByEntityId, type);
await batchInsert(`${type}s_tags`, tagAssociations, { conflict: false });
return tagAssociations.reduce((acc, association) => {
if (!acc[association.release_id]) {
acc[association.release_id] = [];
}
if (association.tag_id) {
acc[association.release_id].push(association.tag_id);
}
return acc;
}, {});
}
async function fetchTag(tagId) {
@@ -199,6 +212,8 @@ async function reassociateTagEntries(tagEntries, rematch) {
tag_id: matchedTags[slugify(tagEntry.original_tag)],
})).filter((tagEntry) => tagEntry.tag_id);
const sceneIds = Array.from(new Set(updatedTagEntries.map((tagEntry) => tagEntry.release_id))).filter(Boolean);
if (updatedTagEntries.length > 0) {
const trx = await knex.transaction();
@@ -212,10 +227,13 @@ async function reassociateTagEntries(tagEntries, rematch) {
})), {
conflict: false,
transaction: trx,
commit: true,
});
await updateSceneSearch(sceneIds);
}
logger.info(`Updated ${updatedTagEntries.length} tags in ${new Set(updatedTagEntries.map((tagEntry) => tagEntry.release_id)).size} scenes`);
logger.info(`Updated ${updatedTagEntries.length} tags in ${sceneIds.length} scenes`);
}
async function reassociateReleaseTags(rawSceneIds, rematch) {

136
src/tools/gamma_banners.js Normal file
View File

@@ -0,0 +1,136 @@
'use strict';
const unprint = require('unprint');
const fs = require('fs');
const { Readable } = require('stream');
const { pipeline } = require('stream/promises');
const knex = require('../knex');
const argv = require('../argv');
const slugify = require('../utils/slugify');
const apiUrl = 'https://vjoc5ygk89-dsn.algolia.net/1/indexes/*/queries?x-algolia-agent=Algolia%20for%20JavaScript%20(3.33.0)%3B%20Browser%20(lite)%3B%20react%20(16.8.6)%3B%20react-instantsearch%20(5.7.0)%3B%20JS%20Helper%20(2.28.1)&x-algolia-application-id=VJOC5YGK89&x-algolia-api-key=c5546bdfb4d3f31daf49ed3bb1463561';
// Queries the Gamma creatives Algolia index for banners belonging to the
// site given on the command line (argv.site), limited to jpg/png/gif
// Legacy/Internal program creatives. Returns the raw Algolia hit objects,
// or null when the request fails.
async function fetchBanners() {
const res = await unprint.post(
apiUrl,
{
requests: [
{
indexName: 'creatives',
// Algolia expects the query parameters URL-encoded in `params`.
params: new URLSearchParams({
hitsPerPage: 1000,
maxValuesPerFacet: 100,
page: 0,
filters: '(ProgramType:Legacy OR ProgramType:Internal) AND NOT OverlayActive:false',
facets: '["SceneActors","SceneCategories","ProgramName","Size","Niche","MediaExt","SiteTag","OverlayName"]',
facetFilters: `[["SiteTag:${argv.site}"],["MediaExt:jpg", "MediaExt:png", "MediaExt:gif"]]`,
}).toString(),
},
],
},
{
headers: {
'content-type': 'application/x-www-form-urlencoded',
referer: 'https://creatives.gammae.com/',
},
},
);
if (res.ok && res.data.results[0]) {
return res.data.results[0].hits;
}
console.error(`Failed API request (${res.status}): ${res.body}`);
return null;
}
// Matches raw tag names against the tags table, resolving aliases to their
// canonical tag. Returns canonical tag slugs, ordered by tag priority.
async function matchTags(rawTags) {
  if (!rawTags) {
    return [];
  }

  // Normalize: keep letters, digits, parentheses and hyphens, lowercase.
  // The hyphen is included to stay consistent with the release tag matcher,
  // which was updated to accept hyphenated tag names.
  const tags = rawTags
    .map((tag) => tag?.trim().match(/[a-z0-9()-]+/ig)?.join(' ').toLowerCase())
    .filter(Boolean);

  const tagEntries = await knex('tags')
    .select('tags.slug', 'aliases.slug as alias_slug')
    .whereIn(knex.raw('lower(tags.name)'), tags)
    .leftJoin('tags as aliases', 'aliases.id', 'tags.alias_for')
    .orderByRaw('CASE WHEN tags.alias_for IS NOT NULL THEN aliases.priority ELSE tags.priority END DESC');

  // Prefer the canonical (alias target) slug when the matched tag is an alias.
  return tagEntries.map((tagEntry) => tagEntry.alias_slug || tagEntry.slug);
}
// Downloads all matching banner creatives for the configured site into
// /tmp/gamma/<channel>/, naming files by channel, dimensions, media ID,
// and optionally actors (default on) and tags (opt-in via --tags).
async function init() {
  const banners = await fetchBanners();

  if (!banners) {
    return;
  }

  // Process sequentially to avoid hammering the CDN.
  await banners.reduce(async (chain, banner) => {
    await chain;

    const channel = slugify(banner.SiteTag, '');
    const url = unprint.prefixUrl(banner.MediaLocation || banner.CreativeURL, 'https://cdn.banhq.com');

    if (!url) {
      console.log('No URL found');
      console.log(banner);
      return;
    }

    const tags = await matchTags([
      ...banner.Tags?.map((tag) => tag.Value) || [],
      ...banner.SceneCategories || [],
      banner.Niche,
    ].filter(Boolean));

    const fileTags = tags.slice(0, 4).join('_');
    const fileActors = banner.SceneActors?.slice(0, 2).map((actor) => slugify(actor, '_')).join('_');

    // tags are unreliable and describe entire scene, not banner, don't include by default
    const segments = [channel, banner.Width, banner.Height, banner.MediaID, argv.actors?.[0] !== false && fileActors].filter(Boolean);
    const filename = `${segments.join('_')}${argv.tags ? `-${fileTags}` : ''}.${banner.MediaExt || 'jpg'}`; // was `argv.tags && argv.tags`, a redundant double-check
    const filepath = `/tmp/gamma/${channel}/${filename}`; // restores the filename interpolation (was mangled as `$(unknown)`)

    if (argv.inspect) {
      console.log(banner);
    }

    if (argv.preview) {
      console.log(`Preview ${url}: ${filepath}`);
      return;
    }

    await fs.promises.mkdir(`/tmp/gamma/${channel}`, { recursive: true });

    try {
      const res = await fetch(url);

      if (res.ok && res.body) {
        const writer = fs.createWriteStream(filepath);

        // Stream the web-standard response body into the file.
        await pipeline(Readable.fromWeb(res.body), writer);

        if (argv.actors) {
          console.log(`Saved ${url} to ${filepath}`);
        } else {
          console.log(`Saved ${url} to ${filepath}, actors ${banner.SceneActors?.join(', ') || ''}`);
        }
      } else {
        console.log(`Failed to fetch ${url} (${res.status})`);
      }
    } catch (error) {
      console.log(`Failed to fetch ${url}: ${error.message}`);
    }
  }, Promise.resolve());

  await knex.destroy();
}

init();

15
src/tools/huge-query.js Normal file
View File

@@ -0,0 +1,15 @@
'use strict';
const knex = require('../knex');
// Inserts 100k synthetic affiliate rows to exercise large-query handling.
async function init() {
  const data = Array.from({ length: 100_000 }, (value, index) => ({
    id: `test_affiliate_${index}`,
  }));

  await knex('affiliates').insert(data);

  console.log('Done!');

  // Release the connection pool so the process can exit; without this the
  // script hangs after completing.
  await knex.destroy();
}

init();

View File

@@ -41,7 +41,7 @@ async function fetchScenes() {
studios.name as studio_name,
grandparents.id as parent_network_id,
COALESCE(JSON_AGG(DISTINCT (actors.id, actors.name)) FILTER (WHERE actors.id IS NOT NULL), '[]') as actors,
COALESCE(JSON_AGG(DISTINCT (tags.id, tags.name, tags.priority, tags_aliases.name)) FILTER (WHERE tags.id IS NOT NULL), '[]') as tags,
COALESCE(JSON_AGG(DISTINCT (tags.id, tags.name, tags.priority, tags_aliases.name, local_tags.actor_id)) FILTER (WHERE tags.id IS NOT NULL), '[]') as tags,
COALESCE(JSON_AGG(DISTINCT (movies.id, movies.title)) FILTER (WHERE movies.id IS NOT NULL), '[]') as movies,
COALESCE(JSON_AGG(DISTINCT (series.id, series.title)) FILTER (WHERE series.id IS NOT NULL), '[]') as series,
COALESCE(JSON_AGG(DISTINCT (releases_fingerprints.hash)) FILTER (WHERE releases_fingerprints.hash IS NOT NULL), '[]') as fingerprints,
@@ -136,6 +136,14 @@ async function init() {
dupe_index int
)`);
await utilsApi.sql('drop table if exists scenes_tags');
await utilsApi.sql(`create table scenes_tags (
id int,
scene_id int,
tag_id int,
actor_id int
)`);
console.log('Recreated scenes table');
console.log('Fetching scenes from primary database');
@@ -143,12 +151,13 @@ async function init() {
console.log('Fetched scenes from primary database');
const docs = scenes.map((scene) => {
const docs = scenes.flatMap((scene) => {
const flatActors = scene.actors.flatMap((actor) => actor.f2.match(/[\w']+/g)); // match word characters to filter out brackets etc.
const flatTags = scene.tags.filter((tag) => tag.f3 > 6).flatMap((tag) => (tag.f4 ? `${tag.f2} ${tag.f4}` : tag.f2).match(/[\w']+/g)); // only make top tags searchable to minimize cluttered results
const filteredTitle = filterTitle(scene.title, [...flatActors, ...flatTags]);
return {
return [
{
replace: {
index: 'scenes',
id: scene.id,
@@ -185,7 +194,19 @@ async function init() {
dupe_index: scene.dupe_index || 0,
},
},
};
},
...scene.tags.map((tag) => ({
replace: {
index: 'scenes_tags',
// id: scene.id,
doc: {
scene_id: scene.id,
tag_id: tag.f1,
actor_id: tag.f5,
},
},
})),
];
});
// const accData = chunk(docs, 10000).reduce(async (chain, docsChunk, index, array) => {

View File

@@ -0,0 +1,88 @@
'use strict';
const config = require('config');
const manticore = require('manticoresearch');
const knex = require('../knex');
const chunk = require('../utils/chunk');
const mantiClient = new manticore.ApiClient();
mantiClient.basePath = `http://${config.database.manticore.host}:${config.database.manticore.httpPort}`;
const utilsApi = new manticore.UtilsApi(mantiClient);
const indexApi = new manticore.IndexApi(mantiClient);
// Rebuilds the `<domain>s_stashed` Manticore table from the primary
// database, pushing entries in sequential batches of 1000.
async function syncStashes(domain = 'scene') {
  await utilsApi.sql(`truncate table ${domain}s_stashed`);

  const stashes = await knex(`stashes_${domain}s`)
    .select(
      `stashes_${domain}s.id as stashed_id`,
      `stashes_${domain}s.${domain}_id`,
      'stashes.id as stash_id',
      'stashes.user_id as user_id',
      `stashes_${domain}s.created_at as created_at`,
    )
    .leftJoin('stashes', 'stashes.id', `stashes_${domain}s.stash_id`);

  let synced = 0;

  for (const batch of chunk(stashes, 1000)) {
    const docs = batch.map((entry) => ({
      replace: {
        index: `${domain}s_stashed`,
        id: entry.stashed_id,
        doc: {
          [`${domain}_id`]: entry[`${domain}_id`],
          stash_id: entry.stash_id,
          user_id: entry.user_id,
          created_at: Math.round(entry.created_at.getTime() / 1000),
        },
      },
    }));

    await indexApi.bulk(docs.map((doc) => JSON.stringify(doc)).join('\n'));

    synced += batch.length;
    console.log(`Synced ${synced}/${stashes.length} ${domain} stashes`);
  }
}
// Drops and recreates the per-domain stash tables in Manticore, then
// resyncs each domain from the primary database.
async function init() {
  // All three tables share the same shape apart from the ID column name.
  for (const domain of ['scene', 'movie', 'actor']) {
    await utilsApi.sql(`drop table if exists ${domain}s_stashed`);
    await utilsApi.sql(`create table if not exists ${domain}s_stashed (
      ${domain}_id int,
      stash_id int,
      user_id int,
      created_at timestamp
    )`);
  }

  await syncStashes('scene');
  await syncStashes('actor');
  await syncStashes('movie');

  console.log('Done!');

  // Await the teardown (was fire-and-forget) so failures surface.
  await knex.destroy();
}

init();

View File

@@ -1,655 +0,0 @@
'use strict';
const config = require('config');
const fs = require('fs');
const path = require('path');
const moment = require('moment');
const Promise = require('bluebird');
const bhttp = require('bhttp');
const { nanoid } = require('nanoid/non-secure');
const { Upload } = require('@aws-sdk/lib-storage');
const { S3Client } = require('@aws-sdk/client-s3');
const { graphql } = require('../web/graphql');
const knex = require('../knex');
const args = require('../argv');
const s3 = new S3Client({
region: 'eu-central-1',
endpoint: 'https://s3.eu-central-1.wasabisys.com',
credentials: {
accessKeyId: config.s3.accessKey,
secretAccessKey: config.s3.secretKey,
},
});
// NOT TRANSFERRED, unutilized on old server: production location, availabile qualities, actor alias for, actor entry id, chapter posters, chapter photos
const sceneFields = `
entryId
shootId
title
url
date
datePrecision
productionDate
description
duration
entity {
slug
type
}
studio {
slug
}
movies: moviesScenesBySceneId {
movie {
title
entryId
entity {
slug
type
}
}
}
actors: releasesActors {
actor {
name
slug
entryId
entity {
slug
type
}
}
}
directors: releasesDirectors {
director {
name
slug
entryId
entity {
slug
type
}
}
}
tags: releasesTags {
tag {
slug
}
}
chapters(orderBy: TIME_ASC) {
index
time
duration
title
description
tags: chaptersTags {
tag {
slug
}
}
}
poster: releasesPoster {
media {
hash
path
thumbnail
lazy
s3: isS3
mime
index
width
height
size
source
sourcePage
}
}
photos: releasesPhotos {
media {
hash
path
thumbnail
lazy
s3: isS3
mime
index
width
height
size
source
sourcePage
}
}
covers: releasesCovers {
media {
hash
path
thumbnail
lazy
s3: isS3
mime
index
width
height
size
source
sourcePage
}
}
trailer: releasesTrailer {
media {
hash
path
thumbnail
lazy
s3: isS3
mime
index
width
height
size
source
sourcePage
}
}
teaser: releasesTeaser {
media {
hash
path
thumbnail
lazy
s3: isS3
mime
index
width
height
size
source
sourcePage
}
}
createdAt
`;
const movieFields = `
entryId
title
url
date
datePrecision
entity {
slug
type
}
poster: moviesPoster {
media {
hash
path
thumbnail
lazy
s3: isS3
mime
index
width
height
size
source
sourcePage
}
}
covers: moviesCovers {
media {
hash
path
thumbnail
lazy
s3: isS3
mime
index
width
height
size
source
sourcePage
}
}
createdAt
`;
// Exports up to `limit` scenes and movies (offset by `start`) from the
// GraphQL API as newline-delimited JSON into an export-<range>-<timestamp>
// file in the working directory, then exits.
async function save() {
  const limit = args.limit || 1000;
  const offset = args.start || 0;

  // The scene and movie queries are independent; fetch them in parallel.
  const [{ releases }, { movies }] = await Promise.all([
    graphql(`
      query SearchScenes(
        $limit: Int = 20
        $offset: Int = 0
      ) {
        releases(
          first: $limit
          offset: $offset
          orderBy: DATE_DESC
        ) {
          ${sceneFields}
        }
      }
    `, {
      limit,
      offset,
    }, 'owner'),
    graphql(`
      query SearchScenes(
        $limit: Int = 20
        $offset: Int = 0
      ) {
        movies(
          first: $limit
          offset: $offset
          orderBy: DATE_DESC
        ) {
          ${movieFields}
        }
      }
    `, {
      limit,
      offset,
    }, 'owner'),
  ]);

  const filename = `export-${offset}-${offset + limit}-${moment().format('YYYY-MM-DD_hh_mm_ss')}.json`;

  let savedScenes = 0;
  let savedMovies = 0;

  // Append sequentially so NDJSON lines are never interleaved.
  await releases.reduce(async (chain, release) => {
    await chain;

    // Flatten GraphQL association wrappers into plain arrays/values.
    const entry = JSON.stringify({
      ...release,
      type: 'release',
      actors: release.actors.filter(Boolean).map(({ actor }) => actor),
      directors: release.directors.filter(Boolean).map(({ director }) => director),
      studio: release.studio?.slug,
      tags: release.tags.map(({ tag }) => tag?.slug).filter(Boolean),
      movies: release.movies?.map(({ movie }) => movie) || [],
      chapters: release.chapters.filter(Boolean).map((chapter) => ({
        ...chapter,
        tags: chapter.tags.map(({ tag }) => tag?.slug).filter(Boolean),
      })),
      poster: release.poster?.media,
      trailer: release.trailer?.media,
      teaser: release.teaser?.media,
      photos: release.photos.filter(Boolean).map(({ media }) => media),
      covers: release.covers.filter(Boolean).map(({ media }) => media),
    });

    await fs.promises.appendFile(filename, `${entry}\n`);

    savedScenes += 1;
  }, Promise.resolve());

  await movies.reduce(async (chain, movie) => {
    await chain;

    const entry = JSON.stringify({
      ...movie,
      type: 'movie',
      poster: movie.poster?.media,
      covers: movie.covers.filter(Boolean).map(({ media }) => media),
    });

    await fs.promises.appendFile(filename, `${entry}\n`);

    savedMovies += 1;
  }, Promise.resolve());

  // Restores the filename interpolation (was mangled as `$(unknown)`).
  console.log(`Saved ${savedScenes} scenes and ${savedMovies} movies to ${filename}`);

  process.exit();
}
// Associates a release with its matched tags, preserving the original tag
// string so it can be rematched later.
async function addReleaseTags(release, context) {
  if (release.tags.length === 0) {
    return;
  }

  const tagEntries = release.tags.map((tag) => ({
    tag_id: context.tagIdsBySlug[tag],
    release_id: release.id,
    original_tag: tag,
  }));

  await knex('releases_tags').insert(tagEntries);
}
// Inserts a new actor row for the given (optional) entity and returns the
// generated actor ID.
async function addNewActor(actor, entity, context) {
  const insertedRows = await knex('actors')
    .insert({
      name: actor.name,
      slug: actor.slug,
      entity_id: entity?.id,
      batch_id: context.batchId,
    })
    .returning('id');

  return insertedRows[0].id;
}
// Associates a release with its actors (or directors, via `target`),
// creating actor records that do not exist yet. Actors are matched by slug
// within their entity, or globally when they carry no entity.
async function addReleaseActors(release, context, target = 'actor') {
  for (const actor of release[`${target}s`]) {
    const entity = actor.entity
      ? await knex('entities').where(actor.entity).first()
      : null;

    if (actor.entity && !entity) {
      throw new Error(`Actor ${actor.slug} contains non-existent ${release.entity.type} '${release.entity.slug}'`);
    }

    const existingActor = await knex('actors')
      .where('slug', actor.slug)
      .where((builder) => {
        if (entity) {
          builder.where('entity_id', entity.id);
          return;
        }

        builder.whereNull('entity_id');
      })
      .first();

    const actorId = existingActor?.id || await addNewActor(actor, entity, context);

    await knex(`releases_${target}s`).insert({
      release_id: release.id,
      [`${target}_id`]: actorId,
    });
  }
}
// Directors reuse the actor association flow, targeting releases_directors.
async function addReleaseDirectors(release, context) {
return addReleaseActors(release, context, 'director');
}
// Inserts a release's chapters and their tag associations, sequentially.
// NOTE(review): `chapter.title` is fetched by the export query but is not
// inserted here — confirm whether the chapters table has a title column
// that should be populated.
async function addReleaseChapters(release, context) {
await release.chapters.reduce(async (chain, chapter) => {
await chain;
const [{ id: chapterId }] = await knex('chapters')
.insert({
release_id: release.id,
index: chapter.index,
time: chapter.time,
duration: chapter.duration,
description: chapter.description,
})
.returning('id');
// Keep the original tag string alongside the matched ID for rematching.
if (chapter.tags.length > 0) {
await knex('chapters_tags').insert(chapter.tags.map((tag) => ({
tag_id: context.tagIdsBySlug[tag],
chapter_id: chapterId,
original_tag: tag,
})));
}
}, Promise.resolve());
}
// Subdirectory per media variant: full-size at the target root, thumbnails
// under thumbs/, lazy-loading versions under lazy/.
const dirs = {
path: '',
thumbnail: 'thumbs',
lazy: 'lazy',
};
// Transfers all three variants of a media item from the old server to S3,
// downloading each to a local temp file first, then uploading and cleaning up.
// Variants are processed sequentially; missing sources are logged and skipped.
async function transferMedia(media, target) {
return ['path', 'thumbnail', 'lazy'].reduce(async (chain, type) => {
await chain;
const filename = `${media.hash}${path.extname(media[type])}`;
const filepath = path.join(target, dirs[type], filename);
const temp = path.join('media/temp', filepath);
// Source host depends on whether the media was already on S3 on the old server.
const url = new URL(media[type], `${media.s3 ? config.media.transferSources.s3 : config.media.transferSources.local}/`).href;
if (args.logLevel === 'debug') {
console.log('Transferring media', url);
}
const res = await bhttp.get(url, { stream: true });
if (res.statusCode !== 200) {
console.warn(`Missing ${target} ${url}`);
return;
}
await fs.promises.mkdir(path.dirname(temp), { recursive: true });
// Stream the response into the temp file before uploading.
await new Promise((resolve, reject) => {
const fileStream = fs.createWriteStream(temp);
res.pipe(fileStream);
res.on('error', () => { reject(); });
fileStream.on('finish', () => { resolve(); });
fileStream.on('error', () => { reject(); });
});
await new Upload({
client: s3,
params: {
Bucket: config.s3.bucket,
Body: fs.createReadStream(temp),
Key: filepath,
ContentType: media.mime,
},
}).done();
await fs.promises.unlink(temp);
}, Promise.resolve());
}
// Stores media items (posters, photos, covers) and associates them with a
// release. New media is inserted and transferred to S3; existing media
// (matched by hash or source URL) is only re-associated. Duplicate
// association conflicts are logged and ignored.
async function addReleaseMedia(medias, release, target) {
await medias.filter(Boolean).reduce(async (chain, media) => {
await chain;
const existingMedia = await knex('media')
.where('hash', media.hash)
.orWhere('source', media.source)
.first();
const id = existingMedia?.id || nanoid();
if (!existingMedia) {
await knex('media').insert({
id,
hash: media.hash,
path: path.join(target, '', `${media.hash}${path.extname(media.path)}`),
thumbnail: path.join(target, 'thumbs', `${media.hash}${path.extname(media.thumbnail)}`),
lazy: path.join(target, 'lazy', `${media.hash}${path.extname(media.lazy)}`),
// is_s3: media.s3,
is_s3: true,
index: media.index,
mime: media.mime,
size: media.size,
width: media.width,
height: media.height,
source: media.source,
source_page: media.sourcePage,
});
await transferMedia(media, target);
}
// Association may already exist (e.g. re-runs); swallow the unique
// violation and log it rather than aborting the import.
try {
await knex(`${release.type}s_${target}`).insert({
[`${release.type}_id`]: release.id,
media_id: id,
});
} catch (error) {
console.warn(`Ignored duplicate ${release.type} ${target} association ${media.hash} with ${release.id} "${release.title}"`);
}
}, Promise.resolve());
}
// Links a scene release to the movies it belongs to, resolving each linked
// movie by entry ID and entity against the previously imported movie set.
// Throws when a referenced movie is missing from the import.
async function linkMovieScenes(release, context) {
  for (const linkedMovie of release.movies) {
    const movie = context.movies.find((storedMovie) => storedMovie.entryId === linkedMovie.entryId
      && storedMovie.entity.slug === linkedMovie.entity.slug
      && storedMovie.entity.type === linkedMovie.entity.type);

    if (!movie) {
      throw new Error(`Missing ${linkedMovie.entity.slug} movie '${linkedMovie.title}' in '${release.title}'`);
    }

    await knex('movies_scenes').insert({
      movie_id: movie.id,
      scene_id: release.id,
    });
  }
}
// Imports a single release (scene or movie) with its media and, for scenes,
// tags/actors/directors/chapters/movie links. Returns the release extended
// with its database ID and entity name; releases already present for the
// same entity and entry ID are returned with `skipped: true`.
async function addRelease(release, context) {
  const existingRelease = await knex(`${release.type}s`)
    .select(`${release.type}s.*`, 'entities.name as entity_name')
    .leftJoin('entities', 'entities.id', `${release.type}s.entity_id`)
    .where('entry_id', release.entryId)
    .where('entities.slug', release.entity.slug)
    .where('entities.type', release.entity.type)
    .first();

  if (existingRelease) {
    console.log(`Skipping ${release.entity.slug} release "${release.title}", already in database`);

    return {
      ...release,
      skipped: true,
      id: existingRelease.id,
      entityName: existingRelease.entity_name,
    };
  }

  // Was wrapped in a single-element Promise.all; await the query directly.
  const entity = await knex('entities').select(['id', 'name']).where(release.entity).first();

  if (!entity) {
    throw new Error(`Release "${release.title}" contains non-existent ${release.entity.type} '${release.entity.slug}'`);
  }

  const [releaseEntry] = await knex(`${release.type}s`)
    .insert({
      entry_id: release.entryId,
      entity_id: entity.id,
      url: release.url,
      title: release.title,
      slug: release.slug,
      date: release.date,
      date_precision: release.datePrecision,
      created_batch_id: context.batchId,
      updated_batch_id: context.batchId,
      // Scene-only columns.
      ...(release.type === 'scene' && {
        shoot_id: release.shootId,
        studio_id: context.studioIdsBySlug[release.studio],
        production_date: release.productionDate,
        description: release.description,
        duration: release.duration,
      }),
    })
    .returning(['id', 'entry_id']);

  const releaseWithId = {
    ...release,
    id: releaseEntry.id,
    entityName: entity.name,
  };

  await addReleaseMedia([releaseWithId.poster], releaseWithId, 'posters', context);

  if (release.type === 'release') {
    await Promise.all([
      addReleaseTags(releaseWithId, context),
      addReleaseActors(releaseWithId, context),
      addReleaseDirectors(releaseWithId, context),
      addReleaseChapters(releaseWithId, context),
      linkMovieScenes(releaseWithId, context),
      addReleaseMedia(releaseWithId.photos, releaseWithId, 'photos', context),
    ]);
  }

  if (release.type === 'movie') {
    await addReleaseMedia(releaseWithId.covers, releaseWithId, 'covers', context);
  }

  return releaseWithId;
}
// Loads an NDJSON export file (produced by `save`) into the database,
// optionally filtered by --entity and windowed by --start/--limit.
// Movies are imported first so scenes can link to them.
async function load() {
  const file = await fs.promises.readFile(args.file, 'utf8');

  const start = args.start || 0;
  const end = args.limit ? start + args.limit : Infinity;

  const releases = file.split('\n')
    .filter(Boolean)
    .map((data) => JSON.parse(data))
    .filter((release) => (args.entity ? release.entity.slug === args.entity : true))
    .slice(start, end);

  if (releases.length === 0) {
    console.log('Nothing to load');
    return;
  }

  const [{ id: batchId }] = await knex('batches').insert({ comment: `import ${args.file}` }).returning('id');

  // Collect all referenced tag and studio slugs up front so they resolve to
  // IDs with two queries instead of one per release.
  const aggTags = Array.from(new Set(releases.filter((release) => release.type === 'release').flatMap((release) => [...release.tags, ...release.chapters.flatMap((chapter) => chapter.tags)]).filter(Boolean)));
  const aggStudios = Array.from(new Set(releases.map((release) => release.studio).filter(Boolean)));

  const tags = await knex('tags')
    .select('id', 'slug')
    .whereIn('slug', aggTags);

  const studios = await knex('entities')
    .select('id', 'slug')
    .where('type', 'studio')
    .whereIn('slug', aggStudios);

  const tagIdsBySlug = Object.fromEntries(tags.map((tag) => [tag.slug, tag.id]));
  const studioIdsBySlug = Object.fromEntries(studios.map((studio) => [studio.slug, studio.id]));

  const addedMovies = await releases.filter((release) => release.type === 'movie').reduce(async (chain, release, index, array) => {
    const acc = await chain;
    const movie = await addRelease(release, { batchId, tagIdsBySlug, studioIdsBySlug });

    if (!movie.skipped) {
      // index + 1 so progress reads 1/N..N/N instead of 0/N..N-1/N
      console.log(`Loaded ${index + 1}/${array.length} '${movie.entityName}' movie "${movie.title}"`);
    }

    return acc.concat(movie);
  }, Promise.resolve([]));

  const addedScenes = await releases.filter((release) => release.type === 'release').reduce(async (chain, release, index, array) => {
    const acc = await chain;
    const scene = await addRelease(release, { batchId, movies: addedMovies, tagIdsBySlug, studioIdsBySlug });

    if (!scene.skipped) {
      console.log(`Loaded ${index + 1}/${array.length} '${scene.entityName}' scene "${scene.title}"`);
    }

    // Was concat(!!scene && !scene.skipped), which accumulated booleans and
    // only counted correctly by accident; keep the scene itself so the
    // summary filter below works the same way as the movie path.
    return acc.concat(scene);
  }, Promise.resolve([]));

  console.log(`Loaded ${addedMovies.filter((movie) => movie && !movie.skipped).length}/${releases.filter((release) => release.type === 'movie').length} movies in batch ${batchId}`);
  console.log(`Loaded ${addedScenes.filter((scene) => scene && !scene.skipped).length}/${releases.filter((release) => release.type === 'release').length} scenes in batch ${batchId}`);

  process.exit();
}
// Dispatch the CLI subcommand (`save` or `load`). Previously an unknown
// command threw an opaque TypeError; fail with a clear message instead.
const command = ({
  save,
  load,
})[args._];

if (command) {
  command();
} else {
  console.error(`Unknown command '${args._}'; expected 'save' or 'load'`);
  process.exit(1);
}

View File

@@ -16,14 +16,14 @@ async function updateManticoreStashedScenes(docs) {
await chunk(docs, 1000).reduce(async (chain, docsChunk) => {
await chain;
const sceneIds = docsChunk.map((doc) => doc.replace.id);
const sceneIds = docsChunk.filter((doc) => !!doc.replace).map((doc) => doc.replace.id);
const stashes = await knex('stashes_scenes')
.select('stashes_scenes.id as stashed_id', 'stashes_scenes.scene_id', 'stashes_scenes.created_at', 'stashes.id as stash_id', 'stashes.user_id as user_id')
.leftJoin('stashes', 'stashes.id', 'stashes_scenes.stash_id')
.whereIn('scene_id', sceneIds);
const stashDocs = docsChunk.flatMap((doc) => {
const stashDocs = docsChunk.filter((doc) => doc.replace).flatMap((doc) => {
const sceneStashes = stashes.filter((stash) => stash.scene_id === doc.replace.id);
if (sceneStashes.length === 0) {
@@ -50,6 +50,25 @@ async function updateManticoreStashedScenes(docs) {
if (stashDocs.length > 0) {
await indexApi.bulk(stashDocs.map((doc) => JSON.stringify(doc)).join('\n'));
}
const deleteSceneIds = docs.filter((doc) => doc.delete).map((doc) => doc.delete.id);
if (deleteSceneIds.length > 0) {
await indexApi.callDelete({
index: 'scenes_stashed',
query: {
bool: {
must: [
{
in: {
scene_id: deleteSceneIds,
},
},
],
},
},
});
}
}, Promise.resolve());
}
@@ -128,9 +147,20 @@ async function updateManticoreSceneSearch(releaseIds) {
studios.showcased
`, releaseIds && [releaseIds]);
// console.log(scenes.rows);
const scenesById = Object.fromEntries(scenes.rows.map((scene) => [scene.id, scene]));
const docs = releaseIds.map((sceneId) => {
const scene = scenesById[sceneId];
if (!scene) {
return {
delete: {
index: 'scenes',
id: sceneId,
},
};
}
const docs = scenes.rows.map((scene) => {
const flatActors = scene.actors.flatMap((actor) => actor.f2.split(' '));
const flatTags = scene.tags.filter((tag) => tag.f3 > 6).flatMap((tag) => [tag.f2].concat(tag.f4)).filter(Boolean); // only make top tags searchable to minimize cluttered results
const filteredTitle = filterTitle(scene.title, [...flatActors, ...flatTags]);
@@ -291,7 +321,20 @@ async function updateManticoreMovieSearch(movieIds) {
movies_covers.*
`, movieIds && [movieIds]);
const docs = movies.rows.map((movie) => {
const moviesById = Object.fromEntries(movies.rows.map((movie) => [movie.id, movie]));
const docs = movieIds.map((movieId) => {
const movie = moviesById[movieId];
if (!movie) {
return {
delete: {
index: 'movies',
id: movieId,
},
};
}
const combinedTags = Object.values(Object.fromEntries(movie.tags.concat(movie.movie_tags).map((tag) => [tag.f1, {
id: tag.f1,
name: tag.f2,

View File

@@ -4,11 +4,12 @@ const knex = require('../knex');
const chunk = require('./chunk');
const logger = require('../logger')(__filename);
const chunkTarget = 50_000; // PostgreSQL allows 65,535 binding parameters, allow for a bit of margin
// improved version of bulkInsert
async function batchInsert(table, items, {
conflict = true,
update = false,
chunkSize = 1000,
concurrent = false,
transaction,
commit = false,
@@ -17,6 +18,10 @@ async function batchInsert(table, items, {
throw new Error('No table specified for batch insert');
}
if (conflict && update) {
throw new Error('Batch insert conflict must specify columns, or update must be disabled');
}
if (!Array.isArray(items)) {
throw new Error('Batch insert items are not an array');
}
@@ -25,8 +30,20 @@ async function batchInsert(table, items, {
return [];
}
const chunks = chunk(items, chunkSize);
// PostgreSQL's bindings limit applies to individual values, so item size needs to be taken into account
const itemSize = items.reduce((acc, item) => Math.max(acc, Object.keys(item).length), 0);
if (itemSize === 0) {
throw new Error('Batch insert items are empty');
}
const chunks = chunk(items, Math.floor(chunkTarget / itemSize));
const conflicts = [].concat(conflict).filter((column) => typeof column === 'string'); // conflict might be 'true'
if (conflicts.length > 0 && !update) {
throw new Error('Batch insert conflict columns must be specified together with update');
}
const trx = transaction || await knex.transaction();
try {
@@ -49,12 +66,6 @@ async function batchInsert(table, items, {
.onConflict(conflicts)
.merge();
}
throw new Error('Batch insert conflict columns must be specified together with update');
}
if (conflict && update) {
throw new Error('Batch insert conflict must specify columns, or update must be disabled');
}
// error on any conflict

View File

@@ -3,10 +3,11 @@
const config = require('config');
const Promise = require('bluebird');
const bhttp = require('bhttp');
const undici = require('undici');
const fs = require('fs').promises;
const util = require('util');
const stream = require('stream');
const tunnel = require('tunnel');
// const util = require('util');
// const stream = require('stream');
const { pipeline } = require('stream/promises');
const Bottleneck = require('bottleneck');
const { JSDOM, toughCookie } = require('jsdom');
const puppeteer = require('puppeteer-extra');
@@ -18,7 +19,7 @@ const logger = require('../logger')(__filename);
const virtualConsole = require('./virtual-console')(__filename);
const argv = require('../argv');
const pipeline = util.promisify(stream.pipeline);
// const pipeline = util.promisify(stream.pipeline);
const limiters = {
bypass: new Bottleneck({
@@ -47,13 +48,6 @@ const defaultOptions = {
},
};
const proxyAgent = tunnel.httpsOverHttp({
proxy: {
host: config.proxy.host,
port: config.proxy.port,
},
});
function useProxy(url) {
if (!config.proxy.enable) {
return false;
@@ -326,56 +320,78 @@ async function bypassCloudflareRequest(url, method, body, cloudflareBypass, opti
};
}
async function request(method = 'get', url, body, requestOptions = {}, limiter) {
const http = requestOptions.session || bhttp;
const defaultAgent = new undici.Agent({
allowH2: true,
connect: {
rejectUnauthorized: false,
},
});
const options = {
...requestOptions,
session: null,
};
const proxyAgent = new undici.ProxyAgent({
uri: `http://${config.proxy.host}:${config.proxy.port}`,
});
async function request(method = 'get', url, body, requestOptions = {}, limiter, redirects = 0) {
const withProxy = useProxy(url);
const withBrowserBypass = useBrowserBypass(url, options);
const withCloudflareBypass = useCloudflareBypass(url, options);
const withBrowserBypass = useBrowserBypass(url, requestOptions);
const withCloudflareBypass = useCloudflareBypass(url, requestOptions);
if (withProxy) {
options.agent = proxyAgent;
}
logger.debug(`${method.toUpperCase()} (${limiter._store.storeOptions.minTime}ms/${limiter._store.storeOptions.maxConcurrent}p${withProxy ? ' proxy' : ''}${withBrowserBypass || withCloudflareBypass ? ' bypass' : ''}) ${url}`);
logger.debug(`${redirects > 0 ? 'REDIRECT ' : ''}${method.toUpperCase()} (${limiter._store.storeOptions.minTime}ms/${limiter._store.storeOptions.maxConcurrent}p${withProxy ? ' proxy' : ''}${withBrowserBypass || withCloudflareBypass ? ' bypass' : ''}) ${url}`);
if (withBrowserBypass) {
if (method !== 'get') {
throw new Error('Browser bypass only supports GET');
}
return bypassBrowserRequest(url, options);
if (method !== 'get') throw new Error('Browser bypass only supports GET');
return bypassBrowserRequest(url, requestOptions);
}
if (withCloudflareBypass) {
return bypassCloudflareRequest(url, method, body, withCloudflareBypass, options);
return bypassCloudflareRequest(url, method, body, withCloudflareBypass, requestOptions);
}
const res = await (body
? http[method](url, body, options)
: http[method](url, options));
const headers = {
...requestOptions.headers,
};
const res = await undici.request(url, {
method: method.toUpperCase(),
headers,
body: body ?? null,
dispatcher: withProxy
? proxyAgent
: defaultAgent,
maxRedirections: 0, // handle manually
});
if (res.headers.location && redirects < 3) {
// Drain the body to free the socket before redirecting
await res.body.dump();
const nextUrl = new URL(res.headers.location, url).href;
return request(method, nextUrl, body, requestOptions, limiter, redirects + 1);
}
return res;
}
async function finalizeResult(res, options) {
async function finalizeResult(res, url, options) {
if (options.destination) {
// res.on('progress', (bytes, totalBytes) => logger.silly(`Downloaded ${Math.round((bytes / totalBytes) * 100)}% of ${url}`));
await pipeline(
res.body,
...(options.transforms || []),
options.destination,
);
await pipeline(res, ...(options.transforms || []), options.destination);
return {
statusCode: res.statusCode,
headers: res.headers,
status: res.statusCode,
ok: res.statusCode >= 200 && res.statusCode <= 299,
};
}
if (Buffer.isBuffer(res.body)) {
const html = res.body.toString();
const buffer = await res.body.arrayBuffer();
const html = Buffer.from(buffer).toString();
const window = options?.parse ? new JSDOM(html, { virtualConsole, ...options.extract }).window : null;
const pathname = new URL(res.request.url).pathname.replace(/\//g, '_');
const pathname = new URL(url).pathname.replace(/\//g, '_');
// allow window.close to be called after scraping is done, only for deep scrapes where the URL is known outside the scraper
if (window && /fetchScene|fetchMovie/.test(new Error().stack)) {
windows.set(pathname, window);
}
@@ -385,28 +401,19 @@ async function finalizeResult(res, options) {
}
return {
...res,
body: html,
html,
statusCode: res.statusCode,
status: res.statusCode,
headers: res.headers,
body: html,
html,
document: window?.document || null,
window,
ok: res.statusCode >= 200 && res.statusCode <= 299,
};
}
return {
...res,
body: res.body,
status: res.statusCode,
headers: res.headers,
ok: res.statusCode >= 200 && res.statusCode <= 299,
};
}
function getTimeout(options, url) {
return new Promise((resolve, reject, onCancel) => {
return new Promise((_resolve, reject, onCancel) => {
const timeout = setTimeout(() => {
logger.debug(`Canceled timed out request to ${url}`);
reject(new Error(`URL ${url} timed out`));
@@ -427,8 +434,8 @@ async function scheduleRequest(method = 'get', url, body, requestOptions = {}) {
...defaultOptions,
...requestOptions,
headers: {
...(requestOptions.includeDefaultHeaders === false ? {} : defaultOptions.headers),
...requestOptions.headers,
...Object.fromEntries(requestOptions.includeDefaultHeaders === false ? {} : Object.entries(defaultOptions.headers).map(([key, value]) => [key.toLowerCase(), value])),
...Object.fromEntries(Object.entries(requestOptions.headers || {}).map(([key, value]) => [key.toLowerCase(), value])),
},
responseTimeout: requestOptions.responseTimeout || requestOptions.timeout || defaultOptions.timeout,
stream: !!requestOptions.destination,
@@ -441,7 +448,7 @@ async function scheduleRequest(method = 'get', url, body, requestOptions = {}) {
timeout.cancel();
const curatedResult = await finalizeResult(result, options);
const curatedResult = await finalizeResult(result, url, options);
logger.silly(`Response ${curatedResult.status} for ${method.toUpperCase()} ${url}`);

View File

@@ -1,32 +0,0 @@
'use strict';
const { fetchActor, searchActors } = require('../actors');
// GET /api/actors/:actorId — respond with the actor or a 404 envelope.
async function fetchActorApi(req, res) {
  const actor = await fetchActor(req.params.actorId);

  if (!actor) {
    res.status(404).send({ actor: null });
    return;
  }

  res.send({ actor });
}

// GET /api/actors?query=… — search actors; without a query, hint at usage.
async function fetchActorsApi(req, res) {
  const query = req.query.query || req.query.q;

  if (!query) {
    res.send({ hint: 'specify a query or ID', actors: [] });
    return;
  }

  res.send({ actors: await searchActors(query, req.query.limit) });
}
module.exports = {
fetchActor: fetchActorApi,
fetchActors: fetchActorsApi,
};

View File

@@ -1,34 +0,0 @@
'use strict';
const { addAlert, removeAlert, updateNotifications, updateNotification } = require('../alerts');
// POST /api/alerts — create an alert for the session user, return its ID.
async function addAlertApi(req, res) {
  const id = await addAlert(req.body, req.session.user);
  res.send({ id });
}

// DELETE /api/alerts/:alertId — remove an alert; no response body.
async function removeAlertApi(req, res) {
  await removeAlert(req.params.alertId);
  res.status(204).send();
}

// PATCH notifications in bulk for the session user.
async function updateNotificationsApi(req, res) {
  await updateNotifications(req.body, req.session.user);
  res.status(204).send();
}

// PATCH a single notification for the session user.
async function updateNotificationApi(req, res) {
  const { notificationId } = req.params;

  await updateNotification(notificationId, req.body, req.session.user);
  res.status(204).send();
}
module.exports = {
addAlert: addAlertApi,
removeAlert: removeAlertApi,
updateNotifications: updateNotificationsApi,
updateNotification: updateNotificationApi,
};

View File

@@ -1,46 +0,0 @@
'use strict';
const { login, signup } = require('../auth');
const { fetchUser } = require('../users');
// POST /api/session — authenticate, persist the user on the session, echo it back.
async function loginApi(req, res) {
  req.session.user = await login(req.body);

  res.send(req.session.user);
}
// DELETE /api/session — destroy the session.
// Fix: the original fell through after sending 500 and also attempted to send
// 204, triggering "Cannot set headers after they are sent" on destroy errors.
async function logoutApi(req, res) {
  req.session.destroy((error) => {
    if (error) {
      res.status(500).send();
      return;
    }

    res.status(204).send();
  });
}
// GET /api/session — refresh and return the session user, or 401 when absent.
async function fetchMeApi(req, res) {
  if (!req.session.user) {
    res.status(401).send();
    return;
  }

  req.session.user = await fetchUser(req.session.user.id, false, req.session.user);
  res.send(req.session.user);
}

// POST /api/users — create an account and log the new user in immediately.
async function signupApi(req, res) {
  req.session.user = await signup(req.body);

  res.send(req.session.user);
}
module.exports = {
login: loginApi,
logout: logoutApi,
fetchMe: fetchMeApi,
signup: signupApi,
};

View File

@@ -1,29 +0,0 @@
'use strict';
const { fetchEntity, fetchEntities, searchEntities } = require('../entities');
// Fetch a single entity; `type` (from the route) takes precedence over ?type=.
async function fetchEntityApi(req, res, type) {
  const entity = await fetchEntity(req.params.entityId, type || req.query.type);

  if (!entity) {
    res.status(404).send({ entity: null });
    return;
  }

  res.send({ entity });
}

// List or search entities of the requested type.
async function fetchEntitiesApi(req, res, type) {
  const query = req.query.query || req.query.q;
  const entityType = type || req.query.type;

  if (query) {
    res.send({ entities: await searchEntities(query, entityType, req.query.limit) });
    return;
  }

  res.send({ entities: await fetchEntities(entityType, req.query.limit) });
}
module.exports = {
fetchEntity: fetchEntityApi,
fetchEntities: fetchEntitiesApi,
};

View File

@@ -1,22 +0,0 @@
'use strict';
const argv = require('../argv');
const logger = require('../logger')(__filename);
// Express error middleware: warn on every failure, dump the stack in debug
// mode, and translate known errors (carrying httpCode) into their status.
function errorHandler(error, req, res, _next) {
  logger.warn(`Failed to fulfill request to ${req.path}: ${error.message}`);

  if (argv.debug) {
    logger.error(error);
  }

  if (!error.httpCode) {
    // Unknown failure: don't leak internals to the client.
    res.status(500).send('Oops... our server messed up. We will be investigating this incident, our apologies for the inconvenience.');
    return;
  }

  res.status(error.httpCode).send(error.message);
}
module.exports = errorHandler;

View File

@@ -1,27 +0,0 @@
'use strict';
const config = require('config');
const { withPostGraphileContext } = require('postgraphile');
const { graphql } = require('graphql');
const initPg = require('./postgraphile');
const logger = require('../logger')(__filename);
// Run a GraphQL query against the PostGraphile schema under the given
// database role, throwing the first resolver error if any occurred.
async function query(graphqlQuery, params, role = 'query') {
  const pg = initPg(config.database[role]);

  return withPostGraphileContext(pg, async (context) => {
    const schema = await pg.getGraphQLSchema();

    // NOTE(review): positional graphql() arguments are deprecated in newer
    // graphql-js versions in favor of an options object — confirm the pinned version.
    const { data, errors } = await graphql(schema, graphqlQuery, null, context, params);

    if (errors && errors.length > 0) {
      logger.error(errors);
      throw errors[0];
    }

    return data;
  });
}
module.exports = { graphql: query };

View File

@@ -1,90 +0,0 @@
'use strict';
const { makeExtendSchemaPlugin, gql } = require('graphile-utils');
const moment = require('moment');
const { cmToFeetInches, cmToInches, kgToLbs } = require('../../utils/convert');
// Extends the Actor GraphQL type with stash status and derived/unit-converted
// fields. PostGraphile's @requires directive makes the listed columns (and the
// joined stash rows under parent['@stashes']) available to each resolver.
const schemaExtender = makeExtendSchemaPlugin((_build) => ({
  typeDefs: gql`
    enum Units {
      METRIC
      IMPERIAL
    }

    extend type Actor {
      isFavorited: Boolean @requires(columns: ["stashesActors"])
      isStashed(includeFavorites: Boolean = false): Boolean @requires(columns: ["stashesActors"])
      ageFromBirth: Int @requires(columns: ["dateOfBirth"])
      ageAtDeath: Int @requires(columns: ["dateOfBirth", "dateOfDeath"])
      height(units: Units): String @requires(columns: ["height"])
      weight(units: Units): String @requires(columns: ["weight"])
      penisLength(units: Units): String @requires(columns: ["penis_length"])
      penisGirth(units: Units): String @requires(columns: ["penis_girth"])
    }
  `,
  resolvers: {
    Actor: {
      // True when the actor is in the user's primary ("favorites") stash.
      // Returns null when stash rows were fetched without the `primary` column,
      // so "unknown" stays distinguishable from "not favorited".
      isFavorited(parent) {
        if (!parent['@stashes'] || (parent['@stashes'].length > 0 && typeof parent['@stashes'][0]['@stash'].primary === 'undefined')) {
          return null;
        }

        return parent['@stashes'].some(({ '@stash': stash }) => stash.primary);
      },
      // True when the actor is in any non-primary stash; with includeFavorites,
      // membership in any stash at all counts. Same null sentinel as above.
      isStashed(parent, args) {
        if (!parent['@stashes'] || (parent['@stashes'].length > 0 && typeof parent['@stashes'][0]['@stash'].primary === 'undefined')) {
          return null;
        }

        if (args.includeFavorites) {
          return parent['@stashes'].length > 0;
        }

        return parent['@stashes'].some(({ '@stash': stash }) => !stash.primary);
      },
      // Current age in whole years, or null when the birth date is unknown.
      ageFromBirth(parent, _args, _context, _info) {
        if (!parent.dateOfBirth) return null;

        return moment().diff(parent.dateOfBirth, 'years');
      },
      // Age at death in whole years. Assumes dateOfBirth is set whenever
      // dateOfDeath is — TODO confirm; a missing birth date diffs against "now".
      ageAtDeath(parent, _args, _context, _info) {
        if (!parent.dateOfDeath) return null;

        return moment(parent.dateOfDeath).diff(parent.dateOfBirth, 'years');
      },
      // Height as a string: centimeters by default, feet/inches for IMPERIAL.
      height(parent, args, _context, _info) {
        if (!parent.height) return null;

        if (args.units === 'IMPERIAL') {
          const { feet, inches } = cmToFeetInches(parent.height);
          return `${feet}' ${inches}"`;
        }

        return parent.height.toString();
      },
      // Weight as a string: kilograms by default, pounds for IMPERIAL.
      weight(parent, args, _context, _info) {
        if (!parent.weight) return null;

        return args.units === 'IMPERIAL'
          ? kgToLbs(parent.weight).toString()
          : parent.weight.toString();
      },
      penisLength(parent, args, _context, _info) {
        if (!parent.penisLength) return null;

        return args.units === 'IMPERIAL'
          ? (Math.round(cmToInches(parent.penisLength) * 4) / 4).toString() // round to nearest quarter inch
          : parent.penisLength.toString();
      },
      penisGirth(parent, args, _context, _info) {
        if (!parent.penisGirth) return null;

        return args.units === 'IMPERIAL'
          ? (Math.round(cmToInches(parent.penisGirth) * 4) / 4).toString() // round to nearest quarter inch
          : parent.penisGirth.toString();
      },
    },
  },
}));
module.exports = [schemaExtender];

View File

@@ -1,43 +0,0 @@
'use strict';
const config = require('config');
const { makeExtendSchemaPlugin, gql } = require('graphile-utils');
// Adds computed thumbnail dimensions to Media: the image is scaled down so its
// height matches config.media.thumbnailSize, preserving aspect ratio.
const schemaExtender = makeExtendSchemaPlugin((_build) => ({
  typeDefs: gql`
    extend type Media {
      thumbnailWidth: Int @requires(columns: ["width", "height"])
      thumbnailHeight: Int @requires(columns: ["height", "width"])
    }
  `,
  resolvers: {
    Media: {
      thumbnailWidth(parent, _args, _context, _info) {
        if (!parent.width || !parent.height) {
          return null;
        }

        // thumbnails aren't upscaled
        const scale = parent.height / config.media.thumbnailSize;
        return scale <= 1 ? parent.width : Math.round(parent.width / scale);
      },
      thumbnailHeight(parent, _args, _context, _info) {
        if (!parent.width || !parent.height) {
          return null;
        }

        // thumbnails aren't upscaled
        return Math.min(parent.height, config.media.thumbnailSize);
      },
    },
  },
}));
module.exports = [schemaExtender];

View File

@@ -1,13 +0,0 @@
'use strict';
const ActorPlugins = require('./actors');
const SitePlugins = require('./sites');
const ReleasePlugins = require('./releases');
const MediaPlugins = require('./media');
module.exports = {
ActorPlugins,
SitePlugins,
ReleasePlugins,
MediaPlugins,
};

View File

@@ -1,49 +0,0 @@
'use strict';
const { makeExtendSchemaPlugin, gql } = require('graphile-utils');
// True when the release is in the user's primary ("favorites") stash; null
// when stash rows are missing or lack the `primary` column (status unknown).
function isFavorited(parent) {
  const stashes = parent['@stashes'];

  if (!stashes || (stashes.length > 0 && typeof stashes[0]['@stash'].primary === 'undefined')) {
    return null;
  }

  return stashes.some((entry) => entry['@stash'].primary);
}
// True when the release is in any non-primary stash; with includeFavorites,
// membership in any stash counts. Null when stash data is absent/incomplete.
function isStashed(parent, args) {
  const stashes = parent['@stashes'];

  if (!stashes || (stashes.length > 0 && typeof stashes[0]['@stash'].primary === 'undefined')) {
    return null;
  }

  if (args.includeFavorites) {
    return stashes.length > 0;
  }

  return stashes.some((entry) => !entry['@stash'].primary);
}
const schemaExtender = makeExtendSchemaPlugin((_build) => ({
typeDefs: gql`
extend type Release {
isFavorited: Boolean @requires(columns: ["stashesScenesBySceneId"])
isStashed(includeFavorites: Boolean = false): Boolean @requires(columns: ["stashesScenesBySceneId"])
}
extend type Movie {
isFavorited: Boolean @requires(columns: ["stashesMovies"])
isStashed(includeFavorites: Boolean = false): Boolean @requires(columns: ["stashesMovies"])
}
`,
resolvers: {
Release: {
isFavorited,
isStashed,
},
Movie: {
isFavorited,
isStashed,
},
},
}));
module.exports = [schemaExtender];

View File

@@ -1,20 +0,0 @@
'use strict';
const { makeExtendSchemaPlugin, gql } = require('graphile-utils');
// Adds a computed `independent` flag to Site, derived from its JSON parameters.
const schemaExtender = makeExtendSchemaPlugin((_build) => ({
  typeDefs: gql`
    extend type Site {
      independent: Boolean @requires(columns: ["parameters"])
    }
  `,
  resolvers: {
    Site: {
      independent: (parent) => Boolean(parent.parameters?.independent),
    },
  },
}));

View File

@@ -1,56 +0,0 @@
'use strict';
/* eslint-disable arrow-body-style */
const config = require('config');
const { postgraphile } = require('postgraphile');
const PgConnectionFilterPlugin = require('postgraphile-plugin-connection-filter');
const PgSimplifyInflectorPlugin = require('@graphile-contrib/pg-simplify-inflector');
const PgOrderByRelatedPlugin = require('@graphile-contrib/pg-order-by-related');
const { ActorPlugins, SitePlugins, ReleasePlugins, MediaPlugins } = require('./plugins/plugins');
// Per-request Postgres settings: expose the session user ID to RLS/policies
// and cap statement runtime.
async function pgSettings(req) {
  const userId = req.session.user?.id || null; // undefined is passed as an empty string, avoid

  return {
    'user.id': userId,
    statement_timeout: config.database.timeout,
  };
}
// Builds the PostGraphile middleware for the given database credentials.
// Fixes: credentials are URL-encoded (passwords with '@', ':' or '/' broke the
// connection string), the port is configurable instead of hard-coded to 5432,
// and the stray fourth argument to postgraphile() — which its three-argument
// signature ignored — is removed.
function initPostgraphile(credentials) {
  const connectionString = `postgres://${encodeURIComponent(credentials.user)}:${encodeURIComponent(credentials.password)}@${credentials.host}:${credentials.port || 5432}/${credentials.database}`;

  return postgraphile(
    connectionString,
    'public',
    {
      // watchPg: true,
      disableDefaultMutations: true,
      dynamicJson: true,
      graphiql: config.database.graphiql,
      enhanceGraphiql: true,
      allowExplain: () => true,
      // simpleCollections: 'only',
      simpleCollections: 'both',
      graphileBuildOptions: {
        pgOmitListSuffix: true,
        // connectionFilterUseListInflectors: true,
        connectionFilterRelations: true,
        connectionFilterAllowNullInput: true,
      },
      appendPlugins: [
        PgSimplifyInflectorPlugin,
        PgConnectionFilterPlugin,
        PgOrderByRelatedPlugin,
        ...ActorPlugins,
        ...SitePlugins,
        ...ReleasePlugins,
        ...MediaPlugins,
      ],
      pgSettings,
    },
  );
}
module.exports = initPostgraphile;

View File

@@ -1,48 +0,0 @@
'use strict';
const config = require('config');
const path = require('path');
const { fetchScene, fetchScenes, searchScenes } = require('../releases');
// GET /api/scenes/:releaseId — respond with the scene or a 404 envelope.
async function fetchSceneApi(req, res) {
  const release = await fetchScene(req.params.releaseId);

  if (!release) {
    res.status(404).send({ scene: null });
    return;
  }

  res.send({ scene: release });
}

// GET /api/scenes — search when a query is given, otherwise list recent scenes.
async function fetchScenesApi(req, res) {
  const query = req.query.query || req.query.q;
  const limit = req.query.limit && Number(req.query.limit);
  const relevance = req.query.relevance && Number(req.query.relevance);

  if (query) {
    res.send({ scenes: await searchScenes(query, limit, relevance) });
    return;
  }

  res.send({ scenes: await fetchScenes(req.query.limit) });
}

// GET /api/scenes/:releaseId/poster — stream the poster file, 404 when absent.
async function fetchScenePosterApi(req, res) {
  const scene = await fetchScene(req.params.releaseId);
  const posterPath = scene?.poster?.path;

  if (!posterPath) {
    res.status(404).send();
    return;
  }

  res.sendFile(path.resolve(config.media.path, posterPath));
}
module.exports = {
fetchScene: fetchSceneApi,
fetchScenes: fetchScenesApi,
fetchScenePoster: fetchScenePosterApi,
};

View File

@@ -1,174 +0,0 @@
'use strict';
const path = require('path');
const config = require('config');
const express = require('express');
const Router = require('express-promise-router');
const bodyParser = require('body-parser');
const session = require('express-session');
const KnexSessionStore = require('connect-session-knex')(session);
const { nanoid } = require('nanoid');
const logger = require('../logger')(__filename);
const knex = require('../knex');
const errorHandler = require('./error');
const initPg = require('./postgraphile');
const {
login,
logout,
signup,
fetchMe,
} = require('./auth');
const {
fetchScene,
fetchScenes,
fetchScenePoster,
} = require('./releases');
const {
fetchActor,
fetchActors,
} = require('./actors');
const {
fetchEntity,
fetchEntities,
} = require('./entities');
const {
fetchTag,
fetchTags,
} = require('./tags');
const {
createStash,
removeStash,
stashActor,
stashScene,
stashMovie,
unstashActor,
unstashScene,
unstashMovie,
updateStash,
} = require('./stashes');
const {
addAlert,
removeAlert,
updateNotifications,
updateNotification,
} = require('./alerts');
// Resolve the client IP, preferring the first hop of X-Forwarded-For when the
// app sits behind a proxy. See src/ws for the companion logic.
// Fixes: trims whitespace around forwarded entries, and prefers req.socket
// over the long-deprecated req.connection alias (kept as a fallback).
function getIp(req) {
  const forwarded = req.headers['x-forwarded-for'];

  if (forwarded) {
    return forwarded.split(',')[0].trim();
  }

  return (req.socket || req.connection).remoteAddress;
}
// Boots the Express web server: static assets, knex-backed sessions,
// PostGraphile, the REST-ish JSON API, and the catch-all SPA renderer.
// Fix: Node's HTTP parser lowercases incoming header names, so the original
// lookup req.headers['CF-IPCountry'] could never match; use the lowercase key.
async function initServer() {
  const app = express();
  const router = Router();

  // Persist sessions in the application database.
  const store = new KnexSessionStore({ knex });

  app.set('view engine', 'ejs');
  app.disable('x-powered-by');

  // Static media and public assets are served before session middleware runs.
  router.use('/media', express.static(config.media.path));
  router.use(express.static('public'));

  // Explicit 404 for /img so missing images don't fall through to the SPA.
  router.use('/img', (_req, res) => {
    res.status(404).send();
  });

  router.use(bodyParser.json({ strict: false }));
  router.use(session({ ...config.web.session, store }));
  router.use(initPg(config.database.query));

  // Give every session a public-safe ID that can be exposed to the client.
  router.use((req, _res, next) => {
    req.session.safeId = req.session.safeId || nanoid();
    next();
  });

  // Request logging with the proxied client IP, Cloudflare country and user.
  router.use((req, _res, next) => {
    const ip = getIp(req);

    logger.silly(`${ip} (${req.headers['cf-ipcountry'] || 'country N/A'}) requested ${req.originalUrl} as ${req.session.user ? `${req.session.user.username} (${req.session.user.id})` : 'guest'}`);
    next();
  });

  // Session and account management.
  router.get('/api/session', fetchMe);
  router.post('/api/session', login);
  router.delete('/api/session', logout);

  router.post('/api/users', signup);

  router.patch('/api/users/:userId/notifications', updateNotifications);
  router.patch('/api/users/:userId/notifications/:notificationId', updateNotification);

  // Stashes (user collections) and their members.
  router.post('/api/stashes', createStash);
  router.patch('/api/stashes/:stashId', updateStash);
  router.delete('/api/stashes/:stashId', removeStash);

  router.post('/api/stashes/:stashId/actors', stashActor);
  router.post('/api/stashes/:stashId/scenes', stashScene);
  router.post('/api/stashes/:stashId/movies', stashMovie);

  router.delete('/api/stashes/:stashId/actors/:actorId', unstashActor);
  router.delete('/api/stashes/:stashId/scenes/:sceneId', unstashScene);
  router.delete('/api/stashes/:stashId/movies/:movieId', unstashMovie);

  router.post('/api/alerts', addAlert);
  router.delete('/api/alerts/:alertId', removeAlert);

  // Read-only catalog endpoints.
  router.get('/api/scenes', fetchScenes);
  router.get('/api/scenes/:releaseId', fetchScene);
  router.get('/api/scenes/:releaseId/poster', fetchScenePoster);

  // router.get('/api/movies', fetchMovies);
  // router.get('/api/movies/:releaseId', fetchMovie);

  router.get('/api/actors', fetchActors);
  router.get('/api/actors/:actorId', fetchActor);

  // Entity routes share one handler pair, specialized by type.
  router.get('/api/entities', async (req, res) => fetchEntities(req, res, null));
  router.get('/api/entities/:entityId', async (req, res) => fetchEntity(req, res, null));

  router.get('/api/channels', async (req, res) => fetchEntities(req, res, 'channel'));
  router.get('/api/channels/:entityId', async (req, res) => fetchEntity(req, res, 'channel'));

  router.get('/api/networks', async (req, res) => fetchEntities(req, res, 'network'));
  router.get('/api/networks/:entityId', async (req, res) => fetchEntity(req, res, 'network'));

  router.get('/api/studios', async (req, res) => fetchEntities(req, res, 'studio'));
  router.get('/api/studios/:entityId', async (req, res) => fetchEntity(req, res, 'studio'));

  router.get('/api/tags', fetchTags);
  router.get('/api/tags/:tagId', fetchTag);

  // Catch-all: render the SPA shell with per-session environment data.
  router.get('*', (req, res) => {
    res.render(path.join(__dirname, '../../assets/index.ejs'), {
      analytics: config.analytics,
      env: JSON.stringify({
        sfw: !!req.headers.sfw || Object.prototype.hasOwnProperty.call(req.query, 'sfw'),
        login: config.auth.login,
        signup: config.auth.signup,
        sessionId: req.session.safeId,
      }),
    });
  });

  router.use(errorHandler);
  app.use(router);

  const server = app.listen(config.web.port, config.web.host, () => {
    const { address, port } = server.address();
    logger.info(`Web server listening on ${address}:${port}`);
  });
}
module.exports = initServer;

View File

@@ -1,26 +0,0 @@
'use strict';
const { fetchSites, fetchSitesFromReleases } = require('../sites');
// GET /api/sites/:siteId — look up sites by numeric ID or by slug.
// Fix: Express route params are always strings, so the original
// `typeof req.params.siteId === 'number'` branch could never match and every
// lookup went through the slug. Detect numeric IDs from the string instead.
async function fetchSitesApi(req, res) {
  const rawId = req.params.siteId;
  const isNumericId = /^\d+$/.test(rawId);

  const sites = await fetchSites({
    id: isNumericId ? Number(rawId) : undefined,
    slug: isNumericId ? undefined : rawId,
  });

  res.send(sites);
}
// List all sites that have releases; the request carries no parameters.
async function fetchSitesFromReleasesApi(_req, res) {
  res.send(await fetchSitesFromReleases());
}
module.exports = {
fetchSites: fetchSitesApi,
fetchSitesFromReleases: fetchSitesFromReleasesApi,
};

View File

@@ -1,79 +0,0 @@
'use strict';
const {
createStash,
removeStash,
stashActor,
stashScene,
stashMovie,
unstashActor,
unstashScene,
unstashMovie,
updateStash,
} = require('../stashes');
// POST /api/stashes — create a stash owned by the session user.
async function createStashApi(req, res) {
  res.send(await createStash(req.body, req.session.user));
}

// PATCH /api/stashes/:stashId — update a stash owned by the session user.
async function updateStashApi(req, res) {
  res.send(await updateStash(req.params.stashId, req.body, req.session.user));
}

// DELETE /api/stashes/:stashId — remove a stash; no response body.
async function removeStashApi(req, res) {
  await removeStash(req.params.stashId, req.session.user);
  res.status(204).send();
}

// The stash/unstash handlers all respond with the updated stash list.
async function stashActorApi(req, res) {
  res.send(await stashActor(req.body.actorId, Number(req.params.stashId), req.session.user));
}

async function stashSceneApi(req, res) {
  res.send(await stashScene(req.body.sceneId, Number(req.params.stashId), req.session.user));
}

async function stashMovieApi(req, res) {
  res.send(await stashMovie(req.body.movieId, Number(req.params.stashId), req.session.user));
}

async function unstashActorApi(req, res) {
  res.send(await unstashActor(Number(req.params.actorId), Number(req.params.stashId), req.session.user));
}

async function unstashSceneApi(req, res) {
  res.send(await unstashScene(Number(req.params.sceneId), Number(req.params.stashId), req.session.user));
}

async function unstashMovieApi(req, res) {
  res.send(await unstashMovie(Number(req.params.movieId), Number(req.params.stashId), req.session.user));
}
module.exports = {
createStash: createStashApi,
removeStash: removeStashApi,
stashActor: stashActorApi,
stashScene: stashSceneApi,
stashMovie: stashMovieApi,
unstashActor: unstashActorApi,
unstashScene: unstashSceneApi,
unstashMovie: unstashMovieApi,
updateStash: updateStashApi,
};

View File

@@ -1,25 +0,0 @@
'use strict';
const { fetchTag, fetchTags } = require('../tags');
// GET /api/tags/:tagId — respond with the tag or a 404 envelope.
async function fetchTagApi(req, res) {
  const tag = await fetchTag(req.params.tagId);

  if (!tag) {
    res.status(404).send({ tag: null });
    return;
  }

  res.send({ tag });
}

// GET /api/tags — list tags, optionally capped by ?limit=.
async function fetchTagsApi(req, res) {
  res.send({ tags: await fetchTags(req.query.limit) });
}
module.exports = {
fetchTag: fetchTagApi,
fetchTags: fetchTagsApi,
};

View File

@@ -145,7 +145,7 @@ const actors = [
// perv city
{ entity: 'pervcity', name: 'Brooklyn Gray', fields: ['avatar', 'description', 'dateOfBirth', 'birthPlace', 'ethnicity', 'height', 'weight', 'eyes', 'hairColor'] },
{ entity: 'dpdiva', name: 'Liz Jordan', fields: ['avatar', 'description', 'dateOfBirth', 'birthPlace', 'ethnicity', 'height', 'weight', 'eyes', 'hairColor'] },
// { entity: 'bamvisions', name: 'Abella Danger', fields: ['avatar', 'height', 'measurements'] }, // site offline as of 2026-02-25
{ entity: 'bamvisions', name: 'Abella Danger', fields: ['avatar', 'height', 'measurements'] }, // site offline as of 2026-02-25
// radical
{ entity: 'bjraw', name: 'Nikki Knightly', fields: ['avatar', 'description', 'gender', 'dateOfBirth', 'birthPlace', 'measurements', 'height', 'weight', 'eyes', 'hairColor'] },
{ entity: 'gotfilled', name: 'Alexa Chains', fields: ['avatar', 'description', 'gender', 'dateOfBirth', 'birthPlace', 'measurements', 'height', 'weight', 'eyes', 'hairColor'] },
@@ -153,6 +153,7 @@ const actors = [
{ entity: 'topwebmodels', name: 'Lexi Belle', fields: ['avatar', 'dateOfBirth', 'birthPlace', 'measurements', 'height', 'weight', 'eyes', 'hairColor'] },
{ entity: 'purgatoryx', name: 'Kenzie Reeves', fields: ['avatar', 'description', 'gender', 'dateOfBirth', 'birthPlace', 'measurements', 'height', 'weight', 'eyes', 'hairColor'] },
{ entity: 'lucidflix', name: 'Ava Amira', fields: ['avatar', 'description', 'gender'] },
{ entity: 'hardwerk', name: 'Luna Silver', fields: ['avatar', 'gender'] },
// wankz
{ entity: 'wankzvr', name: 'Melody Marks', fields: ['avatar', 'gender', 'description', 'birthPlace', 'height', 'measurements', 'age'] },
{ entity: 'milfvr', name: 'Ember Snow', fields: ['avatar', 'gender', 'description', 'measurements', 'birthPlace', 'height', 'age'] },
@@ -212,7 +213,7 @@ const actors = [
{ entity: 'naughtyamerica', name: 'Nicole Aniston', fields: ['avatar', 'description'] },
{ entity: 'tonightsgirlfriend', name: 'Abella Danger', fields: ['avatar'] },
// jules jordan scraper
{ entity: 'julesjordan', name: 'Vanna Bardot', fields: ['height', 'dateOfBirth', 'measurements', 'description', 'avatar'] },
{ entity: 'julesjordan', name: 'Vanna Bardot', fields: ['height', 'dateOfBirth', 'measurements', 'avatar'] },
{ entity: 'amateurallure', name: 'Ava Amira', fields: ['avatar', 'description'] },
{ entity: 'swallowsalon', name: 'Abella Danger', fields: ['avatar'] },
// exploitedx
@@ -263,6 +264,7 @@ const actors = [
{ entity: 'theflourishxxx', name: 'XWifeKaren', fields: ['avatar', 'description'] },
{ entity: 'tokyohot', name: 'Mai Kawana', url: 'https://my.tokyo-hot.com/cast/2099/', fields: ['avatar', 'birthPlace', 'height', 'cup', 'bust', 'waist', 'hip', 'hairStyle', 'shoeSize', 'bloodType'] },
{ entity: 'wakeupnfuck', name: 'Abby Lee Brazil', fields: ['avatar', 'nationality'] },
{ entity: 'darkkotv', name: 'Aidra Fox', fields: ['avatar', 'description', 'dateOfBirth', 'birthPlace', 'ethnicity', 'height', 'weight', 'measurements', 'naturalBoobs', 'hasTattoos', 'hasPiercings'] },
];
const actorScrapers = scrapers.actors;