Updated all dependencies. Updated MindGeek to Aylo.

DebaucheryLibrarian
2023-11-30 03:12:47 +01:00
parent 90aa29d2d4
commit 98bf7ea9dd
83 changed files with 9071 additions and 23082 deletions

View File

@@ -11,6 +11,7 @@
"no-unused-vars": ["error", {"argsIgnorePattern": "^_"}],
"no-console": 0,
"no-underscore-dangle": 0,
"default-param-last": 0,
"prefer-destructuring": "off",
"template-curly-spacing": "off",
"object-curly-newline": "off",

View File

@@ -897,7 +897,7 @@ async function scrapeActors(argNames) {
const newBaseActors = baseActors.filter((baseActor) => !existingActorEntriesBySlugAndEntryId[baseActor.slug]?.[baseActor.entryId]);
-const [batchId] = newBaseActors.length > 0 ? await knex('batches').insert({ comment: null }).returning('id') : [null];
+const [{ id: batchId }] = newBaseActors.length > 0 ? await knex('batches').insert({ comment: null }).returning('id') : [{ id: null }];
const curatedActorEntries = batchId && curateActorEntries(newBaseActors, batchId);
// TODO: associate entity when entry ID is provided
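
Note on this hunk and the many identical ones below: as of Knex 1.0, `.returning()` (with the pg driver) resolves to an array of row objects rather than an array of bare values, hence the `[batchId]` -> `[{ id: batchId }]` destructuring throughout this commit. A minimal sketch of the before/after, assuming a `batches` table with a serial `id` column:

const knex = require('knex')({ client: 'pg', connection: process.env.DATABASE_URL });

async function createBatch() {
  // Knex < 1.0 resolved .returning('id') to bare values: [123]
  // const [batchId] = await knex('batches').insert({ comment: null }).returning('id');

  // Knex >= 1.0 resolves it to row objects: [{ id: 123 }]
  const [{ id: batchId }] = await knex('batches').insert({ comment: null }).returning('id');
  return batchId;
}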

View File

@@ -19,7 +19,7 @@ async function addAlert(alert, sessionUser) {
throw new HttpError('Match must define a property and an expression', 400);
}
-const [alertId] = await knex('alerts')
+const [{ id: alertId }] = await knex('alerts')
.insert({
user_id: sessionUser.id,
notify: alert.notify,

View File

@@ -85,7 +85,7 @@ async function signup(credentials) {
const hashedPassword = (await scrypt(credentials.password, salt, 64)).toString('hex');
const storedPassword = `${salt}/${hashedPassword}`;
-const [userId] = await knex('users')
+const [{ id: userId }] = await knex('users')
.insert({
username: curatedUsername,
email: credentials.email,

View File

@@ -14,7 +14,9 @@ const ffmpeg = require('fluent-ffmpeg');
const sharp = require('sharp');
const blake2 = require('blake2');
const taskQueue = require('promise-task-queue');
-const AWS = require('aws-sdk');
+const { Upload } = require('@aws-sdk/lib-storage');
+const { S3Client } = require('@aws-sdk/client-s3');
const logger = require('./logger')(__filename);
const argv = require('./argv');
@@ -27,11 +29,9 @@ const { get } = require('./utils/qu');
// const pipeline = util.promisify(stream.pipeline);
const streamQueue = taskQueue();
-const endpoint = new AWS.Endpoint('s3.eu-central-1.wasabisys.com');
-const s3 = new AWS.S3({
-// region: 'eu-central-1',
-endpoint,
+const s3 = new S3Client({
+region: 'eu-central-1',
+endpoint: 'https://s3.eu-central-1.wasabisys.com',
credentials: {
accessKeyId: config.s3.accessKey,
secretAccessKey: config.s3.secretKey,
@@ -391,12 +391,15 @@ async function storeS3Object(filepath, media) {
const fullFilepath = path.join(config.media.path, filepath);
const file = fs.createReadStream(fullFilepath);
-const status = await s3.upload({
-Bucket: config.s3.bucket,
-Body: file,
-Key: filepath,
-ContentType: media.meta.mimetype,
-}).promise();
+const status = await new Upload({
+client: s3,
+params: {
+Bucket: config.s3.bucket,
+Body: file,
+Key: filepath,
+ContentType: media.meta.mimetype,
+},
+}).done();
await fsPromises.unlink(fullFilepath);
@@ -992,7 +995,7 @@ async function deleteS3Objects(media) {
Objects: objects,
Quiet: false,
},
-}).promise();
+});
logger.info(`Removed ${status.Deleted.length} media files from S3 bucket '${config.s3.bucket}', ${status.Errors.length} errors`);
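
The changes in this file follow the aws-sdk v2 -> v3 migration: the monolithic `AWS.S3` client becomes the modular `S3Client`, streaming uploads go through `Upload` from `@aws-sdk/lib-storage` (which manages multipart uploads for stream bodies), and the `.promise()` suffix disappears because v3 commands are dispatched with `client.send()` and already return promises. A sketch of the full pattern; the `DeleteObjectsCommand` wrapper is an assumption, since the hunk above only shows the tail of that call:

const fs = require('fs');
const { S3Client, DeleteObjectsCommand } = require('@aws-sdk/client-s3');
const { Upload } = require('@aws-sdk/lib-storage');

const s3 = new S3Client({
  region: 'eu-central-1',
  endpoint: 'https://s3.eu-central-1.wasabisys.com', // v3 accepts a URL string directly
  credentials: { accessKeyId: 'KEY', secretAccessKey: 'SECRET' }, // placeholders
});

async function storeObject(filepath, key, mimetype, bucket) {
  // v2: await s3.upload({ Bucket, Body, Key, ContentType }).promise();
  // v3: lib-storage's Upload handles stream bodies of unknown length via multipart upload.
  await new Upload({
    client: s3,
    params: { Bucket: bucket, Body: fs.createReadStream(filepath), Key: key, ContentType: mimetype },
  }).done();
}

async function deleteObjects(keys, bucket) {
  // v2: await s3.deleteObjects({ ... }).promise();
  // v3 (assumed shape of the call whose closing lines appear above):
  const status = await s3.send(new DeleteObjectsCommand({
    Bucket: bucket,
    Delete: { Objects: keys.map((Key) => ({ Key })), Quiet: false },
  }));
  return status; // exposes status.Deleted and status.Errors, as logged above
}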

View File

@@ -224,13 +224,15 @@ async function getSession(site, parameters, url) {
session,
headers: {
'Accept-Language': 'en-US,en;', // somehow seems essential for some MG sites
+Connection: 'keep-alive',
+'User-Agent': 'HTTPie/3.2.1',
},
interval: parameters?.interval,
concurrency: parameters?.concurrency,
parse: false,
});
-if (res.statusCode === 200) {
+if (res.status === 200) {
const cookieString = await cookieJar.getCookieStringAsync(sessionUrl);
const { instance_token: instanceToken } = cookie.parse(cookieString);
@@ -242,7 +244,7 @@ async function getSession(site, parameters, url) {
throw new Error(`Failed to acquire MindGeek session (${res.statusCode})`);
}
-function scrapeProfile(data, releases = [], networkName) {
+function scrapeProfile(data, networkName, releases = []) {
const profile = {
description: data.bio,
aliases: data.aliases.filter(Boolean),
@@ -297,7 +299,7 @@ async function fetchLatest(site, page = 1, options) {
return null;
}
-const { session, instanceToken } = options.beforeNetwork?.headers?.Instance
+const { instanceToken } = options.beforeNetwork?.headers?.Instance
? options.beforeNetwork
: await getSession(site, options.parameters, url);
@@ -308,7 +310,6 @@ async function fetchLatest(site, page = 1, options) {
: `https://site-api.project1service.com/v2/releases?collectionId=${siteId}&dateReleased=<${beforeDate}&limit=${limit}&offset=${limit * (page - 1)}&orderBy=-dateReleased&type=scene`;
const res = await http.get(apiUrl, {
-session,
interval: options.parameters.interval,
concurrency: options.parameters.concurrency,
headers: {
@@ -409,10 +410,10 @@ async function fetchProfile({ name: actorName }, { entity, parameters }, include
});
if (actorReleasesRes.statusCode === 200 && actorReleasesRes.body.result) {
-return scrapeProfile(actorData, actorReleasesRes.body.result, entity.slug);
+return scrapeProfile(actorData, entity.slug, actorReleasesRes.body.result);
}
-return scrapeProfile(actorData, [], entity.slug);
+return scrapeProfile(actorData, entity.slug, []);
}
}
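
Two API shifts in this scraper: the updated HTTP layer evidently exposes `res.status` instead of `res.statusCode` (the unchanged `throw` line above still interpolates `res.statusCode`, which would presumably now print `undefined`), and `scrapeProfile` moves its optional `releases` parameter to the last position so it can carry the `[]` default. A minimal sketch of the new signature and its call sites:

// Optional releases list is last, per the default-param-last convention.
function scrapeProfile(data, networkName, releases = []) { /* ... */ }

scrapeProfile(actorData, entity.slug, actorReleasesRes.body.result); // with releases
scrapeProfile(actorData, entity.slug); // releases falls back to []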

View File

@@ -40,7 +40,7 @@ const littlecapricedreams = require('./littlecapricedreams');
const loveherfilms = require('./loveherfilms');
const bluedonkeymedia = require('./bluedonkeymedia');
const mikeadriano = require('./mikeadriano');
-const mindgeek = require('./mindgeek');
+const aylo = require('./aylo');
const naughtyamerica = require('./naughtyamerica');
const newsensations = require('./newsensations');
const nubiles = require('./nubiles');
@@ -90,7 +90,7 @@ const scrapers = {
bangbros,
bluedonkeymedia,
bradmontana,
-brazzers: mindgeek,
+brazzers: aylo,
cherrypimps,
cumlouder,
czechav,
@@ -130,7 +130,7 @@ const scrapers = {
mamacitaz: porndoe,
mariskax,
mikeadriano,
-mindgeek,
+aylo,
mylf: teamskeet,
naughtyamerica,
nebraskacoeds: elevatedx,
@@ -150,7 +150,7 @@ const scrapers = {
rickysroom,
sayuncle: teamskeet,
score,
-sexyhub: mindgeek,
+sexyhub: aylo,
spizoo,
swallowsalon: julesjordan,
theflourish: archangel,
@@ -182,7 +182,7 @@ const scrapers = {
anilos: nubiles,
archangel,
asiam: modelmedia,
-babes: mindgeek,
+babes: aylo,
babevr: badoink,
backroomcastingcouch: elevatedx,
baddaddypov: fullpornnetwork,
@@ -201,14 +201,14 @@ const scrapers = {
boobpedia,
bradmontana,
brattysis: nubiles,
-brazzers: mindgeek,
+brazzers: aylo,
burningangel: gamma,
cherrypimps,
cumlouder,
deeper: vixen,
deeplush: nubiles,
devilsfilm: famedigital,
-digitalplayground: mindgeek,
+digitalplayground: aylo,
dorcelclub: dorcel,
doubleviewcasting: firstanalquest,
dtfsluts: fullpornnetwork,
@@ -216,7 +216,7 @@ const scrapers = {
evilangel: gamma,
exploitedcollegegirls: elevatedx,
eyeontheguy: hush,
-fakehub: mindgeek,
+fakehub: aylo,
firstanalquest,
forbondage: porndoe,
freeones,
@@ -232,7 +232,7 @@ const scrapers = {
hotcrazymess: nubiles,
hushpass: hush,
hussiepass: hush,
-iconmale: mindgeek,
+iconmale: aylo,
inserted: radical,
interracialpass: hush,
interracialpovs: hush,
@@ -253,13 +253,13 @@ const scrapers = {
letsdoeit: porndoe,
littlecapricedreams,
mamacitaz: porndoe,
-men: mindgeek,
+men: aylo,
mariskax,
-metrohd: mindgeek,
-milehighmedia: mindgeek,
+metrohd: aylo,
+milehighmedia: aylo,
milfy: vixen,
milfvr: wankzvr,
-mofos: mindgeek,
+mofos: aylo,
mylf: teamskeet,
mugfucked: fullpornnetwork,
naughtyamerica,
@@ -283,14 +283,14 @@ const scrapers = {
povpornstars: hush,
private: privateNetwork,
purgatoryx,
-realitykings: mindgeek,
+realitykings: aylo,
realvr: badoink,
rickysroom,
roccosiffredi: famedigital,
sayuncle: teamskeet,
score,
seehimfuck: hush,
-sexyhub: mindgeek,
+sexyhub: aylo,
silverstonedvd: famedigital,
silviasaint: famedigital,
spizoo,
@@ -306,13 +306,13 @@ const scrapers = {
testedefudelidade,
thatsitcomshow: nubiles,
tokyohot,
-transangels: mindgeek,
+transangels: aylo,
transbella: porndoe,
tranzvr: wankzvr,
trueanal: mikeadriano,
tushy: vixen,
tushyraw: vixen,
-twistys: mindgeek,
+twistys: aylo,
vipsexvault: porndoe,
vixen,
vrcosplayx: badoink,

View File

@@ -3,7 +3,7 @@
/* eslint-disable no-unused-vars */
const config = require('config');
-const faker = require('faker');
+const { faker } = require('@faker-js/faker');
const { nanoid } = require('nanoid');
const moment = require('moment');
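
The abandoned `faker` package is replaced here by the maintained scoped fork, which ships a named export rather than a default one. A minimal usage sketch, assuming @faker-js/faker v8 (current at the time of this commit):

const { faker } = require('@faker-js/faker');

const username = faker.internet.userName(); // same call style as the old package
const firstName = faker.person.firstName(); // v8 moved faker.name.* to faker.person.*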

View File

@@ -345,7 +345,7 @@ async function storeMovies(movies, useBatchId) {
}
const { uniqueReleases } = await filterDuplicateReleases(movies);
-const [batchId] = useBatchId ? [useBatchId] : await knex('batches').insert({ showcased: argv.showcased, comment: null }).returning('id');
+const [{ id: batchId }] = useBatchId ? [{ id: useBatchId }] : await knex('batches').insert({ showcased: argv.showcased, comment: null }).returning('id');
const curatedMovieEntries = await Promise.all(uniqueReleases.map((release) => curateReleaseEntry(release, batchId, null, 'movie')));
@@ -364,7 +364,7 @@ async function storeSeries(series, useBatchId) {
}
const { uniqueReleases } = await filterDuplicateReleases(series);
-const [batchId] = useBatchId ? [useBatchId] : await knex('batches').insert({ showcased: argv.showcased, comment: null }).returning('id');
+const [{ id: batchId }] = useBatchId ? [{ id: useBatchId }] : await knex('batches').insert({ showcased: argv.showcased, comment: null }).returning('id');
const curatedSerieEntries = await Promise.all(uniqueReleases.map((release) => curateReleaseEntry(release, batchId, null, 'serie')));
@@ -382,7 +382,7 @@ async function storeScenes(releases, useBatchId) {
return [];
}
-const [batchId] = useBatchId ? [useBatchId] : await knex('batches').insert({ showcased: argv.showcased, comment: null }).returning('id');
+const [{ id: batchId }] = useBatchId ? [{ id: useBatchId }] : await knex('batches').insert({ showcased: argv.showcased, comment: null }).returning('id');
const releasesWithChannels = await attachChannelEntities(releases);
const releasesWithBaseActors = releasesWithChannels.map((release) => ({ ...release, actors: toBaseActors(release.actors) }));

View File

@@ -116,7 +116,7 @@ async function load() {
throw new Error(`No user ${alert.username}`);
}
-const [alertId] = await knex('alerts')
+const [{ id: alertId }] = await knex('alerts')
.insert({
user_id: user.id,
notify: alert.notify,

View File

@@ -22,7 +22,7 @@ async function getStashId(stash, user) {
return existingStash.id;
}
-const [stashId] = await knex('stashes')
+const [{ id: stashId }] = await knex('stashes')
.insert({
user_id: user.id,
name: stash.name,

View File

@@ -7,17 +7,16 @@ const moment = require('moment');
const Promise = require('bluebird');
const bhttp = require('bhttp');
const { nanoid } = require('nanoid/non-secure');
-const AWS = require('aws-sdk');
+const { Upload } = require('@aws-sdk/lib-storage');
+const { S3Client } = require('@aws-sdk/client-s3');
const { graphql } = require('../web/graphql');
const knex = require('../knex');
const args = require('../argv');
-const endpoint = new AWS.Endpoint('s3.eu-central-1.wasabisys.com');
-const s3 = new AWS.S3({
-// region: 'eu-central-1',
-endpoint,
+const s3 = new S3Client({
+region: 'eu-central-1',
+endpoint: 'https://s3.eu-central-1.wasabisys.com',
credentials: {
accessKeyId: config.s3.accessKey,
secretAccessKey: config.s3.secretKey,
@@ -325,7 +324,7 @@ async function addReleaseTags(release, context) {
}
async function addNewActor(actor, entity, context) {
-const [actorId] = await knex('actors')
+const [{ id: actorId }] = await knex('actors')
.insert({
name: actor.name,
slug: actor.slug,
@@ -379,7 +378,7 @@ async function addReleaseChapters(release, context) {
await release.chapters.reduce(async (chain, chapter) => {
await chain;
-const [chapterId] = await knex('chapters')
+const [{ id: chapterId }] = await knex('chapters')
.insert({
release_id: release.id,
index: chapter.index,
@@ -438,12 +437,15 @@ async function transferMedia(media, target) {
fileStream.on('error', () => { reject(); });
});
-await s3.upload({
-Bucket: config.s3.bucket,
-Body: fs.createReadStream(temp),
-Key: filepath,
-ContentType: media.mime,
-}).promise();
+await new Upload({
+client: s3,
+params: {
+Bucket: config.s3.bucket,
+Body: fs.createReadStream(temp),
+Key: filepath,
+ContentType: media.mime,
+},
+}).done();
await fs.promises.unlink(temp);
}, Promise.resolve());
@@ -602,7 +604,7 @@ async function load() {
return;
}
-const [batchId] = await knex('batches').insert({ comment: `import ${args.file}` }).returning('id');
+const [{ id: batchId }] = await knex('batches').insert({ comment: `import ${args.file}` }).returning('id');
const aggTags = Array.from(new Set(releases.filter((release) => release.type === 'release').flatMap((release) => [...release.tags, ...release.chapters.flatMap((chapter) => chapter.tags)]).filter(Boolean)));
const aggStudios = Array.from(new Set(releases.map((release) => release.studio).filter(Boolean)));

View File

@@ -330,6 +330,8 @@ async function request(method = 'get', url, body, requestOptions = {}, limiter)
return bypassCloudflareRequest(url, method, body, withCloudflareBypass, options);
}
+console.log(url, options);
const res = await (body
? http[method](url, body, options)
: http[method](url, options));

View File

@@ -1,17 +1,17 @@
'use strict';
const config = require('config');
-const AWS = require('aws-sdk');
const fs = require('fs');
const { nanoid } = require('nanoid');
+const { Upload } = require('@aws-sdk/lib-storage');
+const { S3Client } = require('@aws-sdk/client-s3');
async function init() {
const filepath = './public/img/sfw/animals/j0iiByCxGfA.jpeg';
-const endpoint = new AWS.Endpoint('s3.wasabisys.com');
-const s3 = new AWS.S3({
-// region: 'eu-central-1',
-endpoint,
+const s3 = new S3Client({
+region: 'eu-central-1',
+endpoint: 's3.eu-central-1.wasabisys.com',
credentials: {
accessKeyId: config.s3.accessKey,
secretAccessKey: config.s3.secretKey,
@@ -19,18 +19,21 @@ async function init() {
});
try {
-const data = await s3.listBuckets().promise();
+// const data = await s3.listBuckets().promise();
const file = fs.createReadStream(filepath);
const key = `img/${nanoid()}.jpg`;
-const status = await s3.upload({
-Bucket: config.s3.bucket,
-Body: file,
-Key: key,
-ContentType: 'image/jpeg',
-}).promise();
+const status = await new Upload({
+client: s3,
+params: {
+Bucket: config.s3.bucket,
+Body: file,
+Key: key,
+ContentType: 'image/jpeg',
+},
+}).done();
-console.log(data);
+// console.log(data);
console.log(status);
} catch (error) {
console.log(error);
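
One possible snag in this test script: unlike the media and import modules above, the endpoint here lacks a scheme ('s3.eu-central-1.wasabisys.com'), and the v3 S3Client parses string endpoints as URLs, so a bare hostname will typically be rejected with an invalid-URL error. Assuming Wasabi's standard TLS endpoint, the safer form matches the other files:

const { S3Client } = require('@aws-sdk/client-s3');

const s3 = new S3Client({
  region: 'eu-central-1',
  endpoint: 'https://s3.eu-central-1.wasabisys.com', // scheme included for URL parsing
  credentials: { accessKeyId: 'KEY', secretAccessKey: 'SECRET' }, // placeholders
});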

View File

@@ -34,6 +34,7 @@ function initPostgraphile(credentials) {
simpleCollections: 'both',
graphileBuildOptions: {
pgOmitListSuffix: true,
+// connectionFilterUseListInflectors: true,
connectionFilterRelations: true,
connectionFilterAllowNullInput: true,
},
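
For reference, these graphileBuildOptions belong to two plugins: `pgOmitListSuffix` is a @graphile-contrib/pg-simplify-inflector option that drops the "List" suffix from simple-collection field names, while the `connectionFilter*` flags come from postgraphile-plugin-connection-filter, enabling filters across relations and explicit nulls in filter input. A sketch of the wiring, assuming both plugins are appended:

const { postgraphile } = require('postgraphile');
const PgSimplifyInflectorPlugin = require('@graphile-contrib/pg-simplify-inflector');
const ConnectionFilterPlugin = require('postgraphile-plugin-connection-filter');

const middleware = postgraphile(process.env.DATABASE_URL, 'public', {
  appendPlugins: [PgSimplifyInflectorPlugin, ConnectionFilterPlugin],
  simpleCollections: 'both',
  graphileBuildOptions: {
    pgOmitListSuffix: true,               // simplify-inflector: "users" instead of "usersList"
    connectionFilterRelations: true,      // connection-filter: filter on related tables
    connectionFilterAllowNullInput: true, // connection-filter: permit explicit nulls
  },
});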