Associating actors without network.
@@ -145,7 +145,7 @@ export default {

 .tiles {
   display: grid;
-  grid-template-columns: repeat(auto-fill, minmax(10rem, 1fr));
+  grid-template-columns: repeat(auto-fill, minmax(12rem, 1fr));
   grid-gap: 0 .5rem;
   padding: 1rem;
   flex-grow: 1;
@@ -68,6 +68,7 @@
 <div class="header-toggles">
   <Icon
     v-show="!sfw"
+    v-tooltip="'Hit S to use SFW mode'"
     icon="flower"
     class="toggle noselect"
     @click.native="setSfw(true)"

@@ -75,6 +76,7 @@

   <Icon
     v-show="sfw"
+    v-tooltip="'Hit N to use NSFW mode'"
     icon="flower"
     class="toggle active noselect"
     @click.native="setSfw(false)"

@@ -82,6 +84,7 @@

   <Icon
     v-show="theme === 'light'"
+    v-tooltip="'Hit D to use dark theme'"
     icon="moon"
     class="toggle noselect"
     @click.native="setTheme('dark')"

@@ -89,6 +92,7 @@

   <Icon
     v-show="theme === 'dark'"
+    v-tooltip="'Hit L to use light theme'"
     icon="sun"
     class="toggle noselect"
     @click.native="setTheme('light')"
@@ -14,6 +14,12 @@
     <Icon icon="price-tag4" />
     {{ tag.name }}
   </h2>

+  <p
+    v-if="description"
+    class="description header-description"
+    v-html="description"
+  />
 </div>

 <div class="sidebar">

@@ -116,6 +122,12 @@ export default {
     color: var(--primary);
   }
 }

+.header .description,
+.header .description p {
+  padding: 0;
+  margin: 0;
+}
+
 </style>

 <style lang="scss" scoped>
@@ -147,15 +159,12 @@ export default {
   background: var(--profile);
   color: var(--text-light);
   display: none;
+  justify-content: space-between;
   padding: .5rem 1rem;

   .title {
     margin: 0 2rem 0 0;
   }
-
-  .description {
-    padding: 0;
-  }
 }

 .sidebar {
@@ -34,18 +34,19 @@ async function mounted() {
   popular: [
     'anal',
     'lesbian',
+    'interracial',
     'mff',
     'mfm',
-    'interracial',
+    'natural-boobs',
+    'fake-boobs',
+    'teen',
+    'milf',
     'blowjob',
     'orgy',
     'gangbang',
     'double-penetration',
-    'airtight',
     'facial',
     'creampie',
-    'teen',
-    'milf',
   ],
   oral: [
     'deepthroat',

@@ -65,6 +66,14 @@
     'brunette',
     'redhead',
   ],
+  extreme: [
+    'airtight',
+    'double-anal',
+    'double-vaginal',
+    'da-tp',
+    'dv-tp',
+    'triple-anal',
+  ],
   cumshot: [
     'facial',
     'bukkake',

@@ -72,13 +81,6 @@
     'anal-creampie',
     'cum-in-mouth',
   ],
-  extreme: [
-    'double-anal',
-    'double-vaginal',
-    'da-tp',
-    'dv-tp',
-    'triple-anal',
-  ],
   roleplay: [
     'family',
     'schoolgirl',
@@ -130,7 +132,7 @@ export default {

 .tiles {
   display: grid;
-  grid-template-columns: repeat(auto-fit, minmax(23rem, .33fr));
+  grid-template-columns: repeat(auto-fill, minmax(23rem, .33fr));
   grid-gap: 1rem;
   margin: 0 0 1.5rem 0;
 }

@@ -142,13 +144,13 @@ export default {

 @media(max-width: $breakpoint3) {
   .tiles {
-    grid-template-columns: repeat(auto-fit, minmax(21rem, .5fr));
+    grid-template-columns: repeat(auto-fill, minmax(21rem, .5fr));
   }
 }

 @media(max-width: $breakpoint) {
   .tiles {
-    grid-template-columns: repeat(auto-fit, minmax(20rem, 1fr));
+    grid-template-columns: repeat(auto-fill, minmax(20rem, 1fr));
   }
 }
 </style>
@@ -132,6 +132,7 @@
     </span>

     <ul
+      v-if="release.tags.length > 0"
       :title="release.tags.map(tag => tag.name).join(', ')"
       class="tags nolist"
     >
@@ -8,6 +8,10 @@ import { curateRelease } from '../curate';
 import getDateRange from '../get-date-range';

 function curateActor(actor) {
+  if (!actor) {
+    return null;
+  }
+
   const curatedActor = {
     ...actor,
     height: actor.heightMetric && {

@@ -49,7 +53,7 @@ function initActorActions(store, _router) {
 async function fetchActorBySlug({ _commit }, { actorSlug, limit = 100, range = 'latest' }) {
   const { before, after, orderBy } = getDateRange(range);

-  const { actor } = await graphql(`
+  const { actors: [actor] } = await graphql(`
     query Actor(
       $actorSlug: String!
       $limit:Int = 1000,
@@ -58,7 +62,14 @@ function initActorActions(store, _router) {
       $orderBy:[ReleasesActorsOrderBy!]
       $exclude: [String!]
     ) {
-      actor: actorBySlug(slug: $actorSlug) {
+      actors(filter: {
+        slug: {
+          equalTo: $actorSlug,
+        },
+        networkId: {
+          isNull: true,
+        },
+      }) {
         id
         name
         slug

@@ -84,11 +95,13 @@ function initActorActions(store, _router) {
           name
           slug
         }
-        avatar: actorsAvatarByActorId {
-          media {
-            thumbnail
-            path
-            copyright
+        actorsProfiles {
+          actorsAvatarByProfileId {
+            media {
+              path
+              thumbnail
+              copyright
+            }
           }
         }
         photos: actorsPhotos {

@@ -225,9 +238,13 @@ function initActorActions(store, _router) {
           name
           slug
         }
-        avatar: actorsAvatarByActorId {
-          media {
-            thumbnail
+        actorsProfiles {
+          actorsAvatarByProfileId {
+            media {
+              path
+              thumbnail
+              copyright
+            }
           }
         }
         birthCountry: countryByBirthCountryAlpha2 {
@@ -47,9 +47,13 @@ const actorFields = `
     name
     alias
   }
-  avatar: actorsAvatarByActorId {
-    media {
-      thumbnail
+  actorsProfiles {
+    actorsAvatarByProfileId {
+      media {
+        path
+        thumbnail
+        copyright
+      }
     }
   }
 `;
@@ -11,6 +11,14 @@ function initUiObservers(store, _router) {
     if (event.key === 'n') {
       store.dispatch('setSfw', false);
     }
+
+    if (event.key === 'd') {
+      store.dispatch('setTheme', 'dark');
+    }
+
+    if (event.key === 'l') {
+      store.dispatch('setTheme', 'light');
+    }
   });
 }
@@ -253,13 +253,66 @@ exports.up = knex => Promise.resolve()
     table.string('name')
       .notNullable();

-    table.string('slug', 32)
-      .unique();
+    table.string('slug', 32);
+
+    table.integer('network_id', 12)
+      .references('id')
+      .inTable('networks');
+
+    table.unique(['slug', 'network_id']);

     table.integer('alias_for', 12)
       .references('id')
       .inTable('actors');

+    table.date('birthdate');
+    table.string('gender', 18);
+    table.text('description');
+
+    table.string('birth_city');
+    table.string('birth_state');
+    table.string('birth_country_alpha2', 2)
+      .references('alpha2')
+      .inTable('countries');
+
+    table.string('residence_city');
+    table.string('residence_state');
+    table.string('residence_country_alpha2', 2)
+      .references('alpha2')
+      .inTable('countries');
+
+    table.string('ethnicity');
+
+    table.string('bust', 10);
+    table.integer('waist', 3);
+    table.integer('hip', 3);
+    table.boolean('natural_boobs');
+
+    table.integer('height', 3);
+    table.integer('weight', 3);
+    table.string('eyes');
+    table.string('hair');
+
+    table.boolean('has_tattoos');
+    table.boolean('has_piercings');
+    table.string('piercings');
+    table.string('tattoos');
+
+    table.integer('batch_id', 12);
+
+    table.datetime('updated_at')
+      .defaultTo(knex.fn.now());
+
+    table.datetime('created_at')
+      .defaultTo(knex.fn.now());
+  }))
+  .then(() => knex.schema.createTable('actors_profiles', (table) => {
+    table.increments('id', 12);
+
+    table.integer('actor_id', 12)
+      .references('id')
+      .inTable('actors');
+
     table.integer('network_id', 12)
       .references('id')
       .inTable('networks');
@@ -297,22 +350,15 @@ exports.up = knex => Promise.resolve()
     table.string('piercings');
     table.string('tattoos');

+    table.datetime('scraped_at');
+    table.boolean('scrape_success');
+
     table.datetime('updated_at')
       .defaultTo(knex.fn.now());

     table.datetime('created_at')
       .defaultTo(knex.fn.now());
   }))
-  .then(() => knex.raw('CREATE TABLE actors_profiles AS TABLE actors WITH NO DATA;'))
-  .then(() => knex.schema.alterTable('actors_profiles', (table) => {
-    table.integer('actor_id')
-      .references('id')
-      .inTable('actors')
-      .notNullable();
-
-    table.datetime('scraped_at');
-    table.boolean('scrape_success');
-  }))
   .then(() => knex.schema.createTable('body', (table) => {
     table.string('slug', 20)
       .primary();
@@ -474,17 +520,17 @@ exports.up = knex => Promise.resolve()
       .defaultTo(knex.fn.now());
   }))
   .then(() => knex.schema.createTable('actors_avatars', (table) => {
-    table.integer('actor_id', 12)
+    table.integer('profile_id', 12)
       .notNullable()
       .references('id')
-      .inTable('actors');
+      .inTable('actors_profiles');

     table.string('media_id', 21)
       .notNullable()
       .references('id')
       .inTable('media');

-    table.unique('actor_id');
+    table.unique('profile_id');
   }))
   .then(() => knex.schema.createTable('actors_photos', (table) => {
     table.integer('actor_id', 12)
Binary image assets changed: several tag posters and thumbnails were added, removed, or replaced (file sizes ranging from a few KiB to several MiB).
@@ -186,6 +186,7 @@ const tags = [
   {
     name: 'bisexual',
     slug: 'bisexual',
+    priority: 10,
   },
   {
     name: 'black hair',

@@ -293,6 +294,36 @@ const tags = [
     priority: 8,
     group: 'penetration',
   },
+  {
+    name: 'double dildo',
+    slug: 'double-dildo',
+    description: 'Two girls fucking eachother using either end of a double-sided dildo.',
+    priority: 4,
+  },
+  {
+    name: 'double dildo anal',
+    slug: 'double-dildo-anal',
+    description: 'Two people ass-fucking eachother with either end of a [double-sided dildo](/tag/double-dildo), "ass to ass".',
+    priority: 4,
+  },
+  {
+    name: 'double dildo DP',
+    slug: 'double-dildo-dp',
+    description: 'A girl using a [double-sided dildo](/tag/double-dildo) on both her ass and pussy.',
+    priority: 4,
+  },
+  {
+    name: 'double dildo blowjob',
+    slug: 'double-dildo-blowjob',
+    description: 'Two people sucking and gagging on either end of a [double-sided dildo](/tag/double-dildo). They may deepthroat the dildo for a [double dildo kiss](/tag/double-dildo-kiss).',
+    priority: 4,
+  },
+  {
+    name: 'double dildo kiss',
+    slug: 'double-dildo-kiss',
+    description: 'Two people deepthroating a [double-sided dildo](/tag/double-dildo) from either end during a [double dildo blowjob](/tag/double-dildo-blowjob), all the way until they can kiss eachother\'s lips.',
+    priority: 4,
+  },
   {
     name: 'triple anal',
     slug: 'triple-anal',
@@ -3,7 +3,7 @@ const upsert = require('../src/utils/upsert');

 const tagPosters = [
   ['airtight', 6, 'Remy Lacroix in "Ass Worship 14" for Jules Jordan'],
-  ['anal', 4, 'Lana Roy in "Anal In The Club" for 21Naturals'],
+  ['anal', 0, 'Adriana Chechik in "Manuel Creampies Their Asses 3" for Jules Jordan'],
   ['anal-creampie', 0, 'Gina Valentina and Jane Wilde in "A Very Special Anniversary" for Tushy'],
   ['ass-eating', 0, 'Kendra Sunderland and Ana Foxxx in "Kendra\'s Obsession, Part 3" for Blacked'],
   ['asian', 0, 'Alina Li in "Slut Puppies 8" for Jules Jordan'],

@@ -22,26 +22,28 @@ const tagPosters = [
   ['deepthroat', 0, 'Chanel Grey in "Deepthroating Is Fun" for Throated'],
   ['double-anal', 7, 'Adriana Chechik in "DP Masters 6" for Jules Jordan'],
   ['double-blowjob', 1, 'Veronica Rodriguez and Penny Pax in "Fucking Older Guys 5" for Penthouse'],
+  ['double-dildo-blowjob', 0, 'Adriana Chechik and Vicki Chase in "Anal Savages 1" for Jules Jordan'],
   ['double-penetration', 2, 'Megan Rain in "DP Masters 4" for Jules Jordan'],
   ['double-vaginal', 'poster', 'Riley Reid in "Pizza That Ass" for Reid My Lips'],
   ['dv-tp', 'poster', 'Juelz Ventura in "Gangbanged 5" for Elegant Angel'],
   ['ebony', 1, 'Ana Foxxx in "DP Me 4" for HardX'],
   ['facefucking', 2, 'Jynx Maze for Throated'],
   ['facial', 0, 'Brooklyn Gray in "All About Ass 4" for Evil Angel'],
-  ['fake-boobs', 0, 'Marsha May in "Once You Go Black 7" for Jules Jordan'],
+  ['fake-boobs', 1, 'Lela Star in "Thick" for Jules Jordan'],
   ['family', 0, 'Teanna Trump in "A Family Appear: Part One" for Brazzers'],
-  ['gangbang', 4, 'Marley Brinx in "The Gangbang of Marley Brinx" for Jules Jordan'],
+  ['gangbang', 5, 'Carter Cruise\'s first gangbang in "Slut Puppies 9" for Jules Jordan'],
   ['gaping', 1, 'Vina Sky in "Vina Sky Does Anal" for HardX'],
-  ['interracial', 0, 'Kali Roses and Jax Slayher in "Kali Roses Gets An Interracial Creampie" for Jules Jordan'],
+  ['interracial', 0, 'Jaye Summers and Prince Yahshua in "Platinum Pussy 3" for Jules Jordan'],
   ['latina', 'poster', 'Alexis Love for Penthouse'],
-  ['lesbian', 0, 'Reena Sky and Sarah Banks for Brazzers'],
+  ['lesbian', 0, 'Jenna Sativa and Alina Lopez in "Opposites Attract" for Girl Girl'],
   ['maid', 0, 'Whitney Wright in "Dredd Up Your Ass 2" for Jules Jordan'],
   ['milf', 0, 'Olivia Austin in "Dredd 3" for Jules Jordan'],
   ['mff', 0, 'Madison Ivy, Adriana Chechik and Keiran Lee in "Day With A Pornstar" for Brazzers'],
   ['mfm', 5, 'Vina Sky in "Slut Puppies 15" for Jules Jordan'],
+  ['natural-boobs', 0, 'Autumn Falls in "Manuel Ferrara\'s Ripe 7" for Jules Jordan'],
   ['nurse', 0, 'Sarah Vandella in "Cum For Nurse Sarah" for Brazzers'],
   ['orgy', 1, 'Megan Rain (DP), Morgan Lee (anal), Jessa Rhodes, Melissa Moore and Kimmy Granger in "Orgy Masters 8" for Jules Jordan'],
-  ['pussy-eating', 0, 'Elena Kosha and Ivy Wolfe in "Bare" for Jules Jordan'],
+  ['pussy-eating', 0, 'Kali Roses licking Emily Willis\' pussy in "Peeping On My Neighbor" for Girl Girl'],
   ['redhead', 0, 'Penny Pax in "The Submission of Emma Marx: Boundaries" for New Sensations'],
   ['schoolgirl', 1, 'Eliza Ibarra for Brazzers'],
   ['swallowing', 'poster'],
@@ -69,7 +71,7 @@ const tagPhotos = [
   ['asian', 'poster', 'Vina Sky in "Slut Puppies 15" for Jules Jordan'],
   // ['asian', 1, 'Alina Li in "Oil Overload 11" for Jules Jordan'],
   // ['anal', 'poster', 'Jynx Maze in "Anal Buffet 6" for Evil Angel'],
-  ['anal', 2, 'Gabbie Carter for Tushy Raw'],
+  ['anal', 4, 'Lana Roy in "Anal In The Club" for 21Naturals'],
   ['anal', 3, 'Dakota Skye for Brazzers'],
   // ['anal', 1, 'Veronica Leal and Tina Kay in "Agents On Anal Mission" for Asshole Fever'],
   // ['anal', 0, 'Veronica Leal'],

@@ -86,6 +88,8 @@ const tagPhotos = [
   ['double-anal', 0, 'Nicole Black doing double anal during a gangbang in GIO971 for LegalPorno'],
   ['double-anal', 1, 'Ria Sunn in SZ1801 for LegalPorno'],
   ['double-blowjob', 0, 'Kira Noir and Kali Roses for Brazzers'],
+  ['double-dildo-blowjob', 1, 'Aidra Fox and Reena Sky in "Reena\'s Got A Staring Problem" for Brazzers'],
+  ['double-dildo-dp', 0, 'u/LacyCrow "Sometimes you have to do it yourself"'],
   ['double-penetration', 'poster', 'Mia Malkova in "DP Me 8" for HardX'],
   ['double-penetration', 0, 'Zoey Monroe in "Slut Puppies 7" for Jules Jordan'],
   ['double-penetration', 1, 'Jynx Maze in "Don\'t Make Me Beg 4" for Evil Angel'],

@@ -95,13 +99,13 @@ const tagPhotos = [
   ['facial', 1, 'Ella Knox in "Mr Saltys Adult Emporium Adventure 2" for Aziani'],
   ['facial', 'poster', 'Jynx Maze'],
   ['facefucking', 1, 'Carrie for Young Throats'],
+  // ['fake-boobs', 0, 'Marsha May in "Once You Go Black 7" for Jules Jordan'],
   ['gangbang', 'poster', 'Kristen Scott in "Interracial Gangbang!" for Jules Jordan'],
   ['gangbang', 0, '"4 On 1 Gangbangs" for Doghouse Digital'],
+  ['gangbang', 4, 'Marley Brinx in "The Gangbang of Marley Brinx" for Jules Jordan'],
   ['gangbang', 1, 'Ginger Lynn in "Gangbang Mystique", a photoset shot by Suze Randall for Puritan No. 10, 1984. This photo pushed the boundaries of pornography at the time, as depicting a woman \'fully occupied\' was unheard of.'],
-  ['gangbang', 2, 'Riley Reid\'s double anal in "The Gangbang of Riley Reid" for Jules Jordan'],
   ['gaping', 'poster', 'Zoey Monroe in "Manuel DPs Them All 5" for Jules Jordan'],
   ['gaping', 2, 'Alex Grey in "DP Masters 5" for Jules Jordan'],
-  ['interracial', 'poster', 'Khloe Kapri and Jax Slayher in "First Interracial Anal" for Hussie Pass'],
   ['latina', 0, 'Abby Lee Brazil for Bang Bros'],
   // ['mfm', 0, 'Vina Sky in "Jules Jordan\'s Three Ways" for Jules Jordan'],
   ['mfm', 1, 'Jynx Maze in "Don\'t Make Me Beg 4" for Evil Angel'],
@@ -186,6 +186,7 @@ const countries = [
   },
   {
     name: 'Bolivia (Plurinational State of)',
+    alias: 'Bolivia',
     code: 68,
     alpha2: 'BO',
     alpha3: 'BOL',

@@ -193,6 +194,7 @@
   },
   {
     name: 'Bonaire, Sint Eustatius and Saba',
+    alias: 'Bonaire',
     code: 535,
     alpha2: 'BQ',
     alpha3: 'BES',

@@ -200,6 +202,7 @@
   },
   {
     name: 'Bosnia and Herzegovina',
+    alias: 'Bosnia',
     code: 70,
     alpha2: 'BA',
     alpha3: 'BIH',

@@ -355,6 +358,7 @@
   },
   {
     name: 'Congo (Republic of the)',
+    alias: 'Congo Republic',
     code: 178,
     alpha2: 'CG',
     alpha3: 'COG',

@@ -362,6 +366,7 @@
   },
   {
     name: 'Congo (Democratic Republic of the)',
+    alias: 'DR Congo',
     code: 180,
     alpha2: 'CD',
     alpha3: 'COD',

@@ -502,6 +507,7 @@
   },
   {
     name: 'Falkland Islands (Malvinas)',
+    alias: 'Falkland Islands',
     code: 238,
     alpha2: 'FK',
     alpha3: 'FLK',

@@ -824,6 +830,7 @@
   },
   {
     name: "Korea (Democratic People's Republic of)",
+    alias: 'North Korea',
     code: 408,
     alpha2: 'KP',
     alpha3: 'PRK',

@@ -831,6 +838,7 @@
   },
   {
     name: 'Korea (Republic of)',
+    alias: 'South Korea',
     code: 410,
     alpha2: 'KR',
     alpha3: 'KOR',

@@ -859,7 +867,8 @@
     nationality: 'Kyrgyzstani, Kyrgyz, Kirgiz, Kirghiz',
   },
   {
-    name: "Lao People's Democratic Republic",
+    name: 'Lao People\'s Democratic Republic',
+    alias: 'Laos',
     code: 418,
     alpha2: 'LA',
     alpha3: 'LAO',

@@ -931,6 +940,7 @@
   },
   {
     name: 'Macedonia (the former Yugoslav Republic of)',
+    alias: 'Macedonia',
     code: 807,
     alpha2: 'MK',
     alpha3: 'MKD',

@@ -1022,6 +1032,7 @@
   },
   {
     name: 'Micronesia (Federated States of)',
+    alias: 'Micronesia',
     code: 583,
     alpha2: 'FM',
     alpha3: 'FSM',

@@ -1029,6 +1040,7 @@
   },
   {
     name: 'Moldova (Republic of)',
+    alias: 'Moldova',
     code: 498,
     alpha2: 'MD',
     alpha3: 'MDA',

@@ -1331,6 +1343,7 @@
   },
   {
     name: 'Saint Martin (French part)',
+    alias: 'Saint Martin',
     code: 663,
     alpha2: 'MF',
     alpha3: 'MAF',

@@ -1415,6 +1428,7 @@
   },
   {
     name: 'Sint Maarten (Dutch part)',
+    alias: 'Sint Maarten',
     code: 534,
     alpha2: 'SX',
     alpha3: 'SXM',

@@ -1527,6 +1541,7 @@
   },
   {
     name: 'Syrian Arab Republic',
+    alias: 'Syria',
     code: 760,
     alpha2: 'SY',
     alpha3: 'SYR',

@@ -1550,6 +1565,7 @@
   },
   {
     name: 'Tanzania, United Republic of',
+    alias: 'Tanzania',
     code: 834,
     alpha2: 'TZ',
     alpha3: 'TZA',

@@ -1701,6 +1717,7 @@
   },
   {
     name: 'Venezuela (Bolivarian Republic of)',
+    alias: 'Venezuela',
     code: 862,
     alpha2: 'VE',
     alpha3: 'VEN',
@@ -1,6 +1,6 @@
 'use strict';

-const logger = require('./logger')(__filename);
+// const logger = require('./logger')(__filename);
 const knex = require('./knex');
 const slugify = require('./utils/slugify');
 const capitalize = require('./utils/capitalize');

@@ -13,9 +13,7 @@ function toBaseActors(actorsOrNames, release) {
   const baseActor = {
     name,
     slug,
-    hasSingleName: name.split(/\s+/).length === 1,
     network: release.site.network,
-    slugWithNetworkSlug: `${slug}-${release.site.network.slug}`,
   };

   if (actorOrName.name) {
@@ -29,39 +27,38 @@ function toBaseActors(actorsOrNames, release) {
   });
 }

-function curateActorEntry(baseActor) {
-  if (baseActor.hasSingleName) {
-    logger.warn(`Assigning single name actor '${baseActor.name}' to network '${baseActor.network.name}'`);
-
-    // attach network ID to allow separating actors with the same name
-    return {
-      name: baseActor.name,
-      slug: baseActor.slugWithNetworkSlug,
-      network_id: baseActor.network.id,
-    };
-  }
-
+function curateActorEntry(baseActor, batchId) {
   return {
     name: baseActor.name,
     slug: baseActor.slug,
+    network_id: null,
+    batch_id: batchId,
   };
 }

-function curateActorEntries(baseActors) {
-  return baseActors.map(baseActor => curateActorEntry(baseActor));
+function curateActorEntries(baseActors, batchId) {
+  return baseActors.map(baseActor => curateActorEntry(baseActor, batchId));
 }

-async function getOrCreateActors(baseActors) {
+async function getOrCreateActors(baseActors, batchId) {
   const existingActors = await knex('actors')
     .select('id', 'name', 'slug', 'network_id')
     .whereIn('slug', baseActors.map(baseActor => baseActor.slug))
     .whereNull('network_id')
-    .orWhereIn(['slug', 'network_id'], baseActors.map(baseActor => [baseActor.slugWithNetworkSlug, baseActor.network.id]));
+    .orWhereIn(['slug', 'network_id'], baseActors.map(baseActor => [baseActor.slug, baseActor.network.id]));

-  const existingActorSlugs = new Set(existingActors.map(actor => actor.slug));
-  const uniqueBaseActors = baseActors.filter(baseActor => !existingActorSlugs.has(baseActor.slug) && !existingActorSlugs.has(baseActor.slugWithNetworkSlug));
+  // const existingActorSlugs = new Set(existingActors.map(actor => actor.slug));
+  const existingActorSlugs = existingActors.reduce((acc, actor) => ({
+    ...acc,
+    [actor.network_id]: {
+      ...acc[actor.network_id],
+      [actor.slug]: true,
+    },
+  }), {});

-  const curatedActorEntries = curateActorEntries(uniqueBaseActors);
+  const uniqueBaseActors = baseActors.filter(baseActor => !existingActorSlugs[baseActor.network.id]?.[baseActor.slug] && !existingActorSlugs.null?.[baseActor.slug]);
+
+  const curatedActorEntries = curateActorEntries(uniqueBaseActors, batchId);
   const newActors = await knex('actors').insert(curatedActorEntries, ['id', 'name', 'slug', 'network_id']);

   if (Array.isArray(newActors)) {

@@ -71,7 +68,7 @@ async function getOrCreateActors(baseActors) {
   return existingActors;
 }

-async function associateActors(releases) {
+async function associateActors(releases, batchId) {
   const baseActorsByReleaseId = releases.reduce((acc, release) => {
     if (release.actors) {
       acc[release.id] = toBaseActors(release.actors, release);

@@ -86,17 +83,32 @@ async function associateActors(releases) {
     return;
   }

-  const baseActorsBySlug = baseActors.reduce((acc, baseActor) => ({ ...acc, [baseActor.slug]: baseActor }), {});
-  const uniqueBaseActors = Object.values(baseActorsBySlug);
+  // const baseActorsBySlug = baseActors.reduce((acc, baseActor) => ({ ...acc, [baseActor.slug]: baseActor }), {});
+  const baseActorsBySlugAndNetworkId = baseActors.reduce((acc, baseActor) => ({
+    ...acc,
+    [baseActor.slug]: {
+      ...acc[baseActor.slug],
+      [baseActor.network.id]: baseActor,
+    },
+  }), {});

-  const actors = await getOrCreateActors(uniqueBaseActors);
-  const actorIdsBySlug = actors.reduce((acc, actor) => ({ ...acc, [actor.slug]: actor.id }), {});
+  const uniqueBaseActors = Object.values(baseActorsBySlugAndNetworkId).map(baseActorsByNetworkId => Object.values(baseActorsByNetworkId)).flat();
+
+  const actors = await getOrCreateActors(uniqueBaseActors, batchId);
+  // const actorIdsBySlug = actors.reduce((acc, actor) => ({ ...acc, [actor.slug]: actor.id }), {});
+  const actorIdsBySlugAndNetworkId = actors.reduce((acc, actor) => ({
+    ...acc,
+    [actor.network_id]: {
+      ...acc[actor.network_id],
+      [actor.slug]: actor.id,
+    },
+  }), {});

   const releaseActorAssociations = Object.entries(baseActorsByReleaseId)
     .map(([releaseId, releaseActors]) => releaseActors
       .map(releaseActor => ({
         release_id: releaseId,
-        actor_id: actorIdsBySlug[releaseActor.slug] || actorIdsBySlug[releaseActor.slugWithNetworkSlug],
+        actor_id: actorIdsBySlugAndNetworkId[releaseActor.network.id]?.[releaseActor.slug] || actorIdsBySlugAndNetworkId.null[releaseActor.slug],
       })))
     .flat();
@@ -1,445 +0,0 @@
-'use strict';
-
-const config = require('config');
-const Promise = require('bluebird');
-// const bhttp = require('bhttp');
-const mime = require('mime');
-const fs = require('fs-extra');
-const sharp = require('sharp');
-const path = require('path');
-const blake2 = require('blake2');
-
-const argv = require('./argv');
-const logger = require('./logger')(__filename);
-const knex = require('./knex');
-const { get } = require('./utils/http');
-const { ex } = require('./utils/q');
-const chunk = require('./utils/chunk');
-
-function getHash(buffer) {
-  const hash = blake2.createHash('blake2b', { digestLength: 24 });
-  hash.update(buffer);
-
-  return hash.digest('hex');
-}
-
-async function getMeta(buffer, withHash = false) {
-  try {
-    const { entropy } = await sharp(buffer).stats();
-    const { width, height, size } = await sharp(buffer).metadata();
-
-    const hash = withHash && getHash(buffer);
-
-    return {
-      width,
-      height,
-      size,
-      entropy,
-      hash,
-    };
-  } catch (error) {
-    logger.warn(`Failed to retrieve image entropy, using 7.5: ${error.message}`);
-
-    return 7.5;
-  }
-}
-
-async function createThumbnail(buffer, height = config.media.thumbnailSize) {
-  try {
-    const thumbnail = sharp(buffer)
-      .resize({
-        height,
-        withoutEnlargement: true,
-      })
-      .jpeg({
-        quality: config.media.thumbnailQuality,
-      })
-      .toBuffer();
-
-    return thumbnail;
-  } catch (error) {
-    logger.error(`Failed to create thumbnail: ${error.message}`);
-  }
-
-  return null;
-}
-
-function groupFallbacksByPriority(chunks) {
-  /*
-  Chunks naturally give priority to all of the first item's fallbacks, generally lower quality images.
-  This function ensures every item's first source is tried, before trying every item's second source, etc., example:
-  IN: [[1, 2, 3,], 10, [1, 2, 3, 4, 5], [1, 2, 3]]
-  OUT [[1, 1, 1], [2, 2, 2], [3, 3, 3], [4], [5]]
-  */
-  return chunks.map(group => group.reduce((acc, item) => {
-    if (Array.isArray(item)) {
-      // place provided fallbacks at same index (priority) in parent array
-      item.forEach((fallback, fallbackIndex) => {
-        if (!acc[fallbackIndex]) {
-          acc[fallbackIndex] = [];
-        }
-
-        acc[fallbackIndex].push(fallback);
-      });
-
-      return acc;
-    }
-
-    // no fallbacks provided, first priority
-    if (!acc[0]) {
-      acc[0] = [];
-    }
-
-    acc[0].push(item);
-
-    return acc;
-  }, []).flat());
-}
-
-function pluckItems(items, specifiedLimit, asFallbacks = true) {
-  const limit = specifiedLimit || argv.mediaLimit;
-
-  if (!items || items.length <= limit) return items;
-
-  if (asFallbacks) {
-    const chunks = chunk(items, Math.ceil(items.length / limit));
-    const fallbacks = groupFallbacksByPriority(chunks);
-
-    return fallbacks;
-  }
-
-  const plucked = [1]
-    .concat(
-      Array.from({ length: limit - 1 }, (value, index) => Math.round((index + 1) * (items.length / (limit - 1)))),
-    );
-
-  return Array.from(new Set(plucked)).map(itemIndex => items[itemIndex - 1]); // remove duplicates, may happen when photo total and photo limit are close
-}
-
-function pickQuality(items) {
-  const itemsByQuality = items.reduce((acc, item) => ({ ...acc, [item.quality]: item }), {});
-  const item = config.media.videoQuality.reduce((acc, quality) => acc || itemsByQuality[quality], null);
-
-  return item || items[0];
-}
-
-async function extractItem(source) {
-  // const res = await bhttp.get(source.src);
-  const res = await get(source.src);
-
-  if (res.statusCode === 200) {
-    const { qu } = ex(res.body.toString());
-
-    return source.extract(qu);
-  }
-
-  return null;
-}
-
-async function fetchSource(source, domain, role) {
-  logger.silly(`Fetching ${domain} ${role} from ${source.src || source}`);
-
-  // const res = await bhttp.get(source.src || source);
-  const res = await get(source.src || source, {
-    headers: {
-      ...(source.referer && { referer: source.referer }),
-      ...(source.host && { host: source.host }),
-    },
-  });
-
-  if (res.statusCode === 200) {
-    const { pathname } = new URL(source.src || source);
-    const mimetype = mime.getType(pathname);
-    const extension = mime.getExtension(mimetype);
-    const hash = getHash(res.body);
-    const { entropy, size, width, height } = /image/.test(mimetype) ? await getMeta(res.body) : {};
-
-    logger.silly(`Fetched media item from ${source.src || source}`);
-
-    return {
-      file: res.body,
-      mimetype,
-      extension,
-      hash,
-      entropy: entropy || null,
-      size: size || null,
-      width: width || null,
-      height: height || null,
-      quality: source.quality || null,
-      source: source.src || source,
-      scraper: source.scraper,
-      copyright: source.copyright,
-    };
-  }
-
-  throw new Error(`Response ${res.statusCode} not OK`);
-}
-
-async function fetchItem(source, index, existingItemsBySource, domain, role, attempt = 1, originalSource = null, sourceIndex = 0) {
-  try {
-    if (!source) {
-      throw new Error(`Empty ${domain} ${role} source in ${originalSource}`);
-    }
-
-    if (Array.isArray(source)) {
-      if (source.every(sourceX => sourceX.quality)) {
-        // various video qualities provided
-        const selectedSource = pickQuality(source);
-        return fetchItem(selectedSource, index, existingItemsBySource, domain, role, attempt, originalSource);
-      }
-
-      // fallbacks provided
-      return source.reduce((outcome, sourceX, sourceIndexX) => outcome.catch(
-        async () => fetchItem(sourceX, index, existingItemsBySource, domain, role, attempt, source, sourceIndexX),
-      ), Promise.reject(new Error()));
-    }
-
-    if (source.src && source.extract) {
-      // source links to page containing a (presumably) tokenized photo
-      const itemSource = await extractItem(source);
-
-      return fetchItem(itemSource, index, existingItemsBySource, domain, role, attempt, source, sourceIndex);
-    }
-
-    if (existingItemsBySource[source]) {
-      return null;
-    }
-
-    return await fetchSource(source, domain, role, originalSource);
-  } catch (error) {
-    logger.warn(`Failed attempt ${attempt}/3 to fetch ${domain} ${role} ${index + 1} (${source.src || source}): ${error}`);
-
-    if (source && attempt < 3) {
-      // only retry if source is provided at all
-      await Promise.delay(5000);
-      return fetchItem(source, index, existingItemsBySource, domain, role, attempt + 1, originalSource, sourceIndex);
-    }
-
-    if (originalSource && sourceIndex < originalSource.length - 1) {
-      throw error; // gets caught to try next source
-    }
-
-    return null;
-  }
-}
-
-async function fetchItems(itemSources, existingItemsBySource, domain, role) {
-  return Promise.map(itemSources, async (source, index) => fetchItem(source, index, existingItemsBySource, domain, role)).filter(Boolean);
-}
-
-async function saveItems(items, domain, role) {
-  return Promise.map(items, async (item) => {
-    try {
-      const dir = item.hash.slice(0, 2);
-      const subdir = item.hash.slice(2, 4);
-      const filename = item.quality
-        ? `${item.hash.slice(4)}_${item.quality}.${item.extension}`
-        : `${item.hash.slice(4)}.${item.extension}`;
-
-      const filedir = path.join(`${role}s`, dir, subdir);
-      const filepath = path.join(filedir, filename);
-
-      await fs.mkdir(path.join(config.media.path, filedir), { recursive: true });
-      await fs.writeFile(path.join(config.media.path, filepath), item.file);
-
-      if (/image/.test(item.mimetype)) {
-        const thumbnail = await createThumbnail(item.file);
-
-        const thumbdir = path.join(`${role}s`, 'thumbs', dir, subdir);
-        const thumbpath = path.join(thumbdir, filename);
-
-        await fs.mkdir(path.join(config.media.path, thumbdir), { recursive: true });
-        await fs.writeFile(path.join(config.media.path, thumbpath), thumbnail);
-
-        logger.verbose(`Saved ${domain} ${role} with thumbnail to ${filepath}`);
-
-        return {
-          thumbnail,
-          filepath,
-          thumbpath,
-          mimetype: item.mimetype,
-          extension: item.extension,
-          hash: item.hash,
-          size: item.size,
-          width: item.width,
-          height: item.height,
-          quality: item.quality,
-          entropy: item.entropy,
-          scraper: item.scraper,
-          copyright: item.copyright,
-          source: item.source,
-        };
-      }
-
-      logger.verbose(`Saved ${domain} ${role} to ${filepath}`);
-
-      return {
-        filepath,
-        mimetype: item.mimetype,
-        extension: item.extension,
-        hash: item.hash,
-        size: item.size,
-        width: item.width,
-        height: item.height,
-        quality: item.quality,
-        entropy: item.entropy,
-        scraper: item.scraper,
-        copyright: item.copyright,
-        source: item.source,
-      };
-    } catch (error) {
-      logger.error(`Failed to store ${domain} ${role} from ${item.source}: ${error.message}`);
-      return null;
-    }
-  });
-}
-
-function curateItemEntries(items) {
-  return items.filter(Boolean).map((item, index) => ({
-    path: item.filepath,
-    thumbnail: item.thumbpath,
-    mime: item.mimetype,
-    hash: item.hash,
-    size: item.size,
-    width: item.width,
-    height: item.height,
-    quality: item.quality,
-    entropy: item.entropy,
-    source: item.source,
-    scraper: item.scraper,
-    copyright: item.copyright,
-    index,
-  }));
-}
-
-function groupItems(items) {
-  return items.reduce((acc, item) => ({
-    source: { ...acc.source, [item.source]: item },
-    hash: { ...acc.hash, [item.hash]: item },
-  }), {
-    source: {},
-    hash: {},
-  });
-}
-
-async function storeMedia(sources, domain, role, { entropyFilter = 2.5 } = {}) {
-  const presentSources = sources.filter(source => typeof source === 'string' || Array.isArray(source) || (source && source.src));
-
-  if (presentSources.length === 0) {
-    return {};
-  }
-
-  // split up source list to prevent excessive RAM usage
-  const itemChunksBySource = await Promise.all(chunk(presentSources, 50).map(async (sourceChunk, index) => {
-    try {
-      // find source duplicates that don't need to be re-downloaded or re-saved
-      const existingSourceItems = await knex('media').whereIn('source', sourceChunk.flat().map(source => source.src || source));
-      const { source: existingSourceItemsBySource, hash: existingSourceItemsByHash } = groupItems(existingSourceItems);
-
-      // download media items from new sources
-      const fetchedItems = await fetchItems(sourceChunk, existingSourceItemsBySource, domain, role);
-      const { hash: fetchedItemsByHash } = groupItems(fetchedItems);
-
-      // find hash duplicates that don't need to be re-saved
-      const uniqueFetchedItems = Object.values(fetchedItemsByHash).filter(item => !entropyFilter || item.entropy === null || item.entropy >= entropyFilter);
-      const existingHashItems = await knex('media').whereIn('hash', uniqueFetchedItems.map(item => item.hash));
-      const { hash: existingHashItemsByHash } = groupItems(existingHashItems);
-
-      // save new items to disk
-      const newItems = uniqueFetchedItems.filter(item => !existingHashItemsByHash[item.hash]);
-      const savedItems = await saveItems(newItems, domain, role);
-
-      // store new items in database
-      const curatedItemEntries = curateItemEntries(savedItems);
-      const storedItems = await knex('media').insert(curatedItemEntries).returning('*');
-      const { hash: storedItemsByHash } = groupItems(Array.isArray(storedItems) ? storedItems : []);
-
-      // accumulate existing and new items by source to be mapped onto releases
-      const itemsByHash = { ...existingSourceItemsByHash, ...existingHashItemsByHash, ...storedItemsByHash };
-      const itemsBySource = {
-        ...existingSourceItemsBySource,
-        ...fetchedItems.reduce((acc, item) => ({ ...acc, [item.source]: itemsByHash[item.hash] }), {}),
-      };
-
-      logger.info(`Stored batch ${index + 1} with ${fetchedItems.length} of new ${domain} ${role}s`);
-
-      return itemsBySource;
-    } catch (error) {
-      logger.error(`Failed to store ${domain} ${role} batch ${index + 1}: ${error.message}`);
-
-      return null;
-    }
-  }));
-
-  return itemChunksBySource.reduce((acc, itemChunk) => ({ ...acc, ...itemChunk }), {});
-}
-
-function extractPrimaryItem(associations, targetId, role, primaryRole, primaryItemsByTargetId) {
-  if (!primaryRole) {
-    return { [role]: associations, [primaryRole]: null };
-  }
-
-  if (primaryItemsByTargetId[targetId]) {
-    const remainingAssociations = associations.filter(association => association.media_id !== primaryItemsByTargetId[targetId].media_id);
-
-    return { [role]: remainingAssociations, [primaryRole]: null };
-  }
-
-  return {
-    [role]: associations.slice(1),
-    [primaryRole]: associations.slice(0, 1)[0],
-  };
-}
-
-function associateTargetMedia(targetId, sources, mediaBySource, domain, role, primaryRole, primaryItemsByTargetId) {
-  if (!sources) return { [role]: null, [primaryRole]: null };
-
-  const mediaIds = sources
-    .map((source) => {
-      if (!source) return null;
-
-      if (Array.isArray(source)) {
-        const availableSource = source.find(fallbackSource => mediaBySource[fallbackSource.src || fallbackSource]);
-        return mediaBySource[availableSource];
-      }
-
-      return mediaBySource[source.src || source];
-    })
-    .filter(Boolean)
-    // .sort((mediaItemA, mediaItemB) => mediaItemB.height - mediaItemA.height) // prefer high res images for primary item
-    .map(mediaItem => mediaItem.id);
-
-  const uniqueMediaIds = Array.from(new Set(mediaIds));
-  const associations = uniqueMediaIds.map(mediaId => ({ [`${domain}_id`]: targetId, media_id: mediaId }));
-
-  logger.silly(`Associating ${associations.length} ${role}s to ${domain} ${targetId}`);
-
-  return extractPrimaryItem(associations, targetId, role, primaryRole, primaryItemsByTargetId);
-}
-
-async function associateMedia(sourcesByTargetId, mediaBySource, domain, role, primaryRole) {
-  const primaryItems = primaryRole ? await knex(`${domain}s_${primaryRole}s`).whereIn(`${domain}_id`, Object.keys(sourcesByTargetId)) : [];
-  const primaryItemsByTargetId = primaryItems.reduce((acc, item) => ({ ...acc, [item[`${domain}_id`]]: item }), {});
-
-  const associationsPerTarget = await Promise.map(Object.entries(sourcesByTargetId), ([targetId, sources]) => associateTargetMedia(targetId, sources, mediaBySource, domain, role, primaryRole, primaryItemsByTargetId));
-
-  const associations = associationsPerTarget.map(association => association[role]).flat().filter(Boolean);
-  const primaryAssociations = associationsPerTarget.map(association => association[primaryRole]).filter(Boolean);
-
-  logger.info(`Associated ${associations.length} ${role}s to ${domain}s`);
-  if (primaryRole) logger.info(`Associated ${primaryAssociations.length} extracted ${primaryRole}s to ${domain}s`);
-
-  return Promise.all([
-    (associations.length > 0 && knex.raw(`${knex(`${domain}s_${role}s`).insert(associations).toString()} ON CONFLICT DO NOTHING`)),
-    (primaryAssociations.length > 0 && knex.raw(`${knex(`${domain}s_${primaryRole}s`).insert(primaryAssociations).toString()} ON CONFLICT DO NOTHING`)),
-  ]);
-}
-
-module.exports = {
-  associateMedia,
-  createThumbnail,
-  getHash,
-  getMeta,
-  pluckItems,
-  storeMedia,
-};
@@ -114,7 +114,7 @@ module.exports = {
   perfectgonzo,
   pervcity,
   pimpxxx: cherrypimps,
-  ornpros: whalemember,
+  pornpros: whalemember,
   private: privateNetwork,
   puretaboo,
   realitykings,
@@ -214,11 +214,13 @@ async function storeReleases(releases) {
   const releasesWithId = attachReleaseIds([].concat(uniqueReleases, duplicateReleases), [].concat(storedReleaseEntries, duplicateReleaseEntries));

   await Promise.all([
-    associateActors(releasesWithId),
+    associateActors(releasesWithId, batchId),
     associateReleaseTags(releasesWithId),
-    associateReleaseMedia(releasesWithId),
   ]);

+  // media is more error-prone, associate separately
+  await associateReleaseMedia(releasesWithId);
+
   logger.info(`Stored ${storedReleaseEntries.length} releases`);

   await updateReleasesSearch(releasesWithId.map(release => release.id));