Further refactoring. Fixed actor pages and more.

This commit is contained in:
ThePendulum 2019-12-31 03:12:52 +01:00
parent 1c43884102
commit 5a6bf2b42f
11 changed files with 206 additions and 137 deletions

View File

@@ -232,7 +232,7 @@
           />
         </div>
 
-        <Releases :releases="releases" />
+        <Releases :releases="actor.releases" />
       </div>
     </div>
   </div>
@@ -252,10 +252,9 @@ function scrollPhotos(event) {
 }
 
 async function mounted() {
-  [this.actor] = await Promise.all([
-    this.$store.dispatch('fetchActors', { actorSlug: this.$route.params.actorSlug }),
-    this.fetchReleases(),
-  ]);
+  this.actor = await this.$store.dispatch('fetchActors', { actorSlug: this.$route.params.actorSlug });
+
+  console.log(this.actor.releases[0]);
 
   if (this.actor) {
     this.pageTitle = this.actor.name;

View File

@@ -1,9 +1,15 @@
 import { graphql, get } from '../api';
+import {
+  releasePosterFragment,
+  releaseActorsFragment,
+  releaseTagsFragment,
+} from '../fragments';
+import { curateRelease } from '../curate';
 
 function curateActor(actor) {
   const curatedActor = {
     ...actor,
-    avatar: actor.avatar[0],
+    avatar: actor.avatar.media,
     height: actor.heightMetric && {
       metric: actor.heightMetric,
       imperial: actor.heightImperial,
@@ -24,6 +30,14 @@ function curateActor(actor) {
     },
   };
 
+  if (actor.releases) {
+    curatedActor.releases = actor.releases.map(release => curateRelease(release.release));
+  }
+
+  if (actor.photos) {
+    curatedActor.photos = actor.photos.map(photo => photo.media);
+  }
+
   return curatedActor;
 }
@@ -50,15 +64,19 @@ function initActorActions(store, _router) {
         hasPiercings
         tattoos
         piercings
-        avatar: actorsMediasByTargetId(condition: { role:"avatar" }) {
-          thumbnail
-          path
+        avatar: actorsAvatarByActorId {
+          media {
+            thumbnail
+            path
+          }
         }
-        photos: actorsMediasByTargetId(condition: { role:"photo" }) {
-          id
-          thumbnail
-          path
-          index
+        photos: actorsPhotos {
+          media {
+            id
+            thumbnail
+            path
+            index
+          }
         }
         birthCity
         birthState
@@ -74,7 +92,7 @@ function initActorActions(store, _router) {
           name
           alias
         }
-        social: actorsSocialsByTargetId {
+        social: actorsSocials {
           id
           url
           platform
@@ -84,6 +102,29 @@ function initActorActions(store, _router) {
           name
           slug
         }
+        releases: releasesActors {
+          release {
+            id
+            url
+            title
+            date
+            ${releaseActorsFragment}
+            ${releaseTagsFragment}
+            ${releasePosterFragment}
+            site {
+              id
+              name
+              slug
+              url
+              network {
+                id
+                name
+                slug
+                url
+              }
+            }
+          }
+        }
       }
     }
   `, {
@@ -104,8 +145,10 @@ function initActorActions(store, _router) {
         id
         name
         slug
-        avatar: actorsMediasByTargetId(condition: { role:"avatar" }) {
-          thumbnail
+        avatar: actorsAvatarByActorId {
+          media {
+            thumbnail
+          }
         }
         birthCountry: countryByBirthCountryAlpha2 {
           alpha2
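
The query now nests media behind join rows (actorsAvatarByActorId, actorsPhotos, releasesActors), so the curators unwrap one level before components see the data. A minimal sketch of that flattening, using an illustrative actor row (field values hypothetical):

    // hypothetical row shaped like the query above
    const actorRow = {
      avatar: { media: { thumbnail: 'avatar_thumb.jpeg', path: 'avatar.jpeg' } },
      photos: [{ media: { id: 1, thumbnail: '1_thumb.jpeg', path: '1.jpeg', index: 0 } }],
      releases: [{ release: { id: 10, title: 'Some Scene' } }],
    };

    // curateActor unwraps each join row to its payload:
    // actorRow.avatar.media       -> curatedActor.avatar
    // actorRow.photos[].media     -> curatedActor.photos[]
    // actorRow.releases[].release -> curatedActor.releases[] (via curateRelease)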

View File

@@ -12,16 +12,18 @@ function curateActor(actor) {
 }
 
 function curateRelease(release) {
+  console.log(release);
+
   const curatedRelease = {
     ...release,
-    actors: release.actors.map(({ actor }) => curateActor(actor)),
+    actors: release.actors ? release.actors.map(({ actor }) => curateActor(actor)) : [],
     poster: release.poster && release.poster.media,
-    tags: release.tags.map(({ tag }) => tag),
+    tags: release.tags ? release.tags.map(({ tag }) => tag) : [],
     network: release.site.network,
   };
 
   if (release.photos) curatedRelease.photos = release.photos.map(({ media }) => media);
-  if (release.trailer) [curatedRelease.trailer] = release.trailer.media;
+  if (release.trailer) curatedRelease.trailer = release.trailer.media;
 
   return curatedRelease;
 }
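
The new guards make curateRelease safe for fragments that omit actors or tags. A quick sketch of the fallback behaviour, with a hypothetical minimal release row:

    // hypothetical row: no actors or tags selected by the fragment
    const release = {
      id: 1,
      site: { network: { id: 3, name: 'Example Network' } },
      poster: { media: { path: 'releases/1/poster.jpeg' } },
    };

    const curated = curateRelease(release);
    // curated.actors -> [], curated.tags -> [], curated.poster -> the media row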

View File

@@ -378,6 +378,19 @@ exports.up = knex => Promise.resolve()
     table.unique('release_id');
   }))
 
+  .then(() => knex.schema.createTable('releases_covers', (table) => {
+    table.integer('release_id', 16)
+      .notNullable()
+      .references('id')
+      .inTable('releases');
+
+    table.integer('media_id', 16)
+      .notNullable()
+      .references('id')
+      .inTable('media');
+
+    table.unique(['release_id', 'media_id']);
+  }))
+
   .then(() => knex.schema.createTable('releases_trailers', (table) => {
     table.integer('release_id', 16)
       .notNullable()
@@ -445,6 +458,7 @@ exports.down = knex => knex.raw(`
   DROP TABLE IF EXISTS releases_directors CASCADE;
   DROP TABLE IF EXISTS releases_posters CASCADE;
   DROP TABLE IF EXISTS releases_photos CASCADE;
+  DROP TABLE IF EXISTS releases_covers CASCADE;
   DROP TABLE IF EXISTS releases_trailers CASCADE;
   DROP TABLE IF EXISTS releases_tags CASCADE;
   DROP TABLE IF EXISTS actors_avatars CASCADE;
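
The new releases_covers table follows the same two-column association pattern as the other media join tables, and its composite unique key on (release_id, media_id) is exactly what the reworked upsert utility further down targets. A minimal sketch with hypothetical ids:

    // associate media row 7 as a cover of release 42; re-running this is an
    // update rather than a duplicate insert, thanks to the composite key
    await upsert('releases_covers', [{ release_id: 42, media_id: 7 }], ['release_id', 'media_id']);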

View File

@@ -17,8 +17,8 @@ async function curateActor(actor) {
     knex('media')
       .where({ domain: 'actors', target_id: actor.id })
      .orderBy('index'),
-    knex('social')
-      .where({ domain: 'actors', target_id: actor.id })
+    knex('actors_social')
+      .where('actor_id', actor.id)
       .orderBy('platform', 'desc'),
   ]);
@@ -197,8 +197,7 @@ function curateSocialEntry(url, actorId) {
   return {
     url: match.url,
     platform: match.platform,
-    domain: 'actors',
-    target_id: actorId,
+    actor_id: actorId,
   };
 }
@@ -207,10 +206,7 @@ async function curateSocialEntries(urls, actorId) {
     return [];
   }
 
-  const existingSocialLinks = await knex('social').where({
-    domain: 'actors',
-    target_id: actorId,
-  });
+  const existingSocialLinks = await knex('actors_social').where('actor_id', actorId);
 
   return urls.reduce((acc, url) => {
     const socialEntry = curateSocialEntry(url, actorId);
@@ -243,7 +239,7 @@ async function fetchActors(queryObject, limit = 100) {
 async function storeSocialLinks(urls, actorId) {
   const curatedSocialEntries = await curateSocialEntries(urls, actorId);
 
-  await knex('social').insert(curatedSocialEntries);
+  await knex('actors_social').insert(curatedSocialEntries);
 }
 
 async function storeActor(actor, scraped = false, scrapeSuccess = false) {
@@ -358,7 +354,7 @@ async function scrapeActors(actorNames) {
       updateActor(profile, true, true),
       // storeAvatars(profile, actorEntry),
       storePhotos(profile.avatars, {
-        domain: 'actors',
+        domain: 'actor',
         role: 'photo',
         primaryRole: 'avatar',
         targetId: actorEntry.id,

View File

@@ -10,6 +10,7 @@ const sharp = require('sharp');
 const blake2 = require('blake2');
 
 const knex = require('./knex');
+const upsert = require('./utils/upsert');
 
 function getHash(buffer) {
   const hash = blake2.createHash('blake2b', { digestLength: 24 });
@@ -41,7 +42,7 @@ async function createThumbnail(buffer) {
       withoutEnlargement: true,
     })
     .jpeg({
-      quality: 50,
+      quality: 75,
     })
     .toBuffer();
 }
@@ -53,7 +54,7 @@ async function createMediaDirectory(domain, subpath) {
   return filepath;
 }
 
-function curatePhotoEntries(files, domain = 'releases', role = 'photo', targetId) {
+function curatePhotoEntries(files) {
   return files.map((file, index) => ({
     path: file.filepath,
     thumbnail: file.thumbpath,
@@ -61,51 +62,33 @@ function curatePhotoEntries(files, domain = 'releases', role = 'photo', targetId
     hash: file.hash,
     source: file.source,
     index,
-    domain,
-    target_id: targetId,
-    role: file.role || role,
   }));
 }
 
-// before fetching
-async function filterSourceDuplicates(photos, domains = ['releases'], roles = ['photo'], identifier) {
-  const photoSourceEntries = await knex('media')
-    .whereIn('source', photos.flat())
-    .whereIn('domain', domains)
-    .whereIn('role', roles); // accept string argument
+async function findDuplicates(photos, identifier, prop = null, label) {
+  const duplicates = await knex('media')
+    .whereIn(identifier, photos.flat().map(photo => (prop ? photo[prop] : photo)));
 
-  const photoSources = new Set(photoSourceEntries.map(photo => photo.source));
-  const newPhotos = photos.filter(source => (Array.isArray(source) // fallbacks provided?
-    ? !source.some(sourceX => photoSources.has(sourceX)) // ensure none of the sources match
-    : !photoSources.has(source)));
+  const duplicateLookup = new Set(duplicates.map(photo => photo[prop || identifier]));
+  const originals = photos.filter(source => (Array.isArray(source) // fallbacks provided?
+    ? !source.some(sourceX => duplicateLookup.has(prop ? sourceX[prop] : sourceX)) // ensure none of the sources match
+    : !duplicateLookup.has(prop ? source[prop] : source)));
 
-  if (photoSourceEntries.length > 0) {
-    console.log(`Ignoring ${photoSourceEntries.length} ${roles} items already present by source for ${identifier}`);
+  if (duplicates.length > 0) {
+    console.log(`${duplicates.length} media items already present by ${identifier} for ${label}`);
   }
 
-  return newPhotos;
-}
-
-// after fetching
-async function filterHashDuplicates(files, domains = ['releases'], roles = ['photo'], identifier) {
-  const photoHashEntries = await knex('media')
-    .whereIn('hash', files.map(file => file.hash))
-    .whereIn('domain', [].concat(domains))
-    .whereIn('role', [].concat(roles)); // accept string argument
-
-  const photoHashes = new Set(photoHashEntries.map(entry => entry.hash));
-
-  if (photoHashEntries.length > 0) {
-    console.log(`Ignoring ${photoHashEntries.length} ${roles} items already present by hash for ${identifier}`);
+  if (originals.length > 0) {
+    console.log(`Fetching ${originals.length} new media items for ${label}`);
   }
 
-  return files.filter(file => file && !photoHashes.has(file.hash));
+  return [duplicates, originals];
 }
 
-async function fetchPhoto(photoUrl, index, identifier, attempt = 1) {
+async function fetchPhoto(photoUrl, index, label, attempt = 1) {
   if (Array.isArray(photoUrl)) {
     return photoUrl.reduce(async (outcome, url) => outcome.catch(async () => {
-      const photo = await fetchPhoto(url, index, identifier);
+      const photo = await fetchPhoto(url, index, label);
 
       if (photo) {
         return photo;
@@ -136,11 +119,11 @@ async function fetchPhoto(photoUrl, index, identifier, attempt = 1) {
       throw new Error(`Response ${res.statusCode} not OK`);
     } catch (error) {
-      console.warn(`Failed attempt ${attempt}/3 to fetch photo ${index + 1} for ${identifier} (${photoUrl}): ${error}`);
+      console.warn(`Failed attempt ${attempt}/3 to fetch photo ${index + 1} for ${label} (${photoUrl}): ${error}`);
 
       if (attempt < 3) {
         await Promise.delay(1000);
-        return fetchPhoto(photoUrl, index, identifier, attempt + 1);
+        return fetchPhoto(photoUrl, index, label, attempt + 1);
       }
 
       return null;
@@ -148,7 +131,7 @@ async function fetchPhoto(photoUrl, index, identifier, attempt = 1) {
 }
 
 async function savePhotos(files, {
-  domain = 'releases',
+  domain = 'release',
   subpath,
   role = 'photo',
   naming = 'index',
@@ -158,11 +141,11 @@ async function savePhotos(files, {
     const thumbnail = await createThumbnail(file.photo);
 
     const filename = naming === 'index'
-      ? `${file.role || role}-${index + 1}`
+      ? `${file.role || role}${index + 1}`
       : `${timestamp + index}`;
 
-    const filepath = path.join(domain, subpath, `${filename}.${file.extension}`);
-    const thumbpath = path.join(domain, subpath, `${filename}_thumb.${file.extension}`);
+    const filepath = path.join(`${domain}s`, subpath, `${filename}.${file.extension}`);
+    const thumbpath = path.join(`${domain}s`, subpath, `${filename}_thumb.${file.extension}`);
 
     await Promise.all([
       fs.writeFile(path.join(config.media.path, filepath), file.photo),
@@ -179,49 +162,28 @@ async function savePhotos(files, {
 }
 
 async function storePhotos(photos, {
-  domain = 'releases',
+  domain = 'release',
   role = 'photo',
   naming = 'index',
   targetId,
   subpath,
   primaryRole, // role to assign to first photo if not already in database, used mainly for avatars
-}, identifier) {
+}, label) {
   if (!photos || photos.length === 0) {
-    console.warn(`No ${role}s available for ${identifier}`);
+    console.warn(`No ${role}s available for ${label}`);
     return;
   }
 
   const pluckedPhotos = pluckPhotos(photos);
-  const roles = primaryRole ? [role, primaryRole] : [role];
-  const newPhotos = await filterSourceDuplicates(pluckedPhotos, [domain], roles, identifier);
-
-  if (newPhotos.length === 0) return;
-
-  console.log(`Fetching ${newPhotos.length} ${role}s for ${identifier}`);
+  const [sourceDuplicates, sourceOriginals] = await findDuplicates(pluckedPhotos, 'source', null, label);
 
-  const metaFiles = await Promise.map(newPhotos, async (photoUrl, index) => fetchPhoto(photoUrl, index, identifier), {
+  const metaFiles = await Promise.map(sourceOriginals, async (photoUrl, index) => fetchPhoto(photoUrl, index, label), {
     concurrency: 10,
   }).filter(photo => photo);
 
-  const [uniquePhotos, primaryPhoto] = await Promise.all([
-    filterHashDuplicates(metaFiles, [domain], roles, identifier),
-    primaryRole
-      ? await knex('media')
-        .where('domain', domain)
-        .where('target_id', targetId)
-        .where('role', primaryRole)
-        .first()
-      : null,
-  ]);
-
-  if (primaryRole && !primaryPhoto) {
-    console.log(`Setting first photo as ${primaryRole} for ${identifier}`);
-    uniquePhotos[0].role = primaryRole;
-  }
+  const [hashDuplicates, hashOriginals] = await findDuplicates(metaFiles, 'hash', 'hash', label);
 
-  const savedPhotos = await savePhotos(uniquePhotos, {
+  const savedPhotos = await savePhotos(hashOriginals, {
     domain,
     role,
     targetId,
@@ -231,59 +193,96 @@ async function storePhotos(photos, {
   const curatedPhotoEntries = curatePhotoEntries(savedPhotos, domain, role, targetId);
 
-  await knex('media').insert(curatedPhotoEntries);
+  const newPhotos = await knex('media').insert(curatedPhotoEntries).returning('*');
+  const photoEntries = Array.isArray(newPhotos)
+    ? [...sourceDuplicates, ...hashDuplicates, ...newPhotos]
+    : [...sourceDuplicates, ...hashDuplicates];
 
-  console.log(`Stored ${newPhotos.length} ${role}s for ${identifier}`);
+  const photoAssociations = photoEntries
+    .map(photoEntry => ({
+      [`${domain}_id`]: targetId,
+      media_id: photoEntry.id,
+    }));
+
+  if (primaryRole) {
+    // store one photo as a 'primary' photo, such as an avatar or cover
+    const primaryPhoto = await knex(`${domain}s_${primaryRole}s`)
+      .where(`${domain}_id`, targetId)
+      .first();
+
+    if (primaryPhoto) {
+      await upsert(`${domain}s_${role}s`, photoAssociations, [`${domain}_id`, 'media_id']);
+      return;
+    }
+
+    await Promise.all([
+      upsert(`${domain}s_${primaryRole}s`, photoAssociations.slice(0, 1), [`${domain}_id`, 'media_id']),
+      upsert(`${domain}s_${role}s`, photoAssociations.slice(1), [`${domain}_id`, 'media_id']),
+    ]);
+  }
+
+  await upsert(`${domain}s_${role}s`, photoAssociations, [`${domain}_id`, 'media_id']);
 }
 
 async function storeTrailer(trailers, {
   domain = 'releases',
-  role = 'trailer',
   targetId,
   subpath,
-}, identifier) {
+}, label) {
   // support scrapers supplying multiple qualities
   const trailer = Array.isArray(trailers)
     ? trailers.find(trailerX => [1080, 720].includes(trailerX.quality)) || trailers[0]
     : trailers;
 
   if (!trailer || !trailer.src) {
-    console.warn(`No trailer available for ${identifier}`);
+    console.warn(`No trailer available for ${label}`);
     return;
   }
 
-  console.log(`Storing trailer for ${identifier}`);
+  const [sourceDuplicates, sourceOriginals] = await findDuplicates([trailer], 'source', 'src', label);
 
-  const { pathname } = new URL(trailer.src);
-  const mimetype = trailer.type || mime.getType(pathname);
-
-  const res = await bhttp.get(trailer.src);
-  const filepath = path.join('releases', subpath, `trailer${trailer.quality ? `_${trailer.quality}` : ''}.${mime.getExtension(mimetype)}`);
-
-  await Promise.all([
-    fs.writeFile(path.join(config.media.path, filepath), res.body),
-    knex('media').insert({
-      path: filepath,
-      mime: mimetype,
-      source: trailer.src,
-      domain,
-      target_id: targetId,
-      role,
-      quality: trailer.quality || null,
-    }),
-  ]);
-}
+  const metaFiles = await Promise.map(sourceOriginals, async (trailerX) => {
+    console.log('trailer x', trailerX, trailerX.src);
+    const { pathname } = new URL(trailerX.src);
+    const mimetype = trailerX.type || mime.getType(pathname);
+
+    const res = await bhttp.get(trailerX.src);
+    const hash = getHash(res.body);
+    const filepath = path.join(domain, subpath, `trailer${trailerX.quality ? `_${trailerX.quality}` : ''}.${mime.getExtension(mimetype)}`);
+
+    return {
+      path: filepath,
+      mime: mimetype,
+      source: trailerX.src,
+      quality: trailerX.quality || null,
+      hash,
+    };
+  });
 
-async function findAvatar(actorId, domain = 'actors') {
-  return knex('media')
-    .where('domain', domain)
-    .where('target_id', actorId)
-    .where('role', 'avatar');
+  const [hashDuplicates, hashOriginals] = await findDuplicates(metaFiles, 'hash', null, label);
+
+  console.log('hash dup', hashDuplicates, hashOriginals);
+
+  const newTrailers = await knex('media')
+    .insert(hashOriginals)
+    .returning('*');
+
+  console.log(newTrailers);
+
+  await Promise.all([
+    // fs.writeFile(path.join(config.media.path, filepath), res.body),
+    /*
+    knex('releases_trailers').insert({
+      release_id: targetId,
+      media_id: mediaEntries[0].id,
+    }),
+    */
+  ]);
 }
 
 module.exports = {
   createMediaDirectory,
-  findAvatar,
   storePhotos,
   storeTrailer,
 };
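
The old filterSourceDuplicates and filterHashDuplicates collapse into the single findDuplicates helper, which returns both halves of the split so already-known media can still be associated with a new target. Its call shapes, as used in the code above:

    // photo URLs are plain strings (or arrays of fallback URLs), so no prop lookup
    const [sourceDuplicates, sourceOriginals] = await findDuplicates(photoUrls, 'source', null, label);

    // fetched meta files and trailer objects carry the matched value on a property
    const [hashDuplicates, hashOriginals] = await findDuplicates(metaFiles, 'hash', 'hash', label);
    const [trailerDuplicates, trailerOriginals] = await findDuplicates([trailer], 'source', 'src', label);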

View File

@@ -306,6 +306,7 @@ async function storeReleaseAssets(release, releaseId) {
       targetId: releaseId,
       subpath,
     }, identifier),
+    /*
     storePhotos(release.covers, {
       role: 'cover',
       targetId: releaseId,
@@ -315,6 +316,7 @@ async function storeReleaseAssets(release, releaseId) {
       targetId: releaseId,
       subpath,
     }, identifier),
+    */
   ]);
 } catch (error) {
   console.log(release.url, error);

View File

@@ -57,6 +57,7 @@ async function scrapeRelease(url, release, deep = true, type = 'scene') {
   if (!deep && argv.save) {
     // don't store release when called by site scraper
+    /*
     const movie = scrapedRelease.movie
       ? await scrapeRelease(scrapedRelease.movie, null, false, 'movie')
       : null;
@@ -65,6 +66,7 @@ async function scrapeRelease(url, release, deep = true, type = 'scene') {
       const { releases: [storedMovie] } = await storeReleases([movie]);
       curatedRelease.parentId = storedMovie.id;
     }
+    */
 
     const { releases: [storedRelease] } = await storeReleases([curatedRelease]);

View File

@@ -54,13 +54,13 @@ module.exports = {
   actors: {
     // ordered by data priority
     xempire,
-    julesjordan,
     brazzers,
     legalporno,
     pornhub,
     freeones,
     freeonesLegacy,
     kellymadison,
+    julesjordan,
     ddfnetwork,
   },
 };

View File

@@ -1,11 +1,21 @@
 'use strict';
 
-async function upsert(table, items, identifier = 'id', knex) {
-  const duplicates = await knex(table).whereIn(identifier, items.map(item => item[identifier]));
+const knex = require('../knex');
+
+async function upsert(table, items, identifier = ['id'], _knex) {
+  const identifiers = Array.isArray(identifier) ? identifier : [identifier];
 
-  const duplicatesByIdentifier = duplicates.reduce((acc, item) => ({ ...acc, [item[identifier]]: item }), {});
+  const duplicates = await knex(table).whereIn(identifiers, items.map(item => identifiers.map(identifierX => item[identifierX])));
+
+  const duplicatesByIdentifiers = duplicates.reduce((acc, duplicate) => {
+    const duplicateIdentifier = identifiers.map(identifierX => duplicate[identifierX]).toString();
+
+    return { ...acc, [duplicateIdentifier]: duplicate };
+  }, {});
 
   const { insert, update } = items.reduce((acc, item) => {
-    if (duplicatesByIdentifier[item[identifier]]) {
+    const itemIdentifier = identifiers.map(identifierX => item[identifierX]).toString();
+
+    if (duplicatesByIdentifiers[itemIdentifier]) {
       acc.update.push(item);
       return acc;
     }
@@ -23,11 +33,15 @@ async function upsert(table, items, identifier = 'id', knex) {
   const [inserted, updated] = await Promise.all([
     knex(table).returning('*').insert(insert),
-    knex.transaction(async trx => Promise.all(update.map(item => trx
-      .where({ [identifier]: item[identifier] })
-      .update(item)
-      .into(table)
-      .returning('*')))),
+    knex.transaction(async trx => Promise.all(update.map((item) => {
+      const clause = identifiers.reduce((acc, identifierX) => ({ ...acc, [identifierX]: item[identifierX] }), {});
+
+      return trx
+        .where(clause)
+        .update(item)
+        .into(table)
+        .returning('*');
+    }))),
   ]);
 
   return {
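
The utility now resolves its own knex instance and accepts either a single column or a composite key as the identifier, matching duplicates on the stringified tuple of identifier values. Usage, with hypothetical rows:

    // single-column key (the default, equivalent to the old behaviour)
    await upsert('actors', [{ id: 1, name: 'Jane Doe' }]);

    // composite key matching the unique constraint on an association table
    await upsert('releases_photos', [{ release_id: 42, media_id: 7 }], ['release_id', 'media_id']);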

View File

@@ -20,8 +20,6 @@ const schemaExtender = makeExtendSchemaPlugin(_build => ({
   resolvers: {
     Actor: {
       age(parent, _args, _context, _info) {
-        console.log(parent);
-
         if (!parent.birthdate) return null;
 
         return moment().diff(parent.birthdate, 'years');