forked from DebaucheryLibrarian/traxxx
Moved networks to GraphQL.
parent e77dbca954
commit 70e27a6cd9
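Note: the network page now loads its data through the GraphQL-backed Vuex action fetchNetworks instead of the old REST endpoints. A minimal sketch of how the reworked action is consumed, based on the changes below (the slug value is only an example):

    // with a slug, the action resolves a single curated network via networkBySlug
    const network = await store.dispatch('fetchNetworks', 'example-network-slug');

    // without a slug, it returns all networks, each run through curateNetwork()
    const networks = await store.dispatch('fetchNetworks');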
@@ -254,8 +254,6 @@ function scrollPhotos(event) {
 async function mounted() {
   this.actor = await this.$store.dispatch('fetchActors', { actorSlug: this.$route.params.actorSlug });

-  console.log(this.actor.releases[0]);
-
   if (this.actor) {
     this.pageTitle = this.actor.name;
   }
@@ -3,7 +3,7 @@
     v-if="network"
     class="content network"
   >
-    <FilterBar :fetch-releases="fetchReleases" />
+    <FilterBar :fetch-releases="fetchNetwork" />

     <div class="content-inner">
       <div class="header">
@@ -54,20 +54,18 @@ import FilterBar from '../header/filter-bar.vue';
 import Releases from '../releases/releases.vue';
 import SiteTile from '../tile/site.vue';

-async function fetchReleases() {
-  this.releases = await this.$store.dispatch('fetchNetworkReleases', this.$route.params.networkSlug);
-}
-
-async function mounted() {
-  [[this.network]] = await Promise.all([
-    this.$store.dispatch('fetchNetworks', this.$route.params.networkSlug),
-    this.fetchReleases(),
-  ]);
+async function fetchNetwork() {
+  this.network = await this.$store.dispatch('fetchNetworks', this.$route.params.networkSlug);

   this.sites = this.network.sites
     .filter(site => !site.independent)
     .sort(({ name: nameA }, { name: nameB }) => nameA.localeCompare(nameB));

+  this.releases = this.network.sites.map(site => site.releases).flat();
+}
+
+async function mounted() {
+  await this.fetchNetwork();
   this.pageTitle = this.network.name;
 }

@@ -81,13 +79,13 @@ export default {
     return {
       network: null,
       sites: null,
-      releases: null,
+      releases: [],
       pageTitle: null,
     };
   },
   mounted,
   methods: {
-    fetchReleases,
+    fetchNetwork,
   },
 };
 </script>
@@ -249,9 +249,8 @@ export default {
 }

 .tags {
-  max-height: 2.5rem;
+  max-height: .5rem;
   padding: .25rem .5rem 1rem .5rem;
-  line-height: 1.5rem;
   word-wrap: break-word;
   overflow-y: hidden;
 }
@@ -12,8 +12,6 @@ function curateActor(actor) {
 }

 function curateRelease(release) {
-  console.log(release);
-
   const curatedRelease = {
     ...release,
     actors: release.actors ? release.actors.map(({ actor }) => curateActor(actor)) : [],
@@ -28,6 +26,35 @@ function curateRelease(release) {
   return curatedRelease;
 }

+function curateSite(site, network) {
+  const curatedSite = {
+    id: site.id,
+    name: site.name,
+    slug: site.slug,
+    url: site.url,
+  };
+
+  if (site.releases) curatedSite.releases = site.releases.map(release => curateRelease(release));
+  if (site.network || network) curatedSite.network = site.network || network;
+
+  return curatedSite;
+}
+
+function curateNetwork(network) {
+  const curatedNetwork = {
+    id: network.id,
+    name: network.name,
+    slug: network.slug,
+    url: network.url,
+  };
+
+  if (network.sites) {
+    curatedNetwork.sites = network.sites.map(site => curateSite(site, curatedNetwork));
+  }
+
+  return curatedNetwork;
+}
+
 function curateTag(tag) {
   const curatedTag = {
     ...tag,
@@ -43,5 +70,7 @@ function curateTag(tag) {
 export {
   curateActor,
   curateRelease,
+  curateSite,
+  curateNetwork,
   curateTag,
 };
@@ -13,6 +13,21 @@ const siteFragment = `
   }
 `;

+const sitesFragment = `
+  sites {
+    id
+    name
+    slug
+    url
+    network {
+      id
+      name
+      slug
+      url
+    }
+  }
+`;
+
 const releaseActorsFragment = `
   actors: releasesActorsSortables(orderBy: GENDER_ASC) {
     actor {
@@ -36,7 +51,7 @@ const releaseActorsFragment = `
 `;

 const releaseTagsFragment = `
-  tags: releasesTags {
+  tags: releasesTagsSortables(orderBy: PRIORITY_DESC) {
     tag {
       name
       priority
@@ -135,4 +150,5 @@ export {
   releasesFragment,
   releaseFragment,
   siteFragment,
+  sitesFragment,
 };
@@ -1,27 +1,68 @@
-import { get } from '../api';
+import { graphql } from '../api';
+import { sitesFragment, releasesFragment } from '../fragments';
+import { curateNetwork } from '../curate';

 function initNetworksActions(store, _router) {
-  async function fetchNetworks({ _commit }, networkId) {
-    const networks = await get(`/networks/${networkId || ''}`, {
-    });
-
-    return networks;
-  }
-
-  async function fetchNetworkReleases({ _commit }, networkId) {
-    const releases = await get(`/networks/${networkId}/releases`, {
-      filter: store.state.ui.filter,
+  async function fetchNetworkBySlug(networkSlug, limit = 100) {
+    const { network } = await graphql(`
+      query Network(
+        $networkSlug: String!
+        $limit:Int = 1000,
+        $after:Date = "1900-01-01",
+        $before:Date = "2100-01-01",
+      ) {
+        network: networkBySlug(slug: $networkSlug) {
+          id
+          name
+          slug
+          url
+          sites {
+            id
+            name
+            slug
+            url
+            ${releasesFragment}
+            network {
+              id
+              name
+              slug
+              url
+            }
+          }
+        }
+      }
+    `, {
+      networkSlug,
+      limit,
       after: store.getters.after,
       before: store.getters.before,
     });

-    return releases;
+    return curateNetwork(network);
+  }
+
+  async function fetchNetworks({ _commit }, networkSlug) {
+    if (networkSlug) {
+      return fetchNetworkBySlug(networkSlug);
+    }
+
+    const { networks } = await graphql(`
+      query Networks {
+        networks {
+          id
+          name
+          slug
+          url
+          ${sitesFragment}
+        }
+      }
+    `);
+
+    return networks.map(network => curateNetwork(network));
   }

   return {
     fetchNetworks,
-    fetchNetworkReleases,
   };
 }
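For reference, what the new action resolves to is the output of curateNetwork() from curate.js above; a sketch of that shape (field order illustrative):

    // {
    //   id, name, slug, url,
    //   sites: [
    //     { id, name, slug, url, releases: [...], network: { id, name, slug, url } },
    //   ],
    // }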
@@ -435,7 +435,12 @@ exports.up = knex => Promise.resolve()
     SELECT releases_actors.*, actors.gender, actors.name, actors.birthdate FROM releases_actors
       JOIN actors ON releases_actors.actor_id = actors.id;
+
+    CREATE VIEW releases_tags_sortable AS
+    SELECT releases_tags.*, tags.name, tags.priority FROM releases_tags
+      JOIN tags ON releases_tags.tag_id = tags.id;

     COMMENT ON VIEW releases_actors_sortable IS E'@foreignKey (release_id) references releases (id)\n@foreignKey (actor_id) references actors (id)';
+    COMMENT ON VIEW releases_tags_sortable IS E'@foreignKey (release_id) references releases (id)\n@foreignKey (tag_id) references tags (id)';

     /* allow conversion resolver to be added for height and weight */
     COMMENT ON COLUMN actors.height IS E'@omit read,update,create,delete,all,many';
@@ -215,9 +215,8 @@
 line-height: 1.5rem;
 }
 .tags[data-v-3abcf101] {
-  max-height: 2.5rem;
+  max-height: .5rem;
   padding: .25rem .5rem 1rem .5rem;
-  line-height: 1.5rem;
   word-wrap: break-word;
   overflow-y: hidden;
 }
@@ -370,7 +370,7 @@ async function scrapeActors(actorNames) {

       await createMediaDirectory('actors', `${newActorEntry.slug}/`);
       await storePhotos(profile.avatars, {
-        domain: 'actors',
+        domain: 'actor',
         role: 'photo',
         primaryRole: 'avatar',
         targetId: newActorEntry.id,
src/app.js (12 changed lines)
@@ -1,26 +1,20 @@
 'use strict';

-const Promise = require('bluebird');
-
 const argv = require('./argv');
 const knex = require('./knex');
 const initServer = require('./web/server');

 const scrapeSites = require('./scrape-sites');
-const scrapeRelease = require('./scrape-release');
+const { scrapeReleases } = require('./scrape-releases');
 const { scrapeActors, scrapeBasicActors } = require('./actors');

 async function init() {
   if (argv.scene) {
-    await Promise.map(argv.scene, async url => scrapeRelease(url, null, false, 'scene'), {
-      concurrency: 5,
-    });
+    await scrapeReleases(argv.scene, null, 'scene');
   }

   if (argv.movie) {
-    await Promise.map(argv.movie, async url => scrapeRelease(url, null, false, 'movie'), {
-      concurrency: 5,
-    });
+    await scrapeReleases(argv.movie, null, 'movie');
   }

   if (argv.scrape || argv.networks || argv.sites) {
src/media.js (33 changed lines)
@@ -48,6 +48,7 @@ async function createThumbnail(buffer) {
 }

 async function createMediaDirectory(domain, subpath) {
+  console.log(domain, subpath);
   const filepath = path.join(config.media.path, domain, subpath);

   await fs.mkdir(filepath, { recursive: true });
@@ -246,8 +247,6 @@ async function storeTrailer(trailers, {
   const [sourceDuplicates, sourceOriginals] = await findDuplicates([trailer], 'source', 'src', label);

   const metaFiles = await Promise.map(sourceOriginals, async (trailerX) => {
-    console.log('trailer x', trailerX, trailerX.src);
-
     const { pathname } = new URL(trailerX.src);
     const mimetype = trailerX.type || mime.getType(pathname);

@@ -256,6 +255,7 @@ async function storeTrailer(trailers, {
     const filepath = path.join(domain, subpath, `trailer${trailerX.quality ? `_${trailerX.quality}` : ''}.${mime.getExtension(mimetype)}`);

     return {
+      trailer: res.body,
       path: filepath,
       mime: mimetype,
       source: trailerX.src,
@@ -264,25 +264,28 @@ async function storeTrailer(trailers, {
     };
   });

-  const [hashDuplicates, hashOriginals] = await findDuplicates(metaFiles, 'hash', null, label);
+  const [hashDuplicates, hashOriginals] = await findDuplicates(metaFiles, 'hash', 'hash', label);

-  console.log('hash dup', hashDuplicates, hashOriginals);
-
   const newTrailers = await knex('media')
-    .insert(hashOriginals)
+    .insert(hashOriginals.map(trailerX => ({
+      path: trailerX.path,
+      mime: trailerX.mime,
+      source: trailerX.source,
+      quality: trailerX.quality,
+      hash: trailerX.hash,
+    })))
     .returning('*');

-  console.log(newTrailers);
+  await Promise.all(hashOriginals.map(trailerX => fs.writeFile(path.join(config.media.path, trailerX.path), trailerX.trailer)));

-  await Promise.all([
-    // fs.writeFile(path.join(config.media.path, filepath), res.body),
-    /*
-    knex('releases_trailers').insert({
-      release_id: targetId,
-      media_id: mediaEntries[0].id,
-    }),
-    */
-  ]);
+  const trailerEntries = Array.isArray(newTrailers)
+    ? [...sourceDuplicates, ...hashDuplicates, ...newTrailers]
+    : [...sourceDuplicates, ...hashDuplicates];
+
+  await upsert('releases_trailers', trailerEntries.map(trailerEntry => ({
+    release_id: targetId,
+    media_id: trailerEntry.id,
+  })), ['release_id', 'media_id']);
 }

 module.exports = {
@@ -1,6 +1,7 @@
 'use strict';

 const config = require('config');
+const Promise = require('bluebird');

 const argv = require('./argv');
 const scrapers = require('./scrapers/scrapers');
@@ -28,7 +29,7 @@ async function findSite(url, release) {
   return null;
 }

-async function scrapeRelease(url, release, deep = true, type = 'scene') {
+async function scrapeRelease(url, release, type = 'scene') {
   const site = await findSite(url, release);

   if (!site) {
@@ -53,10 +54,17 @@ async function scrapeRelease(url, release, deep = true, type = 'scene') {
     ? await scraper.fetchScene(url, site, release)
     : await scraper.fetchMovie(url, site, release);

-  const curatedRelease = { ...scrapedRelease, type };
+  return scrapedRelease;
+}

-  if (!deep && argv.save) {
-    // don't store release when called by site scraper
+async function scrapeReleases(urls, release, type = 'scene') {
+  const scrapedReleases = await Promise.map(urls, async url => scrapeRelease(url, release, type), {
+    concurrency: 5,
+  });
+
+  const curatedReleases = scrapedReleases.map(scrapedRelease => ({ ...scrapedRelease, type }));
+
+  if (argv.save) {
     /*
     const movie = scrapedRelease.movie
       ? await scrapeRelease(scrapedRelease.movie, null, false, 'movie')
@@ -68,14 +76,15 @@ async function scrapeRelease(url, release, deep = true, type = 'scene') {
     }
     */

-    const { releases: [storedRelease] } = await storeReleases([curatedRelease]);
+    const { releases: storedReleases } = await storeReleases(curatedReleases);

-    if (storedRelease) {
-      console.log(`http://${config.web.host}:${config.web.port}/scene/${storedRelease.id}`);
+    if (storedReleases) {
+      console.log(storedReleases.map(storedRelease => `http://${config.web.host}:${config.web.port}/scene/${storedRelease.id}`).join('\n'));
+    }
   }
 }

-  return scrapedRelease;
-}
-
-module.exports = scrapeRelease;
+module.exports = {
+  scrapeRelease,
+  scrapeReleases,
+};
@@ -7,7 +7,7 @@ const argv = require('./argv');
 const knex = require('./knex');
 const { fetchIncludedSites } = require('./sites');
 const scrapers = require('./scrapers/scrapers');
-const scrapeRelease = require('./scrape-release');
+const { scrapeRelease } = require('./scrape-releases');
 const { storeReleases } = require('./releases');

 function getAfterDate() {
@@ -70,7 +70,7 @@ async function deepFetchReleases(baseReleases) {
   return Promise.map(baseReleases, async (release) => {
     if (release.url) {
       try {
-        const fullRelease = await scrapeRelease(release.url, release, true, 'scene');
+        const fullRelease = await scrapeRelease(release.url, release, 'scene');

         return {
           ...release,
@@ -111,7 +111,7 @@ async function scrapeSiteReleases(scraper, site) {
   return baseReleases;
 }

-async function scrapeReleases() {
+async function scrapeSites() {
   const networks = await fetchIncludedSites();

   const scrapedNetworks = await Promise.map(networks, async network => Promise.map(network.sites, async (site) => {
@@ -147,4 +147,4 @@ async function scrapeReleases() {
   }
 }

-module.exports = scrapeReleases;
+module.exports = scrapeSites;
@@ -50,7 +50,9 @@ function scrapeProfile(html, actorName) {
   if (bio.weight) profile.weight = Number(bio.weight.split(',')[0]);

   profile.social = Array.from(document.querySelectorAll('.profile-meta-item a.social-icons'), el => el.href);
-  profile.avatar = document.querySelector('.profile-image-large img').src;
+
+  const avatar = document.querySelector('.profile-image-large img').src;
+  if (!avatar.match('placeholder')) profile.avatar = document.querySelector('.profile-image-large img').src;

   return profile;
 }