Reordered scraper arguments. Fixed Jules Jordan scraper for Amateur Allure.
parent f59e809713
commit a3d281192d
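The reorder is visible across the hunks below: `include` moves ahead of the preflight/`preData` argument in every scraper entry point, with `null` passed where no preflight data exists. A minimal sketch of the convention, with signatures inferred from the hunks below (the full traxxx scraper interface has more entry points than shown here):

    // Before this commit: preflight data preceded include.
    async function fetchLatestOld(site, page, preData, include) { /* ... */ }

    // After this commit: include moves forward, preflight-style data trails.
    async function fetchLatestNew(site, page, include, preData) { /* ... */ }

    // Call sites swap the trailing arguments to match, passing null where no
    // preflight data exists:
    //   scraper.fetchScene(baseRelease.url, entity, baseRelease, include, null);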
@@ -43,7 +43,7 @@
   target="_blank"
   rel="noopener noreferrer"
   class="date"
-  >{{ formatDate(release.date, 'MMMM D, YYYY', release.datePrecision) }}</a>
+  ><Icon icon="share2" />{{ formatDate(release.date, 'MMMM D, YYYY', release.datePrecision) }}</a>
 
   <a
     v-else
@@ -104,22 +104,16 @@ export default {
   padding: .5rem;
   color: var(--text-light);
   text-decoration: none;
 
+  .icon {
+    fill: var(--lighten-weak);
+    margin: 0 .25rem 0 0;
+  }
 
   /*
-  .date {
-    &.new:before {
-      content: '';
-      background: var(--primary);
-      width: .5rem;
-      display: inline-block;
-      position: absolute;
-      top: 0;
-      bottom: 0;
-      left: -.5rem;
+  &:hover .icon {
+    fill: var(--text-light);
-    }
-  }
   */
 
   .site-link {
     display: flex;
@@ -52,7 +52,7 @@
   >
   <h3
     v-if="release.title"
-    v-tooltip.top="release.title"
+    v-tooltip.bottom="release.title"
     :title="release.title"
     class="title"
   >{{ release.title }}</h3>
Binary file not shown. Before: 929 KiB. After: 1.8 MiB.
Binary file not shown. Before: 7.9 KiB. After: 7.7 KiB.
Binary file not shown. Before: 36 KiB. After: 29 KiB.
@@ -1295,6 +1295,10 @@ const aliases = [
     for: 'femdom',
     secondary: true,
   },
+  {
+    name: 'dp',
+    for: 'dp',
+  },
   {
     name: 'double penetration (dp)',
     for: 'dp',
@@ -597,7 +597,7 @@ const tagPosters = [
   ['blonde', 0, 'Anikka Albrite in "Black Owned 4" for Jules Jordan'],
   ['blowbang', 0, 'Lacy Lennon in "Lacy Lennon\'s First Blowbang" for HardX'],
   ['blowjob', 0, 'Adriana Chechik in "The Dinner Party" for Real Wife Stories (Brazzers)'],
-  ['brunette', 0, 'Nicole Black in GIO971 for LegalPorno'],
+  ['brunette', 0, 'Liv Wild in "Dirty Talk 9" for Manuel Ferrara'],
   ['bukkake', 0, 'Jaye Summers in "Facialized 5" for HardX'],
   ['caucasian', 0, 'Remy Lacroix for HardX'],
   ['creampie', 'poster', 'ALina Lopez in "Making Yourself Unforgettable" for Blacked'],
@@ -112,8 +112,8 @@ async function scrapeRelease(baseRelease, entities, type = 'scene') {
   logger.verbose(`Fetching ${type} ${baseRelease.url}`);
 
   const scrapedRelease = type === 'scene'
-    ? await scraper.fetchScene(baseRelease.url, entity, baseRelease, null, include)
-    : await scraper.fetchMovie(baseRelease.url, entity, baseRelease, null, include);
+    ? await scraper.fetchScene(baseRelease.url, entity, baseRelease, include, null)
+    : await scraper.fetchMovie(baseRelease.url, entity, baseRelease, include, null);
 
   const mergedRelease = {
     ...baseRelease,
@@ -37,8 +37,9 @@ async function fetchLatestWrap(site, page = 1) {
   return latest.map(scene => extractActors(scene));
 }
 
-async function fetchSceneWrap(url, site) {
-  const scene = await fetchScene(url, site);
+async function fetchSceneWrap(url, channel, baseRelease, include) {
+  console.log(include);
+  const scene = await fetchScene(url, channel, baseRelease, include);
 
   return extractActors(scene);
 }
@@ -41,7 +41,7 @@ function scrapeLatest(scenes, site, models) {
   });
 }
 
-function scrapeScene({ html, qu }, url, site, models) {
+function scrapeScene({ html, qu }, url, site, include, models) {
   const release = { url };
 
   [release.entryId] = url.split('/').slice(-1);
@@ -356,7 +356,7 @@ function scrapeProfileTour({ el, qu }, site) {
   return profile;
 }
 
-async function fetchLatest(site, page = 1, _beforeFetchLatest, accSiteReleases) {
+async function fetchLatest(site, page = 1, include, preflight, accSiteReleases) {
   const url = (site.parameters?.latest && util.format(site.parameters.latest, page))
     || (site.parameters?.t1 && `${site.url}/t1/categories/movies_${page}_d.html`)
     || `${site.url}/categories/movies_${page}_d.html`;
@@ -139,8 +139,8 @@ function scrapeAll(scenes, site) {
 
     release.entryId = el.dataset.setid || qu.q('.rating_box')?.dataset.id;
 
-    release.url = qu.url('.update_title, .dvd_info > a, a ~ a');
-    release.title = qu.q('.update_title, .dvd_info > a, a ~ a', true);
+    release.url = qu.url('.update_title a, .dvd_info > a, a ~ a');
+    release.title = qu.q('.update_title a, .dvd_info > a, a ~ a', true);
     release.date = qu.date('.update_date', 'MM/DD/YYYY');
 
     release.actors = qu.all('.update_models a', true);
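The selector change above is part of the Amateur Allure fix named in the commit title: `qu.url` reads an href off the matched element, so the selector has to hit the `<a>` itself rather than the `.update_title` container around it. A standalone illustration using the plain DOM in a browser console, with guessed tile markup (the site's real HTML may differ):

    const doc = new DOMParser().parseFromString(
      '<div class="update_title"><a href="/scene/123">Sample</a></div>',
      'text/html',
    );

    // The container itself carries no href...
    console.log(doc.querySelector('.update_title')?.getAttribute('href')); // null
    // ...but the anchor inside it does.
    console.log(doc.querySelector('.update_title a')?.getAttribute('href')); // '/scene/123'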
@@ -247,11 +247,16 @@ async function scrapeScene({ html, qu }, url, site, include) {
   if (include.trailer && site.slug !== 'manuelferrara') {
     const trailerLines = html.split('\n').filter(line => /movie\["trailer\w*"\]\[/i.test(line));
 
+    console.log(trailerLines);
+
     if (trailerLines.length) {
       release.trailer = trailerLines.map((trailerLine) => {
-        const src = trailerLine.match(/path:"([\w:/.&=?%]+)"/)?.[1];
+        // const src = trailerLine.match(/path:"([\w-:/.&=?%]+)"/)?.[1];
+        const src = trailerLine.match(/path:"(.+)"/)?.[1];
         const quality = trailerLine.match(/movie_height:'(\d+)/)?.[1];
 
+        console.log(src, quality);
+
         return src && {
           src: /^http/.test(src) ? src : `${site.url}${src}`,
           quality: quality && Number(quality.replace('558', '540')),
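The loosened trailer regex is the other half of the Amateur Allure fix: the old character class had no `-`, so any path containing a hyphen failed to match outright (the commented-out line above shows a first attempt at widening the class before settling on a greedy catch-all). A quick check against an assumed trailer line; the real Jules Jordan player script may differ:

    const line = 'movie["trailer"][0] = { path:"/trailers/amateur-allure-sample.mp4", movie_height:\'540\' };';

    console.log(line.match(/path:"([\w:/.&=?%]+)"/)?.[1]); // undefined — '-' is outside the class, so no match
    console.log(line.match(/path:"(.+)"/)?.[1]);           // '/trailers/amateur-allure-sample.mp4'

The greedy `.+` captures up to the last `"` on the line, which is safe as long as the path is the only double-quoted value in the matched lines.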
@@ -368,7 +373,7 @@ async function fetchUpcoming(site) {
   return res.statusCode;
 }
 
-async function fetchScene(url, site, baseRelease, preflight, include) {
+async function fetchScene(url, site, baseRelease, include) {
   const res = await get(url);
 
   return res.ok ? scrapeScene(res.item, url, site, include) : res.status;
@@ -80,10 +80,8 @@ function needNextPage(uniqueReleases, pageAccReleases) {
 async function scrapeReleases(scraper, entity, preData, upcoming = false) {
   const scrapePage = async (page = 1, accReleases = []) => {
     const latestReleases = upcoming
-      ? await scraper.fetchUpcoming(entity, page, preData, include)
-      : await scraper.fetchLatest(entity, page, preData, include);
-
-    await scraper.fetchMovies(entity, page);
+      ? await scraper.fetchUpcoming(entity, page, include, preData)
+      : await scraper.fetchLatest(entity, page, include, preData);
 
     if (!Array.isArray(latestReleases)) {
       // scraper is unable to fetch the releases and returned a HTTP code or null
@@ -173,6 +171,10 @@ async function scrapeChannelReleases(scraper, channelEntity, preData) {
     : [],
   ]);
 
+  if (scraper.fetchMovies) {
+    await scraper.fetchMovies(channelEntity);
+  }
+
   logger.info(`Fetching ${latestReleases.length} latest and ${upcomingReleases.length} upcoming updates for '${channelEntity.name}' (${channelEntity.parent?.name})`);
 
   return [...latestReleases, ...upcomingReleases];
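This last hunk pairs with the earlier removal of `await scraper.fetchMovies(entity, page)` from the per-page loop: the call now runs once per channel, and only when the scraper actually implements the hook. As a standalone sketch of the guard:

    // Most scrapers define no fetchMovies, so check before calling.
    if (scraper.fetchMovies) {
      await scraper.fetchMovies(channelEntity);
    }

    // Optional chaining behaves the same, since awaiting undefined is a no-op:
    // await scraper.fetchMovies?.(channelEntity);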