Scrapers can now iterate through pages. Unique releases are filtered before saving to the database. Improved scrapers and rendering.
@@ -16,7 +16,7 @@ function scrapeLatest(html, site) {
   const href = sceneLinkElement.attr('href');
   const url = `https://kink.com${href}`;
   const shootId = href.split('/')[2];
-  const title = sceneLinkElement.text();
+  const title = sceneLinkElement.text().trim();
 
   const date = moment.utc($(element).find('.date').text(), 'MMM DD, YYYY').toDate();
   const actors = $(element).find('.shoot-thumb-models a').map((actorIndex, actorElement) => $(actorElement).text()).toArray();
@@ -85,8 +85,8 @@ async function scrapeScene(html, url, shootId, ratingRes, site) {
   };
 }
 
-async function fetchLatest(site) {
-  const res = await bhttp.get(`${site.url}/latest`);
+async function fetchLatest(site, page = 1) {
+  const res = await bhttp.get(`${site.url}/latest/page/${page}`);
 
   return scrapeLatest(res.body.toString(), site);
 }
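
For context, a minimal sketch (not part of this diff) of how a caller could combine the new `page` parameter of `fetchLatest` with the unique-release filtering mentioned in the commit message. The helper name `fetchLatestReleases`, the fixed page count, and the assumption that the scraped release objects carry a `shootId` property are illustrative only; the project's own filtering happens in shared code that is not shown here.

async function fetchLatestReleases(site, pages = 3) {
  const releasesByPage = [];

  // fetchLatest(site, page) accepts a page number as of this commit;
  // pages are fetched sequentially to avoid hammering the site
  for (let page = 1; page <= pages; page += 1) {
    releasesByPage.push(await fetchLatest(site, page));
  }

  const releases = releasesByPage.flat();

  // keep only the first occurrence of each shoot ID across pages
  const seen = new Set();

  return releases.filter((release) => {
    if (seen.has(release.shootId)) return false;

    seen.add(release.shootId);
    return true;
  });
}

Keying duplicates on `shootId` rather than the URL is one reasonable choice here, since the same shoot can appear on more than one listing page; the database layer may enforce uniqueness as well.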