forked from DebaucheryLibrarian/traxxx
Preventing unnecessary deep scrapes in Team Skeet scraper.
parent df4d860d35
commit 5918364cf5
@@ -129,6 +129,11 @@ async function fetchLatest(channel, page = 1, { parameters }) {
 	}
 }
 
 async function fetchScene(url, channel, baseScene, { parameters }) {
+	if (baseScene?.entryId) {
+		// overview and deep data is the same, don't hit server unnecessarily
+		return baseScene;
+	}
+
 	const sceneSlug = new URL(url).pathname.match(/\/([\w-]+$)/)[1];
 	const res = await http.get(`${parameters.videos}/${sceneSlug}`);
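For context, a minimal sketch of the pattern this guard implements, under simplified assumptions: fetchSceneSketch, fetchDeep, and the scene object shape are illustrative stand-ins, not the actual traxxx scraper interface. When the overview scrape has already populated entryId, the deep request is skipped entirely; otherwise the scene slug is taken from the last path segment of the URL, as in the diff above.

// Illustrative sketch only (not the real traxxx module API).
async function fetchSceneSketch(url, baseScene, fetchDeep) {
	if (baseScene?.entryId) {
		// the overview scrape already produced complete scene data,
		// so there is no need to hit the server a second time
		return baseScene;
	}

	// same slug extraction as the diff: the last path segment of the scene URL
	const sceneSlug = new URL(url).pathname.match(/\/([\w-]+$)/)[1];

	return fetchDeep(sceneSlug);
}

(async () => {
	// entryId present: the deep fetcher is never called
	const overviewScene = { entryId: '12345', title: 'Example Scene' };
	const cached = await fetchSceneSketch('https://www.teamskeet.com/movies/example-scene', overviewScene, async () => {
		throw new Error('deep scrape should have been skipped');
	});
	console.log(cached.entryId); // '12345'

	// no overview data: the slug is extracted and the deep fetch runs
	const deep = await fetchSceneSketch('https://www.teamskeet.com/movies/example-scene', null, async (slug) => ({ entryId: slug }));
	console.log(deep.entryId); // 'example-scene'
})();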