fix: categories
NTFSvolume committed Jan 26, 2025
1 parent da7bcb3 commit 321d8f5
Showing 1 changed file with 10 additions and 10 deletions.
20 changes: 10 additions & 10 deletions cyberdrop_dl/scraper/crawlers/pornpics_crawler.py
@@ -29,23 +29,23 @@ def __init__(self, manager: Manager) -> None:
     @create_task_id
     async def fetch(self, scrape_item: ScrapeItem) -> None:
         """Determines where to send the scrape item based on the url."""
+        multi_part = len(scrape_item.url.parts) > 2
         if self.is_cdn(scrape_item.url):
             await self.image(scrape_item)
-        elif scrape_item.url.query.get("q"):
-            await self.collection(scrape_item, "search")
-        elif len(scrape_item.url.parts) < 3:
-            raise ValueError
-
-        if "galleries" in scrape_item.url.parts:
+        elif "galleries" in scrape_item.url.parts and multi_part:
             await self.gallery(scrape_item)
-        elif "channels" in scrape_item.url.parts:
+        elif "channels" in scrape_item.url.parts and multi_part:
             await self.collection(scrape_item, "channel")
-        elif "pornstars" in scrape_item.url.parts:
+        elif "pornstars" in scrape_item.url.parts and multi_part:
             await self.collection(scrape_item, "pornstar")
-        elif "tags" in scrape_item.url.parts:
+        elif "tags" in scrape_item.url.parts and multi_part:
             await self.collection(scrape_item, "tag")
-        else:
+        elif len(scrape_item.url.parts) == 2:
             await self.collection(scrape_item, "category")
+        elif scrape_item.url.query.get("q"):
+            await self.collection(scrape_item, "search")
+        else:
+            raise ValueError
 
     @error_handling_wrapper
     async def collection(self, scrape_item: ScrapeItem, type: str) -> None:
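
For illustration only (not part of the commit): a minimal standalone sketch of the new routing order, approximating fetch()'s dispatch on plain URL strings instead of ScrapeItem objects. It assumes yarl-style URL.parts where parts[0] is "/", omits the CDN/image branch, and the route() helper and its return labels are hypothetical names, not part of cyberdrop_dl.

    # Hypothetical sketch: mirrors the reordered elif chain on plain strings.
    from urllib.parse import parse_qs, urlsplit

    def route(url: str) -> str:
        split = urlsplit(url)
        # Rebuild yarl-style parts: a leading "/" plus each non-empty path segment.
        parts = ["/"] + [segment for segment in split.path.split("/") if segment]
        multi_part = len(parts) > 2
        if "galleries" in parts and multi_part:
            return "gallery"
        if "channels" in parts and multi_part:
            return "channel"
        if "pornstars" in parts and multi_part:
            return "pornstar"
        if "tags" in parts and multi_part:
            return "tag"
        if len(parts) == 2:
            return "category"  # single path segment -> category page
        if parse_qs(split.query).get("q"):
            return "search"
        raise ValueError(url)

    # route("https://example.com/tags/some-tag/")  -> "tag"
    # route("https://example.com/some-category/")  -> "category"
    # route("https://example.com/?q=some-term")    -> "search"

The reordering moves the category fallback ahead of the search and error branches: a two-part URL that previously hit the len(scrape_item.url.parts) < 3 guard and raised ValueError is now routed as a "category" collection, while the known multi-segment sections (galleries, channels, pornstars, tags) are still checked first.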
