This repository has been archived by the owner on Jul 5, 2024. It is now read-only.

Commit

update jpg.church and saint handling
Jules-WinnfieldX committed Feb 5, 2024
1 parent 8e103d5 commit 2706b60
Showing 6 changed files with 18 additions and 14 deletions.
2 changes: 1 addition & 1 deletion cyberdrop_dl/__init__.py
@@ -1 +1 @@
-__version__ = "5.1.61"
+__version__ = "5.1.62"
6 changes: 3 additions & 3 deletions cyberdrop_dl/scraper/crawlers/jpgchurch_crawler.py
@@ -127,8 +127,8 @@ async def image(self, scrape_item: ScrapeItem) -> None:
     async def handle_direct_link(self, scrape_item: ScrapeItem) -> None:
         """Handles a direct link"""
         scrape_item.url = scrape_item.url.with_name(scrape_item.url.name.replace('.md.', '.').replace('.th.', '.'))
-        pattern = r"simp([1-5])\.jpg\.fish/"
-        scrape_item.url = URL(re.sub(pattern, r'simp\1.jpg.church/', str(scrape_item.url)))
+        pattern = r"(jpg\.fish/)|(jpg\.fishing/)|(jpg\.church/)"
+        scrape_item.url = URL(re.sub(pattern, r'host.church/', str(scrape_item.url)))
         filename, ext = await get_filename_and_ext(scrape_item.url.name)
         await self.handle_file(scrape_item.url, scrape_item, filename, ext)
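The old pattern only rewrote the simp1–simp5 subdomains of jpg.fish; the new alternation rewrites any jpg.fish, jpg.fishing, or jpg.church segment to host.church. A minimal standalone sketch of the substitution (yarl's URL is assumed here, matching the crawler's usage; the input URL is hypothetical):

import re
from yarl import URL  # assumed URL type, matching the crawler's URL(...) calls

pattern = r"(jpg\.fish/)|(jpg\.fishing/)|(jpg\.church/)"
url = URL("https://simp2.jpg.fish/images/example.jpg")  # hypothetical input
rewritten = URL(re.sub(pattern, r'host.church/', str(url)))
print(rewritten)  # https://simp2.host.church/images/example.jpg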

@@ -141,7 +141,7 @@ async def parse_datetime(self, date: str) -> int:
 
     async def check_direct_link(self, url: URL) -> bool:
         """Determines if the url is a direct link or not"""
-        cdn_possibilities = r"^(?:(jpg.church\/images\/...)|(simp..jpg.church)|(jpg.fish\/images\/...)|(simp..jpg.fish)|(jpg.fishing\/images\/...)|(simp..jpg.fishing))"
+        cdn_possibilities = r"^(?:(jpg.church\/images\/...)|(simp..jpg.church)|(jpg.fish\/images\/...)|(simp..jpg.fish)|(jpg.fishing\/images\/...)|(simp..jpg.fishing)|(simp..host.church))"
         if not re.match(cdn_possibilities, url.host):
             return False
         return True
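Because the dots in simp..host.church are unescaped, each matches any character, so the new alternative accepts hosts such as simp2.host.church produced by the rewrite above. A quick standalone check (the real method reads url.host from a URL object):

import re

cdn_possibilities = r"^(?:(jpg.church\/images\/...)|(simp..jpg.church)|(jpg.fish\/images\/...)|(simp..jpg.fish)|(jpg.fishing\/images\/...)|(simp..jpg.fishing)|(simp..host.church))"
print(bool(re.match(cdn_possibilities, "simp2.host.church")))  # True: matches the new alternative
print(bool(re.match(cdn_possibilities, "host.church")))        # False: bare host is not a CDN host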
2 changes: 2 additions & 0 deletions cyberdrop_dl/scraper/crawlers/saint_crawler.py
@@ -16,13 +16,15 @@
 class SaintCrawler(Crawler):
     def __init__(self, manager: Manager):
         super().__init__(manager, "saint", "Saint")
+        self.primary_base_domain = URL("https://bunkr.sk")
         self.request_limiter = AsyncLimiter(10, 1)
 
     """~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~"""
 
     async def fetch(self, scrape_item: ScrapeItem) -> None:
         """Determines where to send the scrape item based on the url"""
         task_id = await self.scraping_progress.add_task(scrape_item.url)
+        scrape_item.url = self.primary_base_domain.with_path(scrape_item.url.path)
 
         await self.video(scrape_item)
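With primary_base_domain set, fetch now pins every incoming saint URL onto a single canonical host before scraping, preserving the original path. A short sketch of that rewrite (yarl assumed; the incoming URL is hypothetical):

from yarl import URL

primary_base_domain = URL("https://bunkr.sk")
incoming = URL("https://saint.example/watch/abc123")  # hypothetical mirror link
canonical = primary_base_domain.with_path(incoming.path)
print(canonical)  # https://bunkr.sk/watch/abc123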
9 changes: 5 additions & 4 deletions cyberdrop_dl/scraper/scraper.py
@@ -32,10 +32,10 @@ def __init__(self, manager: Manager):
             "jpg.homes": self.jpgchurch, "jpg.fish": self.jpgchurch, "jpg.fishing": self.jpgchurch,
             "jpg.pet": self.jpgchurch, "jpeg.pet": self.jpgchurch, "jpg1.su": self.jpgchurch,
             "jpg2.su": self.jpgchurch, "jpg3.su": self.jpgchurch, "jpg4.su": self.jpgchurch,
-            "kemono": self.kemono, "leakedmodels": self.leakedmodels, "mediafire": self.mediafire,
-            "nudostar.com": self.nudostar, "nudostar.tv": self.nudostartv, "omegascans": self.omegascans,
-            "pimpandhost": self.pimpandhost, "pixeldrain": self.pixeldrain, "postimg": self.postimg,
-            "reddit": self.reddit, "redd.it": self.reddit, "redgifs": self.redgifs,
+            "host.church": self.jpgchurch, "kemono": self.kemono, "leakedmodels": self.leakedmodels,
+            "mediafire": self.mediafire, "nudostar.com": self.nudostar, "nudostar.tv": self.nudostartv,
+            "omegascans": self.omegascans, "pimpandhost": self.pimpandhost, "pixeldrain": self.pixeldrain,
+            "postimg": self.postimg, "reddit": self.reddit, "redd.it": self.reddit, "redgifs": self.redgifs,
             "rule34.xxx": self.rule34xxx, "rule34.xyz": self.rule34xyz, "saint": self.saint,
             "scrolller": self.scrolller, "simpcity": self.simpcity,
             "socialmediagirls": self.socialmediagirls, "toonily": self.toonily, "xbunker": self.xbunker,
@@ -138,6 +138,7 @@ async def jpgchurch(self) -> None:
         self.existing_crawlers['jpg2.su'] = self.existing_crawlers['jpg.church']
         self.existing_crawlers['jpg3.su'] = self.existing_crawlers['jpg.church']
         self.existing_crawlers['jpg4.su'] = self.existing_crawlers['jpg.church']
+        self.existing_crawlers['host.church'] = self.existing_crawlers['jpg.church']
 
     async def kemono(self) -> None:
         """Creates a Kemono Crawler instance"""
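Each alias entry points at the same crawler object, so host.church links reuse the jpg.church crawler rather than spawning a second instance. A toy illustration of the aliasing (the class name is a stand-in, not the real crawler):

class JPGChurchCrawlerStub:  # stand-in for the real jpg.church crawler class
    pass

existing_crawlers = {'jpg.church': JPGChurchCrawlerStub()}
for alias in ('jpg1.su', 'jpg2.su', 'jpg3.su', 'jpg4.su', 'host.church'):
    existing_crawlers[alias] = existing_crawlers['jpg.church']

assert existing_crawlers['host.church'] is existing_crawlers['jpg.church']  # same instance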
11 changes: 6 additions & 5 deletions cyberdrop_dl/utils/dataclasses/supported_domains.py
@@ -9,11 +9,12 @@ class SupportedDomains:
         "e-hentai", "erome", "fapello", "f95zone", "gofile", "hotpic",
         "ibb.co", "imageban", "imgbox", "imgur", "img.kiwi", "jpg.church",
         "jpg.homes", "jpg.fish", "jpg.fishing", "jpg.pet", "jpeg.pet",
-        "jpg1.su", "jpg2.su", "jpg3.su", "jpg4.su", "kemono", "leakedmodels",
-        "mediafire", "nudostar.com", "nudostar.tv", "omegascans",
-        "pimpandhost", "pixeldrain", "postimg", "reddit", "redd.it",
-        "redgifs", "rule34.xxx", "rule34.xyz", "saint", "scrolller",
-        "simpcity", "socialmediagirls", "toonily", "xbunker", "xbunkr")
+        "jpg1.su", "jpg2.su", "jpg3.su", "jpg4.su", "host.church", "kemono",
+        "leakedmodels", "mediafire", "nudostar.com", "nudostar.tv",
+        "omegascans", "pimpandhost", "pixeldrain", "postimg", "reddit",
+        "redd.it", "redgifs", "rule34.xxx", "rule34.xyz", "saint",
+        "scrolller", "simpcity", "socialmediagirls", "toonily", "xbunker",
+        "xbunkr")
 
     supported_forums: ClassVar[Tuple[str, ...]] = ("celebforum.to", "f95zone.to", "leakedmodels.com", "nudostar.com",
                                                    "simpcity.su", "forums.socialmediagirls.com", "xbunker.nu")
2 changes: 1 addition & 1 deletion pyproject.toml
@@ -1,6 +1,6 @@
 [tool.poetry]
 name = "cyberdrop-dl"
-version = "5.1.61"
+version = "5.1.62"
 description = "Bulk downloader for multiple file hosts"
 authors = ["Jules Winnfield <[email protected]>"]
 readme = "README.md"
