This repository has been archived by the owner on Jul 5, 2024. It is now read-only.

Commit

check for post number in forum fragment
Jules-WinnfieldX committed Jan 3, 2024
1 parent 663e4bf commit ae0d168
Showing 10 changed files with 120 additions and 101 deletions.
2 changes: 1 addition & 1 deletion cyberdrop_dl/__init__.py
@@ -1 +1 @@
-__version__ = "5.1.32"
+__version__ = "5.1.33"
2 changes: 1 addition & 1 deletion cyberdrop_dl/scraper/crawlers/celebforum_crawler.py
@@ -76,7 +76,7 @@ async def forum(self, scrape_item: ScrapeItem) -> None:
         thread_url = scrape_item.url
         post_number = 0
         if len(scrape_item.url.parts) > 3:
-            if "post-" in str(scrape_item.url.parts[3]):
+            if "post-" in str(scrape_item.url.parts[3]) or "post-" in scrape_item.url.fragment:
                 url_parts = str(scrape_item.url).rsplit("post-", 1)
                 thread_url = URL(url_parts[0].rstrip("#"))
                 post_number = int(url_parts[-1].strip("/")) if len(url_parts) == 2 else 0
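The same one-line change is repeated in each crawler below: a post anchor is now recognized when it appears only in the URL fragment (e.g. ".../#post-123"), not just as a path segment. A minimal standalone sketch of that parsing, assuming yarl.URL objects as the crawlers use; the helper name and example URL are hypothetical:

from yarl import URL


def split_post_number(url: URL, part_index: int = 3) -> tuple[URL, int]:
    """Return (thread_url, post_number); post_number is 0 when no post anchor is found."""
    thread_url, post_number = url, 0
    if len(url.parts) > part_index:
        # The added fragment check means ".../#post-123" links are also handled.
        if "post-" in str(url.parts[part_index]) or "post-" in url.fragment:
            url_parts = str(url).rsplit("post-", 1)
            thread_url = URL(url_parts[0].rstrip("#"))
            post_number = int(url_parts[-1].strip("/")) if len(url_parts) == 2 else 0
    return thread_url, post_number


# Hypothetical examples:
#   path form:      https://forum.example/threads/some-thread.1/post-42
#   fragment form:  https://forum.example/threads/some-thread.1/#post-42  (previously fell through)
print(split_post_number(URL("https://forum.example/threads/some-thread.1/#post-42")))
# -> (URL('https://forum.example/threads/some-thread.1/'), 42)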
2 changes: 1 addition & 1 deletion cyberdrop_dl/scraper/crawlers/f95zone_crawler.py
@@ -76,7 +76,7 @@ async def forum(self, scrape_item: ScrapeItem) -> None:
         thread_url = scrape_item.url
         post_number = 0
         if len(scrape_item.url.parts) > 3:
-            if "post-" in str(scrape_item.url.parts[3]):
+            if "post-" in str(scrape_item.url.parts[3]) or "post-" in scrape_item.url.fragment:
                 url_parts = str(scrape_item.url).rsplit("post-", 1)
                 thread_url = URL(url_parts[0].rstrip("#"))
                 post_number = int(url_parts[-1].strip("/")) if len(url_parts) == 2 else 0
2 changes: 1 addition & 1 deletion cyberdrop_dl/scraper/crawlers/leakedmodels_crawler.py
@@ -80,7 +80,7 @@ async def forum(self, scrape_item: ScrapeItem) -> None:
         thread_url = scrape_item.url
         post_number = 0
         if len(scrape_item.url.parts) > 4:
-            if "post-" in str(scrape_item.url.parts[4]):
+            if "post-" in str(scrape_item.url.parts[4]) or "post-" in scrape_item.url.fragment:
                 url_parts = str(scrape_item.url).rsplit("post-", 1)
                 thread_url = URL(url_parts[0].rstrip("#"))
                 post_number = int(url_parts[-1].strip("/")) if len(url_parts) == 2 else 0
2 changes: 1 addition & 1 deletion cyberdrop_dl/scraper/crawlers/nudostar_crawler.py
@@ -77,7 +77,7 @@ async def forum(self, scrape_item: ScrapeItem) -> None:
         thread_url = scrape_item.url
         post_number = 0
         if len(scrape_item.url.parts) > 4:
-            if "post-" in str(scrape_item.url.parts[4]):
+            if "post-" in str(scrape_item.url.parts[4]) or "post-" in scrape_item.url.fragment:
                 url_parts = str(scrape_item.url).rsplit("post-", 1)
                 thread_url = URL(url_parts[0].rstrip("#"))
                 post_number = int(url_parts[-1].strip("/")) if len(url_parts) == 2 else 0
2 changes: 1 addition & 1 deletion cyberdrop_dl/scraper/crawlers/simpcity_crawler.py
@@ -77,7 +77,7 @@ async def forum(self, scrape_item: ScrapeItem) -> None:
         thread_url = scrape_item.url
         post_number = 0
         if len(scrape_item.url.parts) > 3:
-            if "post-" in str(scrape_item.url.parts[3]):
+            if "post-" in str(scrape_item.url.parts[3]) or "post-" in scrape_item.url.fragment:
                 url_parts = str(scrape_item.url).rsplit("post-", 1)
                 thread_url = URL(url_parts[0].rstrip("#"))
                 post_number = int(url_parts[-1].strip("/")) if len(url_parts) == 2 else 0
2 changes: 1 addition & 1 deletion cyberdrop_dl/scraper/crawlers/socialmediagirls_crawler.py
@@ -77,7 +77,7 @@ async def forum(self, scrape_item: ScrapeItem) -> None:
         thread_url = scrape_item.url
         post_number = 0
         if len(scrape_item.url.parts) > 3:
-            if "post-" in str(scrape_item.url.parts[3]):
+            if "post-" in str(scrape_item.url.parts[3]) or "post-" in scrape_item.url.fragment:
                 url_parts = str(scrape_item.url).rsplit("post-", 1)
                 thread_url = URL(url_parts[0].rstrip("#"))
                 post_number = int(url_parts[-1].strip("/")) if len(url_parts) == 2 else 0
2 changes: 1 addition & 1 deletion cyberdrop_dl/scraper/crawlers/xbunker_crawler.py
@@ -81,7 +81,7 @@ async def forum(self, scrape_item: ScrapeItem) -> None:
         thread_url = scrape_item.url
         post_number = 0
         if len(scrape_item.url.parts) > 3:
-            if "post-" in str(scrape_item.url.parts[3]):
+            if "post-" in str(scrape_item.url.parts[3]) or "post-" in scrape_item.url.fragment:
                 url_parts = str(scrape_item.url).rsplit("post-", 1)
                 thread_url = URL(url_parts[0].rstrip("#"))
                 post_number = int(url_parts[-1].strip("/")) if len(url_parts) == 2 else 0
203 changes: 111 additions & 92 deletions poetry.lock

Large diffs are not rendered by default.

2 changes: 1 addition & 1 deletion pyproject.toml
@@ -1,6 +1,6 @@
 [tool.poetry]
 name = "cyberdrop-dl"
-version = "5.1.32"
+version = "5.1.33"
 description = "Bulk downloader for multiple file hosts"
 authors = ["Jules Winnfield <[email protected]>"]
 readme = "README.md"
