From 1382961967044da044db2b8cf625cb401f8525ec Mon Sep 17 00:00:00 2001
From: Jules-WinnfieldX
Date: Wed, 22 May 2024 09:29:26 -0600
Subject: [PATCH] Minor fixes and line removals

---
 cyberdrop_dl/__init__.py                           | 2 +-
 cyberdrop_dl/scraper/crawlers/cyberdrop_crawler.py | 8 ++++++--
 cyberdrop_dl/scraper/crawlers/cyberfile_crawler.py | 1 -
 cyberdrop_dl/utils/args/args.py                    | 2 --
 pyproject.toml                                     | 2 +-
 5 files changed, 8 insertions(+), 7 deletions(-)

diff --git a/cyberdrop_dl/__init__.py b/cyberdrop_dl/__init__.py
index e6ba6c35b..eb58b2698 100644
--- a/cyberdrop_dl/__init__.py
+++ b/cyberdrop_dl/__init__.py
@@ -1 +1 @@
-__version__ = "5.3.17"
+__version__ = "5.3.18"
diff --git a/cyberdrop_dl/scraper/crawlers/cyberdrop_crawler.py b/cyberdrop_dl/scraper/crawlers/cyberdrop_crawler.py
index 5f0ace5e4..e37b6a493 100644
--- a/cyberdrop_dl/scraper/crawlers/cyberdrop_crawler.py
+++ b/cyberdrop_dl/scraper/crawlers/cyberdrop_crawler.py
@@ -18,7 +18,7 @@ class CyberdropCrawler(Crawler):
 
     def __init__(self, manager: Manager):
         super().__init__(manager, "cyberdrop", "Cyberdrop")
-        self.api_url = URL("https://cyberdrop.me/api/")
+        self.api_url = URL("https://api.cyberdrop.me/api/")
         self.primary_base_url = URL("https://cyberdrop.me/")
         self.request_limiter = AsyncLimiter(1.0, 2.0)
 
@@ -62,9 +62,13 @@ async def file(self, scrape_item: ScrapeItem) -> None:
             return
 
         async with self.request_limiter:
-            JSON_Resp = await self.client.get_json(self.domain, self.api_url / "f" / scrape_item.url.path[3:])
+            JSON_Resp = await self.client.get_json(self.domain, self.api_url / "file" / "info" / scrape_item.url.path[3:])
 
         filename, ext = await get_filename_and_ext(JSON_Resp["name"])
+
+        async with self.request_limiter:
+            JSON_Resp = await self.client.get_json(self.domain, self.api_url / "file" / "auth" / scrape_item.url.path[3:])
+
         link = URL(JSON_Resp['url'])
 
         await self.handle_file(link, scrape_item, filename, ext)
diff --git a/cyberdrop_dl/scraper/crawlers/cyberfile_crawler.py b/cyberdrop_dl/scraper/crawlers/cyberfile_crawler.py
index 95df66d55..2204b30b4 100644
--- a/cyberdrop_dl/scraper/crawlers/cyberfile_crawler.py
+++ b/cyberdrop_dl/scraper/crawlers/cyberfile_crawler.py
@@ -121,7 +121,6 @@ async def shared(self, scrape_item: ScrapeItem) -> None:
            node_id = str(new_folders.pop(0))
            page = 1
 
-    @error_handling_wrapper
    async def file(self, scrape_item: ScrapeItem) -> None:
        """Scrapes a file"""
 
diff --git a/cyberdrop_dl/utils/args/args.py b/cyberdrop_dl/utils/args/args.py
index 4d38da773..0c0d86a9c 100644
--- a/cyberdrop_dl/utils/args/args.py
+++ b/cyberdrop_dl/utils/args/args.py
@@ -80,8 +80,6 @@ def parse_args() -> argparse.Namespace:
    ui_options.add_argument("--scraping-item-limit", type=int, help="number of lines to allow for scraping items before overflow (default: %(default)s)", default=5)
    ui_options.add_argument("--downloading-item-limit", type=int, help="number of lines to allow for downloading items before overflow (default: %(default)s)", default=5)
 
-
-
    # Links
    parser.add_argument("links", metavar="link", nargs="*", help="link to content to download (passing multiple links is supported)", default=[])
    return parser.parse_args()
diff --git a/pyproject.toml b/pyproject.toml
index 67d476c94..965090f18 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,6 +1,6 @@
 [tool.poetry]
 name = "cyberdrop-dl"
-version = "5.3.17"
+version = "5.3.18"
 description = "Bulk downloader for multiple file hosts"
 authors = ["Jules Winnfield "]
 readme = "README.md"