diff --git a/cyberdrop_dl/__init__.py b/cyberdrop_dl/__init__.py
index ebdaae45a..72a555294 100644
--- a/cyberdrop_dl/__init__.py
+++ b/cyberdrop_dl/__init__.py
@@ -1 +1 @@
-__version__ = "4.2.223"
+__version__ = "4.2.224"
diff --git a/cyberdrop_dl/crawlers/Bunkr_Spider.py b/cyberdrop_dl/crawlers/Bunkr_Spider.py
index b20c06188..9b420d0b1 100644
--- a/cyberdrop_dl/crawlers/Bunkr_Spider.py
+++ b/cyberdrop_dl/crawlers/Bunkr_Spider.py
@@ -28,8 +28,7 @@ def __init__(self, quiet: bool, SQL_Helper: SQLHelper, remove_bunkr_id: bool, er
         self.quiet = quiet
         self.SQL_Helper = SQL_Helper
         self.remove_bunkr_id = remove_bunkr_id
-        self.limiter = AsyncLimiter(10, 1)
-        self.small_limiter = AsyncLimiter(1, 1)
+        self.limiter = AsyncLimiter(3, 1)

         self.error_writer = error_writer
diff --git a/cyberdrop_dl/crawlers/Cyberdrop_Spider.py b/cyberdrop_dl/crawlers/Cyberdrop_Spider.py
index 8b5d26756..8fb5e2140 100644
--- a/cyberdrop_dl/crawlers/Cyberdrop_Spider.py
+++ b/cyberdrop_dl/crawlers/Cyberdrop_Spider.py
@@ -1,18 +1,17 @@
 from __future__ import annotations

+import asyncio
 from typing import TYPE_CHECKING

+from aiolimiter import AsyncLimiter
 from yarl import URL

 from ..base_functions.base_functions import (
-    check_direct,
-    create_media_item,
     log,
     logger,
     make_title_safe,
     get_filename_and_ext,
 )
 from ..base_functions.data_classes import AlbumItem, MediaItem
-from ..base_functions.error_classes import InvalidContentTypeFailure, NoExtensionFailure

 if TYPE_CHECKING:
     from ..base_functions.base_functions import ErrorFileWriter
@@ -25,13 +24,13 @@ def __init__(self, *, include_id=False, quiet: bool, SQL_Helper: SQLHelper, erro
         self.include_id = include_id
         self.SQL_Helper = SQL_Helper
         self.quiet = quiet
+        self.limiter = AsyncLimiter(1.0, 2.0)

         self.error_writer = error_writer

     async def fetch(self, session: ScrapeSession, url: URL) -> AlbumItem:
         """Cyberdrop scraper"""
         album_obj = AlbumItem("Loose Cyberdrop Files", [])
-
         log(f"Starting: {url}", quiet=self.quiet, style="green")

         try:
@@ -49,7 +48,8 @@ async def fetch(self, session: ScrapeSession, url: URL) -> AlbumItem:

     async def get_album(self, session: ScrapeSession, url: URL, album_obj: AlbumItem) -> None:
         """Cyberdrop scraper"""
-        soup = await session.get_BS4(url)
+        async with self.limiter:
+            soup = await session.get_BS4(url)
         title = await make_title_safe(soup.select_one("h1[id=title]").text)
         title = title.strip()
         await album_obj.set_new_title(title)
@@ -65,7 +65,8 @@ async def get_album(self, session: ScrapeSession, url: URL, album_obj: AlbumItem
     async def get_file(self, session: ScrapeSession, url: URL, album_obj: AlbumItem) -> None:
         """Cyberdrop scraper"""
         url = URL("https://cyberdrop.me/api/") / url.path[1:]
-        soup = await session.get_json(url)
+        async with self.limiter:
+            soup = await session.get_json(url)
         filename, ext = await get_filename_and_ext(soup["name"])
         link = URL(soup['url'])
         complete = await self.SQL_Helper.check_complete_singular("cyberdrop", link)
diff --git a/cyberdrop_dl/downloader/downloader_utils.py b/cyberdrop_dl/downloader/downloader_utils.py
index dc5179987..e1f310215 100644
--- a/cyberdrop_dl/downloader/downloader_utils.py
+++ b/cyberdrop_dl/downloader/downloader_utils.py
@@ -51,7 +51,7 @@ async def check_free_space(required_space_gb: int, download_directory: Path) ->

 def get_threads_number(args: Dict, domain: str) -> int:
     threads = args["Runtime"]["max_concurrent_downloads_per_domain"] or multiprocessing.cpu_count()
-    if any(s in domain for s in ('bunkr',)):
+    if any(s in domain for s in ('bunkr', 'cyberdrop')):
         return min(threads, 1)
     if any(s in domain for s in ('anonfiles', 'pixeldrain', 'cyberfile')):
         return min(threads, 2)
diff --git a/cyberdrop_dl/downloader/downloaders.py b/cyberdrop_dl/downloader/downloaders.py
index 9e3d6ae2b..611fedb2b 100644
--- a/cyberdrop_dl/downloader/downloaders.py
+++ b/cyberdrop_dl/downloader/downloaders.py
@@ -74,7 +74,7 @@ def __init__(self, args: Dict, client: Client, files: OverallFileProgress, SQL_H
         self.File_Lock = FileLock()

         # Limits
-        self.delay = {'cyberdrop': 1.0, 'cyberfile': 1.0, 'anonfiles': 1.0, "coomer": 0.2, "kemono": 0.2, 'bunkr': 0.5}
+        self.delay = {'cyberdrop': 1.0, 'cyberfile': 1.0, 'anonfiles': 1.0, "coomer": 0.2, "kemono": 0.2, 'bunkr': 1.0,}

         # Exclude Args
         self.exclude_audio = args["Ignore"]["exclude_audio"]
diff --git a/cyberdrop_dl/scraper/Scraper.py b/cyberdrop_dl/scraper/Scraper.py
index 0242d4549..9d3f8c3c5 100644
--- a/cyberdrop_dl/scraper/Scraper.py
+++ b/cyberdrop_dl/scraper/Scraper.py
@@ -87,6 +87,7 @@ def __init__(self, args: Dict, client: Client, SQL_Helper: SQLHelper, quiet: boo
         self.jdownloader = JDownloader(args['JDownloader'], quiet)

         self.bunkr_semaphore = asyncio.Semaphore(1)
+        self.cyberdrop_semaphore = asyncio.Semaphore(1)
         self.coomero_semaphore = asyncio.Semaphore(4)
         self.gofile_semaphore = asyncio.Semaphore(1)
         self.nudostar_semaphore = asyncio.Semaphore(1)
@@ -157,7 +158,8 @@ async def Cyberdrop(self, url: URL, title=None):
         if not self.cyberdrop_crawler:
             self.cyberdrop_crawler = CyberdropCrawler(include_id=self.include_id, quiet=self.quiet,
                                                       SQL_Helper=self.SQL_Helper, error_writer=self.error_writer)
-        album_obj = await self.cyberdrop_crawler.fetch(cyberdrop_session, url)
+        async with self.cyberdrop_semaphore:
+            album_obj = await self.cyberdrop_crawler.fetch(cyberdrop_session, url)
         await self._handle_album_additions("cyberdrop", album_obj, title)
         await cyberdrop_session.exit_handler()
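The throttling pattern this diff applies to Cyberdrop — an aiolimiter.AsyncLimiter wrapped around each HTTP call, plus an asyncio.Semaphore(1) wrapped around each whole-album fetch — can be shown in isolation. The sketch below is not cyberdrop-dl code: fetch_album and its URLs are hypothetical stand-ins, and only the two synchronization primitives mirror the diff (AsyncLimiter(1.0, 2.0) permits one acquisition per two seconds; Semaphore(1) serializes the fetches).

import asyncio

from aiolimiter import AsyncLimiter

# Mirrors the diff's AsyncLimiter(1.0, 2.0): at most one acquisition every 2 seconds.
limiter = AsyncLimiter(1.0, 2.0)
# Mirrors asyncio.Semaphore(1): only one album fetch may run at a time.
semaphore = asyncio.Semaphore(1)


async def fetch_album(url: str) -> str:
    """Hypothetical stand-in for CyberdropCrawler.fetch()."""
    async with semaphore:           # serialize whole-album scrapes
        async with limiter:         # throttle the underlying HTTP request
            await asyncio.sleep(0)  # placeholder for session.get_BS4() / get_json()
            return f"scraped {url}"


async def main() -> None:
    urls = [f"https://cyberdrop.me/a/{i}" for i in range(3)]
    print(await asyncio.gather(*(fetch_album(u) for u in urls)))


asyncio.run(main())

Pairing the two primitives, as the diff does with cyberdrop_semaphore and self.limiter, keeps only one Cyberdrop album in flight at a time while the limiter spaces out the individual requests inside it; either mechanism alone would bound concurrency or request rate, but not both.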