
Adjust ratelimits
Jules-WinnfieldX committed Nov 19, 2023
1 parent 709ad7e commit aafebd2
Showing 6 changed files with 14 additions and 12 deletions.
2 changes: 1 addition & 1 deletion cyberdrop_dl/__init__.py
@@ -1 +1 @@
__version__ = "4.2.223"
__version__ = "4.2.224"
3 changes: 1 addition & 2 deletions cyberdrop_dl/crawlers/Bunkr_Spider.py
@@ -28,8 +28,7 @@ def __init__(self, quiet: bool, SQL_Helper: SQLHelper, remove_bunkr_id: bool, er
self.quiet = quiet
self.SQL_Helper = SQL_Helper
self.remove_bunkr_id = remove_bunkr_id
- self.limiter = AsyncLimiter(10, 1)
- self.small_limiter = AsyncLimiter(1, 1)
+ self.limiter = AsyncLimiter(3, 1)

self.error_writer = error_writer

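This drops Bunkr from a 10-requests-per-second limiter (plus a separate one-per-second "small" limiter) to a single limiter of 3 requests per second. A minimal sketch of how aiolimiter's AsyncLimiter paces coroutines at that setting; the fetch function and its print are stand-ins, not the crawler's real request path:

```python
import asyncio
from aiolimiter import AsyncLimiter

# Matches the new Bunkr setting: at most 3 acquisitions per 1-second window.
limiter = AsyncLimiter(3, 1)

async def fetch(i: int) -> None:
    async with limiter:  # waits until the leaky bucket has capacity
        print(f"request {i} at t={asyncio.get_running_loop().time():.2f}")

async def main() -> None:
    # Nine concurrent tasks: the first burst passes immediately,
    # the rest are paced as the bucket drains.
    await asyncio.gather(*(fetch(i) for i in range(9)))

asyncio.run(main())
```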
13 changes: 7 additions & 6 deletions cyberdrop_dl/crawlers/Cyberdrop_Spider.py
@@ -1,18 +1,17 @@
from __future__ import annotations

import asyncio
from typing import TYPE_CHECKING

+ from aiolimiter import AsyncLimiter
from yarl import URL

from ..base_functions.base_functions import (
check_direct,
create_media_item,
log,
logger,
make_title_safe, get_filename_and_ext,
)
from ..base_functions.data_classes import AlbumItem, MediaItem
from ..base_functions.error_classes import InvalidContentTypeFailure, NoExtensionFailure

if TYPE_CHECKING:
from ..base_functions.base_functions import ErrorFileWriter
@@ -25,13 +24,13 @@ def __init__(self, *, include_id=False, quiet: bool, SQL_Helper: SQLHelper, erro
self.include_id = include_id
self.SQL_Helper = SQL_Helper
self.quiet = quiet
+ self.limiter = AsyncLimiter(1.0, 2.0)

self.error_writer = error_writer

async def fetch(self, session: ScrapeSession, url: URL) -> AlbumItem:
"""Cyberdrop scraper"""
album_obj = AlbumItem("Loose Cyberdrop Files", [])

log(f"Starting: {url}", quiet=self.quiet, style="green")

try:
@@ -49,7 +48,8 @@ async def fetch(self, session: ScrapeSession, url: URL) -> AlbumItem:

async def get_album(self, session: ScrapeSession, url: URL, album_obj: AlbumItem) -> None:
"""Cyberdrop scraper"""
- soup = await session.get_BS4(url)
+ async with self.limiter:
+     soup = await session.get_BS4(url)
title = await make_title_safe(soup.select_one("h1[id=title]").text)
title = title.strip()
await album_obj.set_new_title(title)
@@ -65,7 +65,8 @@ async def get_file(self, session: ScrapeSession, url: URL, album_obj: AlbumItem) -> None:
async def get_file(self, session: ScrapeSession, url: URL, album_obj: AlbumItem) -> None:
"""Cyberdrop scraper"""
url = URL("https://cyberdrop.me/api/") / url.path[1:]
- soup = await session.get_json(url)
+ async with self.limiter:
+     soup = await session.get_json(url)
filename, ext = await get_filename_and_ext(soup["name"])
link = URL(soup['url'])
complete = await self.SQL_Helper.check_complete_singular("cyberdrop", link)
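The Cyberdrop crawler gains an AsyncLimiter(1.0, 2.0), i.e. one request per two seconds, and wraps only the network call in it, so parsing stays outside the throttled section. A sketch of that pattern under those assumptions, with a hypothetical get_page standing in for session.get_BS4/session.get_json:

```python
import asyncio
from aiolimiter import AsyncLimiter

# New Cyberdrop setting: one acquisition per 2-second window.
limiter = AsyncLimiter(1.0, 2.0)

async def get_page(url: str) -> str:
    # Only the request itself is throttled; parsing downstream runs freely.
    async with limiter:
        await asyncio.sleep(0)  # hypothetical stand-in for the HTTP round trip
        return f"<html>{url}</html>"

async def scrape(urls: list[str]) -> None:
    for url in urls:
        page = await get_page(url)  # consecutive calls land ~2s apart
        print(len(page), url)

asyncio.run(scrape(["https://cyberdrop.me/a/x", "https://cyberdrop.me/a/y"]))
```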
2 changes: 1 addition & 1 deletion cyberdrop_dl/downloader/downloader_utils.py
@@ -51,7 +51,7 @@ async def check_free_space(required_space_gb: int, download_directory: Path) ->

def get_threads_number(args: Dict, domain: str) -> int:
threads = args["Runtime"]["max_concurrent_downloads_per_domain"] or multiprocessing.cpu_count()
- if any(s in domain for s in ('bunkr',)):
+ if any(s in domain for s in ('bunkr', 'cyberdrop')):
return min(threads, 1)
if any(s in domain for s in ('anonfiles', 'pixeldrain', 'cyberfile')):
return min(threads, 2)
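With this one-word change, cyberdrop domains join bunkr in being capped at a single concurrent download. For context, a self-contained version of the helper as it appears in the diff; the final fallthrough return is an assumption, since the diff is cut off after the two-thread branch:

```python
import multiprocessing
from typing import Dict

def get_threads_number(args: Dict, domain: str) -> int:
    threads = args["Runtime"]["max_concurrent_downloads_per_domain"] or multiprocessing.cpu_count()
    if any(s in domain for s in ('bunkr', 'cyberdrop')):
        return min(threads, 1)  # serialized: one download at a time
    if any(s in domain for s in ('anonfiles', 'pixeldrain', 'cyberfile')):
        return min(threads, 2)
    return threads  # assumed fallthrough; not visible in the diff

args = {"Runtime": {"max_concurrent_downloads_per_domain": 8}}
print(get_threads_number(args, "cyberdrop.me"))    # -> 1
print(get_threads_number(args, "pixeldrain.com"))  # -> 2
```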
2 changes: 1 addition & 1 deletion cyberdrop_dl/downloader/downloaders.py
@@ -74,7 +74,7 @@ def __init__(self, args: Dict, client: Client, files: OverallFileProgress, SQL_H
self.File_Lock = FileLock()

# Limits
- self.delay = {'cyberdrop': 1.0, 'cyberfile': 1.0, 'anonfiles': 1.0, "coomer": 0.2, "kemono": 0.2, 'bunkr': 0.5}
+ self.delay = {'cyberdrop': 1.0, 'cyberfile': 1.0, 'anonfiles': 1.0, "coomer": 0.2, "kemono": 0.2, 'bunkr': 1.0,}

# Exclude Args
self.exclude_audio = args["Ignore"]["exclude_audio"]
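The table bumps bunkr's inter-file delay from 0.5 s to 1.0 s, matching cyberdrop. The diff doesn't show how self.delay is consumed; a hypothetical consumer that sleeps for the domain's delay before each transfer might look like:

```python
import asyncio

DELAY = {'cyberdrop': 1.0, 'cyberfile': 1.0, 'anonfiles': 1.0,
         "coomer": 0.2, "kemono": 0.2, 'bunkr': 1.0}

async def download(domain: str, url: str) -> None:
    # Hypothetical: pause for the domain's politeness delay, then transfer.
    await asyncio.sleep(DELAY.get(domain, 0.0))
    print(f"downloading {url} ({domain})")  # stand-in for the real transfer

asyncio.run(download("bunkr", "https://bunkr.example/f/abc"))
```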
4 changes: 3 additions & 1 deletion cyberdrop_dl/scraper/Scraper.py
@@ -87,6 +87,7 @@ def __init__(self, args: Dict, client: Client, SQL_Helper: SQLHelper, quiet: boo
self.jdownloader = JDownloader(args['JDownloader'], quiet)

self.bunkr_semaphore = asyncio.Semaphore(1)
+ self.cyberdrop_semaphore = asyncio.Semaphore(1)
self.coomero_semaphore = asyncio.Semaphore(4)
self.gofile_semaphore = asyncio.Semaphore(1)
self.nudostar_semaphore = asyncio.Semaphore(1)
@@ -157,7 +158,8 @@ async def Cyberdrop(self, url: URL, title=None):
if not self.cyberdrop_crawler:
self.cyberdrop_crawler = CyberdropCrawler(include_id=self.include_id, quiet=self.quiet,
SQL_Helper=self.SQL_Helper, error_writer=self.error_writer)
- album_obj = await self.cyberdrop_crawler.fetch(cyberdrop_session, url)
+ async with self.cyberdrop_semaphore:
+     album_obj = await self.cyberdrop_crawler.fetch(cyberdrop_session, url)
await self._handle_album_additions("cyberdrop", album_obj, title)
await cyberdrop_session.exit_handler()

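A Semaphore(1) means at most one Cyberdrop album fetch runs at a time, on top of the crawler's new per-request limiter. A minimal sketch of that serialization, with a sleep standing in for crawler.fetch:

```python
import asyncio

cyberdrop_semaphore = asyncio.Semaphore(1)

async def fetch_album(url: str) -> str:
    async with cyberdrop_semaphore:  # only one fetch in flight at a time
        await asyncio.sleep(0.1)     # hypothetical stand-in for crawler.fetch
        return f"album<{url}>"

async def main() -> None:
    # Even when scheduled concurrently, these run one after another.
    print(await asyncio.gather(*(fetch_album(f"u{i}") for i in range(4))))

asyncio.run(main())
```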
