From 24ed68c1b6fedf52874b9f7c53686a680b338919 Mon Sep 17 00:00:00 2001
From: NTFSvolume <172021377+NTFSvolume@users.noreply.github.com>
Date: Fri, 8 Nov 2024 07:59:51 -0500
Subject: [PATCH] fix: minor fixes 2

---
 cyberdrop_dl/clients/download_client.py              | 3 ++-
 cyberdrop_dl/clients/errors.py                       | 2 +-
 cyberdrop_dl/clients/hash_client.py                  | 3 ++-
 cyberdrop_dl/clients/scraper_client.py               | 4 +++-
 cyberdrop_dl/downloader/downloader.py                | 6 ++++--
 cyberdrop_dl/managers/args_manager.py                | 2 +-
 cyberdrop_dl/managers/config_manager.py              | 4 +++-
 cyberdrop_dl/managers/leaky.py                       | 2 +-
 cyberdrop_dl/managers/live_manager.py                | 5 +++--
 cyberdrop_dl/managers/log_manager.py                 | 2 +-
 cyberdrop_dl/managers/real_debrid/api.py             | 2 +-
 cyberdrop_dl/managers/real_debrid/errors.py          | 5 ++++-
 cyberdrop_dl/scraper/crawlers/chevereto_crawler.py   | 3 ++-
 cyberdrop_dl/scraper/crawlers/reddit_crawler.py      | 3 ++-
 cyberdrop_dl/scraper/crawlers/tokyomotion_crawler.py | 3 ++-
 cyberdrop_dl/scraper/crawlers/xxxbunker_crawler.py   | 3 ++-
 cyberdrop_dl/scraper/jdownloader.py                  | 4 +++-
 cyberdrop_dl/scraper/scraper.py                      | 3 ++-
 cyberdrop_dl/utils/args/args.py                      | 9 ++++++++-
 cyberdrop_dl/utils/args/browser_cookie_extraction.py | 3 ++-
 cyberdrop_dl/utils/database/tables/history_table.py  | 3 ++-
 cyberdrop_dl/utils/transfer/first_time_setup.py      | 3 ++-
 cyberdrop_dl/utils/utilities.py                      | 3 ++-
 23 files changed, 55 insertions(+), 25 deletions(-)

diff --git a/cyberdrop_dl/clients/download_client.py b/cyberdrop_dl/clients/download_client.py
index 797094cd3..50692959a 100644
--- a/cyberdrop_dl/clients/download_client.py
+++ b/cyberdrop_dl/clients/download_client.py
@@ -17,7 +17,8 @@
 from cyberdrop_dl.utils.logger import log
 
 if TYPE_CHECKING:
-    from typing import Any, Callable, Coroutine
+    from collections.abc import Callable, Coroutine
+    from typing import Any
 
     from cyberdrop_dl.managers.client_manager import ClientManager
     from cyberdrop_dl.managers.manager import Manager
diff --git a/cyberdrop_dl/clients/errors.py b/cyberdrop_dl/clients/errors.py
index c7625452b..bddb984ff 100644
--- a/cyberdrop_dl/clients/errors.py
+++ b/cyberdrop_dl/clients/errors.py
@@ -26,7 +26,7 @@ def __init__(
         self.ui_message = ui_message
         self.message = message or ui_message
         self.origin = origin
-        if origin and not isinstance(origin, (URL, Path)):
+        if origin and not isinstance(origin, URL | Path):
             self.origin = origin.parents[0] if origin.parents else None
         super().__init__(self.message)
         if status:
diff --git a/cyberdrop_dl/clients/hash_client.py b/cyberdrop_dl/clients/hash_client.py
index f4719bbfc..85527e040 100644
--- a/cyberdrop_dl/clients/hash_client.py
+++ b/cyberdrop_dl/clients/hash_client.py
@@ -3,7 +3,6 @@
 import asyncio
 import time
 from collections import defaultdict
-from collections.abc import AsyncGenerator
 from contextlib import asynccontextmanager
 from pathlib import Path
 from typing import TYPE_CHECKING
@@ -14,6 +13,8 @@
 from cyberdrop_dl.utils.logger import log
 
 if TYPE_CHECKING:
+    from collections.abc import AsyncGenerator
+
     from yarl import URL
 
     from cyberdrop_dl.managers.manager import Manager
diff --git a/cyberdrop_dl/clients/scraper_client.py b/cyberdrop_dl/clients/scraper_client.py
index 2f46200c2..4fc498cd3 100644
--- a/cyberdrop_dl/clients/scraper_client.py
+++ b/cyberdrop_dl/clients/scraper_client.py
@@ -2,7 +2,7 @@
 
 import json
 from functools import wraps
-from typing import TYPE_CHECKING, Any, Callable
+from typing import TYPE_CHECKING, Any
 
 import aiohttp
 from aiohttp import ClientSession
@@ -14,6 +14,8 @@
 from cyberdrop_dl.utils.logger import log
 
 if TYPE_CHECKING:
+    from collections.abc import Callable
+
     from multidict import CIMultiDictProxy
 
     from cyberdrop_dl.managers.client_manager import ClientManager
diff --git a/cyberdrop_dl/downloader/downloader.py b/cyberdrop_dl/downloader/downloader.py
index 4d62daf15..3ce321606 100644
--- a/cyberdrop_dl/downloader/downloader.py
+++ b/cyberdrop_dl/downloader/downloader.py
@@ -6,7 +6,7 @@
 from functools import wraps
 from http import HTTPStatus
 from pathlib import Path
-from typing import TYPE_CHECKING, Callable
+from typing import TYPE_CHECKING
 
 import aiohttp
 from filedate import File
@@ -18,6 +18,8 @@
 from cyberdrop_dl.utils.logger import log
 
 if TYPE_CHECKING:
+    from collections.abc import Callable
+
     from cyberdrop_dl.clients.download_client import DownloadClient
     from cyberdrop_dl.managers.manager import Manager
     from cyberdrop_dl.utils.dataclasses.url_objects import MediaItem
@@ -214,7 +216,7 @@ async def download(self, media_item: MediaItem) -> None:
             FileNotFoundError,
             PermissionError,
             aiohttp.ServerDisconnectedError,
-            asyncio.TimeoutError,
+            TimeoutError,
             aiohttp.ServerTimeoutError,
         ) as e:
             ui_message = getattr(e, "status", type(e).__name__)
diff --git a/cyberdrop_dl/managers/args_manager.py b/cyberdrop_dl/managers/args_manager.py
index 94bf56349..adc6eb756 100644
--- a/cyberdrop_dl/managers/args_manager.py
+++ b/cyberdrop_dl/managers/args_manager.py
@@ -150,7 +150,7 @@ def startup(self) -> None:
         del self.parsed_args["retry_all"]
         del self.parsed_args["retry_maintenance"]
         del self.parsed_args["input_file"]
-        del self.parsed_args["output_folder"]
+        del self.parsed_args["download_dir"]
         del self.parsed_args["appdata_dir"]
         del self.parsed_args["config_file"]
         del self.parsed_args["log_folder"]
diff --git a/cyberdrop_dl/managers/config_manager.py b/cyberdrop_dl/managers/config_manager.py
index e6467c98a..cbd358954 100644
--- a/cyberdrop_dl/managers/config_manager.py
+++ b/cyberdrop_dl/managers/config_manager.py
@@ -285,7 +285,9 @@ def write_updated_settings_config(self) -> None:
         settings_data["Logs"]["log_folder"] = str(settings_data["Logs"]["log_folder"])
         settings_data["Logs"]["webhook_url"] = str(settings_data["Logs"]["webhook_url"])
         settings_data["Sorting"]["sort_folder"] = str(settings_data["Sorting"]["sort_folder"])
-        settings_data["Sorting"]["scan_folder"] = str(settings_data["Sorting"]["scan_folder"])
+        settings_data["Sorting"]["scan_folder"] = (
+            str(settings_data["Sorting"]["scan_folder"]) if settings_data["Sorting"]["scan_folder"] else None
+        )
 
         _save_yaml(self.settings, settings_data)
 
diff --git a/cyberdrop_dl/managers/leaky.py b/cyberdrop_dl/managers/leaky.py
index dd49152e3..2e3f1fafb 100644
--- a/cyberdrop_dl/managers/leaky.py
+++ b/cyberdrop_dl/managers/leaky.py
@@ -33,7 +33,7 @@ async def acquire(self, amount: float = 1) -> None:
             # 'early' if capacity has come up
             fut = loop.create_future()
             self._waiters[task] = fut
-            with contextlib.suppress(asyncio.TimeoutError):
+            with contextlib.suppress(TimeoutError):
                 await wait_for(asyncio.shield(fut), 1 / self._rate_per_sec * amount, loop=loop)
             fut.cancel()
             self._waiters.pop(task, None)
diff --git a/cyberdrop_dl/managers/live_manager.py b/cyberdrop_dl/managers/live_manager.py
index 5bcb7c5df..ba07f8a87 100644
--- a/cyberdrop_dl/managers/live_manager.py
+++ b/cyberdrop_dl/managers/live_manager.py
@@ -1,6 +1,5 @@
 from __future__ import annotations
 
-from collections.abc import Generator
 from contextlib import contextmanager
 from typing import TYPE_CHECKING
 
@@ -10,6 +9,8 @@
 from cyberdrop_dl.utils.logger import console, log
 
 if TYPE_CHECKING:
+    from collections.abc import Generator
+
     from rich.layout import Layout
 
     from cyberdrop_dl.managers.manager import Manager
@@ -46,7 +47,7 @@ def get_live(self, layout: Layout, stop: bool = False) -> Generator[Live]:
                 for sub_exception in e.exceptions:
                     msg = f"Multiple exception caught: {type(sub_exception).__name__} - {sub_exception}"
                     log(msg, 50, exc_info=sub_exception)
-            raise e
+            raise
         finally:
             if stop:
                 self.live.stop()
diff --git a/cyberdrop_dl/managers/log_manager.py b/cyberdrop_dl/managers/log_manager.py
index 8ba389f75..73cb67150 100644
--- a/cyberdrop_dl/managers/log_manager.py
+++ b/cyberdrop_dl/managers/log_manager.py
@@ -94,7 +94,7 @@ async def update_last_forum_post(self) -> None:
             new_base_urls.append(base_url)
 
         updated_urls = current_urls.copy()
-        for new_url, base in zip(new_urls, new_base_urls):
+        for new_url, base in zip(new_urls, new_base_urls, strict=False):
             if base in current_base_urls:
                 index = current_base_urls.index(base)
                 old_url = current_urls[index]
diff --git a/cyberdrop_dl/managers/real_debrid/api.py b/cyberdrop_dl/managers/real_debrid/api.py
index 5721a597b..7ed9dd0ba 100644
--- a/cyberdrop_dl/managers/real_debrid/api.py
+++ b/cyberdrop_dl/managers/real_debrid/api.py
@@ -1,7 +1,6 @@
 from __future__ import annotations
 
 import time
-from collections.abc import Generator
 from contextlib import contextmanager
 from datetime import date, datetime, timedelta
 from typing import TYPE_CHECKING
@@ -13,6 +12,7 @@
 from cyberdrop_dl.managers.real_debrid.errors import RealDebridError
 
 if TYPE_CHECKING:
+    from collections.abc import Generator
     from pathlib import Path
 
     from requests import Response
diff --git a/cyberdrop_dl/managers/real_debrid/errors.py b/cyberdrop_dl/managers/real_debrid/errors.py
index 72e4e8ce1..f861c1e3b 100644
--- a/cyberdrop_dl/managers/real_debrid/errors.py
+++ b/cyberdrop_dl/managers/real_debrid/errors.py
@@ -1,3 +1,5 @@
+from __future__ import annotations
+
 from http import HTTPStatus
 from typing import TYPE_CHECKING
 
@@ -50,7 +52,7 @@
 class RealDebridError(BaseException):
     """Base RealDebrid API error."""
 
-    def __init__(self, response: "Response") -> None:
+    def __init__(self, response: Response) -> None:
         self.path = URL(response.url).path
         try:
             JSONResp: dict = response.json()
@@ -63,5 +65,6 @@ def __init__(self, response: "Response") -> None:
             self.code = response.status_code
             self.error = f"{self.code} - {HTTPStatus(self.code).phrase}"
 
+        self.error = self.error.capitalize()
         self.msg = f"{self.code}: {self.error} at {self.path}"
         super().__init__(self.msg)
diff --git a/cyberdrop_dl/scraper/crawlers/chevereto_crawler.py b/cyberdrop_dl/scraper/crawlers/chevereto_crawler.py
index 0bba39d03..ac19f7d75 100644
--- a/cyberdrop_dl/scraper/crawlers/chevereto_crawler.py
+++ b/cyberdrop_dl/scraper/crawlers/chevereto_crawler.py
@@ -3,7 +3,6 @@
 import calendar
 import datetime
 import re
-from collections.abc import AsyncGenerator
 from typing import TYPE_CHECKING, ClassVar
 
 from aiolimiter import AsyncLimiter
@@ -15,6 +14,8 @@
 from cyberdrop_dl.utils.utilities import error_handling_wrapper, get_filename_and_ext
 
 if TYPE_CHECKING:
+    from collections.abc import AsyncGenerator
+
     from cyberdrop_dl.managers.manager import Manager
     from cyberdrop_dl.utils.dataclasses.url_objects import ScrapeItem
 
diff --git a/cyberdrop_dl/scraper/crawlers/reddit_crawler.py b/cyberdrop_dl/scraper/crawlers/reddit_crawler.py
index 14a5e1ec0..768eb2c78 100644
--- a/cyberdrop_dl/scraper/crawlers/reddit_crawler.py
+++ b/cyberdrop_dl/scraper/crawlers/reddit_crawler.py
@@ -1,7 +1,6 @@
 from __future__ import annotations
 
 import contextlib
-from collections.abc import AsyncIterator
 from typing import TYPE_CHECKING
 
 import aiohttp
@@ -17,6 +16,8 @@
 from cyberdrop_dl.utils.utilities import error_handling_wrapper, get_filename_and_ext
 
 if TYPE_CHECKING:
+    from collections.abc import AsyncIterator
+
     from asyncpraw.models import Redditor, Submission, Subreddit
 
     from cyberdrop_dl.managers.manager import Manager
diff --git a/cyberdrop_dl/scraper/crawlers/tokyomotion_crawler.py b/cyberdrop_dl/scraper/crawlers/tokyomotion_crawler.py
index 157509563..5fea32569 100644
--- a/cyberdrop_dl/scraper/crawlers/tokyomotion_crawler.py
+++ b/cyberdrop_dl/scraper/crawlers/tokyomotion_crawler.py
@@ -2,7 +2,6 @@
 
 import re
 from calendar import timegm
-from collections.abc import AsyncGenerator
 from datetime import datetime, timedelta
 from typing import TYPE_CHECKING
 
@@ -15,6 +14,8 @@
 from cyberdrop_dl.utils.utilities import error_handling_wrapper, get_filename_and_ext
 
 if TYPE_CHECKING:
+    from collections.abc import AsyncGenerator
+
     from bs4 import BeautifulSoup
 
     from cyberdrop_dl.managers.manager import Manager
diff --git a/cyberdrop_dl/scraper/crawlers/xxxbunker_crawler.py b/cyberdrop_dl/scraper/crawlers/xxxbunker_crawler.py
index f5f273953..5724d3ffe 100644
--- a/cyberdrop_dl/scraper/crawlers/xxxbunker_crawler.py
+++ b/cyberdrop_dl/scraper/crawlers/xxxbunker_crawler.py
@@ -3,7 +3,6 @@
 import asyncio
 import re
 from calendar import timegm
-from collections.abc import AsyncGenerator
 from datetime import datetime, timedelta
 from typing import TYPE_CHECKING
 
@@ -17,6 +16,8 @@
 from cyberdrop_dl.utils.utilities import error_handling_wrapper, get_filename_and_ext
 
 if TYPE_CHECKING:
+    from collections.abc import AsyncGenerator
+
     from cyberdrop_dl.managers.manager import Manager
     from cyberdrop_dl.utils.dataclasses.url_objects import ScrapeItem
 
diff --git a/cyberdrop_dl/scraper/jdownloader.py b/cyberdrop_dl/scraper/jdownloader.py
index e38ea05ef..b81ce5c26 100644
--- a/cyberdrop_dl/scraper/jdownloader.py
+++ b/cyberdrop_dl/scraper/jdownloader.py
@@ -3,7 +3,7 @@
 from dataclasses import field
 from functools import wraps
 from pathlib import Path
-from typing import TYPE_CHECKING, Callable
+from typing import TYPE_CHECKING
 
 from myjdapi import myjdapi
 
@@ -11,6 +11,8 @@
 from cyberdrop_dl.utils.logger import log
 
 if TYPE_CHECKING:
+    from collections.abc import Callable
+
     from yarl import URL
 
     from cyberdrop_dl.managers.manager import Manager
diff --git a/cyberdrop_dl/scraper/scraper.py b/cyberdrop_dl/scraper/scraper.py
index 495a205ab..d6348f997 100644
--- a/cyberdrop_dl/scraper/scraper.py
+++ b/cyberdrop_dl/scraper/scraper.py
@@ -587,7 +587,8 @@ async def send_to_crawler(self, scrape_item: ScrapeItem) -> None:
 
         log(f"Unsupported URL: {scrape_item.url}", 30)
         await self.manager.log_manager.write_unsupported_urls_log(
-            scrape_item.url, scrape_item.parents[0] if scrape_item.parents else None
+            scrape_item.url,
+            scrape_item.parents[0] if scrape_item.parents else None,
         )
         self.manager.progress_manager.scrape_stats_progress.add_unsupported()
 
diff --git a/cyberdrop_dl/utils/args/args.py b/cyberdrop_dl/utils/args/args.py
index c526ec082..2623346d6 100644
--- a/cyberdrop_dl/utils/args/args.py
+++ b/cyberdrop_dl/utils/args/args.py
@@ -59,7 +59,14 @@ def parse_args() -> argparse.Namespace:
         help="path to txt file containing urls to download",
         default="",
     )
-    file_paths.add_argument("-d", "--output-folder", type=str, help="path to download folder", default="")
+    file_paths.add_argument(
+        "-d",
+        "--output-folder",
+        dest="download_dir",
+        type=str,
+        help="path to download folder",
+        default="",
+    )
     file_paths.add_argument("--config-file", type=str, help="path to the CDL settings.yaml file to load", default="")
     file_paths.add_argument(
         "--appdata-folder",
diff --git a/cyberdrop_dl/utils/args/browser_cookie_extraction.py b/cyberdrop_dl/utils/args/browser_cookie_extraction.py
index 1b8123eb8..a8862db49 100644
--- a/cyberdrop_dl/utils/args/browser_cookie_extraction.py
+++ b/cyberdrop_dl/utils/args/browser_cookie_extraction.py
@@ -1,7 +1,7 @@
 from __future__ import annotations
 
 from functools import wraps
-from typing import TYPE_CHECKING, Callable
+from typing import TYPE_CHECKING
 
 import browser_cookie3
 from InquirerPy import inquirer
@@ -10,6 +10,7 @@
 from cyberdrop_dl.utils.dataclasses.supported_domains import SupportedDomains
 
 if TYPE_CHECKING:
+    from collections.abc import Callable
     from http.cookiejar import CookieJar
 
     from cyberdrop_dl.managers.manager import Manager
diff --git a/cyberdrop_dl/utils/database/tables/history_table.py b/cyberdrop_dl/utils/database/tables/history_table.py
index 1520505c3..05384bf1c 100644
--- a/cyberdrop_dl/utils/database/tables/history_table.py
+++ b/cyberdrop_dl/utils/database/tables/history_table.py
@@ -2,13 +2,14 @@
 
 import pathlib
 from sqlite3 import IntegrityError, Row
-from typing import TYPE_CHECKING, Any, Iterable
+from typing import TYPE_CHECKING, Any
 
 from cyberdrop_dl.utils.database.table_definitions import create_fixed_history, create_history
 from cyberdrop_dl.utils.utilities import log
 
 if TYPE_CHECKING:
     import datetime
+    from collections.abc import Iterable
 
     import aiosqlite
     from yarl import URL
diff --git a/cyberdrop_dl/utils/transfer/first_time_setup.py b/cyberdrop_dl/utils/transfer/first_time_setup.py
index 1cbd57181..cac9927f6 100644
--- a/cyberdrop_dl/utils/transfer/first_time_setup.py
+++ b/cyberdrop_dl/utils/transfer/first_time_setup.py
@@ -50,7 +50,8 @@ def startup(self) -> None:
 
         if (constants.APP_STORAGE / "download_history.sqlite").is_file():
             transfer_v4_db(
-                constants.APP_STORAGE / "download_history.sqlite", constants.APP_STORAGE / "Cache" / "cyberdrop.db"
+                constants.APP_STORAGE / "download_history.sqlite",
+                constants.APP_STORAGE / "Cache" / "cyberdrop.db",
            )
             (constants.APP_STORAGE / "download_history.sqlite").rename(OLD_FILES / "download_history2.sqlite")
 
diff --git a/cyberdrop_dl/utils/utilities.py b/cyberdrop_dl/utils/utilities.py
index 8b836fe4e..17562e05f 100644
--- a/cyberdrop_dl/utils/utilities.py
+++ b/cyberdrop_dl/utils/utilities.py
@@ -3,7 +3,6 @@
 import contextlib
 import os
 import re
-from collections.abc import Callable
 from functools import wraps
 from pathlib import Path
 from typing import TYPE_CHECKING
@@ -21,6 +20,8 @@
 from cyberdrop_dl.utils.logger import log, log_with_color
 
 if TYPE_CHECKING:
+    from collections.abc import Callable
+
     from cyberdrop_dl.managers.manager import Manager
     from cyberdrop_dl.scraper.crawler import Crawler
     from cyberdrop_dl.utils.dataclasses.url_objects import ScrapeItem