fix: minor fixes 2
NTFSvolume committed Nov 8, 2024
1 parent b9178c9 commit 24ed68c
Showing 23 changed files with 55 additions and 25 deletions.
3 changes: 2 additions & 1 deletion cyberdrop_dl/clients/download_client.py
@@ -17,7 +17,8 @@
from cyberdrop_dl.utils.logger import log

if TYPE_CHECKING:
-from typing import Any, Callable, Coroutine
+from collections.abc import Callable, Coroutine
+from typing import Any

from cyberdrop_dl.managers.client_manager import ClientManager
from cyberdrop_dl.managers.manager import Manager
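This hunk shows the pattern repeated across the commit: abstract types such as Callable and Coroutine are imported from collections.abc, their canonical home, and typing-only imports are moved under the if TYPE_CHECKING: guard so they are never executed at runtime. A minimal sketch of the pattern (the schedule function is hypothetical, for illustration only):

from __future__ import annotations

from typing import TYPE_CHECKING

if TYPE_CHECKING:  # True only for static type checkers, False at runtime
    from collections.abc import Callable, Coroutine
    from typing import Any


def schedule(factory: Callable[[], Coroutine[Any, Any, None]]) -> None:
    # With "from __future__ import annotations" the annotation above stays a
    # string, so the guarded imports are never looked up when the module runs.
    ...

The remaining import-only hunks in this commit apply the same move to AsyncGenerator, AsyncIterator, Generator, and Iterable.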
2 changes: 1 addition & 1 deletion cyberdrop_dl/clients/errors.py
@@ -26,7 +26,7 @@ def __init__(
self.ui_message = ui_message
self.message = message or ui_message
self.origin = origin
-if origin and not isinstance(origin, (URL, Path)):
+if origin and not isinstance(origin, URL | Path):
self.origin = origin.parents[0] if origin.parents else None
super().__init__(self.message)
if status:
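Passing a union type to isinstance(), as in URL | Path, is supported since Python 3.10 and is equivalent to the older tuple form. A quick illustration with standard-library types only:

from pathlib import Path

value = Path("downloads/file.txt")

print(isinstance(value, (str, Path)))  # True, tuple form
print(isinstance(value, str | Path))   # True, union form (Python 3.10+)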
3 changes: 2 additions & 1 deletion cyberdrop_dl/clients/hash_client.py
@@ -3,7 +3,6 @@
import asyncio
import time
from collections import defaultdict
-from collections.abc import AsyncGenerator
from contextlib import asynccontextmanager
from pathlib import Path
from typing import TYPE_CHECKING
@@ -14,6 +13,8 @@
from cyberdrop_dl.utils.logger import log

if TYPE_CHECKING:
+from collections.abc import AsyncGenerator
+
from yarl import URL

from cyberdrop_dl.managers.manager import Manager
4 changes: 3 additions & 1 deletion cyberdrop_dl/clients/scraper_client.py
@@ -2,7 +2,7 @@

import json
from functools import wraps
-from typing import TYPE_CHECKING, Any, Callable
+from typing import TYPE_CHECKING, Any

import aiohttp
from aiohttp import ClientSession
@@ -14,6 +14,8 @@
from cyberdrop_dl.utils.logger import log

if TYPE_CHECKING:
+from collections.abc import Callable
+
from multidict import CIMultiDictProxy

from cyberdrop_dl.managers.client_manager import ClientManager
6 changes: 4 additions & 2 deletions cyberdrop_dl/downloader/downloader.py
@@ -6,7 +6,7 @@
from functools import wraps
from http import HTTPStatus
from pathlib import Path
-from typing import TYPE_CHECKING, Callable
+from typing import TYPE_CHECKING

import aiohttp
from filedate import File
@@ -18,6 +18,8 @@
from cyberdrop_dl.utils.logger import log

if TYPE_CHECKING:
+from collections.abc import Callable
+
from cyberdrop_dl.clients.download_client import DownloadClient
from cyberdrop_dl.managers.manager import Manager
from cyberdrop_dl.utils.dataclasses.url_objects import MediaItem
@@ -214,7 +216,7 @@ async def download(self, media_item: MediaItem) -> None:
FileNotFoundError,
PermissionError,
aiohttp.ServerDisconnectedError,
-asyncio.TimeoutError,
+TimeoutError,
aiohttp.ServerTimeoutError,
) as e:
ui_message = getattr(e, "status", type(e).__name__)
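Since Python 3.11, asyncio.TimeoutError is an alias of the builtin TimeoutError, so catching the builtin also covers timeouts raised inside asyncio. A small sketch of the equivalence:

import asyncio

# On Python 3.11+ the two names refer to the same class.
assert asyncio.TimeoutError is TimeoutError


async def fetch_with_timeout() -> None:
    try:
        await asyncio.wait_for(asyncio.sleep(10), timeout=0.01)
    except TimeoutError:  # also catches what used to be asyncio.TimeoutError
        print("timed out")


asyncio.run(fetch_with_timeout())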
2 changes: 1 addition & 1 deletion cyberdrop_dl/managers/args_manager.py
@@ -150,7 +150,7 @@ def startup(self) -> None:
del self.parsed_args["retry_all"]
del self.parsed_args["retry_maintenance"]
del self.parsed_args["input_file"]
-del self.parsed_args["output_folder"]
+del self.parsed_args["download_dir"]
del self.parsed_args["appdata_dir"]
del self.parsed_args["config_file"]
del self.parsed_args["log_folder"]
4 changes: 3 additions & 1 deletion cyberdrop_dl/managers/config_manager.py
@@ -285,7 +285,9 @@ def write_updated_settings_config(self) -> None:
settings_data["Logs"]["log_folder"] = str(settings_data["Logs"]["log_folder"])
settings_data["Logs"]["webhook_url"] = str(settings_data["Logs"]["webhook_url"])
settings_data["Sorting"]["sort_folder"] = str(settings_data["Sorting"]["sort_folder"])
-settings_data["Sorting"]["scan_folder"] = str(settings_data["Sorting"]["scan_folder"])
+settings_data["Sorting"]["scan_folder"] = (
+str(settings_data["Sorting"]["scan_folder"]) if settings_data["Sorting"]["scan_folder"] else None
+)

_save_yaml(self.settings, settings_data)

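The new guard matters because str(None) is the string "None": an unset scan_folder would otherwise be written to the settings file as a bogus path instead of staying null. A minimal illustration:

scan_folder = None

print(str(scan_folder))                           # 'None' -> would round-trip as a fake path
print(str(scan_folder) if scan_folder else None)  # None   -> stays a real null when dumped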
2 changes: 1 addition & 1 deletion cyberdrop_dl/managers/leaky.py
@@ -33,7 +33,7 @@ async def acquire(self, amount: float = 1) -> None:
# 'early' if capacity has come up
fut = loop.create_future()
self._waiters[task] = fut
-with contextlib.suppress(asyncio.TimeoutError):
+with contextlib.suppress(TimeoutError):
await wait_for(asyncio.shield(fut), 1 / self._rate_per_sec * amount, loop=loop)
fut.cancel()
self._waiters.pop(task, None)
5 changes: 3 additions & 2 deletions cyberdrop_dl/managers/live_manager.py
@@ -1,6 +1,5 @@
from __future__ import annotations

-from collections.abc import Generator
from contextlib import contextmanager
from typing import TYPE_CHECKING

@@ -10,6 +9,8 @@
from cyberdrop_dl.utils.logger import console, log

if TYPE_CHECKING:
+from collections.abc import Generator
+
from rich.layout import Layout

from cyberdrop_dl.managers.manager import Manager
@@ -46,7 +47,7 @@ def get_live(self, layout: Layout, stop: bool = False) -> Generator[Live]:
for sub_exception in e.exceptions:
msg = f"Multiple exception caught: {type(sub_exception).__name__} - {sub_exception}"
log(msg, 50, exc_info=sub_exception)
-raise e
+raise
finally:
if stop:
self.live.stop()
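A bare raise inside an except block re-raises the exception currently being handled, so it preserves the original traceback and avoids naming the caught object; linters flag raise e here as a verbose re-raise. A short sketch with a hypothetical helper:

import logging


def parse_count(value: str) -> int:
    try:
        return int(value)
    except ValueError as e:
        logging.error("could not parse %r: %s", value, e)
        # Bare raise propagates the active ValueError with its traceback intact.
        raise

Calling parse_count("abc") logs the failure and still lets the original ValueError reach the caller.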
2 changes: 1 addition & 1 deletion cyberdrop_dl/managers/log_manager.py
@@ -94,7 +94,7 @@ async def update_last_forum_post(self) -> None:
new_base_urls.append(base_url)

updated_urls = current_urls.copy()
-for new_url, base in zip(new_urls, new_base_urls):
+for new_url, base in zip(new_urls, new_base_urls, strict=False):
if base in current_base_urls:
index = current_base_urls.index(base)
old_url = current_urls[index]
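zip() accepts a strict keyword since Python 3.10; strict=False spells out the default truncate-to-shortest behavior, which satisfies linters that require an explicit choice (for example Ruff's zip-without-explicit-strict check), while strict=True would raise on mismatched lengths. A quick sketch:

urls = ["https://a.example/1", "https://b.example/2", "https://c.example/3"]
bases = ["a.example", "b.example"]

# strict=False truncates to the shorter input, same as a bare zip().
print(list(zip(urls, bases, strict=False)))  # two pairs

# zip(urls, bases, strict=True) would raise ValueError here.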
2 changes: 1 addition & 1 deletion cyberdrop_dl/managers/real_debrid/api.py
@@ -1,7 +1,6 @@
from __future__ import annotations

import time
-from collections.abc import Generator
from contextlib import contextmanager
from datetime import date, datetime, timedelta
from typing import TYPE_CHECKING
@@ -13,6 +12,7 @@
from cyberdrop_dl.managers.real_debrid.errors import RealDebridError

if TYPE_CHECKING:
+from collections.abc import Generator
from pathlib import Path

from requests import Response
5 changes: 4 additions & 1 deletion cyberdrop_dl/managers/real_debrid/errors.py
@@ -1,3 +1,5 @@
+from __future__ import annotations
+
from http import HTTPStatus
from typing import TYPE_CHECKING

@@ -50,7 +52,7 @@
class RealDebridError(BaseException):
"""Base RealDebrid API error."""

-def __init__(self, response: "Response") -> None:
+def __init__(self, response: Response) -> None:
self.path = URL(response.url).path
try:
JSONResp: dict = response.json()
@@ -63,5 +65,6 @@ def __init__(self, response: "Response") -> None:
self.code = response.status_code
self.error = f"{self.code} - {HTTPStatus(self.code).phrase}"

+self.error = self.error.capitalize()
self.msg = f"{self.code}: {self.error} at {self.path}"
super().__init__(self.msg)
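Adding from __future__ import annotations (PEP 563) turns every annotation in the module into a string at runtime, which is why the Response annotation can drop its quotes even though requests is only imported under TYPE_CHECKING. A sketch of the same layout with a hypothetical error class:

from __future__ import annotations

from typing import TYPE_CHECKING

if TYPE_CHECKING:
    from requests import Response  # imported for type checking only


class ApiError(Exception):
    def __init__(self, response: Response) -> None:
        # The annotation above is never evaluated at runtime, so the guarded
        # import is enough and the quotes around the type are no longer needed.
        super().__init__(f"HTTP {response.status_code} at {response.url}")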
3 changes: 2 additions & 1 deletion cyberdrop_dl/scraper/crawlers/chevereto_crawler.py
@@ -3,7 +3,6 @@
import calendar
import datetime
import re
-from collections.abc import AsyncGenerator
from typing import TYPE_CHECKING, ClassVar

from aiolimiter import AsyncLimiter
@@ -15,6 +14,8 @@
from cyberdrop_dl.utils.utilities import error_handling_wrapper, get_filename_and_ext

if TYPE_CHECKING:
+from collections.abc import AsyncGenerator
+
from cyberdrop_dl.managers.manager import Manager
from cyberdrop_dl.utils.dataclasses.url_objects import ScrapeItem

3 changes: 2 additions & 1 deletion cyberdrop_dl/scraper/crawlers/reddit_crawler.py
@@ -1,7 +1,6 @@
from __future__ import annotations

import contextlib
-from collections.abc import AsyncIterator
from typing import TYPE_CHECKING

import aiohttp
@@ -17,6 +16,8 @@
from cyberdrop_dl.utils.utilities import error_handling_wrapper, get_filename_and_ext

if TYPE_CHECKING:
+from collections.abc import AsyncIterator
+
from asyncpraw.models import Redditor, Submission, Subreddit

from cyberdrop_dl.managers.manager import Manager
3 changes: 2 additions & 1 deletion cyberdrop_dl/scraper/crawlers/tokyomotion_crawler.py
@@ -2,7 +2,6 @@

import re
from calendar import timegm
-from collections.abc import AsyncGenerator
from datetime import datetime, timedelta
from typing import TYPE_CHECKING

@@ -15,6 +14,8 @@
from cyberdrop_dl.utils.utilities import error_handling_wrapper, get_filename_and_ext

if TYPE_CHECKING:
+from collections.abc import AsyncGenerator
+
from bs4 import BeautifulSoup

from cyberdrop_dl.managers.manager import Manager
3 changes: 2 additions & 1 deletion cyberdrop_dl/scraper/crawlers/xxxbunker_crawler.py
@@ -3,7 +3,6 @@
import asyncio
import re
from calendar import timegm
-from collections.abc import AsyncGenerator
from datetime import datetime, timedelta
from typing import TYPE_CHECKING

@@ -17,6 +16,8 @@
from cyberdrop_dl.utils.utilities import error_handling_wrapper, get_filename_and_ext

if TYPE_CHECKING:
+from collections.abc import AsyncGenerator
+
from cyberdrop_dl.managers.manager import Manager
from cyberdrop_dl.utils.dataclasses.url_objects import ScrapeItem

4 changes: 3 additions & 1 deletion cyberdrop_dl/scraper/jdownloader.py
@@ -3,14 +3,16 @@
from dataclasses import field
from functools import wraps
from pathlib import Path
-from typing import TYPE_CHECKING, Callable
+from typing import TYPE_CHECKING

from myjdapi import myjdapi

from cyberdrop_dl.clients.errors import JDownloaderError
from cyberdrop_dl.utils.logger import log

if TYPE_CHECKING:
+from collections.abc import Callable
+
from yarl import URL

from cyberdrop_dl.managers.manager import Manager
3 changes: 2 additions & 1 deletion cyberdrop_dl/scraper/scraper.py
@@ -587,7 +587,8 @@ async def send_to_crawler(self, scrape_item: ScrapeItem) -> None:

log(f"Unsupported URL: {scrape_item.url}", 30)
await self.manager.log_manager.write_unsupported_urls_log(
-scrape_item.url, scrape_item.parents[0] if scrape_item.parents else None
+scrape_item.url,
+scrape_item.parents[0] if scrape_item.parents else None,
)
self.manager.progress_manager.scrape_stats_progress.add_unsupported()

9 changes: 8 additions & 1 deletion cyberdrop_dl/utils/args/args.py
@@ -59,7 +59,14 @@ def parse_args() -> argparse.Namespace:
help="path to txt file containing urls to download",
default="",
)
-file_paths.add_argument("-d", "--output-folder", type=str, help="path to download folder", default="")
+file_paths.add_argument(
+"-d",
+"--output-folder",
+dest="download_dir",
+type=str,
+help="path to download folder",
+default="",
+)
file_paths.add_argument("--config-file", type=str, help="path to the CDL settings.yaml file to load", default="")
file_paths.add_argument(
"--appdata-folder",
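The added dest="download_dir" stores the -d/--output-folder value on args.download_dir instead of the auto-derived args.output_folder, matching the key the args manager now deletes. A minimal sketch of how dest changes the attribute name:

import argparse

parser = argparse.ArgumentParser()
parser.add_argument("-d", "--output-folder", dest="download_dir", type=str, default="")

args = parser.parse_args(["-d", "~/Downloads"])
print(args.download_dir)  # '~/Downloads'
# Without dest=, argparse would expose the value as args.output_folder instead.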
3 changes: 2 additions & 1 deletion cyberdrop_dl/utils/args/browser_cookie_extraction.py
@@ -1,7 +1,7 @@
from __future__ import annotations

from functools import wraps
-from typing import TYPE_CHECKING, Callable
+from typing import TYPE_CHECKING

import browser_cookie3
from InquirerPy import inquirer
@@ -10,6 +10,7 @@
from cyberdrop_dl.utils.dataclasses.supported_domains import SupportedDomains

if TYPE_CHECKING:
+from collections.abc import Callable
from http.cookiejar import CookieJar

from cyberdrop_dl.managers.manager import Manager
3 changes: 2 additions & 1 deletion cyberdrop_dl/utils/database/tables/history_table.py
@@ -2,13 +2,14 @@

import pathlib
from sqlite3 import IntegrityError, Row
-from typing import TYPE_CHECKING, Any, Iterable
+from typing import TYPE_CHECKING, Any

from cyberdrop_dl.utils.database.table_definitions import create_fixed_history, create_history
from cyberdrop_dl.utils.utilities import log

if TYPE_CHECKING:
import datetime
+from collections.abc import Iterable

import aiosqlite
from yarl import URL
3 changes: 2 additions & 1 deletion cyberdrop_dl/utils/transfer/first_time_setup.py
@@ -50,7 +50,8 @@ def startup(self) -> None:

if (constants.APP_STORAGE / "download_history.sqlite").is_file():
transfer_v4_db(
-constants.APP_STORAGE / "download_history.sqlite", constants.APP_STORAGE / "Cache" / "cyberdrop.db"
+constants.APP_STORAGE / "download_history.sqlite",
+constants.APP_STORAGE / "Cache" / "cyberdrop.db",
)
(constants.APP_STORAGE / "download_history.sqlite").rename(OLD_FILES / "download_history2.sqlite")

3 changes: 2 additions & 1 deletion cyberdrop_dl/utils/utilities.py
@@ -3,7 +3,6 @@
import contextlib
import os
import re
-from collections.abc import Callable
from functools import wraps
from pathlib import Path
from typing import TYPE_CHECKING
@@ -21,6 +20,8 @@
from cyberdrop_dl.utils.logger import log, log_with_color

if TYPE_CHECKING:
+from collections.abc import Callable
+
from cyberdrop_dl.managers.manager import Manager
from cyberdrop_dl.scraper.crawler import Crawler
from cyberdrop_dl.utils.dataclasses.url_objects import ScrapeItem
