Skip to content

Commit

Permalink
refactor: remove unnecessary async definitions
Browse files Browse the repository at this point in the history
  • Loading branch information
NTFSvolume committed Nov 8, 2024
1 parent 8a28f30 commit 9cfcef5
Show file tree
Hide file tree
Showing 70 changed files with 531 additions and 557 deletions.
57 changes: 26 additions & 31 deletions cyberdrop_dl/clients/download_client.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,7 +12,7 @@
import aiohttp
from aiohttp import ClientSession

from cyberdrop_dl.clients.errors import DownloadError, InvalidContentTypeError
from cyberdrop_dl.clients.errors import DownloadError, InsufficientFreeSpaceError, InvalidContentTypeError
from cyberdrop_dl.utils.constants import DEBUG_VAR, FILE_FORMATS
from cyberdrop_dl.utils.logger import log

Expand Down Expand Up @@ -105,13 +105,13 @@ async def _download(
domain == "pixeldrain"
and self.manager.config_manager.authentication_data["PixelDrain"]["pixeldrain_api_key"]
):
download_headers["Authorization"] = await self.manager.download_manager.basic_auth(
download_headers["Authorization"] = self.manager.download_manager.basic_auth(
"Cyberdrop-DL",
self.manager.config_manager.authentication_data["PixelDrain"]["pixeldrain_api_key"],
)

downloaded_filename = await self.manager.db_manager.history_table.get_downloaded_filename(domain, media_item)
download_dir = await self.get_download_dir(media_item)
download_dir = self.get_download_dir(media_item)
media_item.partial_file = download_dir / f"{downloaded_filename}.part"

resume_point = 0
Expand All @@ -138,13 +138,13 @@ async def _download(
if not isinstance(media_item.complete_file, Path):
proceed, skip = await self.get_final_file_info(media_item, domain)
await self.mark_incomplete(media_item, domain)
await self.client_manager.check_bunkr_maint(resp.headers)
self.client_manager.check_bunkr_maint(resp.headers)
if skip:
await self.manager.progress_manager.download_progress.add_skipped()
self.manager.progress_manager.download_progress.add_skipped()
return False
if not proceed:
log(f"Skipping {media_item.url} as it has already been downloaded", 10)
await self.manager.progress_manager.download_progress.add_previously_completed(False)
self.manager.progress_manager.download_progress.add_previously_completed(False)
await self.process_completed(media_item, domain)
await self.handle_media_item_completion(media_item, downloaded=False)

Expand All @@ -161,13 +161,13 @@ async def _download(
if resp.status != HTTPStatus.PARTIAL_CONTENT and media_item.partial_file.is_file():
media_item.partial_file.unlink()

media_item.task_id = await self.manager.progress_manager.file_progress.add_task(
media_item.task_id = self.manager.progress_manager.file_progress.add_task(
f"({domain.upper()}) {media_item.filename}",
media_item.filesize + resume_point,
)
if media_item.partial_file.is_file():
resume_point = media_item.partial_file.stat().st_size
await self.manager.progress_manager.file_progress.advance_file(media_item.task_id, resume_point)
self.manager.progress_manager.file_progress.advance_file(media_item.task_id, resume_point)

await save_content(resp.content)
return True
Expand All @@ -179,8 +179,8 @@ async def _append_content(
update_progress: partial,
) -> None:
"""Appends content to a file."""
if not await self.client_manager.manager.download_manager.check_free_space(media_item.download_folder):
raise DownloadError(status="Insufficient Free Space", message="Not enough free space")
if not self.client_manager.manager.download_manager.check_free_space(media_item.download_folder):
raise InsufficientFreeSpaceError(origin=media_item)

media_item.partial_file.parent.mkdir(parents=True, exist_ok=True)
if not media_item.partial_file.is_file():
Expand All @@ -190,7 +190,7 @@ async def _append_content(
await self.client_manager.check_bucket(chunk)
await asyncio.sleep(0)
await f.write(chunk)
await update_progress(len(chunk))
update_progress(len(chunk))
if not content.total_bytes and not media_item.partial_file.stat().st_size:
media_item.partial_file.unlink()
raise DownloadError(status=HTTPStatus.INTERNAL_SERVER_ERROR, message="File is empty")
Expand All @@ -199,7 +199,7 @@ async def download_file(self, manager: Manager, domain: str, media_item: MediaIt
"""Starts a file."""
if self.manager.config_manager.settings_data["Download_Options"]["skip_download_mark_completed"]:
log(f"Download Skip {media_item.url} due to mark completed option", 10)
await self.manager.progress_manager.download_progress.add_skipped()
self.manager.progress_manager.download_progress.add_skipped()
# set completed path
await self.mark_incomplete(media_item, domain)
await self.process_completed(media_item, domain)
Expand Down Expand Up @@ -235,7 +235,7 @@ async def mark_completed(self, domain: str, media_item: MediaItem) -> None:

async def add_file_size(self, domain: str, media_item: MediaItem) -> None:
if not isinstance(media_item.complete_file, Path):
media_item.complete_file = await self.get_file_location(media_item)
media_item.complete_file = self.get_file_location(media_item)
if media_item.complete_file.exists():
await self.manager.db_manager.history_table.add_filesize(domain, media_item)

Expand All @@ -255,7 +255,7 @@ async def handle_media_item_completion(self, media_item: MediaItem, downloaded:

"""~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~"""

async def get_download_dir(self, media_item: MediaItem) -> Path:
def get_download_dir(self, media_item: MediaItem) -> Path:
"""Returns the download directory for the media item."""
download_folder = media_item.download_folder
if self.manager.args_manager.retry_any:
Expand All @@ -267,13 +267,13 @@ async def get_download_dir(self, media_item: MediaItem) -> Path:
media_item.download_folder = download_folder
return download_folder

async def get_file_location(self, media_item: MediaItem) -> Path:
download_dir = await self.get_download_dir(media_item)
def get_file_location(self, media_item: MediaItem) -> Path:
    """Return the final on-disk path for *media_item* (download dir joined with its filename)."""
    return self.get_download_dir(media_item) / media_item.filename

async def get_final_file_info(self, media_item: MediaItem, domain: str) -> tuple[bool, bool]:
"""Complicated checker for if a file already exists, and was already downloaded."""
media_item.complete_file = await self.get_file_location(media_item)
media_item.complete_file = self.get_file_location(media_item)
media_item.partial_file = media_item.complete_file.with_suffix(media_item.complete_file.suffix + ".part")

expected_size = media_item.filesize if isinstance(media_item.filesize, int) else None
Expand All @@ -297,7 +297,7 @@ async def get_final_file_info(self, media_item: MediaItem, domain: str) -> tuple

while True:
if expected_size:
file_size_check = await self.check_filesize_limits(media_item)
file_size_check = self.check_filesize_limits(media_item)
if not file_size_check:
log(f"Download Skip {media_item.url} due to filesize restrictions", 10)
proceed = False
Expand Down Expand Up @@ -370,20 +370,15 @@ async def iterate_filename(self, complete_file: Path, media_item: MediaItem) ->
break
return complete_file, partial_file

async def check_filesize_limits(self, media: MediaItem) -> bool:
def check_filesize_limits(self, media: MediaItem) -> bool:
"""Checks if the file size is within the limits."""
max_video_filesize = self.manager.config_manager.settings_data["File_Size_Limits"][
"maximum_video_size"
] or float("inf")
min_video_filesize = self.manager.config_manager.settings_data["File_Size_Limits"]["minimum_video_size"]
max_image_filesize = self.manager.config_manager.settings_data["File_Size_Limits"][
"maximum_image_size"
] or float("inf")
min_image_filesize = self.manager.config_manager.settings_data["File_Size_Limits"]["minimum_image_size"]
max_other_filesize = self.manager.config_manager.settings_data["File_Size_Limits"][
"maximum_other_size"
] or float("inf")
min_other_filesize = self.manager.config_manager.settings_data["File_Size_Limits"]["minimum_other_size"]
file_size_limits = self.manager.config_manager.settings_data["File_Size_Limits"]
max_video_filesize = file_size_limits["maximum_video_size"] or float("inf")
min_video_filesize = file_size_limits["minimum_video_size"]
max_image_filesize = file_size_limits["maximum_image_size"] or float("inf")
min_image_filesize = file_size_limits["minimum_image_size"]
max_other_filesize = file_size_limits["maximum_other_size"] or float("inf")
min_other_filesize = file_size_limits["minimum_other_size"]

if media.ext in FILE_FORMATS["Images"]:
proceed = min_image_filesize < media.filesize < max_image_filesize
Expand Down
20 changes: 10 additions & 10 deletions cyberdrop_dl/clients/hash_client.py
Original file line number Diff line number Diff line change
Expand Up @@ -69,7 +69,7 @@ async def hash_item(self, file: Path | str, original_filename: str, referer: URL
return None
if self.hashes[key]:
return self.hashes[key]
await self.manager.progress_manager.hash_progress.update_currently_hashing(file)
self.manager.progress_manager.hash_progress.update_currently_hashing(file)
hash = await self.manager.db_manager.hash_table.get_file_hash_exists(file)
try:
if not hash:
Expand All @@ -80,9 +80,9 @@ async def hash_item(self, file: Path | str, original_filename: str, referer: URL
original_filename,
referer,
)
await self.manager.progress_manager.hash_progress.add_new_completed_hash()
self.manager.progress_manager.hash_progress.add_new_completed_hash()
else:
await self.manager.progress_manager.hash_progress.add_prev_hash()
self.manager.progress_manager.hash_progress.add_prev_hash()
await self.manager.db_manager.hash_table.insert_or_update_hash_db(
hash,
file,
Expand All @@ -102,12 +102,12 @@ async def hash_item_during_download(self, media_item: MediaItem) -> None:
log(f"After hash processing failed: {media_item.complete_file} with error {e}", 40, exc_info=True)

async def cleanup_dupes(self) -> None:
async with self.manager.live_manager.get_hash_live():
with self.manager.live_manager.get_hash_live():
if not self.manager.config_manager.global_settings_data["Dupe_Cleanup_Options"]["delete_after_download"]:
return
file_hashes_dict = await self.get_file_hashes_dict()
async with self.manager.live_manager.get_remove_file_via_hash_live():
final_candiates_dict = await self.get_candiate_per_group(file_hashes_dict)
final_candiates_dict = self.get_candiate_per_group(file_hashes_dict)
await self.final_dupe_cleanup(final_candiates_dict)

async def final_dupe_cleanup(self, final_dict: dict[str, dict]) -> None:
Expand Down Expand Up @@ -135,7 +135,7 @@ async def final_dupe_cleanup(self, final_dict: dict[str, dict]) -> None:
try:
self.send2trash(ele)
log(f"Sent prev download: {ele!s} to trash with hash {hash}", 10)
await self.manager.progress_manager.hash_progress.add_removed_prev_file()
self.manager.progress_manager.hash_progress.add_removed_prev_file()
except OSError:
continue
# keep a previous downloads
Expand All @@ -146,7 +146,7 @@ async def final_dupe_cleanup(self, final_dict: dict[str, dict]) -> None:
try:
self.send2trash(ele)
log(f"Sent prev download: {ele!s} to trash with hash {hash}", 10)
await self.manager.progress_manager.hash_progress.add_removed_prev_file()
self.manager.progress_manager.hash_progress.add_removed_prev_file()
except OSError:
continue
# delete current download
Expand All @@ -155,7 +155,7 @@ async def final_dupe_cleanup(self, final_dict: dict[str, dict]) -> None:
if selected_file.exists():
self.send2trash(selected_file)
log(f"Sent new download:{selected_file} to trash with hash {hash}", 10)
await self.manager.progress_manager.hash_progress.add_removed_file()
self.manager.progress_manager.hash_progress.add_removed_file()

except OSError:
pass
Expand All @@ -174,7 +174,7 @@ async def get_file_hashes_dict(self) -> dict:
log(f"After hash processing failed: {item} with error {e}", 40, exc_info=True)
return hashes_dict

async def get_candiate_per_group(self, hashes_dict: dict[str, dict[int, list[Path]]]) -> dict:
def get_candiate_per_group(self, hashes_dict: dict[str, dict[int, list[Path]]]) -> dict:
# remove downloaded files, so each group only has the one previously downloaded file or the first downloaded file
for hash, size_dict in hashes_dict.items():
for size, files in size_dict.items():
Expand All @@ -189,7 +189,7 @@ async def get_candiate_per_group(self, hashes_dict: dict[str, dict[int, list[Pat
try:
self.send2trash(file)
log(f"Sent new download : {file} to trash with hash {hash}", 10)
await self.manager.progress_manager.hash_progress.add_removed_file()
self.manager.progress_manager.hash_progress.add_removed_file()
except OSError:
pass

Expand Down
Loading

0 comments on commit 9cfcef5

Please sign in to comment.