From 44138a13d86f31d5b394489c2a2c4066d8edab9f Mon Sep 17 00:00:00 2001
From: Jules-WinnfieldX
Date: Wed, 26 Jun 2024 14:55:13 -0600
Subject: [PATCH] mediafire throw error

---
 cyberdrop_dl/scraper/crawlers/mediafire_crawler.py | 8 ++++++--
 1 file changed, 6 insertions(+), 2 deletions(-)

diff --git a/cyberdrop_dl/scraper/crawlers/mediafire_crawler.py b/cyberdrop_dl/scraper/crawlers/mediafire_crawler.py
index 6623d5596..1e51c883c 100644
--- a/cyberdrop_dl/scraper/crawlers/mediafire_crawler.py
+++ b/cyberdrop_dl/scraper/crawlers/mediafire_crawler.py
@@ -5,9 +5,10 @@
 from typing import TYPE_CHECKING
 
 from aiolimiter import AsyncLimiter
-from mediafire import MediaFireApi
+from mediafire import MediaFireApi, api
 from yarl import URL
 
+from cyberdrop_dl.clients.errors import ScrapeFailure
 from cyberdrop_dl.scraper.crawler import Crawler
 from cyberdrop_dl.utils.dataclasses.url_objects import ScrapeItem
 from cyberdrop_dl.utils.utilities import error_handling_wrapper, get_filename_and_ext
@@ -46,7 +47,10 @@ async def folder(self, scrape_item: ScrapeItem) -> None:
         chunk = 1
         chunk_size = 100
         while True:
-            folder_contents = self.api.folder_get_content(folder_key=folder_key, content_type='files', chunk=chunk, chunk_size=chunk_size)
+            try:
+                folder_contents = self.api.folder_get_content(folder_key=folder_key, content_type='files', chunk=chunk, chunk_size=chunk_size)
+            except api.MediaFireConnectionError:
+                raise ScrapeFailure(500, "MediaFire connection closed")
             files = folder_contents['folder_content']['files']
 
             for file in files:
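
For reference, the error translation this patch introduces can be sketched outside the crawler. The sketch below only relies on calls already shown in the diff (MediaFireApi.folder_get_content, api.MediaFireConnectionError, and ScrapeFailure taking a status code and a message); ScrapeFailure is a local stand-in because its real definition in cyberdrop_dl.clients.errors is not part of this patch, and the "more_chunks"-based loop termination is an assumption about the folder_get_content response, not something shown in the diff.

# Minimal standalone sketch of the pattern added by this patch: wrap the
# paginated folder listing and translate a dropped MediaFire connection
# into a scrape-level error instead of letting it crash the caller.
from mediafire import MediaFireApi, api


class ScrapeFailure(Exception):
    """Local stand-in for cyberdrop_dl.clients.errors.ScrapeFailure (assumed shape)."""

    def __init__(self, status: int, message: str) -> None:
        super().__init__(f"{status}: {message}")
        self.status = status
        self.message = message


def list_folder_files(folder_key: str, chunk_size: int = 100) -> list[dict]:
    """Page through a MediaFire folder's files, raising ScrapeFailure on a closed connection."""
    api_client = MediaFireApi()
    files: list[dict] = []
    chunk = 1
    while True:
        try:
            folder_contents = api_client.folder_get_content(
                folder_key=folder_key,
                content_type="files",
                chunk=chunk,
                chunk_size=chunk_size,
            )
        except api.MediaFireConnectionError:
            # Same translation as the patch: surface a scrape-level error
            # rather than propagating the SDK's connection exception.
            raise ScrapeFailure(500, "MediaFire connection closed")
        content = folder_contents["folder_content"]
        files.extend(content["files"])
        # Assumption: the response indicates remaining pages via "more_chunks";
        # stop once it no longer reports "yes".
        if content.get("more_chunks") != "yes":
            break
        chunk += 1
    return files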