Skip to content
This repository has been archived by the owner on Jul 5, 2024. It is now read-only.

Commit

Permalink
mediafire: raise ScrapeFailure on MediaFire connection error
Browse files Browse the repository at this point in the history
  • Loading branch information
Jules-WinnfieldX committed Jun 26, 2024
1 parent eafd99f commit 44138a1
Showing 1 changed file with 6 additions and 2 deletions.
8 changes: 6 additions & 2 deletions cyberdrop_dl/scraper/crawlers/mediafire_crawler.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,9 +5,10 @@
from typing import TYPE_CHECKING

from aiolimiter import AsyncLimiter
from mediafire import MediaFireApi
from mediafire import MediaFireApi, api
from yarl import URL

from cyberdrop_dl.clients.errors import ScrapeFailure
from cyberdrop_dl.scraper.crawler import Crawler
from cyberdrop_dl.utils.dataclasses.url_objects import ScrapeItem
from cyberdrop_dl.utils.utilities import error_handling_wrapper, get_filename_and_ext
Expand Down Expand Up @@ -46,7 +47,10 @@ async def folder(self, scrape_item: ScrapeItem) -> None:
chunk = 1
chunk_size = 100
while True:
folder_contents = self.api.folder_get_content(folder_key=folder_key, content_type='files', chunk=chunk, chunk_size=chunk_size)
try:
folder_contents = self.api.folder_get_content(folder_key=folder_key, content_type='files', chunk=chunk, chunk_size=chunk_size)
except api.MediaFireConnectionError:
raise ScrapeFailure(500, "MediaFire connection closed")
files = folder_contents['folder_content']['files']

for file in files:
Expand Down

0 comments on commit 44138a1

Please sign in to comment.