diff --git a/cyberdrop_dl/managers/client_manager.py b/cyberdrop_dl/managers/client_manager.py
index 12b4f7d3e..72088d865 100644
--- a/cyberdrop_dl/managers/client_manager.py
+++ b/cyberdrop_dl/managers/client_manager.py
@@ -4,6 +4,7 @@ import ssl
 from http import HTTPStatus
 from typing import TYPE_CHECKING
+import os
 
 import aiohttp
 import certifi
@@ -14,7 +15,7 @@ from cyberdrop_dl.clients.download_client import DownloadClient
 from cyberdrop_dl.clients.errors import DownloadFailure
 from cyberdrop_dl.clients.scraper_client import ScraperClient
-from cyberdrop_dl.utils.utilities import CustomHTTPStatus
+from cyberdrop_dl.utils.utilities import CustomHTTPStatus, log
 
 if TYPE_CHECKING:
     from cyberdrop_dl.managers.manager import Manager
 
@@ -77,6 +78,10 @@ async def check_http_status(self, response: ClientResponse, download: bool = Fal
         headers = response.headers
         response_url = response.url
 
+        if os.getenv("CYDL_ENABLE_SENSITIVE_HTTP_LOGS") == "2":
+            # Log all responses
+            await log(f"Response: {response}\nRequest: {response.request_info}", 10)
+
         if not headers.get('Content-Type'):
             raise DownloadFailure(status=CustomHTTPStatus.IM_A_TEAPOT, message="No content-type in response header")
 
@@ -89,6 +94,10 @@ async def check_http_status(self, response: ClientResponse, download: bool = Fal
         if HTTPStatus.OK <= status < HTTPStatus.BAD_REQUEST:
             return
 
+        if os.getenv("CYDL_ENABLE_SENSITIVE_HTTP_LOGS") == "1":
+            # Only log responses to unsuccessful requests
+            await log(f"Response: {response}\nRequest: {response.request_info}", 10)
+
         try:
             phrase = HTTPStatus(status).phrase
         except ValueError:
diff --git a/cyberdrop_dl/scraper/crawlers/gofile_crawler.py b/cyberdrop_dl/scraper/crawlers/gofile_crawler.py
index 2d52071d4..a93dc1030 100644
--- a/cyberdrop_dl/scraper/crawlers/gofile_crawler.py
+++ b/cyberdrop_dl/scraper/crawlers/gofile_crawler.py
@@ -9,7 +9,7 @@ from aiolimiter import AsyncLimiter
 from yarl import URL
 
-from cyberdrop_dl.clients.errors import ScrapeFailure
+from cyberdrop_dl.clients.errors import ScrapeFailure, DownloadFailure
 from cyberdrop_dl.scraper.crawler import Crawler
 from cyberdrop_dl.utils.dataclasses.url_objects import ScrapeItem
 from cyberdrop_dl.utils.utilities import get_filename_and_ext, error_handling_wrapper
 
@@ -53,7 +53,7 @@ async def album(self, scrape_item: ScrapeItem) -> None:
         try:
             async with self.request_limiter:
                 JSON_Resp = await self.client.get_json(self.domain, self.api_address / "getContent", params)
-        except aiohttp.client_exceptions.ClientResponseError as e:
+        except DownloadFailure as e:
             if e.status == http.HTTPStatus.UNAUTHORIZED:
                 self.websiteToken = ""
                 self.manager.cache_manager.remove("gofile_website_token")