From 6a6e05e531ca2e8d17cca0aaa914ad7fc1b9d276 Mon Sep 17 00:00:00 2001
From: monosans
Date: Thu, 6 Mar 2025 09:57:26 +0300
Subject: [PATCH] handle UnicodeDecodeError on non-utf-8 check_website

---
 proxy_scraper_checker/proxy.py    | 2 +-
 proxy_scraper_checker/settings.py | 4 +++-
 2 files changed, 4 insertions(+), 2 deletions(-)

diff --git a/proxy_scraper_checker/proxy.py b/proxy_scraper_checker/proxy.py
index bd4835e36..44690f616 100644
--- a/proxy_scraper_checker/proxy.py
+++ b/proxy_scraper_checker/proxy.py
@@ -69,7 +69,7 @@ async def check(self, *, settings: Settings) -> None:
             self.exit_ip = parse_ipv4(orjson.loads(content)["origin"])
         elif settings.check_website_type == CheckWebsiteType.PLAIN_IP:
             self.exit_ip = parse_ipv4(
-                content.decode(response.get_encoding(), errors="replace")
+                content.decode(response.get_encoding())
             )
         else:
             self.exit_ip = None
diff --git a/proxy_scraper_checker/settings.py b/proxy_scraper_checker/settings.py
index 4e54251e1..d5ca6402e 100644
--- a/proxy_scraper_checker/settings.py
+++ b/proxy_scraper_checker/settings.py
@@ -158,8 +158,10 @@ async def _get_check_website_type_and_real_ip(
     except orjson.JSONDecodeError:
         try:
             return CheckWebsiteType.PLAIN_IP, parse_ipv4(
-                content.decode(response.get_encoding(), errors="replace")
+                content.decode(response.get_encoding())
             )
+        except UnicodeDecodeError:
+            _logger.error("Error when decoding check_website response as utf-8")
         except ValueError:
             pass
     else:
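
For context, a minimal standalone sketch of the behavior this change relies on. With errors="replace", invalid bytes are silently turned into U+FFFD and the mangled string would presumably only be rejected later inside parse_ipv4; without it, bytes.decode raises UnicodeDecodeError, which the patched settings.py now catches and logs. The payload and encoding below are made-up stand-ins for content and response.get_encoding(), not values taken from the project.

import logging

_logger = logging.getLogger(__name__)

# A response body that is not valid UTF-8 (hypothetical payload; \xe9 is an
# incomplete UTF-8 sequence here).
content = b"192.168.0.\xe9"
encoding = "utf-8"  # stand-in for response.get_encoding()

# Old behavior: invalid bytes are replaced with U+FFFD, no exception is raised,
# and the bad data only surfaces later when the string is parsed as an IP.
print(content.decode(encoding, errors="replace"))  # -> '192.168.0.\ufffd'

# New behavior: decoding fails loudly and is handled explicitly.
try:
    text = content.decode(encoding)
except UnicodeDecodeError:
    _logger.error("Error when decoding check_website response as utf-8")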