diff --git a/mediaflow_proxy/configs.py b/mediaflow_proxy/configs.py
index da6bfbd..061ff36 100644
--- a/mediaflow_proxy/configs.py
+++ b/mediaflow_proxy/configs.py
@@ -52,6 +52,7 @@ class Config:
class Settings(BaseSettings):
api_password: str # The password for accessing the API endpoints.
+ log_level: str = "INFO" # The logging level to use.
transport_config: TransportConfig = Field(default_factory=TransportConfig) # Configuration for httpx transport.
enable_streaming_progress: bool = False # Whether to enable streaming progress tracking.
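
Since `Settings` is a pydantic-settings model, the new `log_level` field can be driven from the environment without code changes. A minimal sketch (values are hypothetical, not part of the patch):

```python
import os
from pydantic_settings import BaseSettings

class Settings(BaseSettings):
    api_password: str
    log_level: str = "INFO"

# pydantic-settings matches environment variables case-insensitively by default
os.environ["API_PASSWORD"] = "secret"   # hypothetical value
os.environ["LOG_LEVEL"] = "DEBUG"
print(Settings().log_level)  # -> DEBUG
```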
diff --git a/mediaflow_proxy/const.py b/mediaflow_proxy/const.py
index 33d5c13..49d2464 100644
--- a/mediaflow_proxy/const.py
+++ b/mediaflow_proxy/const.py
@@ -12,13 +12,6 @@
]
SUPPORTED_REQUEST_HEADERS = [
- "accept",
- "accept-encoding",
- "accept-language",
- "connection",
"range",
"if-range",
- "user-agent",
- "referer",
- "origin",
]
diff --git a/mediaflow_proxy/extractors/base.py b/mediaflow_proxy/extractors/base.py
index 4cdd0f0..bf8a15b 100644
--- a/mediaflow_proxy/extractors/base.py
+++ b/mediaflow_proxy/extractors/base.py
@@ -1,5 +1,5 @@
from abc import ABC, abstractmethod
-from typing import Dict, Tuple, Optional
+from typing import Dict, Optional, Any
import httpx
@@ -7,23 +7,32 @@
from mediaflow_proxy.utils.http_utils import create_httpx_client
+class ExtractorError(Exception):
+ """Base exception for all extractors."""
+
+ pass
+
+
class BaseExtractor(ABC):
"""Base class for all URL extractors."""
def __init__(self, request_headers: dict):
self.base_headers = {
"user-agent": settings.user_agent,
- "accept-language": "en-US,en;q=0.5",
}
+ self.mediaflow_endpoint = "proxy_stream_endpoint"
self.base_headers.update(request_headers)
- async def _make_request(self, url: str, headers: Optional[Dict] = None, **kwargs) -> httpx.Response:
+ async def _make_request(
+ self, url: str, method: str = "GET", headers: Optional[Dict] = None, **kwargs
+ ) -> httpx.Response:
"""Make HTTP request with error handling."""
try:
async with create_httpx_client() as client:
-            request_headers = self.base_headers
+            request_headers = self.base_headers.copy()
request_headers.update(headers or {})
- response = await client.get(
+ response = await client.request(
+ method,
url,
headers=request_headers,
**kwargs,
@@ -31,11 +40,11 @@ async def _make_request(self, url: str, headers: Optional[Dict] = None, **kwargs
response.raise_for_status()
return response
except httpx.HTTPError as e:
- raise ValueError(f"HTTP request failed: {str(e)}")
+ raise ExtractorError(f"HTTP request failed: {str(e)}")
except Exception as e:
- raise ValueError(f"Request failed: {str(e)}")
+ raise ExtractorError(f"Request failed: {str(e)}")
@abstractmethod
- async def extract(self, url: str) -> Tuple[str, Dict[str, str]]:
+ async def extract(self, url: str, **kwargs) -> Dict[str, Any]:
"""Extract final URL and required headers."""
pass
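
A minimal sketch of the new extractor contract: `extract()` now returns a dict (`destination_url` / `request_headers` / `mediaflow_endpoint`) instead of a tuple, and failures raise `ExtractorError`. The class and URLs below are illustrative only:

```python
from typing import Any, Dict

from mediaflow_proxy.extractors.base import BaseExtractor, ExtractorError

class ExampleExtractor(BaseExtractor):
    """Hypothetical extractor illustrating the dict-based contract."""

    async def extract(self, url: str, **kwargs) -> Dict[str, Any]:
        response = await self._make_request(url)  # GET by default, POST via method="POST"
        if "sources" not in response.text:
            raise ExtractorError("No sources found")  # uniform failure type
        return {
            "destination_url": "https://cdn.example/video.mp4",  # hypothetical
            "request_headers": self.base_headers,
            "mediaflow_endpoint": self.mediaflow_endpoint,  # defaults to "proxy_stream_endpoint"
        }
```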
diff --git a/mediaflow_proxy/extractors/doodstream.py b/mediaflow_proxy/extractors/doodstream.py
index 7229e7c..a8f851d 100644
--- a/mediaflow_proxy/extractors/doodstream.py
+++ b/mediaflow_proxy/extractors/doodstream.py
@@ -1,18 +1,18 @@
import re
import time
-from typing import Tuple, Dict
+from typing import Dict
-from mediaflow_proxy.extractors.base import BaseExtractor
+from mediaflow_proxy.extractors.base import BaseExtractor, ExtractorError
class DoodStreamExtractor(BaseExtractor):
"""DoodStream URL extractor."""
- def __init__(self, proxy_enabled: bool, request_headers: dict):
- super().__init__(proxy_enabled, request_headers)
+ def __init__(self, request_headers: dict):
+ super().__init__(request_headers)
self.base_url = "https://d000d.com"
- async def extract(self, url: str) -> Tuple[str, Dict[str, str]]:
+ async def extract(self, url: str, **kwargs) -> Dict[str, str]:
"""Extract DoodStream URL."""
response = await self._make_request(url)
@@ -20,15 +20,20 @@ async def extract(self, url: str) -> Tuple[str, Dict[str, str]]:
pattern = r"(\/pass_md5\/.*?)'.*(\?token=.*?expiry=)"
match = re.search(pattern, response.text, re.DOTALL)
if not match:
- raise ValueError("Failed to extract URL pattern")
+ raise ExtractorError("Failed to extract URL pattern")
# Build final URL
pass_url = f"{self.base_url}{match[1]}"
referer = f"{self.base_url}/"
headers = {"range": "bytes=0-", "referer": referer}
- rebobo_response = await self._make_request(pass_url, headers=headers)
+ response = await self._make_request(pass_url, headers=headers)
timestamp = str(int(time.time()))
- final_url = f"{rebobo_response.text}123456789{match[2]}{timestamp}"
-
- return final_url, {"Referer": referer}
+ final_url = f"{response.text}123456789{match[2]}{timestamp}"
+
+ self.base_headers["referer"] = referer
+ return {
+ "destination_url": final_url,
+ "request_headers": self.base_headers,
+ "mediaflow_endpoint": self.mediaflow_endpoint,
+ }
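
A worked illustration of the DoodStream URL assembly (all values made up): the body of the `/pass_md5/` response is the CDN prefix, `123456789` is the literal filler this extractor appends, and the captured token/expiry tail gets a fresh UNIX timestamp:

```python
import time

pass_body = "https://dd-cdn.example/abc123/"  # hypothetical /pass_md5/ response body
token_tail = "?token=xyz&expiry="             # captured by the regex
final_url = f"{pass_body}123456789{token_tail}{int(time.time())}"
# -> https://dd-cdn.example/abc123/123456789?token=xyz&expiry=1700000000
```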
diff --git a/mediaflow_proxy/extractors/factory.py b/mediaflow_proxy/extractors/factory.py
index f7a4f45..80bcbfd 100644
--- a/mediaflow_proxy/extractors/factory.py
+++ b/mediaflow_proxy/extractors/factory.py
@@ -1,7 +1,8 @@
from typing import Dict, Type
-from mediaflow_proxy.extractors.base import BaseExtractor
+from mediaflow_proxy.extractors.base import BaseExtractor, ExtractorError
from mediaflow_proxy.extractors.doodstream import DoodStreamExtractor
+from mediaflow_proxy.extractors.livetv import LiveTVExtractor
from mediaflow_proxy.extractors.mixdrop import MixdropExtractor
from mediaflow_proxy.extractors.uqload import UqloadExtractor
@@ -13,6 +14,7 @@ class ExtractorFactory:
"Doodstream": DoodStreamExtractor,
"Uqload": UqloadExtractor,
"Mixdrop": MixdropExtractor,
+ "LiveTV": LiveTVExtractor,
}
@classmethod
@@ -20,5 +22,5 @@ def get_extractor(cls, host: str, request_headers: dict) -> BaseExtractor:
"""Get appropriate extractor instance for the given host."""
extractor_class = cls._extractors.get(host)
if not extractor_class:
- raise ValueError(f"Unsupported host: {host}")
+ raise ExtractorError(f"Unsupported host: {host}")
return extractor_class(request_headers)
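
With the factory now raising `ExtractorError` for unknown hosts, a single except clause covers both lookup and downstream HTTP failures. A sketch of how a caller might consume it (the helper name is hypothetical):

```python
from mediaflow_proxy.extractors.base import ExtractorError
from mediaflow_proxy.extractors.factory import ExtractorFactory

async def resolve(host: str, url: str) -> dict:
    """Hypothetical helper: unknown hosts and HTTP failures surface as one error type."""
    try:
        extractor = ExtractorFactory.get_extractor(host, request_headers={})
        return await extractor.extract(url)
    except ExtractorError as e:
        raise RuntimeError(f"Resolution failed: {e}") from e
```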
diff --git a/mediaflow_proxy/extractors/livetv.py b/mediaflow_proxy/extractors/livetv.py
new file mode 100644
index 0000000..8106aed
--- /dev/null
+++ b/mediaflow_proxy/extractors/livetv.py
@@ -0,0 +1,251 @@
+import re
+from typing import Dict, Tuple
+from urllib.parse import urljoin, urlparse, unquote
+
+from httpx import Response
+
+from mediaflow_proxy.extractors.base import BaseExtractor, ExtractorError
+
+
+class LiveTVExtractor(BaseExtractor):
+ """LiveTV URL extractor for both M3U8 and MPD streams."""
+
+ def __init__(self, request_headers: dict):
+ super().__init__(request_headers)
+ # Default to HLS proxy endpoint, will be updated based on stream type
+ self.mediaflow_endpoint = "hls_manifest_proxy"
+
+ # Patterns for stream URL extraction
+ self.fallback_pattern = re.compile(
+ r"source: [\'\"](.*?)[\'\"]\s*,\s*[\s\S]*?mimeType: [\'\"](application/x-mpegURL|application/vnd\.apple\.mpegURL|application/dash\+xml)[\'\"]",
+ re.IGNORECASE,
+ )
+ self.any_m3u8_pattern = re.compile(
+ r'["\']?(https?://.*?\.m3u8(?:\?[^"\']*)?)["\']?',
+ re.IGNORECASE,
+ )
+
+    async def extract(self, url: str, stream_title: str | None = None, **kwargs) -> Dict[str, str]:
+ """Extract LiveTV URL and required headers.
+
+ Args:
+ url: The channel page URL
+ stream_title: Optional stream title to filter specific stream
+
+ Returns:
+            Dict[str, str]: Stream metadata with destination URL, request headers, and mediaflow endpoint
+ """
+ try:
+ # Get the channel page
+ response = await self._make_request(url)
+ self.base_headers["referer"] = urljoin(url, "/")
+
+ # Extract player API details
+ player_api_base, method = await self._extract_player_api_base(response.text)
+ if not player_api_base:
+ raise ExtractorError("Failed to extract player API URL")
+
+ # Get player options
+ options_data = await self._get_player_options(response.text)
+ if not options_data:
+ raise ExtractorError("No player options found")
+
+ # Process player options to find matching stream
+ for option in options_data:
+ current_title = option.get("title")
+ if stream_title and current_title != stream_title:
+ continue
+
+ # Get stream URL based on player option
+ stream_data = await self._process_player_option(
+ player_api_base, method, option.get("post"), option.get("nume"), option.get("type")
+ )
+
+ if stream_data:
+ stream_url = stream_data.get("url")
+ if not stream_url:
+ continue
+
+ response = {
+ "destination_url": stream_url,
+ "request_headers": self.base_headers,
+ "mediaflow_endpoint": self.mediaflow_endpoint,
+ }
+
+ # Set endpoint based on stream type
+ if stream_data.get("type") == "mpd":
+ if stream_data.get("drm_key_id") and stream_data.get("drm_key"):
+ response.update(
+ {
+ "query_params": {
+ "key_id": stream_data["drm_key_id"],
+ "key": stream_data["drm_key"],
+ },
+ "mediaflow_endpoint": "mpd_manifest_proxy",
+ }
+ )
+
+ return response
+
+ raise ExtractorError("No valid stream found")
+
+        except ExtractorError:
+            raise
+        except Exception as e:
+            raise ExtractorError(f"Extraction failed: {str(e)}")
+
+ async def _extract_player_api_base(self, html_content: str) -> Tuple[str | None, str | None]:
+ """Extract player API base URL and method."""
+ admin_ajax_pattern = r'"player_api"\s*:\s*"([^"]+)".*?"play_method"\s*:\s*"([^"]+)"'
+ match = re.search(admin_ajax_pattern, html_content)
+ if not match:
+ return None, None
+ url = match.group(1).replace("\\/", "/")
+ method = match.group(2)
+ if method == "wp_json":
+ return url, method
+ url = urljoin(url, "/wp-admin/admin-ajax.php")
+ return url, method
+
+ async def _get_player_options(self, html_content: str) -> list:
+ """Extract player options from HTML content."""
+        pattern = r'<li[^>]*class=["\']dooplay_player_option["\'][^>]*data-type=["\']([^"\']*)["\'][^>]*data-post=["\']([^"\']*)["\'][^>]*data-nume=["\']([^"\']*)["\'][^>]*>.*?<span class=["\']title["\']>([^<]*)</span>'
+ matches = re.finditer(pattern, html_content, re.DOTALL)
+ return [
+ {"type": match.group(1), "post": match.group(2), "nume": match.group(3), "title": match.group(4).strip()}
+ for match in matches
+ ]
+
+ async def _process_player_option(self, api_base: str, method: str, post: str, nume: str, type_: str) -> Dict:
+ """Process player option to get stream URL."""
+ if method == "wp_json":
+ api_url = f"{api_base}{post}/{type_}/{nume}"
+ response = await self._make_request(api_url)
+ else:
+ form_data = {"action": "doo_player_ajax", "post": post, "nume": nume, "type": type_}
+ response = await self._make_request(api_base, method="POST", data=form_data)
+
+ # Get iframe URL from API response
+ try:
+ data = response.json()
+ iframe_url = urljoin(api_base, data.get("embed_url", "").replace("\\/", "/"))
+
+ # Get stream URL from iframe
+ iframe_response = await self._make_request(iframe_url)
+ stream_data = await self._extract_stream_url(iframe_response, iframe_url)
+ return stream_data
+
+ except Exception as e:
+ raise ExtractorError(f"Failed to process player option: {str(e)}")
+
+ async def _extract_stream_url(self, iframe_response: Response, iframe_url: str) -> Dict:
+ """
+ Extract final stream URL from iframe content.
+ """
+ try:
+ # Parse URL components
+ parsed_url = urlparse(iframe_url)
+            query_params = dict(param.split("=", 1) for param in parsed_url.query.split("&") if "=" in param)
+
+ # Check if content is already a direct M3U8 stream
+ content_types = ["application/x-mpegurl", "application/vnd.apple.mpegurl"]
+
+            if any(ct in iframe_response.headers.get("content-type", "").lower() for ct in content_types):
+ return {"url": iframe_url, "type": "m3u8"}
+
+ stream_data = {}
+
+ # Check for source parameter in URL
+ if "source" in query_params:
+ stream_data = {
+ "url": urljoin(iframe_url, unquote(query_params["source"])),
+ "type": "m3u8",
+ }
+
+ # Check for MPD stream with DRM
+ elif "zy" in query_params and ".mpd``" in query_params["zy"]:
+ data = query_params["zy"].split("``")
+ url = data[0]
+ key_id, key = data[1].split(":")
+ stream_data = {"url": url, "type": "mpd", "drm_key_id": key_id, "drm_key": key}
+
+ # Check for tamilultra specific format
+ elif "tamilultra" in iframe_url:
+ stream_data = {"url": urljoin(iframe_url, parsed_url.query), "type": "m3u8"}
+
+ # Try pattern matching for stream URLs
+ else:
+                channel_id = query_params.get("id", "")
+ stream_url = None
+
+ html_content = iframe_response.text
+
+ if channel_id:
+ # Try channel ID specific pattern
+ pattern = rf'{re.escape(channel_id)}["\']:\s*{{\s*["\']?url["\']?\s*:\s*["\']([^"\']+)["\']'
+ match = re.search(pattern, html_content)
+ if match:
+ stream_url = match.group(1)
+
+ # Try fallback patterns if channel ID pattern fails
+ if not stream_url:
+ for pattern in [self.fallback_pattern, self.any_m3u8_pattern]:
+ match = pattern.search(html_content)
+ if match:
+ stream_url = match.group(1)
+ break
+
+ if stream_url:
+ stream_data = {"url": stream_url, "type": "m3u8"} # Default to m3u8, will be updated
+
+ # Check for MPD stream and extract DRM keys
+ if stream_url.endswith(".mpd"):
+ stream_data["type"] = "mpd"
+ drm_data = await self._extract_drm_keys(html_content, channel_id)
+ if drm_data:
+ stream_data.update(drm_data)
+
+ # If no stream data found, raise error
+ if not stream_data:
+ raise ExtractorError("No valid stream URL found")
+
+ # Update stream type based on URL if not already set
+ if stream_data.get("type") == "m3u8":
+ if stream_data["url"].endswith(".mpd"):
+ stream_data["type"] = "mpd"
+ elif not any(ext in stream_data["url"] for ext in [".m3u8", ".m3u"]):
+ stream_data["type"] = "m3u8" # Default to m3u8 if no extension found
+
+ return stream_data
+
+        except ExtractorError:
+            raise
+        except Exception as e:
+            raise ExtractorError(f"Failed to extract stream URL: {str(e)}")
+
+ async def _extract_drm_keys(self, html_content: str, channel_id: str) -> Dict:
+ """
+ Extract DRM keys for MPD streams.
+ """
+ try:
+ # Pattern for channel entry
+ channel_pattern = rf'"{re.escape(channel_id)}":\s*{{[^}}]+}}'
+ channel_match = re.search(channel_pattern, html_content)
+
+ if channel_match:
+ channel_data = channel_match.group(0)
+
+ # Try clearkeys pattern first
+ clearkey_pattern = r'["\']?clearkeys["\']?\s*:\s*{\s*["\'](.+?)["\']:\s*["\'](.+?)["\']'
+ clearkey_match = re.search(clearkey_pattern, channel_data)
+
+ # Try k1/k2 pattern if clearkeys not found
+ if not clearkey_match:
+ k1k2_pattern = r'["\']?k1["\']?\s*:\s*["\'](.+?)["\'],\s*["\']?k2["\']?\s*:\s*["\'](.+?)["\']'
+ k1k2_match = re.search(k1k2_pattern, channel_data)
+
+ if k1k2_match:
+ return {"drm_key_id": k1k2_match.group(1), "drm_key": k1k2_match.group(2)}
+ else:
+ return {"drm_key_id": clearkey_match.group(1), "drm_key": clearkey_match.group(2)}
+
+ return {}
+
+ except Exception:
+ return {}
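
Illustrative result shapes (all values hypothetical): an HLS option resolves to the HLS proxy endpoint, while an MPD option with ClearKey material also carries `query_params` so the route can build a licensed manifest URL:

```python
hls_result = {
    "destination_url": "https://cdn.example/live/channel.m3u8",
    "request_headers": {"user-agent": "...", "referer": "https://host.example/"},
    "mediaflow_endpoint": "hls_manifest_proxy",
}
mpd_result = {
    "destination_url": "https://cdn.example/live/channel.mpd",
    "request_headers": {"user-agent": "...", "referer": "https://host.example/"},
    "query_params": {"key_id": "0123456789abcdef", "key": "fedcba9876543210"},
    "mediaflow_endpoint": "mpd_manifest_proxy",
}
```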
diff --git a/mediaflow_proxy/extractors/mixdrop.py b/mediaflow_proxy/extractors/mixdrop.py
index 6c3085b..26d91a7 100644
--- a/mediaflow_proxy/extractors/mixdrop.py
+++ b/mediaflow_proxy/extractors/mixdrop.py
@@ -1,21 +1,21 @@
import re
import string
-from typing import Dict, Tuple
+from typing import Dict, Any
-from mediaflow_proxy.extractors.base import BaseExtractor
+from mediaflow_proxy.extractors.base import BaseExtractor, ExtractorError
class MixdropExtractor(BaseExtractor):
"""Mixdrop URL extractor."""
- async def extract(self, url: str) -> Tuple[str, Dict[str, str]]:
+ async def extract(self, url: str, **kwargs) -> Dict[str, Any]:
"""Extract Mixdrop URL."""
- response = await self._make_request(url)
+ response = await self._make_request(url, headers={"accept-language": "en-US,en;q=0.5"})
# Extract and decode URL
- match = re.search(r"\}\('(.+)',.+,'(.+)'\.split", response.text)
+ match = re.search(r"}\('(.+)',.+,'(.+)'\.split", response.text)
if not match:
- raise ValueError("Failed to extract URL components")
+ raise ExtractorError("Failed to extract URL components")
s1, s2 = match.group(1, 2)
schema = s1.split(";")[2][5:-1]
@@ -28,4 +28,9 @@ async def extract(self, url: str) -> Tuple[str, Dict[str, str]]:
# Construct final URL
final_url = "https:" + "".join(char_map.get(c, c) for c in schema)
- return final_url, {"User-Agent": self.base_headers["User-Agent"]}
+ self.base_headers["referer"] = url
+ return {
+ "destination_url": final_url,
+ "request_headers": self.base_headers,
+ "mediaflow_endpoint": self.mediaflow_endpoint,
+ }
diff --git a/mediaflow_proxy/extractors/uqload.py b/mediaflow_proxy/extractors/uqload.py
index d85b816..91a49b0 100644
--- a/mediaflow_proxy/extractors/uqload.py
+++ b/mediaflow_proxy/extractors/uqload.py
@@ -1,18 +1,25 @@
import re
-from typing import Dict, Tuple
+from typing import Dict
-from mediaflow_proxy.extractors.base import BaseExtractor
+from mediaflow_proxy.extractors.base import BaseExtractor, ExtractorError
class UqloadExtractor(BaseExtractor):
"""Uqload URL extractor."""
- async def extract(self, url: str) -> Tuple[str, Dict[str, str]]:
+ referer = "https://uqload.to/"
+
+ async def extract(self, url: str, **kwargs) -> Dict[str, str]:
"""Extract Uqload URL."""
response = await self._make_request(url)
- video_url_match = re.search(r'sources: \["(.*?)"\]', response.text)
+ video_url_match = re.search(r'sources: \["(.*?)"]', response.text)
if not video_url_match:
- raise ValueError("Failed to extract video URL")
+ raise ExtractorError("Failed to extract video URL")
- return video_url_match.group(1), {"Referer": "https://uqload.to/"}
+ self.base_headers["referer"] = self.referer
+ return {
+ "destination_url": video_url_match.group(1),
+ "request_headers": self.base_headers,
+ "mediaflow_endpoint": self.mediaflow_endpoint,
+ }
diff --git a/mediaflow_proxy/handlers.py b/mediaflow_proxy/handlers.py
index 0d2b03f..9876438 100644
--- a/mediaflow_proxy/handlers.py
+++ b/mediaflow_proxy/handlers.py
@@ -6,7 +6,6 @@
from fastapi import Request, Response, HTTPException
from starlette.background import BackgroundTask
-from .configs import settings
from .const import SUPPORTED_RESPONSE_HEADERS
from .mpd_processor import process_manifest, process_playlist, process_segment
from .schemas import HLSManifestParams, ProxyStreamParams, MPDManifestParams, MPDPlaylistParams, MPDSegmentParams
diff --git a/mediaflow_proxy/main.py b/mediaflow_proxy/main.py
index 404ed8a..e2c0e3a 100644
--- a/mediaflow_proxy/main.py
+++ b/mediaflow_proxy/main.py
@@ -13,7 +13,7 @@
from mediaflow_proxy.utils.crypto_utils import EncryptionHandler, EncryptionMiddleware
from mediaflow_proxy.utils.http_utils import encode_mediaflow_proxy_url
-logging.basicConfig(level=logging.INFO, format="%(asctime)s - %(name)s - %(levelname)s - %(message)s")
+logging.basicConfig(level=settings.log_level, format="%(asctime)s - %(name)s - %(levelname)s - %(message)s")
app = FastAPI()
api_password_query = APIKeyQuery(name="api_password", auto_error=False)
api_password_header = APIKeyHeader(name="api_password", auto_error=False)
diff --git a/mediaflow_proxy/routes/extractor.py b/mediaflow_proxy/routes/extractor.py
index cecd689..3bd757e 100644
--- a/mediaflow_proxy/routes/extractor.py
+++ b/mediaflow_proxy/routes/extractor.py
@@ -1,11 +1,13 @@
+import logging
from typing import Annotated
from fastapi import APIRouter, Query, HTTPException, Request, Depends
from fastapi.responses import RedirectResponse
-from mediaflow_proxy.configs import settings
+from mediaflow_proxy.extractors.base import ExtractorError
from mediaflow_proxy.extractors.factory import ExtractorFactory
from mediaflow_proxy.schemas import ExtractorURLParams
+from mediaflow_proxy.utils.cache_utils import get_cached_extractor_result, set_cache_extractor_result
from mediaflow_proxy.utils.http_utils import (
encode_mediaflow_proxy_url,
get_original_scheme,
@@ -14,6 +16,7 @@
)
extractor_router = APIRouter()
+logger = logging.getLogger(__name__)
@extractor_router.get("/video")
@@ -24,23 +27,32 @@ async def extract_url(
):
"""Extract clean links from various video hosting services."""
try:
- extractor = ExtractorFactory.get_extractor(extractor_params.host, proxy_headers.request)
- final_url, headers = await extractor.extract(extractor_params.destination)
+ cache_key = f"{extractor_params.host}_{extractor_params.model_dump_json()}"
+ response = await get_cached_extractor_result(cache_key)
+ if not response:
+ extractor = ExtractorFactory.get_extractor(extractor_params.host, proxy_headers.request)
+ response = await extractor.extract(extractor_params.destination, **extractor_params.extra_params)
+ await set_cache_extractor_result(cache_key, response)
+
+ response["mediaflow_proxy_url"] = str(
+ request.url_for(response.pop("mediaflow_endpoint")).replace(scheme=get_original_scheme(request))
+ )
if extractor_params.redirect_stream:
- headers.update(proxy_headers.request)
+ response["query_params"] = response.get("query_params", {})
+ # Add API password to query params
+ response["query_params"]["api_password"] = request.query_params.get("api_password")
stream_url = encode_mediaflow_proxy_url(
- str(request.url_for("proxy_stream_endpoint").replace(scheme=get_original_scheme(request))),
- destination_url=final_url,
- query_params={"api_password": settings.api_password},
- request_headers=headers,
+ **response,
response_headers=proxy_headers.response,
)
- return RedirectResponse(url=stream_url)
+ return RedirectResponse(url=stream_url, status_code=302)
- return {"url": final_url, "headers": headers}
+ return response
- except ValueError as e:
+ except ExtractorError as e:
+ logger.error(f"Extraction failed: {str(e)}")
raise HTTPException(status_code=400, detail=str(e))
except Exception as e:
+ logger.exception(f"Extraction failed: {str(e)}")
raise HTTPException(status_code=500, detail=f"Extraction failed: {str(e)}")
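
A hypothetical client call against a local instance (assuming the router is mounted under `/extractor` and the default port is 8888). With `redirect_stream=false` the route returns the resolved JSON; with `redirect_stream=true` it answers 302 to the encoded proxy URL instead:

```python
import httpx

params = {
    "host": "Uqload",
    "d": "https://uqload.to/embed-xxxx.html",  # destination, alias "d"
    "redirect_stream": "false",
    "api_password": "secret",                  # hypothetical
}
resp = httpx.get("http://127.0.0.1:8888/extractor/video", params=params)
print(resp.json())  # {"destination_url": ..., "request_headers": ..., "mediaflow_proxy_url": ...}
```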
diff --git a/mediaflow_proxy/routes/proxy.py b/mediaflow_proxy/routes/proxy.py
index f99c4c3..fb936fa 100644
--- a/mediaflow_proxy/routes/proxy.py
+++ b/mediaflow_proxy/routes/proxy.py
@@ -24,7 +24,7 @@
@proxy_router.head("/hls/manifest.m3u8")
@proxy_router.get("/hls/manifest.m3u8")
-async def hls_stream_proxy(
+async def hls_manifest_proxy(
request: Request,
hls_params: Annotated[HLSManifestParams, Query()],
proxy_headers: Annotated[ProxyRequestHeaders, Depends(get_proxy_headers)],
@@ -70,7 +70,7 @@ async def proxy_stream_endpoint(
@proxy_router.get("/mpd/manifest.m3u8")
-async def manifest_endpoint(
+async def mpd_manifest_proxy(
request: Request,
manifest_params: Annotated[MPDManifestParams, Query()],
proxy_headers: Annotated[ProxyRequestHeaders, Depends(get_proxy_headers)],
diff --git a/mediaflow_proxy/schemas.py b/mediaflow_proxy/schemas.py
index e8ab6b1..2df187f 100644
--- a/mediaflow_proxy/schemas.py
+++ b/mediaflow_proxy/schemas.py
@@ -1,4 +1,4 @@
-from typing import Literal
+from typing import Literal, Dict, Any
from pydantic import BaseModel, Field, IPvAnyAddress, ConfigDict
@@ -59,6 +59,12 @@ class MPDSegmentParams(GenericParams):
class ExtractorURLParams(GenericParams):
- host: Literal["Doodstream", "Mixdrop", "Uqload"] = Field(..., description="The host to extract the URL from.")
+ host: Literal["Doodstream", "Mixdrop", "Uqload", "LiveTV"] = Field(
+ ..., description="The host to extract the URL from."
+ )
destination: str = Field(..., description="The URL of the stream.", alias="d")
redirect_stream: bool = Field(False, description="Whether to redirect to the stream endpoint automatically.")
+ extra_params: Dict[str, Any] = Field(
+ default_factory=dict,
+ description="Additional parameters required for specific extractors (e.g., stream_title for LiveTV)",
+ )
diff --git a/mediaflow_proxy/utils/cache_utils.py b/mediaflow_proxy/utils/cache_utils.py
index 10fda75..5d815b8 100644
--- a/mediaflow_proxy/utils/cache_utils.py
+++ b/mediaflow_proxy/utils/cache_utils.py
@@ -1,4 +1,5 @@
import asyncio
+import hashlib
import json
import logging
import os
@@ -89,7 +90,7 @@ def get(self, key: str) -> Optional[CacheEntry]:
else:
# Remove expired entry
self._current_size -= entry.size
- del self._cache[key]
+ self._cache.pop(key, None)
return None
def set(self, key: str, entry: CacheEntry) -> None:
@@ -121,12 +122,10 @@ def __init__(
cache_dir_name: str,
ttl: int,
max_memory_size: int = 100 * 1024 * 1024, # 100MB default
- file_shards: int = 256, # Number of subdirectories for sharding
executor_workers: int = 4,
):
self.cache_dir = Path(tempfile.gettempdir()) / cache_dir_name
self.ttl = ttl
- self.file_shards = file_shards
self.memory_cache = AsyncLRUMemoryCache(maxsize=max_memory_size)
self.stats = CacheStats()
self._executor = ThreadPoolExecutor(max_workers=executor_workers)
@@ -137,19 +136,15 @@ def __init__(
def _init_cache_dirs(self):
"""Initialize sharded cache directories."""
- for i in range(self.file_shards):
- shard_dir = self.cache_dir / f"shard_{i:03d}"
- os.makedirs(shard_dir, exist_ok=True)
+ os.makedirs(self.cache_dir, exist_ok=True)
- def _get_shard_path(self, key: str) -> Path:
- """Get the appropriate shard directory for a key."""
- shard_num = hash(key) % self.file_shards
- return self.cache_dir / f"shard_{shard_num:03d}"
+ def _get_md5_hash(self, key: str) -> str:
+ """Get the MD5 hash of a cache key."""
+ return hashlib.md5(key.encode()).hexdigest()
def _get_file_path(self, key: str) -> Path:
"""Get the file path for a cache key."""
- safe_key = str(hash(key))
- return self._get_shard_path(key) / safe_key
+ return self.cache_dir / key
async def get(self, key: str, default: Any = None) -> Optional[bytes]:
"""
@@ -162,6 +157,7 @@ async def get(self, key: str, default: Any = None) -> Optional[bytes]:
Returns:
Cached value or default if not found
"""
+ key = self._get_md5_hash(key)
# Try memory cache first
entry = self.memory_cache.get(key)
if entry is not None:
@@ -227,6 +223,7 @@ async def set(self, key: str, data: Union[bytes, bytearray, memoryview], ttl: Op
# Create cache entry
entry = CacheEntry(data=data, expires_at=expires_at, access_count=0, last_access=time.time(), size=len(data))
+ key = self._get_md5_hash(key)
# Update memory cache
self.memory_cache.set(key, entry)
@@ -287,14 +284,12 @@ async def check_and_clean_file(file_path: Path):
logger.error(f"Error cleaning up file {file_path}: {e}")
-        # Clean up each shard
+        # Clean up the cache directory
- for i in range(self.file_shards):
- shard_dir = self.cache_dir / f"shard_{i:03d}"
- try:
- async for entry in aiofiles.os.scandir(shard_dir):
- if entry.is_file() and not entry.name.endswith(".tmp"):
- await check_and_clean_file(Path(entry.path))
- except Exception as e:
- logger.error(f"Error scanning shard directory {shard_dir}: {e}")
+ try:
+ async for entry in aiofiles.os.scandir(self.cache_dir):
+ if entry.is_file() and not entry.name.endswith(".tmp"):
+ await check_and_clean_file(Path(entry.path))
+ except Exception as e:
+ logger.error(f"Error scanning shard directory {self.cache_dir}: {e}")
# Create cache instances
@@ -302,21 +297,24 @@ async def check_and_clean_file(file_path: Path):
cache_dir_name="init_segment_cache",
ttl=3600, # 1 hour
max_memory_size=500 * 1024 * 1024, # 500MB for init segments
- file_shards=512, # More shards for better distribution
)
MPD_CACHE = OptimizedHybridCache(
cache_dir_name="mpd_cache",
ttl=300, # 5 minutes
max_memory_size=100 * 1024 * 1024, # 100MB for MPD files
- file_shards=128,
)
SPEEDTEST_CACHE = OptimizedHybridCache(
cache_dir_name="speedtest_cache",
ttl=3600, # 1 hour
- max_memory_size=50 * 1024 * 1024, # 50MB for speed test results
- file_shards=64,
+ max_memory_size=50 * 1024 * 1024,
+)
+
+EXTRACTOR_CACHE = OptimizedHybridCache(
+ cache_dir_name="extractor_cache",
+ ttl=5 * 60, # 5 minutes
+ max_memory_size=50 * 1024 * 1024,
)
@@ -391,3 +389,23 @@ async def set_cache_speedtest(task_id: str, task: SpeedTestTask) -> bool:
except Exception as e:
logger.error(f"Error caching speed test data: {e}")
return False
+
+
+async def get_cached_extractor_result(key: str) -> Optional[dict]:
+ """Get extractor result from cache."""
+ cached_data = await EXTRACTOR_CACHE.get(key)
+ if cached_data is not None:
+ try:
+ return json.loads(cached_data)
+ except json.JSONDecodeError:
+ await EXTRACTOR_CACHE.delete(key)
+ return None
+
+
+async def set_cache_extractor_result(key: str, result: dict) -> bool:
+ """Cache extractor result."""
+ try:
+ return await EXTRACTOR_CACHE.set(key, json.dumps(result).encode())
+ except Exception as e:
+ logger.error(f"Error caching extractor result: {e}")
+ return False
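
A sketch of the new extractor-result cache round trip: keys are MD5-hashed internally, values are JSON-encoded bytes with a 5-minute TTL. The key below is a made-up stand-in for the real `host + model_dump_json()` key:

```python
import asyncio

from mediaflow_proxy.utils.cache_utils import (
    get_cached_extractor_result,
    set_cache_extractor_result,
)

async def demo():
    key = "LiveTV_serialized-params"  # hypothetical cache key
    await set_cache_extractor_result(key, {"destination_url": "https://cdn.example/x.m3u8"})
    cached = await get_cached_extractor_result(key)  # memory cache first, then disk
    print(cached)

asyncio.run(demo())
```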
diff --git a/mediaflow_proxy/utils/m3u8_processor.py b/mediaflow_proxy/utils/m3u8_processor.py
index e59c684..83f8958 100644
--- a/mediaflow_proxy/utils/m3u8_processor.py
+++ b/mediaflow_proxy/utils/m3u8_processor.py
@@ -16,7 +16,9 @@ def __init__(self, request, key_url: str = None):
"""
self.request = request
self.key_url = parse.urlparse(key_url) if key_url else None
- self.mediaflow_proxy_url = str(request.url_for("hls_stream_proxy").replace(scheme=get_original_scheme(request)))
+ self.mediaflow_proxy_url = str(
+ request.url_for("hls_manifest_proxy").replace(scheme=get_original_scheme(request))
+ )
async def process_m3u8(self, content: str, base_url: str) -> str:
"""
diff --git a/poetry.lock b/poetry.lock
index c3009ca..50adff5 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -101,6 +101,85 @@ files = [
{file = "certifi-2024.8.30.tar.gz", hash = "sha256:bec941d2aa8195e248a60b31ff9f0558284cf01a52591ceda73ea9afffd69fd9"},
]
+[[package]]
+name = "cffi"
+version = "1.17.1"
+description = "Foreign Function Interface for Python calling C code."
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "cffi-1.17.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:df8b1c11f177bc2313ec4b2d46baec87a5f3e71fc8b45dab2ee7cae86d9aba14"},
+ {file = "cffi-1.17.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8f2cdc858323644ab277e9bb925ad72ae0e67f69e804f4898c070998d50b1a67"},
+ {file = "cffi-1.17.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:edae79245293e15384b51f88b00613ba9f7198016a5948b5dddf4917d4d26382"},
+ {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:45398b671ac6d70e67da8e4224a065cec6a93541bb7aebe1b198a61b58c7b702"},
+ {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ad9413ccdeda48c5afdae7e4fa2192157e991ff761e7ab8fdd8926f40b160cc3"},
+ {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5da5719280082ac6bd9aa7becb3938dc9f9cbd57fac7d2871717b1feb0902ab6"},
+ {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bb1a08b8008b281856e5971307cc386a8e9c5b625ac297e853d36da6efe9c17"},
+ {file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:045d61c734659cc045141be4bae381a41d89b741f795af1dd018bfb532fd0df8"},
+ {file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:6883e737d7d9e4899a8a695e00ec36bd4e5e4f18fabe0aca0efe0a4b44cdb13e"},
+ {file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:6b8b4a92e1c65048ff98cfe1f735ef8f1ceb72e3d5f0c25fdb12087a23da22be"},
+ {file = "cffi-1.17.1-cp310-cp310-win32.whl", hash = "sha256:c9c3d058ebabb74db66e431095118094d06abf53284d9c81f27300d0e0d8bc7c"},
+ {file = "cffi-1.17.1-cp310-cp310-win_amd64.whl", hash = "sha256:0f048dcf80db46f0098ccac01132761580d28e28bc0f78ae0d58048063317e15"},
+ {file = "cffi-1.17.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a45e3c6913c5b87b3ff120dcdc03f6131fa0065027d0ed7ee6190736a74cd401"},
+ {file = "cffi-1.17.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:30c5e0cb5ae493c04c8b42916e52ca38079f1b235c2f8ae5f4527b963c401caf"},
+ {file = "cffi-1.17.1-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f75c7ab1f9e4aca5414ed4d8e5c0e303a34f4421f8a0d47a4d019ceff0ab6af4"},
+ {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a1ed2dd2972641495a3ec98445e09766f077aee98a1c896dcb4ad0d303628e41"},
+ {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:46bf43160c1a35f7ec506d254e5c890f3c03648a4dbac12d624e4490a7046cd1"},
+ {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a24ed04c8ffd54b0729c07cee15a81d964e6fee0e3d4d342a27b020d22959dc6"},
+ {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:610faea79c43e44c71e1ec53a554553fa22321b65fae24889706c0a84d4ad86d"},
+ {file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:a9b15d491f3ad5d692e11f6b71f7857e7835eb677955c00cc0aefcd0669adaf6"},
+ {file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:de2ea4b5833625383e464549fec1bc395c1bdeeb5f25c4a3a82b5a8c756ec22f"},
+ {file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:fc48c783f9c87e60831201f2cce7f3b2e4846bf4d8728eabe54d60700b318a0b"},
+ {file = "cffi-1.17.1-cp311-cp311-win32.whl", hash = "sha256:85a950a4ac9c359340d5963966e3e0a94a676bd6245a4b55bc43949eee26a655"},
+ {file = "cffi-1.17.1-cp311-cp311-win_amd64.whl", hash = "sha256:caaf0640ef5f5517f49bc275eca1406b0ffa6aa184892812030f04c2abf589a0"},
+ {file = "cffi-1.17.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:805b4371bf7197c329fcb3ead37e710d1bca9da5d583f5073b799d5c5bd1eee4"},
+ {file = "cffi-1.17.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:733e99bc2df47476e3848417c5a4540522f234dfd4ef3ab7fafdf555b082ec0c"},
+ {file = "cffi-1.17.1-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1257bdabf294dceb59f5e70c64a3e2f462c30c7ad68092d01bbbfb1c16b1ba36"},
+ {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da95af8214998d77a98cc14e3a3bd00aa191526343078b530ceb0bd710fb48a5"},
+ {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d63afe322132c194cf832bfec0dc69a99fb9bb6bbd550f161a49e9e855cc78ff"},
+ {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f79fc4fc25f1c8698ff97788206bb3c2598949bfe0fef03d299eb1b5356ada99"},
+ {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b62ce867176a75d03a665bad002af8e6d54644fad99a3c70905c543130e39d93"},
+ {file = "cffi-1.17.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:386c8bf53c502fff58903061338ce4f4950cbdcb23e2902d86c0f722b786bbe3"},
+ {file = "cffi-1.17.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4ceb10419a9adf4460ea14cfd6bc43d08701f0835e979bf821052f1805850fe8"},
+ {file = "cffi-1.17.1-cp312-cp312-win32.whl", hash = "sha256:a08d7e755f8ed21095a310a693525137cfe756ce62d066e53f502a83dc550f65"},
+ {file = "cffi-1.17.1-cp312-cp312-win_amd64.whl", hash = "sha256:51392eae71afec0d0c8fb1a53b204dbb3bcabcb3c9b807eedf3e1e6ccf2de903"},
+ {file = "cffi-1.17.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f3a2b4222ce6b60e2e8b337bb9596923045681d71e5a082783484d845390938e"},
+ {file = "cffi-1.17.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:0984a4925a435b1da406122d4d7968dd861c1385afe3b45ba82b750f229811e2"},
+ {file = "cffi-1.17.1-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d01b12eeeb4427d3110de311e1774046ad344f5b1a7403101878976ecd7a10f3"},
+ {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:706510fe141c86a69c8ddc029c7910003a17353970cff3b904ff0686a5927683"},
+ {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:de55b766c7aa2e2a3092c51e0483d700341182f08e67c63630d5b6f200bb28e5"},
+ {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c59d6e989d07460165cc5ad3c61f9fd8f1b4796eacbd81cee78957842b834af4"},
+ {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd398dbc6773384a17fe0d3e7eeb8d1a21c2200473ee6806bb5e6a8e62bb73dd"},
+ {file = "cffi-1.17.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:3edc8d958eb099c634dace3c7e16560ae474aa3803a5df240542b305d14e14ed"},
+ {file = "cffi-1.17.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:72e72408cad3d5419375fc87d289076ee319835bdfa2caad331e377589aebba9"},
+ {file = "cffi-1.17.1-cp313-cp313-win32.whl", hash = "sha256:e03eab0a8677fa80d646b5ddece1cbeaf556c313dcfac435ba11f107ba117b5d"},
+ {file = "cffi-1.17.1-cp313-cp313-win_amd64.whl", hash = "sha256:f6a16c31041f09ead72d69f583767292f750d24913dadacf5756b966aacb3f1a"},
+ {file = "cffi-1.17.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:636062ea65bd0195bc012fea9321aca499c0504409f413dc88af450b57ffd03b"},
+ {file = "cffi-1.17.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c7eac2ef9b63c79431bc4b25f1cd649d7f061a28808cbc6c47b534bd789ef964"},
+ {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e221cf152cff04059d011ee126477f0d9588303eb57e88923578ace7baad17f9"},
+ {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:31000ec67d4221a71bd3f67df918b1f88f676f1c3b535a7eb473255fdc0b83fc"},
+ {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6f17be4345073b0a7b8ea599688f692ac3ef23ce28e5df79c04de519dbc4912c"},
+ {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0e2b1fac190ae3ebfe37b979cc1ce69c81f4e4fe5746bb401dca63a9062cdaf1"},
+ {file = "cffi-1.17.1-cp38-cp38-win32.whl", hash = "sha256:7596d6620d3fa590f677e9ee430df2958d2d6d6de2feeae5b20e82c00b76fbf8"},
+ {file = "cffi-1.17.1-cp38-cp38-win_amd64.whl", hash = "sha256:78122be759c3f8a014ce010908ae03364d00a1f81ab5c7f4a7a5120607ea56e1"},
+ {file = "cffi-1.17.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b2ab587605f4ba0bf81dc0cb08a41bd1c0a5906bd59243d56bad7668a6fc6c16"},
+ {file = "cffi-1.17.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:28b16024becceed8c6dfbc75629e27788d8a3f9030691a1dbf9821a128b22c36"},
+ {file = "cffi-1.17.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1d599671f396c4723d016dbddb72fe8e0397082b0a77a4fab8028923bec050e8"},
+ {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ca74b8dbe6e8e8263c0ffd60277de77dcee6c837a3d0881d8c1ead7268c9e576"},
+ {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f7f5baafcc48261359e14bcd6d9bff6d4b28d9103847c9e136694cb0501aef87"},
+ {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:98e3969bcff97cae1b2def8ba499ea3d6f31ddfdb7635374834cf89a1a08ecf0"},
+ {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cdf5ce3acdfd1661132f2a9c19cac174758dc2352bfe37d98aa7512c6b7178b3"},
+ {file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9755e4345d1ec879e3849e62222a18c7174d65a6a92d5b346b1863912168b595"},
+ {file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:f1e22e8c4419538cb197e4dd60acc919d7696e5ef98ee4da4e01d3f8cfa4cc5a"},
+ {file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:c03e868a0b3bc35839ba98e74211ed2b05d2119be4e8a0f224fba9384f1fe02e"},
+ {file = "cffi-1.17.1-cp39-cp39-win32.whl", hash = "sha256:e31ae45bc2e29f6b2abd0de1cc3b9d5205aa847cafaecb8af1476a609a2f6eb7"},
+ {file = "cffi-1.17.1-cp39-cp39-win_amd64.whl", hash = "sha256:d016c76bdd850f3c626af19b0542c9677ba156e4ee4fccfdd7848803533ef662"},
+ {file = "cffi-1.17.1.tar.gz", hash = "sha256:1c39c6016c32bc48dd54561950ebd6836e1670f2ae46128f67cf49e789c52824"},
+]
+
+[package.dependencies]
+pycparser = "*"
+
[[package]]
name = "click"
version = "8.1.7"
@@ -142,13 +221,13 @@ test = ["pytest (>=6)"]
[[package]]
name = "fastapi"
-version = "0.115.4"
+version = "0.115.5"
description = "FastAPI framework, high performance, easy to learn, fast to code, ready for production"
optional = false
python-versions = ">=3.8"
files = [
- {file = "fastapi-0.115.4-py3-none-any.whl", hash = "sha256:0b504a063ffb3cf96a5e27dc1bc32c80ca743a2528574f9cdc77daa2d31b4742"},
- {file = "fastapi-0.115.4.tar.gz", hash = "sha256:db653475586b091cb8b2fec2ac54a680ac6a158e07406e1abae31679e8826349"},
+ {file = "fastapi-0.115.5-py3-none-any.whl", hash = "sha256:596b95adbe1474da47049e802f9a65ab2ffa9c2b07e7efee70eb8a66c9f2f796"},
+ {file = "fastapi-0.115.5.tar.gz", hash = "sha256:0e7a4d0dc0d01c68df21887cce0945e72d3c48b9f4f79dfe7a7d53aa08fbb289"},
]
[package.dependencies]
@@ -194,13 +273,13 @@ files = [
[[package]]
name = "httpcore"
-version = "1.0.6"
+version = "1.0.7"
description = "A minimal low-level HTTP client."
optional = false
python-versions = ">=3.8"
files = [
- {file = "httpcore-1.0.6-py3-none-any.whl", hash = "sha256:27b59625743b85577a8c0e10e55b50b5368a4f2cfe8cc7bcfa9cf00829c2682f"},
- {file = "httpcore-1.0.6.tar.gz", hash = "sha256:73f6dbd6eb8c21bbf7ef8efad555481853f5f6acdeaff1edb0694289269ee17f"},
+ {file = "httpcore-1.0.7-py3-none-any.whl", hash = "sha256:a3fff8f43dc260d5bd363d9f9cf1830fa3a458b332856f34282de498ed420edd"},
+ {file = "httpcore-1.0.7.tar.gz", hash = "sha256:8551cb62a169ec7162ac7be8d4817d561f60e08eaa485234898414bb5a8a0b4c"},
]
[package.dependencies]
@@ -231,6 +310,7 @@ httpcore = "==1.*"
idna = "*"
sniffio = "*"
socksio = {version = "==1.*", optional = true, markers = "extra == \"socks\""}
+zstandard = {version = ">=0.18.0", optional = true, markers = "extra == \"zstd\""}
[package.extras]
brotli = ["brotli", "brotlicffi"]
@@ -302,6 +382,17 @@ docs = ["furo (>=2024.8.6)", "proselint (>=0.14)", "sphinx (>=8.0.2)", "sphinx-a
test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=8.3.2)", "pytest-cov (>=5)", "pytest-mock (>=3.14)"]
type = ["mypy (>=1.11.2)"]
+[[package]]
+name = "pycparser"
+version = "2.22"
+description = "C parser in Python"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "pycparser-2.22-py3-none-any.whl", hash = "sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc"},
+ {file = "pycparser-2.22.tar.gz", hash = "sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6"},
+]
+
[[package]]
name = "pycryptodome"
version = "3.21.0"
@@ -628,7 +719,119 @@ files = [
{file = "xmltodict-0.14.2.tar.gz", hash = "sha256:201e7c28bb210e374999d1dde6382923ab0ed1a8a5faeece48ab525b7810a553"},
]
+[[package]]
+name = "zstandard"
+version = "0.23.0"
+description = "Zstandard bindings for Python"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "zstandard-0.23.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:bf0a05b6059c0528477fba9054d09179beb63744355cab9f38059548fedd46a9"},
+ {file = "zstandard-0.23.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fc9ca1c9718cb3b06634c7c8dec57d24e9438b2aa9a0f02b8bb36bf478538880"},
+ {file = "zstandard-0.23.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:77da4c6bfa20dd5ea25cbf12c76f181a8e8cd7ea231c673828d0386b1740b8dc"},
+ {file = "zstandard-0.23.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b2170c7e0367dde86a2647ed5b6f57394ea7f53545746104c6b09fc1f4223573"},
+ {file = "zstandard-0.23.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c16842b846a8d2a145223f520b7e18b57c8f476924bda92aeee3a88d11cfc391"},
+ {file = "zstandard-0.23.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:157e89ceb4054029a289fb504c98c6a9fe8010f1680de0201b3eb5dc20aa6d9e"},
+ {file = "zstandard-0.23.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:203d236f4c94cd8379d1ea61db2fce20730b4c38d7f1c34506a31b34edc87bdd"},
+ {file = "zstandard-0.23.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:dc5d1a49d3f8262be192589a4b72f0d03b72dcf46c51ad5852a4fdc67be7b9e4"},
+ {file = "zstandard-0.23.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:752bf8a74412b9892f4e5b58f2f890a039f57037f52c89a740757ebd807f33ea"},
+ {file = "zstandard-0.23.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:80080816b4f52a9d886e67f1f96912891074903238fe54f2de8b786f86baded2"},
+ {file = "zstandard-0.23.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:84433dddea68571a6d6bd4fbf8ff398236031149116a7fff6f777ff95cad3df9"},
+ {file = "zstandard-0.23.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:ab19a2d91963ed9e42b4e8d77cd847ae8381576585bad79dbd0a8837a9f6620a"},
+ {file = "zstandard-0.23.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:59556bf80a7094d0cfb9f5e50bb2db27fefb75d5138bb16fb052b61b0e0eeeb0"},
+ {file = "zstandard-0.23.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:27d3ef2252d2e62476389ca8f9b0cf2bbafb082a3b6bfe9d90cbcbb5529ecf7c"},
+ {file = "zstandard-0.23.0-cp310-cp310-win32.whl", hash = "sha256:5d41d5e025f1e0bccae4928981e71b2334c60f580bdc8345f824e7c0a4c2a813"},
+ {file = "zstandard-0.23.0-cp310-cp310-win_amd64.whl", hash = "sha256:519fbf169dfac1222a76ba8861ef4ac7f0530c35dd79ba5727014613f91613d4"},
+ {file = "zstandard-0.23.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:34895a41273ad33347b2fc70e1bff4240556de3c46c6ea430a7ed91f9042aa4e"},
+ {file = "zstandard-0.23.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:77ea385f7dd5b5676d7fd943292ffa18fbf5c72ba98f7d09fc1fb9e819b34c23"},
+ {file = "zstandard-0.23.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:983b6efd649723474f29ed42e1467f90a35a74793437d0bc64a5bf482bedfa0a"},
+ {file = "zstandard-0.23.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:80a539906390591dd39ebb8d773771dc4db82ace6372c4d41e2d293f8e32b8db"},
+ {file = "zstandard-0.23.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:445e4cb5048b04e90ce96a79b4b63140e3f4ab5f662321975679b5f6360b90e2"},
+ {file = "zstandard-0.23.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fd30d9c67d13d891f2360b2a120186729c111238ac63b43dbd37a5a40670b8ca"},
+ {file = "zstandard-0.23.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d20fd853fbb5807c8e84c136c278827b6167ded66c72ec6f9a14b863d809211c"},
+ {file = "zstandard-0.23.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ed1708dbf4d2e3a1c5c69110ba2b4eb6678262028afd6c6fbcc5a8dac9cda68e"},
+ {file = "zstandard-0.23.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:be9b5b8659dff1f913039c2feee1aca499cfbc19e98fa12bc85e037c17ec6ca5"},
+ {file = "zstandard-0.23.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:65308f4b4890aa12d9b6ad9f2844b7ee42c7f7a4fd3390425b242ffc57498f48"},
+ {file = "zstandard-0.23.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:98da17ce9cbf3bfe4617e836d561e433f871129e3a7ac16d6ef4c680f13a839c"},
+ {file = "zstandard-0.23.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:8ed7d27cb56b3e058d3cf684d7200703bcae623e1dcc06ed1e18ecda39fee003"},
+ {file = "zstandard-0.23.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:b69bb4f51daf461b15e7b3db033160937d3ff88303a7bc808c67bbc1eaf98c78"},
+ {file = "zstandard-0.23.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:034b88913ecc1b097f528e42b539453fa82c3557e414b3de9d5632c80439a473"},
+ {file = "zstandard-0.23.0-cp311-cp311-win32.whl", hash = "sha256:f2d4380bf5f62daabd7b751ea2339c1a21d1c9463f1feb7fc2bdcea2c29c3160"},
+ {file = "zstandard-0.23.0-cp311-cp311-win_amd64.whl", hash = "sha256:62136da96a973bd2557f06ddd4e8e807f9e13cbb0bfb9cc06cfe6d98ea90dfe0"},
+ {file = "zstandard-0.23.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:b4567955a6bc1b20e9c31612e615af6b53733491aeaa19a6b3b37f3b65477094"},
+ {file = "zstandard-0.23.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1e172f57cd78c20f13a3415cc8dfe24bf388614324d25539146594c16d78fcc8"},
+ {file = "zstandard-0.23.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b0e166f698c5a3e914947388c162be2583e0c638a4703fc6a543e23a88dea3c1"},
+ {file = "zstandard-0.23.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:12a289832e520c6bd4dcaad68e944b86da3bad0d339ef7989fb7e88f92e96072"},
+ {file = "zstandard-0.23.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d50d31bfedd53a928fed6707b15a8dbeef011bb6366297cc435accc888b27c20"},
+ {file = "zstandard-0.23.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:72c68dda124a1a138340fb62fa21b9bf4848437d9ca60bd35db36f2d3345f373"},
+ {file = "zstandard-0.23.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:53dd9d5e3d29f95acd5de6802e909ada8d8d8cfa37a3ac64836f3bc4bc5512db"},
+ {file = "zstandard-0.23.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:6a41c120c3dbc0d81a8e8adc73312d668cd34acd7725f036992b1b72d22c1772"},
+ {file = "zstandard-0.23.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:40b33d93c6eddf02d2c19f5773196068d875c41ca25730e8288e9b672897c105"},
+ {file = "zstandard-0.23.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:9206649ec587e6b02bd124fb7799b86cddec350f6f6c14bc82a2b70183e708ba"},
+ {file = "zstandard-0.23.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:76e79bc28a65f467e0409098fa2c4376931fd3207fbeb6b956c7c476d53746dd"},
+ {file = "zstandard-0.23.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:66b689c107857eceabf2cf3d3fc699c3c0fe8ccd18df2219d978c0283e4c508a"},
+ {file = "zstandard-0.23.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:9c236e635582742fee16603042553d276cca506e824fa2e6489db04039521e90"},
+ {file = "zstandard-0.23.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:a8fffdbd9d1408006baaf02f1068d7dd1f016c6bcb7538682622c556e7b68e35"},
+ {file = "zstandard-0.23.0-cp312-cp312-win32.whl", hash = "sha256:dc1d33abb8a0d754ea4763bad944fd965d3d95b5baef6b121c0c9013eaf1907d"},
+ {file = "zstandard-0.23.0-cp312-cp312-win_amd64.whl", hash = "sha256:64585e1dba664dc67c7cdabd56c1e5685233fbb1fc1966cfba2a340ec0dfff7b"},
+ {file = "zstandard-0.23.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:576856e8594e6649aee06ddbfc738fec6a834f7c85bf7cadd1c53d4a58186ef9"},
+ {file = "zstandard-0.23.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:38302b78a850ff82656beaddeb0bb989a0322a8bbb1bf1ab10c17506681d772a"},
+ {file = "zstandard-0.23.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d2240ddc86b74966c34554c49d00eaafa8200a18d3a5b6ffbf7da63b11d74ee2"},
+ {file = "zstandard-0.23.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2ef230a8fd217a2015bc91b74f6b3b7d6522ba48be29ad4ea0ca3a3775bf7dd5"},
+ {file = "zstandard-0.23.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:774d45b1fac1461f48698a9d4b5fa19a69d47ece02fa469825b442263f04021f"},
+ {file = "zstandard-0.23.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6f77fa49079891a4aab203d0b1744acc85577ed16d767b52fc089d83faf8d8ed"},
+ {file = "zstandard-0.23.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ac184f87ff521f4840e6ea0b10c0ec90c6b1dcd0bad2f1e4a9a1b4fa177982ea"},
+ {file = "zstandard-0.23.0-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:c363b53e257246a954ebc7c488304b5592b9c53fbe74d03bc1c64dda153fb847"},
+ {file = "zstandard-0.23.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:e7792606d606c8df5277c32ccb58f29b9b8603bf83b48639b7aedf6df4fe8171"},
+ {file = "zstandard-0.23.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a0817825b900fcd43ac5d05b8b3079937073d2b1ff9cf89427590718b70dd840"},
+ {file = "zstandard-0.23.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:9da6bc32faac9a293ddfdcb9108d4b20416219461e4ec64dfea8383cac186690"},
+ {file = "zstandard-0.23.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:fd7699e8fd9969f455ef2926221e0233f81a2542921471382e77a9e2f2b57f4b"},
+ {file = "zstandard-0.23.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:d477ed829077cd945b01fc3115edd132c47e6540ddcd96ca169facff28173057"},
+ {file = "zstandard-0.23.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:fa6ce8b52c5987b3e34d5674b0ab529a4602b632ebab0a93b07bfb4dfc8f8a33"},
+ {file = "zstandard-0.23.0-cp313-cp313-win32.whl", hash = "sha256:a9b07268d0c3ca5c170a385a0ab9fb7fdd9f5fd866be004c4ea39e44edce47dd"},
+ {file = "zstandard-0.23.0-cp313-cp313-win_amd64.whl", hash = "sha256:f3513916e8c645d0610815c257cbfd3242adfd5c4cfa78be514e5a3ebb42a41b"},
+ {file = "zstandard-0.23.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:2ef3775758346d9ac6214123887d25c7061c92afe1f2b354f9388e9e4d48acfc"},
+ {file = "zstandard-0.23.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4051e406288b8cdbb993798b9a45c59a4896b6ecee2f875424ec10276a895740"},
+ {file = "zstandard-0.23.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e2d1a054f8f0a191004675755448d12be47fa9bebbcffa3cdf01db19f2d30a54"},
+ {file = "zstandard-0.23.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f83fa6cae3fff8e98691248c9320356971b59678a17f20656a9e59cd32cee6d8"},
+ {file = "zstandard-0.23.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:32ba3b5ccde2d581b1e6aa952c836a6291e8435d788f656fe5976445865ae045"},
+ {file = "zstandard-0.23.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2f146f50723defec2975fb7e388ae3a024eb7151542d1599527ec2aa9cacb152"},
+ {file = "zstandard-0.23.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1bfe8de1da6d104f15a60d4a8a768288f66aa953bbe00d027398b93fb9680b26"},
+ {file = "zstandard-0.23.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:29a2bc7c1b09b0af938b7a8343174b987ae021705acabcbae560166567f5a8db"},
+ {file = "zstandard-0.23.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:61f89436cbfede4bc4e91b4397eaa3e2108ebe96d05e93d6ccc95ab5714be512"},
+ {file = "zstandard-0.23.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:53ea7cdc96c6eb56e76bb06894bcfb5dfa93b7adcf59d61c6b92674e24e2dd5e"},
+ {file = "zstandard-0.23.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:a4ae99c57668ca1e78597d8b06d5af837f377f340f4cce993b551b2d7731778d"},
+ {file = "zstandard-0.23.0-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:379b378ae694ba78cef921581ebd420c938936a153ded602c4fea612b7eaa90d"},
+ {file = "zstandard-0.23.0-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:50a80baba0285386f97ea36239855f6020ce452456605f262b2d33ac35c7770b"},
+ {file = "zstandard-0.23.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:61062387ad820c654b6a6b5f0b94484fa19515e0c5116faf29f41a6bc91ded6e"},
+ {file = "zstandard-0.23.0-cp38-cp38-win32.whl", hash = "sha256:b8c0bd73aeac689beacd4e7667d48c299f61b959475cdbb91e7d3d88d27c56b9"},
+ {file = "zstandard-0.23.0-cp38-cp38-win_amd64.whl", hash = "sha256:a05e6d6218461eb1b4771d973728f0133b2a4613a6779995df557f70794fd60f"},
+ {file = "zstandard-0.23.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3aa014d55c3af933c1315eb4bb06dd0459661cc0b15cd61077afa6489bec63bb"},
+ {file = "zstandard-0.23.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:0a7f0804bb3799414af278e9ad51be25edf67f78f916e08afdb983e74161b916"},
+ {file = "zstandard-0.23.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb2b1ecfef1e67897d336de3a0e3f52478182d6a47eda86cbd42504c5cbd009a"},
+ {file = "zstandard-0.23.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:837bb6764be6919963ef41235fd56a6486b132ea64afe5fafb4cb279ac44f259"},
+ {file = "zstandard-0.23.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1516c8c37d3a053b01c1c15b182f3b5f5eef19ced9b930b684a73bad121addf4"},
+ {file = "zstandard-0.23.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:48ef6a43b1846f6025dde6ed9fee0c24e1149c1c25f7fb0a0585572b2f3adc58"},
+ {file = "zstandard-0.23.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:11e3bf3c924853a2d5835b24f03eeba7fc9b07d8ca499e247e06ff5676461a15"},
+ {file = "zstandard-0.23.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:2fb4535137de7e244c230e24f9d1ec194f61721c86ebea04e1581d9d06ea1269"},
+ {file = "zstandard-0.23.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8c24f21fa2af4bb9f2c492a86fe0c34e6d2c63812a839590edaf177b7398f700"},
+ {file = "zstandard-0.23.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:a8c86881813a78a6f4508ef9daf9d4995b8ac2d147dcb1a450448941398091c9"},
+ {file = "zstandard-0.23.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:fe3b385d996ee0822fd46528d9f0443b880d4d05528fd26a9119a54ec3f91c69"},
+ {file = "zstandard-0.23.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:82d17e94d735c99621bf8ebf9995f870a6b3e6d14543b99e201ae046dfe7de70"},
+ {file = "zstandard-0.23.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:c7c517d74bea1a6afd39aa612fa025e6b8011982a0897768a2f7c8ab4ebb78a2"},
+ {file = "zstandard-0.23.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:1fd7e0f1cfb70eb2f95a19b472ee7ad6d9a0a992ec0ae53286870c104ca939e5"},
+ {file = "zstandard-0.23.0-cp39-cp39-win32.whl", hash = "sha256:43da0f0092281bf501f9c5f6f3b4c975a8a0ea82de49ba3f7100e64d422a1274"},
+ {file = "zstandard-0.23.0-cp39-cp39-win_amd64.whl", hash = "sha256:f8346bfa098532bc1fb6c7ef06783e969d87a99dd1d2a5a18a892c1d7a643c58"},
+ {file = "zstandard-0.23.0.tar.gz", hash = "sha256:b2d8c62d08e7255f68f7a740bae85b3c9b8e5466baa9cbf7f57f1cde0ac6bc09"},
+]
+
+[package.dependencies]
+cffi = {version = ">=1.11", markers = "platform_python_implementation == \"PyPy\""}
+
+[package.extras]
+cffi = ["cffi (>=1.11)"]
+
[metadata]
lock-version = "2.0"
python-versions = ">=3.10"
-content-hash = "31ae30f007ef7dacc5a13f41d1f88f3d2112e10e72846df59cb0956593bb33b9"
+content-hash = "d8791df38e0abbc748ac52240a33c7ba3c715402d6f881eb1ed708bbfa6e191b"
diff --git a/pyproject.toml b/pyproject.toml
index d087928..aab5c81 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -23,8 +23,8 @@ include = ["LICENSE", "README.md", "mediaflow_proxy/static/*"]
[tool.poetry.dependencies]
python = ">=3.10"
-fastapi = "0.115.4"
-httpx = {extras = ["socks"], version = "^0.27.2"}
+fastapi = "0.115.5"
+httpx = {extras = ["socks", "zstd"], version = "^0.27.2"}
tenacity = "^9.0.0"
xmltodict = "^0.14.2"
pydantic-settings = "^2.6.1"
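
Why the `zstd` extra: together with dropping `accept-encoding` from the forwarded headers (see the const.py change above), httpx now negotiates compression itself and can transparently decode zstd responses once the `zstandard` package is installed. A minimal check, assuming any reachable URL:

```python
import httpx

client = httpx.Client()  # advertises zstd when the zstandard decoder is available
resp = client.get("https://example.com")
print(resp.request.headers.get("accept-encoding"))  # e.g. "gzip, deflate, br, zstd"
```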