-
-
Notifications
You must be signed in to change notification settings - Fork 29
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
Implement LiveTVExtractor and improve error handling
Added LiveTVExtractor for extracting M3U8 and MPD streams. Enhanced error handling by introducing ExtractorError and updated existing extractors to use this new error class. Removed support for unused request headers and integrated caching for extractor results.
- Loading branch information
Showing 17 changed files with 603 additions and 90 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -1,41 +1,50 @@ | ||
from abc import ABC, abstractmethod | ||
from typing import Dict, Tuple, Optional | ||
from typing import Dict, Optional, Any | ||
|
||
import httpx | ||
|
||
from mediaflow_proxy.configs import settings | ||
from mediaflow_proxy.utils.http_utils import create_httpx_client | ||
|
||
|
||
class ExtractorError(Exception):
    """Common base for all extractor failures (HTTP errors, parse failures)."""
|
||
|
||
class BaseExtractor(ABC):
    """Base class for all URL extractors.

    Subclasses implement `extract` to resolve a provider page URL into a
    dict describing the playable stream (destination URL, request headers,
    and the mediaflow endpoint to route through).
    """

    def __init__(self, request_headers: dict):
        # Default headers for every upstream request; caller-supplied
        # headers are merged on top and may override the defaults.
        self.base_headers = {
            "user-agent": settings.user_agent,
            "accept-language": "en-US,en;q=0.5",
        }
        self.mediaflow_endpoint = "proxy_stream_endpoint"
        self.base_headers.update(request_headers)

    async def _make_request(
        self, url: str, method: str = "GET", headers: Optional[Dict] = None, **kwargs
    ) -> httpx.Response:
        """Make an HTTP request, wrapping every failure in ExtractorError.

        Args:
            url: Target URL.
            method: HTTP method (defaults to "GET").
            headers: Extra headers merged over the base headers for this
                request only.
            **kwargs: Passed through to `httpx.AsyncClient.request`.

        Returns:
            The successful `httpx.Response`.

        Raises:
            ExtractorError: On transport errors, non-2xx responses, or any
                other unexpected failure.
        """
        try:
            async with create_httpx_client() as client:
                # Merge into a fresh dict. The previous code aliased
                # self.base_headers and then update()d it, leaking
                # per-request headers into instance state for all
                # subsequent requests.
                request_headers = {**self.base_headers, **(headers or {})}
                response = await client.request(
                    method,
                    url,
                    headers=request_headers,
                    **kwargs,
                )
                response.raise_for_status()
                return response
        except httpx.HTTPError as e:
            raise ExtractorError(f"HTTP request failed: {str(e)}") from e
        except Exception as e:
            raise ExtractorError(f"Request failed: {str(e)}") from e

    @abstractmethod
    async def extract(self, url: str, **kwargs) -> Dict[str, Any]:
        """Extract the final stream URL and required headers for *url*."""
        pass
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -1,34 +1,39 @@ | ||
import re | ||
import time | ||
from typing import Tuple, Dict | ||
from typing import Dict | ||
|
||
from mediaflow_proxy.extractors.base import BaseExtractor | ||
from mediaflow_proxy.extractors.base import BaseExtractor, ExtractorError | ||
|
||
|
||
class DoodStreamExtractor(BaseExtractor):
    """DoodStream URL extractor."""

    def __init__(self, request_headers: dict):
        super().__init__(request_headers)
        self.base_url = "https://d000d.com"

    async def extract(self, url: str, **kwargs) -> Dict[str, str]:
        """Extract DoodStream URL."""
        page = await self._make_request(url)

        # Locate the pass_md5 path and the token/expiry query fragment
        # embedded in the player page.
        match = re.search(
            r"(\/pass_md5\/.*?)'.*(\?token=.*?expiry=)", page.text, re.DOTALL
        )
        if match is None:
            raise ExtractorError("Failed to extract URL pattern")

        referer = f"{self.base_url}/"
        pass_response = await self._make_request(
            f"{self.base_url}{match[1]}",
            headers={"range": "bytes=0-", "referer": referer},
        )

        # The pass_md5 response body is the stream URL prefix; append the
        # filler string, the token fragment, and the current unix timestamp.
        # NOTE(review): "123456789" looks like a fixed stand-in for a random
        # suffix — confirm DoodStream accepts a constant here.
        final_url = "".join(
            [pass_response.text, "123456789", match[2], str(int(time.time()))]
        )

        self.base_headers["referer"] = referer
        return {
            "destination_url": final_url,
            "request_headers": self.base_headers,
            "mediaflow_endpoint": self.mediaflow_endpoint,
        }
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Oops, something went wrong.