Version 2.2.1

This commit is contained in:
UrloMythus
2025-09-10 12:02:54 +02:00
parent 8f8c3b195e
commit 29a9c01418
58 changed files with 258 additions and 49 deletions

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

View File

@@ -148,32 +148,51 @@ class DLHDExtractor(BaseExtractor):
continue
return None
def extract_bundle_format(js):
"""Extract parameters from new BUNDLE format"""
def extract_xjz_format(js):
    """Extract auth parameters from the new XJZ base64-encoded JSON format.

    The player page embeds ``const XJZ = "<base64>"`` where the payload is a
    base64-encoded JSON object, and every value inside that object is itself
    base64-encoded. Returns a dict mapping each key to its decoded string
    (a field that fails to decode is kept with its raw value), or ``None``
    when the XJZ variable is absent or the outer decode/parse fails.

    NOTE(review): relies on ``re``, ``base64`` and ``logger`` from the
    enclosing scope.
    """
    try:
        # Look for the XJZ variable assignment
        xjz_pattern = r'const\s+XJZ\s*=\s*["\']([^"\']+)["\']'
        match = re.search(xjz_pattern, js)
        if not match:
            return None
        xjz_b64 = match.group(1)
        import json
        # Decode the first base64 layer (JSON)
        xjz_json = base64.b64decode(xjz_b64).decode('utf-8')
        xjz_obj = json.loads(xjz_json)
        # Each value is also base64-encoded, decode each
        decoded = {}
        for k, v in xjz_obj.items():
            try:
                decoded[k] = base64.b64decode(v).decode('utf-8')
            except Exception as e:
                # Keep the raw value so a partially-decoded payload is still usable.
                logger.warning(f"Failed to decode XJZ field {k}: {e}")
                decoded[k] = v
        return decoded
    except Exception as e:
        logger.warning(f"Failed to extract XJZ format: {e}")
        return None
def extract_bundle_format(js):
"""Extract parameters from new BUNDLE format (legacy fallback)."""
try:
# Look for BUNDLE variable
bundle_patterns = [
r'const\s+BUNDLE\s*=\s*["\']([^"\']+)["\']',
r'var\s+BUNDLE\s*=\s*["\']([^"\']+)["\']',
r'let\s+BUNDLE\s*=\s*["\']([^"\']+)["\']'
]
bundle_data = None
for pattern in bundle_patterns:
match = re.search(pattern, js)
if match:
bundle_data = match.group(1)
break
if not bundle_data:
return None
# Decode the bundle (base64 -> JSON -> decode each field)
import json
bundle_json = base64.b64decode(bundle_data).decode('utf-8')
bundle_obj = json.loads(bundle_json)
# Decode each base64 field
decoded_bundle = {}
for key, value in bundle_obj.items():
try:
@@ -181,9 +200,7 @@ class DLHDExtractor(BaseExtractor):
except Exception as e:
logger.warning(f"Failed to decode bundle field {key}: {e}")
decoded_bundle[key] = value
return decoded_bundle
except Exception as e:
logger.warning(f"Failed to extract bundle format: {e}")
return None
@@ -204,10 +221,21 @@ class DLHDExtractor(BaseExtractor):
channel_key = match.group(1)
break
# Try new bundle format first
# Try new XJZ format first
xjz_data = extract_xjz_format(iframe_content)
if xjz_data:
logger.info("Using new XJZ format for parameter extraction")
auth_host = xjz_data.get('b_host')
auth_php = xjz_data.get('b_script')
auth_ts = xjz_data.get('b_ts')
auth_rnd = xjz_data.get('b_rnd')
auth_sig = xjz_data.get('b_sig')
logger.debug(f"XJZ data extracted: {xjz_data}")
else:
# Try bundle format (legacy fallback)
bundle_data = extract_bundle_format(iframe_content)
if bundle_data:
logger.info("Using new BUNDLE format for parameter extraction")
logger.info("Using BUNDLE format for parameter extraction")
auth_host = bundle_data.get('b_host')
auth_php = bundle_data.get('b_script')
auth_ts = bundle_data.get('b_ts')
@@ -248,7 +276,21 @@ class DLHDExtractor(BaseExtractor):
raise ExtractorError(f"Error extracting parameters: missing {', '.join(missing_params)}")
auth_sig = quote_plus(auth_sig)
# 6. Richiesta auth
auth_url = f'{auth_host}{auth_php}?channel_id={channel_key}&ts={auth_ts}&rnd={auth_rnd}&sig={auth_sig}'
# Se il sito fornisce ancora /a.php ma ora serve /auth.php, sostituisci
# Normalize and robustly replace any variant of a.php with /auth.php
if auth_php:
normalized_auth_php = auth_php.strip().lstrip('/')
if normalized_auth_php == 'a.php':
logger.info("Sostituisco qualunque variante di a.php con /auth.php per compatibilità.")
auth_php = '/auth.php'
# Unisci host e script senza doppio slash
if auth_host.endswith('/') and auth_php.startswith('/'):
auth_url = f'{auth_host[:-1]}{auth_php}'
elif not auth_host.endswith('/') and not auth_php.startswith('/'):
auth_url = f'{auth_host}/{auth_php}'
else:
auth_url = f'{auth_host}{auth_php}'
auth_url = f'{auth_url}?channel_id={channel_key}&ts={auth_ts}&rnd={auth_rnd}&sig={auth_sig}'
auth_resp = await self._make_request(auth_url, headers=daddylive_headers)
# 7. Lookup server - Extract host parameter
host = None

View File

@@ -3,6 +3,7 @@ from typing import Dict, Type
from mediaflow_proxy.extractors.base import BaseExtractor, ExtractorError
from mediaflow_proxy.extractors.dlhd import DLHDExtractor
from mediaflow_proxy.extractors.doodstream import DoodStreamExtractor
from mediaflow_proxy.extractors.filelions import FileLionsExtractor
from mediaflow_proxy.extractors.livetv import LiveTVExtractor
from mediaflow_proxy.extractors.maxstream import MaxstreamExtractor
from mediaflow_proxy.extractors.mixdrop import MixdropExtractor
@@ -19,6 +20,7 @@ class ExtractorFactory:
_extractors: Dict[str, Type[BaseExtractor]] = {
"Doodstream": DoodStreamExtractor,
"FileLions": FileLionsExtractor,
"Uqload": UqloadExtractor,
"Mixdrop": MixdropExtractor,
"Streamtape": StreamtapeExtractor,

View File

@@ -14,9 +14,9 @@ class FastreamExtractor(BaseExtractor):
async def extract(self, url: str, **kwargs) -> Dict[str, Any]:
headers = {'Accept': '*/*', 'Connection': 'keep-alive','Accept-Language': 'en-US,en;q=0.5','Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8', 'user-agent': 'Mozilla/5.0 (X11; Linux x86_64; rv:138.0) Gecko/20100101 Firefox/138.0'}
pattern = r'file:"(.*?)"'
patterns = [r'file:"(.*?)"']
final_url = await eval_solver(self, url, headers, pattern)
final_url = await eval_solver(self, url, headers, patterns)
self.base_headers["referer"] = f'https://{url.replace("https://","").split("/")[0]}/'
self.base_headers["origin"] = f'https://{url.replace("https://","").split("/")[0]}'

View File

@@ -0,0 +1,25 @@
from typing import Dict, Any
from mediaflow_proxy.extractors.base import BaseExtractor
from mediaflow_proxy.utils.packed import eval_solver
class FileLionsExtractor(BaseExtractor):
    """Extractor for FileLions-hosted streams.

    Unpacks the p.a.c.k.e.d player JavaScript via ``eval_solver`` and routes
    the resolved HLS manifest through the proxy endpoint.
    """

    # Source-URL regexes adapted from ResolveURL's filelions plugin:
    # https://github.com/Gujal00/ResolveURL/blob/master/script.module.resolveurl/lib/resolveurl/plugins/filelions.py
    _SOURCE_PATTERNS = [
        r'''sources:\s*\[{file:\s*["'](?P<url>[^"']+)''',
        r'''["']hls[24]["']:\s*["'](?P<url>[^"']+)''',
    ]

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # Resolved URLs are HLS manifests, so serve them via the HLS proxy.
        self.mediaflow_endpoint = "hls_manifest_proxy"

    async def extract(self, url: str, **kwargs) -> Dict[str, Any]:
        """Resolve *url* to its final stream URL and return proxy routing info."""
        stream_url = await eval_solver(self, url, {}, self._SOURCE_PATTERNS)
        self.base_headers["referer"] = url
        return {
            "destination_url": stream_url,
            "request_headers": self.base_headers,
            "mediaflow_endpoint": self.mediaflow_endpoint,
        }

View File

@@ -13,9 +13,9 @@ class MixdropExtractor(BaseExtractor):
url = url.replace("club", "ps").split("/2")[0]
headers = {"accept-language": "en-US,en;q=0.5"}
pattern = r'MDCore.wurl ?= ?"(.*?)"'
patterns = [r'MDCore.wurl ?= ?"(.*?)"']
final_url = f"https:{await eval_solver(self, url, headers, pattern)}"
final_url = await eval_solver(self, url, headers, patterns)
self.base_headers["referer"] = url
return {

View File

@@ -22,8 +22,7 @@ class OkruExtractor(BaseExtractor):
data_options = div.get("data-options")
data = json.loads(data_options)
metadata = json.loads(data["flashvars"]["metadata"])
final_url = metadata["hlsMasterPlaylistUrl"]
final_url = metadata.get("hlsMasterPlaylistUrl") or metadata.get("hlsManifestUrl")
self.base_headers["referer"] = url
return {
"destination_url": final_url,

View File

@@ -15,9 +15,9 @@ class SupervideoExtractor(BaseExtractor):
async def extract(self, url: str, **kwargs) -> Dict[str, Any]:
headers = {'Accept': '*/*', 'Connection': 'keep-alive', 'User-Agent': 'Mozilla/5.0 (Linux; Android 12) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/103.0.5060.71 Mobile Safari/537.36', 'user-agent': 'Mozilla/5.0 (Linux; Android 12) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/103.0.5060.71 Mobile Safari/537.36'}
pattern = r'file:"(.*?)"'
patterns = [r'file:"(.*?)"']
final_url = await eval_solver(self, url, headers, pattern)
final_url = await eval_solver(self, url, headers, patterns)
self.base_headers["referer"] = url
return {

View File

@@ -96,7 +96,7 @@ class MPDSegmentParams(GenericParams):
class ExtractorURLParams(GenericParams):
host: Literal[
"Doodstream", "Mixdrop", "Uqload", "Streamtape", "Supervideo", "VixCloud", "Okru", "Maxstream", "LiveTV", "DLHD", "Fastream"
"Doodstream", "FileLions", "Mixdrop", "Uqload", "Streamtape", "Supervideo", "VixCloud", "Okru", "Maxstream", "LiveTV", "DLHD", "Fastream"
] = Field(..., description="The host to extract the URL from.")
destination: str = Field(..., description="The URL of the stream.", alias="d")
redirect_stream: bool = Field(False, description="Whether to redirect to the stream endpoint automatically.")

View File

@@ -1,3 +1,137 @@
<!DOCTYPE html>
<p>"The App is running"</p>
<html lang="en">
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>MediaFlow Proxy</title>
<link rel="icon" href="/logo.png" type="image/x-icon">
<style>
body {
font-family: Arial, sans-serif;
line-height: 1.6;
color: #333;
max-width: 800px;
margin: 0 auto;
padding: 20px;
background-color: #f9f9f9;
}
header {
background-color: #90aacc;
color: #fff;
padding: 10px 0;
text-align: center;
}
header img {
width: 200px;
height: 200px;
vertical-align: middle;
border-radius: 15px;
}
header h1 {
display: inline;
margin-left: 20px;
font-size: 36px;
}
.feature {
background-color: #f4f4f4;
border-left: 4px solid #3498db;
padding: 10px;
margin-bottom: 10px;
}
.speed-test-section {
background-color: #e8f4fd;
border-left: 4px solid #2196f3;
padding: 15px;
margin: 20px 0;
border-radius: 5px;
}
.speed-test-links {
display: flex;
gap: 15px;
margin-top: 10px;
flex-wrap: wrap;
}
.speed-test-link {
display: inline-block;
padding: 10px 20px;
background-color: #2196f3;
color: white;
text-decoration: none;
border-radius: 5px;
transition: background-color 0.3s;
}
.speed-test-link:hover {
background-color: #1976d2;
color: white;
}
.speed-test-link.browser {
background-color: #4caf50;
}
.speed-test-link.browser:hover {
background-color: #388e3c;
}
a {
color: #3498db;
}
</style>
</head>
<body>
<header>
<img src="/logo.png" alt="MediaFlow Proxy Logo">
<h1>MediaFlow Proxy</h1>
</header>
<p>A high-performance proxy server for streaming media, supporting HTTP(S), HLS, and MPEG-DASH with real-time DRM decryption.</p>
<h2>Key Features</h2>
<div class="feature">Convert MPEG-DASH streams (DRM-protected and non-protected) to HLS</div>
<div class="feature">Support for Clear Key DRM-protected MPD DASH streams</div>
<div class="feature">Handle both live and video-on-demand (VOD) DASH streams</div>
<div class="feature">Proxy HTTP/HTTPS links with custom headers</div>
<div class="feature">Proxy and modify HLS (M3U8) streams in real-time with custom headers and key URL modifications for bypassing some sneaky restrictions.</div>
<div class="feature">HLS and DASH pre-buffering for improved streaming performance and reduced latency</div>
<div class="feature">Protect against unauthorized access and network bandwidth abuses</div>
<div class="speed-test-section">
<h3>🚀 Speed Test Tool</h3>
<p>Test your connection speed with debrid services to optimize your streaming experience:</p>
<div class="speed-test-links">
<a href="/speedtest" class="speed-test-link browser">Browser Speed Test</a>
</div>
<p style="margin-top: 10px; font-size: 14px; color: #666;">
<strong>Browser Speed Test:</strong> Tests your actual connection speed through MediaFlow proxy vs direct connection with support for multiple servers, interactive charts, and comprehensive analytics.
</p>
</div>
<div class="speed-test-section">
<h3>🔗 Playlist Builder</h3>
<p>Create and combine M3U playlists with automatic link rewriting for optimal streaming experience:</p>
<div class="speed-test-links">
<a href="/playlist/builder" class="speed-test-link browser">Playlist Builder</a>
</div>
<p style="margin-top: 10px; font-size: 14px; color: #666;">
<strong>Playlist Builder:</strong> Combine multiple M3U playlists, automatically rewrite links for optimal streaming, and support custom headers for enhanced compatibility.
</p>
</div>
<h2>Getting Started</h2>
<p>Visit the <a href="https://github.com/mhdzumair/mediaflow-proxy">GitHub repository</a> for installation instructions and documentation.</p>
<h2>Premium Hosted Service</h2>
<p>For a hassle-free experience, check out <a href="https://store.elfhosted.com/product/mediaflow-proxy">premium hosted service on ElfHosted</a>.</p>
<h2>API Documentation</h2>
<p>Explore the <a href="/docs">Swagger UI</a> for comprehensive details about the API endpoints and their usage.</p>
</body>
</html>

View File

@@ -14,6 +14,7 @@
import re
from bs4 import BeautifulSoup, SoupStrainer
from urllib.parse import urljoin, urlparse
import logging
@@ -142,7 +143,7 @@ class UnpackingError(Exception):
async def eval_solver(self, url: str, headers, pattern: str) -> str:
async def eval_solver(self, url: str, headers: dict[str, str] | None, patterns: list[str]) -> str:
try:
response = await self._make_request(url, headers=headers)
soup = BeautifulSoup(response.text, "lxml",parse_only=SoupStrainer("script"))
@@ -150,10 +151,15 @@ async def eval_solver(self, url: str, headers, pattern: str) -> str:
for i in script_all:
if detect(i.text):
unpacked_code = unpack(i.text)
for pattern in patterns:
match = re.search(pattern, unpacked_code)
if match:
m3u8_url = match.group(1)
return m3u8_url
extracted_url = match.group(1)
if not urlparse(extracted_url).scheme:
extracted_url = urljoin(url, extracted_url)
return extracted_url
raise UnpackingError("No p.a.c.k.e.d JS found or no pattern matched.")
except Exception as e:
logger.error("Eval solver error\n",e)
raise Exception("Error in eval_solver")
logger.exception("Eval solver error for %s", url)
raise UnpackingError("Error in eval_solver") from e

View File

@@ -17,3 +17,4 @@ starlette
cachetools
tqdm
aiofiles
psutil