Added support for DaddyLive and Vavoo streams

This commit is contained in:
2025-07-09 22:27:01 -05:00
parent 2aa0dd5be4
commit e0129f04a0
7 changed files with 497 additions and 195 deletions

View File

@@ -3,4 +3,5 @@ uvicorn>=0.27.0
streamlink>=6.5.0
python-multipart>=0.0.9
typing-extensions>=4.9.0
python-dotenv==1.0.0
python-dotenv==1.0.0
requests>=2.28.1

View File

@@ -2,7 +2,7 @@ import json
import os
from typing import Optional
from dotenv import load_dotenv
from fastapi import Depends, HTTPException
from fastapi import HTTPException
# Load environment variables
load_dotenv()

397
src/utils/proxy_utils.py Normal file
View File

@@ -0,0 +1,397 @@
import requests
import re
import base64
import time
import logging
from urllib.parse import urlparse, unquote, quote_plus
# Setup logging
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)
# --- VavooResolver class for handling Vavoo links ---
class VavooResolver:
    """Resolves vavoo.to links into direct stream URLs via the MediaHubMX API.

    The flow is two-step: first obtain an addon signature from the Vavoo
    ping endpoint, then present that signature to the resolve endpoint.
    """

    def __init__(self):
        # Shared session so both API calls reuse the MediaHubMX user agent
        # and connection pool.
        self.session = requests.Session()
        self.session.headers.update({
            'User-Agent': 'MediaHubMX/2'
        })

    def getAuthSignature(self):
        """Replicate the addon utils.py handshake and return the auth signature.

        Posts a fixed device/app fingerprint to the Vavoo ping endpoint and
        returns the ``addonSig`` field of the JSON response, or ``None`` on
        any failure (network error, bad status, missing field).
        """
        headers = {
            "user-agent": "okhttp/4.11.0",
            "accept": "application/json",
            "content-type": "application/json; charset=utf-8",
            "content-length": "1106",
            "accept-encoding": "gzip"
        }
        # Fixed fingerprint payload expected by the ping endpoint; values
        # mirror the reference addon implementation and must not be altered.
        data = {
            "token": "tosFwQCJMS8qrW_AjLoHPQ41646J5dRNha6ZWHnijoYQQQoADQoXYSo7ki7O5-CsgN4CH0uRk6EEoJ0728ar9scCRQW3ZkbfrPfeCXW2VgopSW2FWDqPOoVYIuVPAOnXCZ5g",
            "reason": "app-blur",
            "locale": "de",
            "theme": "dark",
            "metadata": {
                "device": {
                    "type": "Handset",
                    "brand": "google",
                    "model": "Nexus",
                    "name": "21081111RG",
                    "uniqueId": "d10e5d99ab665233"
                },
                "os": {
                    "name": "android",
                    "version": "7.1.2",
                    "abis": ["arm64-v8a", "armeabi-v7a", "armeabi"],
                    "host": "android"
                },
                "app": {
                    "platform": "android",
                    "version": "3.1.20",
                    "buildId": "289515000",
                    "engine": "hbc85",
                    "signatures": ["6e8a975e3cbf07d5de823a760d4c2547f86c1403105020adee5de67ac510999e"],
                    "installer": "app.revanced.manager.flutter"
                },
                "version": {
                    "package": "tv.vavoo.app",
                    "binary": "3.1.20",
                    "js": "3.1.20"
                }
            },
            "appFocusTime": 0,
            "playerActive": False,
            "playDuration": 0,
            "devMode": False,
            "hasAddon": True,
            "castConnected": False,
            "package": "tv.vavoo.app",
            "version": "3.1.20",
            "process": "app",
            "firstAppStart": 1743962904623,
            "lastAppStart": 1743962904623,
            "ipLocation": "",
            "adblockEnabled": True,
            "proxy": {
                "supported": ["ss", "openvpn"],
                "engine": "ss",
                "ssVersion": 1,
                "enabled": True,
                "autoServer": True,
                "id": "pl-waw"
            },
            "iap": {
                "supported": False
            }
        }
        try:
            resp = self.session.post("https://www.vavoo.tv/api/app/ping", json=data, headers=headers, timeout=20)
            resp.raise_for_status()
            result = resp.json()
            addon_sig = result.get("addonSig")
            if addon_sig:
                return addon_sig
            # Response parsed but carried no signature — log it so callers
            # can diagnose why resolution is failing.
            logger.warning("Vavoo ping response contained no addonSig")
            return None
        except Exception as e:
            # Fix: previously failures were swallowed silently, hiding
            # network/auth problems from the logs.
            logger.error(f"Vavoo auth signature request failed: {e}")
            return None

    def resolve_vavoo_link(self, link, verbose=False):
        """Resolve a Vavoo link using only the primary method (streammode=1).

        Returns the resolved stream URL, or ``None`` when the link is not a
        vavoo.to link, the signature could not be obtained, or the resolve
        call fails. ``verbose`` is accepted for interface compatibility but
        is currently unused.
        """
        if "vavoo.to" not in link:
            return None
        # Only the primary method is used for the proxy.
        signature = self.getAuthSignature()
        if not signature:
            logger.error("Impossibile ottenere la firma Vavoo")
            return None
        headers = {
            "user-agent": "MediaHubMX/2",
            "accept": "application/json",
            "content-type": "application/json; charset=utf-8",
            "content-length": "115",
            "accept-encoding": "gzip",
            "mediahubmx-signature": signature
        }
        data = {
            "language": "de",
            "region": "AT",
            "url": link,
            "clientVersion": "3.0.2"
        }
        try:
            resp = self.session.post("https://vavoo.to/mediahubmx-resolve.json", json=data, headers=headers, timeout=20)
            resp.raise_for_status()
            result = resp.json()
            # The endpoint may answer with a list of results or a single dict.
            if isinstance(result, list) and result and result[0].get("url"):
                resolved_url = result[0]["url"]
                return resolved_url
            elif isinstance(result, dict) and result.get("url"):
                return result["url"]
            else:
                logger.warning("Vavoo resolve returned no usable URL")
                return None
        except Exception as e:
            logger.error(f"Errore nella risoluzione Vavoo: {e}")
            return None
# Global Vavoo resolver instance shared by the whole module.
vavoo_resolver = VavooResolver()

# Cache state for the dynamic DaddyLive base URL (see get_daddylive_base_url).
DADDYLIVE_BASE_URL = None
LAST_FETCH_TIME = 0
FETCH_INTERVAL = 3600  # seconds a fetched base URL stays valid
def get_daddylive_base_url():
    """Fetch and cache the dynamic base URL for DaddyLive.

    The URL is scraped from the ``src="..."`` attribute of a GitHub-hosted
    XML file and cached for FETCH_INTERVAL seconds. On fetch failure OR when
    the page no longer contains the expected attribute, a hard-coded fallback
    domain is returned instead.
    """
    global DADDYLIVE_BASE_URL, LAST_FETCH_TIME
    current_time = time.time()
    # Serve the cached value while it is still fresh.
    if DADDYLIVE_BASE_URL and (current_time - LAST_FETCH_TIME < FETCH_INTERVAL):
        return DADDYLIVE_BASE_URL
    try:
        logger.info("Fetching dynamic DaddyLive base URL from GitHub...")
        github_url = 'https://raw.githubusercontent.com/thecrewwh/dl_url/refs/heads/main/dl.xml'
        response = requests.get(github_url, timeout=10)
        response.raise_for_status()
        content = response.text
        match = re.search(r'src\s*=\s*"([^"]*)"', content)
        if match:
            base_url = match.group(1)
            if not base_url.endswith('/'):
                base_url += '/'
            DADDYLIVE_BASE_URL = base_url
            LAST_FETCH_TIME = current_time
            logger.info(f"Dynamic DaddyLive base URL updated to: {DADDYLIVE_BASE_URL}")
            return DADDYLIVE_BASE_URL
        # Fix: previously a page without the expected src attribute fell
        # through the try block and the function could return None; treat
        # a failed match exactly like a fetch error and use the fallback.
        logger.error("Could not find base URL pattern in dl.xml. Using fallback.")
    except requests.RequestException as e:
        logger.error(f"Error fetching dynamic DaddyLive URL: {e}. Using fallback.")
    DADDYLIVE_BASE_URL = "https://daddylive.sx/"
    # LAST_FETCH_TIME is deliberately NOT updated here so the next call
    # retries the dynamic fetch instead of caching the fallback for an hour.
    logger.info(f"Using fallback DaddyLive URL: {DADDYLIVE_BASE_URL}")
    return DADDYLIVE_BASE_URL
def extract_channel_id(url):
    """Extract the numeric channel ID from the known DaddyLive URL formats.

    Supported formats, checked in order:
      * legacy direct stream:  .../premium<id>/mono.m3u8
      * player page:           .../(watch|stream|cast|player)/stream-<id>.php

    Returns the ID as a string, or None when neither pattern matches.
    """
    patterns = (
        r'/premium(\d+)/mono\.m3u8$',
        r'/(?:watch|stream|cast|player)/stream-(\d+)\.php',
    )
    for pattern in patterns:
        found = re.search(pattern, url)
        if found:
            return found.group(1)
    return None
def process_daddylive_url(url):
    """Convert legacy URL formats into the DaddyLive 2025-compatible form.

    Three cases are handled: legacy ``premium<id>/mono.m3u8`` URLs are
    rewritten to player-page URLs, player URLs already on the current
    domain pass through untouched, and a bare numeric string is treated
    as a channel ID. Anything else is returned unchanged.
    """
    base = get_daddylive_base_url()
    domain = urlparse(base).netloc

    # Legacy direct stream URL -> current watch page.
    legacy = re.search(r'/premium(\d+)/mono\.m3u8$', url)
    if legacy:
        converted = f"{base}watch/stream-{legacy.group(1)}.php"
        logger.info(f"URL processato da {url} a {converted}")
        return converted

    # Already a player URL on the current domain: nothing to do.
    player_paths = ('/watch/', '/stream/', '/cast/', '/player/')
    if domain in url and any(p in url for p in player_paths):
        return url

    # A bare numeric channel ID.
    if url.isdigit():
        return f"{base}watch/stream-{url}.php"

    return url
def resolve_m3u8_link(url, headers=None):
    """Resolve a stream URL into a directly playable m3u8 URL plus headers.

    Handles three input classes:
      * URLs carrying embedded header params (``&h_<name>=<value>``) — the
        headers are extracted and merged over the caller-supplied ones;
      * Vavoo play links — delegated to the global ``vavoo_resolver``;
      * DaddyLive links — resolved via the multi-step player/auth scrape.

    Returns a dict ``{"resolved_url": <str or None>, "headers": <dict>}``.
    On any failure the cleaned input URL is returned unchanged so the
    caller can still attempt playback.
    """
    if not url:
        logger.error("Errore: URL non fornito.")
        return {"resolved_url": None, "headers": {}}
    current_headers = headers.copy() if headers else {}
    clean_url = url
    extracted_headers = {}
    # --- Step 1: pull header parameters embedded in the URL (&h_K=V) ---
    if '&h_' in url or '%26h_' in url:
        logger.info("Rilevati parametri header nell'URL - Estrazione in corso...")
        temp_url = url
        # Vavoo URLs may arrive with '&' percent-encoded once.
        if 'vavoo.to' in temp_url.lower() and '%26' in temp_url:
            temp_url = temp_url.replace('%26', '&')
        # Double-encoded case: decode twice before splitting.
        if '%26h_' in temp_url:
            temp_url = unquote(unquote(temp_url))
        url_parts = temp_url.split('&h_', 1)
        clean_url = url_parts[0]
        header_params = '&h_' + url_parts[1]
        for param in header_params.split('&'):
            if param.startswith('h_'):
                try:
                    key_value = param[2:].split('=', 1)
                    if len(key_value) == 2:
                        # Header names use '_' in the URL in place of '-'.
                        key = unquote(key_value[0]).replace('_', '-')
                        value = unquote(key_value[1])
                        extracted_headers[key] = value
                except Exception as e:
                    logger.error(f"Errore nell'estrazione dell'header {param}: {e}")
    # URL-embedded headers take precedence over caller-supplied ones.
    final_headers = {**current_headers, **extracted_headers}
    # --- Step 2: classify the URL ---
    is_daddylive_link = (
        'newkso.ru' in clean_url.lower() or
        '/stream-' in clean_url.lower() or
        re.search(r'/premium(\d+)/mono\.m3u8$', clean_url) is not None
    )
    if not is_daddylive_link:
        # Vavoo play links get their own resolver; anything else passes
        # through untouched with whatever headers were extracted.
        if 'vavoo.to' in clean_url.lower() and ('/vavoo-iptv/play/' in clean_url.lower() or '/play/' in clean_url.lower()):
            try:
                resolved_vavoo = vavoo_resolver.resolve_vavoo_link(clean_url, verbose=True)
                if resolved_vavoo:
                    return {
                        "resolved_url": resolved_vavoo,
                        "headers": final_headers
                    }
                else:
                    # Resolution failed; fall back to the original URL.
                    return {
                        "resolved_url": clean_url,
                        "headers": final_headers
                    }
            except Exception:
                return {
                    "resolved_url": clean_url,
                    "headers": final_headers
                }
        return {
            "resolved_url": clean_url,
            "headers": final_headers
        }
    # --- Step 3: DaddyLive resolution flow ---
    daddy_base_url = get_daddylive_base_url()
    daddy_origin = urlparse(daddy_base_url).scheme + "://" + urlparse(daddy_base_url).netloc
    daddylive_headers = {
        'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/136.0.0.0 Safari/537.36',
        'Referer': daddy_base_url,
        'Origin': daddy_origin
    }
    final_headers_for_resolving = {**final_headers, **daddylive_headers}
    try:
        # Re-fetch the base URL from GitHub (independent of the cached
        # value above) — the scrape below needs the exact current domain.
        github_url = 'https://raw.githubusercontent.com/thecrewwh/dl_url/refs/heads/main/dl.xml'
        main_url_req = requests.get(github_url, timeout=10)
        main_url_req.raise_for_status()
        main_url = main_url_req.text
        baseurl = re.findall('(?s)src = "([^"]*)', main_url)[0]
        channel_id = extract_channel_id(clean_url)
        if not channel_id:
            logger.error(f"Impossibile estrarre ID canale da {clean_url}")
            return {"resolved_url": clean_url, "headers": current_headers}
        stream_url = f"{baseurl}stream/stream-{channel_id}.php"
        final_headers_for_resolving['Referer'] = baseurl + '/'
        final_headers_for_resolving['Origin'] = baseurl
        # Fetch the stream page, retrying once on proxy rate limiting
        # (HTTP 429) or transient request failures.
        max_retries = 2
        for retry in range(max_retries):
            try:
                response = requests.get(stream_url, headers=final_headers_for_resolving, timeout=15)
                response.raise_for_status()
                break
            except requests.exceptions.ProxyError as e:
                if "429" in str(e) and retry < max_retries - 1:
                    logger.warning(f"Proxy rate limited (429), retry {retry + 1}/{max_retries}: {stream_url}")
                    time.sleep(1)
                    continue
                else:
                    raise
            except requests.RequestException:
                if retry < max_retries - 1:
                    logger.warning(f"Request failed, retry {retry + 1}/{max_retries}: {stream_url}")
                    time.sleep(0.5)
                    continue
                else:
                    raise
        # Locate the "Player 2" link on the stream page.
        iframes = re.findall(r'<a[^>]*href="([^"]+)"[^>]*>\s*<button[^>]*>\s*Player\s*2\s*</button>', response.text)
        if not iframes:
            logger.error("Nessun link Player 2 trovato")
            return {"resolved_url": clean_url, "headers": current_headers}
        url2 = iframes[0]
        url2 = baseurl + url2
        url2 = url2.replace('//cast', '/cast')
        final_headers_for_resolving['Referer'] = url2
        final_headers_for_resolving['Origin'] = url2
        response = requests.get(url2, headers=final_headers_for_resolving, timeout=15)
        response.raise_for_status()
        # The Player 2 page embeds the actual player in an iframe.
        iframes = re.findall(r'iframe src="([^"]*)', response.text)
        if not iframes:
            logger.error("Nessun iframe trovato nella pagina Player 2")
            return {"resolved_url": clean_url, "headers": current_headers}
        iframe_url = iframes[0]
        response = requests.get(iframe_url, headers=final_headers_for_resolving, timeout=15)
        response.raise_for_status()
        iframe_content = response.text
        # Extract the base64-encoded auth parameters (variables a..e in the
        # page's inline JS) needed to authorize the stream.
        try:
            channel_key = re.findall(r'(?s) channelKey = \"([^"]*)', iframe_content)[0]
            auth_ts_b64 = re.findall(r'(?s)c = atob\("([^"]*)', iframe_content)[0]
            auth_ts = base64.b64decode(auth_ts_b64).decode('utf-8')
            auth_rnd_b64 = re.findall(r'(?s)d = atob\("([^"]*)', iframe_content)[0]
            auth_rnd = base64.b64decode(auth_rnd_b64).decode('utf-8')
            auth_sig_b64 = re.findall(r'(?s)e = atob\("([^"]*)', iframe_content)[0]
            auth_sig = base64.b64decode(auth_sig_b64).decode('utf-8')
            auth_sig = quote_plus(auth_sig)
            auth_host_b64 = re.findall(r'(?s)a = atob\("([^"]*)', iframe_content)[0]
            auth_host = base64.b64decode(auth_host_b64).decode('utf-8')
            auth_php_b64 = re.findall(r'(?s)b = atob\("([^"]*)', iframe_content)[0]
            auth_php = base64.b64decode(auth_php_b64).decode('utf-8')
        # NOTE(review): IndexError is already a subclass of Exception, so
        # this tuple is redundant — equivalent to `except Exception`.
        except (IndexError, Exception) as e:
            logger.error(f"Errore estrazione parametri: {e}")
            return {"resolved_url": clean_url, "headers": current_headers}
        # Authorize the channel, then look up which server hosts it.
        auth_url = f'{auth_host}{auth_php}?channel_id={channel_key}&ts={auth_ts}&rnd={auth_rnd}&sig={auth_sig}'
        auth_response = requests.get(auth_url, headers=final_headers_for_resolving, timeout=15)
        auth_response.raise_for_status()
        host = re.findall('(?s)m3u8 =.*?:.*?:.*?".*?".*?"([^"]*)', iframe_content)[0]
        server_lookup = re.findall(r'n fetchWithRetry\(\s*\'([^\']*)', iframe_content)[0]
        server_lookup_url = f"https://{urlparse(iframe_url).netloc}{server_lookup}{channel_key}"
        lookup_response = requests.get(server_lookup_url, headers=final_headers_for_resolving, timeout=15)
        lookup_response.raise_for_status()
        server_data = lookup_response.json()
        server_key = server_data['server_key']
        # Assemble the final playlist URL from the server key and host parts.
        referer_raw = f'https://{urlparse(iframe_url).netloc}'
        clean_m3u8_url = f'https://{server_key}{host}{server_key}/{channel_key}/mono.m3u8'
        # Only these headers are needed to actually fetch the playlist.
        final_headers_for_fetch = {
            'User-Agent': final_headers_for_resolving.get('User-Agent'),
            'Referer': referer_raw,
            'Origin': referer_raw
        }
        return {
            "resolved_url": clean_m3u8_url,
            "headers": {**final_headers, **final_headers_for_fetch}
        }
    except Exception:
        # NOTE(review): failures here are swallowed without logging; the
        # caller just gets the unresolved URL back. Consider logger.exception.
        return {"resolved_url": clean_url, "headers": final_headers}

View File

@@ -2,6 +2,7 @@
import asyncio
from asyncio import create_subprocess_exec, subprocess
from asyncio.subprocess import Process
from utils.proxy_utils import resolve_m3u8_link
async def stream_generator(process: Process, url: str, headers=None, proxy=None):
"""Generate streaming content asynchronously"""
@@ -24,13 +25,13 @@ async def stream_generator(process: Process, url: str, headers=None, proxy=None)
print(f"Error reading stream: {e}")
try:
process.terminate()
except:
except ProcessLookupError:
pass
process = await generate_streamlink_process(url, headers, proxy)
finally:
try:
process.terminate()
except:
except ProcessLookupError:
pass
async def generate_streamlink_process(url, headers=None, proxy=None) -> Process:
@@ -40,6 +41,11 @@ async def generate_streamlink_process(url, headers=None, proxy=None) -> Process:
url: Stream URL
headers: Optional dict of HTTP headers
"""
# Resolve the URL using the proxy logic
resolved_data = resolve_m3u8_link(url, headers)
resolved_url = resolved_data.get("resolved_url", url)
resolved_headers = resolved_data.get("headers", headers)
cmd = [
'streamlink',
'--ffmpeg-fout', 'mpegts',
@@ -56,11 +62,11 @@ async def generate_streamlink_process(url, headers=None, proxy=None) -> Process:
cmd.extend(['--http-proxy', proxy])
# Add headers if specified
if headers:
for key, value in headers.items():
if resolved_headers:
for key, value in resolved_headers.items():
cmd.extend(['--http-header', f'{key}={value}'])
cmd.extend(['--stdout', url, 'best'])
cmd.extend(['--stdout', resolved_url, 'best'])
process = await create_subprocess_exec(
*cmd,