mirror of
https://github.com/UrloMythus/UnHided.git
synced 2026-04-09 02:40:47 +00:00
New Version
This commit is contained in:
BIN
mediaflow_proxy/__pycache__/__init__.cpython-313.pyc
Normal file
BIN
mediaflow_proxy/__pycache__/__init__.cpython-313.pyc
Normal file
Binary file not shown.
BIN
mediaflow_proxy/__pycache__/configs.cpython-313.pyc
Normal file
BIN
mediaflow_proxy/__pycache__/configs.cpython-313.pyc
Normal file
Binary file not shown.
BIN
mediaflow_proxy/__pycache__/const.cpython-313.pyc
Normal file
BIN
mediaflow_proxy/__pycache__/const.cpython-313.pyc
Normal file
Binary file not shown.
BIN
mediaflow_proxy/__pycache__/handlers.cpython-313.pyc
Normal file
BIN
mediaflow_proxy/__pycache__/handlers.cpython-313.pyc
Normal file
Binary file not shown.
BIN
mediaflow_proxy/__pycache__/main.cpython-313.pyc
Normal file
BIN
mediaflow_proxy/__pycache__/main.cpython-313.pyc
Normal file
Binary file not shown.
BIN
mediaflow_proxy/__pycache__/middleware.cpython-313.pyc
Normal file
BIN
mediaflow_proxy/__pycache__/middleware.cpython-313.pyc
Normal file
Binary file not shown.
BIN
mediaflow_proxy/__pycache__/mpd_processor.cpython-313.pyc
Normal file
BIN
mediaflow_proxy/__pycache__/mpd_processor.cpython-313.pyc
Normal file
Binary file not shown.
BIN
mediaflow_proxy/__pycache__/schemas.cpython-313.pyc
Normal file
BIN
mediaflow_proxy/__pycache__/schemas.cpython-313.pyc
Normal file
Binary file not shown.
BIN
mediaflow_proxy/drm/__pycache__/__init__.cpython-313.pyc
Normal file
BIN
mediaflow_proxy/drm/__pycache__/__init__.cpython-313.pyc
Normal file
Binary file not shown.
BIN
mediaflow_proxy/drm/__pycache__/decrypter.cpython-313.pyc
Normal file
BIN
mediaflow_proxy/drm/__pycache__/decrypter.cpython-313.pyc
Normal file
Binary file not shown.
BIN
mediaflow_proxy/extractors/__pycache__/__init__.cpython-313.pyc
Normal file
BIN
mediaflow_proxy/extractors/__pycache__/__init__.cpython-313.pyc
Normal file
Binary file not shown.
BIN
mediaflow_proxy/extractors/__pycache__/base.cpython-313.pyc
Normal file
BIN
mediaflow_proxy/extractors/__pycache__/base.cpython-313.pyc
Normal file
Binary file not shown.
BIN
mediaflow_proxy/extractors/__pycache__/dlhd.cpython-313.pyc
Normal file
BIN
mediaflow_proxy/extractors/__pycache__/dlhd.cpython-313.pyc
Normal file
Binary file not shown.
Binary file not shown.
BIN
mediaflow_proxy/extractors/__pycache__/factory.cpython-313.pyc
Normal file
BIN
mediaflow_proxy/extractors/__pycache__/factory.cpython-313.pyc
Normal file
Binary file not shown.
BIN
mediaflow_proxy/extractors/__pycache__/fastream.cpython-313.pyc
Normal file
BIN
mediaflow_proxy/extractors/__pycache__/fastream.cpython-313.pyc
Normal file
Binary file not shown.
BIN
mediaflow_proxy/extractors/__pycache__/livetv.cpython-313.pyc
Normal file
BIN
mediaflow_proxy/extractors/__pycache__/livetv.cpython-313.pyc
Normal file
Binary file not shown.
BIN
mediaflow_proxy/extractors/__pycache__/maxstream.cpython-313.pyc
Normal file
BIN
mediaflow_proxy/extractors/__pycache__/maxstream.cpython-313.pyc
Normal file
Binary file not shown.
BIN
mediaflow_proxy/extractors/__pycache__/mixdrop.cpython-313.pyc
Normal file
BIN
mediaflow_proxy/extractors/__pycache__/mixdrop.cpython-313.pyc
Normal file
Binary file not shown.
BIN
mediaflow_proxy/extractors/__pycache__/okru.cpython-313.pyc
Normal file
BIN
mediaflow_proxy/extractors/__pycache__/okru.cpython-313.pyc
Normal file
Binary file not shown.
Binary file not shown.
Binary file not shown.
BIN
mediaflow_proxy/extractors/__pycache__/uqload.cpython-313.pyc
Normal file
BIN
mediaflow_proxy/extractors/__pycache__/uqload.cpython-313.pyc
Normal file
Binary file not shown.
BIN
mediaflow_proxy/extractors/__pycache__/vixcloud.cpython-313.pyc
Normal file
BIN
mediaflow_proxy/extractors/__pycache__/vixcloud.cpython-313.pyc
Normal file
Binary file not shown.
@@ -30,6 +30,7 @@ class BaseExtractor(ABC):
|
||||
try:
|
||||
async with create_httpx_client() as client:
|
||||
request_headers = self.base_headers
|
||||
print(request_headers)
|
||||
request_headers.update(headers or {})
|
||||
response = await client.request(
|
||||
method,
|
||||
|
||||
@@ -11,7 +11,7 @@ from mediaflow_proxy.extractors.streamtape import StreamtapeExtractor
|
||||
from mediaflow_proxy.extractors.supervideo import SupervideoExtractor
|
||||
from mediaflow_proxy.extractors.uqload import UqloadExtractor
|
||||
from mediaflow_proxy.extractors.vixcloud import VixCloudExtractor
|
||||
|
||||
from mediaflow_proxy.extractors.fastream import FastreamExtractor
|
||||
|
||||
class ExtractorFactory:
|
||||
"""Factory for creating URL extractors."""
|
||||
@@ -27,6 +27,7 @@ class ExtractorFactory:
|
||||
"Maxstream": MaxstreamExtractor,
|
||||
"LiveTV": LiveTVExtractor,
|
||||
"DLHD": DLHDExtractor,
|
||||
"Fastream": FastreamExtractor
|
||||
}
|
||||
|
||||
@classmethod
|
||||
|
||||
32
mediaflow_proxy/extractors/fastream.py
Normal file
32
mediaflow_proxy/extractors/fastream.py
Normal file
@@ -0,0 +1,32 @@
|
||||
import re
|
||||
from typing import Dict, Any
|
||||
|
||||
from mediaflow_proxy.extractors.base import BaseExtractor
|
||||
from mediaflow_proxy.utils.packed import eval_solver
|
||||
|
||||
|
||||
|
||||
|
||||
class FastreamExtractor(BaseExtractor):
    """Fastream URL extractor.

    Resolves a Fastream page URL to its HLS (m3u8) stream URL by unpacking
    the obfuscated player script via ``eval_solver``.
    """

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # Downstream route that should proxy the resolved manifest.
        self.mediaflow_endpoint = "hls_manifest_proxy"

    async def extract(self, url: str, **kwargs) -> Dict[str, Any]:
        """Extract the Fastream stream URL.

        Args:
            url: Fastream page URL to resolve.

        Returns:
            Dict with ``destination_url``, ``request_headers`` and
            ``mediaflow_endpoint`` keys, as expected by the extractor route.
        """
        # Headers for the page request.  NOTE: the original dict literal
        # listed 'Accept' twice; only the later value ('text/html,...') ever
        # took effect, so that is the one kept here.
        headers = {
            "Connection": "keep-alive",
            "Accept-Language": "en-US,en;q=0.5",
            "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8",
            "user-agent": "Mozilla/5.0 (X11; Linux x86_64; rv:138.0) Gecko/20100101 Firefox/138.0",
        }
        final_url = await eval_solver(self, url, headers)

        # Derive the host once instead of re-parsing the URL for each header.
        host = url.replace("https://", "").split("/")[0]
        self.base_headers["referer"] = f"https://{host}/"
        self.base_headers["origin"] = f"https://{host}"
        self.base_headers["Accept-Language"] = "en-US,en;q=0.5"
        self.base_headers["Accept"] = "*/*"
        self.base_headers["user-agent"] = "Mozilla/5.0 (X11; Linux x86_64; rv:138.0) Gecko/20100101 Firefox/138.0"

        return {
            "destination_url": final_url,
            "request_headers": self.base_headers,
            "mediaflow_endpoint": self.mediaflow_endpoint,
        }
|
||||
@@ -1,6 +1,5 @@
|
||||
import re
|
||||
from typing import Dict, Any
|
||||
|
||||
from mediaflow_proxy.extractors.base import BaseExtractor, ExtractorError
|
||||
|
||||
|
||||
@@ -15,14 +14,10 @@ class StreamtapeExtractor(BaseExtractor):
|
||||
matches = re.findall(r"id=.*?(?=')", response.text)
|
||||
if not matches:
|
||||
raise ExtractorError("Failed to extract URL components")
|
||||
final_url = next(
|
||||
(
|
||||
f"https://streamtape.com/get_video?{matches[i + 1]}"
|
||||
for i in range(len(matches) - 1)
|
||||
if matches[i] == matches[i + 1]
|
||||
),
|
||||
None,
|
||||
)
|
||||
i = 0
|
||||
for i in range(len(matches)):
|
||||
if matches[i-1] == matches[i] and "ip=" in matches[i]:
|
||||
final_url = f"https://streamtape.com/get_video?{matches[i]}"
|
||||
|
||||
self.base_headers["referer"] = url
|
||||
return {
|
||||
|
||||
@@ -2,22 +2,24 @@ import re
|
||||
from typing import Dict, Any
|
||||
|
||||
from mediaflow_proxy.extractors.base import BaseExtractor
|
||||
from mediaflow_proxy.utils.packed import eval_solver
|
||||
|
||||
|
||||
|
||||
|
||||
class SupervideoExtractor(BaseExtractor):
|
||||
"""Supervideo URL extractor."""
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
super().__init__(*args, **kwargs)
|
||||
self.mediaflow_endpoint = "hls_manifest_proxy"
|
||||
|
||||
async def extract(self, url: str, **kwargs) -> Dict[str, Any]:
|
||||
"""Extract Supervideo URL."""
|
||||
response = await self._make_request(url)
|
||||
# Extract and decode URL
|
||||
s2 = re.search(r"\}\('(.+)',.+,'(.+)'\.split", response.text).group(2)
|
||||
terms = s2.split("|")
|
||||
hfs = next(terms[i] for i in range(terms.index("file"), len(terms)) if "hfs" in terms[i])
|
||||
result = terms[terms.index("urlset") + 1 : terms.index("hls")]
|
||||
#Init headers needed for the request.
|
||||
headers = {'Accept': '*/*', 'Connection': 'keep-alive', 'User-Agent': 'Mozilla/5.0 (Linux; Android 12) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/103.0.5060.71 Mobile Safari/537.36', 'user-agent': 'Mozilla/5.0 (Linux; Android 12) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/103.0.5060.71 Mobile Safari/537.36'}
|
||||
|
||||
base_url = f"https://{hfs}.serversicuro.cc/hls/"
|
||||
final_url = base_url + ",".join(reversed(result)) + (".urlset/master.m3u8" if result else "")
|
||||
|
||||
"""Extract Supervideo URL."""
|
||||
final_url = await eval_solver(self,url,headers)
|
||||
|
||||
self.base_headers["referer"] = url
|
||||
return {
|
||||
|
||||
@@ -17,7 +17,7 @@ class VixCloudExtractor(BaseExtractor):
|
||||
|
||||
async def version(self, site_url: str) -> str:
|
||||
"""Get version of VixCloud Parent Site."""
|
||||
base_url = f"{site_url}/richiedi-un-titolo"
|
||||
base_url = f"{site_url}/request-a-title"
|
||||
response = await self._make_request(
|
||||
base_url,
|
||||
headers={
|
||||
|
||||
BIN
mediaflow_proxy/routes/__pycache__/__init__.cpython-313.pyc
Normal file
BIN
mediaflow_proxy/routes/__pycache__/__init__.cpython-313.pyc
Normal file
Binary file not shown.
BIN
mediaflow_proxy/routes/__pycache__/extractor.cpython-313.pyc
Normal file
BIN
mediaflow_proxy/routes/__pycache__/extractor.cpython-313.pyc
Normal file
Binary file not shown.
BIN
mediaflow_proxy/routes/__pycache__/proxy.cpython-313.pyc
Normal file
BIN
mediaflow_proxy/routes/__pycache__/proxy.cpython-313.pyc
Normal file
Binary file not shown.
BIN
mediaflow_proxy/routes/__pycache__/speedtest.cpython-313.pyc
Normal file
BIN
mediaflow_proxy/routes/__pycache__/speedtest.cpython-313.pyc
Normal file
Binary file not shown.
@@ -93,7 +93,7 @@ class MPDSegmentParams(GenericParams):
|
||||
|
||||
class ExtractorURLParams(GenericParams):
|
||||
host: Literal[
|
||||
"Doodstream", "Mixdrop", "Uqload", "Streamtape", "Supervideo", "VixCloud", "Okru", "Maxstream", "LiveTV", "DLHD"
|
||||
"Doodstream", "Mixdrop", "Uqload", "Streamtape", "Supervideo", "VixCloud", "Okru", "Maxstream", "LiveTV", "DLHD", "Fastream"
|
||||
] = Field(..., description="The host to extract the URL from.")
|
||||
destination: str = Field(..., description="The URL of the stream.", alias="d")
|
||||
redirect_stream: bool = Field(False, description="Whether to redirect to the stream endpoint automatically.")
|
||||
|
||||
BIN
mediaflow_proxy/speedtest/__pycache__/__init__.cpython-313.pyc
Normal file
BIN
mediaflow_proxy/speedtest/__pycache__/__init__.cpython-313.pyc
Normal file
Binary file not shown.
BIN
mediaflow_proxy/speedtest/__pycache__/models.cpython-313.pyc
Normal file
BIN
mediaflow_proxy/speedtest/__pycache__/models.cpython-313.pyc
Normal file
Binary file not shown.
BIN
mediaflow_proxy/speedtest/__pycache__/service.cpython-313.pyc
Normal file
BIN
mediaflow_proxy/speedtest/__pycache__/service.cpython-313.pyc
Normal file
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
BIN
mediaflow_proxy/utils/__pycache__/__init__.cpython-313.pyc
Normal file
BIN
mediaflow_proxy/utils/__pycache__/__init__.cpython-313.pyc
Normal file
Binary file not shown.
BIN
mediaflow_proxy/utils/__pycache__/cache_utils.cpython-313.pyc
Normal file
BIN
mediaflow_proxy/utils/__pycache__/cache_utils.cpython-313.pyc
Normal file
Binary file not shown.
BIN
mediaflow_proxy/utils/__pycache__/crypto_utils.cpython-313.pyc
Normal file
BIN
mediaflow_proxy/utils/__pycache__/crypto_utils.cpython-313.pyc
Normal file
Binary file not shown.
BIN
mediaflow_proxy/utils/__pycache__/http_utils.cpython-313.pyc
Normal file
BIN
mediaflow_proxy/utils/__pycache__/http_utils.cpython-313.pyc
Normal file
Binary file not shown.
BIN
mediaflow_proxy/utils/__pycache__/m3u8_processor.cpython-313.pyc
Normal file
BIN
mediaflow_proxy/utils/__pycache__/m3u8_processor.cpython-313.pyc
Normal file
Binary file not shown.
BIN
mediaflow_proxy/utils/__pycache__/mpd_utils.cpython-313.pyc
Normal file
BIN
mediaflow_proxy/utils/__pycache__/mpd_utils.cpython-313.pyc
Normal file
Binary file not shown.
BIN
mediaflow_proxy/utils/__pycache__/packed.cpython-313.pyc
Normal file
BIN
mediaflow_proxy/utils/__pycache__/packed.cpython-313.pyc
Normal file
Binary file not shown.
159
mediaflow_proxy/utils/packed.py
Normal file
159
mediaflow_proxy/utils/packed.py
Normal file
@@ -0,0 +1,159 @@
|
||||
#Adapted for use in MediaFlowProxy from:
|
||||
#https://github.com/einars/js-beautify/blob/master/python/jsbeautifier/unpackers/packer.py
|
||||
# Unpacker for Dean Edward's p.a.c.k.e.r, a part of javascript beautifier
|
||||
# by Einar Lielmanis <einar@beautifier.io>
|
||||
#
|
||||
# written by Stefano Sanfilippo <a.little.coder@gmail.com>
|
||||
#
|
||||
# usage:
|
||||
#
|
||||
# if detect(some_string):
|
||||
# unpacked = unpack(some_string)
|
||||
#
|
||||
"""Unpacker for Dean Edward's p.a.c.k.e.r"""
|
||||
|
||||
import re
|
||||
from bs4 import BeautifulSoup, SoupStrainer
|
||||
import logging
|
||||
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
|
||||
|
||||
def detect(source):
    """Return True if *source* contains Dean Edwards' p.a.c.k.e.r. boilerplate.

    The original assigned a throwaway sentinel inside the ``if`` and returned
    ``sentinel is not None`` — a fragile, convoluted way of expressing a
    boolean (and an UnboundLocalError hazard if the substring is absent).
    A plain membership test is equivalent and always returns a bool.
    """
    return "eval(function(p,a,c,k,e,d)" in source
|
||||
|
||||
|
||||
def unpack(source):
    """Unpack P.A.C.K.E.R.-packed JavaScript and return the decoded source."""
    payload, symtab, radix, count = _filterargs(source)

    if len(symtab) != count:
        raise UnpackingError("Malformed p.a.c.k.e.r. symtab.")

    try:
        unbase = Unbaser(radix)
    except TypeError:
        raise UnpackingError("Unknown p.a.c.k.e.r. encoding.")

    def lookup(match):
        # Map each base-N token back to its symbol-table entry; tokens with
        # an empty table slot decode to themselves.
        token = match.group(0)
        return symtab[unbase(token)] or token

    decoded = payload.replace("\\\\", "\\").replace("\\'", "'")
    decoded = re.sub(r"\b\w+\b", lookup, decoded)
    return _replacestrings(decoded)
|
||||
|
||||
|
||||
def _filterargs(source):
|
||||
"""Juice from a source file the four args needed by decoder."""
|
||||
juicers = [
|
||||
(r"}\('(.*)', *(\d+|\[\]), *(\d+), *'(.*)'\.split\('\|'\), *(\d+), *(.*)\)\)"),
|
||||
(r"}\('(.*)', *(\d+|\[\]), *(\d+), *'(.*)'\.split\('\|'\)"),
|
||||
]
|
||||
for juicer in juicers:
|
||||
args = re.search(juicer, source, re.DOTALL)
|
||||
if args:
|
||||
a = args.groups()
|
||||
if a[1] == "[]":
|
||||
a = list(a)
|
||||
a[1] = 62
|
||||
a = tuple(a)
|
||||
try:
|
||||
return a[0], a[3].split("|"), int(a[1]), int(a[2])
|
||||
except ValueError:
|
||||
raise UnpackingError("Corrupted p.a.c.k.e.r. data.")
|
||||
|
||||
# could not find a satisfying regex
|
||||
raise UnpackingError(
|
||||
"Could not make sense of p.a.c.k.e.r data (unexpected code structure)"
|
||||
)
|
||||
|
||||
|
||||
def _replacestrings(source):
|
||||
"""Strip string lookup table (list) and replace values in source."""
|
||||
match = re.search(r'var *(_\w+)\=\["(.*?)"\];', source, re.DOTALL)
|
||||
|
||||
if match:
|
||||
varname, strings = match.groups()
|
||||
startpoint = len(match.group(0))
|
||||
lookup = strings.split('","')
|
||||
variable = "%s[%%d]" % varname
|
||||
for index, value in enumerate(lookup):
|
||||
source = source.replace(variable % index, '"%s"' % value)
|
||||
return source[startpoint:]
|
||||
return source
|
||||
|
||||
|
||||
class Unbaser(object):
    """Functor for a given base: efficiently converts base-N encoded
    strings to natural numbers.

    Bases 2..36 delegate to the ``int`` builtin; larger bases use a
    character-to-value dictionary built from ALPHABET.

    Raises:
        TypeError: if the base has no known alphabet.
    """

    ALPHABET = {
        62: "0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ",
        95: (
            " !\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ"
            "[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~"
        ),
    }

    def __init__(self, base):
        self.base = base

        # Derive alphabets for bases 37..61 from the base-62 alphabet.
        # BUG FIX: the original tested ``hasattr(self.ALPHABET, <string>)``,
        # which is always False for a dict — key membership is the intended
        # check (the net effect was the same, but only by accident).
        if 36 < base < 62 and base not in self.ALPHABET:
            self.ALPHABET[base] = self.ALPHABET[62][:base]

        # If base can be handled by the int() builtin, let it do it for us.
        if 2 <= base <= 36:
            self.unbase = lambda string: int(string, base)
        else:
            # Build the conversion dictionary cache once per instance.
            try:
                self.dictionary = {
                    cipher: index for index, cipher in enumerate(self.ALPHABET[base])
                }
            except KeyError:
                raise TypeError("Unsupported base encoding.")

            self.unbase = self._dictunbaser

    def __call__(self, string):
        return self.unbase(string)

    def _dictunbaser(self, string):
        """Decode *string* (most-significant digit first) to an integer."""
        ret = 0
        for index, cipher in enumerate(string[::-1]):
            ret += (self.base**index) * self.dictionary[cipher]
        return ret
|
||||
class UnpackingError(Exception):
    """Raised for badly packed source or a general unpacking failure.

    The exception argument carries a meaningful, human-readable description.
    """
|
||||
|
||||
|
||||
|
||||
async def eval_solver(self, url: str, headers):
    """Fetch *url* and extract an m3u8 link from a packed ``<script>`` tag.

    Downloads the page via the extractor's ``_make_request``, scans every
    ``<script>`` element for Dean Edwards' p.a.c.k.e.r. boilerplate, unpacks
    the first match and returns the URL found in its ``file:"..."``
    assignment.  Returns None when no script yields a match.

    Args:
        self: extractor instance providing ``_make_request``.
        url: page URL to fetch.
        headers: request headers forwarded to ``_make_request``.

    Raises:
        Exception: on any fetch/unpack failure, chained to the original error.
    """
    try:
        response = await self._make_request(url, headers=headers)
        # Only parse <script> tags; the rest of the page is irrelevant.
        soup = BeautifulSoup(response.text, "lxml", parse_only=SoupStrainer("script"))
        for script in soup.find_all("script"):
            if not detect(script.text):
                continue
            unpacked_code = unpack(script.text)
            match = re.search(r'file:"(.*?)"', unpacked_code)
            if match:
                return match.group(1)
    except Exception as e:
        # BUG FIX: the original called logger.error("...\n", e) — a lazy
        # %-arg with no placeholder, which makes the logging call itself
        # fail.  logger.exception records the message plus the traceback.
        logger.exception("Eval solver error")
        # Chain the cause so the original failure isn't lost.
        raise Exception("Error in eval_solver") from e
|
||||
Reference in New Issue
Block a user