Linted and formatted all files

This commit is contained in:
2025-05-28 21:52:39 -05:00
parent e46f13930d
commit 02913c7385
31 changed files with 1264 additions and 766 deletions

View File

@@ -2,11 +2,13 @@ import base64
import hashlib
import hmac
def calculate_secret_hash(username: str, client_id: str, client_secret: str) -> str:
    """Calculate the Cognito SECRET_HASH for secret-enabled app clients.

    AWS Cognito requires an HMAC-SHA256 of ``username + client_id`` keyed
    with the app client secret, base64-encoded, whenever the app client has
    a secret configured.

    Args:
        username: The Cognito username being authenticated.
        client_id: The Cognito app client id.
        client_secret: The app client secret used as the HMAC key.

    Returns:
        The base64-encoded HMAC-SHA256 digest as a string.
    """
    # NOTE(review): the source span interleaved pre- and post-format diff
    # lines (the digest was computed twice, once unreachably); this is the
    # reconciled post-format implementation.
    msg = username + client_id
    dig = hmac.new(
        client_secret.encode("utf-8"), msg.encode("utf-8"), hashlib.sha256
    ).digest()
    return base64.b64encode(dig).decode()

View File

@@ -1,41 +1,50 @@
import os
import argparse
import requests
import logging
from requests.exceptions import RequestException, Timeout, ConnectionError, HTTPError
import os
import requests
from requests.exceptions import ConnectionError, HTTPError, RequestException, Timeout
class StreamValidator:
    """Validates media stream URLs over HTTP using a shared requests session."""

    def __init__(self, timeout=10, user_agent=None):
        """Create a validator.

        Args:
            timeout: Per-request timeout in seconds.
            user_agent: Optional User-Agent header value; a desktop Chrome UA
                is used by default so servers treat probes like a browser.
        """
        # NOTE(review): reconciled from a rendered diff — the source span
        # contained both the pre- and post-format headers.update() calls.
        self.timeout = timeout
        self.session = requests.Session()
        self.session.headers.update(
            {
                "User-Agent": user_agent
                or (
                    "Mozilla/5.0 (Windows NT 10.0; Win64; x64) "
                    "AppleWebKit/537.36 (KHTML, like Gecko) "
                    "Chrome/120.0.0.0 Safari/537.36"
                )
            }
        )
# NOTE(review): this span is a rendered diff — pre- and post-format lines are
# interleaved (both quote styles of the same statement appear) and the method
# is truncated by the hunk header that follows; reconcile before executing.
def validate_stream(self, url):
"""Validate a media stream URL with multiple fallback checks"""
try:
# Request only the first KiB so dead links fail fast without a full download.
headers = {'Range': 'bytes=0-1024'}
headers = {"Range": "bytes=0-1024"}
with self.session.get(
url,
headers=headers,
timeout=self.timeout,
stream=True,
allow_redirects=True
allow_redirects=True,
) as response:
# 200 (full body) and 206 (partial content) both count as reachable.
if response.status_code not in [200, 206]:
return False, f"Invalid status code: {response.status_code}"
content_type = response.headers.get('Content-Type', '')
content_type = response.headers.get("Content-Type", "")
if not self._is_valid_content_type(content_type):
return False, f"Invalid content type: {content_type}"
try:
# Pull one chunk to confirm the stream actually delivers bytes.
next(response.iter_content(chunk_size=1024))
return True, "Stream is valid"
except (ConnectionError, Timeout):
return False, "Connection failed during content read"
except HTTPError as e:
return False, f"HTTP Error: {str(e)}"
# NOTE(review): truncated here by the diff hunk boundary — handler body not shown.
except ConnectionError as e:
@@ -49,10 +58,13 @@ class StreamValidator:
def _is_valid_content_type(self, content_type):
    """Return True if *content_type* looks like a streamable media type.

    Substring matching is used so parameters such as ``; charset=...`` do
    not cause false negatives.
    """
    # NOTE(review): reconciled from a rendered diff — the source span merged
    # the pre-format list into the post-format one, which (via implicit
    # string concatenation) corrupted one entry and duplicated the rest.
    valid_types = [
        "video/mp2t",
        "application/vnd.apple.mpegurl",
        "application/dash+xml",
        "video/mp4",
        "video/webm",
        "application/octet-stream",
        "application/x-mpegURL",
    ]
    return any(ct in content_type for ct in valid_types)
@@ -60,45 +72,43 @@ class StreamValidator:
# NOTE(review): the enclosing `def` line was cut by the diff hunk header
# above; this body collects non-blank, non-comment lines (stream URLs) from
# an M3U playlist at `file_path`, logging and re-raising on read failure.
"""Extract stream URLs from M3U playlist file"""
urls = []
try:
with open(file_path, 'r') as f:
with open(file_path) as f:
for line in f:
line = line.strip()
# M3U directives start with '#'; everything else is treated as a URL.
if line and not line.startswith('#'):
if line and not line.startswith("#"):
urls.append(line)
except Exception as e:
logging.error(f"Error reading playlist file: {str(e)}")
raise
return urls
def main():
# NOTE(review): rendered diff — pre- and post-format argparse calls are
# interleaved below; the single-quoted variants are the pre-format lines.
parser = argparse.ArgumentParser(
description='Validate streaming URLs from command line arguments or playlist files',
formatter_class=argparse.ArgumentDefaultsHelpFormatter
description=(
"Validate streaming URLs from command line arguments or playlist files"
),
formatter_class=argparse.ArgumentDefaultsHelpFormatter,
)
parser.add_argument(
'sources',
nargs='+',
help='List of URLs or file paths containing stream URLs'
"sources", nargs="+", help="List of URLs or file paths containing stream URLs"
)
parser.add_argument(
'--timeout',
type=int,
default=20,
help='Timeout in seconds for stream checks'
"--timeout", type=int, default=20, help="Timeout in seconds for stream checks"
)
parser.add_argument(
'--output',
default='deadstreams.txt',
help='Output file name for inactive streams'
"--output",
default="deadstreams.txt",
help="Output file name for inactive streams",
)
args = parser.parse_args()
# Configure logging
# Logs go both to stream_check.log and to the console.
logging.basicConfig(
level=logging.INFO,
format='%(asctime)s - %(levelname)s - %(message)s',
handlers=[logging.FileHandler('stream_check.log'), logging.StreamHandler()]
format="%(asctime)s - %(levelname)s - %(message)s",
handlers=[logging.FileHandler("stream_check.log"), logging.StreamHandler()],
)
validator = StreamValidator(timeout=args.timeout)
# NOTE(review): the validation loop that builds `dead_streams` is omitted by
# the hunk boundary below.
@@ -127,9 +137,10 @@ def main():
# Save dead streams to file
if dead_streams:
with open(args.output, 'w') as f:
f.write('\n'.join(dead_streams))
with open(args.output, "w") as f:
f.write("\n".join(dead_streams))
logging.info(f"Found {len(dead_streams)} dead streams. Saved to {args.output}.")
if __name__ == "__main__":
main()
main()

View File

@@ -21,4 +21,4 @@ IPTV_SERVER_ADMIN_USER = os.getenv("IPTV_SERVER_ADMIN_USER", "admin")
# NOTE(review): rendered diff — the two EPG_URL lines below are the pre- and
# post-format versions of the same assignment (textually identical here).
IPTV_SERVER_ADMIN_PASSWORD = os.getenv("IPTV_SERVER_ADMIN_PASSWORD", "adminpassword")
# URL for the EPG XML file to place in the playlist's header
EPG_URL = os.getenv("EPG_URL", "https://example.com/epg.xml.gz")
EPG_URL = os.getenv("EPG_URL", "https://example.com/epg.xml.gz")

View File

@@ -1,11 +1,13 @@
import os
import boto3
from app.models import Base
from .constants import AWS_REGION
from sqlalchemy import create_engine
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker
from functools import lru_cache
from app.models import Base
from .constants import AWS_REGION
def get_db_credentials():
"""Fetch and cache DB credentials from environment or SSM Parameter Store"""
# NOTE(review): the hunk boundary below omits this function's opening branch —
# presumably a check that returns the env-var-based URL (the orphaned f-string
# fragment is its tail); confirm against the full file before reuse.
@@ -14,29 +16,40 @@ def get_db_credentials():
f"postgresql://{os.getenv('DB_USER')}:{os.getenv('DB_PASSWORD')}"
f"@{os.getenv('DB_HOST')}/{os.getenv('DB_NAME')}"
)
ssm = boto3.client('ssm', region_name=AWS_REGION)
ssm = boto3.client("ssm", region_name=AWS_REGION)
try:
# Each credential lives as an encrypted SSM parameter under /iptv-updater/
# and is decrypted on read. Pre- and post-format diff lines interleaved below.
host = ssm.get_parameter(Name='/iptv-updater/DB_HOST', WithDecryption=True)['Parameter']['Value']
user = ssm.get_parameter(Name='/iptv-updater/DB_USER', WithDecryption=True)['Parameter']['Value']
password = ssm.get_parameter(Name='/iptv-updater/DB_PASSWORD', WithDecryption=True)['Parameter']['Value']
dbname = ssm.get_parameter(Name='/iptv-updater/DB_NAME', WithDecryption=True)['Parameter']['Value']
host = ssm.get_parameter(Name="/iptv-updater/DB_HOST", WithDecryption=True)[
"Parameter"
]["Value"]
user = ssm.get_parameter(Name="/iptv-updater/DB_USER", WithDecryption=True)[
"Parameter"
]["Value"]
password = ssm.get_parameter(
Name="/iptv-updater/DB_PASSWORD", WithDecryption=True
)["Parameter"]["Value"]
dbname = ssm.get_parameter(Name="/iptv-updater/DB_NAME", WithDecryption=True)[
"Parameter"
]["Value"]
return f"postgresql://{user}:{password}@{host}/{dbname}"
except Exception as e:
raise RuntimeError(f"Failed to fetch DB credentials from SSM: {str(e)}")
# Initialize engine and session maker
# Module-level side effect: credentials are resolved at import time.
engine = create_engine(get_db_credentials())
SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine)
def init_db():
    """Create every table registered on the shared declarative ``Base``.

    Idempotent at the SQLAlchemy level: existing tables are left untouched.
    """
    Base.metadata.create_all(bind=engine)
def get_db():
    """Yield a database session, guaranteeing it is closed afterwards.

    Generator-style dependency: the caller receives the session from
    ``yield``; cleanup always runs in ``finally`` when the caller is done.
    """
    session = SessionLocal()
    try:
        yield session
    finally:
        session.close()