Added in_use and priority_id fields for channel URLs. Added priorities table. Set up SQLAlchemy (Alembic) migrations. Generated first migration.
All checks were successful
AWS Deploy on Push / build (push) Successful in 2m4s
All checks were successful
AWS Deploy on Push / build (push) Successful in 2m4s
This commit is contained in:
141
alembic.ini
Normal file
141
alembic.ini
Normal file
@@ -0,0 +1,141 @@
|
||||
# A generic, single database configuration.
|
||||
|
||||
[alembic]
|
||||
# path to migration scripts.
|
||||
# this is typically a path given in POSIX (e.g. forward slashes)
|
||||
# format, relative to the token %(here)s which refers to the location of this
|
||||
# ini file
|
||||
script_location = %(here)s/alembic
|
||||
|
||||
# template used to generate migration file names; The default value is %%(rev)s_%%(slug)s
|
||||
# Uncomment the line below if you want the files to be prepended with date and time
|
||||
# see https://alembic.sqlalchemy.org/en/latest/tutorial.html#editing-the-ini-file
|
||||
# for all available tokens
|
||||
# file_template = %%(year)d_%%(month).2d_%%(day).2d_%%(hour).2d%%(minute).2d-%%(rev)s_%%(slug)s
|
||||
|
||||
# sys.path path, will be prepended to sys.path if present.
|
||||
# defaults to the current working directory. for multiple paths, the path separator
|
||||
# is defined by "path_separator" below.
|
||||
prepend_sys_path = .
|
||||
|
||||
|
||||
# timezone to use when rendering the date within the migration file
|
||||
# as well as the filename.
|
||||
# If specified, requires the python>=3.9 or backports.zoneinfo library and tzdata library.
|
||||
# Any required deps can installed by adding `alembic[tz]` to the pip requirements
|
||||
# string value is passed to ZoneInfo()
|
||||
# leave blank for localtime
|
||||
# timezone =
|
||||
|
||||
# max length of characters to apply to the "slug" field
|
||||
# truncate_slug_length = 40
|
||||
|
||||
# set to 'true' to run the environment during
|
||||
# the 'revision' command, regardless of autogenerate
|
||||
# revision_environment = false
|
||||
|
||||
# set to 'true' to allow .pyc and .pyo files without
|
||||
# a source .py file to be detected as revisions in the
|
||||
# versions/ directory
|
||||
# sourceless = false
|
||||
|
||||
# version location specification; This defaults
|
||||
# to <script_location>/versions. When using multiple version
|
||||
# directories, initial revisions must be specified with --version-path.
|
||||
# The path separator used here should be the separator specified by "path_separator"
|
||||
# below.
|
||||
# version_locations = %(here)s/bar:%(here)s/bat:%(here)s/alembic/versions
|
||||
|
||||
# path_separator; This indicates what character is used to split lists of file
|
||||
# paths, including version_locations and prepend_sys_path within configparser
|
||||
# files such as alembic.ini.
|
||||
# The default rendered in new alembic.ini files is "os", which uses os.pathsep
|
||||
# to provide os-dependent path splitting.
|
||||
#
|
||||
# Note that in order to support legacy alembic.ini files, this default does NOT
|
||||
# take place if path_separator is not present in alembic.ini. If this
|
||||
# option is omitted entirely, fallback logic is as follows:
|
||||
#
|
||||
# 1. Parsing of the version_locations option falls back to using the legacy
|
||||
# "version_path_separator" key, which if absent then falls back to the legacy
|
||||
# behavior of splitting on spaces and/or commas.
|
||||
# 2. Parsing of the prepend_sys_path option falls back to the legacy
|
||||
# behavior of splitting on spaces, commas, or colons.
|
||||
#
|
||||
# Valid values for path_separator are:
|
||||
#
|
||||
# path_separator = :
|
||||
# path_separator = ;
|
||||
# path_separator = space
|
||||
# path_separator = newline
|
||||
#
|
||||
# Use os.pathsep. Default configuration used for new projects.
|
||||
path_separator = os
|
||||
|
||||
# set to 'true' to search source files recursively
|
||||
# in each "version_locations" directory
|
||||
# new in Alembic version 1.10
|
||||
# recursive_version_locations = false
|
||||
|
||||
# the output encoding used when revision files
|
||||
# are written from script.py.mako
|
||||
# output_encoding = utf-8
|
||||
|
||||
# database URL. This is consumed by the user-maintained env.py script only.
|
||||
# other means of configuring database URLs may be customized within the env.py
|
||||
# file.
|
||||
sqlalchemy.url = driver://user:pass@localhost/dbname
|
||||
|
||||
|
||||
[post_write_hooks]
|
||||
# post_write_hooks defines scripts or Python functions that are run
|
||||
# on newly generated revision scripts. See the documentation for further
|
||||
# detail and examples
|
||||
|
||||
# format using "black" - use the console_scripts runner, against the "black" entrypoint
|
||||
# hooks = black
|
||||
# black.type = console_scripts
|
||||
# black.entrypoint = black
|
||||
# black.options = -l 79 REVISION_SCRIPT_FILENAME
|
||||
|
||||
# lint with attempts to fix using "ruff" - use the exec runner, execute a binary
|
||||
# hooks = ruff
|
||||
# ruff.type = exec
|
||||
# ruff.executable = %(here)s/.venv/bin/ruff
|
||||
# ruff.options = check --fix REVISION_SCRIPT_FILENAME
|
||||
|
||||
# Logging configuration. This is also consumed by the user-maintained
|
||||
# env.py script only.
|
||||
[loggers]
|
||||
keys = root,sqlalchemy,alembic
|
||||
|
||||
[handlers]
|
||||
keys = console
|
||||
|
||||
[formatters]
|
||||
keys = generic
|
||||
|
||||
[logger_root]
|
||||
level = WARNING
|
||||
handlers = console
|
||||
qualname =
|
||||
|
||||
[logger_sqlalchemy]
|
||||
level = WARNING
|
||||
handlers =
|
||||
qualname = sqlalchemy.engine
|
||||
|
||||
[logger_alembic]
|
||||
level = INFO
|
||||
handlers =
|
||||
qualname = alembic
|
||||
|
||||
[handler_console]
|
||||
class = StreamHandler
|
||||
args = (sys.stderr,)
|
||||
level = NOTSET
|
||||
formatter = generic
|
||||
|
||||
[formatter_generic]
|
||||
format = %(levelname)-5.5s [%(name)s] %(message)s
|
||||
datefmt = %H:%M:%S
|
||||
1
alembic/README
Normal file
1
alembic/README
Normal file
@@ -0,0 +1 @@
|
||||
Generic single-database configuration.
|
||||
82
alembic/env.py
Normal file
82
alembic/env.py
Normal file
@@ -0,0 +1,82 @@
|
||||
import os
|
||||
from logging.config import fileConfig
|
||||
|
||||
from sqlalchemy import engine_from_config
|
||||
from sqlalchemy import pool
|
||||
|
||||
from alembic import context
|
||||
from app.utils.database import get_db_credentials
|
||||
from app.models.db import Base
|
||||
|
||||
# this is the Alembic Config object, which provides
|
||||
# access to the values within the .ini file in use.
|
||||
config = context.config
|
||||
|
||||
# Interpret the config file for Python logging.
|
||||
# This line sets up loggers basically.
|
||||
if config.config_file_name is not None:
|
||||
fileConfig(config.config_file_name)
|
||||
|
||||
# Setup target metadata for autogenerate support
|
||||
target_metadata = Base.metadata
|
||||
|
||||
# Override sqlalchemy.url with dynamic credentials
|
||||
if not context.is_offline_mode():
|
||||
config.set_main_option('sqlalchemy.url', get_db_credentials())
|
||||
|
||||
# other values from the config, defined by the needs of env.py,
|
||||
# can be acquired:
|
||||
# my_important_option = config.get_main_option("my_important_option")
|
||||
# ... etc.
|
||||
|
||||
|
||||
def run_migrations_offline() -> None:
    """Run migrations in 'offline' mode.

    Configures the context with just a database URL rather than an
    Engine, so no DBAPI needs to be installed; context.execute() calls
    emit the generated SQL to the script output instead of a live
    connection.
    """
    context.configure(
        url=config.get_main_option("sqlalchemy.url"),
        target_metadata=target_metadata,
        literal_binds=True,
        dialect_opts={"paramstyle": "named"},
    )

    with context.begin_transaction():
        context.run_migrations()
|
||||
|
||||
|
||||
def run_migrations_online() -> None:
    """Run migrations in 'online' mode.

    Builds an Engine from the [alembic] config section (NullPool: one
    short-lived connection, nothing held open after the run) and applies
    the migrations over a real database connection.
    """
    engine = engine_from_config(
        config.get_section(config.config_ini_section, {}),
        prefix="sqlalchemy.",
        poolclass=pool.NullPool,
    )

    with engine.connect() as connection:
        context.configure(
            connection=connection,
            target_metadata=target_metadata,
        )
        with context.begin_transaction():
            context.run_migrations()
|
||||
|
||||
|
||||
# Entry point: Alembic selects offline mode for SQL-script generation
# (e.g. `alembic upgrade --sql`), online mode for direct DB application.
if context.is_offline_mode():
    run_migrations_offline()
else:
    run_migrations_online()
|
||||
28
alembic/script.py.mako
Normal file
28
alembic/script.py.mako
Normal file
@@ -0,0 +1,28 @@
|
||||
"""${message}
|
||||
|
||||
Revision ID: ${up_revision}
|
||||
Revises: ${down_revision | comma,n}
|
||||
Create Date: ${create_date}
|
||||
|
||||
"""
|
||||
from typing import Sequence, Union
|
||||
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
${imports if imports else ""}
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision: str = ${repr(up_revision)}
|
||||
down_revision: Union[str, None] = ${repr(down_revision)}
|
||||
branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)}
|
||||
depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)}
|
||||
|
||||
|
||||
def upgrade() -> None:
|
||||
"""Upgrade schema."""
|
||||
${upgrades if upgrades else "pass"}
|
||||
|
||||
|
||||
def downgrade() -> None:
|
||||
"""Downgrade schema."""
|
||||
${downgrades if downgrades else "pass"}
|
||||
@@ -0,0 +1,59 @@
|
||||
"""Add priority and in_use fields
|
||||
|
||||
Revision ID: 036879e47172
|
||||
Revises:
|
||||
Create Date: 2025-05-26 19:21:32.285656
|
||||
|
||||
"""
|
||||
from typing import Sequence, Union
|
||||
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision: str = '036879e47172'
|
||||
down_revision: Union[str, None] = None
|
||||
branch_labels: Union[str, Sequence[str], None] = None
|
||||
depends_on: Union[str, Sequence[str], None] = None
|
||||
|
||||
|
||||
def upgrade() -> None:
    """Upgrade schema.

    Creates the priorities lookup table (if absent), seeds the default
    rows, and adds NOT NULL in_use / priority_id columns to
    channels_urls with a backfill for existing rows.
    """
    bind = op.get_bind()

    # 1. Create priorities table if it does not already exist.
    #    Use the public Inspector API; Dialect.has_table() is for
    #    internal dialect use only in SQLAlchemy 2.0 (pinned in requirements).
    if not sa.inspect(bind).has_table('priorities'):
        op.create_table(
            'priorities',
            sa.Column('id', sa.Integer(), nullable=False),
            sa.Column('description', sa.String(), nullable=False),
            sa.PrimaryKeyConstraint('id'),
        )

    # 2. Seed default priorities; PostgreSQL-specific upsert skips rows
    #    that already exist, so re-running is safe.
    op.execute("""
        INSERT INTO priorities (id, description)
        VALUES (100, 'High'), (200, 'Medium'), (300, 'Low')
        ON CONFLICT (id) DO NOTHING
    """)

    # 3. Add the new columns as nullable first so existing rows can be
    #    backfilled before the constraint is tightened.
    op.add_column('channels_urls', sa.Column('in_use', sa.Boolean(), nullable=True))
    op.add_column('channels_urls', sa.Column('priority_id', sa.Integer(), nullable=True))

    # 4. Backfill defaults: not in use, highest priority (100 = 'High').
    op.execute("UPDATE channels_urls SET in_use = false, priority_id = 100")

    # 5. Every row now has a value; enforce NOT NULL.
    op.alter_column('channels_urls', 'in_use', nullable=False)
    op.alter_column('channels_urls', 'priority_id', nullable=False)

    # Name the FK explicitly so downgrade() can drop it by the same name
    # on any backend (this matches PostgreSQL's default autoname, which
    # downgrade() already relies on).
    op.create_foreign_key(
        'channels_urls_priority_id_fkey',
        'channels_urls', 'priorities',
        ['priority_id'], ['id'],
    )
|
||||
|
||||
|
||||
def downgrade() -> None:
    """Downgrade schema.

    Reverses upgrade(): drops the FK first (it references priorities),
    then the channels_urls columns, then the priorities table.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    # Constraint name matches PostgreSQL's autoname for the FK created in upgrade().
    op.drop_constraint('channels_urls_priority_id_fkey', 'channels_urls', type_='foreignkey')
    op.drop_column('channels_urls', 'priority_id')
    op.drop_column('channels_urls', 'in_use')
    # NOTE(review): upgrade() only creates 'priorities' when it is absent,
    # but this drop is unconditional — if the table pre-existed the
    # migration, downgrading destroys it. Confirm this is acceptable.
    op.drop_table('priorities')
    # ### end Alembic commands ###
|
||||
@@ -1,5 +1,5 @@
|
||||
|
||||
from app.routers import channels, auth, playlist
|
||||
from app.routers import channels, auth, playlist, priorities
|
||||
from fastapi import FastAPI
|
||||
from fastapi.openapi.utils import get_openapi
|
||||
|
||||
@@ -56,3 +56,4 @@ async def root():
|
||||
app.include_router(auth.router)
|
||||
app.include_router(channels.router)
|
||||
app.include_router(playlist.router)
|
||||
app.include_router(priorities.router)
|
||||
@@ -1,12 +1,19 @@
|
||||
from datetime import datetime, timezone
|
||||
import uuid
|
||||
from sqlalchemy import Column, String, JSON, DateTime, UniqueConstraint, ForeignKey
|
||||
from sqlalchemy import Column, String, JSON, DateTime, UniqueConstraint, ForeignKey, Boolean, Integer
|
||||
from sqlalchemy.dialects.postgresql import UUID
|
||||
from sqlalchemy.ext.declarative import declarative_base
|
||||
from sqlalchemy.orm import relationship
|
||||
|
||||
Base = declarative_base()
|
||||
|
||||
class Priority(Base):
    """SQLAlchemy model for channel URL priorities.

    Seeded by the initial migration with 100='High', 200='Medium',
    300='Low'; referenced by ChannelURL.priority_id.
    """
    __tablename__ = "priorities"

    # Application-assigned id; seeds use 100/200/300 and the Pydantic
    # schemas validate priority_id within that range.
    id = Column(Integer, primary_key=True)
    # Human-readable label, e.g. 'High'.
    description = Column(String, nullable=False)
|
||||
|
||||
class ChannelDB(Base):
|
||||
"""SQLAlchemy model for IPTV channels"""
|
||||
__tablename__ = "channels"
|
||||
@@ -34,8 +41,11 @@ class ChannelURL(Base):
|
||||
id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
|
||||
channel_id = Column(UUID(as_uuid=True), ForeignKey('channels.id', ondelete='CASCADE'), nullable=False)
|
||||
url = Column(String, nullable=False)
|
||||
in_use = Column(Boolean, default=False, nullable=False)
|
||||
priority_id = Column(Integer, ForeignKey('priorities.id'), nullable=False)
|
||||
created_at = Column(DateTime, default=lambda: datetime.now(timezone.utc))
|
||||
updated_at = Column(DateTime, default=lambda: datetime.now(timezone.utc), onupdate=lambda: datetime.now(timezone.utc))
|
||||
|
||||
# Relationship with ChannelDB
|
||||
# Relationships
|
||||
channel = relationship("ChannelDB", back_populates="urls")
|
||||
priority = relationship("Priority")
|
||||
@@ -1,17 +1,36 @@
|
||||
from datetime import datetime
|
||||
from typing import List, Optional
|
||||
from uuid import UUID
|
||||
from pydantic import BaseModel
|
||||
from pydantic import BaseModel, Field
|
||||
|
||||
class PriorityBase(BaseModel):
|
||||
"""Base Pydantic model for priorities"""
|
||||
id: int
|
||||
description: str
|
||||
|
||||
class Config:
|
||||
from_attributes = True
|
||||
|
||||
class PriorityCreate(PriorityBase):
|
||||
"""Pydantic model for creating priorities"""
|
||||
pass
|
||||
|
||||
class PriorityResponse(PriorityBase):
|
||||
"""Pydantic model for priority responses"""
|
||||
pass
|
||||
|
||||
class ChannelURLCreate(BaseModel):
|
||||
"""Pydantic model for creating channel URLs"""
|
||||
url: str
|
||||
priority_id: int = Field(default=100, ge=100, le=300) # Default to High, validate range
|
||||
|
||||
class ChannelURLBase(ChannelURLCreate):
|
||||
"""Base Pydantic model for channel URL responses"""
|
||||
id: UUID
|
||||
in_use: bool
|
||||
created_at: datetime
|
||||
updated_at: datetime
|
||||
priority_id: int
|
||||
|
||||
class Config:
|
||||
from_attributes = True
|
||||
@@ -22,20 +41,26 @@ class ChannelURLResponse(ChannelURLBase):
|
||||
|
||||
class ChannelCreate(BaseModel):
|
||||
"""Pydantic model for creating channels"""
|
||||
urls: List[str] # List of URLs to create with the channel
|
||||
urls: List[ChannelURLCreate] # List of URL objects with priority
|
||||
name: str
|
||||
group_title: str
|
||||
tvg_id: str
|
||||
tvg_logo: str
|
||||
tvg_name: str
|
||||
|
||||
class ChannelURLUpdate(BaseModel):
|
||||
"""Pydantic model for updating channel URLs"""
|
||||
url: Optional[str] = None
|
||||
in_use: Optional[bool] = None
|
||||
priority_id: Optional[int] = Field(default=None, ge=100, le=300)
|
||||
|
||||
class ChannelUpdate(BaseModel):
|
||||
"""Pydantic model for updating channels"""
|
||||
name: Optional[str] = None
|
||||
group_title: Optional[str] = None
|
||||
tvg_id: Optional[str] = None
|
||||
"""Pydantic model for updating channels (all fields optional)"""
|
||||
name: Optional[str] = Field(None, min_length=1)
|
||||
group_title: Optional[str] = Field(None, min_length=1)
|
||||
tvg_id: Optional[str] = Field(None, min_length=1)
|
||||
tvg_logo: Optional[str] = None
|
||||
tvg_name: Optional[str] = None
|
||||
tvg_name: Optional[str] = Field(None, min_length=1)
|
||||
|
||||
class ChannelResponse(BaseModel):
|
||||
"""Pydantic model for channel responses"""
|
||||
|
||||
@@ -13,6 +13,7 @@ from app.models import (
|
||||
ChannelURLCreate,
|
||||
ChannelURLResponse,
|
||||
)
|
||||
from app.models.schemas import ChannelURLUpdate
|
||||
from app.utils.database import get_db
|
||||
from app.auth.dependencies import get_current_user, require_roles
|
||||
from app.models.auth import CognitoUser
|
||||
@@ -45,16 +46,21 @@ def create_channel(
|
||||
)
|
||||
|
||||
# Create channel without URLs first
|
||||
channel_data = channel.model_dump()
|
||||
urls = channel_data.pop('urls', [])
|
||||
channel_data = channel.model_dump(exclude={'urls'})
|
||||
urls = channel.urls
|
||||
db_channel = ChannelDB(**channel_data)
|
||||
db.add(db_channel)
|
||||
db.commit()
|
||||
db.refresh(db_channel)
|
||||
|
||||
# Add URLs
|
||||
# Add URLs with priority
|
||||
for url in urls:
|
||||
db_url = ChannelURL(channel_id=db_channel.id, url=url)
|
||||
db_url = ChannelURL(
|
||||
channel_id=db_channel.id,
|
||||
url=url.url,
|
||||
priority_id=url.priority_id,
|
||||
in_use=False
|
||||
)
|
||||
db.add(db_url)
|
||||
|
||||
db.commit()
|
||||
@@ -91,22 +97,28 @@ def update_channel(
|
||||
detail="Channel not found"
|
||||
)
|
||||
|
||||
# Check for duplicate channel (same group_title + name) excluding current channel
|
||||
existing_channel = db.query(ChannelDB).filter(
|
||||
and_(
|
||||
ChannelDB.group_title == channel.group_title,
|
||||
ChannelDB.name == channel.name,
|
||||
ChannelDB.id != channel_id
|
||||
)
|
||||
).first()
|
||||
# Only check for duplicates if name or group_title are being updated
|
||||
if channel.name is not None or channel.group_title is not None:
|
||||
name = channel.name if channel.name is not None else db_channel.name
|
||||
group_title = channel.group_title if channel.group_title is not None else db_channel.group_title
|
||||
|
||||
if existing_channel:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_409_CONFLICT,
|
||||
detail="Channel with same group_title and name already exists"
|
||||
)
|
||||
existing_channel = db.query(ChannelDB).filter(
|
||||
and_(
|
||||
ChannelDB.group_title == group_title,
|
||||
ChannelDB.name == name,
|
||||
ChannelDB.id != channel_id
|
||||
)
|
||||
).first()
|
||||
|
||||
for key, value in channel.model_dump().items():
|
||||
if existing_channel:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_409_CONFLICT,
|
||||
detail="Channel with same group_title and name already exists"
|
||||
)
|
||||
|
||||
# Update only provided fields
|
||||
update_data = channel.model_dump(exclude_unset=True)
|
||||
for key, value in update_data.items():
|
||||
setattr(db_channel, key, value)
|
||||
|
||||
db.commit()
|
||||
@@ -160,12 +172,51 @@ def add_channel_url(
|
||||
detail="Channel not found"
|
||||
)
|
||||
|
||||
db_url = ChannelURL(channel_id=channel_id, url=url.url)
|
||||
db_url = ChannelURL(
|
||||
channel_id=channel_id,
|
||||
url=url.url,
|
||||
priority_id=url.priority_id,
|
||||
in_use=False # Default to not in use
|
||||
)
|
||||
db.add(db_url)
|
||||
db.commit()
|
||||
db.refresh(db_url)
|
||||
return db_url
|
||||
|
||||
@router.put("/{channel_id}/urls/{url_id}", response_model=ChannelURLResponse)
|
||||
@require_roles("admin")
|
||||
def update_channel_url(
|
||||
channel_id: UUID,
|
||||
url_id: UUID,
|
||||
url_update: ChannelURLUpdate,
|
||||
db: Session = Depends(get_db),
|
||||
user: CognitoUser = Depends(get_current_user)
|
||||
):
|
||||
"""Update a channel URL (url, in_use, or priority_id)"""
|
||||
db_url = db.query(ChannelURL).filter(
|
||||
and_(
|
||||
ChannelURL.id == url_id,
|
||||
ChannelURL.channel_id == channel_id
|
||||
)
|
||||
).first()
|
||||
|
||||
if not db_url:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_404_NOT_FOUND,
|
||||
detail="URL not found"
|
||||
)
|
||||
|
||||
if url_update.url is not None:
|
||||
db_url.url = url_update.url
|
||||
if url_update.in_use is not None:
|
||||
db_url.in_use = url_update.in_use
|
||||
if url_update.priority_id is not None:
|
||||
db_url.priority_id = url_update.priority_id
|
||||
|
||||
db.commit()
|
||||
db.refresh(db_url)
|
||||
return db_url
|
||||
|
||||
@router.delete("/{channel_id}/urls/{url_id}", status_code=status.HTTP_204_NO_CONTENT)
|
||||
@require_roles("admin")
|
||||
def delete_channel_url(
|
||||
|
||||
97
app/routers/priorities.py
Normal file
97
app/routers/priorities.py
Normal file
@@ -0,0 +1,97 @@
|
||||
from fastapi import APIRouter, Depends, HTTPException, status
|
||||
from sqlalchemy.orm import Session
|
||||
from sqlalchemy import select, delete
|
||||
from typing import List
|
||||
|
||||
from app.models.db import Priority
|
||||
from app.models.schemas import PriorityCreate, PriorityResponse
|
||||
from app.utils.database import get_db
|
||||
from app.auth.dependencies import get_current_user, require_roles
|
||||
from app.models.auth import CognitoUser
|
||||
|
||||
router = APIRouter(
|
||||
prefix="/priorities",
|
||||
tags=["priorities"]
|
||||
)
|
||||
|
||||
@router.post("/", response_model=PriorityResponse, status_code=status.HTTP_201_CREATED)
|
||||
@require_roles("admin")
|
||||
def create_priority(
    priority: PriorityCreate,
    db: Session = Depends(get_db),
    user: CognitoUser = Depends(get_current_user)
):
    """Create a new priority (admin only).

    Returns 409 if a priority with the same id already exists.
    """
    # Check if priority with this ID already exists.
    # NOTE(review): check-then-insert is racy under concurrent requests;
    # a duplicate slipping through raises IntegrityError (500) instead
    # of 409 — confirm whether that window matters here.
    existing = db.get(Priority, priority.id)
    if existing:
        raise HTTPException(
            status_code=status.HTTP_409_CONFLICT,
            detail=f"Priority with ID {priority.id} already exists"
        )

    db_priority = Priority(**priority.model_dump())
    db.add(db_priority)
    db.commit()
    # Refresh so the response reflects the persisted row.
    db.refresh(db_priority)
    return db_priority
|
||||
|
||||
@router.get("/", response_model=List[PriorityResponse])
|
||||
@require_roles("admin")
|
||||
def list_priorities(
    db: Session = Depends(get_db),
    user: CognitoUser = Depends(get_current_user)
):
    """Return all priorities (admin only)."""
    # SQLAlchemy 2.0-style select; equivalent to db.query(Priority).all().
    return db.scalars(select(Priority)).all()
|
||||
|
||||
@router.get("/{priority_id}", response_model=PriorityResponse)
|
||||
@require_roles("admin")
|
||||
def get_priority(
    priority_id: int,
    db: Session = Depends(get_db),
    user: CognitoUser = Depends(get_current_user)
):
    """Get a priority by id (admin only).

    Raises 404 if no priority with that id exists.
    """
    priority = db.get(Priority, priority_id)
    if not priority:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="Priority not found"
        )
    return priority
|
||||
|
||||
@router.delete("/{priority_id}", status_code=status.HTTP_204_NO_CONTENT)
|
||||
@require_roles("admin")
|
||||
def delete_priority(
    priority_id: int,
    db: Session = Depends(get_db),
    user: CognitoUser = Depends(get_current_user)
):
    """Delete a priority (admin only).

    404 if the priority does not exist; 409 if any channel URL still
    references it. Returns 204 with no body on success.
    """
    # Local import mirrors the original: avoids widening module imports.
    from app.models.db import ChannelURL

    # Ensure the priority exists before doing anything else.
    if db.get(Priority, priority_id) is None:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="Priority not found"
        )

    # Refuse deletion while any channel URL references this priority.
    referencing_url = db.scalar(
        select(ChannelURL).where(ChannelURL.priority_id == priority_id).limit(1)
    )
    if referencing_url is not None:
        raise HTTPException(
            status_code=status.HTTP_409_CONFLICT,
            detail="Cannot delete priority that is in use by channel URLs"
        )

    db.execute(delete(Priority).where(Priority.id == priority_id))
    db.commit()
    return None
|
||||
@@ -16,6 +16,12 @@ cd iptv-updater-aws
|
||||
# Install Python packages with --ignore-installed to prevent conflicts with RPM packages
|
||||
pip3 install --ignore-installed -r requirements.txt
|
||||
|
||||
# Run database migrations
|
||||
alembic upgrade head
|
||||
|
||||
# Seed initial priorities
|
||||
# Seed initial priorities idempotently: the migration already inserts ids
# 100/200/300 (ON CONFLICT DO NOTHING), so add_all() would raise
# IntegrityError on re-deploy; Session.merge upserts by primary key.
python3 -c "from app.utils.database import SessionLocal; from app.models.db import Priority; db = SessionLocal(); [db.merge(Priority(id=i, description=d)) for i, d in ((100, 'High'), (200, 'Medium'), (300, 'Low'))]; db.commit()"
|
||||
|
||||
# Create systemd service file
|
||||
cat << 'EOF' > /etc/systemd/system/iptv-updater.service
|
||||
[Unit]
|
||||
|
||||
@@ -11,3 +11,4 @@ starlette>=0.27.0
|
||||
pyjwt==2.7.0
|
||||
sqlalchemy==2.0.23
|
||||
psycopg2-binary==2.9.9
|
||||
alembic==1.16.1
|
||||
@@ -35,7 +35,7 @@ for INSTANCE_ID in $INSTANCE_IDS; do
|
||||
aws ssm send-command \
|
||||
--instance-ids "$INSTANCE_ID" \
|
||||
--document-name "AWS-RunShellScript" \
|
||||
--parameters '{"commands":["cd /home/ec2-user/iptv-updater-aws && git pull && pip3 install -r requirements.txt && sudo systemctl restart iptv-updater"]}' \
|
||||
--parameters '{"commands":["cd /home/ec2-user/iptv-updater-aws && git pull && pip3 install -r requirements.txt && alembic upgrade head && sudo systemctl restart iptv-updater"]}' \
|
||||
--no-cli-pager \
|
||||
--no-paginate
|
||||
done
|
||||
|
||||
@@ -3,3 +3,9 @@
|
||||
# Install dependencies and deploy infrastructure
|
||||
npm install -g aws-cdk
|
||||
python3 -m pip install -r requirements.txt
|
||||
|
||||
# Initialize and run database migrations
|
||||
alembic upgrade head
|
||||
|
||||
# Seed initial data
|
||||
# Seed initial data idempotently: the migration already inserts ids
# 100/200/300 (ON CONFLICT DO NOTHING), so add_all() would raise
# IntegrityError on every re-run; Session.merge upserts by primary key.
python3 -c "from app.utils.database import SessionLocal; from app.models.db import Priority; db = SessionLocal(); [db.merge(Priority(id=i, description=d)) for i, d in ((100, 'High'), (200, 'Medium'), (300, 'Low'))]; db.commit()"
|
||||
@@ -10,6 +10,10 @@ export DB_PASSWORD=postgres
|
||||
export DB_HOST=localhost
|
||||
export DB_NAME=iptv_updater
|
||||
|
||||
# Run database migrations
|
||||
alembic upgrade head
|
||||
|
||||
# Start FastAPI
|
||||
nohup uvicorn app.main:app --host 127.0.0.1 --port 8000 > app.log 2>&1 &
|
||||
echo $! > iptv-updater.pid
|
||||
|
||||
|
||||
Reference in New Issue
Block a user