diff --git a/.dockerignore b/.dockerignore index 5073af60..c79ca7b4 100755 --- a/.dockerignore +++ b/.dockerignore @@ -11,6 +11,10 @@ **/.toolstarget **/.vs **/.vscode +**/.history +**/media +**/models +**/static **/*.*proj.user **/*.dbmdl **/*.jfm @@ -26,3 +30,4 @@ **/values.dev.yaml LICENSE README.md +data/ diff --git a/apps/accounts/api_urls.py b/apps/accounts/api_urls.py index e1518105..dda3832c 100644 --- a/apps/accounts/api_urls.py +++ b/apps/accounts/api_urls.py @@ -1,41 +1,39 @@ from django.urls import path, include from rest_framework.routers import DefaultRouter from .api_views import ( - AuthViewSet, UserViewSet, GroupViewSet, - list_permissions, initialize_superuser + AuthViewSet, + UserViewSet, + GroupViewSet, + TokenObtainPairView, + TokenRefreshView, + list_permissions, + initialize_superuser, ) from rest_framework_simplejwt import views as jwt_views -app_name = 'accounts' +app_name = "accounts" # 🔹 Register ViewSets with a Router router = DefaultRouter() -router.register(r'users', UserViewSet, basename='user') -router.register(r'groups', GroupViewSet, basename='group') +router.register(r"users", UserViewSet, basename="user") +router.register(r"groups", GroupViewSet, basename="group") # 🔹 Custom Authentication Endpoints -auth_view = AuthViewSet.as_view({ - 'post': 'login' -}) +auth_view = AuthViewSet.as_view({"post": "login"}) -logout_view = AuthViewSet.as_view({ - 'post': 'logout' -}) +logout_view = AuthViewSet.as_view({"post": "logout"}) # 🔹 Define API URL patterns urlpatterns = [ # Authentication - path('auth/login/', auth_view, name='user-login'), - path('auth/logout/', logout_view, name='user-logout'), - + path("auth/login/", auth_view, name="user-login"), + path("auth/logout/", logout_view, name="user-logout"), # Superuser API - path('initialize-superuser/', initialize_superuser, name='initialize_superuser'), - + path("initialize-superuser/", initialize_superuser, name="initialize_superuser"), # Permissions API - path('permissions/', list_permissions, name='list-permissions'), - - path('token/', jwt_views.TokenObtainPairView.as_view(), name='token_obtain_pair'), - path('token/refresh/', jwt_views.TokenRefreshView.as_view(), name='token_refresh'), + path("permissions/", list_permissions, name="list-permissions"), + path("token/", TokenObtainPairView.as_view(), name="token_obtain_pair"), + path("token/refresh/", TokenRefreshView.as_view(), name="token_refresh"), ] # 🔹 Include ViewSet routes diff --git a/apps/accounts/api_views.py b/apps/accounts/api_views.py index 27d844df..f6b48e55 100644 --- a/apps/accounts/api_views.py +++ b/apps/accounts/api_views.py @@ -2,16 +2,37 @@ from django.contrib.auth import authenticate, login, logout from django.contrib.auth.models import Group, Permission from django.http import JsonResponse, HttpResponse from django.views.decorators.csrf import csrf_exempt -from rest_framework.decorators import api_view, permission_classes -from rest_framework.permissions import IsAuthenticated, AllowAny +from rest_framework.decorators import api_view, permission_classes, action from rest_framework.response import Response -from rest_framework import viewsets +from rest_framework import viewsets, status from drf_yasg.utils import swagger_auto_schema from drf_yasg import openapi import json +from .permissions import IsAdmin, Authenticated +from dispatcharr.utils import network_access_allowed from .models import User from .serializers import UserSerializer, GroupSerializer, PermissionSerializer +from rest_framework_simplejwt.views import 
(
+    TokenObtainPairView as BaseTokenObtainPairView,
+    TokenRefreshView as BaseTokenRefreshView,
+)
+
+
+class TokenObtainPairView(BaseTokenObtainPairView):
+    def post(self, request, *args, **kwargs):
+        # Reject token requests from networks that are not allowed to reach the UI
+        if not network_access_allowed(request, "UI"):
+            return Response({"error": "Forbidden"}, status=status.HTTP_403_FORBIDDEN)
+
+        return super().post(request, *args, **kwargs)
+
+
+class TokenRefreshView(BaseTokenRefreshView):
+    def post(self, request, *args, **kwargs):
+        # Apply the same network-access check before refreshing a token
+        if not network_access_allowed(request, "UI"):
+            return Response({"error": "Forbidden"}, status=status.HTTP_403_FORBIDDEN)
+
+        return super().post(request, *args, **kwargs)
+

 @csrf_exempt  # In production, consider CSRF protection strategies or ensure this endpoint is only accessible when no superuser exists.
 def initialize_superuser(request):
@@ -26,15 +47,20 @@ def initialize_superuser(request):
         password = data.get("password")
         email = data.get("email", "")
         if not username or not password:
-            return JsonResponse({"error": "Username and password are required."}, status=400)
+            return JsonResponse(
+                {"error": "Username and password are required."}, status=400
+            )

         # Create the superuser
-        User.objects.create_superuser(username=username, password=password, email=email)
+        User.objects.create_superuser(
+            username=username, password=password, email=email, user_level=10
+        )

         return JsonResponse({"superuser_exists": True})
     except Exception as e:
         return JsonResponse({"error": str(e)}, status=500)

     # For GET requests, indicate no superuser exists
     return JsonResponse({"superuser_exists": False})

+
 # 🔹 1) Authentication APIs
 class AuthViewSet(viewsets.ViewSet):
     """Handles user login and logout"""
@@ -43,36 +69,40 @@ class AuthViewSet(viewsets.ViewSet):
         operation_description="Authenticate and log in a user",
         request_body=openapi.Schema(
             type=openapi.TYPE_OBJECT,
-            required=['username', 'password'],
+            required=["username", "password"],
             properties={
-                'username': openapi.Schema(type=openapi.TYPE_STRING),
-                'password': openapi.Schema(type=openapi.TYPE_STRING, format=openapi.FORMAT_PASSWORD)
+                "username": openapi.Schema(type=openapi.TYPE_STRING),
+                "password": openapi.Schema(
+                    type=openapi.TYPE_STRING, format=openapi.FORMAT_PASSWORD
+                ),
             },
         ),
         responses={200: "Login successful", 400: "Invalid credentials"},
     )
     def login(self, request):
         """Logs in a user and returns user details"""
-        username = request.data.get('username')
-        password = request.data.get('password')
+        username = request.data.get("username")
+        password = request.data.get("password")
         user = authenticate(request, username=username, password=password)
         if user:
             login(request, user)
-            return Response({
-                "message": "Login successful",
-                "user": {
-                    "id": user.id,
-                    "username": user.username,
-                    "email": user.email,
-                    "groups": list(user.groups.values_list('name', flat=True))
+            return Response(
+                {
+                    "message": "Login successful",
+                    "user": {
+                        "id": user.id,
+                        "username": user.username,
+                        "email": user.email,
+                        "groups": list(user.groups.values_list("name", flat=True)),
+                    },
                 }
-            })
+            )
         return Response({"error": "Invalid credentials"}, status=400)

     @swagger_auto_schema(
         operation_description="Log out the current user",
-        responses={200: "Logout successful"}
+        responses={200: "Logout successful"},
     )
     def logout(self, request):
         """Logs out the authenticated user"""
@@ -83,13 +113,19 @@ class AuthViewSet(viewsets.ViewSet):

 # 🔹 2) User Management APIs
 class UserViewSet(viewsets.ModelViewSet):
     """Handles CRUD operations for Users"""
+
     queryset = User.objects.all()
     serializer_class = UserSerializer
-    permission_classes = 
[IsAuthenticated] + + def get_permissions(self): + if self.action == "me": + return [Authenticated()] + + return [IsAdmin()] @swagger_auto_schema( operation_description="Retrieve a list of users", - responses={200: UserSerializer(many=True)} + responses={200: UserSerializer(many=True)}, ) def list(self, request, *args, **kwargs): return super().list(request, *args, **kwargs) @@ -110,17 +146,28 @@ class UserViewSet(viewsets.ModelViewSet): def destroy(self, request, *args, **kwargs): return super().destroy(request, *args, **kwargs) + @swagger_auto_schema( + method="get", + operation_description="Get active user information", + ) + @action(detail=False, methods=["get"], url_path="me") + def me(self, request): + user = request.user + serializer = UserSerializer(user) + return Response(serializer.data) + # 🔹 3) Group Management APIs class GroupViewSet(viewsets.ModelViewSet): """Handles CRUD operations for Groups""" + queryset = Group.objects.all() serializer_class = GroupSerializer - permission_classes = [IsAuthenticated] + permission_classes = [Authenticated] @swagger_auto_schema( operation_description="Retrieve a list of groups", - responses={200: GroupSerializer(many=True)} + responses={200: GroupSerializer(many=True)}, ) def list(self, request, *args, **kwargs): return super().list(request, *args, **kwargs) @@ -144,12 +191,12 @@ class GroupViewSet(viewsets.ModelViewSet): # 🔹 4) Permissions List API @swagger_auto_schema( - method='get', + method="get", operation_description="Retrieve a list of all permissions", - responses={200: PermissionSerializer(many=True)} + responses={200: PermissionSerializer(many=True)}, ) -@api_view(['GET']) -@permission_classes([IsAuthenticated]) +@api_view(["GET"]) +@permission_classes([Authenticated]) def list_permissions(request): """Returns a list of all available permissions""" permissions = Permission.objects.all() diff --git a/apps/accounts/apps.py b/apps/accounts/apps.py index fe284bd6..603ea847 100644 --- a/apps/accounts/apps.py +++ b/apps/accounts/apps.py @@ -1,6 +1,7 @@ from django.apps import AppConfig + class AccountsConfig(AppConfig): - default_auto_field = 'django.db.models.BigAutoField' - name = 'apps.accounts' + default_auto_field = "django.db.models.BigAutoField" + name = "apps.accounts" verbose_name = "Accounts & Authentication" diff --git a/apps/accounts/migrations/0002_remove_user_channel_groups_user_channel_profiles_and_more.py b/apps/accounts/migrations/0002_remove_user_channel_groups_user_channel_profiles_and_more.py new file mode 100644 index 00000000..2a095773 --- /dev/null +++ b/apps/accounts/migrations/0002_remove_user_channel_groups_user_channel_profiles_and_more.py @@ -0,0 +1,43 @@ +# Generated by Django 5.1.6 on 2025-05-18 15:47 + +from django.db import migrations, models + + +def set_user_level_to_10(apps, schema_editor): + User = apps.get_model("accounts", "User") + User.objects.update(user_level=10) + + +class Migration(migrations.Migration): + + dependencies = [ + ("accounts", "0001_initial"), + ("dispatcharr_channels", "0021_channel_user_level"), + ] + + operations = [ + migrations.RemoveField( + model_name="user", + name="channel_groups", + ), + migrations.AddField( + model_name="user", + name="channel_profiles", + field=models.ManyToManyField( + blank=True, + related_name="users", + to="dispatcharr_channels.channelprofile", + ), + ), + migrations.AddField( + model_name="user", + name="user_level", + field=models.IntegerField(default=0), + ), + migrations.AddField( + model_name="user", + name="custom_properties", + 
field=models.TextField(blank=True, null=True), + ), + migrations.RunPython(set_user_level_to_10), + ] diff --git a/apps/accounts/models.py b/apps/accounts/models.py index 5b24549f..cbaa0f5e 100644 --- a/apps/accounts/models.py +++ b/apps/accounts/models.py @@ -2,17 +2,26 @@ from django.db import models from django.contrib.auth.models import AbstractUser, Permission + class User(AbstractUser): """ Custom user model for Dispatcharr. Inherits from Django's AbstractUser to add additional fields if needed. """ + + class UserLevel(models.IntegerChoices): + STREAMER = 0, "Streamer" + STANDARD = 1, "Standard User" + ADMIN = 10, "Admin" + avatar_config = models.JSONField(default=dict, blank=True, null=True) - channel_groups = models.ManyToManyField( - 'dispatcharr_channels.ChannelGroup', # Updated reference to renamed model + channel_profiles = models.ManyToManyField( + "dispatcharr_channels.ChannelProfile", blank=True, - related_name="users" + related_name="users", ) + user_level = models.IntegerField(default=UserLevel.STREAMER) + custom_properties = models.TextField(null=True, blank=True) def __str__(self): return self.username diff --git a/apps/accounts/permissions.py b/apps/accounts/permissions.py new file mode 100644 index 00000000..62673038 --- /dev/null +++ b/apps/accounts/permissions.py @@ -0,0 +1,56 @@ +from rest_framework.permissions import IsAuthenticated +from .models import User +from dispatcharr.utils import network_access_allowed + + +class Authenticated(IsAuthenticated): + def has_permission(self, request, view): + is_authenticated = super().has_permission(request, view) + network_allowed = network_access_allowed(request, "UI") + + return is_authenticated and network_allowed + + +class IsStandardUser(Authenticated): + def has_permission(self, request, view): + if not super().has_permission(request, view): + return False + + return request.user and request.user.user_level >= User.UserLevel.STANDARD + + +class IsAdmin(Authenticated): + def has_permission(self, request, view): + if not super().has_permission(request, view): + return False + + return request.user.user_level >= 10 + + +class IsOwnerOfObject(Authenticated): + def has_object_permission(self, request, view, obj): + if not super().has_permission(request, view): + return False + + is_admin = IsAdmin().has_permission(request, view) + is_owner = request.user in obj.users.all() + + return is_admin or is_owner + + +permission_classes_by_action = { + "list": [IsStandardUser], + "create": [IsAdmin], + "retrieve": [IsStandardUser], + "update": [IsAdmin], + "partial_update": [IsAdmin], + "destroy": [IsAdmin], +} + +permission_classes_by_method = { + "GET": [IsStandardUser], + "POST": [IsAdmin], + "PATCH": [IsAdmin], + "PUT": [IsAdmin], + "DELETE": [IsAdmin], +} diff --git a/apps/accounts/serializers.py b/apps/accounts/serializers.py index 2346946e..5aa81f3e 100644 --- a/apps/accounts/serializers.py +++ b/apps/accounts/serializers.py @@ -1,13 +1,14 @@ from rest_framework import serializers from django.contrib.auth.models import Group, Permission from .models import User +from apps.channels.models import ChannelProfile # 🔹 Fix for Permission serialization class PermissionSerializer(serializers.ModelSerializer): class Meta: model = Permission - fields = ['id', 'name', 'codename'] + fields = ["id", "name", "codename"] # 🔹 Fix for Group serialization @@ -18,15 +19,53 @@ class GroupSerializer(serializers.ModelSerializer): class Meta: model = Group - fields = ['id', 'name', 'permissions'] + fields = ["id", "name", "permissions"] # 🔹 Fix for 
User serialization class UserSerializer(serializers.ModelSerializer): - groups = serializers.SlugRelatedField( - many=True, queryset=Group.objects.all(), slug_field="name" - ) # ✅ Fix ManyToMany `_meta` error + password = serializers.CharField(write_only=True) + channel_profiles = serializers.PrimaryKeyRelatedField( + queryset=ChannelProfile.objects.all(), many=True, required=False + ) class Meta: model = User - fields = ['id', 'username', 'email', 'groups'] + fields = [ + "id", + "username", + "email", + "user_level", + "password", + "channel_profiles", + "custom_properties", + ] + + def create(self, validated_data): + channel_profiles = validated_data.pop("channel_profiles", []) + + user = User(**validated_data) + user.set_password(validated_data["password"]) + user.is_active = True + user.save() + + user.channel_profiles.set(channel_profiles) + + return user + + def update(self, instance, validated_data): + password = validated_data.pop("password", None) + channel_profiles = validated_data.pop("channel_profiles", None) + + for attr, value in validated_data.items(): + setattr(instance, attr, value) + + if password: + instance.set_password(password) + + instance.save() + + if channel_profiles is not None: + instance.channel_profiles.set(channel_profiles) + + return instance diff --git a/apps/accounts/signals.py b/apps/accounts/signals.py index 3bd1e246..dfc4f425 100644 --- a/apps/accounts/signals.py +++ b/apps/accounts/signals.py @@ -5,6 +5,7 @@ from django.db.models.signals import post_save from django.dispatch import receiver from .models import User + @receiver(post_save, sender=User) def handle_new_user(sender, instance, created, **kwargs): if created: diff --git a/apps/channels/api_views.py b/apps/channels/api_views.py index 890dd247..3ffb98af 100644 --- a/apps/channels/api_views.py +++ b/apps/channels/api_views.py @@ -1,7 +1,7 @@ from rest_framework import viewsets, status from rest_framework.response import Response from rest_framework.views import APIView -from rest_framework.permissions import IsAuthenticated, AllowAny +from rest_framework.permissions import AllowAny from rest_framework.decorators import action from rest_framework.parsers import MultiPartParser, FormParser from drf_yasg.utils import swagger_auto_schema @@ -9,9 +9,33 @@ from drf_yasg import openapi from django.shortcuts import get_object_or_404, get_list_or_404 from django.db import transaction import os, json, requests +from apps.accounts.permissions import ( + Authenticated, + IsAdmin, + IsOwnerOfObject, + permission_classes_by_action, + permission_classes_by_method, +) -from .models import Stream, Channel, ChannelGroup, Logo, ChannelProfile, ChannelProfileMembership, Recording -from .serializers import StreamSerializer, ChannelSerializer, ChannelGroupSerializer, LogoSerializer, ChannelProfileMembershipSerializer, BulkChannelProfileMembershipSerializer, ChannelProfileSerializer, RecordingSerializer +from .models import ( + Stream, + Channel, + ChannelGroup, + Logo, + ChannelProfile, + ChannelProfileMembership, + Recording, +) +from .serializers import ( + StreamSerializer, + ChannelSerializer, + ChannelGroupSerializer, + LogoSerializer, + ChannelProfileMembershipSerializer, + BulkChannelProfileMembershipSerializer, + ChannelProfileSerializer, + RecordingSerializer, +) from .tasks import match_epg_channels import django_filters from django_filters.rest_framework import DjangoFilterBackend @@ -28,30 +52,46 @@ class OrInFilter(django_filters.Filter): """ Custom filter that handles the OR condition instead of AND. 
""" + def filter(self, queryset, value): if value: # Create a Q object for each value and combine them with OR query = Q() - for val in value.split(','): + for val in value.split(","): query |= Q(**{self.field_name: val}) return queryset.filter(query) return queryset + class StreamPagination(PageNumberPagination): page_size = 25 # Default page size - page_size_query_param = 'page_size' # Allow clients to specify page size + page_size_query_param = "page_size" # Allow clients to specify page size max_page_size = 10000 # Prevent excessive page sizes + class StreamFilter(django_filters.FilterSet): - name = django_filters.CharFilter(lookup_expr='icontains') - channel_group_name = OrInFilter(field_name="channel_group__name", lookup_expr="icontains") + name = django_filters.CharFilter(lookup_expr="icontains") + channel_group_name = OrInFilter( + field_name="channel_group__name", lookup_expr="icontains" + ) m3u_account = django_filters.NumberFilter(field_name="m3u_account__id") - m3u_account_name = django_filters.CharFilter(field_name="m3u_account__name", lookup_expr="icontains") - m3u_account_is_active = django_filters.BooleanFilter(field_name="m3u_account__is_active") + m3u_account_name = django_filters.CharFilter( + field_name="m3u_account__name", lookup_expr="icontains" + ) + m3u_account_is_active = django_filters.BooleanFilter( + field_name="m3u_account__is_active" + ) class Meta: model = Stream - fields = ['name', 'channel_group_name', 'm3u_account', 'm3u_account_name', 'm3u_account_is_active'] + fields = [ + "name", + "channel_group_name", + "m3u_account", + "m3u_account_name", + "m3u_account_is_active", + ] + # ───────────────────────────────────────────────────────── # 1) Stream API (CRUD) @@ -59,46 +99,51 @@ class StreamFilter(django_filters.FilterSet): class StreamViewSet(viewsets.ModelViewSet): queryset = Stream.objects.all() serializer_class = StreamSerializer - permission_classes = [IsAuthenticated] pagination_class = StreamPagination filter_backends = [DjangoFilterBackend, SearchFilter, OrderingFilter] filterset_class = StreamFilter - search_fields = ['name', 'channel_group__name'] - ordering_fields = ['name', 'channel_group__name'] - ordering = ['-name'] + search_fields = ["name", "channel_group__name"] + ordering_fields = ["name", "channel_group__name"] + ordering = ["-name"] + + def get_permissions(self): + try: + return [perm() for perm in permission_classes_by_action[self.action]] + except KeyError: + return [Authenticated()] def get_queryset(self): qs = super().get_queryset() # Exclude streams from inactive M3U accounts qs = qs.exclude(m3u_account__is_active=False) - assigned = self.request.query_params.get('assigned') + assigned = self.request.query_params.get("assigned") if assigned is not None: qs = qs.filter(channels__id=assigned) - unassigned = self.request.query_params.get('unassigned') - if unassigned == '1': + unassigned = self.request.query_params.get("unassigned") + if unassigned == "1": qs = qs.filter(channels__isnull=True) - channel_group = self.request.query_params.get('channel_group') + channel_group = self.request.query_params.get("channel_group") if channel_group: - group_names = channel_group.split(',') + group_names = channel_group.split(",") qs = qs.filter(channel_group__name__in=group_names) return qs def list(self, request, *args, **kwargs): - ids = request.query_params.get('ids', None) + ids = request.query_params.get("ids", None) if ids: - ids = ids.split(',') + ids = ids.split(",") streams = get_list_or_404(Stream, id__in=ids) serializer = 
self.get_serializer(streams, many=True) return Response(serializer.data) return super().list(request, *args, **kwargs) - @action(detail=False, methods=['get'], url_path='ids') + @action(detail=False, methods=["get"], url_path="ids") def get_ids(self, request, *args, **kwargs): # Get the filtered queryset queryset = self.get_queryset() @@ -107,26 +152,37 @@ class StreamViewSet(viewsets.ModelViewSet): queryset = self.filter_queryset(queryset) # Return only the IDs from the queryset - stream_ids = queryset.values_list('id', flat=True) + stream_ids = queryset.values_list("id", flat=True) # Return the response with the list of IDs return Response(list(stream_ids)) - @action(detail=False, methods=['get'], url_path='groups') + @action(detail=False, methods=["get"], url_path="groups") def get_groups(self, request, *args, **kwargs): # Get unique ChannelGroup names that are linked to streams - group_names = ChannelGroup.objects.filter(streams__isnull=False).order_by('name').values_list('name', flat=True).distinct() + group_names = ( + ChannelGroup.objects.filter(streams__isnull=False) + .order_by("name") + .values_list("name", flat=True) + .distinct() + ) # Return the response with the list of unique group names return Response(list(group_names)) + # ───────────────────────────────────────────────────────── # 2) Channel Group Management (CRUD) # ───────────────────────────────────────────────────────── class ChannelGroupViewSet(viewsets.ModelViewSet): queryset = ChannelGroup.objects.all() serializer_class = ChannelGroupSerializer - permission_classes = [IsAuthenticated] + + def get_permissions(self): + try: + return [perm() for perm in permission_classes_by_action[self.action]] + except KeyError: + return [Authenticated()] # ───────────────────────────────────────────────────────── @@ -134,68 +190,103 @@ class ChannelGroupViewSet(viewsets.ModelViewSet): # ───────────────────────────────────────────────────────── class ChannelPagination(PageNumberPagination): page_size = 25 # Default page size - page_size_query_param = 'page_size' # Allow clients to specify page size + page_size_query_param = "page_size" # Allow clients to specify page size max_page_size = 10000 # Prevent excessive page sizes - def paginate_queryset(self, queryset, request, view=None): if not request.query_params.get(self.page_query_param): return None # disables pagination, returns full queryset return super().paginate_queryset(queryset, request, view) + class ChannelFilter(django_filters.FilterSet): - name = django_filters.CharFilter(lookup_expr='icontains') - channel_group_name = OrInFilter(field_name="channel_group__name", lookup_expr="icontains") + name = django_filters.CharFilter(lookup_expr="icontains") + channel_group_name = OrInFilter( + field_name="channel_group__name", lookup_expr="icontains" + ) class Meta: model = Channel - fields = ['name', 'channel_group_name',] + fields = [ + "name", + "channel_group_name", + ] + class ChannelViewSet(viewsets.ModelViewSet): queryset = Channel.objects.all() serializer_class = ChannelSerializer - permission_classes = [IsAuthenticated] pagination_class = ChannelPagination filter_backends = [DjangoFilterBackend, SearchFilter, OrderingFilter] filterset_class = ChannelFilter - search_fields = ['name', 'channel_group__name'] - ordering_fields = ['channel_number', 'name', 'channel_group__name'] - ordering = ['-channel_number'] + search_fields = ["name", "channel_group__name"] + ordering_fields = ["channel_number", "name", "channel_group__name"] + ordering = ["-channel_number"] + + def 
get_permissions(self):
+        if self.action in [
+            "edit_bulk",
+            "assign",
+            "from_stream",
+            "from_stream_bulk",
+            "match_epg",
+            "set_epg",
+            "batch_set_epg",
+        ]:
+            return [IsAdmin()]
+
+        try:
+            return [perm() for perm in permission_classes_by_action[self.action]]
+        except KeyError:
+            return [Authenticated()]

     def get_queryset(self):
-        qs = super().get_queryset().select_related(
-            'channel_group',
-            'logo',
-            'epg_data',
-            'stream_profile',
-        ).prefetch_related('streams')
+        qs = (
+            super()
+            .get_queryset()
+            .select_related(
+                "channel_group",
+                "logo",
+                "epg_data",
+                "stream_profile",
+            )
+            .prefetch_related("streams")
+        )

-        channel_group = self.request.query_params.get('channel_group')
+        channel_group = self.request.query_params.get("channel_group")
         if channel_group:
-            group_names = channel_group.split(',')
+            group_names = channel_group.split(",")
             qs = qs.filter(channel_group__name__in=group_names)

+        if self.request.user.user_level < 10:
+            qs = qs.filter(user_level__lte=self.request.user.user_level)
+
         return qs

     def get_serializer_context(self):
         context = super().get_serializer_context()
-        include_streams = self.request.query_params.get('include_streams', 'false') == 'true'
-        context['include_streams'] = include_streams
+        include_streams = (
+            self.request.query_params.get("include_streams", "false") == "true"
+        )
+        context["include_streams"] = include_streams
         return context

-    @action(detail=False, methods=['patch'], url_path='edit/bulk')
+    @action(detail=False, methods=["patch"], url_path="edit/bulk")
     def edit_bulk(self, request):
         data_list = request.data
         if not isinstance(data_list, list):
-            return Response({"error": "Expected a list of channel objects objects"}, status=status.HTTP_400_BAD_REQUEST)
+            return Response(
+                {"error": "Expected a list of channel objects"},
+                status=status.HTTP_400_BAD_REQUEST,
+            )

         updated_channels = []

         try:
             with transaction.atomic():
                 for item in data_list:
-                    channel = Channel.objects.id(id=item.pop('id'))
+                    channel = Channel.objects.get(id=item.pop("id"))

                     for key, value in item.items():
                         setattr(channel, key, value)
@@ -209,7 +300,7 @@ class ChannelViewSet(viewsets.ModelViewSet):

         return Response(response_data, status=status.HTTP_200_OK)

-    @action(detail=False, methods=['get'], url_path='ids')
+    @action(detail=False, methods=["get"], url_path="ids")
     def get_ids(self, request, *args, **kwargs):
         # Get the filtered queryset
         queryset = self.get_queryset()
@@ -218,35 +309,38 @@ class ChannelViewSet(viewsets.ModelViewSet):
         queryset = self.filter_queryset(queryset)

         # Return only the IDs from the queryset
-        channel_ids = queryset.values_list('id', flat=True)
+        channel_ids = queryset.values_list("id", flat=True)

         # Return the response with the list of IDs
         return Response(list(channel_ids))

     @swagger_auto_schema(
-        method='post',
+        method="post",
         operation_description="Auto-assign channel_number in bulk by an ordered list of channel IDs.",
         request_body=openapi.Schema(
             type=openapi.TYPE_OBJECT,
             required=["channel_ids"],
             properties={
-                "starting_number": openapi.Schema(type=openapi.TYPE_NUMBER, description="Starting channel number to assign (can be decimal)"),
+                "starting_number": openapi.Schema(
+                    type=openapi.TYPE_NUMBER,
+                    description="Starting channel number to assign (can be decimal)",
+                ),
                 "channel_ids": openapi.Schema(
                     type=openapi.TYPE_ARRAY,
                     items=openapi.Items(type=openapi.TYPE_INTEGER),
-                    description="Channel IDs to assign"
-                )
-            }
+                    description="Channel IDs to assign",
+                ),
+            },
         ),
-        responses={200: "Channels have been auto-assigned!"}
+        responses={200: 
"Channels have been auto-assigned!"}, ) - @action(detail=False, methods=['post'], url_path='assign') + @action(detail=False, methods=["post"], url_path="assign") def assign(self, request): with transaction.atomic(): - channel_ids = request.data.get('channel_ids', []) + channel_ids = request.data.get("channel_ids", []) # Ensure starting_number is processed as a float try: - channel_num = float(request.data.get('starting_number', 1)) + channel_num = float(request.data.get("starting_number", 1)) except (ValueError, TypeError): channel_num = 1.0 @@ -254,10 +348,12 @@ class ChannelViewSet(viewsets.ModelViewSet): Channel.objects.filter(id=channel_id).update(channel_number=channel_num) channel_num = channel_num + 1 - return Response({"message": "Channels have been auto-assigned!"}, status=status.HTTP_200_OK) + return Response( + {"message": "Channels have been auto-assigned!"}, status=status.HTTP_200_OK + ) @swagger_auto_schema( - method='post', + method="post", operation_description=( "Create a new channel from an existing stream. " "If 'channel_number' is provided, it will be used (if available); " @@ -272,71 +368,81 @@ class ChannelViewSet(viewsets.ModelViewSet): ), "channel_number": openapi.Schema( type=openapi.TYPE_NUMBER, - description="(Optional) Desired channel number. Must not be in use." + description="(Optional) Desired channel number. Must not be in use.", ), "name": openapi.Schema( type=openapi.TYPE_STRING, description="Desired channel name" - ) - } + ), + }, ), - responses={201: ChannelSerializer()} + responses={201: ChannelSerializer()}, ) - @action(detail=False, methods=['post'], url_path='from-stream') + @action(detail=False, methods=["post"], url_path="from-stream") def from_stream(self, request): - stream_id = request.data.get('stream_id') + stream_id = request.data.get("stream_id") if not stream_id: - return Response({"error": "Missing stream_id"}, status=status.HTTP_400_BAD_REQUEST) + return Response( + {"error": "Missing stream_id"}, status=status.HTTP_400_BAD_REQUEST + ) stream = get_object_or_404(Stream, pk=stream_id) channel_group = stream.channel_group - name = request.data.get('name') + name = request.data.get("name") if name is None: name = stream.name # Check if client provided a channel_number; if not, auto-assign one. - stream_custom_props = json.loads(stream.custom_properties) if stream.custom_properties else {} + stream_custom_props = ( + json.loads(stream.custom_properties) if stream.custom_properties else {} + ) channel_number = None - if 'tvg-chno' in stream_custom_props: - channel_number = float(stream_custom_props['tvg-chno']) - elif 'channel-number' in stream_custom_props: - channel_number = float(stream_custom_props['channel-number']) + if "tvg-chno" in stream_custom_props: + channel_number = float(stream_custom_props["tvg-chno"]) + elif "channel-number" in stream_custom_props: + channel_number = float(stream_custom_props["channel-number"]) if channel_number is None: - provided_number = request.data.get('channel_number') + provided_number = request.data.get("channel_number") if provided_number is None: channel_number = Channel.get_next_available_channel_number() else: try: channel_number = float(provided_number) except ValueError: - return Response({"error": "channel_number must be an integer."}, status=status.HTTP_400_BAD_REQUEST) + return Response( + {"error": "channel_number must be an integer."}, + status=status.HTTP_400_BAD_REQUEST, + ) # If the provided number is already used, return an error. 
if Channel.objects.filter(channel_number=channel_number).exists(): return Response( - {"error": f"Channel number {channel_number} is already in use. Please choose a different number."}, - status=status.HTTP_400_BAD_REQUEST + { + "error": f"Channel number {channel_number} is already in use. Please choose a different number." + }, + status=status.HTTP_400_BAD_REQUEST, ) - #Get the tvc_guide_stationid from custom properties if it exists + # Get the tvc_guide_stationid from custom properties if it exists tvc_guide_stationid = None - if 'tvc-guide-stationid' in stream_custom_props: - tvc_guide_stationid = stream_custom_props['tvc-guide-stationid'] - - + if "tvc-guide-stationid" in stream_custom_props: + tvc_guide_stationid = stream_custom_props["tvc-guide-stationid"] channel_data = { - 'channel_number': channel_number, - 'name': name, - 'tvg_id': stream.tvg_id, - 'tvc_guide_stationid': tvc_guide_stationid, - 'channel_group_id': channel_group.id, - 'streams': [stream_id], + "channel_number": channel_number, + "name": name, + "tvg_id": stream.tvg_id, + "tvc_guide_stationid": tvc_guide_stationid, + "streams": [stream_id], } + # Only add channel_group_id if the stream has a channel group + if channel_group: + channel_data["channel_group_id"] = channel_group.id + if stream.logo_url: - logo, _ = Logo.objects.get_or_create(url=stream.logo_url, defaults={ - "name": stream.name or stream.tvg_id - }) + logo, _ = Logo.objects.get_or_create( + url=stream.logo_url, defaults={"name": stream.name or stream.tvg_id} + ) channel_data["logo_id"] = logo.id # Attempt to find existing EPGs with the same tvg-id @@ -351,7 +457,7 @@ class ChannelViewSet(viewsets.ModelViewSet): return Response(serializer.data, status=status.HTTP_201_CREATED) @swagger_auto_schema( - method='post', + method="post", operation_description=( "Bulk create channels from existing streams. For each object, if 'channel_number' is provided, " "it is used (if available); otherwise, the next available number is auto-assigned. " @@ -364,31 +470,37 @@ class ChannelViewSet(viewsets.ModelViewSet): required=["stream_id"], properties={ "stream_id": openapi.Schema( - type=openapi.TYPE_INTEGER, description="ID of the stream to link" + type=openapi.TYPE_INTEGER, + description="ID of the stream to link", ), "channel_number": openapi.Schema( type=openapi.TYPE_NUMBER, - description="(Optional) Desired channel number. Must not be in use." + description="(Optional) Desired channel number. Must not be in use.", ), "name": openapi.Schema( type=openapi.TYPE_STRING, description="Desired channel name" - ) - } - ) + ), + }, + ), ), - responses={201: "Bulk channels created"} + responses={201: "Bulk channels created"}, ) - @action(detail=False, methods=['post'], url_path='from-stream/bulk') + @action(detail=False, methods=["post"], url_path="from-stream/bulk") def from_stream_bulk(self, request): data_list = request.data if not isinstance(data_list, list): - return Response({"error": "Expected a list of channel objects"}, status=status.HTTP_400_BAD_REQUEST) + return Response( + {"error": "Expected a list of channel objects"}, + status=status.HTTP_400_BAD_REQUEST, + ) created_channels = [] errors = [] # Gather current used numbers once. 
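+        # NOTE: besides the numbers already in the DB, this set also collects the
+        # numbers handed out while processing this batch (see used_numbers.add
+        # below), so duplicates within a single request are rejected before any
+        # channel rows are written.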
-        used_numbers = set(Channel.objects.all().values_list('channel_number', flat=True))
+        used_numbers = set(
+            Channel.objects.all().values_list("channel_number", flat=True)
+        )
         next_number = 1

         def get_auto_number():
@@ -403,9 +515,14 @@ class ChannelViewSet(viewsets.ModelViewSet):
         streams_map = []
         logo_map = []
         for item in data_list:
-            stream_id = item.get('stream_id')
+            stream_id = item.get("stream_id")
             if not all([stream_id]):
-                errors.append({"item": item, "error": "Missing required fields: stream_id and name are required."})
+                errors.append(
+                    {
+                        "item": item,
+                        "error": "Missing required field: stream_id.",
+                    }
+                )
                 continue

             try:
@@ -414,48 +531,68 @@ class ChannelViewSet(viewsets.ModelViewSet):
                 errors.append({"item": item, "error": str(e)})
                 continue

-            name = item.get('name')
+            name = item.get("name")
             if name is None:
                 name = stream.name

             channel_group = stream.channel_group

-            stream_custom_props = json.loads(stream.custom_properties) if stream.custom_properties else {}
+            stream_custom_props = (
+                json.loads(stream.custom_properties) if stream.custom_properties else {}
+            )

             channel_number = None
-            if 'tvg-chno' in stream_custom_props:
-                channel_number = float(stream_custom_props['tvg-chno'])
-            elif 'channel-number' in stream_custom_props:
-                channel_number = float(stream_custom_props['channel-number'])
+            if "tvg-chno" in stream_custom_props:
+                channel_number = float(stream_custom_props["tvg-chno"])
+            elif "channel-number" in stream_custom_props:
+                channel_number = float(stream_custom_props["channel-number"])

             # Get the tvc_guide_stationid from custom properties if it exists
             tvc_guide_stationid = None
-            if 'tvc-guide-stationid' in stream_custom_props:
-                tvc_guide_stationid = stream_custom_props['tvc-guide-stationid']
+            if "tvc-guide-stationid" in stream_custom_props:
+                tvc_guide_stationid = stream_custom_props["tvc-guide-stationid"]

             # Determine channel number: if provided, use it (if free); else auto assign. 
if channel_number is None:
-                provided_number = item.get('channel_number')
+                provided_number = item.get("channel_number")
                 if provided_number is None:
                     channel_number = get_auto_number()
                 else:
                     try:
                         channel_number = float(provided_number)
                     except ValueError:
-                        errors.append({"item": item, "error": "channel_number must be an integer."})
+                        errors.append(
+                            {
+                                "item": item,
+                                "error": "channel_number must be a number.",
+                            }
+                        )
                         continue

-                    if channel_number in used_numbers or Channel.objects.filter(channel_number=channel_number).exists():
-                        errors.append({"item": item, "error": f"Channel number {channel_number} is already in use."})
+                    if (
+                        channel_number in used_numbers
+                        or Channel.objects.filter(
+                            channel_number=channel_number
+                        ).exists()
+                    ):
+                        errors.append(
+                            {
+                                "item": item,
+                                "error": f"Channel number {channel_number} is already in use.",
+                            }
+                        )
                         continue

             used_numbers.add(channel_number)

             channel_data = {
                 "channel_number": channel_number,
                 "name": name,
-                'tvc_guide_stationid': tvc_guide_stationid,
+                "tvc_guide_stationid": tvc_guide_stationid,
                 "tvg_id": stream.tvg_id,
-                "channel_group_id": channel_group.id,
             }

+            # Only add channel_group_id if the stream has a channel group
+            if channel_group:
+                channel_data["channel_group_id"] = channel_group.id
+
             # Attempt to find existing EPGs with the same tvg-id
             epgs = EPGData.objects.filter(tvg_id=stream.tvg_id)
             if epgs:
@@ -469,10 +606,12 @@ class ChannelViewSet(viewsets.ModelViewSet):
             streams_map.append([stream_id])

             if stream.logo_url:
-                logos_to_create.append(Logo(
-                    url=stream.logo_url,
-                    name=stream.name or stream.tvg_id,
-                ))
+                logos_to_create.append(
+                    Logo(
+                        url=stream.logo_url,
+                        name=stream.name or stream.tvg_id,
+                    )
+                )
                 logo_map.append(stream.logo_url)
             else:
                 logo_map.append(None)
@@ -486,7 +625,12 @@ class ChannelViewSet(viewsets.ModelViewSet):
             if logos_to_create:
                 Logo.objects.bulk_create(logos_to_create, ignore_conflicts=True)

-            channel_logos = {logo.url: logo for logo in Logo.objects.filter(url__in=[url for url in logo_map if url is not None])}
+            channel_logos = {
+                logo.url: logo
+                for logo in Logo.objects.filter(
+                    url__in=[url for url in logo_map if url is not None]
+                )
+            }

             profiles = ChannelProfile.objects.all()
             channel_profile_memberships = []
@@ -495,17 +639,23 @@ class ChannelViewSet(viewsets.ModelViewSet):
             created_channels = Channel.objects.bulk_create(channels_to_create)

             update = []
-            for channel, stream_ids, logo_url in zip(created_channels, streams_map, logo_map):
+            for channel, stream_ids, logo_url in zip(
+                created_channels, streams_map, logo_map
+            ):
                 if logo_url:
                     channel.logo = channel_logos[logo_url]
                     update.append(channel)

                 channel_profile_memberships = channel_profile_memberships + [
-                    ChannelProfileMembership(channel_profile=profile, channel=channel)
+                    ChannelProfileMembership(
+                        channel_profile=profile, channel=channel
+                    )
                     for profile in profiles
                 ]

-            ChannelProfileMembership.objects.bulk_create(channel_profile_memberships)
-            Channel.objects.bulk_update(update, ['logo'])
+            ChannelProfileMembership.objects.bulk_create(
+                channel_profile_memberships
+            )
+            Channel.objects.bulk_update(update, ["logo"])

             for channel, stream_ids in zip(created_channels, streams_map):
                 channel.streams.set(stream_ids)
@@ -520,54 +670,60 @@ class ChannelViewSet(viewsets.ModelViewSet):
     # 6) EPG Fuzzy Matching
     # ─────────────────────────────────────────────────────────
     @swagger_auto_schema(
-        method='post',
+        method="post",
         operation_description="Kick off a Celery task that tries to fuzzy-match channels with EPG data.",
-        responses={202: "EPG 
matching task initiated"} + responses={202: "EPG matching task initiated"}, ) - @action(detail=False, methods=['post'], url_path='match-epg') + @action(detail=False, methods=["post"], url_path="match-epg") def match_epg(self, request): match_epg_channels.delay() - return Response({"message": "EPG matching task initiated."}, status=status.HTTP_202_ACCEPTED) + return Response( + {"message": "EPG matching task initiated."}, status=status.HTTP_202_ACCEPTED + ) # ───────────────────────────────────────────────────────── # 7) Set EPG and Refresh # ───────────────────────────────────────────────────────── @swagger_auto_schema( - method='post', + method="post", operation_description="Set EPG data for a channel and refresh program data", request_body=openapi.Schema( type=openapi.TYPE_OBJECT, - required=['epg_data_id'], + required=["epg_data_id"], properties={ - 'epg_data_id': openapi.Schema( + "epg_data_id": openapi.Schema( type=openapi.TYPE_INTEGER, description="EPG data ID to link" ) - } + }, ), - responses={200: "EPG data linked and refresh triggered"} + responses={200: "EPG data linked and refresh triggered"}, ) - @action(detail=True, methods=['post'], url_path='set-epg') + @action(detail=True, methods=["post"], url_path="set-epg") def set_epg(self, request, pk=None): channel = self.get_object() - epg_data_id = request.data.get('epg_data_id') + epg_data_id = request.data.get("epg_data_id") # Handle removing EPG link - if epg_data_id in (None, '', '0', 0): + if epg_data_id in (None, "", "0", 0): channel.epg_data = None - channel.save(update_fields=['epg_data']) - return Response({"message": f"EPG data removed from channel {channel.name}"}) + channel.save(update_fields=["epg_data"]) + return Response( + {"message": f"EPG data removed from channel {channel.name}"} + ) try: # Get the EPG data object from apps.epg.models import EPGData + epg_data = EPGData.objects.get(pk=epg_data_id) # Set the EPG data and save channel.epg_data = epg_data - channel.save(update_fields=['epg_data']) + channel.save(update_fields=["epg_data"]) # Explicitly trigger program refresh for this EPG from apps.epg.tasks import parse_programs_for_tvg_id + task_result = parse_programs_for_tvg_id.delay(epg_data.id) # Prepare response with task status info @@ -575,45 +731,47 @@ class ChannelViewSet(viewsets.ModelViewSet): if task_result.result == "Task already running": status_message = "EPG refresh already in progress" - return Response({ - "message": f"EPG data set to {epg_data.tvg_id} for channel {channel.name}. {status_message}.", - "channel": self.get_serializer(channel).data, - "task_status": status_message - }) + return Response( + { + "message": f"EPG data set to {epg_data.tvg_id} for channel {channel.name}. 
{status_message}.", + "channel": self.get_serializer(channel).data, + "task_status": status_message, + } + ) except Exception as e: return Response({"error": str(e)}, status=400) @swagger_auto_schema( - method='post', + method="post", operation_description="Associate multiple channels with EPG data without triggering a full refresh", request_body=openapi.Schema( type=openapi.TYPE_OBJECT, properties={ - 'associations': openapi.Schema( + "associations": openapi.Schema( type=openapi.TYPE_ARRAY, items=openapi.Schema( type=openapi.TYPE_OBJECT, properties={ - 'channel_id': openapi.Schema(type=openapi.TYPE_INTEGER), - 'epg_data_id': openapi.Schema(type=openapi.TYPE_INTEGER) - } - ) + "channel_id": openapi.Schema(type=openapi.TYPE_INTEGER), + "epg_data_id": openapi.Schema(type=openapi.TYPE_INTEGER), + }, + ), ) - } + }, ), - responses={200: "EPG data linked for multiple channels"} + responses={200: "EPG data linked for multiple channels"}, ) - @action(detail=False, methods=['post'], url_path='batch-set-epg') + @action(detail=False, methods=["post"], url_path="batch-set-epg") def batch_set_epg(self, request): """Efficiently associate multiple channels with EPG data at once.""" - associations = request.data.get('associations', []) + associations = request.data.get("associations", []) channels_updated = 0 programs_refreshed = 0 unique_epg_ids = set() for assoc in associations: - channel_id = assoc.get('channel_id') - epg_data_id = assoc.get('epg_data_id') + channel_id = assoc.get("channel_id") + epg_data_id = assoc.get("epg_data_id") if not channel_id: continue @@ -624,7 +782,7 @@ class ChannelViewSet(viewsets.ModelViewSet): # Set the EPG data channel.epg_data_id = epg_data_id - channel.save(update_fields=['epg_data']) + channel.save(update_fields=["epg_data"]) channels_updated += 1 # Track unique EPG data IDs @@ -634,27 +792,37 @@ class ChannelViewSet(viewsets.ModelViewSet): except Channel.DoesNotExist: logger.error(f"Channel with ID {channel_id} not found") except Exception as e: - logger.error(f"Error setting EPG data for channel {channel_id}: {str(e)}") + logger.error( + f"Error setting EPG data for channel {channel_id}: {str(e)}" + ) # Trigger program refresh for unique EPG data IDs from apps.epg.tasks import parse_programs_for_tvg_id + for epg_id in unique_epg_ids: parse_programs_for_tvg_id.delay(epg_id) programs_refreshed += 1 + return Response( + { + "success": True, + "channels_updated": channels_updated, + "programs_refreshed": programs_refreshed, + } + ) - return Response({ - 'success': True, - 'channels_updated': channels_updated, - 'programs_refreshed': programs_refreshed - }) - # ───────────────────────────────────────────────────────── # 4) Bulk Delete Streams # ───────────────────────────────────────────────────────── class BulkDeleteStreamsAPIView(APIView): - permission_classes = [IsAuthenticated] + def get_permissions(self): + try: + return [ + perm() for perm in permission_classes_by_method[self.request.method] + ] + except KeyError: + return [Authenticated()] @swagger_auto_schema( operation_description="Bulk delete streams by ID", @@ -665,23 +833,32 @@ class BulkDeleteStreamsAPIView(APIView): "stream_ids": openapi.Schema( type=openapi.TYPE_ARRAY, items=openapi.Items(type=openapi.TYPE_INTEGER), - description="Stream IDs to delete" + description="Stream IDs to delete", ) }, ), - responses={204: "Streams deleted"} + responses={204: "Streams deleted"}, ) def delete(self, request, *args, **kwargs): - stream_ids = request.data.get('stream_ids', []) + stream_ids = 
request.data.get("stream_ids", [])
         Stream.objects.filter(id__in=stream_ids).delete()
-        return Response({"message": "Streams deleted successfully!"}, status=status.HTTP_204_NO_CONTENT)
+        return Response(
+            {"message": "Streams deleted successfully!"},
+            status=status.HTTP_204_NO_CONTENT,
+        )


 # ─────────────────────────────────────────────────────────
 # 5) Bulk Delete Channels
 # ─────────────────────────────────────────────────────────
 class BulkDeleteChannelsAPIView(APIView):
-    permission_classes = [IsAuthenticated]
+    def get_permissions(self):
+        try:
+            return [
+                perm() for perm in permission_classes_by_method[self.request.method]
+            ]
+        except KeyError:
+            return [Authenticated()]

     @swagger_auto_schema(
         operation_description="Bulk delete channels by ID",
@@ -692,44 +869,66 @@ class BulkDeleteChannelsAPIView(APIView):
                 "channel_ids": openapi.Schema(
                     type=openapi.TYPE_ARRAY,
                     items=openapi.Items(type=openapi.TYPE_INTEGER),
-                    description="Channel IDs to delete"
+                    description="Channel IDs to delete",
                 )
             },
         ),
-        responses={204: "Channels deleted"}
+        responses={204: "Channels deleted"},
     )
     def delete(self, request):
-        channel_ids = request.data.get('channel_ids', [])
+        channel_ids = request.data.get("channel_ids", [])
         Channel.objects.filter(id__in=channel_ids).delete()
-        return Response({"message": "Channels deleted"}, status=status.HTTP_204_NO_CONTENT)
+        return Response(
+            {"message": "Channels deleted"}, status=status.HTTP_204_NO_CONTENT
+        )

+
 class LogoViewSet(viewsets.ModelViewSet):
-    permission_classes = [IsAuthenticated]
     queryset = Logo.objects.all()
     serializer_class = LogoSerializer
     parser_classes = (MultiPartParser, FormParser)

-    @action(detail=False, methods=['post'])
-    def upload(self, request):
-        if 'file' not in request.FILES:
-            return Response({'error': 'No file uploaded'}, status=status.HTTP_400_BAD_REQUEST)
+    def get_permissions(self):
+        if self.action in ["upload"]:
+            return [IsAdmin()]

-        file = request.FILES['file']
+        if self.action in ["cache"]:
+            return [AllowAny()]
+
+        try:
+            return [perm() for perm in permission_classes_by_action[self.action]]
+        except KeyError:
+            return [Authenticated()]
+
+    @action(detail=False, methods=["post"])
+    def upload(self, request):
+        if "file" not in request.FILES:
+            return Response(
+                {"error": "No file uploaded"}, status=status.HTTP_400_BAD_REQUEST
+            )
+
+        file = request.FILES["file"]
         file_name = file.name
-        file_path = os.path.join('/data/logos', file_name)
+        # basename() guards against path traversal via a crafted upload filename
+        file_path = os.path.join("/data/logos", os.path.basename(file_name))
         os.makedirs(os.path.dirname(file_path), exist_ok=True)

-        with open(file_path, 'wb+') as destination:
+        with open(file_path, "wb+") as destination:
             for chunk in file.chunks():
                 destination.write(chunk)

-        logo, _ = Logo.objects.get_or_create(url=file_path, defaults={
-            "name": file_name,
-        })
+        logo, _ = Logo.objects.get_or_create(
+            url=file_path,
+            defaults={
+                "name": file_name,
+            },
+        )

-        return Response({'id': logo.id, 'name': logo.name, 'url': logo.url}, status=status.HTTP_201_CREATED)
+        return Response(
+            {"id": logo.id, "name": logo.name, "url": logo.url},
+            status=status.HTTP_201_CREATED,
+        )

-    @action(detail=True, methods=['get'], permission_classes=[AllowAny])
+    @action(detail=True, methods=["get"], permission_classes=[AllowAny])
     def cache(self, request, pk=None):
         """Streams the logo file, whether it's local or remote."""
         logo = self.get_object()
@@ -742,11 +941,15 @@ class LogoViewSet(viewsets.ModelViewSet):
         # Get proper mime type (first item of the tuple)
         content_type, _ = mimetypes.guess_type(logo_url)
         if not content_type:
-            
content_type = 'image/jpeg' # Default to a common image type + content_type = "image/jpeg" # Default to a common image type # Use context manager and set Content-Disposition to inline - response = StreamingHttpResponse(open(logo_url, "rb"), content_type=content_type) - response['Content-Disposition'] = 'inline; filename="{}"'.format(os.path.basename(logo_url)) + response = StreamingHttpResponse( + open(logo_url, "rb"), content_type=content_type + ) + response["Content-Disposition"] = 'inline; filename="{}"'.format( + os.path.basename(logo_url) + ) return response else: # Remote image @@ -754,7 +957,7 @@ class LogoViewSet(viewsets.ModelViewSet): remote_response = requests.get(logo_url, stream=True) if remote_response.status_code == 200: # Try to get content type from response headers first - content_type = remote_response.headers.get('Content-Type') + content_type = remote_response.headers.get("Content-Type") # If no content type in headers or it's empty, guess based on URL if not content_type: @@ -762,43 +965,89 @@ class LogoViewSet(viewsets.ModelViewSet): # If still no content type, default to common image type if not content_type: - content_type = 'image/jpeg' + content_type = "image/jpeg" - response = StreamingHttpResponse(remote_response.iter_content(chunk_size=8192), content_type=content_type) - response['Content-Disposition'] = 'inline; filename="{}"'.format(os.path.basename(logo_url)) + response = StreamingHttpResponse( + remote_response.iter_content(chunk_size=8192), + content_type=content_type, + ) + response["Content-Disposition"] = 'inline; filename="{}"'.format( + os.path.basename(logo_url) + ) return response raise Http404("Remote image not found") except requests.RequestException: raise Http404("Error fetching remote image") + class ChannelProfileViewSet(viewsets.ModelViewSet): queryset = ChannelProfile.objects.all() serializer_class = ChannelProfileSerializer - permission_classes = [IsAuthenticated] + + def get_queryset(self): + user = self.request.user + + # If user_level is 10, return all ChannelProfiles + if hasattr(user, "user_level") and user.user_level == 10: + return ChannelProfile.objects.all() + + # Otherwise, return only ChannelProfiles related to the user + return self.request.user.channel_profiles.all() + + def get_permissions(self): + try: + return [perm() for perm in permission_classes_by_action[self.action]] + except KeyError: + return [Authenticated()] + class GetChannelStreamsAPIView(APIView): + def get_permissions(self): + try: + return [ + perm() for perm in permission_classes_by_method[self.request.method] + ] + except KeyError: + return [Authenticated()] + def get(self, request, channel_id): channel = get_object_or_404(Channel, id=channel_id) # Order the streams by channelstream__order to match the order in the channel view - streams = channel.streams.all().order_by('channelstream__order') + streams = channel.streams.all().order_by("channelstream__order") serializer = StreamSerializer(streams, many=True) return Response(serializer.data) + class UpdateChannelMembershipAPIView(APIView): + permission_classes = [IsOwnerOfObject] + def patch(self, request, profile_id, channel_id): """Enable or disable a channel for a specific group""" channel_profile = get_object_or_404(ChannelProfile, id=profile_id) channel = get_object_or_404(Channel, id=channel_id) - membership = get_object_or_404(ChannelProfileMembership, channel_profile=channel_profile, channel=channel) + membership = get_object_or_404( + ChannelProfileMembership, channel_profile=channel_profile, 
channel=channel + ) - serializer = ChannelProfileMembershipSerializer(membership, data=request.data, partial=True) + serializer = ChannelProfileMembershipSerializer( + membership, data=request.data, partial=True + ) if serializer.is_valid(): serializer.save() return Response(serializer.data, status=status.HTTP_200_OK) return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) + class BulkUpdateChannelMembershipAPIView(APIView): + def get_permissions(self): + try: + return [ + perm() for perm in permission_classes_by_method[self.request.method] + ] + except KeyError: + return [Authenticated()] + def patch(self, request, profile_id): """Bulk enable or disable channels for a specific profile""" # Get the channel profile @@ -808,30 +1057,34 @@ class BulkUpdateChannelMembershipAPIView(APIView): serializer = BulkChannelProfileMembershipSerializer(data=request.data) if serializer.is_valid(): - updates = serializer.validated_data['channels'] - channel_ids = [entry['channel_id'] for entry in updates] - + updates = serializer.validated_data["channels"] + channel_ids = [entry["channel_id"] for entry in updates] memberships = ChannelProfileMembership.objects.filter( - channel_profile=channel_profile, - channel_id__in=channel_ids + channel_profile=channel_profile, channel_id__in=channel_ids ) membership_dict = {m.channel.id: m for m in memberships} for entry in updates: - channel_id = entry['channel_id'] - enabled_status = entry['enabled'] + channel_id = entry["channel_id"] + enabled_status = entry["enabled"] if channel_id in membership_dict: membership_dict[channel_id].enabled = enabled_status - ChannelProfileMembership.objects.bulk_update(memberships, ['enabled']) + ChannelProfileMembership.objects.bulk_update(memberships, ["enabled"]) return Response({"status": "success"}, status=status.HTTP_200_OK) return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) + class RecordingViewSet(viewsets.ModelViewSet): queryset = Recording.objects.all() serializer_class = RecordingSerializer - permission_classes = [IsAuthenticated] + + def get_permissions(self): + try: + return [perm() for perm in permission_classes_by_action[self.action]] + except KeyError: + return [Authenticated()] diff --git a/apps/channels/migrations/0021_channel_user_level.py b/apps/channels/migrations/0021_channel_user_level.py new file mode 100644 index 00000000..2aa55eeb --- /dev/null +++ b/apps/channels/migrations/0021_channel_user_level.py @@ -0,0 +1,18 @@ +# Generated by Django 5.1.6 on 2025-05-18 14:31 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('dispatcharr_channels', '0020_alter_channel_channel_number'), + ] + + operations = [ + migrations.AddField( + model_name='channel', + name='user_level', + field=models.IntegerField(default=0), + ), + ] diff --git a/apps/channels/models.py b/apps/channels/models.py index 191eb45e..1bcbcc41 100644 --- a/apps/channels/models.py +++ b/apps/channels/models.py @@ -9,12 +9,14 @@ from datetime import datetime import hashlib import json from apps.epg.models import EPGData +from apps.accounts.models import User logger = logging.getLogger(__name__) # If you have an M3UAccount model in apps.m3u, you can still import it: from apps.m3u.models import M3UAccount + # Add fallback functions if Redis isn't available def get_total_viewers(channel_id): """Get viewer count from Redis or return 0 if Redis isn't available""" @@ -25,6 +27,7 @@ def get_total_viewers(channel_id): except Exception: return 0 + class 
diff --git a/apps/channels/migrations/0021_channel_user_level.py b/apps/channels/migrations/0021_channel_user_level.py
new file mode 100644
index 00000000..2aa55eeb
--- /dev/null
+++ b/apps/channels/migrations/0021_channel_user_level.py
@@ -0,0 +1,18 @@
+# Generated by Django 5.1.6 on 2025-05-18 14:31
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ('dispatcharr_channels', '0020_alter_channel_channel_number'),
+    ]
+
+    operations = [
+        migrations.AddField(
+            model_name='channel',
+            name='user_level',
+            field=models.IntegerField(default=0),
+        ),
+    ]
diff --git a/apps/channels/models.py b/apps/channels/models.py
index 191eb45e..1bcbcc41 100644
--- a/apps/channels/models.py
+++ b/apps/channels/models.py
@@ -9,12 +9,14 @@ from datetime import datetime
 import hashlib
 import json
 from apps.epg.models import EPGData
+from apps.accounts.models import User

 logger = logging.getLogger(__name__)

 # If you have an M3UAccount model in apps.m3u, you can still import it:
 from apps.m3u.models import M3UAccount

+
 # Add fallback functions if Redis isn't available
 def get_total_viewers(channel_id):
     """Get viewer count from Redis or return 0 if Redis isn't available"""
@@ -25,6 +27,7 @@ def get_total_viewers(channel_id):
     except Exception:
         return 0

+
 class ChannelGroup(models.Model):
     name = models.TextField(unique=True, db_index=True)
@@ -45,10 +48,12 @@ class ChannelGroup(models.Model):

         return created_objects

+
 class Stream(models.Model):
     """
     Represents a single stream (e.g. from an M3U source or custom URL).
     """
+
     name = models.CharField(max_length=255, default="Default Stream")
     url = models.URLField(max_length=2000, blank=True, null=True)
     m3u_account = models.ForeignKey(
@@ -60,7 +65,7 @@
     )
     logo_url = models.TextField(blank=True, null=True)
     tvg_id = models.CharField(max_length=255, blank=True, null=True)
-    local_file = models.FileField(upload_to='uploads/', blank=True, null=True)
+    local_file = models.FileField(upload_to="uploads/", blank=True, null=True)
     current_viewers = models.PositiveIntegerField(default=0)
     updated_at = models.DateTimeField(auto_now=True)
     channel_group = models.ForeignKey(
@@ -68,18 +73,18 @@
         on_delete=models.SET_NULL,
         null=True,
         blank=True,
-        related_name='streams'
+        related_name="streams",
     )
     stream_profile = models.ForeignKey(
         StreamProfile,
         null=True,
         blank=True,
         on_delete=models.SET_NULL,
-        related_name='streams'
+        related_name="streams",
     )
     is_custom = models.BooleanField(
         default=False,
-        help_text="Whether this is a user-created stream or from an M3U account"
+        help_text="Whether this is a user-created stream or from an M3U account",
     )
     stream_hash = models.CharField(
         max_length=255,
@@ -95,7 +100,7 @@
         # If you use m3u_account, you might do unique_together = ('name','url','m3u_account')
         verbose_name = "Stream"
         verbose_name_plural = "Streams"
-        ordering = ['-updated_at']
+        ordering = ["-updated_at"]

     def __str__(self):
         return self.name or self.url or f"Stream ID {self.id}"
@@ -105,14 +110,14 @@ class Stream(models.Model):
         if keys is None:
             keys = CoreSettings.get_m3u_hash_key().split(",")

-        stream_parts = {
-            "name": name, "url": url, "tvg_id": tvg_id
-        }
+        stream_parts = {"name": name, "url": url, "tvg_id": tvg_id}

         hash_parts = {key: stream_parts[key] for key in keys if key in stream_parts}

         # Serialize and hash the dictionary
-        serialized_obj = json.dumps(hash_parts, sort_keys=True) # sort_keys ensures consistent ordering
+        serialized_obj = json.dumps(
+            hash_parts, sort_keys=True
+        )  # sort_keys ensures consistent ordering
         hash_object = hashlib.sha256(serialized_obj.encode())

         return hash_object.hexdigest()
@@ -128,13 +133,17 @@ class Stream(models.Model):
             return stream, False  # False means it was updated, not created
         except cls.DoesNotExist:
             # If it doesn't exist, create a new object with the given hash
-            fields_to_update['stream_hash'] = hash_value  # Make sure the hash field is set
+            fields_to_update["stream_hash"] = (
+                hash_value  # Make sure the hash field is set
+            )
             stream = cls.objects.create(**fields_to_update)
             return stream, True  # True means it was created

     # @TODO: honor stream's stream profile
     def get_stream_profile(self):
-        stream_profile = StreamProfile.objects.get(id=CoreSettings.get_default_stream_profile_id())
+        stream_profile = StreamProfile.objects.get(
+            id=CoreSettings.get_default_stream_profile_id()
+        )

         return stream_profile

@@ -152,7 +161,9 @@ class Stream(models.Model):
         m3u_account = self.m3u_account
         m3u_profiles = m3u_account.profiles.all()
         default_profile = next((obj for obj in m3u_profiles if obj.is_default), None)
-        profiles = [default_profile] + [obj for obj in m3u_profiles if not obj.is_default]
+        profiles = [default_profile] + [
+            obj for obj in m3u_profiles if not obj.is_default
+        ]

         for profile in profiles:
             logger.info(profile)
@@ -167,13 +178,19 @@ class Stream(models.Model):
             if profile.max_streams == 0 or current_connections < profile.max_streams:
                 # Start a new stream
                 redis_client.set(f"channel_stream:{self.id}", self.id)
-                redis_client.set(f"stream_profile:{self.id}", profile.id)  # Store only the matched profile
+                redis_client.set(
+                    f"stream_profile:{self.id}", profile.id
+                )  # Store only the matched profile

                 # Increment connection count for profiles with limits
                 if profile.max_streams > 0:
                     redis_client.incr(profile_connections_key)

-                return self.id, profile.id, None  # Return newly assigned stream and matched profile
+                return (
+                    self.id,
+                    profile.id,
+                    None,
+                )  # Return newly assigned stream and matched profile

         # 4. No available streams
         return None, None, None
@@ -194,7 +211,9 @@ class Stream(models.Model):
             redis_client.delete(f"stream_profile:{stream_id}")  # Remove profile association

             profile_id = int(profile_id)
-            logger.debug(f"Found profile ID {profile_id} associated with stream {stream_id}")
+            logger.debug(
+                f"Found profile ID {profile_id} associated with stream {stream_id}"
+            )

             profile_connections_key = f"profile_connections:{profile_id}"

@@ -203,6 +222,7 @@ class Stream(models.Model):
             if current_count > 0:
                 redis_client.decr(profile_connections_key)

+
 class ChannelManager(models.Manager):
     def active(self):
         return self.all()
@@ -212,38 +232,35 @@ class Channel(models.Model):
     channel_number = models.FloatField(db_index=True)
     name = models.CharField(max_length=255)
     logo = models.ForeignKey(
-        'Logo',
+        "Logo",
         on_delete=models.SET_NULL,
         null=True,
         blank=True,
-        related_name='channels',
+        related_name="channels",
     )

     # M2M to Stream now in the same file
     streams = models.ManyToManyField(
-        Stream,
-        blank=True,
-        through='ChannelStream',
-        related_name='channels'
+        Stream, blank=True, through="ChannelStream", related_name="channels"
     )

     channel_group = models.ForeignKey(
-        'ChannelGroup',
+        "ChannelGroup",
         on_delete=models.SET_NULL,
         null=True,
         blank=True,
-        related_name='channels',
-        help_text="Channel group this channel belongs to."
+        related_name="channels",
+        help_text="Channel group this channel belongs to.",
     )
     tvg_id = models.CharField(max_length=255, blank=True, null=True)
     tvc_guide_stationid = models.CharField(max_length=255, blank=True, null=True)
-
+
     epg_data = models.ForeignKey(
         EPGData,
         on_delete=models.SET_NULL,
         null=True,
         blank=True,
-        related_name='channels'
+        related_name="channels",
     )

     stream_profile = models.ForeignKey(
@@ -251,16 +268,19 @@
         on_delete=models.SET_NULL,
         null=True,
         blank=True,
-        related_name='channels'
+        related_name="channels",
     )

-    uuid = models.UUIDField(default=uuid.uuid4, editable=False, unique=True, db_index=True)
+    uuid = models.UUIDField(
+        default=uuid.uuid4, editable=False, unique=True, db_index=True
+    )
+
+    user_level = models.IntegerField(default=0)

     def clean(self):
         # Enforce unique channel_number within a given group
         existing = Channel.objects.filter(
-            channel_number=self.channel_number,
-            channel_group=self.channel_group
+            channel_number=self.channel_number, channel_group=self.channel_group
         ).exclude(id=self.id)

         if existing.exists():
             raise ValidationError(
@@ -272,7 +292,7 @@ class Channel(models.Model):

     @classmethod
     def get_next_available_channel_number(cls, starting_from=1):
-        used_numbers = set(cls.objects.all().values_list('channel_number', flat=True))
+        used_numbers = set(cls.objects.all().values_list("channel_number", flat=True))
         n = starting_from
         while n in used_numbers:
             n += 1
@@ -282,7 +302,9 @@ class Channel(models.Model):
     def get_stream_profile(self):
         stream_profile = self.stream_profile
         if not stream_profile:
-            stream_profile = StreamProfile.objects.get(id=CoreSettings.get_default_stream_profile_id())
+            stream_profile = StreamProfile.objects.get(
+                id=CoreSettings.get_default_stream_profile_id()
+            )

         return stream_profile

@@ -312,16 +334,20 @@ class Channel(models.Model):
                 profile_id = int(profile_id_bytes)
                 return stream_id, profile_id, None
             except (ValueError, TypeError):
-                logger.debug(f"Invalid profile ID retrieved from Redis: {profile_id_bytes}")
+                logger.debug(
+                    f"Invalid profile ID retrieved from Redis: {profile_id_bytes}"
+                )
         except (ValueError, TypeError):
-            logger.debug(f"Invalid stream ID retrieved from Redis: {stream_id_bytes}")
+            logger.debug(
+                f"Invalid stream ID retrieved from Redis: {stream_id_bytes}"
+            )

         # No existing active stream, attempt to assign a new one
         has_streams_but_maxed_out = False
         has_active_profiles = False

         # Iterate through channel streams and their profiles
-        for stream in self.streams.all().order_by('channelstream__order'):
+        for stream in self.streams.all().order_by("channelstream__order"):
             # Retrieve the M3U account associated with the stream.
             m3u_account = stream.m3u_account
             if not m3u_account:
@@ -329,13 +355,17 @@ class Channel(models.Model):
                 continue

             m3u_profiles = m3u_account.profiles.all()
-            default_profile = next((obj for obj in m3u_profiles if obj.is_default), None)
+            default_profile = next(
+                (obj for obj in m3u_profiles if obj.is_default), None
+            )

             if not default_profile:
                 logger.debug(f"M3U account {m3u_account.id} has no default profile")
                 continue

-            profiles = [default_profile] + [obj for obj in m3u_profiles if not obj.is_default]
+            profiles = [default_profile] + [
+                obj for obj in m3u_profiles if not obj.is_default
+            ]

             for profile in profiles:
                 # Skip inactive profiles
@@ -346,10 +376,15 @@ class Channel(models.Model):
                 has_active_profiles = True
                 profile_connections_key = f"profile_connections:{profile.id}"
-                current_connections = int(redis_client.get(profile_connections_key) or 0)
+                current_connections = int(
+                    redis_client.get(profile_connections_key) or 0
+                )

                 # Check if profile has available slots (or unlimited connections)
-                if profile.max_streams == 0 or current_connections < profile.max_streams:
+                if (
+                    profile.max_streams == 0
+                    or current_connections < profile.max_streams
+                ):
                     # Start a new stream
                     redis_client.set(f"channel_stream:{self.id}", stream.id)
                     redis_client.set(f"stream_profile:{stream.id}", profile.id)
@@ -358,11 +393,17 @@ class Channel(models.Model):
                     if profile.max_streams > 0:
                         redis_client.incr(profile_connections_key)

-                    return stream.id, profile.id, None  # Return newly assigned stream and matched profile
+                    return (
+                        stream.id,
+                        profile.id,
+                        None,
+                    )  # Return newly assigned stream and matched profile
                 else:
                     # This profile is at max connections
                     has_streams_but_maxed_out = True
-                    logger.debug(f"Profile {profile.id} at max connections: {current_connections}/{profile.max_streams}")
+                    logger.debug(
+                        f"Profile {profile.id} at max connections: {current_connections}/{profile.max_streams}"
+                    )

         # No available streams - determine specific reason
         if has_streams_but_maxed_out:
@@ -388,7 +429,9 @@ class Channel(models.Model):
             redis_client.delete(f"channel_stream:{self.id}")  # Remove active stream

             stream_id = int(stream_id)
-            logger.debug(f"Found stream ID {stream_id} associated with channel stream {self.id}")
+            logger.debug(
+                f"Found stream ID {stream_id} associated with channel stream {self.id}"
+            )

             # Get the matched profile for cleanup
             profile_id = redis_client.get(f"stream_profile:{stream_id}")
@@ -399,7 +442,9 @@ class Channel(models.Model):
                 redis_client.delete(f"stream_profile:{stream_id}")  # Remove profile association

                 profile_id = int(profile_id)
-                logger.debug(f"Found profile ID {profile_id} associated with stream {stream_id}")
+                logger.debug(
+                    f"Found profile ID {profile_id} associated with stream {stream_id}"
+                )

                 profile_connections_key = f"profile_connections:{profile_id}"

@@ -452,20 +497,26 @@ class Channel(models.Model):
             # Increment connection count for new profile
             new_profile_connections_key = f"profile_connections:{new_profile_id}"
             redis_client.incr(new_profile_connections_key)
-            logger.info(f"Updated stream {stream_id} profile from {current_profile_id} to {new_profile_id}")
+            logger.info(
+                f"Updated stream {stream_id} profile from {current_profile_id} to {new_profile_id}"
+            )

             return True


 class ChannelProfile(models.Model):
     name = models.CharField(max_length=100, unique=True)

+
 class ChannelProfileMembership(models.Model):
     channel_profile = models.ForeignKey(ChannelProfile, on_delete=models.CASCADE)
     channel = models.ForeignKey(Channel, on_delete=models.CASCADE)
-    enabled = models.BooleanField(default=True)  # Track if the channel is enabled for this group
+    enabled = models.BooleanField(
+        default=True
+    )  # Track if the channel is enabled for this group

     class Meta:
-        unique_together = ('channel_profile', 'channel')
+        unique_together = ("channel_profile", "channel")
+

 class ChannelStream(models.Model):
     channel = models.ForeignKey(Channel, on_delete=models.CASCADE)
@@ -473,27 +524,26 @@ class ChannelStream(models.Model):
     order = models.PositiveIntegerField(default=0)  # Ordering field

     class Meta:
-        ordering = ['order']  # Ensure streams are retrieved in order
+        ordering = ["order"]  # Ensure streams are retrieved in order
         constraints = [
-            models.UniqueConstraint(fields=['channel', 'stream'], name='unique_channel_stream')
+            models.UniqueConstraint(
+                fields=["channel", "stream"], name="unique_channel_stream"
+            )
         ]

+
 class ChannelGroupM3UAccount(models.Model):
     channel_group = models.ForeignKey(
-        ChannelGroup,
-        on_delete=models.CASCADE,
-        related_name='m3u_account'
+        ChannelGroup, on_delete=models.CASCADE, related_name="m3u_account"
     )
     m3u_account = models.ForeignKey(
-        M3UAccount,
-        on_delete=models.CASCADE,
-        related_name='channel_group'
+        M3UAccount, on_delete=models.CASCADE, related_name="channel_group"
     )
     custom_properties = models.TextField(null=True, blank=True)
     enabled = models.BooleanField(default=True)

     class Meta:
-        unique_together = ('channel_group', 'm3u_account')
+        unique_together = ("channel_group", "m3u_account")

     def __str__(self):
         return f"{self.channel_group.name} - {self.m3u_account.name} (Enabled: {self.enabled})"
@@ -506,8 +556,11 @@ class Logo(models.Model):
     def __str__(self):
         return self.name

+
 class Recording(models.Model):
-    channel = models.ForeignKey("Channel", on_delete=models.CASCADE, related_name="recordings")
+    channel = models.ForeignKey(
+        "Channel", on_delete=models.CASCADE, related_name="recordings"
+    )
     start_time = models.DateTimeField()
     end_time = models.DateTimeField()
     task_id = models.CharField(max_length=255, null=True, blank=True)
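
`Stream.generate_hash_key` derives a stream's identity from only the fields named in the configured hash key, and `sort_keys=True` keeps the digest stable regardless of dict insertion order. A self-contained sketch of the same computation, with a hard-coded key tuple standing in for `CoreSettings.get_m3u_hash_key()`:

```python
import hashlib
import json


def generate_hash_key(name, url, tvg_id, keys=("name", "url", "tvg_id")):
    # Keep only the configured identity fields, then serialize deterministically.
    stream_parts = {"name": name, "url": url, "tvg_id": tvg_id}
    hash_parts = {key: stream_parts[key] for key in keys if key in stream_parts}
    serialized = json.dumps(hash_parts, sort_keys=True)
    return hashlib.sha256(serialized.encode()).hexdigest()


# Same inputs always produce the same digest, so re-imported M3U entries
# can be matched against existing rows instead of duplicated.
assert generate_hash_key("ESPN", "http://example/1.ts", "espn.us") == \
    generate_hash_key("ESPN", "http://example/1.ts", "espn.us")
```
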
diff --git a/apps/channels/serializers.py b/apps/channels/serializers.py
index 5423037f..cdc6ef60 100644
--- a/apps/channels/serializers.py
+++ b/apps/channels/serializers.py
@@ -1,5 +1,15 @@
 from rest_framework import serializers
-from .models import Stream, Channel, ChannelGroup, ChannelStream, ChannelGroupM3UAccount, Logo, ChannelProfile, ChannelProfileMembership, Recording
+from .models import (
+    Stream,
+    Channel,
+    ChannelGroup,
+    ChannelStream,
+    ChannelGroupM3UAccount,
+    Logo,
+    ChannelProfile,
+    ChannelProfileMembership,
+    Recording,
+)
 from apps.epg.serializers import EPGDataSerializer
 from core.models import StreamProfile
 from apps.epg.models import EPGData
@@ -7,19 +17,23 @@ from django.urls import reverse
 from rest_framework import serializers
 from django.utils import timezone

+
 class LogoSerializer(serializers.ModelSerializer):
     cache_url = serializers.SerializerMethodField()

     class Meta:
         model = Logo
-        fields = ['id', 'name', 'url', 'cache_url']
+        fields = ["id", "name", "url", "cache_url"]

     def get_cache_url(self, obj):
         # return f"/api/channels/logos/{obj.id}/cache/"
-        request = self.context.get('request')
+        request = self.context.get("request")
         if request:
-            return request.build_absolute_uri(reverse('api:channels:logo-cache', args=[obj.id]))
-        return reverse('api:channels:logo-cache', args=[obj.id])
+            return request.build_absolute_uri(
+                reverse("api:channels:logo-cache", args=[obj.id])
+            )
+        return reverse("api:channels:logo-cache", args=[obj.id])
+

 #
 # Stream
@@ -27,43 +41,46 @@ class LogoSerializer(serializers.ModelSerializer):
 class StreamSerializer(serializers.ModelSerializer):
     stream_profile_id = serializers.PrimaryKeyRelatedField(
         queryset=StreamProfile.objects.all(),
-        source='stream_profile',
+        source="stream_profile",
         allow_null=True,
-        required=False
+        required=False,
     )
-    read_only_fields = ['is_custom', 'm3u_account', 'stream_hash']
+    read_only_fields = ["is_custom", "m3u_account", "stream_hash"]

     class Meta:
         model = Stream
         fields = [
-            'id',
-            'name',
-            'url',
-            'm3u_account',  # Uncomment if using M3U fields
-            'logo_url',
-            'tvg_id',
-            'local_file',
-            'current_viewers',
-            'updated_at',
-            'last_seen',
-            'stream_profile_id',
-            'is_custom',
-            'channel_group',
-            'stream_hash',
+            "id",
+            "name",
+            "url",
+            "m3u_account",  # Uncomment if using M3U fields
+            "logo_url",
+            "tvg_id",
+            "local_file",
+            "current_viewers",
+            "updated_at",
+            "last_seen",
+            "stream_profile_id",
+            "is_custom",
+            "channel_group",
+            "stream_hash",
         ]

     def get_fields(self):
         fields = super().get_fields()

         # Unable to edit specific properties if this stream was created from an M3U account
-        if self.instance and getattr(self.instance, 'm3u_account', None) and not self.instance.is_custom:
-            fields['id'].read_only = True
-            fields['name'].read_only = True
-            fields['url'].read_only = True
-            fields['m3u_account'].read_only = True
-            fields['tvg_id'].read_only = True
-            fields['channel_group'].read_only = True
-
+        if (
+            self.instance
+            and getattr(self.instance, "m3u_account", None)
+            and not self.instance.is_custom
+        ):
+            fields["id"].read_only = True
+            fields["name"].read_only = True
+            fields["url"].read_only = True
+            fields["m3u_account"].read_only = True
+            fields["tvg_id"].read_only = True
+            fields["channel_group"].read_only = True

         return fields

@@ -74,35 +91,38 @@ class StreamSerializer(serializers.ModelSerializer):
 class ChannelGroupSerializer(serializers.ModelSerializer):
     class Meta:
         model = ChannelGroup
-        fields = ['id', 'name']
+        fields = ["id", "name"]
+

 class ChannelProfileSerializer(serializers.ModelSerializer):
     channels = serializers.SerializerMethodField()

     class Meta:
         model = ChannelProfile
-        fields = ['id', 'name', 'channels']
+        fields = ["id", "name", "channels"]

     def get_channels(self, obj):
-        memberships = ChannelProfileMembership.objects.filter(channel_profile=obj, enabled=True)
-        return [
-            membership.channel.id
-            for membership in memberships
-        ]
+        memberships = ChannelProfileMembership.objects.filter(
+            channel_profile=obj, enabled=True
+        )
+        return [membership.channel.id for membership in memberships]
+

 class ChannelProfileMembershipSerializer(serializers.ModelSerializer):
     class Meta:
         model = ChannelProfileMembership
-        fields = ['channel', 'enabled']
+        fields = ["channel", "enabled"]
+

 class ChanneProfilelMembershipUpdateSerializer(serializers.Serializer):
     channel_id = serializers.IntegerField()  # Ensure channel_id is an integer
     enabled = serializers.BooleanField()

+
 class BulkChannelProfileMembershipSerializer(serializers.Serializer):
     channels = serializers.ListField(
         child=ChanneProfilelMembershipUpdateSerializer(),  # Use the nested serializer
-        allow_empty=False
+        allow_empty=False,
     )

     def validate_channels(self, value):
@@ -110,6 +130,7 @@ class BulkChannelProfileMembershipSerializer(serializers.Serializer):
             raise serializers.ValidationError("At least one channel must be provided.")
         return value

+
 #
 # Channel
 #
@@ -119,14 +140,10 @@ class ChannelSerializer(serializers.ModelSerializer):
     channel_number = serializers.FloatField(
         allow_null=True,
         required=False,
-        error_messages={
-            'invalid': 'Channel number must be a valid decimal number.'
-        }
+        error_messages={"invalid": "Channel number must be a valid decimal number."},
     )
     channel_group_id = serializers.PrimaryKeyRelatedField(
-        queryset=ChannelGroup.objects.all(),
-        source="channel_group",
-        required=False
+        queryset=ChannelGroup.objects.all(), source="channel_group", required=False
     )
     epg_data_id = serializers.PrimaryKeyRelatedField(
         queryset=EPGData.objects.all(),
@@ -137,16 +154,18 @@ class ChannelSerializer(serializers.ModelSerializer):

     stream_profile_id = serializers.PrimaryKeyRelatedField(
         queryset=StreamProfile.objects.all(),
-        source='stream_profile',
+        source="stream_profile",
         allow_null=True,
-        required=False
+        required=False,
     )

-    streams = serializers.PrimaryKeyRelatedField(queryset=Stream.objects.all(), many=True, required=False)
+    streams = serializers.PrimaryKeyRelatedField(
+        queryset=Stream.objects.all(), many=True, required=False
+    )

     logo_id = serializers.PrimaryKeyRelatedField(
         queryset=Logo.objects.all(),
-        source='logo',
+        source="logo",
         allow_null=True,
         required=False,
     )
@@ -154,24 +173,25 @@ class ChannelSerializer(serializers.ModelSerializer):
     class Meta:
         model = Channel
         fields = [
-            'id',
-            'channel_number',
-            'name',
-            'channel_group_id',
-            'tvg_id',
-            'tvc_guide_stationid',
-            'epg_data_id',
-            'streams',
-            'stream_profile_id',
-            'uuid',
-            'logo_id',
+            "id",
+            "channel_number",
+            "name",
+            "channel_group_id",
+            "tvg_id",
+            "tvc_guide_stationid",
+            "epg_data_id",
+            "streams",
+            "stream_profile_id",
+            "uuid",
+            "logo_id",
+            "user_level",
         ]

     def to_representation(self, instance):
-        include_streams = self.context.get('include_streams', False)
+        include_streams = self.context.get("include_streams", False)

         if include_streams:
-            self.fields['streams'] = serializers.SerializerMethodField()
+            self.fields["streams"] = serializers.SerializerMethodField()

         return super().to_representation(instance)

@@ -180,22 +200,28 @@ class ChannelSerializer(serializers.ModelSerializer):

     def get_streams(self, obj):
         """Retrieve ordered stream IDs for GET requests."""
-        return StreamSerializer(obj.streams.all().order_by('channelstream__order'), many=True).data
+        return StreamSerializer(
+            obj.streams.all().order_by("channelstream__order"), many=True
+        ).data

     def create(self, validated_data):
-        streams = validated_data.pop('streams', [])
-        channel_number = validated_data.pop('channel_number', Channel.get_next_available_channel_number())
+        streams = validated_data.pop("streams", [])
+        channel_number = validated_data.pop(
+            "channel_number", Channel.get_next_available_channel_number()
+        )

         validated_data["channel_number"] = channel_number
         channel = Channel.objects.create(**validated_data)

         # Add streams in the specified order
         for index, stream in enumerate(streams):
-            ChannelStream.objects.create(channel=channel, stream_id=stream.id, order=index)
+            ChannelStream.objects.create(
+                channel=channel, stream_id=stream.id, order=index
+            )

         return channel

     def update(self, instance, validated_data):
-        streams = validated_data.pop('streams', None)
+        streams = validated_data.pop("streams", None)

         # Update standard fields
         for attr, value in validated_data.items():
@@ -206,8 +232,7 @@ class ChannelSerializer(serializers.ModelSerializer):
         if streams is not None:
             # Normalize stream IDs
             normalized_ids = [
-                stream.id if hasattr(stream, "id") else stream
-                for stream in streams
+                stream.id if hasattr(stream, "id") else stream for stream in streams
             ]
             print(normalized_ids)

@@ -234,9 +259,7 @@ class ChannelSerializer(serializers.ModelSerializer):
                     cs.save(update_fields=["order"])
             else:
                 ChannelStream.objects.create(
-                    channel=instance,
-                    stream_id=stream_id,
-                    order=order
+                    channel=instance, stream_id=stream_id, order=order
                 )

         return instance
@@ -250,20 +273,23 @@ class ChannelSerializer(serializers.ModelSerializer):
             # Ensure it's processed as a float
             return float(value)
         except (ValueError, TypeError):
-            raise serializers.ValidationError("Channel number must be a valid decimal number.")
+            raise serializers.ValidationError(
+                "Channel number must be a valid decimal number."
+            )

     def validate_stream_profile(self, value):
         """Handle special case where empty/0 values mean 'use default' (null)"""
-        if value == '0' or value == 0 or value == '' or value is None:
+        if value == "0" or value == 0 or value == "" or value is None:
             return None
         return value  # PrimaryKeyRelatedField will handle the conversion to object

+
 class ChannelGroupM3UAccountSerializer(serializers.ModelSerializer):
     enabled = serializers.BooleanField()

     class Meta:
         model = ChannelGroupM3UAccount
-        fields = ['id', 'channel_group', 'enabled']
+        fields = ["id", "channel_group", "enabled"]

     # Optionally, if you only need the id of the ChannelGroup, you can customize it like this:
     # channel_group = serializers.PrimaryKeyRelatedField(queryset=ChannelGroup.objects.all())
@@ -272,12 +298,12 @@ class ChannelGroupM3UAccountSerializer(serializers.ModelSerializer):
 class RecordingSerializer(serializers.ModelSerializer):
     class Meta:
         model = Recording
-        fields = '__all__'
-        read_only_fields = ['task_id']
+        fields = "__all__"
+        read_only_fields = ["task_id"]

     def validate(self, data):
-        start_time = data.get('start_time')
-        end_time = data.get('end_time')
+        start_time = data.get("start_time")
+        end_time = data.get("end_time")
         now = timezone.now()  # timezone-aware current time

@@ -286,8 +312,8 @@ class RecordingSerializer(serializers.ModelSerializer):

         if start_time < now:
             # Optional: Adjust start_time if it's in the past but end_time is in the future
-            data['start_time'] = now  # or: timezone.now() + timedelta(seconds=1)
-            if end_time <= data['start_time']:
+            data["start_time"] = now  # or: timezone.now() + timedelta(seconds=1)
+            if end_time <= data["start_time"]:
                 raise serializers.ValidationError("End time must be after start time.")

         return data
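
`ChannelSerializer.to_representation` swaps the `streams` field for a `SerializerMethodField` only when `include_streams` is present in the serializer context, so list endpoints can return bare primary keys while detail views embed full stream objects in `ChannelStream.order`. A usage sketch, assuming an already-fetched `channel` instance (the example values are illustrative):

```python
# Default: primary keys only.
slim = ChannelSerializer(channel).data
slim["streams"]  # e.g. [4, 9, 2]

# With the context flag: full StreamSerializer payloads,
# ordered by channelstream__order.
full = ChannelSerializer(channel, context={"include_streams": True}).data
full["streams"]  # e.g. [{"id": 4, "name": "...", ...}, ...]
```
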
diff --git a/apps/epg/api_views.py b/apps/epg/api_views.py
index 240e2dcb..f3248677 100644
--- a/apps/epg/api_views.py
+++ b/apps/epg/api_views.py
@@ -2,18 +2,27 @@ import logging, os
 from rest_framework import viewsets, status
 from rest_framework.response import Response
 from rest_framework.views import APIView
-from rest_framework.permissions import IsAuthenticated
 from rest_framework.decorators import action
 from drf_yasg.utils import swagger_auto_schema
 from drf_yasg import openapi
 from django.utils import timezone
 from datetime import timedelta
 from .models import EPGSource, ProgramData, EPGData  # Added ProgramData
-from .serializers import ProgramDataSerializer, EPGSourceSerializer, EPGDataSerializer  # Updated serializer
+from .serializers import (
+    ProgramDataSerializer,
+    EPGSourceSerializer,
+    EPGDataSerializer,
+)  # Updated serializer
 from .tasks import refresh_epg_data
+from apps.accounts.permissions import (
+    Authenticated,
+    permission_classes_by_action,
+    permission_classes_by_method,
+)

 logger = logging.getLogger(__name__)

+
 # ─────────────────────────────
 # 1) EPG Source API (CRUD)
 # ─────────────────────────────
@@ -21,30 +30,38 @@ class EPGSourceViewSet(viewsets.ModelViewSet):
     """
     API endpoint that allows EPG sources to be viewed or edited.
     """
+
     queryset = EPGSource.objects.all()
     serializer_class = EPGSourceSerializer
-    permission_classes = [IsAuthenticated]
+
+    def get_permissions(self):
+        try:
+            return [perm() for perm in permission_classes_by_action[self.action]]
+        except KeyError:
+            return [Authenticated()]

     def list(self, request, *args, **kwargs):
         logger.debug("Listing all EPG sources.")
         return super().list(request, *args, **kwargs)

-    @action(detail=False, methods=['post'])
+    @action(detail=False, methods=["post"])
     def upload(self, request):
-        if 'file' not in request.FILES:
-            return Response({'error': 'No file uploaded'}, status=status.HTTP_400_BAD_REQUEST)
+        if "file" not in request.FILES:
+            return Response(
+                {"error": "No file uploaded"}, status=status.HTTP_400_BAD_REQUEST
+            )

-        file = request.FILES['file']
+        file = request.FILES["file"]
         file_name = file.name
-        file_path = os.path.join('/data/uploads/epgs', file_name)
+        file_path = os.path.join("/data/uploads/epgs", file_name)

         os.makedirs(os.path.dirname(file_path), exist_ok=True)
-        with open(file_path, 'wb+') as destination:
+        with open(file_path, "wb+") as destination:
             for chunk in file.chunks():
                 destination.write(chunk)

         new_obj_data = request.data.copy()
-        new_obj_data['file_path'] = file_path
+        new_obj_data["file_path"] = file_path

         serializer = self.get_serializer(data=new_obj_data)
         serializer.is_valid(raise_exception=True)
@@ -57,55 +74,78 @@ class EPGSourceViewSet(viewsets.ModelViewSet):
         instance = self.get_object()

         # Check if we're toggling is_active
-        if 'is_active' in request.data and instance.is_active != request.data['is_active']:
+        if (
+            "is_active" in request.data
+            and instance.is_active != request.data["is_active"]
+        ):
             # Set appropriate status based on new is_active value
-            if request.data['is_active']:
-                request.data['status'] = 'idle'
+            if request.data["is_active"]:
+                request.data["status"] = "idle"
             else:
-                request.data['status'] = 'disabled'
+                request.data["status"] = "disabled"

         # Continue with regular partial update
         return super().partial_update(request, *args, **kwargs)

+
 # ─────────────────────────────
 # 2) Program API (CRUD)
 # ─────────────────────────────
 class ProgramViewSet(viewsets.ModelViewSet):
     """Handles CRUD operations for EPG programs"""
+
     queryset = ProgramData.objects.all()
     serializer_class = ProgramDataSerializer
-    permission_classes = [IsAuthenticated]
+
+    def get_permissions(self):
+        try:
+            return [perm() for perm in permission_classes_by_action[self.action]]
+        except KeyError:
+            return [Authenticated()]

     def list(self, request, *args, **kwargs):
         logger.debug("Listing all EPG programs.")
         return super().list(request, *args, **kwargs)

+
 # ─────────────────────────────
 # 3) EPG Grid View
 # ─────────────────────────────
 class EPGGridAPIView(APIView):
     """Returns all programs airing in the next 24 hours including currently running ones and recent ones"""

+    def get_permissions(self):
+        try:
+            return [
+                perm() for perm in permission_classes_by_method[self.request.method]
+            ]
+        except KeyError:
+            return [Authenticated()]
+
     @swagger_auto_schema(
         operation_description="Retrieve programs from the previous hour, currently running and upcoming for the next 24 hours",
-        responses={200: ProgramDataSerializer(many=True)}
+        responses={200: ProgramDataSerializer(many=True)},
     )
     def get(self, request, format=None):
         # Use current time instead of midnight
         now = timezone.now()
         one_hour_ago = now - timedelta(hours=1)
         twenty_four_hours_later = now + timedelta(hours=24)

-        logger.debug(f"EPGGridAPIView: Querying programs between {one_hour_ago} and {twenty_four_hours_later}.")
+        logger.debug(
+            f"EPGGridAPIView: Querying programs between {one_hour_ago} and {twenty_four_hours_later}."
+        )

         # Use select_related to prefetch EPGData and include programs from the last hour
-        programs = ProgramData.objects.select_related('epg').filter(
+        programs = ProgramData.objects.select_related("epg").filter(
             # Programs that end after one hour ago (includes recently ended programs)
             end_time__gt=one_hour_ago,
             # AND start before the end time window
-            start_time__lt=twenty_four_hours_later
+            start_time__lt=twenty_four_hours_later,
         )

         count = programs.count()
-        logger.debug(f"EPGGridAPIView: Found {count} program(s), including recently ended, currently running, and upcoming shows.")
+        logger.debug(
+            f"EPGGridAPIView: Found {count} program(s), including recently ended, currently running, and upcoming shows."
+        )

         # Generate dummy programs for channels that have no EPG data
         from apps.channels.models import Channel
@@ -118,9 +158,13 @@ class EPGGridAPIView(APIView):
         # Log more detailed information about channels missing EPG data
         if channels_count > 0:
             channel_names = [f"{ch.name} (ID: {ch.id})" for ch in channels_without_epg]
-            logger.warning(f"EPGGridAPIView: Missing EPG data for these channels: {', '.join(channel_names)}")
+            logger.warning(
+                f"EPGGridAPIView: Missing EPG data for these channels: {', '.join(channel_names)}"
+            )

-        logger.debug(f"EPGGridAPIView: Found {channels_count} channels with no EPG data.")
+        logger.debug(
+            f"EPGGridAPIView: Found {channels_count} channels with no EPG data."
+        )

         # Serialize the regular programs
         serialized_programs = ProgramDataSerializer(programs, many=True).data
@@ -130,33 +174,33 @@ class EPGGridAPIView(APIView):
             (0, 4): [
                 "Late Night with {channel} - Where insomniacs unite!",
                 "The 'Why Am I Still Awake?' Show on {channel}",
-                "Counting Sheep - A {channel} production for the sleepless"
+                "Counting Sheep - A {channel} production for the sleepless",
             ],
             (4, 8): [
                 "Dawn Patrol - Rise and shine with {channel}!",
                 "Early Bird Special - Coffee not included",
-                "Morning Zombies - Before coffee viewing on {channel}"
+                "Morning Zombies - Before coffee viewing on {channel}",
             ],
             (8, 12): [
                 "Mid-Morning Meetings - Pretend you're paying attention while watching {channel}",
                 "The 'I Should Be Working' Hour on {channel}",
-                "Productivity Killer - {channel}'s daytime programming"
+                "Productivity Killer - {channel}'s daytime programming",
             ],
             (12, 16): [
                 "Lunchtime Laziness with {channel}",
                 "The Afternoon Slump - Brought to you by {channel}",
-                "Post-Lunch Food Coma Theater on {channel}"
+                "Post-Lunch Food Coma Theater on {channel}",
             ],
             (16, 20): [
                 "Rush Hour - {channel}'s alternative to traffic",
                 "The 'What's For Dinner?' Debate on {channel}",
-                "Evening Escapism - {channel}'s remedy for reality"
+                "Evening Escapism - {channel}'s remedy for reality",
             ],
             (20, 24): [
                 "Prime Time Placeholder - {channel}'s finest not-programming",
                 "The 'Netflix Was Too Complicated' Show on {channel}",
-                "Family Argument Avoider - Courtesy of {channel}"
-            ]
+                "Family Argument Avoider - Courtesy of {channel}",
+            ],
         }

         # Generate and append dummy programs
@@ -184,7 +228,9 @@ class EPGGridAPIView(APIView):
                         if start_range <= hour < end_range:
                             # Pick a description using the sum of the hour and day as seed
                             # This makes it somewhat random but consistent for the same timeslot
-                            description = descriptions[(hour + day) % len(descriptions)].format(channel=channel.name)
+                            description = descriptions[
+                                (hour + day) % len(descriptions)
+                            ].format(channel=channel.name)
                             break
                     else:
                         # Fallback description if somehow no range matches
@@ -192,29 +238,31 @@ class EPGGridAPIView(APIView):

                     # Create a dummy program in the same format as regular programs
                     dummy_program = {
-                        'id': f"dummy-{channel.id}-{hour_offset}",  # Create a unique ID
-                        'epg': {
-                            'tvg_id': dummy_tvg_id,
-                            'name': channel.name
-                        },
-                        'start_time': start_time.isoformat(),
-                        'end_time': end_time.isoformat(),
-                        'title': f"{channel.name}",
-                        'description': description,
-                        'tvg_id': dummy_tvg_id,
-                        'sub_title': None,
-                        'custom_properties': None
+                        "id": f"dummy-{channel.id}-{hour_offset}",  # Create a unique ID
+                        "epg": {"tvg_id": dummy_tvg_id, "name": channel.name},
+                        "start_time": start_time.isoformat(),
+                        "end_time": end_time.isoformat(),
+                        "title": f"{channel.name}",
+                        "description": description,
+                        "tvg_id": dummy_tvg_id,
+                        "sub_title": None,
+                        "custom_properties": None,
                     }
                     dummy_programs.append(dummy_program)
             except Exception as e:
-                logger.error(f"Error creating dummy programs for channel {channel.name} (ID: {channel.id}): {str(e)}")
+                logger.error(
+                    f"Error creating dummy programs for channel {channel.name} (ID: {channel.id}): {str(e)}"
+                )

         # Combine regular and dummy programs
         all_programs = list(serialized_programs) + dummy_programs

-        logger.debug(f"EPGGridAPIView: Returning {len(all_programs)} total programs (including {len(dummy_programs)} dummy programs).")
+        logger.debug(
+            f"EPGGridAPIView: Returning {len(all_programs)} total programs (including {len(dummy_programs)} dummy programs)."
+        )
+
+        return Response({"data": all_programs}, status=status.HTTP_200_OK)

-        return Response({'data': all_programs}, status=status.HTTP_200_OK)

 # ─────────────────────────────
 # 4) EPG Import View
@@ -222,15 +270,26 @@ class EPGGridAPIView(APIView):
 class EPGImportAPIView(APIView):
     """Triggers an EPG data refresh"""

+    def get_permissions(self):
+        try:
+            return [
+                perm() for perm in permission_classes_by_method[self.request.method]
+            ]
+        except KeyError:
+            return [Authenticated()]
+
     @swagger_auto_schema(
         operation_description="Triggers an EPG data import",
-        responses={202: "EPG data import initiated"}
+        responses={202: "EPG data import initiated"},
     )
     def post(self, request, format=None):
         logger.info("EPGImportAPIView: Received request to import EPG data.")
-        refresh_epg_data.delay(request.data.get('id', None))  # Trigger Celery task
+        refresh_epg_data.delay(request.data.get("id", None))  # Trigger Celery task
         logger.info("EPGImportAPIView: Task dispatched to refresh EPG data.")
-        return Response({'success': True, 'message': 'EPG data import initiated.'}, status=status.HTTP_202_ACCEPTED)
+        return Response(
+            {"success": True, "message": "EPG data import initiated."},
+            status=status.HTTP_202_ACCEPTED,
+        )


 # ─────────────────────────────
@@ -240,6 +299,12 @@ class EPGDataViewSet(viewsets.ReadOnlyModelViewSet):
     """
     API endpoint that allows EPGData objects to be viewed.
     """
+
     queryset = EPGData.objects.all()
     serializer_class = EPGDataSerializer
-    permission_classes = [IsAuthenticated]
+
+    def get_permissions(self):
+        try:
+            return [perm() for perm in permission_classes_by_action[self.action]]
+        except KeyError:
+            return [Authenticated()]
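
The grid endpoint's window filter reads as "ends after one hour ago AND starts within the next 24 hours", which captures recently finished, currently airing, and upcoming programs in one queryset. Reduced to its essentials (assuming the `ProgramData` model from this diff is importable):

```python
from datetime import timedelta

from django.utils import timezone

now = timezone.now()
programs = ProgramData.objects.select_related("epg").filter(
    end_time__gt=now - timedelta(hours=1),     # still (or recently) relevant
    start_time__lt=now + timedelta(hours=24),  # begins inside the window
)
```
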
diff --git a/apps/hdhr/api_views.py b/apps/hdhr/api_views.py
index 0d7b77e0..f4f174f3 100644
--- a/apps/hdhr/api_views.py
+++ b/apps/hdhr/api_views.py
@@ -1,7 +1,7 @@
 from rest_framework import viewsets, status
 from rest_framework.response import Response
 from rest_framework.views import APIView
-from rest_framework.permissions import IsAuthenticated
+from apps.accounts.permissions import Authenticated, permission_classes_by_action
 from django.http import JsonResponse, HttpResponseForbidden, HttpResponse
 import logging
 from drf_yasg.utils import swagger_auto_schema
@@ -18,21 +18,30 @@ from django.utils.decorators import method_decorator
 from django.contrib.auth.decorators import login_required
 from django.views.decorators.csrf import csrf_exempt
 from apps.m3u.models import M3UAccountProfile
+
 # Configure logger
 logger = logging.getLogger(__name__)

+
 @login_required
 def hdhr_dashboard_view(request):
     """Render the HDHR management page."""
     hdhr_devices = HDHRDevice.objects.all()
     return render(request, "hdhr/hdhr.html", {"hdhr_devices": hdhr_devices})

+
 # 🔹 1) HDHomeRun Device API
 class HDHRDeviceViewSet(viewsets.ModelViewSet):
     """Handles CRUD operations for HDHomeRun devices"""
+
     queryset = HDHRDevice.objects.all()
     serializer_class = HDHRDeviceSerializer
-    permission_classes = [IsAuthenticated]
+
+    def get_permissions(self):
+        try:
+            return [perm() for perm in permission_classes_by_action[self.action]]
+        except KeyError:
+            return [Authenticated()]


 # 🔹 2) Discover API
@@ -41,20 +50,20 @@ class DiscoverAPIView(APIView):

     @swagger_auto_schema(
         operation_description="Retrieve HDHomeRun device discovery information",
-        responses={200: openapi.Response("HDHR Discovery JSON")}
+        responses={200: openapi.Response("HDHR Discovery JSON")},
     )
     def get(self, request, profile=None):
         uri_parts = ["hdhr"]
         if profile is not None:
             uri_parts.append(profile)

-        base_url = request.build_absolute_uri(f'/{"/".join(uri_parts)}/').rstrip('/')
+        base_url = request.build_absolute_uri(f'/{"/".join(uri_parts)}/').rstrip("/")

         device = HDHRDevice.objects.first()

         # Calculate tuner count from active profiles from active M3U accounts (excluding default "custom Default" profile)
         profiles = M3UAccountProfile.objects.filter(
             is_active=True,
-            m3u_account__is_active=True  # Only include profiles from enabled M3U accounts
+            m3u_account__is_active=True,  # Only include profiles from enabled M3U accounts
         ).exclude(id=1)

         # 1. Check if any profile has unlimited streams (max_streams=0)
@@ -63,9 +72,12 @@ class DiscoverAPIView(APIView):
         # 2. Calculate tuner count from limited profiles
         limited_tuners = 0
         if not has_unlimited:
-            limited_tuners = profiles.filter(max_streams__gt=0).aggregate(
-                total=models.Sum('max_streams')
-            ).get('total', 0) or 0
+            limited_tuners = (
+                profiles.filter(max_streams__gt=0)
+                .aggregate(total=models.Sum("max_streams"))
+                .get("total", 0)
+                or 0
+            )

         # 3. Add custom stream count to tuner count
         custom_stream_count = Stream.objects.filter(is_custom=True).count()
@@ -82,7 +94,9 @@ class DiscoverAPIView(APIView):
         # 5. Ensure minimum of 1 tuners
         tuner_count = max(1, tuner_count)

-        logger.debug(f"Calculated tuner count: {tuner_count} (limited profiles: {limited_tuners}, custom streams: {custom_stream_count}, unlimited: {has_unlimited})")
+        logger.debug(
+            f"Calculated tuner count: {tuner_count} (limited profiles: {limited_tuners}, custom streams: {custom_stream_count}, unlimited: {has_unlimited})"
+        )

         # Create a unique DeviceID for the HDHomeRun device based on profile ID or a default value
         device_ID = "12345678"  # Default DeviceID
@@ -123,17 +137,17 @@ class LineupAPIView(APIView):

     @swagger_auto_schema(
         operation_description="Retrieve the available channel lineup",
-        responses={200: openapi.Response("Channel Lineup JSON")}
+        responses={200: openapi.Response("Channel Lineup JSON")},
     )
     def get(self, request, profile=None):
         if profile is not None:
             channel_profile = ChannelProfile.objects.get(name=profile)
             channels = Channel.objects.filter(
                 channelprofilemembership__channel_profile=channel_profile,
-                channelprofilemembership__enabled=True
-            ).order_by('channel_number')
+                channelprofilemembership__enabled=True,
+            ).order_by("channel_number")
         else:
-            channels = Channel.objects.all().order_by('channel_number')
+            channels = Channel.objects.all().order_by("channel_number")

         lineup = []
         for ch in channels:
@@ -146,13 +160,15 @@ class LineupAPIView(APIView):
             else:
                 formatted_channel_number = ""

-            lineup.append({
-                "GuideNumber": formatted_channel_number,
-                "GuideName": ch.name,
-                "URL": request.build_absolute_uri(f"/proxy/ts/stream/{ch.uuid}"),
-                "Guide_ID": formatted_channel_number,
-                "Station": formatted_channel_number,
-            })
+            lineup.append(
+                {
+                    "GuideNumber": formatted_channel_number,
+                    "GuideName": ch.name,
+                    "URL": request.build_absolute_uri(f"/proxy/ts/stream/{ch.uuid}"),
+                    "Guide_ID": formatted_channel_number,
+                    "Station": formatted_channel_number,
+                }
+            )

         return JsonResponse(lineup, safe=False)

@@ -162,14 +178,14 @@ class LineupStatusAPIView(APIView):

     @swagger_auto_schema(
         operation_description="Retrieve the HDHomeRun lineup status",
-        responses={200: openapi.Response("Lineup Status JSON")}
+        responses={200: openapi.Response("Lineup Status JSON")},
     )
     def get(self, request, profile=None):
         data = {
             "ScanInProgress": 0,
             "ScanPossible": 0,
             "Source": "Cable",
-            "SourceList": ["Cable"]
+            "SourceList": ["Cable"],
         }
         return JsonResponse(data)

@@ -180,10 +196,10 @@ class HDHRDeviceXMLAPIView(APIView):

     @swagger_auto_schema(
         operation_description="Retrieve the HDHomeRun device XML configuration",
-        responses={200: openapi.Response("HDHR Device XML")}
+        responses={200: openapi.Response("HDHR Device XML")},
     )
     def get(self, request):
-        base_url = request.build_absolute_uri('/hdhr/').rstrip('/')
+        base_url = request.build_absolute_uri("/hdhr/").rstrip("/")
         xml_response = f"""
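
DiscoverAPIView's tuner count sums `max_streams` across limited profiles, treats any `max_streams=0` profile as unlimited, adds the custom-stream count, and floors the result at 1. A hypothetical standalone mirror of that arithmetic — the hunk above elides how an unlimited profile maps to a concrete number, so `unlimited_value` here is an assumption:

```python
def tuner_count(profile_limits, custom_streams, unlimited_value=10):
    # profile_limits: max_streams per active profile; 0 means unlimited.
    has_unlimited = any(limit == 0 for limit in profile_limits)
    limited = 0 if has_unlimited else sum(l for l in profile_limits if l > 0)
    base = unlimited_value if has_unlimited else limited
    return max(1, base + custom_streams)


tuner_count([3, 5], custom_streams=2)  # -> 10
tuner_count([], custom_streams=0)      # -> 1 (floored minimum)
```
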
openapi.Response("HDHR Device XML")}, ) def get(self, request): - base_url = request.build_absolute_uri('/hdhr/').rstrip('/') + base_url = request.build_absolute_uri("/hdhr/").rstrip("/") xml_response = f""" diff --git a/apps/m3u/api_views.py b/apps/m3u/api_views.py index daac92b1..0ef42272 100644 --- a/apps/m3u/api_views.py +++ b/apps/m3u/api_views.py @@ -1,7 +1,11 @@ from rest_framework import viewsets, status from rest_framework.response import Response from rest_framework.views import APIView -from rest_framework.permissions import IsAuthenticated +from apps.accounts.permissions import ( + Authenticated, + permission_classes_by_action, + permission_classes_by_method, +) from drf_yasg.utils import swagger_auto_schema from drf_yasg import openapi from django.shortcuts import get_object_or_404 @@ -17,6 +21,7 @@ from .models import M3UAccount, M3UFilter, ServerGroup, M3UAccountProfile from core.models import UserAgent from apps.channels.models import ChannelGroupM3UAccount from core.serializers import UserAgentSerializer + # Import all serializers, including the UserAgentSerializer. from .serializers import ( M3UAccountSerializer, @@ -29,45 +34,54 @@ from .tasks import refresh_single_m3u_account, refresh_m3u_accounts from django.core.files.storage import default_storage from django.core.files.base import ContentFile + class M3UAccountViewSet(viewsets.ModelViewSet): """Handles CRUD operations for M3U accounts""" - queryset = M3UAccount.objects.prefetch_related('channel_group') + + queryset = M3UAccount.objects.prefetch_related("channel_group") serializer_class = M3UAccountSerializer - permission_classes = [IsAuthenticated] + + def get_permissions(self): + try: + return [perm() for perm in permission_classes_by_action[self.action]] + except KeyError: + return [Authenticated()] def create(self, request, *args, **kwargs): # Handle file upload first, if any file_path = None - if 'file' in request.FILES: - file = request.FILES['file'] + if "file" in request.FILES: + file = request.FILES["file"] file_name = file.name - file_path = os.path.join('/data/uploads/m3us', file_name) + file_path = os.path.join("/data/uploads/m3us", file_name) os.makedirs(os.path.dirname(file_path), exist_ok=True) - with open(file_path, 'wb+') as destination: + with open(file_path, "wb+") as destination: for chunk in file.chunks(): destination.write(chunk) # Add file_path to the request data so it's available during creation request.data._mutable = True # Allow modification of the request data - request.data['file_path'] = file_path # Include the file path if a file was uploaded - + request.data["file_path"] = ( + file_path # Include the file path if a file was uploaded + ) + # Handle the user_agent field - convert "null" string to None - if 'user_agent' in request.data and request.data['user_agent'] == 'null': - request.data['user_agent'] = None - + if "user_agent" in request.data and request.data["user_agent"] == "null": + request.data["user_agent"] = None + # Handle server_url appropriately - if 'server_url' in request.data and not request.data['server_url']: - request.data.pop('server_url') - + if "server_url" in request.data and not request.data["server_url"]: + request.data.pop("server_url") + request.data._mutable = False # Make the request data immutable again # Now call super().create() to create the instance response = super().create(request, *args, **kwargs) - print(response.data.get('account_type')) - if response.data.get('account_type') == M3UAccount.Types.XC: - refresh_m3u_groups(response.data.get('id')) + 
print(response.data.get("account_type")) + if response.data.get("account_type") == M3UAccount.Types.XC: + refresh_m3u_groups(response.data.get("id")) # After the instance is created, return the response return response @@ -77,28 +91,30 @@ class M3UAccountViewSet(viewsets.ModelViewSet): # Handle file upload first, if any file_path = None - if 'file' in request.FILES: - file = request.FILES['file'] + if "file" in request.FILES: + file = request.FILES["file"] file_name = file.name - file_path = os.path.join('/data/uploads/m3us', file_name) + file_path = os.path.join("/data/uploads/m3us", file_name) os.makedirs(os.path.dirname(file_path), exist_ok=True) - with open(file_path, 'wb+') as destination: + with open(file_path, "wb+") as destination: for chunk in file.chunks(): destination.write(chunk) # Add file_path to the request data so it's available during creation request.data._mutable = True # Allow modification of the request data - request.data['file_path'] = file_path # Include the file path if a file was uploaded - + request.data["file_path"] = ( + file_path # Include the file path if a file was uploaded + ) + # Handle the user_agent field - convert "null" string to None - if 'user_agent' in request.data and request.data['user_agent'] == 'null': - request.data['user_agent'] = None - + if "user_agent" in request.data and request.data["user_agent"] == "null": + request.data["user_agent"] = None + # Handle server_url appropriately - if 'server_url' in request.data and not request.data['server_url']: - request.data.pop('server_url') - + if "server_url" in request.data and not request.data["server_url"]: + request.data.pop("server_url") + request.data._mutable = False # Make the request data immutable again if instance.file_path and os.path.exists(instance.file_path): @@ -115,75 +131,131 @@ class M3UAccountViewSet(viewsets.ModelViewSet): instance = self.get_object() # Check if we're toggling is_active - if 'is_active' in request.data and instance.is_active != request.data['is_active']: + if ( + "is_active" in request.data + and instance.is_active != request.data["is_active"] + ): # Set appropriate status based on new is_active value - if request.data['is_active']: - request.data['status'] = M3UAccount.Status.IDLE + if request.data["is_active"]: + request.data["status"] = M3UAccount.Status.IDLE else: - request.data['status'] = M3UAccount.Status.DISABLED + request.data["status"] = M3UAccount.Status.DISABLED # Continue with regular partial update return super().partial_update(request, *args, **kwargs) + class M3UFilterViewSet(viewsets.ModelViewSet): """Handles CRUD operations for M3U filters""" + queryset = M3UFilter.objects.all() serializer_class = M3UFilterSerializer - permission_classes = [IsAuthenticated] + + def get_permissions(self): + try: + return [perm() for perm in permission_classes_by_action[self.action]] + except KeyError: + return [Authenticated()] + class ServerGroupViewSet(viewsets.ModelViewSet): """Handles CRUD operations for Server Groups""" + queryset = ServerGroup.objects.all() serializer_class = ServerGroupSerializer - permission_classes = [IsAuthenticated] + + def get_permissions(self): + try: + return [perm() for perm in permission_classes_by_action[self.action]] + except KeyError: + return [Authenticated()] + class RefreshM3UAPIView(APIView): """Triggers refresh for all active M3U accounts""" + def get_permissions(self): + try: + return [ + perm() for perm in permission_classes_by_method[self.request.method] + ] + except KeyError: + return [Authenticated()] + 
@swagger_auto_schema( operation_description="Triggers a refresh of all active M3U accounts", - responses={202: "M3U refresh initiated"} + responses={202: "M3U refresh initiated"}, ) def post(self, request, format=None): refresh_m3u_accounts.delay() - return Response({'success': True, 'message': 'M3U refresh initiated.'}, status=status.HTTP_202_ACCEPTED) + return Response( + {"success": True, "message": "M3U refresh initiated."}, + status=status.HTTP_202_ACCEPTED, + ) + class RefreshSingleM3UAPIView(APIView): """Triggers refresh for a single M3U account""" + def get_permissions(self): + try: + return [ + perm() for perm in permission_classes_by_method[self.request.method] + ] + except KeyError: + return [Authenticated()] + @swagger_auto_schema( operation_description="Triggers a refresh of a single M3U account", - responses={202: "M3U account refresh initiated"} + responses={202: "M3U account refresh initiated"}, ) def post(self, request, account_id, format=None): refresh_single_m3u_account.delay(account_id) - return Response({'success': True, 'message': f'M3U account {account_id} refresh initiated.'}, - status=status.HTTP_202_ACCEPTED) + return Response( + { + "success": True, + "message": f"M3U account {account_id} refresh initiated.", + }, + status=status.HTTP_202_ACCEPTED, + ) + class UserAgentViewSet(viewsets.ModelViewSet): """Handles CRUD operations for User Agents""" + queryset = UserAgent.objects.all() serializer_class = UserAgentSerializer - permission_classes = [IsAuthenticated] + + def get_permissions(self): + try: + return [perm() for perm in permission_classes_by_action[self.action]] + except KeyError: + return [Authenticated()] + class M3UAccountProfileViewSet(viewsets.ModelViewSet): queryset = M3UAccountProfile.objects.all() serializer_class = M3UAccountProfileSerializer - permission_classes = [IsAuthenticated] + + def get_permissions(self): + try: + return [perm() for perm in permission_classes_by_action[self.action]] + except KeyError: + return [Authenticated()] def get_queryset(self): - m3u_account_id = self.kwargs['account_id'] + m3u_account_id = self.kwargs["account_id"] return M3UAccountProfile.objects.filter(m3u_account_id=m3u_account_id) def perform_create(self, serializer): # Get the account ID from the URL - account_id = self.kwargs['account_id'] + account_id = self.kwargs["account_id"] # Get the M3UAccount instance for the account_id m3u_account = M3UAccount.objects.get(id=account_id) # Save the 'm3u_account' in the serializer context - serializer.context['m3u_account'] = m3u_account + serializer.context["m3u_account"] = m3u_account # Perform the actual save serializer.save(m3u_account_id=m3u_account) diff --git a/apps/m3u/migrations/0003_create_custom_account.py b/apps/m3u/migrations/0003_create_custom_account.py index 8695f248..cdc40cda 100644 --- a/apps/m3u/migrations/0003_create_custom_account.py +++ b/apps/m3u/migrations/0003_create_custom_account.py @@ -3,6 +3,7 @@ from django.db import migrations from core.models import CoreSettings + def create_custom_account(apps, schema_editor): default_user_agent_id = CoreSettings.get_default_user_agent_id() @@ -18,7 +19,7 @@ def create_custom_account(apps, schema_editor): M3UAccountProfile = apps.get_model("m3u", "M3UAccountProfile") M3UAccountProfile.objects.create( m3u_account=m3u_account, - name=f'{m3u_account.name} Default', + name=f"{m3u_account.name} Default", max_streams=m3u_account.max_streams, is_default=True, is_active=True, @@ -26,10 +27,12 @@ def create_custom_account(apps, schema_editor): 
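
Both `create()` and `update()` above repeat the same preamble: persist the uploaded file to `/data/uploads/m3us`, then briefly flip `request.data._mutable` (multipart bodies arrive as an immutable `QueryDict`) to record the path and normalize a couple of fields. The shared shape, extracted into a hypothetical helper for illustration — `stash_upload` is not a function in this codebase:

```python
import os


def stash_upload(request, upload_dir="/data/uploads/m3us"):
    """Hypothetical helper mirroring the upload preamble in the viewset above."""
    if "file" not in request.FILES:
        return None
    file = request.FILES["file"]
    file_path = os.path.join(upload_dir, file.name)
    os.makedirs(upload_dir, exist_ok=True)
    with open(file_path, "wb+") as destination:
        for chunk in file.chunks():
            destination.write(chunk)
    request.data._mutable = True  # QueryDict is immutable by default
    request.data["file_path"] = file_path
    if request.data.get("user_agent") == "null":
        request.data["user_agent"] = None  # "null" string means no user agent
    request.data._mutable = False
    return file_path
```
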
replace_pattern="$1", ) + class Migration(migrations.Migration): dependencies = [ - ('m3u', '0002_m3uaccount_locked'), + ("m3u", "0002_m3uaccount_locked"), + ("core", "0004_preload_core_settings"), ] operations = [ diff --git a/apps/m3u/migrations/0005_m3uaccount_custom_properties_and_more.py b/apps/m3u/migrations/0005_m3uaccount_custom_properties_and_more.py index 3728bf7f..7a5f2013 100644 --- a/apps/m3u/migrations/0005_m3uaccount_custom_properties_and_more.py +++ b/apps/m3u/migrations/0005_m3uaccount_custom_properties_and_more.py @@ -7,24 +7,29 @@ from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ - ('django_celery_beat', '0019_alter_periodictasks_options'), - ('m3u', '0004_m3uaccount_stream_profile'), + ("django_celery_beat", "0019_alter_periodictasks_options"), + ("m3u", "0004_m3uaccount_stream_profile"), ] operations = [ migrations.AddField( - model_name='m3uaccount', - name='custom_properties', + model_name="m3uaccount", + name="custom_properties", field=models.TextField(blank=True, null=True), ), migrations.AddField( - model_name='m3uaccount', - name='refresh_interval', + model_name="m3uaccount", + name="refresh_interval", field=models.IntegerField(default=24), ), migrations.AddField( - model_name='m3uaccount', - name='refresh_task', - field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to='django_celery_beat.periodictask'), + model_name="m3uaccount", + name="refresh_task", + field=models.ForeignKey( + blank=True, + null=True, + on_delete=django.db.models.deletion.SET_NULL, + to="django_celery_beat.periodictask", + ), ), ] diff --git a/apps/m3u/models.py b/apps/m3u/models.py index a297fd18..94ec88fc 100644 --- a/apps/m3u/models.py +++ b/apps/m3u/models.py @@ -7,7 +7,8 @@ from apps.channels.models import StreamProfile from django_celery_beat.models import PeriodicTask from core.models import CoreSettings, UserAgent -CUSTOM_M3U_ACCOUNT_NAME="custom" +CUSTOM_M3U_ACCOUNT_NAME = "custom" + class M3UAccount(models.Model): class Types(models.TextChoices): @@ -25,72 +26,61 @@ class M3UAccount(models.Model): """Represents an M3U Account for IPTV streams.""" name = models.CharField( - max_length=255, - unique=True, - help_text="Unique name for this M3U account" + max_length=255, unique=True, help_text="Unique name for this M3U account" ) server_url = models.URLField( blank=True, null=True, - help_text="The base URL of the M3U server (optional if a file is uploaded)" - ) - file_path = models.CharField( - max_length=255, - blank=True, - null=True + help_text="The base URL of the M3U server (optional if a file is uploaded)", ) + file_path = models.CharField(max_length=255, blank=True, null=True) server_group = models.ForeignKey( - 'ServerGroup', + "ServerGroup", on_delete=models.SET_NULL, null=True, blank=True, - related_name='m3u_accounts', - help_text="The server group this M3U account belongs to" + related_name="m3u_accounts", + help_text="The server group this M3U account belongs to", ) max_streams = models.PositiveIntegerField( - default=0, - help_text="Maximum number of concurrent streams (0 for unlimited)" + default=0, help_text="Maximum number of concurrent streams (0 for unlimited)" ) is_active = models.BooleanField( - default=True, - help_text="Set to false to deactivate this M3U account" + default=True, help_text="Set to false to deactivate this M3U account" ) created_at = models.DateTimeField( - auto_now_add=True, - help_text="Time when this account was created" + auto_now_add=True, help_text="Time 
when this account was created" ) updated_at = models.DateTimeField( - null=True, blank=True, - help_text="Time when this account was last successfully refreshed" + null=True, + blank=True, + help_text="Time when this account was last successfully refreshed", ) status = models.CharField( - max_length=20, - choices=Status.choices, - default=Status.IDLE + max_length=20, choices=Status.choices, default=Status.IDLE ) last_message = models.TextField( null=True, blank=True, - help_text="Last status message, including success results or error information" + help_text="Last status message, including success results or error information", ) user_agent = models.ForeignKey( - 'core.UserAgent', + "core.UserAgent", on_delete=models.SET_NULL, null=True, blank=True, - related_name='m3u_accounts', - help_text="The User-Agent associated with this M3U account." + related_name="m3u_accounts", + help_text="The User-Agent associated with this M3U account.", ) locked = models.BooleanField( - default=False, - help_text="Protected - can't be deleted or modified" + default=False, help_text="Protected - can't be deleted or modified" ) stream_profile = models.ForeignKey( StreamProfile, on_delete=models.SET_NULL, null=True, blank=True, - related_name='m3u_accounts' + related_name="m3u_accounts", ) account_type = models.CharField(choices=Types.choices, default=Types.STADNARD) username = models.CharField(max_length=255, null=True, blank=True) @@ -102,7 +92,7 @@ class M3UAccount(models.Model): ) stale_stream_days = models.PositiveIntegerField( default=7, - help_text="Number of days after which a stream will be removed if not seen in the M3U source." + help_text="Number of days after which a stream will be removed if not seen in the M3U source.", ) def __str__(self): @@ -134,17 +124,19 @@ class M3UAccount(models.Model): def get_user_agent(self): user_agent = self.user_agent if not user_agent: - user_agent = UserAgent.objects.get(id=CoreSettings.get_default_user_agent_id()) + user_agent = UserAgent.objects.get( + id=CoreSettings.get_default_user_agent_id() + ) return user_agent def save(self, *args, **kwargs): # Prevent auto_now behavior by handling updated_at manually - if 'update_fields' in kwargs and 'updated_at' not in kwargs['update_fields']: + if "update_fields" in kwargs and "updated_at" not in kwargs["update_fields"]: # Don't modify updated_at for regular updates - kwargs.setdefault('update_fields', []) - if 'updated_at' in kwargs['update_fields']: - kwargs['update_fields'].remove('updated_at') + kwargs.setdefault("update_fields", []) + if "updated_at" in kwargs["update_fields"]: + kwargs["update_fields"].remove("updated_at") super().save(*args, **kwargs) # def get_channel_groups(self): @@ -158,35 +150,36 @@ class M3UAccount(models.Model): # """Return all streams linked to this account with enabled ChannelGroups.""" # return self.streams.filter(channel_group__in=ChannelGroup.objects.filter(m3u_account__enabled=True)) + class M3UFilter(models.Model): """Defines filters for M3U accounts based on stream name or group title.""" + FILTER_TYPE_CHOICES = ( - ('group', 'Group Title'), - ('name', 'Stream Name'), + ("group", "Group Title"), + ("name", "Stream Name"), ) m3u_account = models.ForeignKey( M3UAccount, on_delete=models.CASCADE, - related_name='filters', - help_text="The M3U account this filter is applied to." 
+ related_name="filters", + help_text="The M3U account this filter is applied to.", ) filter_type = models.CharField( max_length=50, choices=FILTER_TYPE_CHOICES, - default='group', - help_text="Filter based on either group title or stream name." + default="group", + help_text="Filter based on either group title or stream name.", ) regex_pattern = models.CharField( - max_length=200, - help_text="A regex pattern to match streams or groups." + max_length=200, help_text="A regex pattern to match streams or groups." ) exclude = models.BooleanField( default=True, - help_text="If True, matching items are excluded; if False, only matches are included." + help_text="If True, matching items are excluded; if False, only matches are included.", ) def applies_to(self, stream_name, group_name): - target = group_name if self.filter_type == 'group' else stream_name + target = group_name if self.filter_type == "group" else stream_name return bool(re.search(self.regex_pattern, target, re.IGNORECASE)) def clean(self): @@ -196,7 +189,9 @@ class M3UFilter(models.Model): raise ValidationError(f"Invalid regex pattern: {self.regex_pattern}") def __str__(self): - filter_type_display = dict(self.FILTER_TYPE_CHOICES).get(self.filter_type, 'Unknown') + filter_type_display = dict(self.FILTER_TYPE_CHOICES).get( + self.filter_type, "Unknown" + ) exclude_status = "Exclude" if self.exclude else "Include" return f"[{self.m3u_account.name}] {filter_type_display}: {self.regex_pattern} ({exclude_status})" @@ -222,40 +217,38 @@ class M3UFilter(models.Model): class ServerGroup(models.Model): """Represents a logical grouping of servers or channels.""" + name = models.CharField( - max_length=100, - unique=True, - help_text="Unique name for this server group." + max_length=100, unique=True, help_text="Unique name for this server group." ) def __str__(self): return self.name + from django.db import models + class M3UAccountProfile(models.Model): """Represents a profile associated with an M3U Account.""" + m3u_account = models.ForeignKey( - 'M3UAccount', + "M3UAccount", on_delete=models.CASCADE, - related_name='profiles', - help_text="The M3U account this profile belongs to." 
+ related_name="profiles", + help_text="The M3U account this profile belongs to.", ) name = models.CharField( - max_length=255, - help_text="Name for the M3U account profile" + max_length=255, help_text="Name for the M3U account profile" ) is_default = models.BooleanField( - default=False, - help_text="Set to false to deactivate this profile" + default=False, help_text="Set to false to deactivate this profile" ) max_streams = models.PositiveIntegerField( - default=0, - help_text="Maximum number of concurrent streams (0 for unlimited)" + default=0, help_text="Maximum number of concurrent streams (0 for unlimited)" ) is_active = models.BooleanField( - default=True, - help_text="Set to false to deactivate this profile" + default=True, help_text="Set to false to deactivate this profile" ) search_pattern = models.CharField( max_length=255, @@ -267,19 +260,22 @@ class M3UAccountProfile(models.Model): class Meta: constraints = [ - models.UniqueConstraint(fields=['m3u_account', 'name'], name='unique_account_name') + models.UniqueConstraint( + fields=["m3u_account", "name"], name="unique_account_name" + ) ] def __str__(self): return f"{self.name} ({self.m3u_account.name})" + @receiver(models.signals.post_save, sender=M3UAccount) def create_profile_for_m3u_account(sender, instance, created, **kwargs): """Automatically create an M3UAccountProfile when M3UAccount is created.""" if created: M3UAccountProfile.objects.create( m3u_account=instance, - name=f'{instance.name} Default', + name=f"{instance.name} Default", max_streams=instance.max_streams, is_default=True, is_active=True, @@ -292,6 +288,5 @@ def create_profile_for_m3u_account(sender, instance, created, **kwargs): is_default=True, ) - profile.max_streams = instance.max_streams profile.save() diff --git a/apps/m3u/serializers.py b/apps/m3u/serializers.py index 038af628..7394f00b 100644 --- a/apps/m3u/serializers.py +++ b/apps/m3u/serializers.py @@ -3,33 +3,45 @@ from rest_framework.response import Response from .models import M3UAccount, M3UFilter, ServerGroup, M3UAccountProfile from core.models import UserAgent from apps.channels.models import ChannelGroup, ChannelGroupM3UAccount -from apps.channels.serializers import ChannelGroupM3UAccountSerializer, ChannelGroupSerializer +from apps.channels.serializers import ( + ChannelGroupM3UAccountSerializer, + ChannelGroupSerializer, +) import logging logger = logging.getLogger(__name__) + class M3UFilterSerializer(serializers.ModelSerializer): """Serializer for M3U Filters""" - channel_groups = ChannelGroupM3UAccountSerializer(source='m3u_account', many=True) + + channel_groups = ChannelGroupM3UAccountSerializer(source="m3u_account", many=True) class Meta: model = M3UFilter - fields = ['id', 'filter_type', 'regex_pattern', 'exclude', 'channel_groups'] + fields = ["id", "filter_type", "regex_pattern", "exclude", "channel_groups"] -from rest_framework import serializers -from .models import M3UAccountProfile class M3UAccountProfileSerializer(serializers.ModelSerializer): class Meta: model = M3UAccountProfile - fields = ['id', 'name', 'max_streams', 'is_active', 'is_default', 'current_viewers', 'search_pattern', 'replace_pattern'] - read_only_fields = ['id'] + fields = [ + "id", + "name", + "max_streams", + "is_active", + "is_default", + "current_viewers", + "search_pattern", + "replace_pattern", + ] + read_only_fields = ["id"] def create(self, validated_data): - m3u_account = self.context.get('m3u_account') + m3u_account = self.context.get("m3u_account") # Use the m3u_account when creating the profile - 
validated_data['m3u_account_id'] = m3u_account.id + validated_data["m3u_account_id"] = m3u_account.id return super().create(validated_data) @@ -43,12 +55,14 @@ class M3UAccountProfileSerializer(serializers.ModelSerializer): if instance.is_default: return Response( {"error": "Default profiles cannot be deleted."}, - status=status.HTTP_400_BAD_REQUEST + status=status.HTTP_400_BAD_REQUEST, ) return super().destroy(request, *args, **kwargs) + class M3UAccountSerializer(serializers.ModelSerializer): """Serializer for M3U Account""" + filters = M3UFilterSerializer(many=True, read_only=True) # Include user_agent as a mandatory field using its primary key. user_agent = serializers.PrimaryKeyRelatedField( @@ -57,28 +71,48 @@ class M3UAccountSerializer(serializers.ModelSerializer): allow_null=True, ) profiles = M3UAccountProfileSerializer(many=True, read_only=True) - read_only_fields = ['locked', 'created_at', 'updated_at'] + read_only_fields = ["locked", "created_at", "updated_at"] # channel_groups = serializers.SerializerMethodField() - channel_groups = ChannelGroupM3UAccountSerializer(source='channel_group', many=True, required=False) + channel_groups = ChannelGroupM3UAccountSerializer( + source="channel_group", many=True, required=False + ) class Meta: model = M3UAccount fields = [ - 'id', 'name', 'server_url', 'file_path', 'server_group', - 'max_streams', 'is_active', 'created_at', 'updated_at', 'filters', 'user_agent', 'profiles', 'locked', - 'channel_groups', 'refresh_interval', 'custom_properties', 'account_type', 'username', 'password', 'stale_stream_days', - 'status', 'last_message', + "id", + "name", + "server_url", + "file_path", + "server_group", + "max_streams", + "is_active", + "created_at", + "updated_at", + "filters", + "user_agent", + "profiles", + "locked", + "channel_groups", + "refresh_interval", + "custom_properties", + "account_type", + "username", + "password", + "stale_stream_days", + "status", + "last_message", ] extra_kwargs = { - 'password': { - 'required': False, - 'allow_blank': True, + "password": { + "required": False, + "allow_blank": True, }, } def update(self, instance, validated_data): # Pop out channel group memberships so we can handle them manually - channel_group_data = validated_data.pop('channel_group', []) + channel_group_data = validated_data.pop("channel_group", []) # First, update the M3UAccount itself for attr, value in validated_data.items(): @@ -88,13 +122,12 @@ class M3UAccountSerializer(serializers.ModelSerializer): # Prepare a list of memberships to update memberships_to_update = [] for group_data in channel_group_data: - group = group_data.get('channel_group') - enabled = group_data.get('enabled') + group = group_data.get("channel_group") + enabled = group_data.get("enabled") try: membership = ChannelGroupM3UAccount.objects.get( - m3u_account=instance, - channel_group=group + m3u_account=instance, channel_group=group ) membership.enabled = enabled memberships_to_update.append(membership) @@ -103,13 +136,16 @@ class M3UAccountSerializer(serializers.ModelSerializer): # Perform the bulk update if memberships_to_update: - ChannelGroupM3UAccount.objects.bulk_update(memberships_to_update, ['enabled']) + ChannelGroupM3UAccount.objects.bulk_update( + memberships_to_update, ["enabled"] + ) return instance + class ServerGroupSerializer(serializers.ModelSerializer): """Serializer for Server Group""" class Meta: model = ServerGroup - fields = ['id', 'name'] + fields = ["id", "name"] diff --git a/apps/m3u/tasks.py b/apps/m3u/tasks.py index ce46a2ec..d6e0755b 
100644 --- a/apps/m3u/tasks.py +++ b/apps/m3u/tasks.py @@ -172,6 +172,13 @@ def fetch_m3u_lines(account, use_cache=False): send_m3u_update(account.id, "downloading", 100, status="error", error=error_msg) return [], False +def get_case_insensitive_attr(attributes, key, default=""): + """Get attribute value using case-insensitive key lookup.""" + for attr_key, attr_value in attributes.items(): + if attr_key.lower() == key.lower(): + return attr_value + return default + def parse_extinf_line(line: str) -> dict: """ Parse an EXTINF line from an M3U file. @@ -193,7 +200,7 @@ def parse_extinf_line(line: str) -> dict: attributes_part, display_name = parts[0], parts[1].strip() attrs = dict(re.findall(r'([^\s]+)=["\']([^"\']+)["\']', attributes_part)) # Use tvg-name attribute if available; otherwise, use the display name. - name = attrs.get('tvg-name', display_name) + name = get_case_insensitive_attr(attrs, 'tvg-name', display_name) return { 'attributes': attrs, 'display_name': display_name, @@ -409,8 +416,8 @@ def process_m3u_batch(account_id, batch, groups, hash_keys): for stream_info in batch: try: name, url = stream_info["name"], stream_info["url"] - tvg_id, tvg_logo = stream_info["attributes"].get("tvg-id", ""), stream_info["attributes"].get("tvg-logo", "") - group_title = stream_info["attributes"].get("group-title", "Default Group") + tvg_id, tvg_logo = get_case_insensitive_attr(stream_info["attributes"], "tvg-id", ""), get_case_insensitive_attr(stream_info["attributes"], "tvg-logo", "") + group_title = get_case_insensitive_attr(stream_info["attributes"], "group-title", "Default Group") # Filter out disabled groups for this account if group_title not in groups: @@ -712,8 +719,9 @@ def refresh_m3u_groups(account_id, use_cache=False, full_refresh=False): extinf_count += 1 parsed = parse_extinf_line(line) if parsed: - if "group-title" in parsed["attributes"]: - group_name = parsed["attributes"]["group-title"] + group_title_attr = get_case_insensitive_attr(parsed["attributes"], "group-title", "") + if group_title_attr: + group_name = group_title_attr # Log new groups as they're discovered if group_name not in groups: logger.debug(f"Found new group for M3U account {account_id}: '{group_name}'") diff --git a/apps/output/urls.py b/apps/output/urls.py index 92774adb..8b9c4f3a 100644 --- a/apps/output/urls.py +++ b/apps/output/urls.py @@ -1,16 +1,14 @@ from django.urls import path, re_path, include -from .views import generate_m3u, generate_epg +from .views import m3u_endpoint, epg_endpoint, xc_get from core.views import stream_view -app_name = 'output' +app_name = "output" urlpatterns = [ # Allow `/m3u`, `/m3u/`, `/m3u/profile_name`, and `/m3u/profile_name/` - re_path(r'^m3u(?:/(?P[^/]+))?/?$', generate_m3u, name='generate_m3u'), - + re_path(r"^m3u(?:/(?P[^/]+))?/?$", m3u_endpoint, name="m3u_endpoint"), # Allow `/epg`, `/epg/`, `/epg/profile_name`, and `/epg/profile_name/` - re_path(r'^epg(?:/(?P[^/]+))?/?$', generate_epg, name='generate_epg'), - + re_path(r"^epg(?:/(?P[^/]+))?/?$", epg_endpoint, name="epg_endpoint"), # Allow both `/stream/` and `/stream//` - re_path(r'^stream/(?P[0-9a-fA-F\-]+)/?$', stream_view, name='stream'), + re_path(r"^stream/(?P[0-9a-fA-F\-]+)/?$", stream_view, name="stream"), ] diff --git a/apps/output/views.py b/apps/output/views.py index 2b18d185..577f93d0 100644 --- a/apps/output/views.py +++ b/apps/output/views.py @@ -1,18 +1,40 @@ -from django.http import HttpResponse, HttpResponseForbidden +import ipaddress +from django.http import HttpResponse, JsonResponse, Http404, 
HttpResponseForbidden +from rest_framework.response import Response from django.urls import reverse +from apps.channels.models import Channel, ChannelProfile, ChannelGroup from django.views.decorators.csrf import csrf_exempt from django.views.decorators.http import require_http_methods -from apps.channels.models import Channel, ChannelProfile from apps.epg.models import ProgramData +from apps.accounts.models import User +from core.models import CoreSettings, NETWORK_ACCESS +from dispatcharr.utils import network_access_allowed from django.utils import timezone -from django.views.decorators.csrf import csrf_exempt +from django.shortcuts import get_object_or_404 from datetime import datetime, timedelta import re import html # Add this import for XML escaping +from tzlocal import get_localzone +import time +import json +from urllib.parse import urlparse +import base64 + +def m3u_endpoint(request, profile_name=None, user=None): + if not network_access_allowed(request, "M3U_EPG"): + return JsonResponse({"error": "Forbidden"}, status=403) + + return generate_m3u(request, profile_name, user) + +def epg_endpoint(request, profile_name=None, user=None): + if not network_access_allowed(request, "M3U_EPG"): + return JsonResponse({"error": "Forbidden"}, status=403) + + return generate_epg(request, profile_name, user) @csrf_exempt @require_http_methods(["GET", "POST"]) -def generate_m3u(request, profile_name=None): +def generate_m3u(request, profile_name=None, user=None): """ Dynamically generate an M3U file from channels. The stream URL now points to the new stream_view that uses StreamProfile. @@ -22,6 +44,26 @@ def generate_m3u(request, profile_name=None): if request.method == "POST" and request.body: return HttpResponseForbidden("POST requests with content are not allowed") + if user is not None: + if user.user_level == 0: + filters = { + "channelprofilemembership__enabled": True, + "user_level__lte": user.user_level, + } + + if user.channel_profiles.count() != 0: + channel_profiles = user.channel_profiles.all() + filters["channelprofilemembership__channel_profile__in"] = ( + channel_profiles + ) + + channels = Channel.objects.filter(**filters).order_by("channel_number") + else: + channels = Channel.objects.filter(user_level__lte=user.user_level).order_by( + "channel_number" + ) + + if profile_name is not None: channel_profile = ChannelProfile.objects.get(name=profile_name) channels = Channel.objects.filter( @@ -29,7 +71,14 @@ def generate_m3u(request, profile_name=None): channelprofilemembership__enabled=True ).order_by('channel_number') else: - channels = Channel.objects.order_by('channel_number') + if profile_name is not None: + channel_profile = ChannelProfile.objects.get(name=profile_name) + channels = Channel.objects.filter( + channelprofilemembership__channel_profile=channel_profile, + channelprofilemembership__enabled=True, + ).order_by("channel_number") + else: + channels = Channel.objects.order_by("channel_number") # Check if the request wants to use direct logo URLs instead of cache use_cached_logos = request.GET.get('cachedlogos', 'true').lower() != 'false' @@ -82,7 +131,9 @@ def generate_m3u(request, profile_name=None): # create possible gracenote id insertion tvc_guide_stationid = "" if channel.tvc_guide_stationid: - tvc_guide_stationid = f'tvc-guide-stationid="{channel.tvc_guide_stationid}" ' + tvc_guide_stationid = ( + f'tvc-guide-stationid="{channel.tvc_guide_stationid}" ' + ) extinf_line = ( f'#EXTINF:-1 tvg-id="{tvg_id}" tvg-name="{tvg_name}" tvg-logo="{tvg_logo}" ' @@ -108,27 
+159,11 @@ def generate_m3u(request, profile_name=None): m3u_content += extinf_line + stream_url + "\n" response = HttpResponse(m3u_content, content_type="audio/x-mpegurl") - response['Content-Disposition'] = 'attachment; filename="channels.m3u"' + response["Content-Disposition"] = 'attachment; filename="channels.m3u"' return response -def generate_dummy_epg(channel_id, channel_name, xml_lines=None, num_days=1, program_length_hours=4): - """ - Generate dummy EPG programs for channels without EPG data. - Creates program blocks for a specified number of days. - - Args: - channel_id: The channel ID to use in the program entries - channel_name: The name of the channel to use in program titles - xml_lines: Optional list to append lines to, otherwise returns new list - num_days: Number of days to generate EPG data for (default: 1) - program_length_hours: Length of each program block in hours (default: 4) - - Returns: - List of XML lines for the dummy EPG entries - """ - if xml_lines is None: - xml_lines = [] +def generate_dummy_programs(channel_id, channel_name, num_days=1, program_length_hours=4): # Get current time rounded to hour now = timezone.now() now = now.replace(minute=0, second=0, microsecond=0) @@ -138,35 +173,37 @@ def generate_dummy_epg(channel_id, channel_name, xml_lines=None, num_days=1, pro (0, 4): [ f"Late Night with {channel_name} - Where insomniacs unite!", f"The 'Why Am I Still Awake?' Show on {channel_name}", - f"Counting Sheep - A {channel_name} production for the sleepless" + f"Counting Sheep - A {channel_name} production for the sleepless", ], (4, 8): [ f"Dawn Patrol - Rise and shine with {channel_name}!", f"Early Bird Special - Coffee not included", - f"Morning Zombies - Before coffee viewing on {channel_name}" + f"Morning Zombies - Before coffee viewing on {channel_name}", ], (8, 12): [ f"Mid-Morning Meetings - Pretend you're paying attention while watching {channel_name}", f"The 'I Should Be Working' Hour on {channel_name}", - f"Productivity Killer - {channel_name}'s daytime programming" + f"Productivity Killer - {channel_name}'s daytime programming", ], (12, 16): [ f"Lunchtime Laziness with {channel_name}", f"The Afternoon Slump - Brought to you by {channel_name}", - f"Post-Lunch Food Coma Theater on {channel_name}" + f"Post-Lunch Food Coma Theater on {channel_name}", ], (16, 20): [ f"Rush Hour - {channel_name}'s alternative to traffic", f"The 'What's For Dinner?' 
Debate on {channel_name}", - f"Evening Escapism - {channel_name}'s remedy for reality" + f"Evening Escapism - {channel_name}'s remedy for reality", ], (20, 24): [ f"Prime Time Placeholder - {channel_name}'s finest not-programming", f"The 'Netflix Was Too Complicated' Show on {channel_name}", - f"Family Argument Avoider - Courtesy of {channel_name}" - ] + f"Family Argument Avoider - Courtesy of {channel_name}", + ], } + programs = [] + # Create programs for each day for day in range(num_days): day_start = now + timedelta(days=day) @@ -192,19 +229,54 @@ def generate_dummy_epg(channel_id, channel_name, xml_lines=None, num_days=1, pro # Fallback description if somehow no range matches description = f"Placeholder program for {channel_name} - EPG data went on vacation" - # Format times in XMLTV format - start_str = start_time.strftime("%Y%m%d%H%M%S %z") - stop_str = end_time.strftime("%Y%m%d%H%M%S %z") + programs.append({ + "channel_id": channel_id, + "start_time": start_time, + "end_time": end_time, + "title": channel_name, + "description": description, + }) - # Create program entry with escaped channel name - xml_lines.append(f' ') - xml_lines.append(f' {html.escape(channel_name)}') - xml_lines.append(f' {html.escape(description)}') - xml_lines.append(f' ') + return programs + + +def generate_dummy_epg( + channel_id, channel_name, xml_lines=None, num_days=1, program_length_hours=4 +): + """ + Generate dummy EPG programs for channels without EPG data. + Creates program blocks for a specified number of days. + + Args: + channel_id: The channel ID to use in the program entries + channel_name: The name of the channel to use in program titles + xml_lines: Optional list to append lines to, otherwise returns new list + num_days: Number of days to generate EPG data for (default: 1) + program_length_hours: Length of each program block in hours (default: 4) + + Returns: + List of XML lines for the dummy EPG entries + """ + if xml_lines is None: + xml_lines = [] + + for program in generate_dummy_programs(channel_id, channel_name, num_days=1, program_length_hours=4): + # Format times in XMLTV format + start_str = program['start_time'].strftime("%Y%m%d%H%M%S %z") + stop_str = program['end_time'].strftime("%Y%m%d%H%M%S %z") + + # Create program entry with escaped channel name + xml_lines.append( + f' ' + ) + xml_lines.append(f" {html.escape(program['title'])}") + xml_lines.append(f" {html.escape(program['description'])}") + xml_lines.append(f" ") return xml_lines -def generate_epg(request, profile_name=None): + +def generate_epg(request, profile_name=None, user=None): """ Dynamically generate an XMLTV (EPG) file using the new EPGData/ProgramData models. 
Since the EPG data is stored independently of Channels, we group programmes @@ -213,16 +285,37 @@ def generate_epg(request, profile_name=None): """ xml_lines = [] xml_lines.append('') - xml_lines.append('') + xml_lines.append( + '' + ) - if profile_name is not None: - channel_profile = ChannelProfile.objects.get(name=profile_name) - channels = Channel.objects.filter( - channelprofilemembership__channel_profile=channel_profile, - channelprofilemembership__enabled=True - ) + if user is not None: + if user.user_level == 0: + filters = { + "channelprofilemembership__enabled": True, + "user_level__lte": user.user_level, + } + + if user.channel_profiles.count() != 0: + channel_profiles = user.channel_profiles.all() + filters["channelprofilemembership__channel_profile__in"] = ( + channel_profiles + ) + + channels = Channel.objects.filter(**filters).order_by("channel_number") + else: + channels = Channel.objects.filter(user_level__lte=user.user_level).order_by( + "channel_number" + ) else: - channels = Channel.objects.all() + if profile_name is not None: + channel_profile = ChannelProfile.objects.get(name=profile_name) + channels = Channel.objects.filter( + channelprofilemembership__channel_profile=channel_profile, + channelprofilemembership__enabled=True, + ) + else: + channels = Channel.objects.all() # Check if the request wants to use direct logo URLs instead of cache use_cached_logos = request.GET.get('cachedlogos', 'true').lower() != 'false' @@ -287,7 +380,7 @@ def generate_epg(request, profile_name=None): xml_lines.append(f' {html.escape(display_name)}') xml_lines.append(f' ') - xml_lines.append(' ') + xml_lines.append(" ") for channel in channels: # Use the same channel ID determination for program entries @@ -337,93 +430,436 @@ def generate_epg(request, profile_name=None): # Add subtitle if available if prog.sub_title: - xml_lines.append(f' {html.escape(prog.sub_title)}') + xml_lines.append( + f" {html.escape(prog.sub_title)}" + ) # Add description if available if prog.description: - xml_lines.append(f' {html.escape(prog.description)}') + xml_lines.append( + f" {html.escape(prog.description)}" + ) # Process custom properties if available if prog.custom_properties: try: import json + custom_data = json.loads(prog.custom_properties) # Add categories if available - if 'categories' in custom_data and custom_data['categories']: - for category in custom_data['categories']: - xml_lines.append(f' {html.escape(category)}') + if "categories" in custom_data and custom_data["categories"]: + for category in custom_data["categories"]: + xml_lines.append( + f" {html.escape(category)}" + ) # Handle episode numbering - multiple formats supported # Standard episode number if available - if 'episode' in custom_data: - xml_lines.append(f' E{custom_data["episode"]}') + if "episode" in custom_data: + xml_lines.append( + f' E{custom_data["episode"]}' + ) # Handle onscreen episode format (like S06E128) - if 'onscreen_episode' in custom_data: - xml_lines.append(f' {html.escape(custom_data["onscreen_episode"])}') + if "onscreen_episode" in custom_data: + xml_lines.append( + f' {html.escape(custom_data["onscreen_episode"])}' + ) # Handle dd_progid format if 'dd_progid' in custom_data: xml_lines.append(f' {html.escape(custom_data["dd_progid"])}') # Add season and episode numbers in xmltv_ns format if available - if 'season' in custom_data and 'episode' in custom_data: - season = int(custom_data['season']) - 1 if str(custom_data['season']).isdigit() else 0 - episode = int(custom_data['episode']) - 1 if 
str(custom_data['episode']).isdigit() else 0 - xml_lines.append(f' {season}.{episode}.') + if "season" in custom_data and "episode" in custom_data: + season = ( + int(custom_data["season"]) - 1 + if str(custom_data["season"]).isdigit() + else 0 + ) + episode = ( + int(custom_data["episode"]) - 1 + if str(custom_data["episode"]).isdigit() + else 0 + ) + xml_lines.append( + f' {season}.{episode}.' + ) # Add rating if available - if 'rating' in custom_data: - rating_system = custom_data.get('rating_system', 'TV Parental Guidelines') - xml_lines.append(f' ') - xml_lines.append(f' {html.escape(custom_data["rating"])}') - xml_lines.append(f' ') + if "rating" in custom_data: + rating_system = custom_data.get( + "rating_system", "TV Parental Guidelines" + ) + xml_lines.append( + f' ' + ) + xml_lines.append( + f' {html.escape(custom_data["rating"])}' + ) + xml_lines.append(f" ") # Add actors/directors/writers if available - if 'credits' in custom_data: - xml_lines.append(f' ') - for role, people in custom_data['credits'].items(): + if "credits" in custom_data: + xml_lines.append(f" ") + for role, people in custom_data["credits"].items(): if isinstance(people, list): for person in people: - xml_lines.append(f' <{role}>{html.escape(person)}') + xml_lines.append( + f" <{role}>{html.escape(person)}" + ) else: - xml_lines.append(f' <{role}>{html.escape(people)}') - xml_lines.append(f' ') + xml_lines.append( + f" <{role}>{html.escape(people)}" + ) + xml_lines.append(f" ") # Add program date/year if available - if 'year' in custom_data: - xml_lines.append(f' {html.escape(custom_data["year"])}') + if "year" in custom_data: + xml_lines.append( + f' {html.escape(custom_data["year"])}' + ) # Add country if available - if 'country' in custom_data: - xml_lines.append(f' {html.escape(custom_data["country"])}') + if "country" in custom_data: + xml_lines.append( + f' {html.escape(custom_data["country"])}' + ) # Add icon if available - if 'icon' in custom_data: - xml_lines.append(f' ') + if "icon" in custom_data: + xml_lines.append( + f' ' + ) # Add special flags as proper tags - if custom_data.get('previously_shown', False): - xml_lines.append(f' ') + if custom_data.get("previously_shown", False): + xml_lines.append(f" ") - if custom_data.get('premiere', False): - xml_lines.append(f' ') + if custom_data.get("premiere", False): + xml_lines.append(f" ") - if custom_data.get('new', False): - xml_lines.append(f' ') + if custom_data.get("new", False): + xml_lines.append(f" ") if custom_data.get('live', False): xml_lines.append(f' ') except Exception as e: - xml_lines.append(f' ') + xml_lines.append( + f" " + ) - xml_lines.append(' ') + xml_lines.append(" ") - xml_lines.append('') + xml_lines.append("") xml_content = "\n".join(xml_lines) response = HttpResponse(xml_content, content_type="application/xml") - response['Content-Disposition'] = 'attachment; filename="epg.xml"' + response["Content-Disposition"] = 'attachment; filename="epg.xml"' return response + + +def xc_get_user(request): + username = request.GET.get("username") + password = request.GET.get("password") + + if not username or not password: + return None + + user = get_object_or_404(User, username=username) + custom_properties = ( + json.loads(user.custom_properties) if user.custom_properties else {} + ) + + if "xc_password" not in custom_properties: + return None + + if custom_properties["xc_password"] != password: + return None + + return user + + +def xc_get_info(request, full=False): + if not network_access_allowed(request, 'XC_API'): + return 
JsonResponse({'error': 'Forbidden'}, status=403) + + user = xc_get_user(request) + + if user is None: + return JsonResponse({'error': 'Unauthorized'}, status=401) + + raw_host = request.get_host() + if ":" in raw_host: + hostname, port = raw_host.split(":", 1) + else: + hostname = raw_host + port = "443" if request.is_secure() else "80" + + info = { + "user_info": { + "username": request.GET.get("username"), + "password": request.GET.get("password"), + "message": "", + "auth": 1, + "status": "Active", + "exp_date": "1715062090", + "max_connections": "99", + "allowed_output_formats": [ + "ts", + ], + }, + "server_info": { + "url": hostname, + "server_protocol": request.scheme, + "port": port, + "timezone": get_localzone().key, + "timestamp_now": int(time.time()), + "time_now": datetime.now().strftime("%Y-%m-%d %H:%M:%S"), + "process": True, + }, + } + + if full == True: + info['categories'] = { + "series": [], + "movie": [], + "live": xc_get_live_categories(user), + } + info['available_channels'] = {channel["stream_id"]: channel for channel in xc_get_live_streams(request, user, request.GET.get("category_id"))} + + return info + + +def xc_player_api(request, full=False): + if not network_access_allowed(request, 'XC_API'): + return JsonResponse({'error': 'Forbidden'}, status=403) + + action = request.GET.get("action") + user = xc_get_user(request) + + if user is None: + return JsonResponse({'error': 'Unauthorized'}, status=401) + + server_info = xc_get_info(request) + + if not action: + return JsonResponse(server_info) + + if action == "get_live_categories": + return JsonResponse(xc_get_live_categories(user), safe=False) + if action == "get_live_streams": + return JsonResponse(xc_get_live_streams(request, user, request.GET.get("category_id")), safe=False) + if action == "get_short_epg": + return JsonResponse(xc_get_epg(request, user, short=True), safe=False) + if action == "get_simple_data_table": + return JsonResponse(xc_get_epg(request, user, short=False), safe=False) + + # Endpoints not implemented, but still provide a response + if action in [ + "get_vod_categories", + "get_vod_streams", + "get_series", + "get_series_categories", + "get_series_info", + "get_vod_info", + ]: + return JsonResponse([], safe=False) + + raise Http404() + + +def xc_panel_api(request): + if not network_access_allowed(request, 'XC_API'): + return JsonResponse({'error': 'Forbidden'}, status=403) + + user = xc_get_user(request) + + if user is None: + return JsonResponse({'error': 'Unauthorized'}, status=401) + + return JsonResponse(xc_get_info(request, True)) + + +def xc_get(request): + if not network_access_allowed(request, 'XC_API'): + return JsonResponse({'error': 'Forbidden'}, status=403) + + action = request.GET.get("action") + user = xc_get_user(request) + + if user is None: + return JsonResponse({'error': 'Unauthorized'}, status=401) + + return generate_m3u(request, None, user) + + +def xc_xmltv(request): + if not network_access_allowed(request, 'XC_API'): + return JsonResponse({'error': 'Forbidden'}, status=403) + + user = xc_get_user(request) + + if user is None: + return JsonResponse({'error': 'Unauthorized'}, status=401) + + return generate_epg(request, None, user) + + +def xc_get_live_categories(user): + response = [] + + if user.user_level == 0: + filters = { + "channels__channelprofilemembership__enabled": True, + "channels__user_level": 0, + } + + if user.channel_profiles.count() != 0: + # Only get data from active profile + channel_profiles = user.channel_profiles.all() + 
filters["channels__channelprofilemembership__channel_profile__in"] = ( + channel_profiles + ) + + channel_groups = ChannelGroup.objects.filter(**filters).distinct() + else: + channel_groups = ChannelGroup.objects.filter( + channels__isnull=False, channels__user_level__lte=user.user_level + ).distinct() + + for group in channel_groups: + response.append( + { + "category_id": group.id, + "category_name": group.name, + "parent_id": 0, + } + ) + + return response + + +def xc_get_live_streams(request, user, category_id=None): + streams = [] + + if user.user_level == 0: + filters = { + "channelprofilemembership__enabled": True, + "user_level__lte": user.user_level, + } + + if user.channel_profiles.count() > 0: + # Only get data from active profile + channel_profiles = user.channel_profiles.all() + filters["channelprofilemembership__channel_profile__in"] = channel_profiles + + if category_id is not None: + filters["channel_group__id"] = category_id + + channels = Channel.objects.filter(**filters) + else: + if not category_id: + channels = Channel.objects.filter(user_level__lte=user.user_level) + else: + channels = Channel.objects.filter( + channel_group__id=category_id, user_level__lte=user.user_level + ) + + for channel in channels: + streams.append( + { + "num": int(channel.channel_number) if channel.channel_number.is_integer() else channel.channel_number, + "name": channel.name, + "stream_type": "live", + "stream_id": channel.id, + "stream_icon": ( + None + if not channel.logo + else request.build_absolute_uri( + reverse("api:channels:logo-cache", args=[channel.logo.id]) + ) + ), + "epg_channel_id": int(channel.channel_number) if channel.channel_number.is_integer() else channel.channel_number, + "added": int(time.time()), # @TODO: make this the actual created date + "is_adult": 0, + "category_id": channel.channel_group.id, + "category_ids": [channel.channel_group.id], + "custom_sid": None, + "tv_archive": 0, + "direct_source": "", + "tv_archive_duration": 0, + } + ) + + return streams + + +def xc_get_epg(request, user, short=False): + channel_id = request.GET.get('stream_id') + if not channel_id: + raise Http404() + + channel = None + if user.user_level < 10: + filters = { + "id": channel_id, + "channelprofilemembership__enabled": True, + "user_level__lte": user.user_level, + } + + if user.channel_profiles.count() > 0: + channel_profiles = user.channel_profiles.all() + filters["channelprofilemembership__channel_profile__in"] = channel_profiles + + channel = get_object_or_404(Channel, **filters) + else: + channel = get_object_or_404(Channel, id=channel_id) + + if not channel: + raise Http404() + + limit = request.GET.get('limit', 4) + if channel.epg_data: + if short == False: + programs = channel.epg_data.programs.filter( + start_time__gte=timezone.now() + ).order_by('start_time') + else: + programs = channel.epg_data.programs.all().order_by('start_time')[:limit] + else: + programs = generate_dummy_programs(channel_id=channel_id, channel_name=channel.name) + + output = {"epg_listings": []} + for program in programs: + id = "0" + epg_id = "0" + title = program['title'] if isinstance(program, dict) else program.title + description = program['description'] if isinstance(program, dict) else program.description + + start = program["start_time"] if isinstance(program, dict) else program.start_time + end = program["end_time"] if isinstance(program, dict) else program.end_time + + program_output = { + "id": f"{id}", + "epg_id": f"{epg_id}", + "title": base64.b64encode(title.encode()).decode(), + 
"lang": "", + "start": start.strftime("%Y%m%d%H%M%S"), + "end": end.strftime("%Y%m%d%H%M%S"), + "description": base64.b64encode(description.encode()).decode(), + "channel_id": int(channel.channel_number) if channel.channel_number.is_integer() else channel.channel_number, + "start_timestamp": int(start.timestamp()), + "stop_timestamp": int(end.timestamp()), + "stream_id": f"{channel_id}", + } + + if short == False: + program_output["now_playing"] = 1 if start <= timezone.now() <= end else 0 + program_output["has_archive"] = "0" + + output['epg_listings'].append(program_output) + + return output diff --git a/apps/proxy/ts_proxy/stream_manager.py b/apps/proxy/ts_proxy/stream_manager.py index f8a7323b..a57f1384 100644 --- a/apps/proxy/ts_proxy/stream_manager.py +++ b/apps/proxy/ts_proxy/stream_manager.py @@ -386,90 +386,99 @@ class StreamManager: buffer = b"" last_stats_line = b"" - # Read in small chunks + # Read byte by byte for immediate detection while self.transcode_process and self.transcode_process.stderr: try: - chunk = self.transcode_process.stderr.read(256) # Smaller chunks for real-time processing - if not chunk: + # Read one byte at a time for immediate processing + byte = self.transcode_process.stderr.read(1) + if not byte: break - buffer += chunk + buffer += byte - # Look for stats updates (overwrite previous stats with \r) - if b'\r' in buffer and b"frame=" in buffer: - # Split on \r to handle overwriting stats - parts = buffer.split(b'\r') + # Check for frame= at the start of buffer (new stats line) + if buffer == b"frame=": + # We detected the start of a stats line, read until we get a complete line + # or hit a carriage return (which overwrites the previous stats) + while True: + next_byte = self.transcode_process.stderr.read(1) + if not next_byte: + break - # Process all parts except the last (which might be incomplete) - for i, part in enumerate(parts[:-1]): - if part.strip(): - if part.startswith(b"frame=") or b"frame=" in part: - # This is a stats line - keep it intact - try: - stats_text = part.decode('utf-8', errors='ignore').strip() - if stats_text and "frame=" in stats_text: - # Extract just the stats portion if there's other content - if "frame=" in stats_text: - frame_start = stats_text.find("frame=") - stats_text = stats_text[frame_start:] + buffer += next_byte - self._parse_ffmpeg_stats(stats_text) - self._log_stderr_content(stats_text) - last_stats_line = part - except Exception as e: - logger.debug(f"Error parsing stats line: {e}") - else: - # Regular content - process line by line - line_content = part - while b'\n' in line_content: - line, line_content = line_content.split(b'\n', 1) - if line.strip(): - self._log_stderr_content(line.decode('utf-8', errors='ignore')) + # Break on carriage return (stats overwrite) or newline + if next_byte in (b'\r', b'\n'): + break - # Handle remaining content without newline - if line_content.strip(): - self._log_stderr_content(line_content.decode('utf-8', errors='ignore')) + # Also break if we have enough data for a typical stats line + if len(buffer) > 200: # Typical stats line length + break - # Keep the last part as it might be incomplete - buffer = parts[-1] + # Process the stats line immediately + if buffer.strip(): + try: + stats_text = buffer.decode('utf-8', errors='ignore').strip() + if stats_text and "frame=" in stats_text: + self._parse_ffmpeg_stats(stats_text) + self._log_stderr_content(stats_text) + except Exception as e: + logger.debug(f"Error parsing immediate stats line: {e}") + + # Clear buffer after 
processing + buffer = b"" + continue # Handle regular line breaks for non-stats content - elif b'\n' in buffer: - while b'\n' in buffer: - line, buffer = buffer.split(b'\n', 1) - if line.strip(): - line_text = line.decode('utf-8', errors='ignore').strip() - if line_text and not line_text.startswith("frame="): - self._log_stderr_content(line_text) + elif byte == b'\n': + if buffer.strip(): + line_text = buffer.decode('utf-8', errors='ignore').strip() + if line_text and not line_text.startswith("frame="): + self._log_stderr_content(line_text) + buffer = b"" - # If we have a potential stats line in buffer without line breaks - elif b"frame=" in buffer and (b"speed=" in buffer or len(buffer) > 200): - # We likely have a complete or substantial stats line - try: - stats_text = buffer.decode('utf-8', errors='ignore').strip() - if "frame=" in stats_text: - # Extract just the stats portion - frame_start = stats_text.find("frame=") - stats_text = stats_text[frame_start:] + # Handle carriage returns (potential stats overwrite) + elif byte == b'\r': + # Check if this might be a stats line + if b"frame=" in buffer: + try: + stats_text = buffer.decode('utf-8', errors='ignore').strip() + if stats_text and "frame=" in stats_text: + self._parse_ffmpeg_stats(stats_text) + self._log_stderr_content(stats_text) + except Exception as e: + logger.debug(f"Error parsing stats on carriage return: {e}") + elif buffer.strip(): + # Regular content with carriage return + line_text = buffer.decode('utf-8', errors='ignore').strip() + if line_text: + self._log_stderr_content(line_text) + buffer = b"" - self._parse_ffmpeg_stats(stats_text) - self._log_stderr_content(stats_text) - buffer = b"" # Clear buffer after processing - except Exception as e: - logger.debug(f"Error parsing buffered stats: {e}") - - # Prevent buffer from growing too large - if len(buffer) > 4096: - # Try to preserve any potential stats line at the end - if b"frame=" in buffer[-1024:]: - buffer = buffer[-1024:] - else: - buffer = buffer[-512:] + # Prevent buffer from growing too large for non-stats content + elif len(buffer) > 1024 and b"frame=" not in buffer: + # Process whatever we have if it's not a stats line + if buffer.strip(): + line_text = buffer.decode('utf-8', errors='ignore').strip() + if line_text: + self._log_stderr_content(line_text) + buffer = b"" except Exception as e: - logger.error(f"Error reading stderr: {e}") + logger.error(f"Error reading stderr byte: {e}") break + # Process any remaining buffer content + if buffer.strip(): + try: + remaining_text = buffer.decode('utf-8', errors='ignore').strip() + if remaining_text: + if "frame=" in remaining_text: + self._parse_ffmpeg_stats(remaining_text) + self._log_stderr_content(remaining_text) + except Exception as e: + logger.debug(f"Error processing remaining buffer: {e}") + except Exception as e: # Catch any other exceptions in the thread to prevent crashes try: @@ -488,13 +497,18 @@ class StreamManager: content_lower = content.lower() # Check for stream info lines first and delegate to ChannelService + # Only parse INPUT streams (which have hex identifiers like [0x100]) not output streams if "stream #" in content_lower and ("video:" in content_lower or "audio:" in content_lower): - from .services.channel_service import ChannelService - if "video:" in content_lower: - ChannelService.parse_and_store_stream_info(self.channel_id, content, "video") - elif "audio:" in content_lower: - ChannelService.parse_and_store_stream_info(self.channel_id, content, "audio") - + # Check if this is an input 
stream by looking for the hex identifier pattern [0x...] + if "stream #0:" in content_lower and "[0x" in content_lower: + from .services.channel_service import ChannelService + if "video:" in content_lower: + ChannelService.parse_and_store_stream_info(self.channel_id, content, "video") + elif "audio:" in content_lower: + ChannelService.parse_and_store_stream_info(self.channel_id, content, "audio") + else: + # This is likely an output stream (no hex identifier), don't parse it + logger.debug(f"Skipping output stream info: {content}") # Determine log level based on content if any(keyword in content_lower for keyword in ['error', 'failed', 'cannot', 'invalid', 'corrupt']): logger.error(f"FFmpeg stderr: {content}") diff --git a/apps/proxy/ts_proxy/url_utils.py b/apps/proxy/ts_proxy/url_utils.py index dbd3c5dd..a60e0ba3 100644 --- a/apps/proxy/ts_proxy/url_utils.py +++ b/apps/proxy/ts_proxy/url_utils.py @@ -17,7 +17,6 @@ logger = get_logger() def get_stream_object(id: str): try: - uuid_obj = UUID(id, version=4) logger.info(f"Fetching channel ID {id}") return get_object_or_404(Channel, uuid=id) except: diff --git a/apps/proxy/ts_proxy/views.py b/apps/proxy/ts_proxy/views.py index b90e1585..8d46df5e 100644 --- a/apps/proxy/ts_proxy/views.py +++ b/apps/proxy/ts_proxy/views.py @@ -3,6 +3,7 @@ import threading import time import random import re +import pathlib from django.http import StreamingHttpResponse, JsonResponse, HttpResponseRedirect from django.views.decorators.csrf import csrf_exempt from django.shortcuts import get_object_or_404 @@ -15,22 +16,38 @@ from .redis_keys import RedisKeys import logging from apps.channels.models import Channel, Stream from apps.m3u.models import M3UAccount, M3UAccountProfile +from apps.accounts.models import User from core.models import UserAgent, CoreSettings, PROXY_PROFILE_NAME from rest_framework.decorators import api_view, permission_classes -from rest_framework.permissions import IsAuthenticated +from rest_framework.response import Response +from apps.accounts.permissions import ( + IsAdmin, + permission_classes_by_method, + permission_classes_by_action, +) from .constants import ChannelState, EventType, StreamType, ChannelMetadataField from .config_helper import ConfigHelper from .services.channel_service import ChannelService -from .url_utils import generate_stream_url, transform_url, get_stream_info_for_switch, get_stream_object, get_alternate_streams +from .url_utils import ( + generate_stream_url, + transform_url, + get_stream_info_for_switch, + get_stream_object, + get_alternate_streams, +) from .utils import get_logger from uuid import UUID import gevent +from dispatcharr.utils import network_access_allowed logger = get_logger() -@api_view(['GET']) +@api_view(["GET"]) def stream_ts(request, channel_id): + if not network_access_allowed(request, "STREAMS"): + return JsonResponse({"error": "Forbidden"}, status=403) + """Stream TS data to client with immediate response and keep-alive packets during initialization""" channel = get_stream_object(channel_id) @@ -44,10 +61,12 @@ def stream_ts(request, channel_id): logger.info(f"[{client_id}] Requested stream for channel {channel_id}") # Extract client user agent early - for header in ['HTTP_USER_AGENT', 'User-Agent', 'user-agent']: - if (header in request.META): + for header in ["HTTP_USER_AGENT", "User-Agent", "user-agent"]: + if header in request.META: client_user_agent = request.META[header] - logger.debug(f"[{client_id}] Client connected with user agent: {client_user_agent}") + logger.debug( + 
f"[{client_id}] Client connected with user agent: {client_user_agent}" + ) break # Check if we need to reinitialize the channel @@ -60,38 +79,58 @@ def stream_ts(request, channel_id): metadata_key = RedisKeys.channel_metadata(channel_id) if proxy_server.redis_client.exists(metadata_key): metadata = proxy_server.redis_client.hgetall(metadata_key) - state_field = ChannelMetadataField.STATE.encode('utf-8') + state_field = ChannelMetadataField.STATE.encode("utf-8") if state_field in metadata: - channel_state = metadata[state_field].decode('utf-8') + channel_state = metadata[state_field].decode("utf-8") # IMPROVED: Check for *any* state that indicates initialization is in progress - active_states = [ChannelState.INITIALIZING, ChannelState.CONNECTING, ChannelState.WAITING_FOR_CLIENTS, ChannelState.ACTIVE] + active_states = [ + ChannelState.INITIALIZING, + ChannelState.CONNECTING, + ChannelState.WAITING_FOR_CLIENTS, + ChannelState.ACTIVE, + ] if channel_state in active_states: # Channel is being initialized or already active - no need for reinitialization needs_initialization = False - logger.debug(f"[{client_id}] Channel {channel_id} already in state {channel_state}, skipping initialization") + logger.debug( + f"[{client_id}] Channel {channel_id} already in state {channel_state}, skipping initialization" + ) # Special handling for initializing/connecting states - if channel_state in [ChannelState.INITIALIZING, ChannelState.CONNECTING]: + if channel_state in [ + ChannelState.INITIALIZING, + ChannelState.CONNECTING, + ]: channel_initializing = True - logger.debug(f"[{client_id}] Channel {channel_id} is still initializing, client will wait for completion") + logger.debug( + f"[{client_id}] Channel {channel_id} is still initializing, client will wait for completion" + ) else: # Only check for owner if channel is in a valid state - owner_field = ChannelMetadataField.OWNER.encode('utf-8') + owner_field = ChannelMetadataField.OWNER.encode("utf-8") if owner_field in metadata: - owner = metadata[owner_field].decode('utf-8') + owner = metadata[owner_field].decode("utf-8") owner_heartbeat_key = f"ts_proxy:worker:{owner}:heartbeat" if proxy_server.redis_client.exists(owner_heartbeat_key): # Owner is still active, so we don't need to reinitialize needs_initialization = False - logger.debug(f"[{client_id}] Channel {channel_id} has active owner {owner}") + logger.debug( + f"[{client_id}] Channel {channel_id} has active owner {owner}" + ) # Start initialization if needed if needs_initialization or not proxy_server.check_if_channel_exists(channel_id): logger.info(f"[{client_id}] Starting channel {channel_id} initialization") # Force cleanup of any previous instance if in terminal state - if channel_state in [ChannelState.ERROR, ChannelState.STOPPING, ChannelState.STOPPED]: - logger.warning(f"[{client_id}] Channel {channel_id} in state {channel_state}, forcing cleanup") + if channel_state in [ + ChannelState.ERROR, + ChannelState.STOPPING, + ChannelState.STOPPED, + ]: + logger.warning( + f"[{client_id}] Channel {channel_id} in state {channel_state}, forcing cleanup" + ) proxy_server.stop_channel(channel_id) # Use max retry attempts and connection timeout from config @@ -107,67 +146,90 @@ def stream_ts(request, channel_id): # Try to get a stream with configured retries for attempt in range(max_retries): - stream_url, stream_user_agent, transcode, profile_value = generate_stream_url(channel_id) + stream_url, stream_user_agent, transcode, profile_value = ( + generate_stream_url(channel_id) + ) if stream_url is not 
None: - logger.info(f"[{client_id}] Successfully obtained stream for channel {channel_id}") + logger.info( + f"[{client_id}] Successfully obtained stream for channel {channel_id}" + ) break # If we failed because there are no streams assigned, don't retry _, _, error_reason = channel.get_stream() - if error_reason and 'maximum connection limits' not in error_reason: - logger.warning(f"[{client_id}] Can't retry - error not related to connection limits: {error_reason}") + if error_reason and "maximum connection limits" not in error_reason: + logger.warning( + f"[{client_id}] Can't retry - error not related to connection limits: {error_reason}" + ) break # Don't exceed the overall connection timeout if time.time() - wait_start_time > retry_timeout: - logger.warning(f"[{client_id}] Connection wait timeout exceeded ({retry_timeout}s)") + logger.warning( + f"[{client_id}] Connection wait timeout exceeded ({retry_timeout}s)" + ) break # Wait before retrying (using exponential backoff with a cap) - wait_time = min(0.5 * (2 ** attempt), 2.0) # Caps at 2 seconds - logger.info(f"[{client_id}] Waiting {wait_time:.1f}s for a connection to become available (attempt {attempt+1}/{max_retries})") - gevent.sleep(wait_time) # FIXED: Using gevent.sleep instead of time.sleep + wait_time = min(0.5 * (2**attempt), 2.0) # Caps at 2 seconds + logger.info( + f"[{client_id}] Waiting {wait_time:.1f}s for a connection to become available (attempt {attempt+1}/{max_retries})" + ) + gevent.sleep( + wait_time + ) # FIXED: Using gevent.sleep instead of time.sleep if stream_url is None: # Make sure to release any stream locks that might have been acquired - if hasattr(channel, 'streams') and channel.streams.exists(): + if hasattr(channel, "streams") and channel.streams.exists(): for stream in channel.streams.all(): try: stream.release_stream() - logger.info(f"[{client_id}] Released stream {stream.id} for channel {channel_id}") + logger.info( + f"[{client_id}] Released stream {stream.id} for channel {channel_id}" + ) except Exception as e: logger.error(f"[{client_id}] Error releasing stream: {e}") # Get the specific error message if available wait_duration = f"{int(time.time() - wait_start_time)}s" - error_msg = error_reason if error_reason else 'No available streams for this channel' - return JsonResponse({ - 'error': error_msg, - 'waited': wait_duration - }, status=503) # 503 Service Unavailable is appropriate here + error_msg = ( + error_reason + if error_reason + else "No available streams for this channel" + ) + return JsonResponse( + {"error": error_msg, "waited": wait_duration}, status=503 + ) # 503 Service Unavailable is appropriate here # Get the stream ID from the channel stream_id, m3u_profile_id, _ = channel.get_stream() - logger.info(f"Channel {channel_id} using stream ID {stream_id}, m3u account profile ID {m3u_profile_id}") + logger.info( + f"Channel {channel_id} using stream ID {stream_id}, m3u account profile ID {m3u_profile_id}" + ) # Generate transcode command if needed stream_profile = channel.get_stream_profile() if stream_profile.is_redirect(): # Validate the stream URL before redirecting - from .url_utils import validate_stream_url, get_alternate_streams, get_stream_info_for_switch + from .url_utils import ( + validate_stream_url, + get_alternate_streams, + get_stream_info_for_switch, + ) # Try initial URL logger.info(f"[{client_id}] Validating redirect URL: {stream_url}") is_valid, final_url, status_code, message = validate_stream_url( - stream_url, - user_agent=stream_user_agent, - timeout=(5, 5) + 
stream_url, user_agent=stream_user_agent, timeout=(5, 5) ) # If first URL doesn't validate, try alternates if not is_valid: - logger.warning(f"[{client_id}] Primary stream URL failed validation: {message}") + logger.warning( + f"[{client_id}] Primary stream URL failed validation: {message}" + ) # Track tried streams to avoid loops tried_streams = {stream_id} @@ -177,49 +239,71 @@ def stream_ts(request, channel_id): # Try each alternate until one works for alt in alternates: - if alt['stream_id'] in tried_streams: + if alt["stream_id"] in tried_streams: continue - tried_streams.add(alt['stream_id']) + tried_streams.add(alt["stream_id"]) # Get stream info - alt_info = get_stream_info_for_switch(channel_id, alt['stream_id']) - if 'error' in alt_info: - logger.warning(f"[{client_id}] Error getting alternate stream info: {alt_info['error']}") + alt_info = get_stream_info_for_switch( + channel_id, alt["stream_id"] + ) + if "error" in alt_info: + logger.warning( + f"[{client_id}] Error getting alternate stream info: {alt_info['error']}" + ) continue # Validate the alternate URL - logger.info(f"[{client_id}] Trying alternate stream #{alt['stream_id']}: {alt_info['url']}") + logger.info( + f"[{client_id}] Trying alternate stream #{alt['stream_id']}: {alt_info['url']}" + ) is_valid, final_url, status_code, message = validate_stream_url( - alt_info['url'], - user_agent=alt_info['user_agent'], - timeout=(5, 5) + alt_info["url"], + user_agent=alt_info["user_agent"], + timeout=(5, 5), ) if is_valid: - logger.info(f"[{client_id}] Alternate stream #{alt['stream_id']} validated successfully") + logger.info( + f"[{client_id}] Alternate stream #{alt['stream_id']} validated successfully" + ) break else: - logger.warning(f"[{client_id}] Alternate stream #{alt['stream_id']} failed validation: {message}") + logger.warning( + f"[{client_id}] Alternate stream #{alt['stream_id']} failed validation: {message}" + ) # Release stream lock before redirecting channel.release_stream() # Final decision based on validation results if is_valid: - logger.info(f"[{client_id}] Redirecting to validated URL: {final_url} ({message})") + logger.info( + f"[{client_id}] Redirecting to validated URL: {final_url} ({message})" + ) return HttpResponseRedirect(final_url) else: - logger.error(f"[{client_id}] All available redirect URLs failed validation") - return JsonResponse({ - 'error': 'All available streams failed validation' - }, status=502) # 502 Bad Gateway + logger.error( + f"[{client_id}] All available redirect URLs failed validation" + ) + return JsonResponse( + {"error": "All available streams failed validation"}, status=502 + ) # 502 Bad Gateway # Initialize channel with the stream's user agent (not the client's) success = ChannelService.initialize_channel( - channel_id, stream_url, stream_user_agent, transcode, profile_value, stream_id, m3u_profile_id + channel_id, + stream_url, + stream_user_agent, + transcode, + profile_value, + stream_id, + m3u_profile_id, ) if not success: - return JsonResponse({'error': 'Failed to initialize channel'}, status=500) + return JsonResponse( + {"error": "Failed to initialize channel"}, status=500 + ) # If we're the owner, wait for connection to establish if proxy_server.am_i_owner(channel_id): @@ -230,7 +314,9 @@ def stream_ts(request, channel_id): while not manager.connected: if time.time() - wait_start > timeout: proxy_server.stop_channel(channel_id) - return JsonResponse({'error': 'Connection timeout'}, status=504) + return JsonResponse( + {"error": "Connection timeout"}, status=504 + ) # 
Check if this manager should keep retrying or stop if not manager.should_retry(): @@ -240,41 +326,68 @@ def stream_ts(request, channel_id): if proxy_server.redis_client: try: - state_bytes = proxy_server.redis_client.hget(metadata_key, ChannelMetadataField.STATE) + state_bytes = proxy_server.redis_client.hget( + metadata_key, ChannelMetadataField.STATE + ) if state_bytes: - current_state = state_bytes.decode('utf-8') - logger.debug(f"[{client_id}] Current state of channel {channel_id}: {current_state}") + current_state = state_bytes.decode("utf-8") + logger.debug( + f"[{client_id}] Current state of channel {channel_id}: {current_state}" + ) except Exception as e: - logger.warning(f"[{client_id}] Error getting channel state: {e}") + logger.warning( + f"[{client_id}] Error getting channel state: {e}" + ) # Allow normal transitional states to continue - if current_state in [ChannelState.INITIALIZING, ChannelState.CONNECTING]: - logger.info(f"[{client_id}] Channel {channel_id} is in {current_state} state, continuing to wait") + if current_state in [ + ChannelState.INITIALIZING, + ChannelState.CONNECTING, + ]: + logger.info( + f"[{client_id}] Channel {channel_id} is in {current_state} state, continuing to wait" + ) # Reset wait timer to allow the transition to complete wait_start = time.time() continue # Check if we're switching URLs - if hasattr(manager, 'url_switching') and manager.url_switching: - logger.info(f"[{client_id}] Stream manager is currently switching URLs for channel {channel_id}") + if ( + hasattr(manager, "url_switching") + and manager.url_switching + ): + logger.info( + f"[{client_id}] Stream manager is currently switching URLs for channel {channel_id}" + ) # Reset wait timer to give the switch a chance wait_start = time.time() continue # If we reach here, we've exhausted retries and the channel isn't in a valid transitional state - logger.warning(f"[{client_id}] Channel {channel_id} failed to connect and is not in transitional state") + logger.warning( + f"[{client_id}] Channel {channel_id} failed to connect and is not in transitional state" + ) proxy_server.stop_channel(channel_id) - return JsonResponse({'error': 'Failed to connect'}, status=502) + return JsonResponse( + {"error": "Failed to connect"}, status=502 + ) - gevent.sleep(0.1) # FIXED: Using gevent.sleep instead of time.sleep + gevent.sleep( + 0.1 + ) # FIXED: Using gevent.sleep instead of time.sleep logger.info(f"[{client_id}] Successfully initialized channel {channel_id}") channel_initializing = True # Register client - can do this regardless of initialization state # Create local resources if needed - if channel_id not in proxy_server.stream_buffers or channel_id not in proxy_server.client_managers: - logger.debug(f"[{client_id}] Channel {channel_id} exists in Redis but not initialized in this worker - initializing now") + if ( + channel_id not in proxy_server.stream_buffers + or channel_id not in proxy_server.client_managers + ): + logger.debug( + f"[{client_id}] Channel {channel_id} exists in Redis but not initialized in this worker - initializing now" + ) # Get URL from Redis metadata url = None @@ -282,32 +395,54 @@ def stream_ts(request, channel_id): if proxy_server.redis_client: metadata_key = RedisKeys.channel_metadata(channel_id) - url_bytes = proxy_server.redis_client.hget(metadata_key, ChannelMetadataField.URL) - ua_bytes = proxy_server.redis_client.hget(metadata_key, ChannelMetadataField.USER_AGENT) - profile_bytes = proxy_server.redis_client.hget(metadata_key, ChannelMetadataField.STREAM_PROFILE) + 
url_bytes = proxy_server.redis_client.hget( + metadata_key, ChannelMetadataField.URL + ) + ua_bytes = proxy_server.redis_client.hget( + metadata_key, ChannelMetadataField.USER_AGENT + ) + profile_bytes = proxy_server.redis_client.hget( + metadata_key, ChannelMetadataField.STREAM_PROFILE + ) if url_bytes: - url = url_bytes.decode('utf-8') + url = url_bytes.decode("utf-8") if ua_bytes: - stream_user_agent = ua_bytes.decode('utf-8') + stream_user_agent = ua_bytes.decode("utf-8") # Extract transcode setting from Redis if profile_bytes: - profile_str = profile_bytes.decode('utf-8') - use_transcode = (profile_str == PROXY_PROFILE_NAME or profile_str == 'None') - logger.debug(f"Using profile '{profile_str}' for channel {channel_id}, transcode={use_transcode}") + profile_str = profile_bytes.decode("utf-8") + use_transcode = ( + profile_str == PROXY_PROFILE_NAME or profile_str == "None" + ) + logger.debug( + f"Using profile '{profile_str}' for channel {channel_id}, transcode={use_transcode}" + ) else: # Default settings when profile not found in Redis - profile_str = 'None' # Default profile name - use_transcode = False # Default to direct streaming without transcoding - logger.debug(f"No profile found in Redis for channel {channel_id}, defaulting to transcode={use_transcode}") + profile_str = "None" # Default profile name + use_transcode = ( + False # Default to direct streaming without transcoding + ) + logger.debug( + f"No profile found in Redis for channel {channel_id}, defaulting to transcode={use_transcode}" + ) # Use client_user_agent as fallback if stream_user_agent is None - success = proxy_server.initialize_channel(url, channel_id, stream_user_agent or client_user_agent, use_transcode) + success = proxy_server.initialize_channel( + url, channel_id, stream_user_agent or client_user_agent, use_transcode + ) if not success: - logger.error(f"[{client_id}] Failed to initialize channel {channel_id} locally") - return JsonResponse({'error': 'Failed to initialize channel locally'}, status=500) + logger.error( + f"[{client_id}] Failed to initialize channel {channel_id} locally" + ) + return JsonResponse( + {"error": "Failed to initialize channel locally"}, status=500 + ) - logger.info(f"[{client_id}] Successfully initialized channel {channel_id} locally") + logger.info( + f"[{client_id}] Successfully initialized channel {channel_id} locally" + ) # Register client buffer = proxy_server.stream_buffers[channel_id] @@ -322,53 +457,99 @@ def stream_ts(request, channel_id): # Return the StreamingHttpResponse from the main function response = StreamingHttpResponse( - streaming_content=generate(), - content_type='video/mp2t' + streaming_content=generate(), content_type="video/mp2t" ) - response['Cache-Control'] = 'no-cache' + response["Cache-Control"] = "no-cache" return response except Exception as e: logger.error(f"Error in stream_ts: {e}", exc_info=True) - return JsonResponse({'error': str(e)}, status=500) + return JsonResponse({"error": str(e)}, status=500) + + +@api_view(["GET"]) +def stream_xc(request, username, password, channel_id): + user = get_object_or_404(User, username=username) + + extension = pathlib.Path(channel_id).suffix + channel_id = pathlib.Path(channel_id).stem + + custom_properties = ( + json.loads(user.custom_properties) if user.custom_properties else {} + ) + + if "xc_password" not in custom_properties: + return Response({"error": "Invalid credentials"}, status=401) + + if custom_properties["xc_password"] != password: + return Response({"error": "Invalid credentials"}, status=401) 
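+        # Illustrative XC request flow (hypothetical values): a player calls, e.g., +        # GET /live/alice/s3cret/42.ts (see the live/<str:username>/<str:password>/<str:channel_id> +        # routes in dispatcharr/urls.py). stream_xc resolves user "alice", compares +        # custom_properties["xc_password"] against "s3cret", then looks up channel 42 below, +        # filtered by the user's level and channel profile memberships.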
+ + print(f"Fetchin channel with ID: {channel_id}") + if user.user_level < 10: + filters = { + "id": int(channel_id), + "channelprofilemembership__enabled": True, + "user_level__lte": user.user_level, + } + + if user.channel_profiles.count() > 0: + channel_profiles = user.channel_profiles.all() + filters["channelprofilemembership__channel_profile__in"] = channel_profiles + + channel = Channel.objects.filter(**filters).distinct().first() + if not channel: + return JsonResponse({"error": "Not found"}, status=404) + else: + channel = get_object_or_404(Channel, id=channel_id) + + # @TODO: we've got the file 'type' via extension, support this when we support multiple outputs + return stream_ts(request._request, channel.uuid) + @csrf_exempt -@api_view(['POST']) -@permission_classes([IsAuthenticated]) +@api_view(["POST"]) +@permission_classes([IsAdmin]) def change_stream(request, channel_id): """Change stream URL for existing channel with enhanced diagnostics""" proxy_server = ProxyServer.get_instance() try: data = json.loads(request.body) - new_url = data.get('url') - user_agent = data.get('user_agent') - stream_id = data.get('stream_id') + new_url = data.get("url") + user_agent = data.get("user_agent") + stream_id = data.get("stream_id") # If stream_id is provided, get the URL and user_agent from it if stream_id: - logger.info(f"Stream ID {stream_id} provided, looking up stream info for channel {channel_id}") + logger.info( + f"Stream ID {stream_id} provided, looking up stream info for channel {channel_id}" + ) stream_info = get_stream_info_for_switch(channel_id, stream_id) - if 'error' in stream_info: - return JsonResponse({ - 'error': stream_info['error'], - 'stream_id': stream_id - }, status=404) + if "error" in stream_info: + return JsonResponse( + {"error": stream_info["error"], "stream_id": stream_id}, status=404 + ) # Use the info from the stream - new_url = stream_info['url'] - user_agent = stream_info['user_agent'] - m3u_profile_id = stream_info.get('m3u_profile_id') + new_url = stream_info["url"] + user_agent = stream_info["user_agent"] + m3u_profile_id = stream_info.get("m3u_profile_id") # Stream ID will be passed to change_stream_url later elif not new_url: - return JsonResponse({'error': 'Either url or stream_id must be provided'}, status=400) + return JsonResponse( + {"error": "Either url or stream_id must be provided"}, status=400 + ) - logger.info(f"Attempting to change stream for channel {channel_id} to {new_url}") + logger.info( + f"Attempting to change stream for channel {channel_id} to {new_url}" + ) # Use the service layer instead of direct implementation # Pass stream_id to ensure proper connection tracking - result = ChannelService.change_stream_url(channel_id, new_url, user_agent, stream_id, m3u_profile_id) + result = ChannelService.change_stream_url( + channel_id, new_url, user_agent, stream_id, m3u_profile_id + ) # Get the stream manager before updating URL stream_manager = proxy_server.stream_managers.get(channel_id) @@ -377,37 +558,43 @@ def change_stream(request, channel_id): if stream_manager: # Reset tried streams when manually switching URL via API stream_manager.tried_stream_ids = set() - logger.debug(f"Reset tried stream IDs for channel {channel_id} during manual stream change") + logger.debug( + f"Reset tried stream IDs for channel {channel_id} during manual stream change" + ) - if result.get('status') == 'error': - return JsonResponse({ - 'error': result.get('message', 'Unknown error'), - 'diagnostics': result.get('diagnostics', {}) - }, status=404) + if 
result.get("status") == "error": + return JsonResponse( + { + "error": result.get("message", "Unknown error"), + "diagnostics": result.get("diagnostics", {}), + }, + status=404, + ) # Format response based on whether it was a direct update or event-based response_data = { - 'message': 'Stream changed successfully', - 'channel': channel_id, - 'url': new_url, - 'owner': result.get('direct_update', False), - 'worker_id': proxy_server.worker_id + "message": "Stream changed successfully", + "channel": channel_id, + "url": new_url, + "owner": result.get("direct_update", False), + "worker_id": proxy_server.worker_id, } # Include stream_id in response if it was used if stream_id: - response_data['stream_id'] = stream_id + response_data["stream_id"] = stream_id return JsonResponse(response_data) except json.JSONDecodeError: - return JsonResponse({'error': 'Invalid JSON'}, status=400) + return JsonResponse({"error": "Invalid JSON"}, status=400) except Exception as e: logger.error(f"Failed to change stream: {e}", exc_info=True) - return JsonResponse({'error': str(e)}, status=500) + return JsonResponse({"error": str(e)}, status=500) -@api_view(['GET']) -@permission_classes([IsAuthenticated]) + +@api_view(["GET"]) +@permission_classes([IsAdmin]) def channel_status(request, channel_id=None): """ Returns status information about channels with detail level based on request: @@ -419,7 +606,7 @@ def channel_status(request, channel_id=None): try: # Check if Redis is available if not proxy_server.redis_client: - return JsonResponse({'error': 'Redis connection not available'}, status=500) + return JsonResponse({"error": "Redis connection not available"}, status=500) # Handle single channel or all channels if channel_id: @@ -428,7 +615,9 @@ def channel_status(request, channel_id=None): if channel_info: return JsonResponse(channel_info) else: - return JsonResponse({'error': f'Channel {channel_id} not found'}, status=404) + return JsonResponse( + {"error": f"Channel {channel_id} not found"}, status=404 + ) else: # Basic info for all channels channel_pattern = "ts_proxy:channel:*:metadata" @@ -437,9 +626,13 @@ def channel_status(request, channel_id=None): # Extract channel IDs from keys cursor = 0 while True: - cursor, keys = proxy_server.redis_client.scan(cursor, match=channel_pattern) + cursor, keys = proxy_server.redis_client.scan( + cursor, match=channel_pattern + ) for key in keys: - channel_id_match = re.search(r"ts_proxy:channel:(.*):metadata", key.decode('utf-8')) + channel_id_match = re.search( + r"ts_proxy:channel:(.*):metadata", key.decode("utf-8") + ) if channel_id_match: ch_id = channel_id_match.group(1) channel_info = ChannelStatus.get_basic_channel_info(ch_id) @@ -449,15 +642,16 @@ def channel_status(request, channel_id=None): if cursor == 0: break - return JsonResponse({'channels': all_channels, 'count': len(all_channels)}) + return JsonResponse({"channels": all_channels, "count": len(all_channels)}) except Exception as e: logger.error(f"Error in channel_status: {e}", exc_info=True) - return JsonResponse({'error': str(e)}, status=500) + return JsonResponse({"error": str(e)}, status=500) + @csrf_exempt -@api_view(['POST', 'DELETE']) -@permission_classes([IsAuthenticated]) +@api_view(["POST", "DELETE"]) +@permission_classes([IsAdmin]) def stop_channel(request, channel_id): """Stop a channel and release all associated resources using PubSub events""" try: @@ -466,60 +660,70 @@ def stop_channel(request, channel_id): # Use the service layer instead of direct implementation result = 
ChannelService.stop_channel(channel_id) - if result.get('status') == 'error': - return JsonResponse({'error': result.get('message', 'Unknown error')}, status=404) + if result.get("status") == "error": + return JsonResponse( + {"error": result.get("message", "Unknown error")}, status=404 + ) - return JsonResponse({ - 'message': 'Channel stop request sent', - 'channel_id': channel_id, - 'previous_state': result.get('previous_state') - }) + return JsonResponse( + { + "message": "Channel stop request sent", + "channel_id": channel_id, + "previous_state": result.get("previous_state"), + } + ) except Exception as e: logger.error(f"Failed to stop channel: {e}", exc_info=True) - return JsonResponse({'error': str(e)}, status=500) + return JsonResponse({"error": str(e)}, status=500) + @csrf_exempt -@api_view(['POST']) -@permission_classes([IsAuthenticated]) +@api_view(["POST"]) +@permission_classes([IsAdmin]) def stop_client(request, channel_id): """Stop a specific client connection using existing client management""" try: # Parse request body to get client ID data = json.loads(request.body) - client_id = data.get('client_id') + client_id = data.get("client_id") if not client_id: - return JsonResponse({'error': 'No client_id provided'}, status=400) + return JsonResponse({"error": "No client_id provided"}, status=400) # Use the service layer instead of direct implementation result = ChannelService.stop_client(channel_id, client_id) - if result.get('status') == 'error': - return JsonResponse({'error': result.get('message')}, status=404) + if result.get("status") == "error": + return JsonResponse({"error": result.get("message")}, status=404) - return JsonResponse({ - 'message': 'Client stop request processed', - 'channel_id': channel_id, - 'client_id': client_id, - 'locally_processed': result.get('locally_processed', False) - }) + return JsonResponse( + { + "message": "Client stop request processed", + "channel_id": channel_id, + "client_id": client_id, + "locally_processed": result.get("locally_processed", False), + } + ) except json.JSONDecodeError: - return JsonResponse({'error': 'Invalid JSON'}, status=400) + return JsonResponse({"error": "Invalid JSON"}, status=400) except Exception as e: logger.error(f"Failed to stop client: {e}", exc_info=True) - return JsonResponse({'error': str(e)}, status=500) + return JsonResponse({"error": str(e)}, status=500) + @csrf_exempt -@api_view(['POST']) -@permission_classes([IsAuthenticated]) +@api_view(["POST"]) +@permission_classes([IsAdmin]) def next_stream(request, channel_id): """Switch to the next available stream for a channel""" proxy_server = ProxyServer.get_instance() try: - logger.info(f"Request to switch to next stream for channel {channel_id} received") + logger.info( + f"Request to switch to next stream for channel {channel_id} received" + ) # Check if the channel exists channel = get_stream_object(channel_id) @@ -532,29 +736,42 @@ def next_stream(request, channel_id): metadata_key = RedisKeys.channel_metadata(channel_id) if proxy_server.redis_client.exists(metadata_key): # Get current stream ID from Redis - stream_id_bytes = proxy_server.redis_client.hget(metadata_key, ChannelMetadataField.STREAM_ID) + stream_id_bytes = proxy_server.redis_client.hget( + metadata_key, ChannelMetadataField.STREAM_ID + ) if stream_id_bytes: - current_stream_id = int(stream_id_bytes.decode('utf-8')) - logger.info(f"Found current stream ID {current_stream_id} in Redis for channel {channel_id}") + current_stream_id = int(stream_id_bytes.decode("utf-8")) + logger.info( + 
f"Found current stream ID {current_stream_id} in Redis for channel {channel_id}" + ) # Get M3U profile from Redis if available - profile_id_bytes = proxy_server.redis_client.hget(metadata_key, ChannelMetadataField.M3U_PROFILE) + profile_id_bytes = proxy_server.redis_client.hget( + metadata_key, ChannelMetadataField.M3U_PROFILE + ) if profile_id_bytes: - profile_id = int(profile_id_bytes.decode('utf-8')) - logger.info(f"Found M3U profile ID {profile_id} in Redis for channel {channel_id}") + profile_id = int(profile_id_bytes.decode("utf-8")) + logger.info( + f"Found M3U profile ID {profile_id} in Redis for channel {channel_id}" + ) if not current_stream_id: # Channel is not running - return JsonResponse({'error': 'No current stream found for channel'}, status=404) + return JsonResponse( + {"error": "No current stream found for channel"}, status=404 + ) # Get all streams for this channel in their defined order - streams = list(channel.streams.all().order_by('channelstream__order')) + streams = list(channel.streams.all().order_by("channelstream__order")) if len(streams) <= 1: - return JsonResponse({ - 'error': 'No alternate streams available for this channel', - 'current_stream_id': current_stream_id - }, status=404) + return JsonResponse( + { + "error": "No alternate streams available for this channel", + "current_stream_id": current_stream_id, + }, + status=404, + ) # Find the current stream's position in the list current_index = None @@ -564,61 +781,74 @@ def next_stream(request, channel_id): break if current_index is None: - logger.warning(f"Current stream ID {current_stream_id} not found in channel's streams list") + logger.warning( + f"Current stream ID {current_stream_id} not found in channel's streams list" + ) # Fall back to the first stream that's not the current one next_stream = next((s for s in streams if s.id != current_stream_id), None) if not next_stream: - return JsonResponse({ - 'error': 'Could not find current stream in channel list', - 'current_stream_id': current_stream_id - }, status=404) + return JsonResponse( + { + "error": "Could not find current stream in channel list", + "current_stream_id": current_stream_id, + }, + status=404, + ) else: # Get the next stream in the rotation (with wrap-around) next_index = (current_index + 1) % len(streams) next_stream = streams[next_index] next_stream_id = next_stream.id - logger.info(f"Rotating to next stream ID {next_stream_id} for channel {channel_id}") + logger.info( + f"Rotating to next stream ID {next_stream_id} for channel {channel_id}" + ) # Get full stream info including URL for the next stream stream_info = get_stream_info_for_switch(channel_id, next_stream_id) - if 'error' in stream_info: - return JsonResponse({ - 'error': stream_info['error'], - 'current_stream_id': current_stream_id, - 'next_stream_id': next_stream_id - }, status=404) + if "error" in stream_info: + return JsonResponse( + { + "error": stream_info["error"], + "current_stream_id": current_stream_id, + "next_stream_id": next_stream_id, + }, + status=404, + ) # Now use the ChannelService to change the stream URL result = ChannelService.change_stream_url( channel_id, - stream_info['url'], - stream_info['user_agent'], - next_stream_id # Pass the stream_id to be stored in Redis + stream_info["url"], + stream_info["user_agent"], + next_stream_id, # Pass the stream_id to be stored in Redis ) - if result.get('status') == 'error': - return JsonResponse({ - 'error': result.get('message', 'Unknown error'), - 'diagnostics': result.get('diagnostics', {}), - 
'current_stream_id': current_stream_id, -                'next_stream_id': next_stream_id -            }, status=404) +        if result.get("status") == "error": +            return JsonResponse( +                { +                    "error": result.get("message", "Unknown error"), +                    "diagnostics": result.get("diagnostics", {}), +                    "current_stream_id": current_stream_id, +                    "next_stream_id": next_stream_id, +                }, +                status=404, +            ) # Format success response response_data = { -            'message': 'Stream switched to next available', -            'channel': channel_id, -            'previous_stream_id': current_stream_id, -            'new_stream_id': next_stream_id, -            'new_url': stream_info['url'], -            'owner': result.get('direct_update', False), -            'worker_id': proxy_server.worker_id +            "message": "Stream switched to next available", +            "channel": channel_id, +            "previous_stream_id": current_stream_id, +            "new_stream_id": next_stream_id, +            "new_url": stream_info["url"], +            "owner": result.get("direct_update", False), +            "worker_id": proxy_server.worker_id, } return JsonResponse(response_data) except Exception as e: logger.error(f"Failed to switch to next stream: {e}", exc_info=True) -        return JsonResponse({'error': str(e)}, status=500) +        return JsonResponse({"error": str(e)}, status=500) diff --git a/core/api_views.py b/core/api_views.py index 84eb4918..db9725ee 100644 --- a/core/api_views.py +++ b/core/api_views.py @@ -1,38 +1,70 @@ # core/api_views.py +import json +import ipaddress +import logging from rest_framework import viewsets, status from rest_framework.decorators import action from rest_framework.response import Response from django.shortcuts import get_object_or_404 -from .models import UserAgent, StreamProfile, CoreSettings, ProxySettings, STREAM_HASH_KEY -from .serializers import UserAgentSerializer, StreamProfileSerializer, CoreSettingsSerializer, ProxySettingsSerializer from rest_framework.permissions import IsAuthenticated -from rest_framework.decorators import api_view, permission_classes +from .models import ( +    UserAgent, +    StreamProfile, +    CoreSettings, +    STREAM_HASH_KEY, +    NETWORK_ACCESS, +    ProxySettings, +) +from .serializers import ( +    UserAgentSerializer, +    StreamProfileSerializer, +    CoreSettingsSerializer, +    ProxySettingsSerializer, ) +from rest_framework.decorators import api_view, permission_classes, action from drf_yasg.utils import swagger_auto_schema import socket import requests import os from core.tasks import rehash_streams +from apps.accounts.permissions import ( +    Authenticated, +) +from dispatcharr.utils import get_client_ip + + +logger = logging.getLogger(__name__) + class UserAgentViewSet(viewsets.ModelViewSet): """ API endpoint that allows user agents to be viewed, created, edited, or deleted. """ + queryset = UserAgent.objects.all() serializer_class = UserAgentSerializer + class StreamProfileViewSet(viewsets.ModelViewSet): """ API endpoint that allows stream profiles to be viewed, created, edited, or deleted. """ + queryset = StreamProfile.objects.all() serializer_class = StreamProfileSerializer + class CoreSettingsViewSet(viewsets.ModelViewSet): """ API endpoint for editing core settings. This is treated as a singleton: only one instance should exist. 
""" + queryset = CoreSettings.objects.all() serializer_class = CoreSettingsSerializer @@ -40,11 +72,51 @@ class CoreSettingsViewSet(viewsets.ModelViewSet): instance = self.get_object() response = super().update(request, *args, **kwargs) if instance.key == STREAM_HASH_KEY: - if instance.value != request.data['value']: - rehash_streams.delay(request.data['value'].split(',')) + if instance.value != request.data["value"]: + rehash_streams.delay(request.data["value"].split(",")) return response + @action(detail=False, methods=["post"], url_path="check") + def check(self, request, *args, **kwargs): + data = request.data + if data.get("key") == NETWORK_ACCESS: + client_ip = ipaddress.ip_address(get_client_ip(request)) + + in_network = {} + invalid = [] + + value = json.loads(data.get("value", "{}")) + for key, val in value.items(): + in_network[key] = [] + cidrs = val.split(",") + for cidr in cidrs: + try: + network = ipaddress.ip_network(cidr) + + if client_ip in network: + in_network[key] = [] + break + + in_network[key].append(cidr) + except: + invalid.append(cidr) + + if len(invalid) > 0: + return Response( + { + "error": True, + "message": "Invalid CIDR(s)", + "data": invalid, + }, + status=status.HTTP_200_OK, + ) + + return Response(in_network, status=status.HTTP_200_OK) + + return Response({}, status=status.HTTP_200_OK) + +<<<<<<< HEAD class ProxySettingsViewSet(viewsets.ModelViewSet): """ API endpoint for proxy settings. @@ -101,17 +173,19 @@ class ProxySettingsViewSet(viewsets.ModelViewSet): serializer.is_valid(raise_exception=True) serializer.save() return Response(serializer.data) +======= + + +>>>>>>> 59e4a28b311d00d073f238e01e735d68a821c3f3 @swagger_auto_schema( - method='get', + method="get", operation_description="Endpoint for environment details", - responses={200: "Environment variables"} + responses={200: "Environment variables"}, ) -@api_view(['GET']) -@permission_classes([IsAuthenticated]) +@api_view(["GET"]) +@permission_classes([Authenticated]) def environment(request): - - public_ip = None local_ip = None country_code = None @@ -135,36 +209,60 @@ def environment(request): except Exception as e: local_ip = f"Error: {e}" - # 3) If we got a valid public_ip, fetch geo info from ipapi.co + # 3) If we got a valid public_ip, fetch geo info from ipapi.co or ip-api.com if public_ip and "Error" not in public_ip: try: - geo = requests.get(f"https://ipapi.co/{public_ip}/json/", timeout=5).json() - # ipapi returns fields like country_code, country_name, etc. - country_code = geo.get("country_code", "") # e.g. "US" - country_name = geo.get("country_name", "") # e.g. "United States" - except requests.RequestException as e: + # Attempt to get geo information from ipapi.co first + r = requests.get(f"https://ipapi.co/{public_ip}/json/", timeout=5) + + if r.status_code == requests.codes.ok: + geo = r.json() + country_code = geo.get("country_code") # e.g. "US" + country_name = geo.get("country_name") # e.g. "United States" + + else: + # If ipapi.co fails, fallback to ip-api.com + # only supports http requests for free tier + r = requests.get("http://ip-api.com/json/", timeout=5) + + if r.status_code == requests.codes.ok: + geo = r.json() + country_code = geo.get("countryCode") # e.g. "US" + country_name = geo.get("country") # e.g. 
"United States" + + else: + raise Exception("Geo lookup failed with both services") + + except Exception as e: + logger.error(f"Error during geo lookup: {e}") country_code = None country_name = None - return Response({ - 'authenticated': True, - 'public_ip': public_ip, - 'local_ip': local_ip, - 'country_code': country_code, - 'country_name': country_name, - 'env_mode': "dev" if os.getenv('DISPATCHARR_ENV') == "dev" else "prod", - }) + return Response( + { + "authenticated": True, + "public_ip": public_ip, + "local_ip": local_ip, + "country_code": country_code, + "country_name": country_name, + "env_mode": "dev" if os.getenv("DISPATCHARR_ENV") == "dev" else "prod", + } + ) + @swagger_auto_schema( - method='get', + method="get", operation_description="Get application version information", - responses={200: "Version information"} + responses={200: "Version information"}, ) -@api_view(['GET']) +@api_view(["GET"]) def version(request): # Import version information from version import __version__, __timestamp__ - return Response({ - 'version': __version__, - 'timestamp': __timestamp__, - }) + + return Response( + { + "version": __version__, + "timestamp": __timestamp__, + } + ) diff --git a/core/migrations/0013_default_network_access_settings.py b/core/migrations/0013_default_network_access_settings.py new file mode 100644 index 00000000..be53ba05 --- /dev/null +++ b/core/migrations/0013_default_network_access_settings.py @@ -0,0 +1,24 @@ +# Generated by Django 5.1.6 on 2025-03-01 14:01 + +from django.db import migrations +from django.utils.text import slugify + + +def preload_network_access_settings(apps, schema_editor): + CoreSettings = apps.get_model("core", "CoreSettings") + CoreSettings.objects.create( + key=slugify("Network Access"), + name="Network Access", + value="{}", + ) + + +class Migration(migrations.Migration): + + dependencies = [ + ("core", "0012_default_active_m3u_accounts"), + ] + + operations = [ + migrations.RunPython(preload_network_access_settings), + ] diff --git a/core/models.py b/core/models.py index 3af21628..dcb4a68f 100644 --- a/core/models.py +++ b/core/models.py @@ -2,25 +2,24 @@ from django.db import models from django.utils.text import slugify + class UserAgent(models.Model): name = models.CharField( - max_length=512, - unique=True, - help_text="The User-Agent name." + max_length=512, unique=True, help_text="The User-Agent name." ) user_agent = models.CharField( max_length=512, unique=True, - help_text="The complete User-Agent string sent by the client." + help_text="The complete User-Agent string sent by the client.", ) description = models.CharField( max_length=255, blank=True, - help_text="An optional description of the client or device type." + help_text="An optional description of the client or device type.", ) is_active = models.BooleanField( default=True, - help_text="Whether this user agent is currently allowed/recognized." 
+ help_text="Whether this user agent is currently allowed/recognized.", ) created_at = models.DateTimeField(auto_now_add=True) updated_at = models.DateTimeField(auto_now=True) @@ -28,31 +27,34 @@ class UserAgent(models.Model): def __str__(self): return self.name -PROXY_PROFILE_NAME = 'Proxy' -REDIRECT_PROFILE_NAME = 'Redirect' + +PROXY_PROFILE_NAME = "Proxy" +REDIRECT_PROFILE_NAME = "Redirect" + class StreamProfile(models.Model): name = models.CharField(max_length=255, help_text="Name of the stream profile") command = models.CharField( max_length=255, help_text="Command to execute (e.g., 'yt.sh', 'streamlink', or 'vlc')", - blank=True + blank=True, ) parameters = models.TextField( help_text="Command-line parameters. Use {userAgent} and {streamUrl} as placeholders.", - blank=True + blank=True, ) locked = models.BooleanField( - default=False, - help_text="Protected - can't be deleted or modified" + default=False, help_text="Protected - can't be deleted or modified" + ) + is_active = models.BooleanField( + default=True, help_text="Whether this profile is active" ) - is_active = models.BooleanField(default=True, help_text="Whether this profile is active") user_agent = models.ForeignKey( "UserAgent", on_delete=models.SET_NULL, null=True, blank=True, - help_text="Optional user agent to use. If not set, you can fall back to a default." + help_text="Optional user agent to use. If not set, you can fall back to a default.", ) def __str__(self): @@ -77,7 +79,9 @@ class StreamProfile(models.Model): new_value = new_value.pk if field_name not in allowed_fields and orig_value != new_value: - raise ValidationError(f"Cannot modify {field_name} on a protected profile.") + raise ValidationError( + f"Cannot modify {field_name} on a protected profile." + ) super().save(*args, **kwargs) @@ -90,10 +94,14 @@ class StreamProfile(models.Model): for field_name, new_value in kwargs.items(): if field_name not in allowed_fields: - raise ValidationError(f"Cannot modify {field_name} on a protected profile.") + raise ValidationError( + f"Cannot modify {field_name} on a protected profile." 
+                ) # Ensure user_agent ForeignKey updates correctly -            if field_name == "user_agent" and isinstance(new_value, cls._meta.get_field("user_agent").related_model): +            if field_name == "user_agent" and isinstance( +                new_value, cls._meta.get_field("user_agent").related_model +            ): new_value = new_value.pk  # Convert object to ID if needed setattr(instance, field_name, new_value) @@ -122,7 +130,8 @@ class StreamProfile(models.Model): # Split the command and iterate through each part to apply replacements cmd = [self.command] + [ -            self._replace_in_part(part, replacements) for part in self.parameters.split() +            self._replace_in_part(part, replacements) +            for part in self.parameters.split() ] return cmd @@ -134,11 +143,13 @@ return part -DEFAULT_USER_AGENT_KEY= slugify("Default User-Agent") +DEFAULT_USER_AGENT_KEY = slugify("Default User-Agent") DEFAULT_STREAM_PROFILE_KEY = slugify("Default Stream Profile") STREAM_HASH_KEY = slugify("M3U Hash Key") PREFERRED_REGION_KEY = slugify("Preferred Region") AUTO_IMPORT_MAPPED_FILES = slugify("Auto-Import Mapped Files") +NETWORK_ACCESS = slugify("Network Access") + class CoreSettings(models.Model): key = models.CharField( diff --git a/core/serializers.py b/core/serializers.py index 4648a74a..fcc813fe 100644 --- a/core/serializers.py +++ b/core/serializers.py @@ -1,22 +1,70 @@ # core/serializers.py +import json +import ipaddress from rest_framework import serializers -from .models import CoreSettings, UserAgent, StreamProfile, ProxySettings +from .models import CoreSettings, UserAgent, StreamProfile, ProxySettings, NETWORK_ACCESS + class UserAgentSerializer(serializers.ModelSerializer): class Meta: model = UserAgent -        fields = ['id', 'name', 'user_agent', 'description', 'is_active', 'created_at', 'updated_at'] +        fields = [ +            "id", +            "name", +            "user_agent", +            "description", +            "is_active", +            "created_at", +            "updated_at", +        ] + class StreamProfileSerializer(serializers.ModelSerializer): class Meta: model = StreamProfile -        fields = ['id', 'name', 'command', 'parameters', 'is_active', 'user_agent', 'locked'] +        fields = [ +            "id", +            "name", +            "command", +            "parameters", +            "is_active", +            "user_agent", +            "locked", +        ] + class CoreSettingsSerializer(serializers.ModelSerializer): class Meta: model = CoreSettings -        fields = '__all__' +        fields = "__all__" + +    def update(self, instance, validated_data): +        if instance.key == NETWORK_ACCESS: +            # Validate every CIDR before persisting the new network-access map +            errors = False +            invalid = {} +            value = json.loads(validated_data.get("value")) +            for key, val in value.items(): +                cidrs = val.split(",") +                for cidr in cidrs: +                    try: +                        ipaddress.ip_network(cidr) +                    except ValueError: +                        errors = True +                        if key not in invalid: +                            invalid[key] = [] +                        invalid[key].append(cidr) + +            if errors: +                raise serializers.ValidationError( +                    { +                        "message": "Invalid CIDRs", +                        "value": invalid, +                    } +                ) + +        return super().update(instance, validated_data) class ProxySettingsSerializer(serializers.ModelSerializer): class Meta: @@ -24,7 +72,7 @@ class ProxySettingsSerializer(serializers.ModelSerializer): fields = [ 'id', 'buffering_timeout', -            'buffering_speed', +            'buffering_speed', 'redis_chunk_ttl', 'channel_shutdown_delay', 'channel_init_grace_period', diff --git a/dispatcharr/settings.py b/dispatcharr/settings.py index 06084b49..63b88c7c 100644 --- a/dispatcharr/settings.py +++ b/dispatcharr/settings.py @@ -4,43 +4,44 @@ from datetime import timedelta BASE_DIR = Path(__file__).resolve().parent.parent -SECRET_KEY = 'REPLACE_ME_WITH_A_REAL_SECRET' +SECRET_KEY = 
"REPLACE_ME_WITH_A_REAL_SECRET" REDIS_HOST = os.environ.get("REDIS_HOST", "localhost") REDIS_DB = os.environ.get("REDIS_DB", "0") # Set DEBUG to True for development, False for production -if os.environ.get('DISPATCHARR_DEBUG', 'False').lower() == 'true': +if os.environ.get("DISPATCHARR_DEBUG", "False").lower() == "true": DEBUG = True else: DEBUG = False ALLOWED_HOSTS = ["*"] +SECURE_PROXY_SSL_HEADER = ("HTTP_X_FORWARDED_PROTO", "https") INSTALLED_APPS = [ - 'apps.api', - 'apps.accounts', - 'apps.channels.apps.ChannelsConfig', - 'apps.dashboard', - 'apps.epg', - 'apps.hdhr', - 'apps.m3u', - 'apps.output', - 'apps.proxy.apps.ProxyConfig', - 'apps.proxy.ts_proxy', - 'core', - 'daphne', - 'drf_yasg', - 'channels', - 'django.contrib.admin', - 'django.contrib.auth', - 'django.contrib.contenttypes', - 'django.contrib.sessions', - 'django.contrib.messages', - 'django.contrib.staticfiles', - 'rest_framework', - 'corsheaders', - 'django_filters', - 'django_celery_beat', + "apps.api", + "apps.accounts", + "apps.channels.apps.ChannelsConfig", + "apps.dashboard", + "apps.epg", + "apps.hdhr", + "apps.m3u", + "apps.output", + "apps.proxy.apps.ProxyConfig", + "apps.proxy.ts_proxy", + "core", + "daphne", + "drf_yasg", + "channels", + "django.contrib.admin", + "django.contrib.auth", + "django.contrib.contenttypes", + "django.contrib.sessions", + "django.contrib.messages", + "django.contrib.staticfiles", + "rest_framework", + "corsheaders", + "django_filters", + "django_celery_beat", ] # EPG Processing optimization settings @@ -50,21 +51,23 @@ EPG_ENABLE_MEMORY_MONITORING = True # Whether to monitor memory usage during pr # Database optimization settings DATABASE_STATEMENT_TIMEOUT = 300 # Seconds before timing out long-running queries -DATABASE_CONN_MAX_AGE = 60 # Connection max age in seconds, helps with frequent reconnects +DATABASE_CONN_MAX_AGE = ( + 60 # Connection max age in seconds, helps with frequent reconnects +) # Disable atomic requests for performance-sensitive views ATOMIC_REQUESTS = False # Cache settings - add caching for EPG operations CACHES = { - 'default': { - 'BACKEND': 'django.core.cache.backends.locmem.LocMemCache', - 'LOCATION': 'dispatcharr-epg-cache', - 'TIMEOUT': 3600, # 1 hour cache timeout - 'OPTIONS': { - 'MAX_ENTRIES': 10000, - 'CULL_FREQUENCY': 3, # Purge 1/3 of entries when max is reached - } + "default": { + "BACKEND": "django.core.cache.backends.locmem.LocMemCache", + "LOCATION": "dispatcharr-epg-cache", + "TIMEOUT": 3600, # 1 hour cache timeout + "OPTIONS": { + "MAX_ENTRIES": 10000, + "CULL_FREQUENCY": 3, # Purge 1/3 of entries when max is reached + }, } } @@ -72,29 +75,26 @@ CACHES = { REQUESTS_TIMEOUT = 30 # Seconds for external API requests MIDDLEWARE = [ - 'django.middleware.security.SecurityMiddleware', - 'django.contrib.sessions.middleware.SessionMiddleware', - 'django.middleware.common.CommonMiddleware', - 'django.middleware.csrf.CsrfViewMiddleware', - 'django.contrib.auth.middleware.AuthenticationMiddleware', - 'django.contrib.messages.middleware.MessageMiddleware', - 'django.middleware.clickjacking.XFrameOptionsMiddleware', - 'corsheaders.middleware.CorsMiddleware', + "django.middleware.security.SecurityMiddleware", + "django.contrib.sessions.middleware.SessionMiddleware", + "django.middleware.common.CommonMiddleware", + "django.middleware.csrf.CsrfViewMiddleware", + "django.contrib.auth.middleware.AuthenticationMiddleware", + "django.contrib.messages.middleware.MessageMiddleware", + "django.middleware.clickjacking.XFrameOptionsMiddleware", + 
"corsheaders.middleware.CorsMiddleware", ] -ROOT_URLCONF = 'dispatcharr.urls' +ROOT_URLCONF = "dispatcharr.urls" TEMPLATES = [ { - 'BACKEND': 'django.template.backends.django.DjangoTemplates', - 'DIRS': [ - os.path.join(BASE_DIR, 'frontend/dist'), - BASE_DIR / "templates" - ], - 'APP_DIRS': True, - 'OPTIONS': { - 'context_processors': [ + "BACKEND": "django.template.backends.django.DjangoTemplates", + "DIRS": [os.path.join(BASE_DIR, "frontend/dist"), BASE_DIR / "templates"], + "APP_DIRS": True, + "OPTIONS": { + "context_processors": [ "django.template.context_processors.debug", "django.template.context_processors.request", "django.contrib.auth.context_processors.auth", @@ -104,8 +104,8 @@ TEMPLATES = [ }, ] -WSGI_APPLICATION = 'dispatcharr.wsgi.application' -ASGI_APPLICATION = 'dispatcharr.asgi.application' +WSGI_APPLICATION = "dispatcharr.wsgi.application" +ASGI_APPLICATION = "dispatcharr.asgi.application" CHANNEL_LAYERS = { "default": { @@ -116,76 +116,72 @@ CHANNEL_LAYERS = { }, } -if os.getenv('DB_ENGINE', None) == 'sqlite': +if os.getenv("DB_ENGINE", None) == "sqlite": DATABASES = { - 'default': { - 'ENGINE': 'django.db.backends.sqlite3', - 'NAME': '/data/dispatcharr.db', + "default": { + "ENGINE": "django.db.backends.sqlite3", + "NAME": "/data/dispatcharr.db", } } else: DATABASES = { - 'default': { - 'ENGINE': 'django.db.backends.postgresql', - 'NAME': os.environ.get('POSTGRES_DB', 'dispatcharr'), - 'USER': os.environ.get('POSTGRES_USER', 'dispatch'), - 'PASSWORD': os.environ.get('POSTGRES_PASSWORD', 'secret'), - 'HOST': os.environ.get('POSTGRES_HOST', 'localhost'), - 'PORT': int(os.environ.get('POSTGRES_PORT', 5432)), + "default": { + "ENGINE": "django.db.backends.postgresql", + "NAME": os.environ.get("POSTGRES_DB", "dispatcharr"), + "USER": os.environ.get("POSTGRES_USER", "dispatch"), + "PASSWORD": os.environ.get("POSTGRES_PASSWORD", "secret"), + "HOST": os.environ.get("POSTGRES_HOST", "localhost"), + "PORT": int(os.environ.get("POSTGRES_PORT", 5432)), } } AUTH_PASSWORD_VALIDATORS = [ { - 'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator', + "NAME": "django.contrib.auth.password_validation.UserAttributeSimilarityValidator", }, ] REST_FRAMEWORK = { - 'DEFAULT_SCHEMA_CLASS': 'rest_framework.schemas.coreapi.AutoSchema', - 'DEFAULT_RENDERER_CLASSES': [ - 'rest_framework.renderers.JSONRenderer', - 'rest_framework.renderers.BrowsableAPIRenderer', + "DEFAULT_SCHEMA_CLASS": "rest_framework.schemas.coreapi.AutoSchema", + "DEFAULT_RENDERER_CLASSES": [ + "rest_framework.renderers.JSONRenderer", + "rest_framework.renderers.BrowsableAPIRenderer", ], - 'DEFAULT_AUTHENTICATION_CLASSES': [ - 'rest_framework_simplejwt.authentication.JWTAuthentication', + "DEFAULT_AUTHENTICATION_CLASSES": [ + "rest_framework_simplejwt.authentication.JWTAuthentication", ], - 'DEFAULT_FILTER_BACKENDS': ['django_filters.rest_framework.DjangoFilterBackend'], + "DEFAULT_FILTER_BACKENDS": ["django_filters.rest_framework.DjangoFilterBackend"], } SWAGGER_SETTINGS = { - 'SECURITY_DEFINITIONS': { - 'Bearer': { - 'type': 'apiKey', - 'name': 'Authorization', - 'in': 'header' - } - } + "SECURITY_DEFINITIONS": { + "Bearer": {"type": "apiKey", "name": "Authorization", "in": "header"} + } } -LANGUAGE_CODE = 'en-us' -TIME_ZONE = 'UTC' +LANGUAGE_CODE = "en-us" +TIME_ZONE = "UTC" USE_I18N = True USE_TZ = True -STATIC_URL = '/static/' -STATIC_ROOT = BASE_DIR / 'static' # Directory where static files will be collected +STATIC_URL = "/static/" +STATIC_ROOT = BASE_DIR / "static" # Directory where static 
files will be collected # Adjust STATICFILES_DIRS to include the paths to the directories that contain your static files. STATICFILES_DIRS = [ - os.path.join(BASE_DIR, 'frontend/dist'), # React build static files + os.path.join(BASE_DIR, "frontend/dist"), # React build static files ] -DEFAULT_AUTO_FIELD = 'django.db.models.BigAutoField' -AUTH_USER_MODEL = 'accounts.User' +DEFAULT_AUTO_FIELD = "django.db.models.BigAutoField" +AUTH_USER_MODEL = "accounts.User" -CELERY_BROKER_URL = os.environ.get('CELERY_BROKER_URL', 'redis://localhost:6379/0') +CELERY_BROKER_URL = os.environ.get("CELERY_BROKER_URL", "redis://localhost:6379/0") CELERY_RESULT_BACKEND = CELERY_BROKER_URL # Configure Redis key prefix CELERY_RESULT_BACKEND_TRANSPORT_OPTIONS = { - 'global_keyprefix': 'celery-tasks:', # Set the Redis key prefix for Celery + "global_keyprefix": "celery-tasks:", # Set the Redis key prefix for Celery } # Set TTL (Time-to-Live) for task results (in seconds) @@ -193,47 +189,44 @@ CELERY_RESULT_EXPIRES = 3600 # 1 hour TTL for task results # Optionally, set visibility timeout for task retries (if using Redis) CELERY_BROKER_TRANSPORT_OPTIONS = { - 'visibility_timeout': 3600, # Time in seconds that a task remains invisible during retries + "visibility_timeout": 3600, # Time in seconds that a task remains invisible during retries } -CELERY_ACCEPT_CONTENT = ['json'] -CELERY_TASK_SERIALIZER = 'json' +CELERY_ACCEPT_CONTENT = ["json"] +CELERY_TASK_SERIALIZER = "json" CELERY_BEAT_SCHEDULER = "django_celery_beat.schedulers.DatabaseScheduler" CELERY_BEAT_SCHEDULE = { - 'fetch-channel-statuses': { - 'task': 'apps.proxy.tasks.fetch_channel_stats', # Direct task call - 'schedule': 2.0, # Every 2 seconds + "fetch-channel-statuses": { + "task": "apps.proxy.tasks.fetch_channel_stats", # Direct task call + "schedule": 2.0, # Every 2 seconds }, - 'scan-files': { - 'task': 'core.tasks.scan_and_process_files', # Direct task call - 'schedule': 20.0, # Every 20 seconds + "scan-files": { + "task": "core.tasks.scan_and_process_files", # Direct task call + "schedule": 20.0, # Every 20 seconds }, } -MEDIA_ROOT = BASE_DIR / 'media' -MEDIA_URL = '/media/' +MEDIA_ROOT = BASE_DIR / "media" +MEDIA_URL = "/media/" SERVER_IP = "127.0.0.1" CORS_ALLOW_ALL_ORIGINS = True CORS_ALLOW_CREDENTIALS = True -CSRF_TRUSTED_ORIGINS = [ - 'http://*', - 'https://*' -] +CSRF_TRUSTED_ORIGINS = ["http://*", "https://*"] APPEND_SLASH = True SIMPLE_JWT = { - 'ACCESS_TOKEN_LIFETIME': timedelta(minutes=30), - 'REFRESH_TOKEN_LIFETIME': timedelta(days=1), - 'ROTATE_REFRESH_TOKENS': False, # Optional: Whether to rotate refresh tokens - 'BLACKLIST_AFTER_ROTATION': True, # Optional: Whether to blacklist refresh tokens + "ACCESS_TOKEN_LIFETIME": timedelta(minutes=30), + "REFRESH_TOKEN_LIFETIME": timedelta(days=1), + "ROTATE_REFRESH_TOKENS": False, # Optional: Whether to rotate refresh tokens + "BLACKLIST_AFTER_ROTATION": True, # Optional: Whether to blacklist refresh tokens } # Redis connection settings -REDIS_URL = 'redis://localhost:6379/0' +REDIS_URL = "redis://localhost:6379/0" REDIS_SOCKET_TIMEOUT = 60 # Socket timeout in seconds REDIS_SOCKET_CONNECT_TIMEOUT = 5 # Connection timeout in seconds REDIS_HEALTH_CHECK_INTERVAL = 15 # Health check every 15 seconds @@ -244,45 +237,45 @@ REDIS_RETRY_INTERVAL = 1 # Initial retry interval in seconds # Proxy Settings PROXY_SETTINGS = { - 'HLS': { - 'DEFAULT_URL': '', # Default HLS stream URL if needed - 'BUFFER_SIZE': 1000, - 'USER_AGENT': 'VLC/3.0.20 LibVLC/3.0.20', - 'CHUNK_SIZE': 8192, - 'CLIENT_POLL_INTERVAL': 0.1, - 
'MAX_RETRIES': 3, - 'MIN_SEGMENTS': 12, - 'MAX_SEGMENTS': 16, - 'WINDOW_SIZE': 12, - 'INITIAL_SEGMENTS': 3, + "HLS": { + "DEFAULT_URL": "", # Default HLS stream URL if needed + "BUFFER_SIZE": 1000, + "USER_AGENT": "VLC/3.0.20 LibVLC/3.0.20", + "CHUNK_SIZE": 8192, + "CLIENT_POLL_INTERVAL": 0.1, + "MAX_RETRIES": 3, + "MIN_SEGMENTS": 12, + "MAX_SEGMENTS": 16, + "WINDOW_SIZE": 12, + "INITIAL_SEGMENTS": 3, + }, + "TS": { + "DEFAULT_URL": "", # Default TS stream URL if needed + "BUFFER_SIZE": 1000, + "RECONNECT_DELAY": 5, + "USER_AGENT": "VLC/3.0.20 LibVLC/3.0.20", + "REDIS_CHUNK_TTL": 60, # How long to keep chunks in Redis (seconds) }, - 'TS': { - 'DEFAULT_URL': '', # Default TS stream URL if needed - 'BUFFER_SIZE': 1000, - 'RECONNECT_DELAY': 5, - 'USER_AGENT': 'VLC/3.0.20 LibVLC/3.0.20', - 'REDIS_CHUNK_TTL': 60, # How long to keep chunks in Redis (seconds) - } } # Map log level names to their numeric values LOG_LEVEL_MAP = { - 'TRACE': 5, - 'DEBUG': 10, - 'INFO': 20, - 'WARNING': 30, - 'ERROR': 40, - 'CRITICAL': 50 + "TRACE": 5, + "DEBUG": 10, + "INFO": 20, + "WARNING": 30, + "ERROR": 40, + "CRITICAL": 50, } # Get log level from environment variable, default to INFO if not set # Add debugging output to see exactly what's being detected -env_log_level = os.environ.get('DISPATCHARR_LOG_LEVEL', '') +env_log_level = os.environ.get("DISPATCHARR_LOG_LEVEL", "") print(f"Environment DISPATCHARR_LOG_LEVEL detected as: '{env_log_level}'") if not env_log_level: print("No DISPATCHARR_LOG_LEVEL found in environment, using default INFO") - LOG_LEVEL_NAME = 'INFO' + LOG_LEVEL_NAME = "INFO" else: LOG_LEVEL_NAME = env_log_level.upper() print(f"Setting log level to: {LOG_LEVEL_NAME}") @@ -291,68 +284,68 @@ LOG_LEVEL = LOG_LEVEL_MAP.get(LOG_LEVEL_NAME, 20) # Default to INFO (20) if inv # Add this to your existing LOGGING configuration or create one if it doesn't exist LOGGING = { - 'version': 1, - 'disable_existing_loggers': False, - 'formatters': { - 'verbose': { - 'format': '{asctime} {levelname} {name} {message}', - 'style': '{', + "version": 1, + "disable_existing_loggers": False, + "formatters": { + "verbose": { + "format": "{asctime} {levelname} {name} {message}", + "style": "{", }, }, - 'handlers': { - 'console': { - 'class': 'logging.StreamHandler', - 'formatter': 'verbose', - 'level': 5, # Always allow TRACE level messages through the handler + "handlers": { + "console": { + "class": "logging.StreamHandler", + "formatter": "verbose", + "level": 5, # Always allow TRACE level messages through the handler }, }, - 'loggers': { - 'core.tasks': { - 'handlers': ['console'], - 'level': LOG_LEVEL, # Use environment-configured level - 'propagate': False, # Don't propagate to root logger to avoid duplicate logs + "loggers": { + "core.tasks": { + "handlers": ["console"], + "level": LOG_LEVEL, # Use environment-configured level + "propagate": False, # Don't propagate to root logger to avoid duplicate logs }, - 'core.utils': { - 'handlers': ['console'], - 'level': LOG_LEVEL, - 'propagate': False, + "core.utils": { + "handlers": ["console"], + "level": LOG_LEVEL, + "propagate": False, }, - 'apps.proxy': { - 'handlers': ['console'], - 'level': LOG_LEVEL, # Use environment-configured level - 'propagate': False, # Don't propagate to root logger + "apps.proxy": { + "handlers": ["console"], + "level": LOG_LEVEL, # Use environment-configured level + "propagate": False, # Don't propagate to root logger }, # Add parent logger for all app modules - 'apps': { - 'handlers': ['console'], - 'level': LOG_LEVEL, - 'propagate': 
False, + "apps": { + "handlers": ["console"], + "level": LOG_LEVEL, + "propagate": False, }, # Celery loggers to capture task execution messages - 'celery': { - 'handlers': ['console'], - 'level': LOG_LEVEL, # Use configured log level for Celery logs - 'propagate': False, + "celery": { + "handlers": ["console"], + "level": LOG_LEVEL, # Use configured log level for Celery logs + "propagate": False, }, - 'celery.task': { - 'handlers': ['console'], - 'level': LOG_LEVEL, # Use configured log level for task-specific logs - 'propagate': False, + "celery.task": { + "handlers": ["console"], + "level": LOG_LEVEL, # Use configured log level for task-specific logs + "propagate": False, }, - 'celery.worker': { - 'handlers': ['console'], - 'level': LOG_LEVEL, # Use configured log level for worker logs - 'propagate': False, + "celery.worker": { + "handlers": ["console"], + "level": LOG_LEVEL, # Use configured log level for worker logs + "propagate": False, }, - 'celery.beat': { - 'handlers': ['console'], - 'level': LOG_LEVEL, # Use configured log level for scheduler logs - 'propagate': False, + "celery.beat": { + "handlers": ["console"], + "level": LOG_LEVEL, # Use configured log level for scheduler logs + "propagate": False, }, # Add any other loggers you need to capture TRACE logs from }, - 'root': { - 'handlers': ['console'], - 'level': LOG_LEVEL, # Use user-configured level instead of hardcoded 'INFO' + "root": { + "handlers": ["console"], + "level": LOG_LEVEL, # Use user-configured level instead of hardcoded 'INFO' }, } diff --git a/dispatcharr/urls.py b/dispatcharr/urls.py index f0de138e..3e891314 100644 --- a/dispatcharr/urls.py +++ b/dispatcharr/urls.py @@ -7,13 +7,14 @@ from rest_framework import permissions from drf_yasg.views import get_schema_view from drf_yasg import openapi from .routing import websocket_urlpatterns - +from apps.output.views import xc_player_api, xc_panel_api, xc_get, xc_xmltv +from apps.proxy.ts_proxy.views import stream_xc # Define schema_view for Swagger schema_view = get_schema_view( openapi.Info( title="Dispatcharr API", - default_version='v1', + default_version="v1", description="API documentation for Dispatcharr", terms_of_service="https://www.google.com/policies/terms/", contact=openapi.Contact(email="contact@dispatcharr.local"), @@ -25,38 +26,48 @@ schema_view = get_schema_view( urlpatterns = [ # API Routes - path('api/', include(('apps.api.urls', 'api'), namespace='api')), - path('api', RedirectView.as_view(url='/api/', permanent=True)), - + path("api/", include(("apps.api.urls", "api"), namespace="api")), + path("api", RedirectView.as_view(url="/api/", permanent=True)), # Admin - path('admin', RedirectView.as_view(url='/admin/', permanent=True)), - path('admin/', admin.site.urls), - + path("admin", RedirectView.as_view(url="/admin/", permanent=True)), + path("admin/", admin.site.urls), # Outputs - path('output', RedirectView.as_view(url='/output/', permanent=True)), - path('output/', include(('apps.output.urls', 'output'), namespace='output')), - + path("output", RedirectView.as_view(url="/output/", permanent=True)), + path("output/", include(("apps.output.urls", "output"), namespace="output")), # HDHR - path('hdhr', RedirectView.as_view(url='/hdhr/', permanent=True)), - path('hdhr/', include(('apps.hdhr.urls', 'hdhr'), namespace='hdhr')), - + path("hdhr", RedirectView.as_view(url="/hdhr/", permanent=True)), + path("hdhr/", include(("apps.hdhr.urls", "hdhr"), namespace="hdhr")), # Add proxy apps - Move these before the catch-all - path('proxy/', 
include(('apps.proxy.urls', 'proxy'), namespace='proxy')), -    path('proxy', RedirectView.as_view(url='/proxy/', permanent=True)), - +    path("proxy/", include(("apps.proxy.urls", "proxy"), namespace="proxy")), +    path("proxy", RedirectView.as_view(url="/proxy/", permanent=True)), +    # xc +    re_path("player_api.php", xc_player_api, name="xc_player_api"), +    re_path("panel_api.php", xc_panel_api, name="xc_panel_api"), +    re_path("get.php", xc_get, name="xc_get"), +    re_path("xmltv.php", xc_xmltv, name="xc_xmltv"), +    path( +        "live/<str:username>/<str:password>/<str:channel_id>", +        stream_xc, +        name="xc_live_stream_endpoint", +    ), +    path( +        "<str:username>/<str:password>/<str:channel_id>", +        stream_xc, +        name="xc_stream_endpoint", +    ), # Swagger UI -    path('swagger/', schema_view.with_ui('swagger', cache_timeout=0), name='schema-swagger-ui'), - +    path( +        "swagger/", +        schema_view.with_ui("swagger", cache_timeout=0), +        name="schema-swagger-ui", +    ), # ReDoc UI -    path('redoc/', schema_view.with_ui('redoc', cache_timeout=0), name='schema-redoc'), - +    path("redoc/", schema_view.with_ui("redoc", cache_timeout=0), name="schema-redoc"), # Optionally, serve the raw Swagger JSON -    path('swagger.json', schema_view.without_ui(cache_timeout=0), name='schema-json'), - +    path("swagger.json", schema_view.without_ui(cache_timeout=0), name="schema-json"), # Catch-all routes should always be last -    path('', TemplateView.as_view(template_name='index.html')),  # React entry point -    path('', TemplateView.as_view(template_name='index.html')), - +    path("", TemplateView.as_view(template_name="index.html")),  # React entry point +    path("", TemplateView.as_view(template_name="index.html")), ] + static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT) urlpatterns += websocket_urlpatterns diff --git a/dispatcharr/utils.py b/dispatcharr/utils.py index e6392c6c..767913c6 100644 --- a/dispatcharr/utils.py +++ b/dispatcharr/utils.py @@ -1,23 +1,58 @@ # dispatcharr/utils.py +import json +import ipaddress from django.http import JsonResponse from django.core.exceptions import ValidationError +from core.models import CoreSettings, NETWORK_ACCESS + def json_error_response(message, status=400): """Return a standardized error JSON response.""" -    return JsonResponse({'success': False, 'error': message}, status=status) + return JsonResponse({"success": False, "error": message}, status=status) + def json_success_response(data=None, status=200): """Return a standardized success JSON response.""" -    response = {'success': True} +    response = {"success": True} if data is not None: response.update(data) return JsonResponse(response, status=status) + def validate_logo_file(file): """Validate uploaded logo file size and MIME type.""" -    valid_mime_types = ['image/jpeg', 'image/png', 'image/gif'] +    valid_mime_types = ["image/jpeg", "image/png", "image/gif"] if file.content_type not in valid_mime_types: -        raise ValidationError('Unsupported file type. Allowed types: JPEG, PNG, GIF.') +        raise ValidationError("Unsupported file type. Allowed types: JPEG, PNG, GIF.") if file.size > 2 * 1024 * 1024: -        raise ValidationError('File too large. Max 2MB.') +        raise ValidationError("File too large. 
+
+def get_client_ip(request):
+    x_forwarded_for = request.META.get("HTTP_X_REAL_IP")
+    if x_forwarded_for:
+        # The header value may be a comma-separated list of IPs; use the first
+        ip = x_forwarded_for.split(",")[0].strip()
+    else:
+        ip = request.META.get("REMOTE_ADDR")
+    return ip
+
+
+def network_access_allowed(request, settings_key):
+    network_access = json.loads(CoreSettings.objects.get(key=NETWORK_ACCESS).value)
+
+    cidrs = (
+        network_access[settings_key].split(",")
+        if settings_key in network_access
+        else ["0.0.0.0/0"]
+    )
+
+    network_allowed = False
+    client_ip = ipaddress.ip_address(get_client_ip(request))
+    for cidr in cidrs:
+        network = ipaddress.ip_network(cidr)
+        if client_ip in network:
+            network_allowed = True
+            break
+
+    return network_allowed
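`network_access_allowed()` gates a request by comparing the client address against a comma-delimited list of CIDR ranges stored in `CoreSettings`, falling back to `0.0.0.0/0` (allow everything) when no ranges are configured for the given key. A standalone sketch of the same membership check; the `strip()` call and `strict=False` shown here are defensive suggestions, not part of the change above:

import ipaddress


def ip_in_cidrs(client_ip: str, cidr_csv: str) -> bool:
    """Return True if client_ip falls inside any of the comma-delimited CIDRs."""
    client = ipaddress.ip_address(client_ip)
    for cidr in cidr_csv.split(","):
        # strict=False tolerates host bits (e.g. "10.1.2.3/8"); stripping
        # guards against stray whitespace in the comma-delimited setting.
        if client in ipaddress.ip_network(cidr.strip(), strict=False):
            return True
    return False


assert ip_in_cidrs("192.168.1.10", "192.168.0.0/16,10.0.0.0/8")
assert not ip_in_cidrs("203.0.113.7", "192.168.0.0/16")

For this check to see the real client address behind the bundled reverse proxy, nginx now sets `X-Real-IP` (and the other forwarding headers) once at the server level instead of repeating them per location, which is the next hunk:

diff --git a/docker/nginx.conf b/docker/nginx.conf
index 65d382c5..db097ede 100644
--- a/docker/nginx.conf
+++ b/docker/nginx.conf
@@ -9,13 +9,16 @@ server {
     proxy_read_timeout 300;
     client_max_body_size 0; # Allow file uploads up to 128MB
 
+    proxy_set_header X-Real-IP $remote_addr;
+    proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
+    proxy_set_header X-Forwarded-Host $host:$server_port;
+    proxy_set_header X-Forwarded-Proto $scheme;
+    proxy_set_header Host $host;
+
     # Serve Django via uWSGI
     location / {
         include uwsgi_params;
         uwsgi_pass unix:/app/uwsgi.sock;
-        proxy_set_header X-Real-IP $remote_addr;
-        proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
-        proxy_set_header Host $host;
     }
 
     location /assets/ {
@@ -55,11 +58,6 @@ server {
     location /hdhr {
         include uwsgi_params;
         uwsgi_pass unix:/app/uwsgi.sock;
-        proxy_set_header X-Real-IP $remote_addr;
-        proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
-        proxy_set_header X-Forwarded-Host $host:$server_port;
-        proxy_set_header X-Forwarded-Proto $scheme;
-        proxy_set_header Host $host;
     }
 
     # Serve FFmpeg streams efficiently
@@ -78,9 +76,6 @@ server {
         proxy_http_version 1.1;
         proxy_set_header Upgrade $http_upgrade;
         proxy_set_header Connection "Upgrade";
-        proxy_set_header X-Real-IP $remote_addr;
-        proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
-        proxy_set_header Host $host;
     }
 
     # Route TS proxy requests to the dedicated instance
@@ -94,8 +89,5 @@ server {
         proxy_read_timeout 300s;
         proxy_send_timeout 300s;
         client_max_body_size 0;
-        proxy_set_header X-Real-IP $remote_addr;
-        proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
-        proxy_set_header Host $host;
     }
 }
diff --git a/frontend/package-lock.json b/frontend/package-lock.json index d8da7f76..5325ff6c 100644 --- a/frontend/package-lock.json +++ b/frontend/package-lock.json @@ -12,25 +12,20 @@ "@dnd-kit/modifiers": "^9.0.0", "@dnd-kit/sortable": "^10.0.0", "@dnd-kit/utilities": "^3.2.2", - "@mantine/charts": "^7.17.2", - "@mantine/core": "^7.17.2", - "@mantine/dates": "^7.17.2", - "@mantine/dropzone": "^7.17.2", - "@mantine/form": "^7.17.3", - "@mantine/hooks": "^7.17.2", - "@mantine/notifications": "^7.17.2", - "@tabler/icons-react": "^3.31.0", + "@mantine/charts": "~8.0.1", + "@mantine/core": "~8.0.1", + "@mantine/dates": "~8.0.1", + "@mantine/dropzone": "~8.0.1", + "@mantine/form": "~8.0.1", + "@mantine/hooks": "~8.0.1", + "@mantine/notifications": "~8.0.1", "@tanstack/react-table": "^8.21.2", "allotment": "^1.20.3", - "axios": "^1.8.2", - "clsx": "^2.1.1", "dayjs": "^1.11.13", "formik": "^2.4.6", "hls.js": "^1.5.20", - "lucide-react": "^0.479.0", - "mantine-react-table": "^2.0.0-beta.9", + "lucide-react": "^0.511.0", "mpegts.js": "^1.8.0", - "prettier": "^3.5.3", "react": "^19.0.0", "react-dom": 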
"^19.0.0", "react-draggable": "^4.4.6", @@ -53,6 +48,7 @@ "eslint-plugin-react-hooks": "^5.1.0", "eslint-plugin-react-refresh": "^0.4.19", "globals": "^15.15.0", + "prettier": "^3.5.3", "vite": "^6.2.0" } }, @@ -753,72 +749,72 @@ "license": "Apache-2.0" }, "node_modules/@mantine/charts": { - "version": "7.17.2", - "resolved": "https://registry.npmjs.org/@mantine/charts/-/charts-7.17.2.tgz", - "integrity": "sha512-ckB23pIqRjzysUz2EiWZD9AVyf7t0r7o7zfJbl01nzOezFgYq5RGeRoxvpcsfBC+YoSbB/43rjNcXtYhtA7QzA==", + "version": "8.0.1", + "resolved": "https://registry.npmjs.org/@mantine/charts/-/charts-8.0.1.tgz", + "integrity": "sha512-yntk4siXpQGSj83tDwftJw6fHTOBS6c/VWinjvTW29ptEdjBCxbKFfyyDc9UGVVuO7ovbdtpfCZBpuN2I7HPCA==", "license": "MIT", "peerDependencies": { - "@mantine/core": "7.17.2", - "@mantine/hooks": "7.17.2", + "@mantine/core": "8.0.1", + "@mantine/hooks": "8.0.1", "react": "^18.x || ^19.x", "react-dom": "^18.x || ^19.x", "recharts": "^2.13.3" } }, "node_modules/@mantine/core": { - "version": "7.17.2", - "resolved": "https://registry.npmjs.org/@mantine/core/-/core-7.17.2.tgz", - "integrity": "sha512-R6MYhitJ0JEgrhadd31Nw9FhRaQwDHjXUs5YIlitKH/fTOz9gKSxKjzmNng3bEBQCcbEDOkZj3FRcBgTUh/F0Q==", + "version": "8.0.1", + "resolved": "https://registry.npmjs.org/@mantine/core/-/core-8.0.1.tgz", + "integrity": "sha512-4ezaxKjChSPtawamQ3KrJq+x506uTouXlL0Z5fP+t105KnyxMrAJUENhbh2ivD4pq9Zh1BFiD9IWzyu3IXFR8w==", "license": "MIT", "dependencies": { "@floating-ui/react": "^0.26.28", "clsx": "^2.1.1", "react-number-format": "^5.4.3", "react-remove-scroll": "^2.6.2", - "react-textarea-autosize": "8.5.6", + "react-textarea-autosize": "8.5.9", "type-fest": "^4.27.0" }, "peerDependencies": { - "@mantine/hooks": "7.17.2", + "@mantine/hooks": "8.0.1", "react": "^18.x || ^19.x", "react-dom": "^18.x || ^19.x" } }, "node_modules/@mantine/dates": { - "version": "7.17.2", - "resolved": "https://registry.npmjs.org/@mantine/dates/-/dates-7.17.2.tgz", - "integrity": "sha512-7bB992j8f+uEi280jab0/8i5yfsN/3oSrMDFwatZ+7XSDUwiP0YFib/FVX0pNSSqdFpbXhUmsZEECX71QtHw+Q==", + "version": "8.0.1", + "resolved": "https://registry.npmjs.org/@mantine/dates/-/dates-8.0.1.tgz", + "integrity": "sha512-YCmV5jiGE9Ts2uhNS217IA1Hd5kAa8oaEtfnU0bS1sL36zKEf2s6elmzY718XdF8tFil0jJWAj0jiCrA3/udMg==", "license": "MIT", "dependencies": { "clsx": "^2.1.1" }, "peerDependencies": { - "@mantine/core": "7.17.2", - "@mantine/hooks": "7.17.2", + "@mantine/core": "8.0.1", + "@mantine/hooks": "8.0.1", "dayjs": ">=1.0.0", "react": "^18.x || ^19.x", "react-dom": "^18.x || ^19.x" } }, "node_modules/@mantine/dropzone": { - "version": "7.17.2", - "resolved": "https://registry.npmjs.org/@mantine/dropzone/-/dropzone-7.17.2.tgz", - "integrity": "sha512-NMQ1SDmnW0sf3GO6p1r/VIcg/xWqlRmfnWCr00/bGRbBEGbyaUwL3LSn+KYBJdY+3/jNGvGa+xflWDvnby5tzw==", + "version": "8.0.1", + "resolved": "https://registry.npmjs.org/@mantine/dropzone/-/dropzone-8.0.1.tgz", + "integrity": "sha512-8PH5yrtA/ebCIwjs0m4J9qOvEyS/P4XmNlHrw0E389/qq64Ol7+/ZH7Xtiq64IaY8kvsMW1XHaV0c+bdYrijiA==", "license": "MIT", "dependencies": { - "react-dropzone-esm": "15.2.0" + "react-dropzone": "14.3.8" }, "peerDependencies": { - "@mantine/core": "7.17.2", - "@mantine/hooks": "7.17.2", + "@mantine/core": "8.0.1", + "@mantine/hooks": "8.0.1", "react": "^18.x || ^19.x", "react-dom": "^18.x || ^19.x" } }, "node_modules/@mantine/form": { - "version": "7.17.3", - "resolved": "https://registry.npmjs.org/@mantine/form/-/form-7.17.3.tgz", - "integrity": 
"sha512-ktERldD8f9lrjjz6wIbwMnNbAZq8XEWPx4K5WuFyjXaK0PI8D+gsXIGKMtA5rVrAUFHCWCdbK3yLgtjJNki8ew==", + "version": "8.0.1", + "resolved": "https://registry.npmjs.org/@mantine/form/-/form-8.0.1.tgz", + "integrity": "sha512-lQ94gn/9p60C+tKEW7psQ1tZHod58Q0bXLbRDadRKMwnqBb2WFoIuaQWPDo7ox+PqyOv28dtflgS+Lm95EbBhg==", "license": "MIT", "dependencies": { "fast-deep-equal": "^3.1.3", @@ -829,34 +825,34 @@ } }, "node_modules/@mantine/hooks": { - "version": "7.17.2", - "resolved": "https://registry.npmjs.org/@mantine/hooks/-/hooks-7.17.2.tgz", - "integrity": "sha512-tbErVcGZu0E4dSmE6N0k6Tv1y9R3SQmmQgwqorcc+guEgKMdamc36lucZGlJnSGUmGj+WLUgELkEQ0asdfYBDA==", + "version": "8.0.1", + "resolved": "https://registry.npmjs.org/@mantine/hooks/-/hooks-8.0.1.tgz", + "integrity": "sha512-GvLdM4Ro3QcDyIgqrdXsUZmeeKye2TNL/k3mEr9JhM5KacHQjr83JPp0u9eLobn7kiyBqpLTYmVYAbmjJdCxHw==", "license": "MIT", "peerDependencies": { "react": "^18.x || ^19.x" } }, "node_modules/@mantine/notifications": { - "version": "7.17.2", - "resolved": "https://registry.npmjs.org/@mantine/notifications/-/notifications-7.17.2.tgz", - "integrity": "sha512-vg0L8cmihz0ODg4WJ9MAyK06WPt/6g67ksIUFxd4F8RfdJbIMLTsNG9yWoSfuhtXenUg717KaA917IWLjDSaqw==", + "version": "8.0.1", + "resolved": "https://registry.npmjs.org/@mantine/notifications/-/notifications-8.0.1.tgz", + "integrity": "sha512-7TX9OyAmUcok3qffnheS7gTAMKDczETy8XEYDr38Sy/XIoXLjM+3CwO+a/vfd1F9oW2LvkahkHT0Ey+vBOVd0Q==", "license": "MIT", "dependencies": { - "@mantine/store": "7.17.2", + "@mantine/store": "8.0.1", "react-transition-group": "4.4.5" }, "peerDependencies": { - "@mantine/core": "7.17.2", - "@mantine/hooks": "7.17.2", + "@mantine/core": "8.0.1", + "@mantine/hooks": "8.0.1", "react": "^18.x || ^19.x", "react-dom": "^18.x || ^19.x" } }, "node_modules/@mantine/store": { - "version": "7.17.2", - "resolved": "https://registry.npmjs.org/@mantine/store/-/store-7.17.2.tgz", - "integrity": "sha512-UoMUYQK/z58hMueCkpDIXc49gPgrVO/zcpb0k+B7MFU51EIUiFzHLxLFBmWrgCAM6rzJORqN8JjyCd/PB9j4aw==", + "version": "8.0.1", + "resolved": "https://registry.npmjs.org/@mantine/store/-/store-8.0.1.tgz", + "integrity": "sha512-3wfUDeiERXJEI+MGgRAbh+9aY35D9oE4UzquLqZh8cIiH5i5g64Y/eJx3PfjHgO5+Zeu6lbgTgL6k4lg4a2SBQ==", "license": "MIT", "peerDependencies": { "react": "^18.x || ^19.x" @@ -990,48 +986,6 @@ "@swc/counter": "^0.1.3" } }, - "node_modules/@tabler/icons": { - "version": "3.31.0", - "resolved": "https://registry.npmjs.org/@tabler/icons/-/icons-3.31.0.tgz", - "integrity": "sha512-dblAdeKY3+GA1U+Q9eziZ0ooVlZMHsE8dqP0RkwvRtEsAULoKOYaCUOcJ4oW1DjWegdxk++UAt2SlQVnmeHv+g==", - "license": "MIT", - "funding": { - "type": "github", - "url": "https://github.com/sponsors/codecalm" - } - }, - "node_modules/@tabler/icons-react": { - "version": "3.31.0", - "resolved": "https://registry.npmjs.org/@tabler/icons-react/-/icons-react-3.31.0.tgz", - "integrity": "sha512-2rrCM5y/VnaVKnORpDdAua9SEGuJKVqPtWxeQ/vUVsgaUx30LDgBZph7/lterXxDY1IKR6NO//HDhWiifXTi3w==", - "license": "MIT", - "dependencies": { - "@tabler/icons": "3.31.0" - }, - "funding": { - "type": "github", - "url": "https://github.com/sponsors/codecalm" - }, - "peerDependencies": { - "react": ">= 16" - } - }, - "node_modules/@tanstack/match-sorter-utils": { - "version": "8.19.4", - "resolved": "https://registry.npmjs.org/@tanstack/match-sorter-utils/-/match-sorter-utils-8.19.4.tgz", - "integrity": "sha512-Wo1iKt2b9OT7d+YGhvEPD3DXvPv2etTusIMhMUoG7fbhmxcXCtIjJDEygy91Y2JFlwGyjqiBPRozme7UD8hoqg==", - "license": "MIT", - "dependencies": { - "remove-accents": "0.5.0" - }, - 
"engines": { - "node": ">=12" - }, - "funding": { - "type": "github", - "url": "https://github.com/sponsors/tannerlinsley" - } - }, "node_modules/@tanstack/react-table": { "version": "8.21.3", "resolved": "https://registry.npmjs.org/@tanstack/react-table/-/react-table-8.21.3.tgz", @@ -1052,23 +1006,6 @@ "react-dom": ">=16.8" } }, - "node_modules/@tanstack/react-virtual": { - "version": "3.11.2", - "resolved": "https://registry.npmjs.org/@tanstack/react-virtual/-/react-virtual-3.11.2.tgz", - "integrity": "sha512-OuFzMXPF4+xZgx8UzJha0AieuMihhhaWG0tCqpp6tDzlFwOmNBPYMuLOtMJ1Tr4pXLHmgjcWhG6RlknY2oNTdQ==", - "license": "MIT", - "dependencies": { - "@tanstack/virtual-core": "3.11.2" - }, - "funding": { - "type": "github", - "url": "https://github.com/sponsors/tannerlinsley" - }, - "peerDependencies": { - "react": "^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0", - "react-dom": "^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0" - } - }, "node_modules/@tanstack/table-core": { "version": "8.21.3", "resolved": "https://registry.npmjs.org/@tanstack/table-core/-/table-core-8.21.3.tgz", @@ -1082,22 +1019,6 @@ "url": "https://github.com/sponsors/tannerlinsley" } }, - "node_modules/@tanstack/virtual-core": { - "version": "3.11.2", - "resolved": "https://registry.npmjs.org/@tanstack/virtual-core/-/virtual-core-3.11.2.tgz", - "integrity": "sha512-vTtpNt7mKCiZ1pwU9hfKPhpdVO2sVzFQsxoVBGtOSHxlrRRzYr8iQ2TlwbAcRYCcEiZ9ECAM8kBzH0v2+VzfKw==", - "license": "MIT", - "funding": { - "type": "github", - "url": "https://github.com/sponsors/tannerlinsley" - } - }, - "node_modules/@types/cookie": { - "version": "0.6.0", - "resolved": "https://registry.npmjs.org/@types/cookie/-/cookie-0.6.0.tgz", - "integrity": "sha512-4Kh9a6B2bQciAhf7FSuMRRkUWecJgJu9nPnx3yzpsfXX/c50REIqpHY4C82bXP90qrLtXtkDxTZosYO3UpOwlA==", - "license": "MIT" - }, "node_modules/@types/d3-array": { "version": "3.2.1", "resolved": "https://registry.npmjs.org/@types/d3-array/-/d3-array-3.2.1.tgz", @@ -1386,21 +1307,13 @@ "dev": true, "license": "Python-2.0" }, - "node_modules/asynckit": { - "version": "0.4.0", - "resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz", - "integrity": "sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==", - "license": "MIT" - }, - "node_modules/axios": { - "version": "1.8.3", - "resolved": "https://registry.npmjs.org/axios/-/axios-1.8.3.tgz", - "integrity": "sha512-iP4DebzoNlP/YN2dpwCgb8zoCmhtkajzS48JvwmkSkXvPI3DHc7m+XYL5tGnSlJtR6nImXZmdCuN5aP8dh1d8A==", + "node_modules/attr-accept": { + "version": "2.2.5", + "resolved": "https://registry.npmjs.org/attr-accept/-/attr-accept-2.2.5.tgz", + "integrity": "sha512-0bDNnY/u6pPwHDMoF0FieU354oBi0a8rD9FcsLwzcGWbc8KS8KPIi7y+s13OlVY+gMWc/9xEMUgNE6Qm8ZllYQ==", "license": "MIT", - "dependencies": { - "follow-redirects": "^1.15.6", - "form-data": "^4.0.0", - "proxy-from-env": "^1.1.0" + "engines": { + "node": ">=4" } }, "node_modules/babel-plugin-macros": { @@ -1436,19 +1349,6 @@ "concat-map": "0.0.1" } }, - "node_modules/call-bind-apply-helpers": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/call-bind-apply-helpers/-/call-bind-apply-helpers-1.0.2.tgz", - "integrity": "sha512-Sp1ablJ0ivDkSzjcaJdxEunN5/XvksFJ2sMBFfq6x0ryhQV/2b/KwFe21cMpmHtPOSij8K99/wSfoEuTObmuMQ==", - "license": "MIT", - "dependencies": { - "es-errors": "^1.3.0", - "function-bind": "^1.1.2" - }, - "engines": { - "node": ">= 0.4" - } - }, "node_modules/callsites": { "version": "3.1.0", "resolved": "https://registry.npmjs.org/callsites/-/callsites-3.1.0.tgz", @@ 
-1510,18 +1410,6 @@ "dev": true, "license": "MIT" }, - "node_modules/combined-stream": { - "version": "1.0.8", - "resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz", - "integrity": "sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==", - "license": "MIT", - "dependencies": { - "delayed-stream": "~1.0.0" - }, - "engines": { - "node": ">= 0.8" - } - }, "node_modules/concat-map": { "version": "0.0.1", "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", @@ -1756,15 +1644,6 @@ "node": ">=0.10.0" } }, - "node_modules/delayed-stream": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz", - "integrity": "sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ==", - "license": "MIT", - "engines": { - "node": ">=0.4.0" - } - }, "node_modules/detect-node-es": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/detect-node-es/-/detect-node-es-1.1.0.tgz", @@ -1786,20 +1665,6 @@ "resolved": "https://registry.npmjs.org/dom-walk/-/dom-walk-0.1.2.tgz", "integrity": "sha512-6QvTW9mrGeIegrFXdtQi9pk7O/nSK6lSdXW2eqUspN5LWD7UTji2Fqw5V2YLjBpHEoU9Xl/eUWNpDeZvoyOv2w==" }, - "node_modules/dunder-proto": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/dunder-proto/-/dunder-proto-1.0.1.tgz", - "integrity": "sha512-KIN/nDJBQRcXw0MLVhZE9iQHmG68qAVIBg9CqmUYjmQIhgij9U5MFvrqkUL5FbtyyzZuOeOt0zdeRe4UY7ct+A==", - "license": "MIT", - "dependencies": { - "call-bind-apply-helpers": "^1.0.1", - "es-errors": "^1.3.0", - "gopd": "^1.2.0" - }, - "engines": { - "node": ">= 0.4" - } - }, "node_modules/error-ex": { "version": "1.3.2", "resolved": "https://registry.npmjs.org/error-ex/-/error-ex-1.3.2.tgz", @@ -1809,51 +1674,6 @@ "is-arrayish": "^0.2.1" } }, - "node_modules/es-define-property": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/es-define-property/-/es-define-property-1.0.1.tgz", - "integrity": "sha512-e3nRfgfUZ4rNGL232gUgX06QNyyez04KdjFrF+LTRoOXmrOgFKDg4BCdsjW8EnT69eqdYGmRpJwiPVYNrCaW3g==", - "license": "MIT", - "engines": { - "node": ">= 0.4" - } - }, - "node_modules/es-errors": { - "version": "1.3.0", - "resolved": "https://registry.npmjs.org/es-errors/-/es-errors-1.3.0.tgz", - "integrity": "sha512-Zf5H2Kxt2xjTvbJvP2ZWLEICxA6j+hAmMzIlypy4xcBg1vKVnx89Wy0GbS+kf5cwCVFFzdCFh2XSCFNULS6csw==", - "license": "MIT", - "engines": { - "node": ">= 0.4" - } - }, - "node_modules/es-object-atoms": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/es-object-atoms/-/es-object-atoms-1.1.1.tgz", - "integrity": "sha512-FGgH2h8zKNim9ljj7dankFPcICIK9Cp5bm+c2gQSYePhpaG5+esrLODihIorn+Pe6FGJzWhXQotPv73jTaldXA==", - "license": "MIT", - "dependencies": { - "es-errors": "^1.3.0" - }, - "engines": { - "node": ">= 0.4" - } - }, - "node_modules/es-set-tostringtag": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/es-set-tostringtag/-/es-set-tostringtag-2.1.0.tgz", - "integrity": "sha512-j6vWzfrGVfyXxge+O0x5sh6cvxAog0a/4Rdd2K36zCMV5eJ+/+tOAngRO8cODMNWbVRdVlmGZQL2YS3yR8bIUA==", - "license": "MIT", - "dependencies": { - "es-errors": "^1.3.0", - "get-intrinsic": "^1.2.6", - "has-tostringtag": "^1.0.2", - "hasown": "^2.0.2" - }, - "engines": { - "node": ">= 0.4" - } - }, "node_modules/es6-promise": { "version": "4.2.8", "resolved": "https://registry.npmjs.org/es6-promise/-/es6-promise-4.2.8.tgz", @@ -2126,6 +1946,21 @@ "dev": true, "license": "MIT" }, + "node_modules/fdir": { + 
"version": "6.4.4", + "resolved": "https://registry.npmjs.org/fdir/-/fdir-6.4.4.tgz", + "integrity": "sha512-1NZP+GK4GfuAv3PqKvxQRDMjdSRZjnkq7KfhlNrCNNlZ0ygQFpebfrnfnq/W7fpUnAv9aGWmY1zKx7FYL3gwhg==", + "dev": true, + "license": "MIT", + "peerDependencies": { + "picomatch": "^3 || ^4" + }, + "peerDependenciesMeta": { + "picomatch": { + "optional": true + } + } + }, "node_modules/file-entry-cache": { "version": "8.0.0", "resolved": "https://registry.npmjs.org/file-entry-cache/-/file-entry-cache-8.0.0.tgz", @@ -2139,6 +1974,18 @@ "node": ">=16.0.0" } }, + "node_modules/file-selector": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/file-selector/-/file-selector-2.1.2.tgz", + "integrity": "sha512-QgXo+mXTe8ljeqUFaX3QVHc5osSItJ/Km+xpocx0aSqWGMSCf6qYs/VnzZgS864Pjn5iceMRFigeAV7AfTlaig==", + "license": "MIT", + "dependencies": { + "tslib": "^2.7.0" + }, + "engines": { + "node": ">= 12" + } + }, "node_modules/find-root": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/find-root/-/find-root-1.1.0.tgz", @@ -2183,41 +2030,6 @@ "dev": true, "license": "ISC" }, - "node_modules/follow-redirects": { - "version": "1.15.9", - "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.9.tgz", - "integrity": "sha512-gew4GsXizNgdoRyqmyfMHyAmXsZDk6mHkSxZFCzW9gwlbtOW44CDtYavM+y+72qD/Vq2l550kMF52DT8fOLJqQ==", - "funding": [ - { - "type": "individual", - "url": "https://github.com/sponsors/RubenVerborgh" - } - ], - "license": "MIT", - "engines": { - "node": ">=4.0" - }, - "peerDependenciesMeta": { - "debug": { - "optional": true - } - } - }, - "node_modules/form-data": { - "version": "4.0.2", - "resolved": "https://registry.npmjs.org/form-data/-/form-data-4.0.2.tgz", - "integrity": "sha512-hGfm/slu0ZabnNt4oaRZ6uREyfCj6P4fT/n6A1rGV+Z0VdGXjfOhVUpkn6qVQONHGIFwmveGXyDs75+nr6FM8w==", - "license": "MIT", - "dependencies": { - "asynckit": "^0.4.0", - "combined-stream": "^1.0.8", - "es-set-tostringtag": "^2.1.0", - "mime-types": "^2.1.12" - }, - "engines": { - "node": ">= 6" - } - }, "node_modules/formik": { "version": "2.4.6", "resolved": "https://registry.npmjs.org/formik/-/formik-2.4.6.tgz", @@ -2243,6 +2055,21 @@ "react": ">=16.8.0" } }, + "node_modules/fsevents": { + "version": "2.3.3", + "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.3.tgz", + "integrity": "sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw==", + "dev": true, + "hasInstallScript": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": "^8.16.0 || ^10.6.0 || >=11.0.0" + } + }, "node_modules/function-bind": { "version": "1.1.2", "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.2.tgz", @@ -2252,30 +2079,6 @@ "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/get-intrinsic": { - "version": "1.3.0", - "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.3.0.tgz", - "integrity": "sha512-9fSjSaos/fRIVIp+xSJlE6lfwhES7LNtKaCBIamHsjr2na1BiABJPo0mOjjz8GJDURarmCPGqaiVg5mfjb98CQ==", - "license": "MIT", - "dependencies": { - "call-bind-apply-helpers": "^1.0.2", - "es-define-property": "^1.0.1", - "es-errors": "^1.3.0", - "es-object-atoms": "^1.1.1", - "function-bind": "^1.1.2", - "get-proto": "^1.0.1", - "gopd": "^1.2.0", - "has-symbols": "^1.1.0", - "hasown": "^2.0.2", - "math-intrinsics": "^1.1.0" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, "node_modules/get-nonce": { "version": 
"1.0.1", "resolved": "https://registry.npmjs.org/get-nonce/-/get-nonce-1.0.1.tgz", @@ -2285,19 +2088,6 @@ "node": ">=6" } }, - "node_modules/get-proto": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/get-proto/-/get-proto-1.0.1.tgz", - "integrity": "sha512-sTSfBjoXBp89JvIKIefqw7U2CCebsc74kiY6awiGogKtoSGbgjYE/G/+l9sF3MWFPNc9IcoOC4ODfKHfxFmp0g==", - "license": "MIT", - "dependencies": { - "dunder-proto": "^1.0.1", - "es-object-atoms": "^1.0.0" - }, - "engines": { - "node": ">= 0.4" - } - }, "node_modules/glob-parent": { "version": "6.0.2", "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-6.0.2.tgz", @@ -2334,18 +2124,6 @@ "url": "https://github.com/sponsors/sindresorhus" } }, - "node_modules/gopd": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/gopd/-/gopd-1.2.0.tgz", - "integrity": "sha512-ZUKRh6/kUFoAiTAtTYPZJ3hw9wNxx+BIBOijnlG9PnrJsCcSjs1wyyD6vJpaYtgnzDrKYRSqf3OO6Rfa93xsRg==", - "license": "MIT", - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, "node_modules/has-flag": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", @@ -2356,33 +2134,6 @@ "node": ">=8" } }, - "node_modules/has-symbols": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.1.0.tgz", - "integrity": "sha512-1cDNdwJ2Jaohmb3sg4OmKaMBwuC48sYni5HUw2DvsC8LjGTLK9h+eb1X6RyuOHe4hT0ULCW68iomhjUoKUqlPQ==", - "license": "MIT", - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/has-tostringtag": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/has-tostringtag/-/has-tostringtag-1.0.2.tgz", - "integrity": "sha512-NqADB8VjPFLM2V0VvHUewwwsw0ZWBaIdgo+ieHtK3hasLz4qeCRjYcqfB6AQrBggRKppKF8L52/VqdVsO47Dlw==", - "license": "MIT", - "dependencies": { - "has-symbols": "^1.0.3" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, "node_modules/hasown": { "version": "2.0.2", "resolved": "https://registry.npmjs.org/hasown/-/hasown-2.0.2.tgz", @@ -2682,9 +2433,9 @@ } }, "node_modules/lucide-react": { - "version": "0.479.0", - "resolved": "https://registry.npmjs.org/lucide-react/-/lucide-react-0.479.0.tgz", - "integrity": "sha512-aBhNnveRhorBOK7uA4gDjgaf+YlHMdMhQ/3cupk6exM10hWlEU+2QtWYOfhXhjAsmdb6LeKR+NZnow4UxRRiTQ==", + "version": "0.511.0", + "resolved": "https://registry.npmjs.org/lucide-react/-/lucide-react-0.511.0.tgz", + "integrity": "sha512-VK5a2ydJ7xm8GvBeKLS9mu1pVK6ucef9780JVUjw6bAjJL/QXnd4Y0p7SPeOUMC27YhzNCZvm5d/QX0Tp3rc0w==", "license": "ISC", "peerDependencies": { "react": "^16.5.1 || ^17.0.0 || ^18.0.0 || ^19.0.0" @@ -2701,103 +2452,12 @@ "global": "^4.4.0" } }, - "node_modules/mantine-react-table": { - "version": "2.0.0-beta.9", - "resolved": "https://registry.npmjs.org/mantine-react-table/-/mantine-react-table-2.0.0-beta.9.tgz", - "integrity": "sha512-ZdfcwebWaPERoDvAuk43VYcBCzamohARVclnbuepT0PHZ0wRcDPMBR+zgaocL+pFy8EXUGwvWTOKNh25ITpjNQ==", - "license": "MIT", - "dependencies": { - "@tanstack/match-sorter-utils": "8.19.4", - "@tanstack/react-table": "8.20.5", - "@tanstack/react-virtual": "3.11.2" - }, - "engines": { - "node": ">=16" - }, - "funding": { - "type": "github", - "url": "https://github.com/sponsors/kevinvandy" - }, - "peerDependencies": { - "@mantine/core": "^7.9", - "@mantine/dates": "^7.9", - "@mantine/hooks": "^7.9", - "@tabler/icons-react": ">=2.23.0", - "clsx": ">=2", - 
"dayjs": ">=1.11", - "react": ">=18.0", - "react-dom": ">=18.0" - } - }, - "node_modules/mantine-react-table/node_modules/@tanstack/react-table": { - "version": "8.20.5", - "resolved": "https://registry.npmjs.org/@tanstack/react-table/-/react-table-8.20.5.tgz", - "integrity": "sha512-WEHopKw3znbUZ61s9i0+i9g8drmDo6asTWbrQh8Us63DAk/M0FkmIqERew6P71HI75ksZ2Pxyuf4vvKh9rAkiA==", - "license": "MIT", - "dependencies": { - "@tanstack/table-core": "8.20.5" - }, - "engines": { - "node": ">=12" - }, - "funding": { - "type": "github", - "url": "https://github.com/sponsors/tannerlinsley" - }, - "peerDependencies": { - "react": ">=16.8", - "react-dom": ">=16.8" - } - }, - "node_modules/mantine-react-table/node_modules/@tanstack/table-core": { - "version": "8.20.5", - "resolved": "https://registry.npmjs.org/@tanstack/table-core/-/table-core-8.20.5.tgz", - "integrity": "sha512-P9dF7XbibHph2PFRz8gfBKEXEY/HJPOhym8CHmjF8y3q5mWpKx9xtZapXQUWCgkqvsK0R46Azuz+VaxD4Xl+Tg==", - "license": "MIT", - "engines": { - "node": ">=12" - }, - "funding": { - "type": "github", - "url": "https://github.com/sponsors/tannerlinsley" - } - }, - "node_modules/math-intrinsics": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/math-intrinsics/-/math-intrinsics-1.1.0.tgz", - "integrity": "sha512-/IXtbwEk5HTPyEwyKX6hGkYXxM9nbj64B+ilVJnC/R6B0pH5G4V3b0pVbL7DBj4tkhBAppbQUlf6F6Xl9LHu1g==", - "license": "MIT", - "engines": { - "node": ">= 0.4" - } - }, "node_modules/memoize-one": { "version": "5.2.1", "resolved": "https://registry.npmjs.org/memoize-one/-/memoize-one-5.2.1.tgz", "integrity": "sha512-zYiwtZUcYyXKo/np96AGZAckk+FWWsUdJ3cHGGmld7+AhvcWmQyGCYUh1hc4Q/pkOhb65dQR/pqCyK0cOaHz4Q==", "license": "MIT" }, - "node_modules/mime-db": { - "version": "1.52.0", - "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz", - "integrity": "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==", - "license": "MIT", - "engines": { - "node": ">= 0.6" - } - }, - "node_modules/mime-types": { - "version": "2.1.35", - "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz", - "integrity": "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==", - "license": "MIT", - "dependencies": { - "mime-db": "1.52.0" - }, - "engines": { - "node": ">= 0.6" - } - }, "node_modules/min-document": { "version": "2.19.0", "resolved": "https://registry.npmjs.org/min-document/-/min-document-2.19.0.tgz", @@ -3023,6 +2683,19 @@ "integrity": "sha512-xceH2snhtb5M9liqDsmEw56le376mTZkEX/jEb/RxNFyegNul7eNslCXP9FDj/Lcu0X8KEyMceP2ntpaHrDEVA==", "license": "ISC" }, + "node_modules/picomatch": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-4.0.2.tgz", + "integrity": "sha512-M7BAV6Rlcy5u+m6oPhAPFgJTzAioX/6B0DxyvDlo9l8+T3nLKbrczg2WLUyzd45L8RqfUMyGPzekbMvX2Ldkwg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/jonschlinkert" + } + }, "node_modules/pkcs7": { "version": "1.0.4", "resolved": "https://registry.npmjs.org/pkcs7/-/pkcs7-1.0.4.tgz", @@ -3078,6 +2751,7 @@ "version": "3.5.3", "resolved": "https://registry.npmjs.org/prettier/-/prettier-3.5.3.tgz", "integrity": "sha512-QQtaxnoDJeAkDvDKWCLiwIXkTgRhwYDEQCghU9Z6q03iyek/rxRh/2lC3HB7P8sWT2xC/y5JDctPLBIGzHKbhw==", + "dev": true, "license": "MIT", "bin": { "prettier": "bin/prettier.cjs" @@ -3121,12 +2795,6 @@ "integrity": 
"sha512-SVtmxhRE/CGkn3eZY1T6pC8Nln6Fr/lu1mKSgRud0eC73whjGfoAogbn78LkD8aFL0zz3bAFerKSnOl7NlErBA==", "license": "MIT" }, - "node_modules/proxy-from-env": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/proxy-from-env/-/proxy-from-env-1.1.0.tgz", - "integrity": "sha512-D+zkORCbA9f1tdWRK0RaCR3GPv50cMxcrz4X8k5LTSUD1Dkw47mKJEZQNunItRTkWwgtaUSo1RVFRIG9ZXiFYg==", - "license": "MIT" - }, "node_modules/punycode": { "version": "2.3.1", "resolved": "https://registry.npmjs.org/punycode/-/punycode-2.3.1.tgz", @@ -3181,12 +2849,14 @@ "node": ">=6" } }, - "node_modules/react-dropzone-esm": { - "version": "15.2.0", - "resolved": "https://registry.npmjs.org/react-dropzone-esm/-/react-dropzone-esm-15.2.0.tgz", - "integrity": "sha512-pPwR8xWVL+tFLnbAb8KVH5f6Vtl397tck8dINkZ1cPMxHWH+l9dFmIgRWgbh7V7jbjIcuKXCsVrXbhQz68+dVA==", + "node_modules/react-dropzone": { + "version": "14.3.8", + "resolved": "https://registry.npmjs.org/react-dropzone/-/react-dropzone-14.3.8.tgz", + "integrity": "sha512-sBgODnq+lcA4P296DY4wacOZz3JFpD99fp+hb//iBO2HHnyeZU3FwWyXJ6salNpqQdsZrgMrotuko/BdJMV8Ug==", "license": "MIT", "dependencies": { + "attr-accept": "^2.2.4", + "file-selector": "^2.1.0", "prop-types": "^15.8.1" }, "engines": { @@ -3288,15 +2958,13 @@ } }, "node_modules/react-router": { - "version": "7.3.0", - "resolved": "https://registry.npmjs.org/react-router/-/react-router-7.3.0.tgz", - "integrity": "sha512-466f2W7HIWaNXTKM5nHTqNxLrHTyXybm7R0eBlVSt0k/u55tTCDO194OIx/NrYD4TS5SXKTNekXfT37kMKUjgw==", + "version": "7.6.0", + "resolved": "https://registry.npmjs.org/react-router/-/react-router-7.6.0.tgz", + "integrity": "sha512-GGufuHIVCJDbnIAXP3P9Sxzq3UUsddG3rrI3ut1q6m0FI6vxVBF3JoPQ38+W/blslLH4a5Yutp8drkEpXoddGQ==", "license": "MIT", "dependencies": { - "@types/cookie": "^0.6.0", "cookie": "^1.0.1", - "set-cookie-parser": "^2.6.0", - "turbo-stream": "2.4.0" + "set-cookie-parser": "^2.6.0" }, "engines": { "node": ">=20.0.0" @@ -3312,12 +2980,12 @@ } }, "node_modules/react-router-dom": { - "version": "7.3.0", - "resolved": "https://registry.npmjs.org/react-router-dom/-/react-router-dom-7.3.0.tgz", - "integrity": "sha512-z7Q5FTiHGgQfEurX/FBinkOXhWREJIAB2RiU24lvcBa82PxUpwqvs/PAXb9lJyPjTs2jrl6UkLvCZVGJPeNuuQ==", + "version": "7.6.0", + "resolved": "https://registry.npmjs.org/react-router-dom/-/react-router-dom-7.6.0.tgz", + "integrity": "sha512-DYgm6RDEuKdopSyGOWZGtDfSm7Aofb8CCzgkliTjtu/eDuB0gcsv6qdFhhi8HdtmA+KHkt5MfZ5K2PdzjugYsA==", "license": "MIT", "dependencies": { - "react-router": "7.3.0" + "react-router": "7.6.0" }, "engines": { "node": ">=20.0.0" @@ -3365,9 +3033,9 @@ } }, "node_modules/react-textarea-autosize": { - "version": "8.5.6", - "resolved": "https://registry.npmjs.org/react-textarea-autosize/-/react-textarea-autosize-8.5.6.tgz", - "integrity": "sha512-aT3ioKXMa8f6zHYGebhbdMD2L00tKeRX1zuVuDx9YQK/JLLRSaSxq3ugECEmUB9z2kvk6bFSIoRHLkkUv0RJiw==", + "version": "8.5.9", + "resolved": "https://registry.npmjs.org/react-textarea-autosize/-/react-textarea-autosize-8.5.9.tgz", + "integrity": "sha512-U1DGlIQN5AwgjTyOEnI1oCcMuEr1pv1qOtklB2l4nyMGbHzWrI0eFsYK0zos2YWqAolJyG0IWJaqWmWj5ETh0A==", "license": "MIT", "dependencies": { "@babel/runtime": "^7.20.13", @@ -3495,12 +3163,6 @@ "integrity": "sha512-dYnhHh0nJoMfnkZs6GmmhFknAGRrLznOu5nc9ML+EJxGvrx6H7teuevqVqCuPcPK//3eDrrjQhehXVx9cnkGdw==", "license": "MIT" }, - "node_modules/remove-accents": { - "version": "0.5.0", - "resolved": "https://registry.npmjs.org/remove-accents/-/remove-accents-0.5.0.tgz", - "integrity": 
"sha512-8g3/Otx1eJaVD12e31UbJj1YzdtVvzH85HV7t+9MJYk/u3XmkOUJ5Ys9wQrf9PCPK8+xn4ymzqYCiZl6QWKn+A==", - "license": "MIT" - }, "node_modules/resolve": { "version": "1.22.10", "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.22.10.tgz", @@ -3691,6 +3353,23 @@ "integrity": "sha512-lBN9zLN/oAf68o3zNXYrdCt1kP8WsiGW8Oo2ka41b2IM5JL/S1CTyX1rW0mb/zSuJun0ZUrDxx4sqvYS2FWzPA==", "license": "MIT" }, + "node_modules/tinyglobby": { + "version": "0.2.13", + "resolved": "https://registry.npmjs.org/tinyglobby/-/tinyglobby-0.2.13.tgz", + "integrity": "sha512-mEwzpUgrLySlveBwEVDMKk5B57bhLPYovRfPAXD5gA/98Opn0rCDj3GtLwFvCvH5RK9uPCExUROW5NjDwvqkxw==", + "dev": true, + "license": "MIT", + "dependencies": { + "fdir": "^6.4.4", + "picomatch": "^4.0.2" + }, + "engines": { + "node": ">=12.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/SuperchupuDev" + } + }, "node_modules/toposort": { "version": "2.0.2", "resolved": "https://registry.npmjs.org/toposort/-/toposort-2.0.2.tgz", @@ -3703,12 +3382,6 @@ "integrity": "sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w==", "license": "0BSD" }, - "node_modules/turbo-stream": { - "version": "2.4.0", - "resolved": "https://registry.npmjs.org/turbo-stream/-/turbo-stream-2.4.0.tgz", - "integrity": "sha512-FHncC10WpBd2eOmGwpmQsWLDoK4cqsA/UT/GqNoaKOQnT8uzhtCbg3EoUDMvqpOSAI0S26mr0rkjzbOO6S3v1g==", - "license": "ISC" - }, "node_modules/type-check": { "version": "0.4.0", "resolved": "https://registry.npmjs.org/type-check/-/type-check-0.4.0.tgz", @@ -3780,9 +3453,9 @@ } }, "node_modules/use-isomorphic-layout-effect": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/use-isomorphic-layout-effect/-/use-isomorphic-layout-effect-1.2.0.tgz", - "integrity": "sha512-q6ayo8DWoPZT0VdG4u3D3uxcgONP3Mevx2i2b0434cwWBoL+aelL1DzkXI6w3PhTZzUeR2kaVlZn70iCiseP6w==", + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/use-isomorphic-layout-effect/-/use-isomorphic-layout-effect-1.2.1.tgz", + "integrity": "sha512-tpZZ+EX0gaghDAiFR37hj5MgY6ZN55kLiPkJsKxBMZ6GZdOSPJXiOzPM984oPYZ5AnehYx5WQp1+ME8I/P/pRA==", "license": "MIT", "peerDependencies": { "react": "^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0" @@ -3906,15 +3579,18 @@ } }, "node_modules/vite": { - "version": "6.2.1", - "resolved": "https://registry.npmjs.org/vite/-/vite-6.2.1.tgz", - "integrity": "sha512-n2GnqDb6XPhlt9B8olZPrgMD/es/Nd1RdChF6CBD/fHW6pUyUTt2sQW2fPRX5GiD9XEa6+8A6A4f2vT6pSsE7Q==", + "version": "6.3.5", + "resolved": "https://registry.npmjs.org/vite/-/vite-6.3.5.tgz", + "integrity": "sha512-cZn6NDFE7wdTpINgs++ZJ4N49W2vRp8LCKrn3Ob1kYNtOo21vfDoaV5GzBfLU4MovSAB8uNRm4jgzVQZ+mBzPQ==", "dev": true, "license": "MIT", "dependencies": { "esbuild": "^0.25.0", + "fdir": "^6.4.4", + "picomatch": "^4.0.2", "postcss": "^8.5.3", - "rollup": "^4.30.1" + "rollup": "^4.34.9", + "tinyglobby": "^0.2.13" }, "bin": { "vite": "bin/vite.js" @@ -4008,21 +3684,6 @@ "node": ">=0.10.0" } }, - "node_modules/yaml": { - "version": "2.7.0", - "resolved": "https://registry.npmjs.org/yaml/-/yaml-2.7.0.tgz", - "integrity": "sha512-+hSoy/QHluxmC9kCIJyL/uyFmLmc+e5CFR5Wa+bpIhIj85LVb9ZH2nVnqrHoSvKogwODv0ClqZkmiSSaIH5LTA==", - "dev": true, - "license": "ISC", - "optional": true, - "peer": true, - "bin": { - "yaml": "bin.mjs" - }, - "engines": { - "node": ">= 14" - } - }, "node_modules/yocto-queue": { "version": "0.1.0", "resolved": "https://registry.npmjs.org/yocto-queue/-/yocto-queue-0.1.0.tgz", diff --git a/frontend/package.json b/frontend/package.json index 3b287d79..1f6c769d 100644 --- 
a/frontend/package.json +++ b/frontend/package.json @@ -14,25 +14,20 @@ "@dnd-kit/modifiers": "^9.0.0", "@dnd-kit/sortable": "^10.0.0", "@dnd-kit/utilities": "^3.2.2", - "@mantine/charts": "^7.17.2", - "@mantine/core": "^7.17.2", - "@mantine/dates": "^7.17.2", - "@mantine/dropzone": "^7.17.2", - "@mantine/form": "^7.17.3", - "@mantine/hooks": "^7.17.2", - "@mantine/notifications": "^7.17.2", - "@tabler/icons-react": "^3.31.0", + "@mantine/charts": "~8.0.1", + "@mantine/core": "~8.0.1", + "@mantine/dates": "~8.0.1", + "@mantine/dropzone": "~8.0.1", + "@mantine/form": "~8.0.1", + "@mantine/hooks": "~8.0.1", + "@mantine/notifications": "~8.0.1", "@tanstack/react-table": "^8.21.2", "allotment": "^1.20.3", - "axios": "^1.8.2", - "clsx": "^2.1.1", "dayjs": "^1.11.13", "formik": "^2.4.6", "hls.js": "^1.5.20", - "lucide-react": "^0.479.0", - "mantine-react-table": "^2.0.0-beta.9", + "lucide-react": "^0.511.0", "mpegts.js": "^1.8.0", - "prettier": "^3.5.3", "react": "^19.0.0", "react-dom": "^19.0.0", "react-draggable": "^4.4.6", @@ -55,6 +50,7 @@ "eslint-plugin-react-hooks": "^5.1.0", "eslint-plugin-react-refresh": "^0.4.19", "globals": "^15.15.0", + "prettier": "^3.5.3", "vite": "^6.2.0" } } diff --git a/frontend/src/App.jsx b/frontend/src/App.jsx index 7295d12e..a057be50 100644 --- a/frontend/src/App.jsx +++ b/frontend/src/App.jsx @@ -14,13 +14,13 @@ import Guide from './pages/Guide'; import Stats from './pages/Stats'; import DVR from './pages/DVR'; import Settings from './pages/Settings'; +import Users from './pages/Users'; import useAuthStore from './store/auth'; import FloatingVideo from './components/FloatingVideo'; import { WebsocketProvider } from './WebSocket'; import { Box, AppShell, MantineProvider } from '@mantine/core'; import '@mantine/core/styles.css'; // Ensure Mantine global styles load import '@mantine/notifications/styles.css'; -import 'mantine-react-table/styles.css'; import '@mantine/dropzone/styles.css'; import '@mantine/dates/styles.css'; import './index.css'; @@ -75,18 +75,17 @@ const App = () => { const loggedIn = await initializeAuth(); if (loggedIn) { await initData(); - setIsAuthenticated(true); } else { await logout(); } } catch (error) { - console.error("Auth check failed:", error); + console.error('Auth check failed:', error); await logout(); } }; checkAuth(); - }, [initializeAuth, initData, setIsAuthenticated, logout]); + }, [initializeAuth, initData, logout]); return ( { } /> } /> } /> + } /> } /> ) : ( diff --git a/frontend/src/api.js b/frontend/src/api.js index 73bbde7d..17c38b90 100644 --- a/frontend/src/api.js +++ b/frontend/src/api.js @@ -9,6 +9,7 @@ import useStreamProfilesStore from './store/streamProfiles'; import useSettingsStore from './store/settings'; import { notifications } from '@mantine/notifications'; import useChannelsTableStore from './store/channelsTable'; +import useUsersStore from './store/users'; // If needed, you can set a base host or keep it empty if relative requests const host = import.meta.env.DEV @@ -1084,6 +1085,21 @@ export default class API { } } + static async checkSetting(values) { + const { id, ...payload } = values; + + try { + const response = await request(`${host}/api/core/settings/check/`, { + method: 'POST', + body: payload, + }); + + return response; + } catch (e) { + errorNotification('Failed to update settings', e); + } + } + static async updateSetting(values) { const { id, ...payload } = values; @@ -1392,4 +1408,59 @@ export default class API { return null; } } + + static async me() { + return await 
request(`${host}/api/accounts/users/me/`);
+  }
+
+  static async getUsers() {
+    try {
+      const response = await request(`${host}/api/accounts/users/`);
+      return response;
+    } catch (e) {
+      errorNotification('Failed to fetch users', e);
+    }
+  }
+
+  static async createUser(body) {
+    try {
+      const response = await request(`${host}/api/accounts/users/`, {
+        method: 'POST',
+        body,
+      });
+
+      useUsersStore.getState().addUser(response);
+
+      return response;
+    } catch (e) {
+      errorNotification('Failed to create user', e);
+    }
+  }
+
+  static async updateUser(id, body) {
+    try {
+      const response = await request(`${host}/api/accounts/users/${id}/`, {
+        method: 'PATCH',
+        body,
+      });
+
+      useUsersStore.getState().updateUser(response);
+
+      return response;
+    } catch (e) {
+      errorNotification('Failed to update user', e);
+    }
+  }
+
+  static async deleteUser(id) {
+    try {
+      await request(`${host}/api/accounts/users/${id}/`, {
+        method: 'DELETE',
+      });
+
+      useUsersStore.getState().removeUser(id);
+    } catch (e) {
+      errorNotification('Failed to delete user', e);
+    }
+  }
 }
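The new `API` helpers above are thin wrappers over the accounts endpoints and keep the `useUsersStore` cache in sync after each mutation. A hedged sketch of the same round-trips driven from Python with `requests`; the host/port, the credentials, and the SimpleJWT bearer flow are assumptions based on the token routes registered in `apps/accounts/api_urls.py`:

import requests

BASE = "http://localhost:9191"  # assumed dev host/port

# Obtain a JWT access token (SimpleJWT returns {"access": ..., "refresh": ...})
token = requests.post(
    f"{BASE}/api/accounts/token/",
    json={"username": "admin", "password": "secret"},
).json()["access"]
headers = {"Authorization": f"Bearer {token}"}

# Mirrors API.getUsers / createUser / updateUser / deleteUser
users = requests.get(f"{BASE}/api/accounts/users/", headers=headers).json()
created = requests.post(
    f"{BASE}/api/accounts/users/",
    json={"username": "viewer", "password": "pw", "user_level": 0},
    headers=headers,
).json()
requests.patch(
    f"{BASE}/api/accounts/users/{created['id']}/",
    json={"user_level": 1},
    headers=headers,
)
requests.delete(f"{BASE}/api/accounts/users/{created['id']}/", headers=headers)

diff --git a/frontend/src/components/ConfirmationDialog.jsx b/frontend/src/components/ConfirmationDialog.jsx index 3c0f15e7..822b46f1 100644 --- a/frontend/src/components/ConfirmationDialog.jsx +++ b/frontend/src/components/ConfirmationDialog.jsx @@ -18,60 +18,60 @@ import useWarningsStore from '../store/warnings'; * @param {string} [props.size='md'] - Size of the modal */ const ConfirmationDialog = ({ - opened, - onClose, - onConfirm, - title = 'Confirm Action', - message = 'Are you sure you want to proceed?', - confirmLabel = 'Confirm', - cancelLabel = 'Cancel', - actionKey, - onSuppressChange, - size = 'md', // Add default size parameter - md is a medium width + opened, + onClose, + onConfirm, + title = 'Confirm Action', + message = 'Are you sure you want to proceed?', + confirmLabel = 'Confirm', + cancelLabel = 'Cancel', + actionKey, + onSuppressChange, + size = 'md', // Add default size parameter - md is a medium width }) => { - const suppressWarning = useWarningsStore((s) => s.suppressWarning); - const isWarningSuppressed = useWarningsStore((s) => s.isWarningSuppressed); - const [suppressChecked, setSuppressChecked] = useState( - isWarningSuppressed(actionKey) - ); + const suppressWarning = useWarningsStore((s) => s.suppressWarning); + const isWarningSuppressed = useWarningsStore((s) => s.isWarningSuppressed); + const [suppressChecked, setSuppressChecked] = useState( + isWarningSuppressed(actionKey) + ); - const handleToggleSuppress = (e) => { - setSuppressChecked(e.currentTarget.checked); - if (onSuppressChange) { - onSuppressChange(e.currentTarget.checked); - } - }; + const handleToggleSuppress = (e) => { + setSuppressChecked(e.currentTarget.checked); + if (onSuppressChange) { + onSuppressChange(e.currentTarget.checked); + } + }; - const handleConfirm = () => { - if (suppressChecked) { - suppressWarning(actionKey); - } - onConfirm(); - }; + const handleConfirm = () => { + if (suppressChecked) { + suppressWarning(actionKey); + } + onConfirm(); + }; - return ( - - {message} + return ( + + {message} - {actionKey && ( - - )} + {actionKey && ( + + )} - - - - - - ); + + + + + + ); }; export default ConfirmationDialog; diff --git a/frontend/src/components/FloatingVideo.jsx b/frontend/src/components/FloatingVideo.jsx index 46c191eb..7f1e1c53 100644 --- a/frontend/src/components/FloatingVideo.jsx +++ b/frontend/src/components/FloatingVideo.jsx @@ -73,72 +73,109 @@ export default function FloatingVideo() { console.log("Attempting to play 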
stream:", streamUrl); try { - // If the browser supports MSE for live playback, initialize mpegts.js - if (mpegts.getFeatureList().mseLivePlayback) { - // Set loading flag - setIsLoading(true); - - const player = mpegts.createPlayer({ - type: 'mpegts', // MPEG-TS format - url: streamUrl, - isLive: true, - enableWorker: true, - enableStashBuffer: false, // Try disabling stash buffer for live streams - liveBufferLatencyChasing: true, - liveSync: true, - cors: true, // Enable CORS for cross-domain requests - // Add error recovery options - autoCleanupSourceBuffer: true, - autoCleanupMaxBackwardDuration: 10, - autoCleanupMinBackwardDuration: 5, - reuseRedirectedURL: true, - }); - - player.attachMediaElement(videoRef.current); - - // Add events to track loading state - player.on(mpegts.Events.LOADING_COMPLETE, () => { - setIsLoading(false); - }); - - player.on(mpegts.Events.METADATA_ARRIVED, () => { - setIsLoading(false); - }); - - // Add error event handler - player.on(mpegts.Events.ERROR, (errorType, errorDetail) => { - setIsLoading(false); - - // Filter out aborted errors - if (errorType !== 'NetworkError' || !errorDetail?.includes('aborted')) { - console.error('Player error:', errorType, errorDetail); - setLoadError(`Error: ${errorType}${errorDetail ? ` - ${errorDetail}` : ''}`); - } - }); - - player.load(); - - // Don't auto-play until we've loaded properly - player.on(mpegts.Events.MEDIA_INFO, () => { - setIsLoading(false); - try { - player.play().catch(e => { - console.log("Auto-play prevented:", e); - setLoadError("Auto-play was prevented. Click play to start."); - }); - } catch (e) { - console.log("Error during play:", e); - setLoadError(`Playback error: ${e.message}`); - } - }); - - // Store player instance so we can clean up later - playerRef.current = player; + // Check for MSE support first + if (!mpegts.getFeatureList().mseLivePlayback) { + setIsLoading(false); + setLoadError("Your browser doesn't support live video streaming. 
Please try Chrome or Edge."); + return; } + + // Check for basic codec support + const video = document.createElement('video'); + const h264Support = video.canPlayType('video/mp4; codecs="avc1.42E01E"'); + const aacSupport = video.canPlayType('audio/mp4; codecs="mp4a.40.2"'); + + console.log("Browser codec support - H264:", h264Support, "AAC:", aacSupport); + + // If the browser supports MSE for live playback, initialize mpegts.js + setIsLoading(true); + + const player = mpegts.createPlayer({ + type: 'mpegts', + url: streamUrl, + isLive: true, + enableWorker: true, + enableStashBuffer: false, + liveBufferLatencyChasing: true, + liveSync: true, + cors: true, + autoCleanupSourceBuffer: true, + autoCleanupMaxBackwardDuration: 10, + autoCleanupMinBackwardDuration: 5, + reuseRedirectedURL: true, + }); + + player.attachMediaElement(videoRef.current); + + // Add events to track loading state + player.on(mpegts.Events.LOADING_COMPLETE, () => { + setIsLoading(false); + }); + + player.on(mpegts.Events.METADATA_ARRIVED, () => { + setIsLoading(false); + }); + + // Enhanced error event handler with codec-specific messages + player.on(mpegts.Events.ERROR, (errorType, errorDetail) => { + setIsLoading(false); + + // Filter out aborted errors + if (errorType !== 'NetworkError' || !errorDetail?.includes('aborted')) { + console.error('Player error:', errorType, errorDetail); + + // Provide specific error messages based on error type + let errorMessage = `Error: ${errorType}`; + + if (errorType === 'MediaError') { + // Try to determine if it's an audio or video codec issue + const errorString = errorDetail?.toLowerCase() || ''; + + if (errorString.includes('audio') || errorString.includes('ac3') || errorString.includes('ac-3')) { + errorMessage = "Audio codec not supported by your browser. Try Chrome or Edge for better audio codec support."; + } else if (errorString.includes('video') || errorString.includes('h264') || errorString.includes('h.264')) { + errorMessage = "Video codec not supported by your browser. Try Chrome or Edge for better video codec support."; + } else if (errorString.includes('mse')) { + errorMessage = "Your browser doesn't support the codecs used in this stream. Try Chrome or Edge for better compatibility."; + } else { + errorMessage = "Media codec not supported by your browser. This may be due to unsupported audio (AC3) or video codecs. Try Chrome or Edge."; + } + } else if (errorDetail) { + errorMessage += ` - ${errorDetail}`; + } + + setLoadError(errorMessage); + } + }); + + player.load(); + + // Don't auto-play until we've loaded properly + player.on(mpegts.Events.MEDIA_INFO, () => { + setIsLoading(false); + try { + player.play().catch(e => { + console.log("Auto-play prevented:", e); + setLoadError("Auto-play was prevented. Click play to start."); + }); + } catch (e) { + console.log("Error during play:", e); + setLoadError(`Playback error: ${e.message}`); + } + }); + + // Store player instance so we can clean up later + playerRef.current = player; } catch (error) { setIsLoading(false); - setLoadError(`Initialization error: ${error.message}`); console.error("Error initializing player:", error); + + // Provide helpful error message based on the error + if (error.message?.includes('codec') || error.message?.includes('format')) { + setLoadError("Codec not supported by your browser. 
Please try a different browser (Chrome/Edge recommended)."); + } else { + setLoadError(`Initialization error: ${error.message}`); + } } // Cleanup when component unmounts or streamUrl changes @@ -191,7 +228,7 @@ export default function FloatingVideo() { style={{ width: '100%', height: '180px', backgroundColor: '#000' }} /> - {/* Loading overlay */} + {/* Loading overlay - only show when loading */} {isLoading && ( )} - - {/* Error message overlay */} - {!isLoading && loadError && ( - - - {loadError} - - - )} + + {/* Error message below video - doesn't block controls */} + {!isLoading && loadError && ( + + + {loadError} + + + )} ); diff --git a/frontend/src/components/M3URefreshNotification.jsx b/frontend/src/components/M3URefreshNotification.jsx index e669b444..8a6647cb 100644 --- a/frontend/src/components/M3URefreshNotification.jsx +++ b/frontend/src/components/M3URefreshNotification.jsx @@ -2,13 +2,13 @@ import React, { useEffect, useState } from 'react'; import usePlaylistsStore from '../store/playlists'; import { notifications } from '@mantine/notifications'; -import { IconCheck } from '@tabler/icons-react'; import useStreamsStore from '../store/streams'; import useChannelsStore from '../store/channels'; import useEPGsStore from '../store/epgs'; import { Stack, Button, Group } from '@mantine/core'; import API from '../api'; import { useNavigate } from 'react-router-dom'; +import { CircleCheck } from 'lucide-react'; export default function M3URefreshNotification() { const playlists = usePlaylistsStore((s) => s.playlists); @@ -40,7 +40,7 @@ export default function M3URefreshNotification() { }); // Special handling for pending setup status - if (data.status === "pending_setup") { + if (data.status === 'pending_setup') { fetchChannelGroups(); fetchPlaylists(); @@ -48,7 +48,8 @@ export default function M3URefreshNotification() { title: `M3U Setup: ${playlist.name}`, message: ( - {data.message || "M3U groups loaded. Please select groups or refresh M3U to complete setup."} + {data.message || + 'M3U groups loaded. 
Please select groups or refresh M3U to complete setup.'} - - - - - - ); -}; - -export default ProxyManager; \ No newline at end of file diff --git a/frontend/src/components/Sidebar.jsx b/frontend/src/components/Sidebar.jsx index 688ce3a6..83bc2fc3 100644 --- a/frontend/src/components/Sidebar.jsx +++ b/frontend/src/components/Sidebar.jsx @@ -10,6 +10,9 @@ import { Copy, ChartLine, Video, + Ellipsis, + LogOut, + User, } from 'lucide-react'; import { Avatar, @@ -21,6 +24,7 @@ import { UnstyledButton, TextInput, ActionIcon, + Menu, } from '@mantine/core'; import logo from '../images/logo.png'; import useChannelsStore from '../store/channels'; @@ -28,6 +32,8 @@ import './sidebar.css'; import useSettingsStore from '../store/settings'; import useAuthStore from '../store/auth'; // Add this import import API from '../api'; +import { USER_LEVELS } from '../constants'; +import UserForm from './forms/User'; const NavLink = ({ item, isActive, collapsed }) => { return ( @@ -63,11 +69,66 @@ const NavLink = ({ item, isActive, collapsed }) => { const Sidebar = ({ collapsed, toggleDrawer, drawerWidth, miniDrawerWidth }) => { const location = useLocation(); + const channels = useChannelsStore((s) => s.channels); const environment = useSettingsStore((s) => s.environment); const isAuthenticated = useAuthStore((s) => s.isAuthenticated); + const authUser = useAuthStore((s) => s.user); + const logout = useAuthStore((s) => s.logout); + const publicIPRef = useRef(null); - const [appVersion, setAppVersion] = useState({ version: '', timestamp: null }); + + const [appVersion, setAppVersion] = useState({ + version: '', + timestamp: null, + }); + const [userFormOpen, setUserFormOpen] = useState(false); + + const closeUserForm = () => setUserFormOpen(false); + + // Navigation Items + const navItems = + authUser && authUser.user_level == USER_LEVELS.ADMIN + ? [ + { + label: 'Channels', + icon: , + path: '/channels', + badge: `(${Object.keys(channels).length})`, + }, + { + label: 'M3U & EPG Manager', + icon: , + path: '/sources', + }, + { label: 'TV Guide', icon: , path: '/guide' }, + { label: 'DVR', icon: @@ -270,6 +319,8 @@ const Sidebar = ({ collapsed, toggleDrawer, drawerWidth, miniDrawerWidth }) => { {appVersion?.timestamp ? 
`-${appVersion.timestamp}` : ''} )} + + ); }; diff --git a/frontend/src/components/forms/AssignChannelNumbers.jsx b/frontend/src/components/forms/AssignChannelNumbers.jsx new file mode 100644 index 00000000..7465cae7 --- /dev/null +++ b/frontend/src/components/forms/AssignChannelNumbers.jsx @@ -0,0 +1,88 @@ +import React, { useState, useEffect, useRef } from 'react'; +import API from '../../api'; +import { + Button, + Modal, + Text, + Group, + Flex, + useMantineTheme, + NumberInput, +} from '@mantine/core'; +import { ListOrdered } from 'lucide-react'; +import { useForm } from '@mantine/form'; +import { notifications } from '@mantine/notifications'; + +const AssignChannelNumbers = ({ channelIds, isOpen, onClose }) => { + const theme = useMantineTheme(); + + const form = useForm({ + mode: 'uncontrolled', + initialValues: { + starting_number: 1, + }, + }); + + const onSubmit = async () => { + const { starting_number } = form.getValues(); + + try { + const result = await API.assignChannelNumbers( + channelIds, + starting_number + ); + + notifications.show({ + title: result.message || 'Channels assigned', + color: 'green.5', + }); + + API.requeryChannels(); + + onClose(); + } catch (err) { + console.error(err); + notifications.show({ + title: 'Failed to assign channels', + color: 'red.5', + }); + } + }; + + if (!isOpen) { + return <>; + } + + return ( + + + Assign Channel #s + + } + styles={{ hannontent: { '--mantine-color-body': '#27272A' } }} + > +
+ + + + + + +
+ ); +}; + +export default AssignChannelNumbers; diff --git a/frontend/src/components/forms/Channel.jsx b/frontend/src/components/forms/Channel.jsx index ac048712..452db052 100644 --- a/frontend/src/components/forms/Channel.jsx +++ b/frontend/src/components/forms/Channel.jsx @@ -5,7 +5,6 @@ import useChannelsStore from '../../store/channels'; import API from '../../api'; import useStreamProfilesStore from '../../store/streamProfiles'; import useStreamsStore from '../../store/streams'; -import { MantineReactTable, useMantineReactTable } from 'mantine-react-table'; import ChannelGroupForm from './ChannelGroup'; import usePlaylistsStore from '../../store/playlists'; import logo from '../../images/logo.png'; @@ -36,6 +35,7 @@ import { ListOrdered, SquarePlus, SquareX, X } from 'lucide-react'; import useEPGsStore from '../../store/epgs'; import { Dropzone } from '@mantine/dropzone'; import { FixedSizeList as List } from 'react-window'; +import { USER_LEVELS, USER_LEVEL_LABELS } from '../../constants'; const ChannelForm = ({ channel = null, isOpen, onClose }) => { const theme = useMantineTheme(); @@ -94,13 +94,17 @@ const ChannelForm = ({ channel = null, isOpen, onClose }) => { const formik = useFormik({ initialValues: { name: '', - channel_number: '', // Change from 0 to empty string for consistency - channel_group_id: Object.keys(channelGroups).length > 0 ? Object.keys(channelGroups)[0] : '', + channel_number: '', // Change from 0 to empty string for consistency + channel_group_id: + Object.keys(channelGroups).length > 0 + ? Object.keys(channelGroups)[0] + : '', stream_profile_id: '0', tvg_id: '', tvc_guide_stationid: '', epg_data_id: '', logo_id: '', + user_level: '0', }, validationSchema: Yup.object({ name: Yup.string().required('Name is required'), @@ -124,7 +128,8 @@ const ChannelForm = ({ channel = null, isOpen, onClose }) => { formattedValues.tvg_id = formattedValues.tvg_id || null; // Ensure tvc_guide_stationid is properly included (no empty strings) - formattedValues.tvc_guide_stationid = formattedValues.tvc_guide_stationid || null; + formattedValues.tvc_guide_stationid = + formattedValues.tvc_guide_stationid || null; if (channel) { // If there's an EPG to set, use our enhanced endpoint @@ -183,7 +188,8 @@ const ChannelForm = ({ channel = null, isOpen, onClose }) => { formik.setValues({ name: channel.name || '', - channel_number: channel.channel_number !== null ? channel.channel_number : '', + channel_number: + channel.channel_number !== null ? channel.channel_number : '', channel_group_id: channel.channel_group_id ? `${channel.channel_group_id}` : '', @@ -194,6 +200,7 @@ const ChannelForm = ({ channel = null, isOpen, onClose }) => { tvc_guide_stationid: channel.tvc_guide_stationid || '', epg_data_id: channel.epg_data_id ?? '', logo_id: channel.logo_id ? `${channel.logo_id}` : '', + user_level: `${channel.user_level}`, }); setChannelStreams(channel.streams || []); @@ -216,134 +223,6 @@ const ChannelForm = ({ channel = null, isOpen, onClose }) => { ); }; - // const activeStreamsTable = useMantineReactTable({ - // data: channelStreams, - // columns: useMemo( - // () => [ - // { - // header: 'Name', - // accessorKey: 'name', - // Cell: ({ cell }) => ( - //
- // {cell.getValue()} - //
- // ), - // }, - // { - // header: 'M3U', - // accessorKey: 'group_name', - // Cell: ({ cell }) => ( - //
- // {cell.getValue()} - //
- // ), - // }, - // ], - // [] - // ), - // enableSorting: false, - // enableBottomToolbar: false, - // enableTopToolbar: false, - // columnFilterDisplayMode: 'popover', - // enablePagination: false, - // enableRowVirtualization: true, - // enableRowOrdering: true, - // rowVirtualizerOptions: { overscan: 5 }, //optionally customize the row virtualizer - // initialState: { - // density: 'compact', - // }, - // enableRowActions: true, - // positionActionsColumn: 'last', - // renderRowActions: ({ row }) => ( - // <> - // removeStream(row.original)} - // > - // {/* Small icon size */} - // - // - // ), - // mantineTableContainerProps: { - // style: { - // height: '200px', - // }, - // }, - // mantineRowDragHandleProps: ({ table }) => ({ - // onDragEnd: () => { - // const { draggingRow, hoveredRow } = table.getState(); - - // if (hoveredRow && draggingRow) { - // channelStreams.splice( - // hoveredRow.index, - // 0, - // channelStreams.splice(draggingRow.index, 1)[0] - // ); - - // setChannelStreams([...channelStreams]); - // } - // }, - // }), - // }); - - // const availableStreamsTable = useMantineReactTable({ - // data: streams, - // columns: useMemo( - // () => [ - // { - // header: 'Name', - // accessorKey: 'name', - // }, - // { - // header: 'M3U', - // accessorFn: (row) => - // playlists.find((playlist) => playlist.id === row.m3u_account)?.name, - // }, - // ], - // [] - // ), - // enableBottomToolbar: false, - // enableTopToolbar: false, - // columnFilterDisplayMode: 'popover', - // enablePagination: false, - // enableRowVirtualization: true, - // rowVirtualizerOptions: { overscan: 5 }, //optionally customize the row virtualizer - // initialState: { - // density: 'compact', - // }, - // enableRowActions: true, - // renderRowActions: ({ row }) => ( - // <> - // addStream(row.original)} - // > - // {/* Small icon size */} - // - // - // ), - // positionActionsColumn: 'last', - // mantineTableContainerProps: { - // style: { - // height: '200px', - // }, - // }, - // }); - // Update the handler for when channel group modal is closed const handleChannelGroupModalClose = (newGroup) => { setChannelGroupModalOpen(false); @@ -353,7 +232,7 @@ const ChannelForm = ({ channel = null, isOpen, onClose }) => { // Preserve all current form values while updating just the channel_group_id formik.setValues({ ...formik.values, - channel_group_id: `${newGroup.id}` + channel_group_id: `${newGroup.id}`, }); } }; @@ -542,6 +421,23 @@ const ChannelForm = ({ channel = null, isOpen, onClose }) => { )} size="xs" /> + + ({ + value: `${option.id}`, + label: option.name, + })) + )} + size="xs" + /> + + { + formik.setFieldValue('channel_group_id', value); // Update Formik's state with the new value + }} + error={ + formik.errors.channel_group_id + ? formik.touched.channel_group_id + : '' + } + data={Object.values(channelGroups).map((option, index) => ({ + value: `${option.id}`, + label: option.name, + }))} + size="xs" + style={{ flex: 1 }} + /> */} + + setChannelGroupModalOpen(true)} + title="Create new group" + size="small" + variant="transparent" + style={{ marginBottom: 5 }} + > + + + + + + ({ + value: `${epg.id}`, + label: epg.name, + }))} + size="xs" + mb="xs" + /> + + {/* Filter Input */} + + setTvgFilter(event.currentTarget.value) + } + mb="xs" + size="xs" + /> + + + + + {({ index, style }) => ( +
+ +
+ )} +
+
+ + +
+ + + + + + + + ); +}; + +export default ChannelsForm; diff --git a/frontend/src/components/forms/EPG.jsx b/frontend/src/components/forms/EPG.jsx index 0c7f78c0..603d0c81 100644 --- a/frontend/src/components/forms/EPG.jsx +++ b/frontend/src/components/forms/EPG.jsx @@ -22,7 +22,6 @@ import { Box, } from '@mantine/core'; import { isNotEmpty, useForm } from '@mantine/form'; -import { IconUpload } from '@tabler/icons-react'; const EPG = ({ epg = null, isOpen, onClose }) => { const epgs = useEPGsStore((state) => state.epgs); @@ -123,7 +122,9 @@ const EPG = ({ epg = null, isOpen, onClose }) => { value: 'schedules_direct', }, ]} - onChange={(event) => handleSourceTypeChange(event.currentTarget.value)} + onChange={(event) => + handleSourceTypeChange(event.currentTarget.value) + } /> { {/* Put checkbox at the same level as Refresh Interval */} - Status - When enabled, this EPG source will auto update. - + + Status + + + When enabled, this EPG source will auto update. + + { - - + diff --git a/frontend/src/components/forms/LoginForm.jsx b/frontend/src/components/forms/LoginForm.jsx index 615ab725..916a2c30 100644 --- a/frontend/src/components/forms/LoginForm.jsx +++ b/frontend/src/components/forms/LoginForm.jsx @@ -5,17 +5,18 @@ import { Paper, Title, TextInput, Button, Center, Stack } from '@mantine/core'; const LoginForm = () => { const login = useAuthStore((s) => s.login); + const logout = useAuthStore((s) => s.logout); const isAuthenticated = useAuthStore((s) => s.isAuthenticated); const initData = useAuthStore((s) => s.initData); const navigate = useNavigate(); // Hook to navigate to other routes const [formData, setFormData] = useState({ username: '', password: '' }); - useEffect(() => { - if (isAuthenticated) { - navigate('/channels'); - } - }, [isAuthenticated, navigate]); + // useEffect(() => { + // if (isAuthenticated) { + // navigate('/channels'); + // } + // }, [isAuthenticated, navigate]); const handleInputChange = (e) => { setFormData({ @@ -27,8 +28,13 @@ const LoginForm = () => { const handleSubmit = async (e) => { e.preventDefault(); await login(formData); - initData(); - navigate('/channels'); // Or any other route you'd like + + try { + await initData(); + navigate('/channels'); + } catch (e) { + console.log(`Failed to login: ${e}`); + } }; return ( @@ -60,7 +66,7 @@ const LoginForm = () => { name="password" value={formData.password} onChange={handleInputChange} - required + // required /> - - - - + ({ + label: r.label, + value: `${r.value}`, + }))} + /> - - User-Agents - - - - + + + Auto-Import Mapped Files + + + - - Stream Profiles - - - - + + + + + + + + , + + + User-Agents + + + + , + + + Stream Profiles + + + + , + + + + Network Access + {accordianValue == 'network-access' && ( + + Comma-Delimited CIDR ranges + + )} + + +
+ + {networkAccessSaved && ( + + )} + {networkAccessError && ( + + )} + {Object.entries(NETWORK_ACCESS_OPTIONS).map( + ([key, config]) => { + return ( + + ); + } + )} + + + + + +
+
+
, + ] + : [] + )}
+
+      <ConfirmationDialog
+        opened={networkAccessConfirmOpen}
+        onClose={() => setNetworkAccessConfirmOpen(false)}
+        onConfirm={saveNetworkAccess}
+        title={`Confirm Network Access Blocks`}
+        message={
+          <>
+            <Text>
+              Your client is not included in the allowed networks for the web
+              UI. Are you sure you want to proceed?
+            </Text>
+            <ul>
+              {netNetworkAccessConfirmCIDRs.map((cidr) => (
+                <li key={cidr}>{cidr}</li>
+              ))}
+            </ul>
+          </>
+ + } + confirmLabel="Save" + cancelLabel="Cancel" + size="md" + /> ); }; diff --git a/frontend/src/pages/Stats.jsx b/frontend/src/pages/Stats.jsx index f60e14d0..ba9829fe 100644 --- a/frontend/src/pages/Stats.jsx +++ b/frontend/src/pages/Stats.jsx @@ -16,7 +16,6 @@ import { Select, Badge, } from '@mantine/core'; -import { MantineReactTable, useMantineReactTable } from 'mantine-react-table'; import { TableHelper } from '../helpers'; import API from '../api'; import useChannelsStore from '../store/channels'; @@ -38,6 +37,8 @@ import useStreamProfilesStore from '../store/streamProfiles'; import usePlaylistsStore from '../store/playlists'; // Add this import import { useLocation } from 'react-router-dom'; import { notifications } from '@mantine/notifications'; +import { CustomTable, useTable } from '../components/tables/CustomTable'; +import useLocalStorage from '../hooks/useLocalStorage'; dayjs.extend(duration); dayjs.extend(relativeTime); @@ -79,21 +80,30 @@ const getStartDate = (uptime) => { }; // Create a separate component for each channel card to properly handle the hook -const ChannelCard = ({ channel, clients, stopClient, stopChannel, logos, channelsByUUID }) => { +const ChannelCard = ({ + channel, + clients, + stopClient, + stopChannel, + logos, + channelsByUUID, +}) => { const location = useLocation(); const [availableStreams, setAvailableStreams] = useState([]); const [isLoadingStreams, setIsLoadingStreams] = useState(false); const [activeStreamId, setActiveStreamId] = useState(null); - const [currentM3UProfile, setCurrentM3UProfile] = useState(null); // Add state for current M3U profile + const [currentM3UProfile, setCurrentM3UProfile] = useState(null); // Add state for current M3U profile + const [data, setData] = useState([]); // Get M3U account data from the playlists store const m3uAccounts = usePlaylistsStore((s) => s.playlists); + const [tableSize] = useLocalStorage('table-size', 'default'); // Create a map of M3U account IDs to names for quick lookup const m3uAccountsMap = useMemo(() => { const map = {}; if (m3uAccounts && Array.isArray(m3uAccounts)) { - m3uAccounts.forEach(account => { + m3uAccounts.forEach((account) => { if (account.id) { map[account.id] = account.name; } @@ -102,17 +112,15 @@ const ChannelCard = ({ channel, clients, stopClient, stopChannel, logos, channel return map; }, [m3uAccounts]); - // Safety check - if channel doesn't have required data, don't render - if (!channel || !channel.channel_id) { - return null; - } - // Update M3U profile information when channel data changes useEffect(() => { // If the channel data includes M3U profile information, update our state if (channel.m3u_profile || channel.m3u_profile_name) { setCurrentM3UProfile({ - name: channel.m3u_profile?.name || channel.m3u_profile_name || 'Default M3U' + name: + channel.m3u_profile?.name || + channel.m3u_profile_name || + 'Default M3U', }); } }, [channel.m3u_profile, channel.m3u_profile_name, channel.stream_id]); @@ -133,8 +141,10 @@ const ChannelCard = ({ channel, clients, stopClient, stopChannel, logos, channel // If we have a channel URL, try to find the matching stream if (channel.url && streamData.length > 0) { // Try to find matching stream based on URL - const matchingStream = streamData.find(stream => - channel.url.includes(stream.url) || stream.url.includes(channel.url) + const matchingStream = streamData.find( + (stream) => + channel.url.includes(stream.url) || + stream.url.includes(channel.url) ); if (matchingStream) { @@ -148,7 +158,7 @@ const ChannelCard = ({ channel, 
clients, stopClient, stopChannel, logos, channel } } } catch (error) { - console.error("Error fetching streams:", error); + console.error('Error fetching streams:', error); } finally { setIsLoadingStreams(false); } @@ -157,17 +167,70 @@ const ChannelCard = ({ channel, clients, stopClient, stopChannel, logos, channel fetchStreams(); }, [channel.channel_id, channel.url, channelsByUUID]); + useEffect(() => { + setData( + clients + .filter((client) => client.channel.channel_id === channel.channel_id) + .map((client) => ({ + id: client.client_id, + ...client, + })) + ); + }, [clients]); + + const renderHeaderCell = (header) => { + switch (header.id) { + default: + return ( + + + {header.column.columnDef.header} + + + ); + } + }; + + const renderBodyCell = ({ cell, row }) => { + switch (cell.column.id) { + case 'actions': + return ( + +
+          <Center>
+            <Tooltip label="Disconnect client">
+              <ActionIcon
+                variant="transparent"
+                size="sm"
+                color="red.9"
+                onClick={() =>
+                  stopClient(
+                    row.original.channel.uuid,
+                    row.original.client_id
+                  )
+                }
+              >
+                <SquareX size="16" />
+              </ActionIcon>
+            </Tooltip>
+          </Center>
+ ); + } + }; + // Handle stream switching const handleStreamChange = async (streamId) => { try { - console.log("Switching to stream ID:", streamId); + console.log('Switching to stream ID:', streamId); // Find the selected stream in availableStreams for debugging - const selectedStream = availableStreams.find(s => s.id.toString() === streamId); - console.log("Selected stream details:", selectedStream); + const selectedStream = availableStreams.find( + (s) => s.id.toString() === streamId + ); + console.log('Selected stream details:', selectedStream); // Make sure we're passing the correct ID to the API const response = await API.switchStream(channel.channel_id, streamId); - console.log("Stream switch API response:", response); + console.log('Stream switch API response:', response); // Update the local active stream ID immediately setActiveStreamId(streamId); @@ -193,21 +256,22 @@ const ChannelCard = ({ channel, clients, stopClient, stopChannel, logos, channel const channelId = channelsByUUID[channel.channel_id]; if (channelId) { const updatedStreamData = await API.getChannelStreams(channelId); - console.log("Channel streams after switch:", updatedStreamData); + console.log('Channel streams after switch:', updatedStreamData); // Update current stream information with fresh data - const updatedStream = updatedStreamData.find(s => s.id.toString() === streamId); + const updatedStream = updatedStreamData.find( + (s) => s.id.toString() === streamId + ); if (updatedStream && updatedStream.m3u_profile) { setCurrentM3UProfile(updatedStream.m3u_profile); } } } catch (error) { - console.error("Error checking streams after switch:", error); + console.error('Error checking streams after switch:', error); } }, 2000); - } catch (error) { - console.error("Stream switch error:", error); + console.error('Stream switch error:', error); notifications.show({ title: 'Error switching stream', message: error.toString(), @@ -215,23 +279,31 @@ const ChannelCard = ({ channel, clients, stopClient, stopChannel, logos, channel }); } }; + console.log(data); const clientsColumns = useMemo( () => [ + { + id: 'expand', + size: 20, + }, { header: 'IP Address', accessorKey: 'ip_address', - size: 50, }, // Updated Connected column with tooltip { + id: 'connected', header: 'Connected', accessorFn: (row) => { // Check for connected_since (which is seconds since connection) if (row.connected_since) { // Calculate the actual connection time by subtracting the seconds from current time const currentTime = dayjs(); - const connectedTime = currentTime.subtract(row.connected_since, 'second'); + const connectedTime = currentTime.subtract( + row.connected_since, + 'second' + ); return connectedTime.format('MM/DD HH:mm:ss'); } @@ -243,15 +315,21 @@ const ChannelCard = ({ channel, clients, stopClient, stopChannel, logos, channel return 'Unknown'; }, - Cell: ({ cell }) => ( - + cell: ({ cell }) => ( + {cell.getValue()} ), - size: 50, }, // Update Duration column with tooltip showing exact seconds { + id: 'duration', header: 'Duration', accessorFn: (row) => { if (row.connected_since) { @@ -259,72 +337,74 @@ const ChannelCard = ({ channel, clients, stopClient, stopChannel, logos, channel } if (row.connection_duration) { - return dayjs.duration(row.connection_duration, 'seconds').humanize(); + return dayjs + .duration(row.connection_duration, 'seconds') + .humanize(); } return '-'; }, - Cell: ({ cell, row }) => { - const exactDuration = row.original.connected_since || row.original.connection_duration; + cell: ({ cell, row }) => { + const 
exactDuration = + row.original.connected_since || row.original.connection_duration; return ( - + {cell.getValue()} ); }, - size: 50, - } + }, + { + id: 'actions', + header: 'Actions', + size: tableSize == 'compact' ? 75 : 100, + }, ], [] ); // This hook is now at the top level of this component - const channelClientsTable = useMantineReactTable({ + const channelClientsTable = useTable({ ...TableHelper.defaultProperties, columns: clientsColumns, - data: clients.filter( - (client) => client.channel.channel_id === channel.channel_id - ), - enablePagination: false, - enableTopToolbar: false, - enableBottomToolbar: false, - enableRowSelection: false, - enableColumnFilters: false, - mantineTableBodyCellProps: { - style: { - padding: 4, - borderColor: '#444', - color: '#E0E0E0', - fontSize: '0.85rem', - }, + data, + allRowIds: data.map((client) => client.id), + tableCellProps: () => ({ + padding: 4, + borderColor: '#444', + color: '#E0E0E0', + fontSize: '0.85rem', + }), + headerCellRenderFns: { + ip_address: renderHeaderCell, + connected: renderHeaderCell, + duration: renderHeaderCell, + actions: renderHeaderCell, + }, + bodyCellRenderFns: { + actions: renderBodyCell, + }, + getExpandedRowHeight: (row) => { + return 20 + 28 * row.original.streams.length; + }, + expandedRowRenderer: ({ row }) => { + return ( + + + + User Agent: + + {row.original.user_agent || 'Unknown'} + + + ); }, - enableRowActions: true, - renderRowActions: ({ row }) => ( - -
-      <Center>
-        <Tooltip label="Disconnect client">
-          <ActionIcon
-            variant="transparent"
-            size="sm"
-            color="red.9"
-            onClick={() =>
-              stopClient(row.original.channel.uuid, row.original.client_id)
-            }
-          >
-            <SquareX size="16" />
-          </ActionIcon>
-        </Tooltip>
-      </Center>
- ), - renderDetailPanel: ({ row }) => ( - - - User Agent: - {row.original.user_agent || "Unknown"} - - - ), mantineExpandButtonProps: ({ row, table }) => ({ size: 'xs', style: { @@ -343,13 +423,11 @@ const ChannelCard = ({ channel, clients, stopClient, stopChannel, logos, channel }, }); - if (location.pathname != '/stats') { - return <>; - } - // Get logo URL from the logos object if available - const logoUrl = channel.logo_id && logos && logos[channel.logo_id] ? - logos[channel.logo_id].cache_url : null; + const logoUrl = + channel.logo_id && logos && logos[channel.logo_id] + ? logos[channel.logo_id].cache_url + : null; // Ensure these values exist to prevent errors const channelName = channel.name || 'Unnamed Channel'; @@ -361,19 +439,21 @@ const ChannelCard = ({ channel, clients, stopClient, stopChannel, logos, channel const streamProfileName = channel.stream_profile?.name || 'Unknown Profile'; // Use currentM3UProfile if available, otherwise fall back to channel data - const m3uProfileName = currentM3UProfile?.name || + const m3uProfileName = + currentM3UProfile?.name || channel.m3u_profile?.name || channel.m3u_profile_name || 'Unknown M3U Profile'; // Create select options for available streams - const streamOptions = availableStreams.map(stream => { + const streamOptions = availableStreams.map((stream) => { // Get account name from our mapping if it exists - const accountName = stream.m3u_account && m3uAccountsMap[stream.m3u_account] - ? m3uAccountsMap[stream.m3u_account] - : stream.m3u_account - ? `M3U #${stream.m3u_account}` - : 'Unknown M3U'; + const accountName = + stream.m3u_account && m3uAccountsMap[stream.m3u_account] + ? m3uAccountsMap[stream.m3u_account] + : stream.m3u_account + ? `M3U #${stream.m3u_account}` + : 'Unknown M3U'; return { value: stream.id.toString(), @@ -381,6 +461,15 @@ const ChannelCard = ({ channel, clients, stopClient, stopChannel, logos, channel }; }); + if (location.pathname != '/stats') { + return <>; + } + + // Safety check - if channel doesn't have required data, don't render + if (!channel || !channel.channel_id) { + return null; + } + return ( - + channel logo @@ -465,7 +556,9 @@ const ChannelCard = ({ channel, clients, stopClient, stopChannel, logos, channel