Merge branch 'dev' of https://github.com/Dispatcharr/Dispatcharr into proxy-settings

SergeantPanda 2025-06-12 10:45:44 -05:00
commit ada1d51aaa
79 changed files with 6726 additions and 3193 deletions

View file

@ -11,6 +11,10 @@
**/.toolstarget
**/.vs
**/.vscode
**/.history
**/media
**/models
**/static
**/*.*proj.user
**/*.dbmdl
**/*.jfm
@ -26,3 +30,4 @@
**/values.dev.yaml
LICENSE
README.md
data/

View file

@ -1,41 +1,39 @@
from django.urls import path, include
from rest_framework.routers import DefaultRouter
from .api_views import (
AuthViewSet, UserViewSet, GroupViewSet,
list_permissions, initialize_superuser
AuthViewSet,
UserViewSet,
GroupViewSet,
TokenObtainPairView,
TokenRefreshView,
list_permissions,
initialize_superuser,
)
from rest_framework_simplejwt import views as jwt_views
app_name = 'accounts'
app_name = "accounts"
# 🔹 Register ViewSets with a Router
router = DefaultRouter()
router.register(r'users', UserViewSet, basename='user')
router.register(r'groups', GroupViewSet, basename='group')
router.register(r"users", UserViewSet, basename="user")
router.register(r"groups", GroupViewSet, basename="group")
# 🔹 Custom Authentication Endpoints
auth_view = AuthViewSet.as_view({
'post': 'login'
})
auth_view = AuthViewSet.as_view({"post": "login"})
logout_view = AuthViewSet.as_view({
'post': 'logout'
})
logout_view = AuthViewSet.as_view({"post": "logout"})
# 🔹 Define API URL patterns
urlpatterns = [
# Authentication
path('auth/login/', auth_view, name='user-login'),
path('auth/logout/', logout_view, name='user-logout'),
path("auth/login/", auth_view, name="user-login"),
path("auth/logout/", logout_view, name="user-logout"),
# Superuser API
path('initialize-superuser/', initialize_superuser, name='initialize_superuser'),
path("initialize-superuser/", initialize_superuser, name="initialize_superuser"),
# Permissions API
path('permissions/', list_permissions, name='list-permissions'),
path('token/', jwt_views.TokenObtainPairView.as_view(), name='token_obtain_pair'),
path('token/refresh/', jwt_views.TokenRefreshView.as_view(), name='token_refresh'),
path("permissions/", list_permissions, name="list-permissions"),
path("token/", TokenObtainPairView.as_view(), name="token_obtain_pair"),
path("token/refresh/", TokenRefreshView.as_view(), name="token_refresh"),
]
# 🔹 Include ViewSet routes
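
The URL changes above swap the stock SimpleJWT views for the subclasses defined in .api_views (next file), so the UI network check runs before any token is issued or refreshed. A minimal sketch of how a client could exercise the two token endpoints, assuming the accounts URLs are mounted under /api/accounts/ and using the requests library (host, port, prefix, and credentials below are assumptions, not taken from this diff):

import requests

BASE = "http://localhost:9191/api/accounts"  # assumed mount point

# Obtain an access/refresh pair from the custom TokenObtainPairView
resp = requests.post(
    f"{BASE}/token/",
    json={"username": "admin", "password": "secret"},
    timeout=10,
)
resp.raise_for_status()
tokens = resp.json()  # SimpleJWT returns "access" and "refresh" keys

# Trade the refresh token for a new access token via the custom TokenRefreshView
refreshed = requests.post(
    f"{BASE}/token/refresh/",
    json={"refresh": tokens["refresh"]},
    timeout=10,
)
print(refreshed.json()["access"])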

View file

@ -2,16 +2,37 @@ from django.contrib.auth import authenticate, login, logout
from django.contrib.auth.models import Group, Permission
from django.http import JsonResponse, HttpResponse
from django.views.decorators.csrf import csrf_exempt
from rest_framework.decorators import api_view, permission_classes
from rest_framework.permissions import IsAuthenticated, AllowAny
from rest_framework.decorators import api_view, permission_classes, action
from rest_framework.response import Response
from rest_framework import viewsets
from rest_framework import viewsets, status
from drf_yasg.utils import swagger_auto_schema
from drf_yasg import openapi
import json
from .permissions import IsAdmin, Authenticated
from dispatcharr.utils import network_access_allowed
from .models import User
from .serializers import UserSerializer, GroupSerializer, PermissionSerializer
from rest_framework_simplejwt.views import TokenObtainPairView, TokenRefreshView
class TokenObtainPairView(TokenObtainPairView):
def post(self, request, *args, **kwargs):
# Custom logic here
if not network_access_allowed(request, "UI"):
return Response({"error": "Forbidden"}, status=status.HTTP_403_FORBIDDEN)
return super().post(request, *args, **kwargs)
class TokenRefreshView(TokenRefreshView):
def post(self, request, *args, **kwargs):
# Custom logic here
if not network_access_allowed(request, "UI"):
return Response({"error": "Forbidden"}, status=status.HTTP_403_FORBIDDEN)
return super().post(request, *args, **kwargs)
@csrf_exempt # In production, consider CSRF protection strategies or ensure this endpoint is only accessible when no superuser exists.
def initialize_superuser(request):
@ -26,15 +47,20 @@ def initialize_superuser(request):
password = data.get("password")
email = data.get("email", "")
if not username or not password:
return JsonResponse({"error": "Username and password are required."}, status=400)
return JsonResponse(
{"error": "Username and password are required."}, status=400
)
# Create the superuser
User.objects.create_superuser(username=username, password=password, email=email)
User.objects.create_superuser(
username=username, password=password, email=email, user_level=10
)
return JsonResponse({"superuser_exists": True})
except Exception as e:
return JsonResponse({"error": str(e)}, status=500)
# For GET requests, indicate no superuser exists
return JsonResponse({"superuser_exists": False})
# 🔹 1) Authentication APIs
class AuthViewSet(viewsets.ViewSet):
"""Handles user login and logout"""
@ -43,36 +69,40 @@ class AuthViewSet(viewsets.ViewSet):
operation_description="Authenticate and log in a user",
request_body=openapi.Schema(
type=openapi.TYPE_OBJECT,
required=['username', 'password'],
required=["username", "password"],
properties={
'username': openapi.Schema(type=openapi.TYPE_STRING),
'password': openapi.Schema(type=openapi.TYPE_STRING, format=openapi.FORMAT_PASSWORD)
"username": openapi.Schema(type=openapi.TYPE_STRING),
"password": openapi.Schema(
type=openapi.TYPE_STRING, format=openapi.FORMAT_PASSWORD
),
},
),
responses={200: "Login successful", 400: "Invalid credentials"},
)
def login(self, request):
"""Logs in a user and returns user details"""
username = request.data.get('username')
password = request.data.get('password')
username = request.data.get("username")
password = request.data.get("password")
user = authenticate(request, username=username, password=password)
if user:
login(request, user)
return Response({
"message": "Login successful",
"user": {
"id": user.id,
"username": user.username,
"email": user.email,
"groups": list(user.groups.values_list('name', flat=True))
return Response(
{
"message": "Login successful",
"user": {
"id": user.id,
"username": user.username,
"email": user.email,
"groups": list(user.groups.values_list("name", flat=True)),
},
}
})
)
return Response({"error": "Invalid credentials"}, status=400)
@swagger_auto_schema(
operation_description="Log out the current user",
responses={200: "Logout successful"}
responses={200: "Logout successful"},
)
def logout(self, request):
"""Logs out the authenticated user"""
@ -83,13 +113,19 @@ class AuthViewSet(viewsets.ViewSet):
# 🔹 2) User Management APIs
class UserViewSet(viewsets.ModelViewSet):
"""Handles CRUD operations for Users"""
queryset = User.objects.all()
serializer_class = UserSerializer
permission_classes = [IsAuthenticated]
def get_permissions(self):
if self.action == "me":
return [Authenticated()]
return [IsAdmin()]
@swagger_auto_schema(
operation_description="Retrieve a list of users",
responses={200: UserSerializer(many=True)}
responses={200: UserSerializer(many=True)},
)
def list(self, request, *args, **kwargs):
return super().list(request, *args, **kwargs)
@ -110,17 +146,28 @@ class UserViewSet(viewsets.ModelViewSet):
def destroy(self, request, *args, **kwargs):
return super().destroy(request, *args, **kwargs)
@swagger_auto_schema(
method="get",
operation_description="Get active user information",
)
@action(detail=False, methods=["get"], url_path="me")
def me(self, request):
user = request.user
serializer = UserSerializer(user)
return Response(serializer.data)
# 🔹 3) Group Management APIs
class GroupViewSet(viewsets.ModelViewSet):
"""Handles CRUD operations for Groups"""
queryset = Group.objects.all()
serializer_class = GroupSerializer
permission_classes = [IsAuthenticated]
permission_classes = [Authenticated]
@swagger_auto_schema(
operation_description="Retrieve a list of groups",
responses={200: GroupSerializer(many=True)}
responses={200: GroupSerializer(many=True)},
)
def list(self, request, *args, **kwargs):
return super().list(request, *args, **kwargs)
@ -144,12 +191,12 @@ class GroupViewSet(viewsets.ModelViewSet):
# 🔹 4) Permissions List API
@swagger_auto_schema(
method='get',
method="get",
operation_description="Retrieve a list of all permissions",
responses={200: PermissionSerializer(many=True)}
responses={200: PermissionSerializer(many=True)},
)
@api_view(['GET'])
@permission_classes([IsAuthenticated])
@api_view(["GET"])
@permission_classes([Authenticated])
def list_permissions(request):
"""Returns a list of all available permissions"""
permissions = Permission.objects.all()
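
Two things stand out in this file: the token views now refuse requests when network_access_allowed(request, "UI") is false, and UserViewSet routes the new "me" action through Authenticated while every other action requires IsAdmin. A small test sketch for the 403 path, patching the name as imported into this module (the URL prefix is an assumption):

from unittest.mock import patch

from rest_framework.test import APITestCase


class TokenNetworkGateTests(APITestCase):
    """Sketch: the custom token view should reject clients outside the allowed UI networks."""

    @patch("apps.accounts.api_views.network_access_allowed", return_value=False)
    def test_token_endpoint_blocked_outside_allowed_network(self, mock_allowed):
        response = self.client.post(
            "/api/accounts/token/",  # assumed mount point for the accounts URLs
            {"username": "admin", "password": "secret"},
        )
        self.assertEqual(response.status_code, 403)
        mock_allowed.assert_called_once()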

View file

@ -1,6 +1,7 @@
from django.apps import AppConfig
class AccountsConfig(AppConfig):
default_auto_field = 'django.db.models.BigAutoField'
name = 'apps.accounts'
default_auto_field = "django.db.models.BigAutoField"
name = "apps.accounts"
verbose_name = "Accounts & Authentication"

View file

@ -0,0 +1,43 @@
# Generated by Django 5.1.6 on 2025-05-18 15:47
from django.db import migrations, models
def set_user_level_to_10(apps, schema_editor):
User = apps.get_model("accounts", "User")
User.objects.update(user_level=10)
class Migration(migrations.Migration):
dependencies = [
("accounts", "0001_initial"),
("dispatcharr_channels", "0021_channel_user_level"),
]
operations = [
migrations.RemoveField(
model_name="user",
name="channel_groups",
),
migrations.AddField(
model_name="user",
name="channel_profiles",
field=models.ManyToManyField(
blank=True,
related_name="users",
to="dispatcharr_channels.channelprofile",
),
),
migrations.AddField(
model_name="user",
name="user_level",
field=models.IntegerField(default=0),
),
migrations.AddField(
model_name="user",
name="custom_properties",
field=models.TextField(blank=True, null=True),
),
migrations.RunPython(set_user_level_to_10),
]
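
This data migration promotes every pre-existing user to user_level=10 (admin) so nobody loses access when levels are introduced. As written, the RunPython step has no reverse function, so the migration cannot be rolled back cleanly; if reversibility were wanted, a hedged sketch of the extra step (not part of this commit) could look like:

def reset_user_level(apps, schema_editor):
    # Hypothetical reverse step: return everyone to the default (Streamer) level.
    User = apps.get_model("accounts", "User")
    User.objects.update(user_level=0)

# ...and the operation would then register both directions:
# migrations.RunPython(set_user_level_to_10, reset_user_level)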

View file

@ -2,17 +2,26 @@
from django.db import models
from django.contrib.auth.models import AbstractUser, Permission
class User(AbstractUser):
"""
Custom user model for Dispatcharr.
Inherits from Django's AbstractUser to add additional fields if needed.
"""
class UserLevel(models.IntegerChoices):
STREAMER = 0, "Streamer"
STANDARD = 1, "Standard User"
ADMIN = 10, "Admin"
avatar_config = models.JSONField(default=dict, blank=True, null=True)
channel_groups = models.ManyToManyField(
'dispatcharr_channels.ChannelGroup', # Updated reference to renamed model
channel_profiles = models.ManyToManyField(
"dispatcharr_channels.ChannelProfile",
blank=True,
related_name="users"
related_name="users",
)
user_level = models.IntegerField(default=UserLevel.STREAMER)
custom_properties = models.TextField(null=True, blank=True)
def __str__(self):
return self.username
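
UserLevel is an IntegerChoices enum, so the rest of the codebase can compare and query by symbolic name instead of raw integers. A short illustration (the queries themselves are examples, not code from this commit):

from apps.accounts.models import User

def is_admin(user):
    # Enum members compare like ints, so this matches user_level >= 10
    return user.user_level >= User.UserLevel.ADMIN

# Everyone with standard access or higher
standard_or_above = User.objects.filter(user_level__gte=User.UserLevel.STANDARD)

# Human-readable label for display purposes
print(User.UserLevel(10).label)  # "Admin"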

View file

@ -0,0 +1,56 @@
from rest_framework.permissions import IsAuthenticated
from .models import User
from dispatcharr.utils import network_access_allowed
class Authenticated(IsAuthenticated):
def has_permission(self, request, view):
is_authenticated = super().has_permission(request, view)
network_allowed = network_access_allowed(request, "UI")
return is_authenticated and network_allowed
class IsStandardUser(Authenticated):
def has_permission(self, request, view):
if not super().has_permission(request, view):
return False
return request.user and request.user.user_level >= User.UserLevel.STANDARD
class IsAdmin(Authenticated):
def has_permission(self, request, view):
if not super().has_permission(request, view):
return False
return request.user.user_level >= User.UserLevel.ADMIN
class IsOwnerOfObject(Authenticated):
def has_object_permission(self, request, view, obj):
if not super().has_permission(request, view):
return False
is_admin = IsAdmin().has_permission(request, view)
is_owner = request.user in obj.users.all()
return is_admin or is_owner
permission_classes_by_action = {
"list": [IsStandardUser],
"create": [IsAdmin],
"retrieve": [IsStandardUser],
"update": [IsAdmin],
"partial_update": [IsAdmin],
"destroy": [IsAdmin],
}
permission_classes_by_method = {
"GET": [IsStandardUser],
"POST": [IsAdmin],
"PATCH": [IsAdmin],
"PUT": [IsAdmin],
"DELETE": [IsAdmin],
}
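
Besides the permission classes, the module exports two lookup tables, permission_classes_by_action and permission_classes_by_method, which the ViewSets later in this commit consume from get_permissions(). A condensed sketch of that pattern (the ViewSet name here is hypothetical; the body mirrors the EPG and HDHR views below):

from rest_framework import viewsets

from apps.accounts.permissions import Authenticated, permission_classes_by_action


class ExampleViewSet(viewsets.ModelViewSet):
    def get_permissions(self):
        try:
            # Reads fall back to IsStandardUser, writes require IsAdmin
            return [perm() for perm in permission_classes_by_action[self.action]]
        except KeyError:
            # Actions not in the table only need an authenticated, network-allowed user
            return [Authenticated()]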

View file

@ -1,13 +1,14 @@
from rest_framework import serializers
from django.contrib.auth.models import Group, Permission
from .models import User
from apps.channels.models import ChannelProfile
# 🔹 Fix for Permission serialization
class PermissionSerializer(serializers.ModelSerializer):
class Meta:
model = Permission
fields = ['id', 'name', 'codename']
fields = ["id", "name", "codename"]
# 🔹 Fix for Group serialization
@ -18,15 +19,53 @@ class GroupSerializer(serializers.ModelSerializer):
class Meta:
model = Group
fields = ['id', 'name', 'permissions']
fields = ["id", "name", "permissions"]
# 🔹 Fix for User serialization
class UserSerializer(serializers.ModelSerializer):
groups = serializers.SlugRelatedField(
many=True, queryset=Group.objects.all(), slug_field="name"
) # ✅ Fix ManyToMany `_meta` error
password = serializers.CharField(write_only=True)
channel_profiles = serializers.PrimaryKeyRelatedField(
queryset=ChannelProfile.objects.all(), many=True, required=False
)
class Meta:
model = User
fields = ['id', 'username', 'email', 'groups']
fields = [
"id",
"username",
"email",
"user_level",
"password",
"channel_profiles",
"custom_properties",
]
def create(self, validated_data):
channel_profiles = validated_data.pop("channel_profiles", [])
user = User(**validated_data)
user.set_password(validated_data["password"])
user.is_active = True
user.save()
user.channel_profiles.set(channel_profiles)
return user
def update(self, instance, validated_data):
password = validated_data.pop("password", None)
channel_profiles = validated_data.pop("channel_profiles", None)
for attr, value in validated_data.items():
setattr(instance, attr, value)
if password:
instance.set_password(password)
instance.save()
if channel_profiles is not None:
instance.channel_profiles.set(channel_profiles)
return instance
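
In UserSerializer the password field is write-only and routed through set_password(), and channel_profiles is applied with .set() after the row exists, since M2M links need a saved primary key. A usage sketch of the update path (the user lookup and field values are examples):

from apps.accounts.models import User
from apps.accounts.serializers import UserSerializer

user = User.objects.get(username="viewer1")  # assumed existing user

serializer = UserSerializer(
    user,
    # channel_profiles IDs are example values for existing ChannelProfile rows
    data={"password": "new-s3cret", "user_level": 1, "channel_profiles": [1, 2]},
    partial=True,
)
serializer.is_valid(raise_exception=True)
user = serializer.save()  # update() re-hashes the password and replaces the profile set

assert user.check_password("new-s3cret")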

View file

@ -5,6 +5,7 @@ from django.db.models.signals import post_save
from django.dispatch import receiver
from .models import User
@receiver(post_save, sender=User)
def handle_new_user(sender, instance, created, **kwargs):
if created:

File diff suppressed because it is too large

View file

@ -0,0 +1,18 @@
# Generated by Django 5.1.6 on 2025-05-18 14:31
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('dispatcharr_channels', '0020_alter_channel_channel_number'),
]
operations = [
migrations.AddField(
model_name='channel',
name='user_level',
field=models.IntegerField(default=0),
),
]

View file

@ -9,12 +9,14 @@ from datetime import datetime
import hashlib
import json
from apps.epg.models import EPGData
from apps.accounts.models import User
logger = logging.getLogger(__name__)
# If you have an M3UAccount model in apps.m3u, you can still import it:
from apps.m3u.models import M3UAccount
# Add fallback functions if Redis isn't available
def get_total_viewers(channel_id):
"""Get viewer count from Redis or return 0 if Redis isn't available"""
@ -25,6 +27,7 @@ def get_total_viewers(channel_id):
except Exception:
return 0
class ChannelGroup(models.Model):
name = models.TextField(unique=True, db_index=True)
@ -45,10 +48,12 @@ class ChannelGroup(models.Model):
return created_objects
class Stream(models.Model):
"""
Represents a single stream (e.g. from an M3U source or custom URL).
"""
name = models.CharField(max_length=255, default="Default Stream")
url = models.URLField(max_length=2000, blank=True, null=True)
m3u_account = models.ForeignKey(
@ -60,7 +65,7 @@ class Stream(models.Model):
)
logo_url = models.TextField(blank=True, null=True)
tvg_id = models.CharField(max_length=255, blank=True, null=True)
local_file = models.FileField(upload_to='uploads/', blank=True, null=True)
local_file = models.FileField(upload_to="uploads/", blank=True, null=True)
current_viewers = models.PositiveIntegerField(default=0)
updated_at = models.DateTimeField(auto_now=True)
channel_group = models.ForeignKey(
@ -68,18 +73,18 @@ class Stream(models.Model):
on_delete=models.SET_NULL,
null=True,
blank=True,
related_name='streams'
related_name="streams",
)
stream_profile = models.ForeignKey(
StreamProfile,
null=True,
blank=True,
on_delete=models.SET_NULL,
related_name='streams'
related_name="streams",
)
is_custom = models.BooleanField(
default=False,
help_text="Whether this is a user-created stream or from an M3U account"
help_text="Whether this is a user-created stream or from an M3U account",
)
stream_hash = models.CharField(
max_length=255,
@ -95,7 +100,7 @@ class Stream(models.Model):
# If you use m3u_account, you might do unique_together = ('name','url','m3u_account')
verbose_name = "Stream"
verbose_name_plural = "Streams"
ordering = ['-updated_at']
ordering = ["-updated_at"]
def __str__(self):
return self.name or self.url or f"Stream ID {self.id}"
@ -105,14 +110,14 @@ class Stream(models.Model):
if keys is None:
keys = CoreSettings.get_m3u_hash_key().split(",")
stream_parts = {
"name": name, "url": url, "tvg_id": tvg_id
}
stream_parts = {"name": name, "url": url, "tvg_id": tvg_id}
hash_parts = {key: stream_parts[key] for key in keys if key in stream_parts}
# Serialize and hash the dictionary
serialized_obj = json.dumps(hash_parts, sort_keys=True) # sort_keys ensures consistent ordering
serialized_obj = json.dumps(
hash_parts, sort_keys=True
) # sort_keys ensures consistent ordering
hash_object = hashlib.sha256(serialized_obj.encode())
return hash_object.hexdigest()
@ -128,13 +133,17 @@ class Stream(models.Model):
return stream, False # False means it was updated, not created
except cls.DoesNotExist:
# If it doesn't exist, create a new object with the given hash
fields_to_update['stream_hash'] = hash_value # Make sure the hash field is set
fields_to_update["stream_hash"] = (
hash_value # Make sure the hash field is set
)
stream = cls.objects.create(**fields_to_update)
return stream, True # True means it was created
# @TODO: honor stream's stream profile
def get_stream_profile(self):
stream_profile = StreamProfile.objects.get(id=CoreSettings.get_default_stream_profile_id())
stream_profile = StreamProfile.objects.get(
id=CoreSettings.get_default_stream_profile_id()
)
return stream_profile
@ -152,7 +161,9 @@ class Stream(models.Model):
m3u_account = self.m3u_account
m3u_profiles = m3u_account.profiles.all()
default_profile = next((obj for obj in m3u_profiles if obj.is_default), None)
profiles = [default_profile] + [obj for obj in m3u_profiles if not obj.is_default]
profiles = [default_profile] + [
obj for obj in m3u_profiles if not obj.is_default
]
for profile in profiles:
logger.info(profile)
@ -167,13 +178,19 @@ class Stream(models.Model):
if profile.max_streams == 0 or current_connections < profile.max_streams:
# Start a new stream
redis_client.set(f"channel_stream:{self.id}", self.id)
redis_client.set(f"stream_profile:{self.id}", profile.id) # Store only the matched profile
redis_client.set(
f"stream_profile:{self.id}", profile.id
) # Store only the matched profile
# Increment connection count for profiles with limits
if profile.max_streams > 0:
redis_client.incr(profile_connections_key)
return self.id, profile.id, None # Return newly assigned stream and matched profile
return (
self.id,
profile.id,
None,
) # Return newly assigned stream and matched profile
# 4. No available streams
return None, None, None
@ -194,7 +211,9 @@ class Stream(models.Model):
redis_client.delete(f"stream_profile:{stream_id}") # Remove profile association
profile_id = int(profile_id)
logger.debug(f"Found profile ID {profile_id} associated with stream {stream_id}")
logger.debug(
f"Found profile ID {profile_id} associated with stream {stream_id}"
)
profile_connections_key = f"profile_connections:{profile_id}"
@ -203,6 +222,7 @@ class Stream(models.Model):
if current_count > 0:
redis_client.decr(profile_connections_key)
class ChannelManager(models.Manager):
def active(self):
return self.all()
@ -212,38 +232,35 @@ class Channel(models.Model):
channel_number = models.FloatField(db_index=True)
name = models.CharField(max_length=255)
logo = models.ForeignKey(
'Logo',
"Logo",
on_delete=models.SET_NULL,
null=True,
blank=True,
related_name='channels',
related_name="channels",
)
# M2M to Stream now in the same file
streams = models.ManyToManyField(
Stream,
blank=True,
through='ChannelStream',
related_name='channels'
Stream, blank=True, through="ChannelStream", related_name="channels"
)
channel_group = models.ForeignKey(
'ChannelGroup',
"ChannelGroup",
on_delete=models.SET_NULL,
null=True,
blank=True,
related_name='channels',
help_text="Channel group this channel belongs to."
related_name="channels",
help_text="Channel group this channel belongs to.",
)
tvg_id = models.CharField(max_length=255, blank=True, null=True)
tvc_guide_stationid = models.CharField(max_length=255, blank=True, null=True)
epg_data = models.ForeignKey(
EPGData,
on_delete=models.SET_NULL,
null=True,
blank=True,
related_name='channels'
related_name="channels",
)
stream_profile = models.ForeignKey(
@ -251,16 +268,19 @@ class Channel(models.Model):
on_delete=models.SET_NULL,
null=True,
blank=True,
related_name='channels'
related_name="channels",
)
uuid = models.UUIDField(default=uuid.uuid4, editable=False, unique=True, db_index=True)
uuid = models.UUIDField(
default=uuid.uuid4, editable=False, unique=True, db_index=True
)
user_level = models.IntegerField(default=0)
def clean(self):
# Enforce unique channel_number within a given group
existing = Channel.objects.filter(
channel_number=self.channel_number,
channel_group=self.channel_group
channel_number=self.channel_number, channel_group=self.channel_group
).exclude(id=self.id)
if existing.exists():
raise ValidationError(
@ -272,7 +292,7 @@ class Channel(models.Model):
@classmethod
def get_next_available_channel_number(cls, starting_from=1):
used_numbers = set(cls.objects.all().values_list('channel_number', flat=True))
used_numbers = set(cls.objects.all().values_list("channel_number", flat=True))
n = starting_from
while n in used_numbers:
n += 1
@ -282,7 +302,9 @@ class Channel(models.Model):
def get_stream_profile(self):
stream_profile = self.stream_profile
if not stream_profile:
stream_profile = StreamProfile.objects.get(id=CoreSettings.get_default_stream_profile_id())
stream_profile = StreamProfile.objects.get(
id=CoreSettings.get_default_stream_profile_id()
)
return stream_profile
@ -312,16 +334,20 @@ class Channel(models.Model):
profile_id = int(profile_id_bytes)
return stream_id, profile_id, None
except (ValueError, TypeError):
logger.debug(f"Invalid profile ID retrieved from Redis: {profile_id_bytes}")
logger.debug(
f"Invalid profile ID retrieved from Redis: {profile_id_bytes}"
)
except (ValueError, TypeError):
logger.debug(f"Invalid stream ID retrieved from Redis: {stream_id_bytes}")
logger.debug(
f"Invalid stream ID retrieved from Redis: {stream_id_bytes}"
)
# No existing active stream, attempt to assign a new one
has_streams_but_maxed_out = False
has_active_profiles = False
# Iterate through channel streams and their profiles
for stream in self.streams.all().order_by('channelstream__order'):
for stream in self.streams.all().order_by("channelstream__order"):
# Retrieve the M3U account associated with the stream.
m3u_account = stream.m3u_account
if not m3u_account:
@ -329,13 +355,17 @@ class Channel(models.Model):
continue
m3u_profiles = m3u_account.profiles.all()
default_profile = next((obj for obj in m3u_profiles if obj.is_default), None)
default_profile = next(
(obj for obj in m3u_profiles if obj.is_default), None
)
if not default_profile:
logger.debug(f"M3U account {m3u_account.id} has no default profile")
continue
profiles = [default_profile] + [obj for obj in m3u_profiles if not obj.is_default]
profiles = [default_profile] + [
obj for obj in m3u_profiles if not obj.is_default
]
for profile in profiles:
# Skip inactive profiles
@ -346,10 +376,15 @@ class Channel(models.Model):
has_active_profiles = True
profile_connections_key = f"profile_connections:{profile.id}"
current_connections = int(redis_client.get(profile_connections_key) or 0)
current_connections = int(
redis_client.get(profile_connections_key) or 0
)
# Check if profile has available slots (or unlimited connections)
if profile.max_streams == 0 or current_connections < profile.max_streams:
if (
profile.max_streams == 0
or current_connections < profile.max_streams
):
# Start a new stream
redis_client.set(f"channel_stream:{self.id}", stream.id)
redis_client.set(f"stream_profile:{stream.id}", profile.id)
@ -358,11 +393,17 @@ class Channel(models.Model):
if profile.max_streams > 0:
redis_client.incr(profile_connections_key)
return stream.id, profile.id, None # Return newly assigned stream and matched profile
return (
stream.id,
profile.id,
None,
) # Return newly assigned stream and matched profile
else:
# This profile is at max connections
has_streams_but_maxed_out = True
logger.debug(f"Profile {profile.id} at max connections: {current_connections}/{profile.max_streams}")
logger.debug(
f"Profile {profile.id} at max connections: {current_connections}/{profile.max_streams}"
)
# No available streams - determine specific reason
if has_streams_but_maxed_out:
@ -388,7 +429,9 @@ class Channel(models.Model):
redis_client.delete(f"channel_stream:{self.id}") # Remove active stream
stream_id = int(stream_id)
logger.debug(f"Found stream ID {stream_id} associated with channel stream {self.id}")
logger.debug(
f"Found stream ID {stream_id} associated with channel stream {self.id}"
)
# Get the matched profile for cleanup
profile_id = redis_client.get(f"stream_profile:{stream_id}")
@ -399,7 +442,9 @@ class Channel(models.Model):
redis_client.delete(f"stream_profile:{stream_id}") # Remove profile association
profile_id = int(profile_id)
logger.debug(f"Found profile ID {profile_id} associated with stream {stream_id}")
logger.debug(
f"Found profile ID {profile_id} associated with stream {stream_id}"
)
profile_connections_key = f"profile_connections:{profile_id}"
@ -452,20 +497,26 @@ class Channel(models.Model):
# Increment connection count for new profile
new_profile_connections_key = f"profile_connections:{new_profile_id}"
redis_client.incr(new_profile_connections_key)
logger.info(f"Updated stream {stream_id} profile from {current_profile_id} to {new_profile_id}")
logger.info(
f"Updated stream {stream_id} profile from {current_profile_id} to {new_profile_id}"
)
return True
class ChannelProfile(models.Model):
name = models.CharField(max_length=100, unique=True)
class ChannelProfileMembership(models.Model):
channel_profile = models.ForeignKey(ChannelProfile, on_delete=models.CASCADE)
channel = models.ForeignKey(Channel, on_delete=models.CASCADE)
enabled = models.BooleanField(default=True) # Track if the channel is enabled for this group
enabled = models.BooleanField(
default=True
) # Track if the channel is enabled for this group
class Meta:
unique_together = ('channel_profile', 'channel')
unique_together = ("channel_profile", "channel")
class ChannelStream(models.Model):
channel = models.ForeignKey(Channel, on_delete=models.CASCADE)
@ -473,27 +524,26 @@ class ChannelStream(models.Model):
order = models.PositiveIntegerField(default=0) # Ordering field
class Meta:
ordering = ['order'] # Ensure streams are retrieved in order
ordering = ["order"] # Ensure streams are retrieved in order
constraints = [
models.UniqueConstraint(fields=['channel', 'stream'], name='unique_channel_stream')
models.UniqueConstraint(
fields=["channel", "stream"], name="unique_channel_stream"
)
]
class ChannelGroupM3UAccount(models.Model):
channel_group = models.ForeignKey(
ChannelGroup,
on_delete=models.CASCADE,
related_name='m3u_account'
ChannelGroup, on_delete=models.CASCADE, related_name="m3u_account"
)
m3u_account = models.ForeignKey(
M3UAccount,
on_delete=models.CASCADE,
related_name='channel_group'
M3UAccount, on_delete=models.CASCADE, related_name="channel_group"
)
custom_properties = models.TextField(null=True, blank=True)
enabled = models.BooleanField(default=True)
class Meta:
unique_together = ('channel_group', 'm3u_account')
unique_together = ("channel_group", "m3u_account")
def __str__(self):
return f"{self.channel_group.name} - {self.m3u_account.name} (Enabled: {self.enabled})"
@ -506,8 +556,11 @@ class Logo(models.Model):
def __str__(self):
return self.name
class Recording(models.Model):
channel = models.ForeignKey("Channel", on_delete=models.CASCADE, related_name="recordings")
channel = models.ForeignKey(
"Channel", on_delete=models.CASCADE, related_name="recordings"
)
start_time = models.DateTimeField()
end_time = models.DateTimeField()
task_id = models.CharField(max_length=255, null=True, blank=True)
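
Stream.stream_hash is a SHA-256 over a configurable subset of (name, url, tvg_id) chosen by CoreSettings.get_m3u_hash_key(), serialized with sort_keys=True so the digest is stable between refreshes. A standalone restatement of that scheme (the helper below is illustrative, not the model method itself):

import hashlib
import json

def stream_fingerprint(name, url, tvg_id, keys=("name", "url", "tvg_id")):
    # Mirror of the hashing approach used for Stream.stream_hash
    parts = {"name": name, "url": url, "tvg_id": tvg_id}
    selected = {key: parts[key] for key in keys if key in parts}
    # sort_keys keeps the JSON serialization, and therefore the digest, deterministic
    return hashlib.sha256(json.dumps(selected, sort_keys=True).encode()).hexdigest()

a = stream_fingerprint("News HD", "http://example.com/1.ts", "news.hd")
b = stream_fingerprint("News HD", "http://example.com/1.ts", "news.hd")
assert a == b  # identical inputs always map to the same hash, so re-imports de-duplicate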

View file

@ -1,5 +1,15 @@
from rest_framework import serializers
from .models import Stream, Channel, ChannelGroup, ChannelStream, ChannelGroupM3UAccount, Logo, ChannelProfile, ChannelProfileMembership, Recording
from .models import (
Stream,
Channel,
ChannelGroup,
ChannelStream,
ChannelGroupM3UAccount,
Logo,
ChannelProfile,
ChannelProfileMembership,
Recording,
)
from apps.epg.serializers import EPGDataSerializer
from core.models import StreamProfile
from apps.epg.models import EPGData
@ -7,19 +17,23 @@ from django.urls import reverse
from rest_framework import serializers
from django.utils import timezone
class LogoSerializer(serializers.ModelSerializer):
cache_url = serializers.SerializerMethodField()
class Meta:
model = Logo
fields = ['id', 'name', 'url', 'cache_url']
fields = ["id", "name", "url", "cache_url"]
def get_cache_url(self, obj):
# return f"/api/channels/logos/{obj.id}/cache/"
request = self.context.get('request')
request = self.context.get("request")
if request:
return request.build_absolute_uri(reverse('api:channels:logo-cache', args=[obj.id]))
return reverse('api:channels:logo-cache', args=[obj.id])
return request.build_absolute_uri(
reverse("api:channels:logo-cache", args=[obj.id])
)
return reverse("api:channels:logo-cache", args=[obj.id])
#
# Stream
@ -27,43 +41,46 @@ class LogoSerializer(serializers.ModelSerializer):
class StreamSerializer(serializers.ModelSerializer):
stream_profile_id = serializers.PrimaryKeyRelatedField(
queryset=StreamProfile.objects.all(),
source='stream_profile',
source="stream_profile",
allow_null=True,
required=False
required=False,
)
read_only_fields = ['is_custom', 'm3u_account', 'stream_hash']
read_only_fields = ["is_custom", "m3u_account", "stream_hash"]
class Meta:
model = Stream
fields = [
'id',
'name',
'url',
'm3u_account', # Uncomment if using M3U fields
'logo_url',
'tvg_id',
'local_file',
'current_viewers',
'updated_at',
'last_seen',
'stream_profile_id',
'is_custom',
'channel_group',
'stream_hash',
"id",
"name",
"url",
"m3u_account", # Uncomment if using M3U fields
"logo_url",
"tvg_id",
"local_file",
"current_viewers",
"updated_at",
"last_seen",
"stream_profile_id",
"is_custom",
"channel_group",
"stream_hash",
]
def get_fields(self):
fields = super().get_fields()
# Certain properties cannot be edited when the stream was created from an M3U account
if self.instance and getattr(self.instance, 'm3u_account', None) and not self.instance.is_custom:
fields['id'].read_only = True
fields['name'].read_only = True
fields['url'].read_only = True
fields['m3u_account'].read_only = True
fields['tvg_id'].read_only = True
fields['channel_group'].read_only = True
if (
self.instance
and getattr(self.instance, "m3u_account", None)
and not self.instance.is_custom
):
fields["id"].read_only = True
fields["name"].read_only = True
fields["url"].read_only = True
fields["m3u_account"].read_only = True
fields["tvg_id"].read_only = True
fields["channel_group"].read_only = True
return fields
@ -74,35 +91,38 @@ class StreamSerializer(serializers.ModelSerializer):
class ChannelGroupSerializer(serializers.ModelSerializer):
class Meta:
model = ChannelGroup
fields = ['id', 'name']
fields = ["id", "name"]
class ChannelProfileSerializer(serializers.ModelSerializer):
channels = serializers.SerializerMethodField()
class Meta:
model = ChannelProfile
fields = ['id', 'name', 'channels']
fields = ["id", "name", "channels"]
def get_channels(self, obj):
memberships = ChannelProfileMembership.objects.filter(channel_profile=obj, enabled=True)
return [
membership.channel.id
for membership in memberships
]
memberships = ChannelProfileMembership.objects.filter(
channel_profile=obj, enabled=True
)
return [membership.channel.id for membership in memberships]
class ChannelProfileMembershipSerializer(serializers.ModelSerializer):
class Meta:
model = ChannelProfileMembership
fields = ['channel', 'enabled']
fields = ["channel", "enabled"]
class ChanneProfilelMembershipUpdateSerializer(serializers.Serializer):
channel_id = serializers.IntegerField() # Ensure channel_id is an integer
enabled = serializers.BooleanField()
class BulkChannelProfileMembershipSerializer(serializers.Serializer):
channels = serializers.ListField(
child=ChanneProfilelMembershipUpdateSerializer(), # Use the nested serializer
allow_empty=False
allow_empty=False,
)
def validate_channels(self, value):
@ -110,6 +130,7 @@ class BulkChannelProfileMembershipSerializer(serializers.Serializer):
raise serializers.ValidationError("At least one channel must be provided.")
return value
#
# Channel
#
@ -119,14 +140,10 @@ class ChannelSerializer(serializers.ModelSerializer):
channel_number = serializers.FloatField(
allow_null=True,
required=False,
error_messages={
'invalid': 'Channel number must be a valid decimal number.'
}
error_messages={"invalid": "Channel number must be a valid decimal number."},
)
channel_group_id = serializers.PrimaryKeyRelatedField(
queryset=ChannelGroup.objects.all(),
source="channel_group",
required=False
queryset=ChannelGroup.objects.all(), source="channel_group", required=False
)
epg_data_id = serializers.PrimaryKeyRelatedField(
queryset=EPGData.objects.all(),
@ -137,16 +154,18 @@ class ChannelSerializer(serializers.ModelSerializer):
stream_profile_id = serializers.PrimaryKeyRelatedField(
queryset=StreamProfile.objects.all(),
source='stream_profile',
source="stream_profile",
allow_null=True,
required=False
required=False,
)
streams = serializers.PrimaryKeyRelatedField(queryset=Stream.objects.all(), many=True, required=False)
streams = serializers.PrimaryKeyRelatedField(
queryset=Stream.objects.all(), many=True, required=False
)
logo_id = serializers.PrimaryKeyRelatedField(
queryset=Logo.objects.all(),
source='logo',
source="logo",
allow_null=True,
required=False,
)
@ -154,24 +173,25 @@ class ChannelSerializer(serializers.ModelSerializer):
class Meta:
model = Channel
fields = [
'id',
'channel_number',
'name',
'channel_group_id',
'tvg_id',
'tvc_guide_stationid',
'epg_data_id',
'streams',
'stream_profile_id',
'uuid',
'logo_id',
"id",
"channel_number",
"name",
"channel_group_id",
"tvg_id",
"tvc_guide_stationid",
"epg_data_id",
"streams",
"stream_profile_id",
"uuid",
"logo_id",
"user_level",
]
def to_representation(self, instance):
include_streams = self.context.get('include_streams', False)
include_streams = self.context.get("include_streams", False)
if include_streams:
self.fields['streams'] = serializers.SerializerMethodField()
self.fields["streams"] = serializers.SerializerMethodField()
return super().to_representation(instance)
@ -180,22 +200,28 @@ class ChannelSerializer(serializers.ModelSerializer):
def get_streams(self, obj):
"""Retrieve ordered stream IDs for GET requests."""
return StreamSerializer(obj.streams.all().order_by('channelstream__order'), many=True).data
return StreamSerializer(
obj.streams.all().order_by("channelstream__order"), many=True
).data
def create(self, validated_data):
streams = validated_data.pop('streams', [])
channel_number = validated_data.pop('channel_number', Channel.get_next_available_channel_number())
streams = validated_data.pop("streams", [])
channel_number = validated_data.pop(
"channel_number", Channel.get_next_available_channel_number()
)
validated_data["channel_number"] = channel_number
channel = Channel.objects.create(**validated_data)
# Add streams in the specified order
for index, stream in enumerate(streams):
ChannelStream.objects.create(channel=channel, stream_id=stream.id, order=index)
ChannelStream.objects.create(
channel=channel, stream_id=stream.id, order=index
)
return channel
def update(self, instance, validated_data):
streams = validated_data.pop('streams', None)
streams = validated_data.pop("streams", None)
# Update standard fields
for attr, value in validated_data.items():
@ -206,8 +232,7 @@ class ChannelSerializer(serializers.ModelSerializer):
if streams is not None:
# Normalize stream IDs
normalized_ids = [
stream.id if hasattr(stream, "id") else stream
for stream in streams
stream.id if hasattr(stream, "id") else stream for stream in streams
]
print(normalized_ids)
@ -234,9 +259,7 @@ class ChannelSerializer(serializers.ModelSerializer):
cs.save(update_fields=["order"])
else:
ChannelStream.objects.create(
channel=instance,
stream_id=stream_id,
order=order
channel=instance, stream_id=stream_id, order=order
)
return instance
@ -250,20 +273,23 @@ class ChannelSerializer(serializers.ModelSerializer):
# Ensure it's processed as a float
return float(value)
except (ValueError, TypeError):
raise serializers.ValidationError("Channel number must be a valid decimal number.")
raise serializers.ValidationError(
"Channel number must be a valid decimal number."
)
def validate_stream_profile(self, value):
"""Handle special case where empty/0 values mean 'use default' (null)"""
if value == '0' or value == 0 or value == '' or value is None:
if value == "0" or value == 0 or value == "" or value is None:
return None
return value # PrimaryKeyRelatedField will handle the conversion to object
class ChannelGroupM3UAccountSerializer(serializers.ModelSerializer):
enabled = serializers.BooleanField()
class Meta:
model = ChannelGroupM3UAccount
fields = ['id', 'channel_group', 'enabled']
fields = ["id", "channel_group", "enabled"]
# Optionally, if you only need the id of the ChannelGroup, you can customize it like this:
# channel_group = serializers.PrimaryKeyRelatedField(queryset=ChannelGroup.objects.all())
@ -272,12 +298,12 @@ class ChannelGroupM3UAccountSerializer(serializers.ModelSerializer):
class RecordingSerializer(serializers.ModelSerializer):
class Meta:
model = Recording
fields = '__all__'
read_only_fields = ['task_id']
fields = "__all__"
read_only_fields = ["task_id"]
def validate(self, data):
start_time = data.get('start_time')
end_time = data.get('end_time')
start_time = data.get("start_time")
end_time = data.get("end_time")
now = timezone.now() # timezone-aware current time
@ -286,8 +312,8 @@ class RecordingSerializer(serializers.ModelSerializer):
if start_time < now:
# Optional: Adjust start_time if it's in the past but end_time is in the future
data['start_time'] = now # or: timezone.now() + timedelta(seconds=1)
if end_time <= data['start_time']:
data["start_time"] = now # or: timezone.now() + timedelta(seconds=1)
if end_time <= data["start_time"]:
raise serializers.ValidationError("End time must be after start time.")
return data
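
ChannelSerializer expands streams into nested objects only when include_streams is passed through the serializer context; otherwise the field stays a plain list of primary keys. validate_stream_profile also treats "", 0, and "0" as "use the default profile". A small usage sketch (the channel lookup is an example):

from apps.channels.models import Channel
from apps.channels.serializers import ChannelSerializer

channel = Channel.objects.first()  # assumed existing channel

# Default representation: "streams" is a list of stream IDs
compact = ChannelSerializer(channel).data

# Expanded representation: nested StreamSerializer output, ordered by ChannelStream.order
expanded = ChannelSerializer(channel, context={"include_streams": True}).data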

View file

@ -2,18 +2,27 @@ import logging, os
from rest_framework import viewsets, status
from rest_framework.response import Response
from rest_framework.views import APIView
from rest_framework.permissions import IsAuthenticated
from rest_framework.decorators import action
from drf_yasg.utils import swagger_auto_schema
from drf_yasg import openapi
from django.utils import timezone
from datetime import timedelta
from .models import EPGSource, ProgramData, EPGData # Added ProgramData
from .serializers import ProgramDataSerializer, EPGSourceSerializer, EPGDataSerializer # Updated serializer
from .serializers import (
ProgramDataSerializer,
EPGSourceSerializer,
EPGDataSerializer,
) # Updated serializer
from .tasks import refresh_epg_data
from apps.accounts.permissions import (
Authenticated,
permission_classes_by_action,
permission_classes_by_method,
)
logger = logging.getLogger(__name__)
# ─────────────────────────────
# 1) EPG Source API (CRUD)
# ─────────────────────────────
@ -21,30 +30,38 @@ class EPGSourceViewSet(viewsets.ModelViewSet):
"""
API endpoint that allows EPG sources to be viewed or edited.
"""
queryset = EPGSource.objects.all()
serializer_class = EPGSourceSerializer
permission_classes = [IsAuthenticated]
def get_permissions(self):
try:
return [perm() for perm in permission_classes_by_action[self.action]]
except KeyError:
return [Authenticated()]
def list(self, request, *args, **kwargs):
logger.debug("Listing all EPG sources.")
return super().list(request, *args, **kwargs)
@action(detail=False, methods=['post'])
@action(detail=False, methods=["post"])
def upload(self, request):
if 'file' not in request.FILES:
return Response({'error': 'No file uploaded'}, status=status.HTTP_400_BAD_REQUEST)
if "file" not in request.FILES:
return Response(
{"error": "No file uploaded"}, status=status.HTTP_400_BAD_REQUEST
)
file = request.FILES['file']
file = request.FILES["file"]
file_name = file.name
file_path = os.path.join('/data/uploads/epgs', file_name)
file_path = os.path.join("/data/uploads/epgs", file_name)
os.makedirs(os.path.dirname(file_path), exist_ok=True)
with open(file_path, 'wb+') as destination:
with open(file_path, "wb+") as destination:
for chunk in file.chunks():
destination.write(chunk)
new_obj_data = request.data.copy()
new_obj_data['file_path'] = file_path
new_obj_data["file_path"] = file_path
serializer = self.get_serializer(data=new_obj_data)
serializer.is_valid(raise_exception=True)
@ -57,55 +74,78 @@ class EPGSourceViewSet(viewsets.ModelViewSet):
instance = self.get_object()
# Check if we're toggling is_active
if 'is_active' in request.data and instance.is_active != request.data['is_active']:
if (
"is_active" in request.data
and instance.is_active != request.data["is_active"]
):
# Set appropriate status based on new is_active value
if request.data['is_active']:
request.data['status'] = 'idle'
if request.data["is_active"]:
request.data["status"] = "idle"
else:
request.data['status'] = 'disabled'
request.data["status"] = "disabled"
# Continue with regular partial update
return super().partial_update(request, *args, **kwargs)
# ─────────────────────────────
# 2) Program API (CRUD)
# ─────────────────────────────
class ProgramViewSet(viewsets.ModelViewSet):
"""Handles CRUD operations for EPG programs"""
queryset = ProgramData.objects.all()
serializer_class = ProgramDataSerializer
permission_classes = [IsAuthenticated]
def get_permissions(self):
try:
return [perm() for perm in permission_classes_by_action[self.action]]
except KeyError:
return [Authenticated()]
def list(self, request, *args, **kwargs):
logger.debug("Listing all EPG programs.")
return super().list(request, *args, **kwargs)
# ─────────────────────────────
# 3) EPG Grid View
# ─────────────────────────────
class EPGGridAPIView(APIView):
"""Returns all programs airing in the next 24 hours including currently running ones and recent ones"""
def get_permissions(self):
try:
return [
perm() for perm in permission_classes_by_method[self.request.method]
]
except KeyError:
return [Authenticated()]
@swagger_auto_schema(
operation_description="Retrieve programs from the previous hour, currently running and upcoming for the next 24 hours",
responses={200: ProgramDataSerializer(many=True)}
responses={200: ProgramDataSerializer(many=True)},
)
def get(self, request, format=None):
# Use current time instead of midnight
now = timezone.now()
one_hour_ago = now - timedelta(hours=1)
twenty_four_hours_later = now + timedelta(hours=24)
logger.debug(f"EPGGridAPIView: Querying programs between {one_hour_ago} and {twenty_four_hours_later}.")
logger.debug(
f"EPGGridAPIView: Querying programs between {one_hour_ago} and {twenty_four_hours_later}."
)
# Use select_related to prefetch EPGData and include programs from the last hour
programs = ProgramData.objects.select_related('epg').filter(
programs = ProgramData.objects.select_related("epg").filter(
# Programs that end after one hour ago (includes recently ended programs)
end_time__gt=one_hour_ago,
# AND start before the end time window
start_time__lt=twenty_four_hours_later
start_time__lt=twenty_four_hours_later,
)
count = programs.count()
logger.debug(f"EPGGridAPIView: Found {count} program(s), including recently ended, currently running, and upcoming shows.")
logger.debug(
f"EPGGridAPIView: Found {count} program(s), including recently ended, currently running, and upcoming shows."
)
# Generate dummy programs for channels that have no EPG data
from apps.channels.models import Channel
@ -118,9 +158,13 @@ class EPGGridAPIView(APIView):
# Log more detailed information about channels missing EPG data
if channels_count > 0:
channel_names = [f"{ch.name} (ID: {ch.id})" for ch in channels_without_epg]
logger.warning(f"EPGGridAPIView: Missing EPG data for these channels: {', '.join(channel_names)}")
logger.warning(
f"EPGGridAPIView: Missing EPG data for these channels: {', '.join(channel_names)}"
)
logger.debug(f"EPGGridAPIView: Found {channels_count} channels with no EPG data.")
logger.debug(
f"EPGGridAPIView: Found {channels_count} channels with no EPG data."
)
# Serialize the regular programs
serialized_programs = ProgramDataSerializer(programs, many=True).data
@ -130,33 +174,33 @@ class EPGGridAPIView(APIView):
(0, 4): [
"Late Night with {channel} - Where insomniacs unite!",
"The 'Why Am I Still Awake?' Show on {channel}",
"Counting Sheep - A {channel} production for the sleepless"
"Counting Sheep - A {channel} production for the sleepless",
],
(4, 8): [
"Dawn Patrol - Rise and shine with {channel}!",
"Early Bird Special - Coffee not included",
"Morning Zombies - Before coffee viewing on {channel}"
"Morning Zombies - Before coffee viewing on {channel}",
],
(8, 12): [
"Mid-Morning Meetings - Pretend you're paying attention while watching {channel}",
"The 'I Should Be Working' Hour on {channel}",
"Productivity Killer - {channel}'s daytime programming"
"Productivity Killer - {channel}'s daytime programming",
],
(12, 16): [
"Lunchtime Laziness with {channel}",
"The Afternoon Slump - Brought to you by {channel}",
"Post-Lunch Food Coma Theater on {channel}"
"Post-Lunch Food Coma Theater on {channel}",
],
(16, 20): [
"Rush Hour - {channel}'s alternative to traffic",
"The 'What's For Dinner?' Debate on {channel}",
"Evening Escapism - {channel}'s remedy for reality"
"Evening Escapism - {channel}'s remedy for reality",
],
(20, 24): [
"Prime Time Placeholder - {channel}'s finest not-programming",
"The 'Netflix Was Too Complicated' Show on {channel}",
"Family Argument Avoider - Courtesy of {channel}"
]
"Family Argument Avoider - Courtesy of {channel}",
],
}
# Generate and append dummy programs
@ -184,7 +228,9 @@ class EPGGridAPIView(APIView):
if start_range <= hour < end_range:
# Pick a description using the sum of the hour and day as seed
# This makes it somewhat random but consistent for the same timeslot
description = descriptions[(hour + day) % len(descriptions)].format(channel=channel.name)
description = descriptions[
(hour + day) % len(descriptions)
].format(channel=channel.name)
break
else:
# Fallback description if somehow no range matches
@ -192,29 +238,31 @@ class EPGGridAPIView(APIView):
# Create a dummy program in the same format as regular programs
dummy_program = {
'id': f"dummy-{channel.id}-{hour_offset}", # Create a unique ID
'epg': {
'tvg_id': dummy_tvg_id,
'name': channel.name
},
'start_time': start_time.isoformat(),
'end_time': end_time.isoformat(),
'title': f"{channel.name}",
'description': description,
'tvg_id': dummy_tvg_id,
'sub_title': None,
'custom_properties': None
"id": f"dummy-{channel.id}-{hour_offset}", # Create a unique ID
"epg": {"tvg_id": dummy_tvg_id, "name": channel.name},
"start_time": start_time.isoformat(),
"end_time": end_time.isoformat(),
"title": f"{channel.name}",
"description": description,
"tvg_id": dummy_tvg_id,
"sub_title": None,
"custom_properties": None,
}
dummy_programs.append(dummy_program)
except Exception as e:
logger.error(f"Error creating dummy programs for channel {channel.name} (ID: {channel.id}): {str(e)}")
logger.error(
f"Error creating dummy programs for channel {channel.name} (ID: {channel.id}): {str(e)}"
)
# Combine regular and dummy programs
all_programs = list(serialized_programs) + dummy_programs
logger.debug(f"EPGGridAPIView: Returning {len(all_programs)} total programs (including {len(dummy_programs)} dummy programs).")
logger.debug(
f"EPGGridAPIView: Returning {len(all_programs)} total programs (including {len(dummy_programs)} dummy programs)."
)
return Response({"data": all_programs}, status=status.HTTP_200_OK)
return Response({'data': all_programs}, status=status.HTTP_200_OK)
# ─────────────────────────────
# 4) EPG Import View
@ -222,15 +270,26 @@ class EPGGridAPIView(APIView):
class EPGImportAPIView(APIView):
"""Triggers an EPG data refresh"""
def get_permissions(self):
try:
return [
perm() for perm in permission_classes_by_method[self.request.method]
]
except KeyError:
return [Authenticated()]
@swagger_auto_schema(
operation_description="Triggers an EPG data import",
responses={202: "EPG data import initiated"}
responses={202: "EPG data import initiated"},
)
def post(self, request, format=None):
logger.info("EPGImportAPIView: Received request to import EPG data.")
refresh_epg_data.delay(request.data.get('id', None)) # Trigger Celery task
refresh_epg_data.delay(request.data.get("id", None)) # Trigger Celery task
logger.info("EPGImportAPIView: Task dispatched to refresh EPG data.")
return Response({'success': True, 'message': 'EPG data import initiated.'}, status=status.HTTP_202_ACCEPTED)
return Response(
{"success": True, "message": "EPG data import initiated."},
status=status.HTTP_202_ACCEPTED,
)
# ─────────────────────────────
@ -240,6 +299,12 @@ class EPGDataViewSet(viewsets.ReadOnlyModelViewSet):
"""
API endpoint that allows EPGData objects to be viewed.
"""
queryset = EPGData.objects.all()
serializer_class = EPGDataSerializer
permission_classes = [IsAuthenticated]
def get_permissions(self):
try:
return [perm() for perm in permission_classes_by_action[self.action]]
except KeyError:
return [Authenticated()]
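
For channels without EPG data, the grid view manufactures hour-long dummy programs whose description comes from a time-of-day bucket, indexed by (hour + day) so the text is stable for a given slot. A condensed sketch of that selection step (bucket text abbreviated, fallback wording illustrative):

DESCRIPTIONS_BY_BUCKET = {
    (0, 4): ["Late Night with {channel} - Where insomniacs unite!"],
    (4, 8): ["Dawn Patrol - Rise and shine with {channel}!"],
    (8, 12): ["The 'I Should Be Working' Hour on {channel}"],
    (12, 16): ["Lunchtime Laziness with {channel}"],
    (16, 20): ["Rush Hour - {channel}'s alternative to traffic"],
    (20, 24): ["Prime Time Placeholder - {channel}'s finest not-programming"],
}

def pick_description(hour, day, channel_name):
    # Deterministic per-timeslot pick, mirroring the dummy-program generation above
    for (start, end), descriptions in DESCRIPTIONS_BY_BUCKET.items():
        if start <= hour < end:
            return descriptions[(hour + day) % len(descriptions)].format(channel=channel_name)
    return f"Placeholder programming for {channel_name}"  # fallback if no bucket matches

print(pick_description(21, 3, "Example TV"))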

View file

@ -1,7 +1,7 @@
from rest_framework import viewsets, status
from rest_framework.response import Response
from rest_framework.views import APIView
from rest_framework.permissions import IsAuthenticated
from apps.accounts.permissions import Authenticated, permission_classes_by_action
from django.http import JsonResponse, HttpResponseForbidden, HttpResponse
import logging
from drf_yasg.utils import swagger_auto_schema
@ -18,21 +18,30 @@ from django.utils.decorators import method_decorator
from django.contrib.auth.decorators import login_required
from django.views.decorators.csrf import csrf_exempt
from apps.m3u.models import M3UAccountProfile
# Configure logger
logger = logging.getLogger(__name__)
@login_required
def hdhr_dashboard_view(request):
"""Render the HDHR management page."""
hdhr_devices = HDHRDevice.objects.all()
return render(request, "hdhr/hdhr.html", {"hdhr_devices": hdhr_devices})
# 🔹 1) HDHomeRun Device API
class HDHRDeviceViewSet(viewsets.ModelViewSet):
"""Handles CRUD operations for HDHomeRun devices"""
queryset = HDHRDevice.objects.all()
serializer_class = HDHRDeviceSerializer
permission_classes = [IsAuthenticated]
def get_permissions(self):
try:
return [perm() for perm in permission_classes_by_action[self.action]]
except KeyError:
return [Authenticated()]
# 🔹 2) Discover API
@ -41,20 +50,20 @@ class DiscoverAPIView(APIView):
@swagger_auto_schema(
operation_description="Retrieve HDHomeRun device discovery information",
responses={200: openapi.Response("HDHR Discovery JSON")}
responses={200: openapi.Response("HDHR Discovery JSON")},
)
def get(self, request, profile=None):
uri_parts = ["hdhr"]
if profile is not None:
uri_parts.append(profile)
base_url = request.build_absolute_uri(f'/{"/".join(uri_parts)}/').rstrip('/')
base_url = request.build_absolute_uri(f'/{"/".join(uri_parts)}/').rstrip("/")
device = HDHRDevice.objects.first()
# Calculate tuner count from active profiles from active M3U accounts (excluding default "custom Default" profile)
profiles = M3UAccountProfile.objects.filter(
is_active=True,
m3u_account__is_active=True # Only include profiles from enabled M3U accounts
m3u_account__is_active=True, # Only include profiles from enabled M3U accounts
).exclude(id=1)
# 1. Check if any profile has unlimited streams (max_streams=0)
@ -63,9 +72,12 @@ class DiscoverAPIView(APIView):
# 2. Calculate tuner count from limited profiles
limited_tuners = 0
if not has_unlimited:
limited_tuners = profiles.filter(max_streams__gt=0).aggregate(
total=models.Sum('max_streams')
).get('total', 0) or 0
limited_tuners = (
profiles.filter(max_streams__gt=0)
.aggregate(total=models.Sum("max_streams"))
.get("total", 0)
or 0
)
# 3. Add custom stream count to tuner count
custom_stream_count = Stream.objects.filter(is_custom=True).count()
@ -82,7 +94,9 @@ class DiscoverAPIView(APIView):
# 5. Ensure a minimum of 1 tuner
tuner_count = max(1, tuner_count)
logger.debug(f"Calculated tuner count: {tuner_count} (limited profiles: {limited_tuners}, custom streams: {custom_stream_count}, unlimited: {has_unlimited})")
logger.debug(
f"Calculated tuner count: {tuner_count} (limited profiles: {limited_tuners}, custom streams: {custom_stream_count}, unlimited: {has_unlimited})"
)
# Create a unique DeviceID for the HDHomeRun device based on profile ID or a default value
device_ID = "12345678" # Default DeviceID
@ -123,17 +137,17 @@ class LineupAPIView(APIView):
@swagger_auto_schema(
operation_description="Retrieve the available channel lineup",
responses={200: openapi.Response("Channel Lineup JSON")}
responses={200: openapi.Response("Channel Lineup JSON")},
)
def get(self, request, profile=None):
if profile is not None:
channel_profile = ChannelProfile.objects.get(name=profile)
channels = Channel.objects.filter(
channelprofilemembership__channel_profile=channel_profile,
channelprofilemembership__enabled=True
).order_by('channel_number')
channelprofilemembership__enabled=True,
).order_by("channel_number")
else:
channels = Channel.objects.all().order_by('channel_number')
channels = Channel.objects.all().order_by("channel_number")
lineup = []
for ch in channels:
@ -146,13 +160,15 @@ class LineupAPIView(APIView):
else:
formatted_channel_number = ""
lineup.append({
"GuideNumber": formatted_channel_number,
"GuideName": ch.name,
"URL": request.build_absolute_uri(f"/proxy/ts/stream/{ch.uuid}"),
"Guide_ID": formatted_channel_number,
"Station": formatted_channel_number,
})
lineup.append(
{
"GuideNumber": formatted_channel_number,
"GuideName": ch.name,
"URL": request.build_absolute_uri(f"/proxy/ts/stream/{ch.uuid}"),
"Guide_ID": formatted_channel_number,
"Station": formatted_channel_number,
}
)
return JsonResponse(lineup, safe=False)
@ -162,14 +178,14 @@ class LineupStatusAPIView(APIView):
@swagger_auto_schema(
operation_description="Retrieve the HDHomeRun lineup status",
responses={200: openapi.Response("Lineup Status JSON")}
responses={200: openapi.Response("Lineup Status JSON")},
)
def get(self, request, profile=None):
data = {
"ScanInProgress": 0,
"ScanPossible": 0,
"Source": "Cable",
"SourceList": ["Cable"]
"SourceList": ["Cable"],
}
return JsonResponse(data)
@ -180,10 +196,10 @@ class HDHRDeviceXMLAPIView(APIView):
@swagger_auto_schema(
operation_description="Retrieve the HDHomeRun device XML configuration",
responses={200: openapi.Response("HDHR Device XML")}
responses={200: openapi.Response("HDHR Device XML")},
)
def get(self, request):
base_url = request.build_absolute_uri('/hdhr/').rstrip('/')
base_url = request.build_absolute_uri("/hdhr/").rstrip("/")
xml_response = f"""<?xml version="1.0" encoding="utf-8"?>
<root>
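
The advertised TunerCount is derived from the active M3U profiles: when no profile is unlimited, the limited profiles' max_streams are summed, custom streams are added on top, and the result never drops below 1. A plain-Python sketch of that arithmetic (the value used for the unlimited max_streams=0 case is an assumption, since that branch is not shown in this hunk):

def advertised_tuner_count(profile_max_streams, custom_stream_count, unlimited_value=10):
    # Sketch only: `unlimited_value` stands in for whatever the elided
    # "unlimited" branch uses; the rest mirrors the calculation above.
    if any(max_streams == 0 for max_streams in profile_max_streams):
        tuners = unlimited_value
    else:
        tuners = sum(m for m in profile_max_streams if m > 0)
    tuners += custom_stream_count  # step 3: custom streams count as tuners too
    return max(1, tuners)          # step 5: never advertise fewer than 1 tuner

print(advertised_tuner_count([2, 3], custom_stream_count=1))     # 2 + 3 + 1 = 6
print(advertised_tuner_count([2, 0, 3], custom_stream_count=1))  # unlimited profile present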

View file

@ -1,7 +1,7 @@
from rest_framework import viewsets, status
from rest_framework.response import Response
from rest_framework.views import APIView
from rest_framework.permissions import IsAuthenticated
from apps.accounts.permissions import Authenticated, permission_classes_by_action
from django.http import JsonResponse, HttpResponseForbidden, HttpResponse
from drf_yasg.utils import swagger_auto_schema
from drf_yasg import openapi
@ -16,18 +16,26 @@ from django.utils.decorators import method_decorator
from django.contrib.auth.decorators import login_required
from django.views.decorators.csrf import csrf_exempt
@login_required
def hdhr_dashboard_view(request):
"""Render the HDHR management page."""
hdhr_devices = HDHRDevice.objects.all()
return render(request, "hdhr/hdhr.html", {"hdhr_devices": hdhr_devices})
# 🔹 1) HDHomeRun Device API
class HDHRDeviceViewSet(viewsets.ModelViewSet):
"""Handles CRUD operations for HDHomeRun devices"""
queryset = HDHRDevice.objects.all()
serializer_class = HDHRDeviceSerializer
permission_classes = [IsAuthenticated]
def get_permissions(self):
try:
return [perm() for perm in permission_classes_by_action[self.action]]
except KeyError:
return [Authenticated()]
# 🔹 2) Discover API
@ -36,10 +44,10 @@ class DiscoverAPIView(APIView):
@swagger_auto_schema(
operation_description="Retrieve HDHomeRun device discovery information",
responses={200: openapi.Response("HDHR Discovery JSON")}
responses={200: openapi.Response("HDHR Discovery JSON")},
)
def get(self, request):
base_url = request.build_absolute_uri('/hdhr/').rstrip('/')
base_url = request.build_absolute_uri("/hdhr/").rstrip("/")
device = HDHRDevice.objects.first()
if not device:
@ -75,15 +83,15 @@ class LineupAPIView(APIView):
@swagger_auto_schema(
operation_description="Retrieve the available channel lineup",
responses={200: openapi.Response("Channel Lineup JSON")}
responses={200: openapi.Response("Channel Lineup JSON")},
)
def get(self, request):
channels = Channel.objects.all().order_by('channel_number')
channels = Channel.objects.all().order_by("channel_number")
lineup = [
{
"GuideNumber": str(ch.channel_number),
"GuideName": ch.name,
"URL": request.build_absolute_uri(f"/proxy/ts/stream/{ch.uuid}")
"URL": request.build_absolute_uri(f"/proxy/ts/stream/{ch.uuid}"),
}
for ch in channels
]
@ -96,14 +104,14 @@ class LineupStatusAPIView(APIView):
@swagger_auto_schema(
operation_description="Retrieve the HDHomeRun lineup status",
responses={200: openapi.Response("Lineup Status JSON")}
responses={200: openapi.Response("Lineup Status JSON")},
)
def get(self, request):
data = {
"ScanInProgress": 0,
"ScanPossible": 0,
"Source": "Cable",
"SourceList": ["Cable"]
"SourceList": ["Cable"],
}
return JsonResponse(data)
@ -114,10 +122,10 @@ class HDHRDeviceXMLAPIView(APIView):
@swagger_auto_schema(
operation_description="Retrieve the HDHomeRun device XML configuration",
responses={200: openapi.Response("HDHR Device XML")}
responses={200: openapi.Response("HDHR Device XML")},
)
def get(self, request):
base_url = request.build_absolute_uri('/hdhr/').rstrip('/')
base_url = request.build_absolute_uri("/hdhr/").rstrip("/")
xml_response = f"""<?xml version="1.0" encoding="utf-8"?>
<root>

View file

@ -1,7 +1,11 @@
from rest_framework import viewsets, status
from rest_framework.response import Response
from rest_framework.views import APIView
from rest_framework.permissions import IsAuthenticated
from apps.accounts.permissions import (
Authenticated,
permission_classes_by_action,
permission_classes_by_method,
)
from drf_yasg.utils import swagger_auto_schema
from drf_yasg import openapi
from django.shortcuts import get_object_or_404
@ -17,6 +21,7 @@ from .models import M3UAccount, M3UFilter, ServerGroup, M3UAccountProfile
from core.models import UserAgent
from apps.channels.models import ChannelGroupM3UAccount
from core.serializers import UserAgentSerializer
# Import all serializers, including the UserAgentSerializer.
from .serializers import (
M3UAccountSerializer,
@ -29,45 +34,54 @@ from .tasks import refresh_single_m3u_account, refresh_m3u_accounts
from django.core.files.storage import default_storage
from django.core.files.base import ContentFile
class M3UAccountViewSet(viewsets.ModelViewSet):
"""Handles CRUD operations for M3U accounts"""
queryset = M3UAccount.objects.prefetch_related('channel_group')
queryset = M3UAccount.objects.prefetch_related("channel_group")
serializer_class = M3UAccountSerializer
permission_classes = [IsAuthenticated]
def get_permissions(self):
try:
return [perm() for perm in permission_classes_by_action[self.action]]
except KeyError:
return [Authenticated()]
def create(self, request, *args, **kwargs):
# Handle file upload first, if any
file_path = None
if 'file' in request.FILES:
file = request.FILES['file']
if "file" in request.FILES:
file = request.FILES["file"]
file_name = file.name
file_path = os.path.join('/data/uploads/m3us', file_name)
file_path = os.path.join("/data/uploads/m3us", file_name)
os.makedirs(os.path.dirname(file_path), exist_ok=True)
with open(file_path, 'wb+') as destination:
with open(file_path, "wb+") as destination:
for chunk in file.chunks():
destination.write(chunk)
# Add file_path to the request data so it's available during creation
request.data._mutable = True # Allow modification of the request data
request.data['file_path'] = file_path # Include the file path if a file was uploaded
request.data["file_path"] = (
file_path # Include the file path if a file was uploaded
)
# Handle the user_agent field - convert "null" string to None
if 'user_agent' in request.data and request.data['user_agent'] == 'null':
request.data['user_agent'] = None
if "user_agent" in request.data and request.data["user_agent"] == "null":
request.data["user_agent"] = None
# Handle server_url appropriately
if 'server_url' in request.data and not request.data['server_url']:
request.data.pop('server_url')
if "server_url" in request.data and not request.data["server_url"]:
request.data.pop("server_url")
request.data._mutable = False # Make the request data immutable again
# Now call super().create() to create the instance
response = super().create(request, *args, **kwargs)
print(response.data.get('account_type'))
if response.data.get('account_type') == M3UAccount.Types.XC:
refresh_m3u_groups(response.data.get('id'))
print(response.data.get("account_type"))
if response.data.get("account_type") == M3UAccount.Types.XC:
refresh_m3u_groups(response.data.get("id"))
# After the instance is created, return the response
return response
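For reference, a hypothetical client-side call exercising the create flow above. The endpoint path, port, and token are assumptions (adjust to the project's actual router); the "null" user_agent and the empty server_url mirror the normalization performed in create():

import requests  # hypothetical client example, not part of the server code

with open("playlist.m3u", "rb") as fh:
    resp = requests.post(
        "http://localhost:9191/api/m3u/accounts/",  # assumed path
        headers={"Authorization": "Bearer <token>"},
        data={"name": "My provider", "user_agent": "null", "server_url": ""},
        files={"file": fh},  # stored under /data/uploads/m3us by the view
    )
print(resp.status_code, resp.json().get("account_type"))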
@ -77,28 +91,30 @@ class M3UAccountViewSet(viewsets.ModelViewSet):
# Handle file upload first, if any
file_path = None
if 'file' in request.FILES:
file = request.FILES['file']
if "file" in request.FILES:
file = request.FILES["file"]
file_name = file.name
file_path = os.path.join('/data/uploads/m3us', file_name)
file_path = os.path.join("/data/uploads/m3us", file_name)
os.makedirs(os.path.dirname(file_path), exist_ok=True)
with open(file_path, 'wb+') as destination:
with open(file_path, "wb+") as destination:
for chunk in file.chunks():
destination.write(chunk)
# Add file_path to the request data so it's available during creation
request.data._mutable = True # Allow modification of the request data
request.data['file_path'] = file_path # Include the file path if a file was uploaded
request.data["file_path"] = (
file_path # Include the file path if a file was uploaded
)
# Handle the user_agent field - convert "null" string to None
if 'user_agent' in request.data and request.data['user_agent'] == 'null':
request.data['user_agent'] = None
if "user_agent" in request.data and request.data["user_agent"] == "null":
request.data["user_agent"] = None
# Handle server_url appropriately
if 'server_url' in request.data and not request.data['server_url']:
request.data.pop('server_url')
if "server_url" in request.data and not request.data["server_url"]:
request.data.pop("server_url")
request.data._mutable = False # Make the request data immutable again
if instance.file_path and os.path.exists(instance.file_path):
@ -115,75 +131,131 @@ class M3UAccountViewSet(viewsets.ModelViewSet):
instance = self.get_object()
# Check if we're toggling is_active
if 'is_active' in request.data and instance.is_active != request.data['is_active']:
if (
"is_active" in request.data
and instance.is_active != request.data["is_active"]
):
# Set appropriate status based on new is_active value
if request.data['is_active']:
request.data['status'] = M3UAccount.Status.IDLE
if request.data["is_active"]:
request.data["status"] = M3UAccount.Status.IDLE
else:
request.data['status'] = M3UAccount.Status.DISABLED
request.data["status"] = M3UAccount.Status.DISABLED
# Continue with regular partial update
return super().partial_update(request, *args, **kwargs)
class M3UFilterViewSet(viewsets.ModelViewSet):
"""Handles CRUD operations for M3U filters"""
queryset = M3UFilter.objects.all()
serializer_class = M3UFilterSerializer
permission_classes = [IsAuthenticated]
def get_permissions(self):
try:
return [perm() for perm in permission_classes_by_action[self.action]]
except KeyError:
return [Authenticated()]
class ServerGroupViewSet(viewsets.ModelViewSet):
"""Handles CRUD operations for Server Groups"""
queryset = ServerGroup.objects.all()
serializer_class = ServerGroupSerializer
permission_classes = [IsAuthenticated]
def get_permissions(self):
try:
return [perm() for perm in permission_classes_by_action[self.action]]
except KeyError:
return [Authenticated()]
class RefreshM3UAPIView(APIView):
"""Triggers refresh for all active M3U accounts"""
def get_permissions(self):
try:
return [
perm() for perm in permission_classes_by_method[self.request.method]
]
except KeyError:
return [Authenticated()]
@swagger_auto_schema(
operation_description="Triggers a refresh of all active M3U accounts",
responses={202: "M3U refresh initiated"}
responses={202: "M3U refresh initiated"},
)
def post(self, request, format=None):
refresh_m3u_accounts.delay()
return Response({'success': True, 'message': 'M3U refresh initiated.'}, status=status.HTTP_202_ACCEPTED)
return Response(
{"success": True, "message": "M3U refresh initiated."},
status=status.HTTP_202_ACCEPTED,
)
class RefreshSingleM3UAPIView(APIView):
"""Triggers refresh for a single M3U account"""
def get_permissions(self):
try:
return [
perm() for perm in permission_classes_by_method[self.request.method]
]
except KeyError:
return [Authenticated()]
@swagger_auto_schema(
operation_description="Triggers a refresh of a single M3U account",
responses={202: "M3U account refresh initiated"}
responses={202: "M3U account refresh initiated"},
)
def post(self, request, account_id, format=None):
refresh_single_m3u_account.delay(account_id)
return Response({'success': True, 'message': f'M3U account {account_id} refresh initiated.'},
status=status.HTTP_202_ACCEPTED)
return Response(
{
"success": True,
"message": f"M3U account {account_id} refresh initiated.",
},
status=status.HTTP_202_ACCEPTED,
)
class UserAgentViewSet(viewsets.ModelViewSet):
"""Handles CRUD operations for User Agents"""
queryset = UserAgent.objects.all()
serializer_class = UserAgentSerializer
permission_classes = [IsAuthenticated]
def get_permissions(self):
try:
return [perm() for perm in permission_classes_by_action[self.action]]
except KeyError:
return [Authenticated()]
class M3UAccountProfileViewSet(viewsets.ModelViewSet):
queryset = M3UAccountProfile.objects.all()
serializer_class = M3UAccountProfileSerializer
permission_classes = [IsAuthenticated]
def get_permissions(self):
try:
return [perm() for perm in permission_classes_by_action[self.action]]
except KeyError:
return [Authenticated()]
def get_queryset(self):
m3u_account_id = self.kwargs['account_id']
m3u_account_id = self.kwargs["account_id"]
return M3UAccountProfile.objects.filter(m3u_account_id=m3u_account_id)
def perform_create(self, serializer):
# Get the account ID from the URL
account_id = self.kwargs['account_id']
account_id = self.kwargs["account_id"]
# Get the M3UAccount instance for the account_id
m3u_account = M3UAccount.objects.get(id=account_id)
# Save the 'm3u_account' in the serializer context
serializer.context['m3u_account'] = m3u_account
serializer.context["m3u_account"] = m3u_account
# Perform the actual save
serializer.save(m3u_account_id=m3u_account)

View file

@ -3,6 +3,7 @@
from django.db import migrations
from core.models import CoreSettings
def create_custom_account(apps, schema_editor):
default_user_agent_id = CoreSettings.get_default_user_agent_id()
@ -18,7 +19,7 @@ def create_custom_account(apps, schema_editor):
M3UAccountProfile = apps.get_model("m3u", "M3UAccountProfile")
M3UAccountProfile.objects.create(
m3u_account=m3u_account,
name=f'{m3u_account.name} Default',
name=f"{m3u_account.name} Default",
max_streams=m3u_account.max_streams,
is_default=True,
is_active=True,
@ -26,10 +27,12 @@ def create_custom_account(apps, schema_editor):
replace_pattern="$1",
)
class Migration(migrations.Migration):
dependencies = [
('m3u', '0002_m3uaccount_locked'),
("m3u", "0002_m3uaccount_locked"),
("core", "0004_preload_core_settings"),
]
operations = [

View file

@ -7,24 +7,29 @@ from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('django_celery_beat', '0019_alter_periodictasks_options'),
('m3u', '0004_m3uaccount_stream_profile'),
("django_celery_beat", "0019_alter_periodictasks_options"),
("m3u", "0004_m3uaccount_stream_profile"),
]
operations = [
migrations.AddField(
model_name='m3uaccount',
name='custom_properties',
model_name="m3uaccount",
name="custom_properties",
field=models.TextField(blank=True, null=True),
),
migrations.AddField(
model_name='m3uaccount',
name='refresh_interval',
model_name="m3uaccount",
name="refresh_interval",
field=models.IntegerField(default=24),
),
migrations.AddField(
model_name='m3uaccount',
name='refresh_task',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to='django_celery_beat.periodictask'),
model_name="m3uaccount",
name="refresh_task",
field=models.ForeignKey(
blank=True,
null=True,
on_delete=django.db.models.deletion.SET_NULL,
to="django_celery_beat.periodictask",
),
),
]

View file

@ -7,7 +7,8 @@ from apps.channels.models import StreamProfile
from django_celery_beat.models import PeriodicTask
from core.models import CoreSettings, UserAgent
CUSTOM_M3U_ACCOUNT_NAME="custom"
CUSTOM_M3U_ACCOUNT_NAME = "custom"
class M3UAccount(models.Model):
class Types(models.TextChoices):
@ -25,72 +26,61 @@ class M3UAccount(models.Model):
"""Represents an M3U Account for IPTV streams."""
name = models.CharField(
max_length=255,
unique=True,
help_text="Unique name for this M3U account"
max_length=255, unique=True, help_text="Unique name for this M3U account"
)
server_url = models.URLField(
blank=True,
null=True,
help_text="The base URL of the M3U server (optional if a file is uploaded)"
)
file_path = models.CharField(
max_length=255,
blank=True,
null=True
help_text="The base URL of the M3U server (optional if a file is uploaded)",
)
file_path = models.CharField(max_length=255, blank=True, null=True)
server_group = models.ForeignKey(
'ServerGroup',
"ServerGroup",
on_delete=models.SET_NULL,
null=True,
blank=True,
related_name='m3u_accounts',
help_text="The server group this M3U account belongs to"
related_name="m3u_accounts",
help_text="The server group this M3U account belongs to",
)
max_streams = models.PositiveIntegerField(
default=0,
help_text="Maximum number of concurrent streams (0 for unlimited)"
default=0, help_text="Maximum number of concurrent streams (0 for unlimited)"
)
is_active = models.BooleanField(
default=True,
help_text="Set to false to deactivate this M3U account"
default=True, help_text="Set to false to deactivate this M3U account"
)
created_at = models.DateTimeField(
auto_now_add=True,
help_text="Time when this account was created"
auto_now_add=True, help_text="Time when this account was created"
)
updated_at = models.DateTimeField(
null=True, blank=True,
help_text="Time when this account was last successfully refreshed"
null=True,
blank=True,
help_text="Time when this account was last successfully refreshed",
)
status = models.CharField(
max_length=20,
choices=Status.choices,
default=Status.IDLE
max_length=20, choices=Status.choices, default=Status.IDLE
)
last_message = models.TextField(
null=True,
blank=True,
help_text="Last status message, including success results or error information"
help_text="Last status message, including success results or error information",
)
user_agent = models.ForeignKey(
'core.UserAgent',
"core.UserAgent",
on_delete=models.SET_NULL,
null=True,
blank=True,
related_name='m3u_accounts',
help_text="The User-Agent associated with this M3U account."
related_name="m3u_accounts",
help_text="The User-Agent associated with this M3U account.",
)
locked = models.BooleanField(
default=False,
help_text="Protected - can't be deleted or modified"
default=False, help_text="Protected - can't be deleted or modified"
)
stream_profile = models.ForeignKey(
StreamProfile,
on_delete=models.SET_NULL,
null=True,
blank=True,
related_name='m3u_accounts'
related_name="m3u_accounts",
)
account_type = models.CharField(choices=Types.choices, default=Types.STADNARD)
username = models.CharField(max_length=255, null=True, blank=True)
@ -102,7 +92,7 @@ class M3UAccount(models.Model):
)
stale_stream_days = models.PositiveIntegerField(
default=7,
help_text="Number of days after which a stream will be removed if not seen in the M3U source."
help_text="Number of days after which a stream will be removed if not seen in the M3U source.",
)
def __str__(self):
@ -134,17 +124,19 @@ class M3UAccount(models.Model):
def get_user_agent(self):
user_agent = self.user_agent
if not user_agent:
user_agent = UserAgent.objects.get(id=CoreSettings.get_default_user_agent_id())
user_agent = UserAgent.objects.get(
id=CoreSettings.get_default_user_agent_id()
)
return user_agent
def save(self, *args, **kwargs):
# Prevent auto_now behavior by handling updated_at manually
if 'update_fields' in kwargs and 'updated_at' not in kwargs['update_fields']:
if "update_fields" in kwargs and "updated_at" not in kwargs["update_fields"]:
# Don't modify updated_at for regular updates
kwargs.setdefault('update_fields', [])
if 'updated_at' in kwargs['update_fields']:
kwargs['update_fields'].remove('updated_at')
kwargs.setdefault("update_fields", [])
if "updated_at" in kwargs["update_fields"]:
kwargs["update_fields"].remove("updated_at")
super().save(*args, **kwargs)
# def get_channel_groups(self):
@ -158,35 +150,36 @@ class M3UAccount(models.Model):
# """Return all streams linked to this account with enabled ChannelGroups."""
# return self.streams.filter(channel_group__in=ChannelGroup.objects.filter(m3u_account__enabled=True))
class M3UFilter(models.Model):
"""Defines filters for M3U accounts based on stream name or group title."""
FILTER_TYPE_CHOICES = (
('group', 'Group Title'),
('name', 'Stream Name'),
("group", "Group Title"),
("name", "Stream Name"),
)
m3u_account = models.ForeignKey(
M3UAccount,
on_delete=models.CASCADE,
related_name='filters',
help_text="The M3U account this filter is applied to."
related_name="filters",
help_text="The M3U account this filter is applied to.",
)
filter_type = models.CharField(
max_length=50,
choices=FILTER_TYPE_CHOICES,
default='group',
help_text="Filter based on either group title or stream name."
default="group",
help_text="Filter based on either group title or stream name.",
)
regex_pattern = models.CharField(
max_length=200,
help_text="A regex pattern to match streams or groups."
max_length=200, help_text="A regex pattern to match streams or groups."
)
exclude = models.BooleanField(
default=True,
help_text="If True, matching items are excluded; if False, only matches are included."
help_text="If True, matching items are excluded; if False, only matches are included.",
)
def applies_to(self, stream_name, group_name):
target = group_name if self.filter_type == 'group' else stream_name
target = group_name if self.filter_type == "group" else stream_name
return bool(re.search(self.regex_pattern, target, re.IGNORECASE))
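A quick standalone illustration of applies_to: the regex is matched case-insensitively against the group title or the stream name depending on filter_type (the pattern and names below are made up):

import re

def applies_to(regex_pattern, filter_type, stream_name, group_name):
    # Mirrors M3UFilter.applies_to above, with the model fields as arguments.
    target = group_name if filter_type == "group" else stream_name
    return bool(re.search(regex_pattern, target, re.IGNORECASE))

assert applies_to(r"sports", "group", "ESPN HD", "US Sports")      # matches the group title
assert not applies_to(r"sports", "name", "ESPN HD", "US Sports")   # stream name has no match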
def clean(self):
@ -196,7 +189,9 @@ class M3UFilter(models.Model):
raise ValidationError(f"Invalid regex pattern: {self.regex_pattern}")
def __str__(self):
filter_type_display = dict(self.FILTER_TYPE_CHOICES).get(self.filter_type, 'Unknown')
filter_type_display = dict(self.FILTER_TYPE_CHOICES).get(
self.filter_type, "Unknown"
)
exclude_status = "Exclude" if self.exclude else "Include"
return f"[{self.m3u_account.name}] {filter_type_display}: {self.regex_pattern} ({exclude_status})"
@ -222,40 +217,38 @@ class M3UFilter(models.Model):
class ServerGroup(models.Model):
"""Represents a logical grouping of servers or channels."""
name = models.CharField(
max_length=100,
unique=True,
help_text="Unique name for this server group."
max_length=100, unique=True, help_text="Unique name for this server group."
)
def __str__(self):
return self.name
from django.db import models
class M3UAccountProfile(models.Model):
"""Represents a profile associated with an M3U Account."""
m3u_account = models.ForeignKey(
'M3UAccount',
"M3UAccount",
on_delete=models.CASCADE,
related_name='profiles',
help_text="The M3U account this profile belongs to."
related_name="profiles",
help_text="The M3U account this profile belongs to.",
)
name = models.CharField(
max_length=255,
help_text="Name for the M3U account profile"
max_length=255, help_text="Name for the M3U account profile"
)
is_default = models.BooleanField(
default=False,
help_text="Set to false to deactivate this profile"
default=False, help_text="Set to false to deactivate this profile"
)
max_streams = models.PositiveIntegerField(
default=0,
help_text="Maximum number of concurrent streams (0 for unlimited)"
default=0, help_text="Maximum number of concurrent streams (0 for unlimited)"
)
is_active = models.BooleanField(
default=True,
help_text="Set to false to deactivate this profile"
default=True, help_text="Set to false to deactivate this profile"
)
search_pattern = models.CharField(
max_length=255,
@ -267,19 +260,22 @@ class M3UAccountProfile(models.Model):
class Meta:
constraints = [
models.UniqueConstraint(fields=['m3u_account', 'name'], name='unique_account_name')
models.UniqueConstraint(
fields=["m3u_account", "name"], name="unique_account_name"
)
]
def __str__(self):
return f"{self.name} ({self.m3u_account.name})"
@receiver(models.signals.post_save, sender=M3UAccount)
def create_profile_for_m3u_account(sender, instance, created, **kwargs):
"""Automatically create an M3UAccountProfile when M3UAccount is created."""
if created:
M3UAccountProfile.objects.create(
m3u_account=instance,
name=f'{instance.name} Default',
name=f"{instance.name} Default",
max_streams=instance.max_streams,
is_default=True,
is_active=True,
@ -292,6 +288,5 @@ def create_profile_for_m3u_account(sender, instance, created, **kwargs):
is_default=True,
)
profile.max_streams = instance.max_streams
profile.save()

View file

@ -3,33 +3,45 @@ from rest_framework.response import Response
from .models import M3UAccount, M3UFilter, ServerGroup, M3UAccountProfile
from core.models import UserAgent
from apps.channels.models import ChannelGroup, ChannelGroupM3UAccount
from apps.channels.serializers import ChannelGroupM3UAccountSerializer, ChannelGroupSerializer
from apps.channels.serializers import (
ChannelGroupM3UAccountSerializer,
ChannelGroupSerializer,
)
import logging
logger = logging.getLogger(__name__)
class M3UFilterSerializer(serializers.ModelSerializer):
"""Serializer for M3U Filters"""
channel_groups = ChannelGroupM3UAccountSerializer(source='m3u_account', many=True)
channel_groups = ChannelGroupM3UAccountSerializer(source="m3u_account", many=True)
class Meta:
model = M3UFilter
fields = ['id', 'filter_type', 'regex_pattern', 'exclude', 'channel_groups']
fields = ["id", "filter_type", "regex_pattern", "exclude", "channel_groups"]
from rest_framework import serializers
from .models import M3UAccountProfile
class M3UAccountProfileSerializer(serializers.ModelSerializer):
class Meta:
model = M3UAccountProfile
fields = ['id', 'name', 'max_streams', 'is_active', 'is_default', 'current_viewers', 'search_pattern', 'replace_pattern']
read_only_fields = ['id']
fields = [
"id",
"name",
"max_streams",
"is_active",
"is_default",
"current_viewers",
"search_pattern",
"replace_pattern",
]
read_only_fields = ["id"]
def create(self, validated_data):
m3u_account = self.context.get('m3u_account')
m3u_account = self.context.get("m3u_account")
# Use the m3u_account when creating the profile
validated_data['m3u_account_id'] = m3u_account.id
validated_data["m3u_account_id"] = m3u_account.id
return super().create(validated_data)
@ -43,12 +55,14 @@ class M3UAccountProfileSerializer(serializers.ModelSerializer):
if instance.is_default:
return Response(
{"error": "Default profiles cannot be deleted."},
status=status.HTTP_400_BAD_REQUEST
status=status.HTTP_400_BAD_REQUEST,
)
return super().destroy(request, *args, **kwargs)
class M3UAccountSerializer(serializers.ModelSerializer):
"""Serializer for M3U Account"""
filters = M3UFilterSerializer(many=True, read_only=True)
# Include user_agent as a mandatory field using its primary key.
user_agent = serializers.PrimaryKeyRelatedField(
@ -57,28 +71,48 @@ class M3UAccountSerializer(serializers.ModelSerializer):
allow_null=True,
)
profiles = M3UAccountProfileSerializer(many=True, read_only=True)
read_only_fields = ['locked', 'created_at', 'updated_at']
read_only_fields = ["locked", "created_at", "updated_at"]
# channel_groups = serializers.SerializerMethodField()
channel_groups = ChannelGroupM3UAccountSerializer(source='channel_group', many=True, required=False)
channel_groups = ChannelGroupM3UAccountSerializer(
source="channel_group", many=True, required=False
)
class Meta:
model = M3UAccount
fields = [
'id', 'name', 'server_url', 'file_path', 'server_group',
'max_streams', 'is_active', 'created_at', 'updated_at', 'filters', 'user_agent', 'profiles', 'locked',
'channel_groups', 'refresh_interval', 'custom_properties', 'account_type', 'username', 'password', 'stale_stream_days',
'status', 'last_message',
"id",
"name",
"server_url",
"file_path",
"server_group",
"max_streams",
"is_active",
"created_at",
"updated_at",
"filters",
"user_agent",
"profiles",
"locked",
"channel_groups",
"refresh_interval",
"custom_properties",
"account_type",
"username",
"password",
"stale_stream_days",
"status",
"last_message",
]
extra_kwargs = {
'password': {
'required': False,
'allow_blank': True,
"password": {
"required": False,
"allow_blank": True,
},
}
def update(self, instance, validated_data):
# Pop out channel group memberships so we can handle them manually
channel_group_data = validated_data.pop('channel_group', [])
channel_group_data = validated_data.pop("channel_group", [])
# First, update the M3UAccount itself
for attr, value in validated_data.items():
@ -88,13 +122,12 @@ class M3UAccountSerializer(serializers.ModelSerializer):
# Prepare a list of memberships to update
memberships_to_update = []
for group_data in channel_group_data:
group = group_data.get('channel_group')
enabled = group_data.get('enabled')
group = group_data.get("channel_group")
enabled = group_data.get("enabled")
try:
membership = ChannelGroupM3UAccount.objects.get(
m3u_account=instance,
channel_group=group
m3u_account=instance, channel_group=group
)
membership.enabled = enabled
memberships_to_update.append(membership)
@ -103,13 +136,16 @@ class M3UAccountSerializer(serializers.ModelSerializer):
# Perform the bulk update
if memberships_to_update:
ChannelGroupM3UAccount.objects.bulk_update(memberships_to_update, ['enabled'])
ChannelGroupM3UAccount.objects.bulk_update(
memberships_to_update, ["enabled"]
)
return instance
class ServerGroupSerializer(serializers.ModelSerializer):
"""Serializer for Server Group"""
class Meta:
model = ServerGroup
fields = ['id', 'name']
fields = ["id", "name"]

View file

@ -172,6 +172,13 @@ def fetch_m3u_lines(account, use_cache=False):
send_m3u_update(account.id, "downloading", 100, status="error", error=error_msg)
return [], False
def get_case_insensitive_attr(attributes, key, default=""):
"""Get attribute value using case-insensitive key lookup."""
for attr_key, attr_value in attributes.items():
if attr_key.lower() == key.lower():
return attr_value
return default
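A short usage sketch of the helper above, showing why the case-insensitive lookup matters for M3U sources that emit TVG-ID, tvg-id, or Tvg-Id interchangeably (the attribute values are invented):

def get_case_insensitive_attr(attributes, key, default=""):
    """Same helper as above, repeated here so the example is self-contained."""
    for attr_key, attr_value in attributes.items():
        if attr_key.lower() == key.lower():
            return attr_value
    return default

attrs = {"TVG-ID": "news.hd", "Group-Title": "News", "tvg-logo": "http://example.com/logo.png"}
assert get_case_insensitive_attr(attrs, "tvg-id") == "news.hd"
assert get_case_insensitive_attr(attrs, "group-title", "Default Group") == "News"
assert get_case_insensitive_attr(attrs, "tvg-name", "fallback") == "fallback"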
def parse_extinf_line(line: str) -> dict:
"""
Parse an EXTINF line from an M3U file.
@ -193,7 +200,7 @@ def parse_extinf_line(line: str) -> dict:
attributes_part, display_name = parts[0], parts[1].strip()
attrs = dict(re.findall(r'([^\s]+)=["\']([^"\']+)["\']', attributes_part))
# Use tvg-name attribute if available; otherwise, use the display name.
name = attrs.get('tvg-name', display_name)
name = get_case_insensitive_attr(attrs, 'tvg-name', display_name)
return {
'attributes': attrs,
'display_name': display_name,
@ -409,8 +416,8 @@ def process_m3u_batch(account_id, batch, groups, hash_keys):
for stream_info in batch:
try:
name, url = stream_info["name"], stream_info["url"]
tvg_id, tvg_logo = stream_info["attributes"].get("tvg-id", ""), stream_info["attributes"].get("tvg-logo", "")
group_title = stream_info["attributes"].get("group-title", "Default Group")
tvg_id, tvg_logo = get_case_insensitive_attr(stream_info["attributes"], "tvg-id", ""), get_case_insensitive_attr(stream_info["attributes"], "tvg-logo", "")
group_title = get_case_insensitive_attr(stream_info["attributes"], "group-title", "Default Group")
# Filter out disabled groups for this account
if group_title not in groups:
@ -712,8 +719,9 @@ def refresh_m3u_groups(account_id, use_cache=False, full_refresh=False):
extinf_count += 1
parsed = parse_extinf_line(line)
if parsed:
if "group-title" in parsed["attributes"]:
group_name = parsed["attributes"]["group-title"]
group_title_attr = get_case_insensitive_attr(parsed["attributes"], "group-title", "")
if group_title_attr:
group_name = group_title_attr
# Log new groups as they're discovered
if group_name not in groups:
logger.debug(f"Found new group for M3U account {account_id}: '{group_name}'")

View file

@ -1,16 +1,14 @@
from django.urls import path, re_path, include
from .views import generate_m3u, generate_epg
from .views import m3u_endpoint, epg_endpoint, xc_get
from core.views import stream_view
app_name = 'output'
app_name = "output"
urlpatterns = [
# Allow `/m3u`, `/m3u/`, `/m3u/profile_name`, and `/m3u/profile_name/`
re_path(r'^m3u(?:/(?P<profile_name>[^/]+))?/?$', generate_m3u, name='generate_m3u'),
re_path(r"^m3u(?:/(?P<profile_name>[^/]+))?/?$", m3u_endpoint, name="m3u_endpoint"),
# Allow `/epg`, `/epg/`, `/epg/profile_name`, and `/epg/profile_name/`
re_path(r'^epg(?:/(?P<profile_name>[^/]+))?/?$', generate_epg, name='generate_epg'),
re_path(r"^epg(?:/(?P<profile_name>[^/]+))?/?$", epg_endpoint, name="epg_endpoint"),
# Allow both `/stream/<int:stream_id>` and `/stream/<int:stream_id>/`
re_path(r'^stream/(?P<channel_uuid>[0-9a-fA-F\-]+)/?$', stream_view, name='stream'),
re_path(r"^stream/(?P<channel_uuid>[0-9a-fA-F\-]+)/?$", stream_view, name="stream"),
]

View file

@ -1,18 +1,40 @@
from django.http import HttpResponse, HttpResponseForbidden
import ipaddress
from django.http import HttpResponse, JsonResponse, Http404, HttpResponseForbidden
from rest_framework.response import Response
from django.urls import reverse
from apps.channels.models import Channel, ChannelProfile, ChannelGroup
from django.views.decorators.csrf import csrf_exempt
from django.views.decorators.http import require_http_methods
from apps.channels.models import Channel, ChannelProfile
from apps.epg.models import ProgramData
from apps.accounts.models import User
from core.models import CoreSettings, NETWORK_ACCESS
from dispatcharr.utils import network_access_allowed
from django.utils import timezone
from django.views.decorators.csrf import csrf_exempt
from django.shortcuts import get_object_or_404
from datetime import datetime, timedelta
import re
import html # Add this import for XML escaping
from tzlocal import get_localzone
import time
import json
from urllib.parse import urlparse
import base64
def m3u_endpoint(request, profile_name=None, user=None):
if not network_access_allowed(request, "M3U_EPG"):
return JsonResponse({"error": "Forbidden"}, status=403)
return generate_m3u(request, profile_name, user)
def epg_endpoint(request, profile_name=None, user=None):
if not network_access_allowed(request, "M3U_EPG"):
return JsonResponse({"error": "Forbidden"}, status=403)
return generate_epg(request, profile_name, user)
@csrf_exempt
@require_http_methods(["GET", "POST"])
def generate_m3u(request, profile_name=None):
def generate_m3u(request, profile_name=None, user=None):
"""
Dynamically generate an M3U file from channels.
The stream URL now points to the new stream_view that uses StreamProfile.
@ -22,6 +44,26 @@ def generate_m3u(request, profile_name=None):
if request.method == "POST" and request.body:
return HttpResponseForbidden("POST requests with content are not allowed")
if user is not None:
if user.user_level == 0:
filters = {
"channelprofilemembership__enabled": True,
"user_level__lte": user.user_level,
}
if user.channel_profiles.count() != 0:
channel_profiles = user.channel_profiles.all()
filters["channelprofilemembership__channel_profile__in"] = (
channel_profiles
)
channels = Channel.objects.filter(**filters).order_by("channel_number")
else:
channels = Channel.objects.filter(user_level__lte=user.user_level).order_by(
"channel_number"
)
if profile_name is not None:
channel_profile = ChannelProfile.objects.get(name=profile_name)
channels = Channel.objects.filter(
@ -29,7 +71,14 @@ def generate_m3u(request, profile_name=None):
channelprofilemembership__enabled=True
).order_by('channel_number')
else:
channels = Channel.objects.order_by('channel_number')
if profile_name is not None:
channel_profile = ChannelProfile.objects.get(name=profile_name)
channels = Channel.objects.filter(
channelprofilemembership__channel_profile=channel_profile,
channelprofilemembership__enabled=True,
).order_by("channel_number")
else:
channels = Channel.objects.order_by("channel_number")
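The user-gated branch above narrows the lineup by user_level and, when the user is pinned to specific channel profiles, by those memberships as well. A compact sketch of how that filter dict is assembled (the ORM lookup names mirror the code above; the inputs are stand-ins):

def build_channel_filters(user_level, profile_ids):
    # Level-0 users only see channels enabled in a profile at or below their level.
    filters = {
        "channelprofilemembership__enabled": True,
        "user_level__lte": user_level,
    }
    if profile_ids:  # only constrain by profile when the user has profiles assigned
        filters["channelprofilemembership__channel_profile__in"] = profile_ids
    return filters

assert "channelprofilemembership__channel_profile__in" in build_channel_filters(0, [1, 2])
assert "channelprofilemembership__channel_profile__in" not in build_channel_filters(0, [])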
# Check if the request wants to use direct logo URLs instead of cache
use_cached_logos = request.GET.get('cachedlogos', 'true').lower() != 'false'
@ -82,7 +131,9 @@ def generate_m3u(request, profile_name=None):
# create possible gracenote id insertion
tvc_guide_stationid = ""
if channel.tvc_guide_stationid:
tvc_guide_stationid = f'tvc-guide-stationid="{channel.tvc_guide_stationid}" '
tvc_guide_stationid = (
f'tvc-guide-stationid="{channel.tvc_guide_stationid}" '
)
extinf_line = (
f'#EXTINF:-1 tvg-id="{tvg_id}" tvg-name="{tvg_name}" tvg-logo="{tvg_logo}" '
@ -108,27 +159,11 @@ def generate_m3u(request, profile_name=None):
m3u_content += extinf_line + stream_url + "\n"
response = HttpResponse(m3u_content, content_type="audio/x-mpegurl")
response['Content-Disposition'] = 'attachment; filename="channels.m3u"'
response["Content-Disposition"] = 'attachment; filename="channels.m3u"'
return response
def generate_dummy_epg(channel_id, channel_name, xml_lines=None, num_days=1, program_length_hours=4):
"""
Generate dummy EPG programs for channels without EPG data.
Creates program blocks for a specified number of days.
Args:
channel_id: The channel ID to use in the program entries
channel_name: The name of the channel to use in program titles
xml_lines: Optional list to append lines to, otherwise returns new list
num_days: Number of days to generate EPG data for (default: 1)
program_length_hours: Length of each program block in hours (default: 4)
Returns:
List of XML lines for the dummy EPG entries
"""
if xml_lines is None:
xml_lines = []
def generate_dummy_programs(channel_id, channel_name, num_days=1, program_length_hours=4):
# Get current time rounded to hour
now = timezone.now()
now = now.replace(minute=0, second=0, microsecond=0)
@ -138,35 +173,37 @@ def generate_dummy_epg(channel_id, channel_name, xml_lines=None, num_days=1, pro
(0, 4): [
f"Late Night with {channel_name} - Where insomniacs unite!",
f"The 'Why Am I Still Awake?' Show on {channel_name}",
f"Counting Sheep - A {channel_name} production for the sleepless"
f"Counting Sheep - A {channel_name} production for the sleepless",
],
(4, 8): [
f"Dawn Patrol - Rise and shine with {channel_name}!",
f"Early Bird Special - Coffee not included",
f"Morning Zombies - Before coffee viewing on {channel_name}"
f"Morning Zombies - Before coffee viewing on {channel_name}",
],
(8, 12): [
f"Mid-Morning Meetings - Pretend you're paying attention while watching {channel_name}",
f"The 'I Should Be Working' Hour on {channel_name}",
f"Productivity Killer - {channel_name}'s daytime programming"
f"Productivity Killer - {channel_name}'s daytime programming",
],
(12, 16): [
f"Lunchtime Laziness with {channel_name}",
f"The Afternoon Slump - Brought to you by {channel_name}",
f"Post-Lunch Food Coma Theater on {channel_name}"
f"Post-Lunch Food Coma Theater on {channel_name}",
],
(16, 20): [
f"Rush Hour - {channel_name}'s alternative to traffic",
f"The 'What's For Dinner?' Debate on {channel_name}",
f"Evening Escapism - {channel_name}'s remedy for reality"
f"Evening Escapism - {channel_name}'s remedy for reality",
],
(20, 24): [
f"Prime Time Placeholder - {channel_name}'s finest not-programming",
f"The 'Netflix Was Too Complicated' Show on {channel_name}",
f"Family Argument Avoider - Courtesy of {channel_name}"
]
f"Family Argument Avoider - Courtesy of {channel_name}",
],
}
programs = []
# Create programs for each day
for day in range(num_days):
day_start = now + timedelta(days=day)
@ -192,19 +229,54 @@ def generate_dummy_epg(channel_id, channel_name, xml_lines=None, num_days=1, pro
# Fallback description if somehow no range matches
description = f"Placeholder program for {channel_name} - EPG data went on vacation"
# Format times in XMLTV format
start_str = start_time.strftime("%Y%m%d%H%M%S %z")
stop_str = end_time.strftime("%Y%m%d%H%M%S %z")
programs.append({
"channel_id": channel_id,
"start_time": start_time,
"end_time": end_time,
"title": channel_name,
"description": description,
})
# Create program entry with escaped channel name
xml_lines.append(f' <programme start="{start_str}" stop="{stop_str}" channel="{channel_id}">')
xml_lines.append(f' <title>{html.escape(channel_name)}</title>')
xml_lines.append(f' <desc>{html.escape(description)}</desc>')
xml_lines.append(f' </programme>')
return programs
def generate_dummy_epg(
channel_id, channel_name, xml_lines=None, num_days=1, program_length_hours=4
):
"""
Generate dummy EPG programs for channels without EPG data.
Creates program blocks for a specified number of days.
Args:
channel_id: The channel ID to use in the program entries
channel_name: The name of the channel to use in program titles
xml_lines: Optional list to append lines to, otherwise returns new list
num_days: Number of days to generate EPG data for (default: 1)
program_length_hours: Length of each program block in hours (default: 4)
Returns:
List of XML lines for the dummy EPG entries
"""
if xml_lines is None:
xml_lines = []
for program in generate_dummy_programs(channel_id, channel_name, num_days=num_days, program_length_hours=program_length_hours):
# Format times in XMLTV format
start_str = program['start_time'].strftime("%Y%m%d%H%M%S %z")
stop_str = program['end_time'].strftime("%Y%m%d%H%M%S %z")
# Create program entry with escaped channel name
xml_lines.append(
f' <programme start="{start_str}" stop="{stop_str}" channel="{program["channel_id"]}">'
)
xml_lines.append(f" <title>{html.escape(program['title'])}</title>")
xml_lines.append(f" <desc>{html.escape(program['description'])}</desc>")
xml_lines.append(f" </programme>")
return xml_lines
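The programme entries above use XMLTV's timestamp convention, %Y%m%d%H%M%S plus a UTC offset. A standalone illustration of that formatting with a plain aware datetime (the values are examples):

from datetime import datetime, timedelta, timezone

start = datetime(2025, 6, 12, 20, 0, tzinfo=timezone.utc)
end = start + timedelta(hours=4)

start_str = start.strftime("%Y%m%d%H%M%S %z")  # "20250612200000 +0000"
stop_str = end.strftime("%Y%m%d%H%M%S %z")     # "20250613000000 +0000"

programme = (
    f'<programme start="{start_str}" stop="{stop_str}" channel="42">'
    "<title>Example Channel</title></programme>"
)
print(programme)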
def generate_epg(request, profile_name=None):
def generate_epg(request, profile_name=None, user=None):
"""
Dynamically generate an XMLTV (EPG) file using the new EPGData/ProgramData models.
Since the EPG data is stored independently of Channels, we group programmes
@ -213,16 +285,37 @@ def generate_epg(request, profile_name=None):
"""
xml_lines = []
xml_lines.append('<?xml version="1.0" encoding="UTF-8"?>')
xml_lines.append('<tv generator-info-name="Dispatcharr" generator-info-url="https://github.com/Dispatcharr/Dispatcharr">')
xml_lines.append(
'<tv generator-info-name="Dispatcharr" generator-info-url="https://github.com/Dispatcharr/Dispatcharr">'
)
if profile_name is not None:
channel_profile = ChannelProfile.objects.get(name=profile_name)
channels = Channel.objects.filter(
channelprofilemembership__channel_profile=channel_profile,
channelprofilemembership__enabled=True
)
if user is not None:
if user.user_level == 0:
filters = {
"channelprofilemembership__enabled": True,
"user_level__lte": user.user_level,
}
if user.channel_profiles.count() != 0:
channel_profiles = user.channel_profiles.all()
filters["channelprofilemembership__channel_profile__in"] = (
channel_profiles
)
channels = Channel.objects.filter(**filters).order_by("channel_number")
else:
channels = Channel.objects.filter(user_level__lte=user.user_level).order_by(
"channel_number"
)
else:
channels = Channel.objects.all()
if profile_name is not None:
channel_profile = ChannelProfile.objects.get(name=profile_name)
channels = Channel.objects.filter(
channelprofilemembership__channel_profile=channel_profile,
channelprofilemembership__enabled=True,
)
else:
channels = Channel.objects.all()
# Check if the request wants to use direct logo URLs instead of cache
use_cached_logos = request.GET.get('cachedlogos', 'true').lower() != 'false'
@ -287,7 +380,7 @@ def generate_epg(request, profile_name=None):
xml_lines.append(f' <display-name>{html.escape(display_name)}</display-name>')
xml_lines.append(f' <icon src="{html.escape(tvg_logo)}" />')
xml_lines.append(' </channel>')
xml_lines.append(" </channel>")
for channel in channels:
# Use the same channel ID determination for program entries
@ -337,93 +430,436 @@ def generate_epg(request, profile_name=None):
# Add subtitle if available
if prog.sub_title:
xml_lines.append(f' <sub-title>{html.escape(prog.sub_title)}</sub-title>')
xml_lines.append(
f" <sub-title>{html.escape(prog.sub_title)}</sub-title>"
)
# Add description if available
if prog.description:
xml_lines.append(f' <desc>{html.escape(prog.description)}</desc>')
xml_lines.append(
f" <desc>{html.escape(prog.description)}</desc>"
)
# Process custom properties if available
if prog.custom_properties:
try:
import json
custom_data = json.loads(prog.custom_properties)
# Add categories if available
if 'categories' in custom_data and custom_data['categories']:
for category in custom_data['categories']:
xml_lines.append(f' <category>{html.escape(category)}</category>')
if "categories" in custom_data and custom_data["categories"]:
for category in custom_data["categories"]:
xml_lines.append(
f" <category>{html.escape(category)}</category>"
)
# Handle episode numbering - multiple formats supported
# Standard episode number if available
if 'episode' in custom_data:
xml_lines.append(f' <episode-num system="onscreen">E{custom_data["episode"]}</episode-num>')
if "episode" in custom_data:
xml_lines.append(
f' <episode-num system="onscreen">E{custom_data["episode"]}</episode-num>'
)
# Handle onscreen episode format (like S06E128)
if 'onscreen_episode' in custom_data:
xml_lines.append(f' <episode-num system="onscreen">{html.escape(custom_data["onscreen_episode"])}</episode-num>')
if "onscreen_episode" in custom_data:
xml_lines.append(
f' <episode-num system="onscreen">{html.escape(custom_data["onscreen_episode"])}</episode-num>'
)
# Handle dd_progid format
if 'dd_progid' in custom_data:
xml_lines.append(f' <episode-num system="dd_progid">{html.escape(custom_data["dd_progid"])}</episode-num>')
# Add season and episode numbers in xmltv_ns format if available
if 'season' in custom_data and 'episode' in custom_data:
season = int(custom_data['season']) - 1 if str(custom_data['season']).isdigit() else 0
episode = int(custom_data['episode']) - 1 if str(custom_data['episode']).isdigit() else 0
xml_lines.append(f' <episode-num system="xmltv_ns">{season}.{episode}.</episode-num>')
if "season" in custom_data and "episode" in custom_data:
season = (
int(custom_data["season"]) - 1
if str(custom_data["season"]).isdigit()
else 0
)
episode = (
int(custom_data["episode"]) - 1
if str(custom_data["episode"]).isdigit()
else 0
)
xml_lines.append(
f' <episode-num system="xmltv_ns">{season}.{episode}.</episode-num>'
)
# Add rating if available
if 'rating' in custom_data:
rating_system = custom_data.get('rating_system', 'TV Parental Guidelines')
xml_lines.append(f' <rating system="{html.escape(rating_system)}">')
xml_lines.append(f' <value>{html.escape(custom_data["rating"])}</value>')
xml_lines.append(f' </rating>')
if "rating" in custom_data:
rating_system = custom_data.get(
"rating_system", "TV Parental Guidelines"
)
xml_lines.append(
f' <rating system="{html.escape(rating_system)}">'
)
xml_lines.append(
f' <value>{html.escape(custom_data["rating"])}</value>'
)
xml_lines.append(f" </rating>")
# Add actors/directors/writers if available
if 'credits' in custom_data:
xml_lines.append(f' <credits>')
for role, people in custom_data['credits'].items():
if "credits" in custom_data:
xml_lines.append(f" <credits>")
for role, people in custom_data["credits"].items():
if isinstance(people, list):
for person in people:
xml_lines.append(f' <{role}>{html.escape(person)}</{role}>')
xml_lines.append(
f" <{role}>{html.escape(person)}</{role}>"
)
else:
xml_lines.append(f' <{role}>{html.escape(people)}</{role}>')
xml_lines.append(f' </credits>')
xml_lines.append(
f" <{role}>{html.escape(people)}</{role}>"
)
xml_lines.append(f" </credits>")
# Add program date/year if available
if 'year' in custom_data:
xml_lines.append(f' <date>{html.escape(custom_data["year"])}</date>')
if "year" in custom_data:
xml_lines.append(
f' <date>{html.escape(custom_data["year"])}</date>'
)
# Add country if available
if 'country' in custom_data:
xml_lines.append(f' <country>{html.escape(custom_data["country"])}</country>')
if "country" in custom_data:
xml_lines.append(
f' <country>{html.escape(custom_data["country"])}</country>'
)
# Add icon if available
if 'icon' in custom_data:
xml_lines.append(f' <icon src="{html.escape(custom_data["icon"])}" />')
if "icon" in custom_data:
xml_lines.append(
f' <icon src="{html.escape(custom_data["icon"])}" />'
)
# Add special flags as proper tags
if custom_data.get('previously_shown', False):
xml_lines.append(f' <previously-shown />')
if custom_data.get("previously_shown", False):
xml_lines.append(f" <previously-shown />")
if custom_data.get('premiere', False):
xml_lines.append(f' <premiere />')
if custom_data.get("premiere", False):
xml_lines.append(f" <premiere />")
if custom_data.get('new', False):
xml_lines.append(f' <new />')
if custom_data.get("new", False):
xml_lines.append(f" <new />")
if custom_data.get('live', False):
xml_lines.append(f' <live />')
except Exception as e:
xml_lines.append(f' <!-- Error parsing custom properties: {html.escape(str(e))} -->')
xml_lines.append(
f" <!-- Error parsing custom properties: {html.escape(str(e))} -->"
)
xml_lines.append(' </programme>')
xml_lines.append(" </programme>")
xml_lines.append('</tv>')
xml_lines.append("</tv>")
xml_content = "\n".join(xml_lines)
response = HttpResponse(xml_content, content_type="application/xml")
response['Content-Disposition'] = 'attachment; filename="epg.xml"'
response["Content-Disposition"] = 'attachment; filename="epg.xml"'
return response
def xc_get_user(request):
username = request.GET.get("username")
password = request.GET.get("password")
if not username or not password:
return None
user = get_object_or_404(User, username=username)
custom_properties = (
json.loads(user.custom_properties) if user.custom_properties else {}
)
if "xc_password" not in custom_properties:
return None
if custom_properties["xc_password"] != password:
return None
return user
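xc_get_user above authenticates Xtream-style clients against an xc_password stored in the user's custom_properties JSON. A self-contained sketch of just that comparison (the JSON layout beyond the xc_password key is an assumption):

import json

stored_custom_properties = json.dumps({"xc_password": "s3cret"})  # example value

def xc_credentials_match(custom_properties_json, supplied_password):
    props = json.loads(custom_properties_json) if custom_properties_json else {}
    return props.get("xc_password") == supplied_password

assert xc_credentials_match(stored_custom_properties, "s3cret")
assert not xc_credentials_match(stored_custom_properties, "wrong")
assert not xc_credentials_match(None, "s3cret")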
def xc_get_info(request, full=False):
if not network_access_allowed(request, 'XC_API'):
return JsonResponse({'error': 'Forbidden'}, status=403)
user = xc_get_user(request)
if user is None:
return JsonResponse({'error': 'Unauthorized'}, status=401)
raw_host = request.get_host()
if ":" in raw_host:
hostname, port = raw_host.split(":", 1)
else:
hostname = raw_host
port = "443" if request.is_secure() else "80"
info = {
"user_info": {
"username": request.GET.get("username"),
"password": request.GET.get("password"),
"message": "",
"auth": 1,
"status": "Active",
"exp_date": "1715062090",
"max_connections": "99",
"allowed_output_formats": [
"ts",
],
},
"server_info": {
"url": hostname,
"server_protocol": request.scheme,
"port": port,
"timezone": get_localzone().key,
"timestamp_now": int(time.time()),
"time_now": datetime.now().strftime("%Y-%m-%d %H:%M:%S"),
"process": True,
},
}
if full:
info['categories'] = {
"series": [],
"movie": [],
"live": xc_get_live_categories(user),
}
info['available_channels'] = {channel["stream_id"]: channel for channel in xc_get_live_streams(request, user, request.GET.get("category_id"))}
return info
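The server_info block derives hostname and port from the Host header, falling back to 443 or 80 when no explicit port is present. A quick sketch of that split (hostnames and ports are examples):

def split_host(raw_host, is_secure):
    # Same logic as the raw_host handling above.
    if ":" in raw_host:
        hostname, port = raw_host.split(":", 1)
    else:
        hostname, port = raw_host, ("443" if is_secure else "80")
    return hostname, port

assert split_host("dispatcharr.example.com:9191", False) == ("dispatcharr.example.com", "9191")
assert split_host("dispatcharr.example.com", True) == ("dispatcharr.example.com", "443")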
def xc_player_api(request, full=False):
if not network_access_allowed(request, 'XC_API'):
return JsonResponse({'error': 'Forbidden'}, status=403)
action = request.GET.get("action")
user = xc_get_user(request)
if user is None:
return JsonResponse({'error': 'Unauthorized'}, status=401)
server_info = xc_get_info(request)
if not action:
return JsonResponse(server_info)
if action == "get_live_categories":
return JsonResponse(xc_get_live_categories(user), safe=False)
if action == "get_live_streams":
return JsonResponse(xc_get_live_streams(request, user, request.GET.get("category_id")), safe=False)
if action == "get_short_epg":
return JsonResponse(xc_get_epg(request, user, short=True), safe=False)
if action == "get_simple_data_table":
return JsonResponse(xc_get_epg(request, user, short=False), safe=False)
# Endpoints not implemented, but still provide a response
if action in [
"get_vod_categories",
"get_vod_streams",
"get_series",
"get_series_categories",
"get_series_info",
"get_vod_info",
]:
return JsonResponse([], safe=False)
raise Http404()
def xc_panel_api(request):
if not network_access_allowed(request, 'XC_API'):
return JsonResponse({'error': 'Forbidden'}, status=403)
user = xc_get_user(request)
if user is None:
return JsonResponse({'error': 'Unauthorized'}, status=401)
return JsonResponse(xc_get_info(request, True))
def xc_get(request):
if not network_access_allowed(request, 'XC_API'):
return JsonResponse({'error': 'Forbidden'}, status=403)
action = request.GET.get("action")
user = xc_get_user(request)
if user is None:
return JsonResponse({'error': 'Unauthorized'}, status=401)
return generate_m3u(request, None, user)
def xc_xmltv(request):
if not network_access_allowed(request, 'XC_API'):
return JsonResponse({'error': 'Forbidden'}, status=403)
user = xc_get_user(request)
if user is None:
return JsonResponse({'error': 'Unauthorized'}, status=401)
return generate_epg(request, None, user)
def xc_get_live_categories(user):
response = []
if user.user_level == 0:
filters = {
"channels__channelprofilemembership__enabled": True,
"channels__user_level": 0,
}
if user.channel_profiles.count() != 0:
# Only get data from active profile
channel_profiles = user.channel_profiles.all()
filters["channels__channelprofilemembership__channel_profile__in"] = (
channel_profiles
)
channel_groups = ChannelGroup.objects.filter(**filters).distinct()
else:
channel_groups = ChannelGroup.objects.filter(
channels__isnull=False, channels__user_level__lte=user.user_level
).distinct()
for group in channel_groups:
response.append(
{
"category_id": group.id,
"category_name": group.name,
"parent_id": 0,
}
)
return response
def xc_get_live_streams(request, user, category_id=None):
streams = []
if user.user_level == 0:
filters = {
"channelprofilemembership__enabled": True,
"user_level__lte": user.user_level,
}
if user.channel_profiles.count() > 0:
# Only get data from active profile
channel_profiles = user.channel_profiles.all()
filters["channelprofilemembership__channel_profile__in"] = channel_profiles
if category_id is not None:
filters["channel_group__id"] = category_id
channels = Channel.objects.filter(**filters)
else:
if not category_id:
channels = Channel.objects.filter(user_level__lte=user.user_level)
else:
channels = Channel.objects.filter(
channel_group__id=category_id, user_level__lte=user.user_level
)
for channel in channels:
streams.append(
{
"num": int(channel.channel_number) if channel.channel_number.is_integer() else channel.channel_number,
"name": channel.name,
"stream_type": "live",
"stream_id": channel.id,
"stream_icon": (
None
if not channel.logo
else request.build_absolute_uri(
reverse("api:channels:logo-cache", args=[channel.logo.id])
)
),
"epg_channel_id": int(channel.channel_number) if channel.channel_number.is_integer() else channel.channel_number,
"added": int(time.time()), # @TODO: make this the actual created date
"is_adult": 0,
"category_id": channel.channel_group.id,
"category_ids": [channel.channel_group.id],
"custom_sid": None,
"tv_archive": 0,
"direct_source": "",
"tv_archive_duration": 0,
}
)
return streams
def xc_get_epg(request, user, short=False):
channel_id = request.GET.get('stream_id')
if not channel_id:
raise Http404()
channel = None
if user.user_level < 10:
filters = {
"id": channel_id,
"channelprofilemembership__enabled": True,
"user_level__lte": user.user_level,
}
if user.channel_profiles.count() > 0:
channel_profiles = user.channel_profiles.all()
filters["channelprofilemembership__channel_profile__in"] = channel_profiles
channel = get_object_or_404(Channel, **filters)
else:
channel = get_object_or_404(Channel, id=channel_id)
if not channel:
raise Http404()
limit = int(request.GET.get("limit", 4))  # GET params arrive as strings; slicing needs an int
if channel.epg_data:
if not short:
programs = channel.epg_data.programs.filter(
start_time__gte=timezone.now()
).order_by('start_time')
else:
programs = channel.epg_data.programs.all().order_by('start_time')[:limit]
else:
programs = generate_dummy_programs(channel_id=channel_id, channel_name=channel.name)
output = {"epg_listings": []}
for program in programs:
id = "0"
epg_id = "0"
title = program['title'] if isinstance(program, dict) else program.title
description = program['description'] if isinstance(program, dict) else program.description
start = program["start_time"] if isinstance(program, dict) else program.start_time
end = program["end_time"] if isinstance(program, dict) else program.end_time
program_output = {
"id": f"{id}",
"epg_id": f"{epg_id}",
"title": base64.b64encode(title.encode()).decode(),
"lang": "",
"start": start.strftime("%Y%m%d%H%M%S"),
"end": end.strftime("%Y%m%d%H%M%S"),
"description": base64.b64encode(description.encode()).decode(),
"channel_id": int(channel.channel_number) if channel.channel_number.is_integer() else channel.channel_number,
"start_timestamp": int(start.timestamp()),
"stop_timestamp": int(end.timestamp()),
"stream_id": f"{channel_id}",
}
if not short:
program_output["now_playing"] = 1 if start <= timezone.now() <= end else 0
program_output["has_archive"] = "0"
output['epg_listings'].append(program_output)
return output

View file

@ -386,90 +386,99 @@ class StreamManager:
buffer = b""
last_stats_line = b""
# Read in small chunks
# Read byte by byte for immediate detection
while self.transcode_process and self.transcode_process.stderr:
try:
chunk = self.transcode_process.stderr.read(256) # Smaller chunks for real-time processing
if not chunk:
# Read one byte at a time for immediate processing
byte = self.transcode_process.stderr.read(1)
if not byte:
break
buffer += chunk
buffer += byte
# Look for stats updates (overwrite previous stats with \r)
if b'\r' in buffer and b"frame=" in buffer:
# Split on \r to handle overwriting stats
parts = buffer.split(b'\r')
# Check for frame= at the start of buffer (new stats line)
if buffer == b"frame=":
# We detected the start of a stats line, read until we get a complete line
# or hit a carriage return (which overwrites the previous stats)
while True:
next_byte = self.transcode_process.stderr.read(1)
if not next_byte:
break
# Process all parts except the last (which might be incomplete)
for i, part in enumerate(parts[:-1]):
if part.strip():
if part.startswith(b"frame=") or b"frame=" in part:
# This is a stats line - keep it intact
try:
stats_text = part.decode('utf-8', errors='ignore').strip()
if stats_text and "frame=" in stats_text:
# Extract just the stats portion if there's other content
if "frame=" in stats_text:
frame_start = stats_text.find("frame=")
stats_text = stats_text[frame_start:]
buffer += next_byte
self._parse_ffmpeg_stats(stats_text)
self._log_stderr_content(stats_text)
last_stats_line = part
except Exception as e:
logger.debug(f"Error parsing stats line: {e}")
else:
# Regular content - process line by line
line_content = part
while b'\n' in line_content:
line, line_content = line_content.split(b'\n', 1)
if line.strip():
self._log_stderr_content(line.decode('utf-8', errors='ignore'))
# Break on carriage return (stats overwrite) or newline
if next_byte in (b'\r', b'\n'):
break
# Handle remaining content without newline
if line_content.strip():
self._log_stderr_content(line_content.decode('utf-8', errors='ignore'))
# Also break if we have enough data for a typical stats line
if len(buffer) > 200: # Typical stats line length
break
# Keep the last part as it might be incomplete
buffer = parts[-1]
# Process the stats line immediately
if buffer.strip():
try:
stats_text = buffer.decode('utf-8', errors='ignore').strip()
if stats_text and "frame=" in stats_text:
self._parse_ffmpeg_stats(stats_text)
self._log_stderr_content(stats_text)
except Exception as e:
logger.debug(f"Error parsing immediate stats line: {e}")
# Clear buffer after processing
buffer = b""
continue
# Handle regular line breaks for non-stats content
elif b'\n' in buffer:
while b'\n' in buffer:
line, buffer = buffer.split(b'\n', 1)
if line.strip():
line_text = line.decode('utf-8', errors='ignore').strip()
if line_text and not line_text.startswith("frame="):
self._log_stderr_content(line_text)
elif byte == b'\n':
if buffer.strip():
line_text = buffer.decode('utf-8', errors='ignore').strip()
if line_text and not line_text.startswith("frame="):
self._log_stderr_content(line_text)
buffer = b""
# If we have a potential stats line in buffer without line breaks
elif b"frame=" in buffer and (b"speed=" in buffer or len(buffer) > 200):
# We likely have a complete or substantial stats line
try:
stats_text = buffer.decode('utf-8', errors='ignore').strip()
if "frame=" in stats_text:
# Extract just the stats portion
frame_start = stats_text.find("frame=")
stats_text = stats_text[frame_start:]
# Handle carriage returns (potential stats overwrite)
elif byte == b'\r':
# Check if this might be a stats line
if b"frame=" in buffer:
try:
stats_text = buffer.decode('utf-8', errors='ignore').strip()
if stats_text and "frame=" in stats_text:
self._parse_ffmpeg_stats(stats_text)
self._log_stderr_content(stats_text)
except Exception as e:
logger.debug(f"Error parsing stats on carriage return: {e}")
elif buffer.strip():
# Regular content with carriage return
line_text = buffer.decode('utf-8', errors='ignore').strip()
if line_text:
self._log_stderr_content(line_text)
buffer = b""
self._parse_ffmpeg_stats(stats_text)
self._log_stderr_content(stats_text)
buffer = b"" # Clear buffer after processing
except Exception as e:
logger.debug(f"Error parsing buffered stats: {e}")
# Prevent buffer from growing too large
if len(buffer) > 4096:
# Try to preserve any potential stats line at the end
if b"frame=" in buffer[-1024:]:
buffer = buffer[-1024:]
else:
buffer = buffer[-512:]
# Prevent buffer from growing too large for non-stats content
elif len(buffer) > 1024 and b"frame=" not in buffer:
# Process whatever we have if it's not a stats line
if buffer.strip():
line_text = buffer.decode('utf-8', errors='ignore').strip()
if line_text:
self._log_stderr_content(line_text)
buffer = b""
except Exception as e:
logger.error(f"Error reading stderr: {e}")
logger.error(f"Error reading stderr byte: {e}")
break
# Process any remaining buffer content
if buffer.strip():
try:
remaining_text = buffer.decode('utf-8', errors='ignore').strip()
if remaining_text:
if "frame=" in remaining_text:
self._parse_ffmpeg_stats(remaining_text)
self._log_stderr_content(remaining_text)
except Exception as e:
logger.debug(f"Error processing remaining buffer: {e}")
except Exception as e:
# Catch any other exceptions in the thread to prevent crashes
try:
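# Editor's sketch: _parse_ffmpeg_stats() is not shown in this diff; the kind of
# parsing it implies for the usual ffmpeg progress format is roughly the following
# (function name and fields are illustrative assumptions):
import re

def parse_ffmpeg_stats(stats_text: str) -> dict:
    # "frame=  123 fps= 25 bitrate=1800.5kbits/s speed=1.01x" ->
    # {"frame": "123", "fps": "25", "bitrate": "1800.5kbits/s", "speed": "1.01x"}
    return {key: value for key, value in re.findall(r"(\w+)=\s*(\S+)", stats_text)}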
@ -488,13 +497,18 @@ class StreamManager:
content_lower = content.lower()
# Check for stream info lines first and delegate to ChannelService
# Only parse INPUT streams (which have hex identifiers like [0x100]) not output streams
if "stream #" in content_lower and ("video:" in content_lower or "audio:" in content_lower):
from .services.channel_service import ChannelService
if "video:" in content_lower:
ChannelService.parse_and_store_stream_info(self.channel_id, content, "video")
elif "audio:" in content_lower:
ChannelService.parse_and_store_stream_info(self.channel_id, content, "audio")
# Check if this is an input stream by looking for the hex identifier pattern [0x...]
if "stream #0:" in content_lower and "[0x" in content_lower:
from .services.channel_service import ChannelService
if "video:" in content_lower:
ChannelService.parse_and_store_stream_info(self.channel_id, content, "video")
elif "audio:" in content_lower:
ChannelService.parse_and_store_stream_info(self.channel_id, content, "audio")
else:
# This is likely an output stream (no hex identifier), don't parse it
logger.debug(f"Skipping output stream info: {content}")
# Determine log level based on content
if any(keyword in content_lower for keyword in ['error', 'failed', 'cannot', 'invalid', 'corrupt']):
logger.error(f"FFmpeg stderr: {content}")

View file

@ -17,7 +17,6 @@ logger = get_logger()
def get_stream_object(id: str):
try:
uuid_obj = UUID(id, version=4)
logger.info(f"Fetching channel ID {id}")
return get_object_or_404(Channel, uuid=id)
except Exception:

View file

@ -3,6 +3,7 @@ import threading
import time
import random
import re
import pathlib
from django.http import StreamingHttpResponse, JsonResponse, HttpResponseRedirect
from django.views.decorators.csrf import csrf_exempt
from django.shortcuts import get_object_or_404
@ -15,22 +16,38 @@ from .redis_keys import RedisKeys
import logging
from apps.channels.models import Channel, Stream
from apps.m3u.models import M3UAccount, M3UAccountProfile
from apps.accounts.models import User
from core.models import UserAgent, CoreSettings, PROXY_PROFILE_NAME
from rest_framework.decorators import api_view, permission_classes
from rest_framework.permissions import IsAuthenticated
from rest_framework.response import Response
from apps.accounts.permissions import (
IsAdmin,
permission_classes_by_method,
permission_classes_by_action,
)
from .constants import ChannelState, EventType, StreamType, ChannelMetadataField
from .config_helper import ConfigHelper
from .services.channel_service import ChannelService
from .url_utils import generate_stream_url, transform_url, get_stream_info_for_switch, get_stream_object, get_alternate_streams
from .url_utils import (
generate_stream_url,
transform_url,
get_stream_info_for_switch,
get_stream_object,
get_alternate_streams,
)
from .utils import get_logger
from uuid import UUID
import gevent
from dispatcharr.utils import network_access_allowed
logger = get_logger()
@api_view(['GET'])
@api_view(["GET"])
def stream_ts(request, channel_id):
if not network_access_allowed(request, "STREAMS"):
return JsonResponse({"error": "Forbidden"}, status=403)
"""Stream TS data to client with immediate response and keep-alive packets during initialization"""
channel = get_stream_object(channel_id)
@ -44,10 +61,12 @@ def stream_ts(request, channel_id):
logger.info(f"[{client_id}] Requested stream for channel {channel_id}")
# Extract client user agent early
for header in ['HTTP_USER_AGENT', 'User-Agent', 'user-agent']:
if (header in request.META):
for header in ["HTTP_USER_AGENT", "User-Agent", "user-agent"]:
if header in request.META:
client_user_agent = request.META[header]
logger.debug(f"[{client_id}] Client connected with user agent: {client_user_agent}")
logger.debug(
f"[{client_id}] Client connected with user agent: {client_user_agent}"
)
break
# Check if we need to reinitialize the channel
@ -60,38 +79,58 @@ def stream_ts(request, channel_id):
metadata_key = RedisKeys.channel_metadata(channel_id)
if proxy_server.redis_client.exists(metadata_key):
metadata = proxy_server.redis_client.hgetall(metadata_key)
state_field = ChannelMetadataField.STATE.encode('utf-8')
state_field = ChannelMetadataField.STATE.encode("utf-8")
if state_field in metadata:
channel_state = metadata[state_field].decode('utf-8')
channel_state = metadata[state_field].decode("utf-8")
# IMPROVED: Check for *any* state that indicates initialization is in progress
active_states = [ChannelState.INITIALIZING, ChannelState.CONNECTING, ChannelState.WAITING_FOR_CLIENTS, ChannelState.ACTIVE]
active_states = [
ChannelState.INITIALIZING,
ChannelState.CONNECTING,
ChannelState.WAITING_FOR_CLIENTS,
ChannelState.ACTIVE,
]
if channel_state in active_states:
# Channel is being initialized or already active - no need for reinitialization
needs_initialization = False
logger.debug(f"[{client_id}] Channel {channel_id} already in state {channel_state}, skipping initialization")
logger.debug(
f"[{client_id}] Channel {channel_id} already in state {channel_state}, skipping initialization"
)
# Special handling for initializing/connecting states
if channel_state in [ChannelState.INITIALIZING, ChannelState.CONNECTING]:
if channel_state in [
ChannelState.INITIALIZING,
ChannelState.CONNECTING,
]:
channel_initializing = True
logger.debug(f"[{client_id}] Channel {channel_id} is still initializing, client will wait for completion")
logger.debug(
f"[{client_id}] Channel {channel_id} is still initializing, client will wait for completion"
)
else:
# Only check for owner if channel is in a valid state
owner_field = ChannelMetadataField.OWNER.encode('utf-8')
owner_field = ChannelMetadataField.OWNER.encode("utf-8")
if owner_field in metadata:
owner = metadata[owner_field].decode('utf-8')
owner = metadata[owner_field].decode("utf-8")
owner_heartbeat_key = f"ts_proxy:worker:{owner}:heartbeat"
if proxy_server.redis_client.exists(owner_heartbeat_key):
# Owner is still active, so we don't need to reinitialize
needs_initialization = False
logger.debug(f"[{client_id}] Channel {channel_id} has active owner {owner}")
logger.debug(
f"[{client_id}] Channel {channel_id} has active owner {owner}"
)
# Start initialization if needed
if needs_initialization or not proxy_server.check_if_channel_exists(channel_id):
logger.info(f"[{client_id}] Starting channel {channel_id} initialization")
# Force cleanup of any previous instance if in terminal state
if channel_state in [ChannelState.ERROR, ChannelState.STOPPING, ChannelState.STOPPED]:
logger.warning(f"[{client_id}] Channel {channel_id} in state {channel_state}, forcing cleanup")
if channel_state in [
ChannelState.ERROR,
ChannelState.STOPPING,
ChannelState.STOPPED,
]:
logger.warning(
f"[{client_id}] Channel {channel_id} in state {channel_state}, forcing cleanup"
)
proxy_server.stop_channel(channel_id)
# Use max retry attempts and connection timeout from config
@ -107,67 +146,90 @@ def stream_ts(request, channel_id):
# Try to get a stream with configured retries
for attempt in range(max_retries):
stream_url, stream_user_agent, transcode, profile_value = generate_stream_url(channel_id)
stream_url, stream_user_agent, transcode, profile_value = (
generate_stream_url(channel_id)
)
if stream_url is not None:
logger.info(f"[{client_id}] Successfully obtained stream for channel {channel_id}")
logger.info(
f"[{client_id}] Successfully obtained stream for channel {channel_id}"
)
break
# If we failed because there are no streams assigned, don't retry
_, _, error_reason = channel.get_stream()
if error_reason and 'maximum connection limits' not in error_reason:
logger.warning(f"[{client_id}] Can't retry - error not related to connection limits: {error_reason}")
if error_reason and "maximum connection limits" not in error_reason:
logger.warning(
f"[{client_id}] Can't retry - error not related to connection limits: {error_reason}"
)
break
# Don't exceed the overall connection timeout
if time.time() - wait_start_time > retry_timeout:
logger.warning(f"[{client_id}] Connection wait timeout exceeded ({retry_timeout}s)")
logger.warning(
f"[{client_id}] Connection wait timeout exceeded ({retry_timeout}s)"
)
break
# Wait before retrying (using exponential backoff with a cap)
wait_time = min(0.5 * (2 ** attempt), 2.0) # Caps at 2 seconds
logger.info(f"[{client_id}] Waiting {wait_time:.1f}s for a connection to become available (attempt {attempt+1}/{max_retries})")
gevent.sleep(wait_time) # FIXED: Using gevent.sleep instead of time.sleep
wait_time = min(0.5 * (2**attempt), 2.0) # Caps at 2 seconds
logger.info(
f"[{client_id}] Waiting {wait_time:.1f}s for a connection to become available (attempt {attempt+1}/{max_retries})"
)
gevent.sleep(
wait_time
) # FIXED: Using gevent.sleep instead of time.sleep
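# Editor's note: the capped exponential backoff above yields the following wait
# sequence (attempt is zero-based):
waits = [min(0.5 * (2**attempt), 2.0) for attempt in range(5)]
# -> [0.5, 1.0, 2.0, 2.0, 2.0]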
if stream_url is None:
# Make sure to release any stream locks that might have been acquired
if hasattr(channel, 'streams') and channel.streams.exists():
if hasattr(channel, "streams") and channel.streams.exists():
for stream in channel.streams.all():
try:
stream.release_stream()
logger.info(f"[{client_id}] Released stream {stream.id} for channel {channel_id}")
logger.info(
f"[{client_id}] Released stream {stream.id} for channel {channel_id}"
)
except Exception as e:
logger.error(f"[{client_id}] Error releasing stream: {e}")
# Get the specific error message if available
wait_duration = f"{int(time.time() - wait_start_time)}s"
error_msg = error_reason if error_reason else 'No available streams for this channel'
return JsonResponse({
'error': error_msg,
'waited': wait_duration
}, status=503) # 503 Service Unavailable is appropriate here
error_msg = (
error_reason
if error_reason
else "No available streams for this channel"
)
return JsonResponse(
{"error": error_msg, "waited": wait_duration}, status=503
) # 503 Service Unavailable is appropriate here
# Get the stream ID from the channel
stream_id, m3u_profile_id, _ = channel.get_stream()
logger.info(f"Channel {channel_id} using stream ID {stream_id}, m3u account profile ID {m3u_profile_id}")
logger.info(
f"Channel {channel_id} using stream ID {stream_id}, m3u account profile ID {m3u_profile_id}"
)
# Generate transcode command if needed
stream_profile = channel.get_stream_profile()
if stream_profile.is_redirect():
# Validate the stream URL before redirecting
from .url_utils import validate_stream_url, get_alternate_streams, get_stream_info_for_switch
from .url_utils import (
validate_stream_url,
get_alternate_streams,
get_stream_info_for_switch,
)
# Try initial URL
logger.info(f"[{client_id}] Validating redirect URL: {stream_url}")
is_valid, final_url, status_code, message = validate_stream_url(
stream_url,
user_agent=stream_user_agent,
timeout=(5, 5)
stream_url, user_agent=stream_user_agent, timeout=(5, 5)
)
# If first URL doesn't validate, try alternates
if not is_valid:
logger.warning(f"[{client_id}] Primary stream URL failed validation: {message}")
logger.warning(
f"[{client_id}] Primary stream URL failed validation: {message}"
)
# Track tried streams to avoid loops
tried_streams = {stream_id}
@ -177,49 +239,71 @@ def stream_ts(request, channel_id):
# Try each alternate until one works
for alt in alternates:
if alt['stream_id'] in tried_streams:
if alt["stream_id"] in tried_streams:
continue
tried_streams.add(alt['stream_id'])
tried_streams.add(alt["stream_id"])
# Get stream info
alt_info = get_stream_info_for_switch(channel_id, alt['stream_id'])
if 'error' in alt_info:
logger.warning(f"[{client_id}] Error getting alternate stream info: {alt_info['error']}")
alt_info = get_stream_info_for_switch(
channel_id, alt["stream_id"]
)
if "error" in alt_info:
logger.warning(
f"[{client_id}] Error getting alternate stream info: {alt_info['error']}"
)
continue
# Validate the alternate URL
logger.info(f"[{client_id}] Trying alternate stream #{alt['stream_id']}: {alt_info['url']}")
logger.info(
f"[{client_id}] Trying alternate stream #{alt['stream_id']}: {alt_info['url']}"
)
is_valid, final_url, status_code, message = validate_stream_url(
alt_info['url'],
user_agent=alt_info['user_agent'],
timeout=(5, 5)
alt_info["url"],
user_agent=alt_info["user_agent"],
timeout=(5, 5),
)
if is_valid:
logger.info(f"[{client_id}] Alternate stream #{alt['stream_id']} validated successfully")
logger.info(
f"[{client_id}] Alternate stream #{alt['stream_id']} validated successfully"
)
break
else:
logger.warning(f"[{client_id}] Alternate stream #{alt['stream_id']} failed validation: {message}")
logger.warning(
f"[{client_id}] Alternate stream #{alt['stream_id']} failed validation: {message}"
)
# Release stream lock before redirecting
channel.release_stream()
# Final decision based on validation results
if is_valid:
logger.info(f"[{client_id}] Redirecting to validated URL: {final_url} ({message})")
logger.info(
f"[{client_id}] Redirecting to validated URL: {final_url} ({message})"
)
return HttpResponseRedirect(final_url)
else:
logger.error(f"[{client_id}] All available redirect URLs failed validation")
return JsonResponse({
'error': 'All available streams failed validation'
}, status=502) # 502 Bad Gateway
logger.error(
f"[{client_id}] All available redirect URLs failed validation"
)
return JsonResponse(
{"error": "All available streams failed validation"}, status=502
) # 502 Bad Gateway
# Initialize channel with the stream's user agent (not the client's)
success = ChannelService.initialize_channel(
channel_id, stream_url, stream_user_agent, transcode, profile_value, stream_id, m3u_profile_id
channel_id,
stream_url,
stream_user_agent,
transcode,
profile_value,
stream_id,
m3u_profile_id,
)
if not success:
return JsonResponse({'error': 'Failed to initialize channel'}, status=500)
return JsonResponse(
{"error": "Failed to initialize channel"}, status=500
)
# If we're the owner, wait for connection to establish
if proxy_server.am_i_owner(channel_id):
@ -230,7 +314,9 @@ def stream_ts(request, channel_id):
while not manager.connected:
if time.time() - wait_start > timeout:
proxy_server.stop_channel(channel_id)
return JsonResponse({'error': 'Connection timeout'}, status=504)
return JsonResponse(
{"error": "Connection timeout"}, status=504
)
# Check if this manager should keep retrying or stop
if not manager.should_retry():
@ -240,41 +326,68 @@ def stream_ts(request, channel_id):
if proxy_server.redis_client:
try:
state_bytes = proxy_server.redis_client.hget(metadata_key, ChannelMetadataField.STATE)
state_bytes = proxy_server.redis_client.hget(
metadata_key, ChannelMetadataField.STATE
)
if state_bytes:
current_state = state_bytes.decode('utf-8')
logger.debug(f"[{client_id}] Current state of channel {channel_id}: {current_state}")
current_state = state_bytes.decode("utf-8")
logger.debug(
f"[{client_id}] Current state of channel {channel_id}: {current_state}"
)
except Exception as e:
logger.warning(f"[{client_id}] Error getting channel state: {e}")
logger.warning(
f"[{client_id}] Error getting channel state: {e}"
)
# Allow normal transitional states to continue
if current_state in [ChannelState.INITIALIZING, ChannelState.CONNECTING]:
logger.info(f"[{client_id}] Channel {channel_id} is in {current_state} state, continuing to wait")
if current_state in [
ChannelState.INITIALIZING,
ChannelState.CONNECTING,
]:
logger.info(
f"[{client_id}] Channel {channel_id} is in {current_state} state, continuing to wait"
)
# Reset wait timer to allow the transition to complete
wait_start = time.time()
continue
# Check if we're switching URLs
if hasattr(manager, 'url_switching') and manager.url_switching:
logger.info(f"[{client_id}] Stream manager is currently switching URLs for channel {channel_id}")
if (
hasattr(manager, "url_switching")
and manager.url_switching
):
logger.info(
f"[{client_id}] Stream manager is currently switching URLs for channel {channel_id}"
)
# Reset wait timer to give the switch a chance
wait_start = time.time()
continue
# If we reach here, we've exhausted retries and the channel isn't in a valid transitional state
logger.warning(f"[{client_id}] Channel {channel_id} failed to connect and is not in transitional state")
logger.warning(
f"[{client_id}] Channel {channel_id} failed to connect and is not in transitional state"
)
proxy_server.stop_channel(channel_id)
return JsonResponse({'error': 'Failed to connect'}, status=502)
return JsonResponse(
{"error": "Failed to connect"}, status=502
)
gevent.sleep(0.1) # FIXED: Using gevent.sleep instead of time.sleep
gevent.sleep(
0.1
) # FIXED: Using gevent.sleep instead of time.sleep
logger.info(f"[{client_id}] Successfully initialized channel {channel_id}")
channel_initializing = True
# Register client - can do this regardless of initialization state
# Create local resources if needed
if channel_id not in proxy_server.stream_buffers or channel_id not in proxy_server.client_managers:
logger.debug(f"[{client_id}] Channel {channel_id} exists in Redis but not initialized in this worker - initializing now")
if (
channel_id not in proxy_server.stream_buffers
or channel_id not in proxy_server.client_managers
):
logger.debug(
f"[{client_id}] Channel {channel_id} exists in Redis but not initialized in this worker - initializing now"
)
# Get URL from Redis metadata
url = None
@ -282,32 +395,54 @@ def stream_ts(request, channel_id):
if proxy_server.redis_client:
metadata_key = RedisKeys.channel_metadata(channel_id)
url_bytes = proxy_server.redis_client.hget(metadata_key, ChannelMetadataField.URL)
ua_bytes = proxy_server.redis_client.hget(metadata_key, ChannelMetadataField.USER_AGENT)
profile_bytes = proxy_server.redis_client.hget(metadata_key, ChannelMetadataField.STREAM_PROFILE)
url_bytes = proxy_server.redis_client.hget(
metadata_key, ChannelMetadataField.URL
)
ua_bytes = proxy_server.redis_client.hget(
metadata_key, ChannelMetadataField.USER_AGENT
)
profile_bytes = proxy_server.redis_client.hget(
metadata_key, ChannelMetadataField.STREAM_PROFILE
)
if url_bytes:
url = url_bytes.decode('utf-8')
url = url_bytes.decode("utf-8")
if ua_bytes:
stream_user_agent = ua_bytes.decode('utf-8')
stream_user_agent = ua_bytes.decode("utf-8")
# Extract transcode setting from Redis
if profile_bytes:
profile_str = profile_bytes.decode('utf-8')
use_transcode = (profile_str == PROXY_PROFILE_NAME or profile_str == 'None')
logger.debug(f"Using profile '{profile_str}' for channel {channel_id}, transcode={use_transcode}")
profile_str = profile_bytes.decode("utf-8")
use_transcode = (
profile_str == PROXY_PROFILE_NAME or profile_str == "None"
)
logger.debug(
f"Using profile '{profile_str}' for channel {channel_id}, transcode={use_transcode}"
)
else:
# Default settings when profile not found in Redis
profile_str = 'None' # Default profile name
use_transcode = False # Default to direct streaming without transcoding
logger.debug(f"No profile found in Redis for channel {channel_id}, defaulting to transcode={use_transcode}")
profile_str = "None" # Default profile name
use_transcode = (
False # Default to direct streaming without transcoding
)
logger.debug(
f"No profile found in Redis for channel {channel_id}, defaulting to transcode={use_transcode}"
)
# Use client_user_agent as fallback if stream_user_agent is None
success = proxy_server.initialize_channel(url, channel_id, stream_user_agent or client_user_agent, use_transcode)
success = proxy_server.initialize_channel(
url, channel_id, stream_user_agent or client_user_agent, use_transcode
)
if not success:
logger.error(f"[{client_id}] Failed to initialize channel {channel_id} locally")
return JsonResponse({'error': 'Failed to initialize channel locally'}, status=500)
logger.error(
f"[{client_id}] Failed to initialize channel {channel_id} locally"
)
return JsonResponse(
{"error": "Failed to initialize channel locally"}, status=500
)
logger.info(f"[{client_id}] Successfully initialized channel {channel_id} locally")
logger.info(
f"[{client_id}] Successfully initialized channel {channel_id} locally"
)
# Register client
buffer = proxy_server.stream_buffers[channel_id]
@ -322,53 +457,99 @@ def stream_ts(request, channel_id):
# Return the StreamingHttpResponse from the main function
response = StreamingHttpResponse(
streaming_content=generate(),
content_type='video/mp2t'
streaming_content=generate(), content_type="video/mp2t"
)
response['Cache-Control'] = 'no-cache'
response["Cache-Control"] = "no-cache"
return response
except Exception as e:
logger.error(f"Error in stream_ts: {e}", exc_info=True)
return JsonResponse({'error': str(e)}, status=500)
return JsonResponse({"error": str(e)}, status=500)
@api_view(["GET"])
def stream_xc(request, username, password, channel_id):
user = get_object_or_404(User, username=username)
extension = pathlib.Path(channel_id).suffix
channel_id = pathlib.Path(channel_id).stem
custom_properties = (
json.loads(user.custom_properties) if user.custom_properties else {}
)
if "xc_password" not in custom_properties:
return Response({"error": "Invalid credentials"}, status=401)
if custom_properties["xc_password"] != password:
return Response({"error": "Invalid credentials"}, status=401)
print(f"Fetchin channel with ID: {channel_id}")
if user.user_level < 10:
filters = {
"id": int(channel_id),
"channelprofilemembership__enabled": True,
"user_level__lte": user.user_level,
}
if user.channel_profiles.count() > 0:
channel_profiles = user.channel_profiles.all()
filters["channelprofilemembership__channel_profile__in"] = channel_profiles
channel = Channel.objects.filter(**filters).distinct().first()
if not channel:
return JsonResponse({"error": "Not found"}, status=404)
else:
channel = get_object_or_404(Channel, id=channel_id)
# @TODO: we've got the file 'type' via extension, support this when we support multiple outputs
return stream_ts(request._request, channel.uuid)
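# Editor's sketch of the path split performed above (the requested name is
# illustrative):
import pathlib

requested = "1234.ts"
extension = pathlib.Path(requested).suffix  # ".ts" - kept for future multi-output support
channel_id = pathlib.Path(requested).stem   # "1234" - used to resolve the Channel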
@csrf_exempt
@api_view(['POST'])
@permission_classes([IsAuthenticated])
@api_view(["POST"])
@permission_classes([IsAdmin])
def change_stream(request, channel_id):
"""Change stream URL for existing channel with enhanced diagnostics"""
proxy_server = ProxyServer.get_instance()
try:
data = json.loads(request.body)
new_url = data.get('url')
user_agent = data.get('user_agent')
stream_id = data.get('stream_id')
new_url = data.get("url")
user_agent = data.get("user_agent")
stream_id = data.get("stream_id")
# If stream_id is provided, get the URL and user_agent from it
if stream_id:
logger.info(f"Stream ID {stream_id} provided, looking up stream info for channel {channel_id}")
logger.info(
f"Stream ID {stream_id} provided, looking up stream info for channel {channel_id}"
)
stream_info = get_stream_info_for_switch(channel_id, stream_id)
if 'error' in stream_info:
return JsonResponse({
'error': stream_info['error'],
'stream_id': stream_id
}, status=404)
if "error" in stream_info:
return JsonResponse(
{"error": stream_info["error"], "stream_id": stream_id}, status=404
)
# Use the info from the stream
new_url = stream_info['url']
user_agent = stream_info['user_agent']
m3u_profile_id = stream_info.get('m3u_profile_id')
new_url = stream_info["url"]
user_agent = stream_info["user_agent"]
m3u_profile_id = stream_info.get("m3u_profile_id")
# Stream ID will be passed to change_stream_url later
elif not new_url:
return JsonResponse({'error': 'Either url or stream_id must be provided'}, status=400)
return JsonResponse(
{"error": "Either url or stream_id must be provided"}, status=400
)
logger.info(f"Attempting to change stream for channel {channel_id} to {new_url}")
logger.info(
f"Attempting to change stream for channel {channel_id} to {new_url}"
)
# Use the service layer instead of direct implementation
# Pass stream_id to ensure proper connection tracking
result = ChannelService.change_stream_url(channel_id, new_url, user_agent, stream_id, m3u_profile_id)
result = ChannelService.change_stream_url(
channel_id, new_url, user_agent, stream_id, m3u_profile_id
)
# Get the stream manager before updating URL
stream_manager = proxy_server.stream_managers.get(channel_id)
@ -377,37 +558,43 @@ def change_stream(request, channel_id):
if stream_manager:
# Reset tried streams when manually switching URL via API
stream_manager.tried_stream_ids = set()
logger.debug(f"Reset tried stream IDs for channel {channel_id} during manual stream change")
logger.debug(
f"Reset tried stream IDs for channel {channel_id} during manual stream change"
)
if result.get('status') == 'error':
return JsonResponse({
'error': result.get('message', 'Unknown error'),
'diagnostics': result.get('diagnostics', {})
}, status=404)
if result.get("status") == "error":
return JsonResponse(
{
"error": result.get("message", "Unknown error"),
"diagnostics": result.get("diagnostics", {}),
},
status=404,
)
# Format response based on whether it was a direct update or event-based
response_data = {
'message': 'Stream changed successfully',
'channel': channel_id,
'url': new_url,
'owner': result.get('direct_update', False),
'worker_id': proxy_server.worker_id
"message": "Stream changed successfully",
"channel": channel_id,
"url": new_url,
"owner": result.get("direct_update", False),
"worker_id": proxy_server.worker_id,
}
# Include stream_id in response if it was used
if stream_id:
response_data['stream_id'] = stream_id
response_data["stream_id"] = stream_id
return JsonResponse(response_data)
except json.JSONDecodeError:
return JsonResponse({'error': 'Invalid JSON'}, status=400)
return JsonResponse({"error": "Invalid JSON"}, status=400)
except Exception as e:
logger.error(f"Failed to change stream: {e}", exc_info=True)
return JsonResponse({'error': str(e)}, status=500)
return JsonResponse({"error": str(e)}, status=500)
@api_view(['GET'])
@permission_classes([IsAuthenticated])
@api_view(["GET"])
@permission_classes([IsAdmin])
def channel_status(request, channel_id=None):
"""
Returns status information about channels with detail level based on request:
@ -419,7 +606,7 @@ def channel_status(request, channel_id=None):
try:
# Check if Redis is available
if not proxy_server.redis_client:
return JsonResponse({'error': 'Redis connection not available'}, status=500)
return JsonResponse({"error": "Redis connection not available"}, status=500)
# Handle single channel or all channels
if channel_id:
@ -428,7 +615,9 @@ def channel_status(request, channel_id=None):
if channel_info:
return JsonResponse(channel_info)
else:
return JsonResponse({'error': f'Channel {channel_id} not found'}, status=404)
return JsonResponse(
{"error": f"Channel {channel_id} not found"}, status=404
)
else:
# Basic info for all channels
channel_pattern = "ts_proxy:channel:*:metadata"
@ -437,9 +626,13 @@ def channel_status(request, channel_id=None):
# Extract channel IDs from keys
cursor = 0
while True:
cursor, keys = proxy_server.redis_client.scan(cursor, match=channel_pattern)
cursor, keys = proxy_server.redis_client.scan(
cursor, match=channel_pattern
)
for key in keys:
channel_id_match = re.search(r"ts_proxy:channel:(.*):metadata", key.decode('utf-8'))
channel_id_match = re.search(
r"ts_proxy:channel:(.*):metadata", key.decode("utf-8")
)
if channel_id_match:
ch_id = channel_id_match.group(1)
channel_info = ChannelStatus.get_basic_channel_info(ch_id)
@ -449,15 +642,16 @@ def channel_status(request, channel_id=None):
if cursor == 0:
break
return JsonResponse({'channels': all_channels, 'count': len(all_channels)})
return JsonResponse({"channels": all_channels, "count": len(all_channels)})
except Exception as e:
logger.error(f"Error in channel_status: {e}", exc_info=True)
return JsonResponse({'error': str(e)}, status=500)
return JsonResponse({"error": str(e)}, status=500)
@csrf_exempt
@api_view(['POST', 'DELETE'])
@permission_classes([IsAuthenticated])
@api_view(["POST", "DELETE"])
@permission_classes([IsAdmin])
def stop_channel(request, channel_id):
"""Stop a channel and release all associated resources using PubSub events"""
try:
@ -466,60 +660,70 @@ def stop_channel(request, channel_id):
# Use the service layer instead of direct implementation
result = ChannelService.stop_channel(channel_id)
if result.get('status') == 'error':
return JsonResponse({'error': result.get('message', 'Unknown error')}, status=404)
if result.get("status") == "error":
return JsonResponse(
{"error": result.get("message", "Unknown error")}, status=404
)
return JsonResponse({
'message': 'Channel stop request sent',
'channel_id': channel_id,
'previous_state': result.get('previous_state')
})
return JsonResponse(
{
"message": "Channel stop request sent",
"channel_id": channel_id,
"previous_state": result.get("previous_state"),
}
)
except Exception as e:
logger.error(f"Failed to stop channel: {e}", exc_info=True)
return JsonResponse({'error': str(e)}, status=500)
return JsonResponse({"error": str(e)}, status=500)
@csrf_exempt
@api_view(['POST'])
@permission_classes([IsAuthenticated])
@api_view(["POST"])
@permission_classes([IsAdmin])
def stop_client(request, channel_id):
"""Stop a specific client connection using existing client management"""
try:
# Parse request body to get client ID
data = json.loads(request.body)
client_id = data.get('client_id')
client_id = data.get("client_id")
if not client_id:
return JsonResponse({'error': 'No client_id provided'}, status=400)
return JsonResponse({"error": "No client_id provided"}, status=400)
# Use the service layer instead of direct implementation
result = ChannelService.stop_client(channel_id, client_id)
if result.get('status') == 'error':
return JsonResponse({'error': result.get('message')}, status=404)
if result.get("status") == "error":
return JsonResponse({"error": result.get("message")}, status=404)
return JsonResponse({
'message': 'Client stop request processed',
'channel_id': channel_id,
'client_id': client_id,
'locally_processed': result.get('locally_processed', False)
})
return JsonResponse(
{
"message": "Client stop request processed",
"channel_id": channel_id,
"client_id": client_id,
"locally_processed": result.get("locally_processed", False),
}
)
except json.JSONDecodeError:
return JsonResponse({'error': 'Invalid JSON'}, status=400)
return JsonResponse({"error": "Invalid JSON"}, status=400)
except Exception as e:
logger.error(f"Failed to stop client: {e}", exc_info=True)
return JsonResponse({'error': str(e)}, status=500)
return JsonResponse({"error": str(e)}, status=500)
@csrf_exempt
@api_view(['POST'])
@permission_classes([IsAuthenticated])
@api_view(["POST"])
@permission_classes([IsAdmin])
def next_stream(request, channel_id):
"""Switch to the next available stream for a channel"""
proxy_server = ProxyServer.get_instance()
try:
logger.info(f"Request to switch to next stream for channel {channel_id} received")
logger.info(
f"Request to switch to next stream for channel {channel_id} received"
)
# Check if the channel exists
channel = get_stream_object(channel_id)
@ -532,29 +736,42 @@ def next_stream(request, channel_id):
metadata_key = RedisKeys.channel_metadata(channel_id)
if proxy_server.redis_client.exists(metadata_key):
# Get current stream ID from Redis
stream_id_bytes = proxy_server.redis_client.hget(metadata_key, ChannelMetadataField.STREAM_ID)
stream_id_bytes = proxy_server.redis_client.hget(
metadata_key, ChannelMetadataField.STREAM_ID
)
if stream_id_bytes:
current_stream_id = int(stream_id_bytes.decode('utf-8'))
logger.info(f"Found current stream ID {current_stream_id} in Redis for channel {channel_id}")
current_stream_id = int(stream_id_bytes.decode("utf-8"))
logger.info(
f"Found current stream ID {current_stream_id} in Redis for channel {channel_id}"
)
# Get M3U profile from Redis if available
profile_id_bytes = proxy_server.redis_client.hget(metadata_key, ChannelMetadataField.M3U_PROFILE)
profile_id_bytes = proxy_server.redis_client.hget(
metadata_key, ChannelMetadataField.M3U_PROFILE
)
if profile_id_bytes:
profile_id = int(profile_id_bytes.decode('utf-8'))
logger.info(f"Found M3U profile ID {profile_id} in Redis for channel {channel_id}")
profile_id = int(profile_id_bytes.decode("utf-8"))
logger.info(
f"Found M3U profile ID {profile_id} in Redis for channel {channel_id}"
)
if not current_stream_id:
# Channel is not running
return JsonResponse({'error': 'No current stream found for channel'}, status=404)
return JsonResponse(
{"error": "No current stream found for channel"}, status=404
)
# Get all streams for this channel in their defined order
streams = list(channel.streams.all().order_by('channelstream__order'))
streams = list(channel.streams.all().order_by("channelstream__order"))
if len(streams) <= 1:
return JsonResponse({
'error': 'No alternate streams available for this channel',
'current_stream_id': current_stream_id
}, status=404)
return JsonResponse(
{
"error": "No alternate streams available for this channel",
"current_stream_id": current_stream_id,
},
status=404,
)
# Find the current stream's position in the list
current_index = None
@ -564,61 +781,74 @@ def next_stream(request, channel_id):
break
if current_index is None:
logger.warning(f"Current stream ID {current_stream_id} not found in channel's streams list")
logger.warning(
f"Current stream ID {current_stream_id} not found in channel's streams list"
)
# Fall back to the first stream that's not the current one
next_stream = next((s for s in streams if s.id != current_stream_id), None)
if not next_stream:
return JsonResponse({
'error': 'Could not find current stream in channel list',
'current_stream_id': current_stream_id
}, status=404)
return JsonResponse(
{
"error": "Could not find current stream in channel list",
"current_stream_id": current_stream_id,
},
status=404,
)
else:
# Get the next stream in the rotation (with wrap-around)
next_index = (current_index + 1) % len(streams)
next_stream = streams[next_index]
next_stream_id = next_stream.id
logger.info(f"Rotating to next stream ID {next_stream_id} for channel {channel_id}")
logger.info(
f"Rotating to next stream ID {next_stream_id} for channel {channel_id}"
)
# Get full stream info including URL for the next stream
stream_info = get_stream_info_for_switch(channel_id, next_stream_id)
if 'error' in stream_info:
return JsonResponse({
'error': stream_info['error'],
'current_stream_id': current_stream_id,
'next_stream_id': next_stream_id
}, status=404)
if "error" in stream_info:
return JsonResponse(
{
"error": stream_info["error"],
"current_stream_id": current_stream_id,
"next_stream_id": next_stream_id,
},
status=404,
)
# Now use the ChannelService to change the stream URL
result = ChannelService.change_stream_url(
channel_id,
stream_info['url'],
stream_info['user_agent'],
next_stream_id # Pass the stream_id to be stored in Redis
stream_info["url"],
stream_info["user_agent"],
next_stream_id, # Pass the stream_id to be stored in Redis
)
if result.get('status') == 'error':
return JsonResponse({
'error': result.get('message', 'Unknown error'),
'diagnostics': result.get('diagnostics', {}),
'current_stream_id': current_stream_id,
'next_stream_id': next_stream_id
}, status=404)
if result.get("status") == "error":
return JsonResponse(
{
"error": result.get("message", "Unknown error"),
"diagnostics": result.get("diagnostics", {}),
"current_stream_id": current_stream_id,
"next_stream_id": next_stream_id,
},
status=404,
)
# Format success response
response_data = {
'message': 'Stream switched to next available',
'channel': channel_id,
'previous_stream_id': current_stream_id,
'new_stream_id': next_stream_id,
'new_url': stream_info['url'],
'owner': result.get('direct_update', False),
'worker_id': proxy_server.worker_id
"message": "Stream switched to next available",
"channel": channel_id,
"previous_stream_id": current_stream_id,
"new_stream_id": next_stream_id,
"new_url": stream_info["url"],
"owner": result.get("direct_update", False),
"worker_id": proxy_server.worker_id,
}
return JsonResponse(response_data)
except Exception as e:
logger.error(f"Failed to switch to next stream: {e}", exc_info=True)
return JsonResponse({'error': str(e)}, status=500)
return JsonResponse({"error": str(e)}, status=500)

View file

@ -1,38 +1,70 @@
# core/api_views.py
import json
import ipaddress
import logging
from rest_framework import viewsets, status
from rest_framework.decorators import action
from rest_framework.response import Response
from django.shortcuts import get_object_or_404
from .models import UserAgent, StreamProfile, CoreSettings, ProxySettings, STREAM_HASH_KEY
from .serializers import UserAgentSerializer, StreamProfileSerializer, CoreSettingsSerializer, ProxySettingsSerializer
from rest_framework.permissions import IsAuthenticated
from .models import (
UserAgent,
StreamProfile,
CoreSettings,
STREAM_HASH_KEY,
NETWORK_ACCESS,
ProxySettings,
)
from .serializers import (
UserAgentSerializer,
StreamProfileSerializer,
CoreSettingsSerializer,
ProxySettingsSerializer,
)
from rest_framework.decorators import api_view, permission_classes, action
from drf_yasg.utils import swagger_auto_schema
import socket
import requests
import os
from core.tasks import rehash_streams
from apps.accounts.permissions import (
Authenticated,
)
from dispatcharr.utils import get_client_ip
logger = logging.getLogger(__name__)
class UserAgentViewSet(viewsets.ModelViewSet):
"""
API endpoint that allows user agents to be viewed, created, edited, or deleted.
"""
queryset = UserAgent.objects.all()
serializer_class = UserAgentSerializer
class StreamProfileViewSet(viewsets.ModelViewSet):
"""
API endpoint that allows stream profiles to be viewed, created, edited, or deleted.
"""
queryset = StreamProfile.objects.all()
serializer_class = StreamProfileSerializer
class CoreSettingsViewSet(viewsets.ModelViewSet):
"""
API endpoint for editing core settings.
This is treated as a singleton: only one instance should exist.
"""
queryset = CoreSettings.objects.all()
serializer_class = CoreSettingsSerializer
@ -40,11 +72,51 @@ class CoreSettingsViewSet(viewsets.ModelViewSet):
instance = self.get_object()
response = super().update(request, *args, **kwargs)
if instance.key == STREAM_HASH_KEY:
if instance.value != request.data['value']:
rehash_streams.delay(request.data['value'].split(','))
if instance.value != request.data["value"]:
rehash_streams.delay(request.data["value"].split(","))
return response
@action(detail=False, methods=["post"], url_path="check")
def check(self, request, *args, **kwargs):
data = request.data
if data.get("key") == NETWORK_ACCESS:
client_ip = ipaddress.ip_address(get_client_ip(request))
in_network = {}
invalid = []
value = json.loads(data.get("value", "{}"))
for key, val in value.items():
in_network[key] = []
cidrs = val.split(",")
for cidr in cidrs:
try:
network = ipaddress.ip_network(cidr)
if client_ip in network:
in_network[key] = []
break
in_network[key].append(cidr)
except ValueError:
invalid.append(cidr)
if len(invalid) > 0:
return Response(
{
"error": True,
"message": "Invalid CIDR(s)",
"data": invalid,
},
status=status.HTTP_200_OK,
)
return Response(in_network, status=status.HTTP_200_OK)
return Response({}, status=status.HTTP_200_OK)
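# Editor's sketch of the ipaddress checks the "check" action relies on, with
# made-up addresses:
import ipaddress

client_ip = ipaddress.ip_address("192.168.1.50")
assert client_ip in ipaddress.ip_network("192.168.1.0/24")   # client keeps access

try:
    ipaddress.ip_network("10.0.0.300/8")
except ValueError:
    pass                                                     # reported as an invalid CIDR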
class ProxySettingsViewSet(viewsets.ModelViewSet):
"""
API endpoint for proxy settings.
@ -101,17 +173,19 @@ class ProxySettingsViewSet(viewsets.ModelViewSet):
serializer.is_valid(raise_exception=True)
serializer.save()
return Response(serializer.data)
@swagger_auto_schema(
method='get',
method="get",
operation_description="Endpoint for environment details",
responses={200: "Environment variables"}
responses={200: "Environment variables"},
)
@api_view(['GET'])
@permission_classes([IsAuthenticated])
@api_view(["GET"])
@permission_classes([Authenticated])
def environment(request):
public_ip = None
local_ip = None
country_code = None
@ -135,36 +209,60 @@ def environment(request):
except Exception as e:
local_ip = f"Error: {e}"
# 3) If we got a valid public_ip, fetch geo info from ipapi.co
# 3) If we got a valid public_ip, fetch geo info from ipapi.co or ip-api.com
if public_ip and "Error" not in public_ip:
try:
geo = requests.get(f"https://ipapi.co/{public_ip}/json/", timeout=5).json()
# ipapi returns fields like country_code, country_name, etc.
country_code = geo.get("country_code", "") # e.g. "US"
country_name = geo.get("country_name", "") # e.g. "United States"
except requests.RequestException as e:
# Attempt to get geo information from ipapi.co first
r = requests.get(f"https://ipapi.co/{public_ip}/json/", timeout=5)
if r.status_code == requests.codes.ok:
geo = r.json()
country_code = geo.get("country_code") # e.g. "US"
country_name = geo.get("country_name") # e.g. "United States"
else:
# If ipapi.co fails, fallback to ip-api.com
# only supports http requests for free tier
r = requests.get("http://ip-api.com/json/", timeout=5)
if r.status_code == requests.codes.ok:
geo = r.json()
country_code = geo.get("countryCode") # e.g. "US"
country_name = geo.get("country") # e.g. "United States"
else:
raise Exception("Geo lookup failed with both services")
except Exception as e:
logger.error(f"Error during geo lookup: {e}")
country_code = None
country_name = None
return Response({
'authenticated': True,
'public_ip': public_ip,
'local_ip': local_ip,
'country_code': country_code,
'country_name': country_name,
'env_mode': "dev" if os.getenv('DISPATCHARR_ENV') == "dev" else "prod",
})
return Response(
{
"authenticated": True,
"public_ip": public_ip,
"local_ip": local_ip,
"country_code": country_code,
"country_name": country_name,
"env_mode": "dev" if os.getenv("DISPATCHARR_ENV") == "dev" else "prod",
}
)
@swagger_auto_schema(
method='get',
method="get",
operation_description="Get application version information",
responses={200: "Version information"}
responses={200: "Version information"},
)
@api_view(['GET'])
@api_view(["GET"])
def version(request):
# Import version information
from version import __version__, __timestamp__
return Response({
'version': __version__,
'timestamp': __timestamp__,
})
return Response(
{
"version": __version__,
"timestamp": __timestamp__,
}
)

View file

@ -0,0 +1,24 @@
# Generated by Django 5.1.6 on 2025-03-01 14:01
from django.db import migrations
from django.utils.text import slugify
def preload_network_access_settings(apps, schema_editor):
CoreSettings = apps.get_model("core", "CoreSettings")
CoreSettings.objects.create(
key=slugify("Network Access"),
name="Network Access",
value="{}",
)
class Migration(migrations.Migration):
dependencies = [
("core", "0012_default_active_m3u_accounts"),
]
operations = [
migrations.RunPython(preload_network_access_settings),
]
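# Editor's note: slugify() determines the settings key this migration seeds, which
# must match the NETWORK_ACCESS constant used elsewhere:
from django.utils.text import slugify

assert slugify("Network Access") == "network-access"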

View file

@ -2,25 +2,24 @@
from django.db import models
from django.utils.text import slugify
class UserAgent(models.Model):
name = models.CharField(
max_length=512,
unique=True,
help_text="The User-Agent name."
max_length=512, unique=True, help_text="The User-Agent name."
)
user_agent = models.CharField(
max_length=512,
unique=True,
help_text="The complete User-Agent string sent by the client."
help_text="The complete User-Agent string sent by the client.",
)
description = models.CharField(
max_length=255,
blank=True,
help_text="An optional description of the client or device type."
help_text="An optional description of the client or device type.",
)
is_active = models.BooleanField(
default=True,
help_text="Whether this user agent is currently allowed/recognized."
help_text="Whether this user agent is currently allowed/recognized.",
)
created_at = models.DateTimeField(auto_now_add=True)
updated_at = models.DateTimeField(auto_now=True)
@ -28,31 +27,34 @@ class UserAgent(models.Model):
def __str__(self):
return self.name
PROXY_PROFILE_NAME = 'Proxy'
REDIRECT_PROFILE_NAME = 'Redirect'
PROXY_PROFILE_NAME = "Proxy"
REDIRECT_PROFILE_NAME = "Redirect"
class StreamProfile(models.Model):
name = models.CharField(max_length=255, help_text="Name of the stream profile")
command = models.CharField(
max_length=255,
help_text="Command to execute (e.g., 'yt.sh', 'streamlink', or 'vlc')",
blank=True
blank=True,
)
parameters = models.TextField(
help_text="Command-line parameters. Use {userAgent} and {streamUrl} as placeholders.",
blank=True
blank=True,
)
locked = models.BooleanField(
default=False,
help_text="Protected - can't be deleted or modified"
default=False, help_text="Protected - can't be deleted or modified"
)
is_active = models.BooleanField(
default=True, help_text="Whether this profile is active"
)
is_active = models.BooleanField(default=True, help_text="Whether this profile is active")
user_agent = models.ForeignKey(
"UserAgent",
on_delete=models.SET_NULL,
null=True,
blank=True,
help_text="Optional user agent to use. If not set, you can fall back to a default."
help_text="Optional user agent to use. If not set, you can fall back to a default.",
)
def __str__(self):
@ -77,7 +79,9 @@ class StreamProfile(models.Model):
new_value = new_value.pk
if field_name not in allowed_fields and orig_value != new_value:
raise ValidationError(f"Cannot modify {field_name} on a protected profile.")
raise ValidationError(
f"Cannot modify {field_name} on a protected profile."
)
super().save(*args, **kwargs)
@ -90,10 +94,14 @@ class StreamProfile(models.Model):
for field_name, new_value in kwargs.items():
if field_name not in allowed_fields:
raise ValidationError(f"Cannot modify {field_name} on a protected profile.")
raise ValidationError(
f"Cannot modify {field_name} on a protected profile."
)
# Ensure user_agent ForeignKey updates correctly
if field_name == "user_agent" and isinstance(new_value, cls._meta.get_field("user_agent").related_model):
if field_name == "user_agent" and isinstance(
new_value, cls._meta.get_field("user_agent").related_model
):
new_value = new_value.pk # Convert object to ID if needed
setattr(instance, field_name, new_value)
@ -122,7 +130,8 @@ class StreamProfile(models.Model):
# Split the command and iterate through each part to apply replacements
cmd = [self.command] + [
self._replace_in_part(part, replacements) for part in self.parameters.split()
self._replace_in_part(part, replacements)
for part in self.parameters.split()
]
return cmd
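# Editor's sketch of what the substitution above produces for a hypothetical
# profile; replacements.get() stands in for self._replace_in_part():
command = "ffmpeg"
parameters = "-user_agent {userAgent} -i {streamUrl} -c copy -f mpegts pipe:1"
replacements = {"{userAgent}": "VLC/3.0", "{streamUrl}": "http://example.com/live.ts"}

cmd = [command] + [replacements.get(part, part) for part in parameters.split()]
# -> ["ffmpeg", "-user_agent", "VLC/3.0", "-i", "http://example.com/live.ts",
#     "-c", "copy", "-f", "mpegts", "pipe:1"]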
@ -134,11 +143,13 @@ class StreamProfile(models.Model):
return part
DEFAULT_USER_AGENT_KEY= slugify("Default User-Agent")
DEFAULT_USER_AGENT_KEY = slugify("Default User-Agent")
DEFAULT_STREAM_PROFILE_KEY = slugify("Default Stream Profile")
STREAM_HASH_KEY = slugify("M3U Hash Key")
PREFERRED_REGION_KEY = slugify("Preferred Region")
AUTO_IMPORT_MAPPED_FILES = slugify("Auto-Import Mapped Files")
NETWORK_ACCESS = slugify("Network Access")
class CoreSettings(models.Model):
key = models.CharField(

View file

@ -1,22 +1,70 @@
# core/serializers.py
import json
import ipaddress
from rest_framework import serializers
from .models import CoreSettings, UserAgent, StreamProfile, ProxySettings
from .models import CoreSettings, UserAgent, StreamProfile, ProxySettings, NETWORK_ACCESS
class UserAgentSerializer(serializers.ModelSerializer):
class Meta:
model = UserAgent
fields = ['id', 'name', 'user_agent', 'description', 'is_active', 'created_at', 'updated_at']
fields = [
"id",
"name",
"user_agent",
"description",
"is_active",
"created_at",
"updated_at",
]
class StreamProfileSerializer(serializers.ModelSerializer):
class Meta:
model = StreamProfile
fields = ['id', 'name', 'command', 'parameters', 'is_active', 'user_agent', 'locked']
fields = [
"id",
"name",
"command",
"parameters",
"is_active",
"user_agent",
"locked",
]
class CoreSettingsSerializer(serializers.ModelSerializer):
class Meta:
model = CoreSettings
fields = '__all__'
fields = "__all__"
def update(self, instance, validated_data):
if instance.key == NETWORK_ACCESS:
errors = False
invalid = {}
value = json.loads(validated_data.get("value"))
for key, val in value.items():
cidrs = val.split(",")
for cidr in cidrs:
try:
ipaddress.ip_network(cidr)
except ValueError:
errors = True
if key not in invalid:
invalid[key] = []
invalid[key].append(cidr)
if errors:
# Perform CIDR validation
raise serializers.ValidationError(
{
"message": "Invalid CIDRs",
"value": invalid,
}
)
return super().update(instance, validated_data)
class ProxySettingsSerializer(serializers.ModelSerializer):
class Meta:
@ -24,7 +72,7 @@ class ProxySettingsSerializer(serializers.ModelSerializer):
fields = [
'id',
'buffering_timeout',
'buffering_speed',
'buffering_speed',
'redis_chunk_ttl',
'channel_shutdown_delay',
'channel_init_grace_period',

View file

@ -4,43 +4,44 @@ from datetime import timedelta
BASE_DIR = Path(__file__).resolve().parent.parent
SECRET_KEY = 'REPLACE_ME_WITH_A_REAL_SECRET'
SECRET_KEY = "REPLACE_ME_WITH_A_REAL_SECRET"
REDIS_HOST = os.environ.get("REDIS_HOST", "localhost")
REDIS_DB = os.environ.get("REDIS_DB", "0")
# Set DEBUG to True for development, False for production
if os.environ.get('DISPATCHARR_DEBUG', 'False').lower() == 'true':
if os.environ.get("DISPATCHARR_DEBUG", "False").lower() == "true":
DEBUG = True
else:
DEBUG = False
ALLOWED_HOSTS = ["*"]
SECURE_PROXY_SSL_HEADER = ("HTTP_X_FORWARDED_PROTO", "https")
INSTALLED_APPS = [
'apps.api',
'apps.accounts',
'apps.channels.apps.ChannelsConfig',
'apps.dashboard',
'apps.epg',
'apps.hdhr',
'apps.m3u',
'apps.output',
'apps.proxy.apps.ProxyConfig',
'apps.proxy.ts_proxy',
'core',
'daphne',
'drf_yasg',
'channels',
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'rest_framework',
'corsheaders',
'django_filters',
'django_celery_beat',
"apps.api",
"apps.accounts",
"apps.channels.apps.ChannelsConfig",
"apps.dashboard",
"apps.epg",
"apps.hdhr",
"apps.m3u",
"apps.output",
"apps.proxy.apps.ProxyConfig",
"apps.proxy.ts_proxy",
"core",
"daphne",
"drf_yasg",
"channels",
"django.contrib.admin",
"django.contrib.auth",
"django.contrib.contenttypes",
"django.contrib.sessions",
"django.contrib.messages",
"django.contrib.staticfiles",
"rest_framework",
"corsheaders",
"django_filters",
"django_celery_beat",
]
# EPG Processing optimization settings
@ -50,21 +51,23 @@ EPG_ENABLE_MEMORY_MONITORING = True # Whether to monitor memory usage during pr
# Database optimization settings
DATABASE_STATEMENT_TIMEOUT = 300 # Seconds before timing out long-running queries
DATABASE_CONN_MAX_AGE = 60 # Connection max age in seconds, helps with frequent reconnects
DATABASE_CONN_MAX_AGE = (
60 # Connection max age in seconds, helps with frequent reconnects
)
# Disable atomic requests for performance-sensitive views
ATOMIC_REQUESTS = False
# Cache settings - add caching for EPG operations
CACHES = {
'default': {
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
'LOCATION': 'dispatcharr-epg-cache',
'TIMEOUT': 3600, # 1 hour cache timeout
'OPTIONS': {
'MAX_ENTRIES': 10000,
'CULL_FREQUENCY': 3, # Purge 1/3 of entries when max is reached
}
"default": {
"BACKEND": "django.core.cache.backends.locmem.LocMemCache",
"LOCATION": "dispatcharr-epg-cache",
"TIMEOUT": 3600, # 1 hour cache timeout
"OPTIONS": {
"MAX_ENTRIES": 10000,
"CULL_FREQUENCY": 3, # Purge 1/3 of entries when max is reached
},
}
}
@ -72,29 +75,26 @@ CACHES = {
REQUESTS_TIMEOUT = 30 # Seconds for external API requests
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
'corsheaders.middleware.CorsMiddleware',
"django.middleware.security.SecurityMiddleware",
"django.contrib.sessions.middleware.SessionMiddleware",
"django.middleware.common.CommonMiddleware",
"django.middleware.csrf.CsrfViewMiddleware",
"django.contrib.auth.middleware.AuthenticationMiddleware",
"django.contrib.messages.middleware.MessageMiddleware",
"django.middleware.clickjacking.XFrameOptionsMiddleware",
"corsheaders.middleware.CorsMiddleware",
]
ROOT_URLCONF = 'dispatcharr.urls'
ROOT_URLCONF = "dispatcharr.urls"
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [
os.path.join(BASE_DIR, 'frontend/dist'),
BASE_DIR / "templates"
],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
"BACKEND": "django.template.backends.django.DjangoTemplates",
"DIRS": [os.path.join(BASE_DIR, "frontend/dist"), BASE_DIR / "templates"],
"APP_DIRS": True,
"OPTIONS": {
"context_processors": [
"django.template.context_processors.debug",
"django.template.context_processors.request",
"django.contrib.auth.context_processors.auth",
@ -104,8 +104,8 @@ TEMPLATES = [
},
]
WSGI_APPLICATION = 'dispatcharr.wsgi.application'
ASGI_APPLICATION = 'dispatcharr.asgi.application'
WSGI_APPLICATION = "dispatcharr.wsgi.application"
ASGI_APPLICATION = "dispatcharr.asgi.application"
CHANNEL_LAYERS = {
"default": {
@ -116,76 +116,72 @@ CHANNEL_LAYERS = {
},
}
if os.getenv('DB_ENGINE', None) == 'sqlite':
if os.getenv("DB_ENGINE", None) == "sqlite":
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': '/data/dispatcharr.db',
"default": {
"ENGINE": "django.db.backends.sqlite3",
"NAME": "/data/dispatcharr.db",
}
}
else:
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql',
'NAME': os.environ.get('POSTGRES_DB', 'dispatcharr'),
'USER': os.environ.get('POSTGRES_USER', 'dispatch'),
'PASSWORD': os.environ.get('POSTGRES_PASSWORD', 'secret'),
'HOST': os.environ.get('POSTGRES_HOST', 'localhost'),
'PORT': int(os.environ.get('POSTGRES_PORT', 5432)),
"default": {
"ENGINE": "django.db.backends.postgresql",
"NAME": os.environ.get("POSTGRES_DB", "dispatcharr"),
"USER": os.environ.get("POSTGRES_USER", "dispatch"),
"PASSWORD": os.environ.get("POSTGRES_PASSWORD", "secret"),
"HOST": os.environ.get("POSTGRES_HOST", "localhost"),
"PORT": int(os.environ.get("POSTGRES_PORT", 5432)),
}
}
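# Editor's note (illustrative, not in the diff): the branch above is chosen purely
# from environment variables, e.g.:
import os

os.environ["DB_ENGINE"] = "sqlite"   # -> SQLite file at /data/dispatcharr.db
# unset / any other value            # -> PostgreSQL, configured via POSTGRES_* vars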
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
"NAME": "django.contrib.auth.password_validation.UserAttributeSimilarityValidator",
},
]
REST_FRAMEWORK = {
'DEFAULT_SCHEMA_CLASS': 'rest_framework.schemas.coreapi.AutoSchema',
'DEFAULT_RENDERER_CLASSES': [
'rest_framework.renderers.JSONRenderer',
'rest_framework.renderers.BrowsableAPIRenderer',
"DEFAULT_SCHEMA_CLASS": "rest_framework.schemas.coreapi.AutoSchema",
"DEFAULT_RENDERER_CLASSES": [
"rest_framework.renderers.JSONRenderer",
"rest_framework.renderers.BrowsableAPIRenderer",
],
'DEFAULT_AUTHENTICATION_CLASSES': [
'rest_framework_simplejwt.authentication.JWTAuthentication',
"DEFAULT_AUTHENTICATION_CLASSES": [
"rest_framework_simplejwt.authentication.JWTAuthentication",
],
'DEFAULT_FILTER_BACKENDS': ['django_filters.rest_framework.DjangoFilterBackend'],
"DEFAULT_FILTER_BACKENDS": ["django_filters.rest_framework.DjangoFilterBackend"],
}
SWAGGER_SETTINGS = {
'SECURITY_DEFINITIONS': {
'Bearer': {
'type': 'apiKey',
'name': 'Authorization',
'in': 'header'
}
}
"SECURITY_DEFINITIONS": {
"Bearer": {"type": "apiKey", "name": "Authorization", "in": "header"}
}
}
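With Simple JWT providing authentication and Swagger declaring a Bearer apiKey in the Authorization header, API clients send an access token on every request. A hedged sketch of such a call with the requests library; the host, endpoint path, and token below are placeholders for illustration, not confirmed values from this project:

import requests

BASE_URL = "http://localhost:9191"            # assumed host/port, adjust to your deployment
ACCESS_TOKEN = "<JWT access token>"           # obtain one from the token endpoint first

response = requests.get(
    f"{BASE_URL}/api/channels/",              # hypothetical endpoint, for illustration only
    headers={"Authorization": f"Bearer {ACCESS_TOKEN}"},  # JWTAuthentication expects the Bearer scheme
    timeout=10,
)
response.raise_for_status()
print(response.json())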
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
LANGUAGE_CODE = "en-us"
TIME_ZONE = "UTC"
USE_I18N = True
USE_TZ = True
STATIC_URL = '/static/'
STATIC_ROOT = BASE_DIR / 'static' # Directory where static files will be collected
STATIC_URL = "/static/"
STATIC_ROOT = BASE_DIR / "static" # Directory where static files will be collected
# Adjust STATICFILES_DIRS to include the paths to the directories that contain your static files.
STATICFILES_DIRS = [
os.path.join(BASE_DIR, 'frontend/dist'), # React build static files
os.path.join(BASE_DIR, "frontend/dist"), # React build static files
]
DEFAULT_AUTO_FIELD = 'django.db.models.BigAutoField'
AUTH_USER_MODEL = 'accounts.User'
DEFAULT_AUTO_FIELD = "django.db.models.BigAutoField"
AUTH_USER_MODEL = "accounts.User"
CELERY_BROKER_URL = os.environ.get('CELERY_BROKER_URL', 'redis://localhost:6379/0')
CELERY_BROKER_URL = os.environ.get("CELERY_BROKER_URL", "redis://localhost:6379/0")
CELERY_RESULT_BACKEND = CELERY_BROKER_URL
# Configure Redis key prefix
CELERY_RESULT_BACKEND_TRANSPORT_OPTIONS = {
'global_keyprefix': 'celery-tasks:', # Set the Redis key prefix for Celery
"global_keyprefix": "celery-tasks:", # Set the Redis key prefix for Celery
}
# Set TTL (Time-to-Live) for task results (in seconds)
@ -193,47 +189,44 @@ CELERY_RESULT_EXPIRES = 3600 # 1 hour TTL for task results
# Optionally, set visibility timeout for task retries (if using Redis)
CELERY_BROKER_TRANSPORT_OPTIONS = {
'visibility_timeout': 3600, # Time in seconds that a task remains invisible during retries
"visibility_timeout": 3600, # Time in seconds that a task remains invisible during retries
}
CELERY_ACCEPT_CONTENT = ['json']
CELERY_TASK_SERIALIZER = 'json'
CELERY_ACCEPT_CONTENT = ["json"]
CELERY_TASK_SERIALIZER = "json"
CELERY_BEAT_SCHEDULER = "django_celery_beat.schedulers.DatabaseScheduler"
CELERY_BEAT_SCHEDULE = {
'fetch-channel-statuses': {
'task': 'apps.proxy.tasks.fetch_channel_stats', # Direct task call
'schedule': 2.0, # Every 2 seconds
"fetch-channel-statuses": {
"task": "apps.proxy.tasks.fetch_channel_stats", # Direct task call
"schedule": 2.0, # Every 2 seconds
},
'scan-files': {
'task': 'core.tasks.scan_and_process_files', # Direct task call
'schedule': 20.0, # Every 20 seconds
"scan-files": {
"task": "core.tasks.scan_and_process_files", # Direct task call
"schedule": 20.0, # Every 20 seconds
},
}
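Beat only stores dotted task paths, so each entry resolves to a Celery task registered under that name. A hedged sketch of the shape such a task takes; the body is illustrative, and the real implementations live in apps/proxy/tasks.py and core/tasks.py rather than here:

from celery import shared_task

@shared_task
def fetch_channel_stats():
    # Illustrative placeholder body; the real task gathers per-channel statistics.
    return "ok"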
MEDIA_ROOT = BASE_DIR / 'media'
MEDIA_URL = '/media/'
MEDIA_ROOT = BASE_DIR / "media"
MEDIA_URL = "/media/"
SERVER_IP = "127.0.0.1"
CORS_ALLOW_ALL_ORIGINS = True
CORS_ALLOW_CREDENTIALS = True
CSRF_TRUSTED_ORIGINS = [
'http://*',
'https://*'
]
CSRF_TRUSTED_ORIGINS = ["http://*", "https://*"]
APPEND_SLASH = True
SIMPLE_JWT = {
'ACCESS_TOKEN_LIFETIME': timedelta(minutes=30),
'REFRESH_TOKEN_LIFETIME': timedelta(days=1),
'ROTATE_REFRESH_TOKENS': False, # Optional: Whether to rotate refresh tokens
'BLACKLIST_AFTER_ROTATION': True, # Optional: Whether to blacklist refresh tokens
"ACCESS_TOKEN_LIFETIME": timedelta(minutes=30),
"REFRESH_TOKEN_LIFETIME": timedelta(days=1),
"ROTATE_REFRESH_TOKENS": False, # Optional: Whether to rotate refresh tokens
"BLACKLIST_AFTER_ROTATION": True, # Optional: Whether to blacklist refresh tokens
}
# Redis connection settings
REDIS_URL = 'redis://localhost:6379/0'
REDIS_URL = "redis://localhost:6379/0"
REDIS_SOCKET_TIMEOUT = 60 # Socket timeout in seconds
REDIS_SOCKET_CONNECT_TIMEOUT = 5 # Connection timeout in seconds
REDIS_HEALTH_CHECK_INTERVAL = 15 # Health check every 15 seconds
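With redis-py these values map directly onto connection keyword arguments, which is presumably how consumers build their clients. A small sketch, assuming redis-py is the client library in use:

import redis
from django.conf import settings

client = redis.Redis.from_url(
    settings.REDIS_URL,
    socket_timeout=settings.REDIS_SOCKET_TIMEOUT,
    socket_connect_timeout=settings.REDIS_SOCKET_CONNECT_TIMEOUT,
    health_check_interval=settings.REDIS_HEALTH_CHECK_INTERVAL,
)
client.ping()  # raises redis.exceptions.ConnectionError if Redis is unreachable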
@ -244,45 +237,45 @@ REDIS_RETRY_INTERVAL = 1 # Initial retry interval in seconds
# Proxy Settings
PROXY_SETTINGS = {
'HLS': {
'DEFAULT_URL': '', # Default HLS stream URL if needed
'BUFFER_SIZE': 1000,
'USER_AGENT': 'VLC/3.0.20 LibVLC/3.0.20',
'CHUNK_SIZE': 8192,
'CLIENT_POLL_INTERVAL': 0.1,
'MAX_RETRIES': 3,
'MIN_SEGMENTS': 12,
'MAX_SEGMENTS': 16,
'WINDOW_SIZE': 12,
'INITIAL_SEGMENTS': 3,
"HLS": {
"DEFAULT_URL": "", # Default HLS stream URL if needed
"BUFFER_SIZE": 1000,
"USER_AGENT": "VLC/3.0.20 LibVLC/3.0.20",
"CHUNK_SIZE": 8192,
"CLIENT_POLL_INTERVAL": 0.1,
"MAX_RETRIES": 3,
"MIN_SEGMENTS": 12,
"MAX_SEGMENTS": 16,
"WINDOW_SIZE": 12,
"INITIAL_SEGMENTS": 3,
},
"TS": {
"DEFAULT_URL": "", # Default TS stream URL if needed
"BUFFER_SIZE": 1000,
"RECONNECT_DELAY": 5,
"USER_AGENT": "VLC/3.0.20 LibVLC/3.0.20",
"REDIS_CHUNK_TTL": 60, # How long to keep chunks in Redis (seconds)
},
'TS': {
'DEFAULT_URL': '', # Default TS stream URL if needed
'BUFFER_SIZE': 1000,
'RECONNECT_DELAY': 5,
'USER_AGENT': 'VLC/3.0.20 LibVLC/3.0.20',
'REDIS_CHUNK_TTL': 60, # How long to keep chunks in Redis (seconds)
}
}
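PROXY_SETTINGS is a plain nested dictionary on the settings module, so consumers read it with ordinary lookups. A small sketch of pulling the TS tuning values; the variable names are illustrative, not the actual proxy code:

from django.conf import settings

ts_config = settings.PROXY_SETTINGS["TS"]
buffer_size = ts_config["BUFFER_SIZE"]                 # 1000 by default
chunk_ttl = ts_config["REDIS_CHUNK_TTL"]               # seconds a chunk stays in Redis
user_agent = ts_config.get("USER_AGENT", "VLC/3.0.20 LibVLC/3.0.20")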
# Map log level names to their numeric values
LOG_LEVEL_MAP = {
'TRACE': 5,
'DEBUG': 10,
'INFO': 20,
'WARNING': 30,
'ERROR': 40,
'CRITICAL': 50
"TRACE": 5,
"DEBUG": 10,
"INFO": 20,
"WARNING": 30,
"ERROR": 40,
"CRITICAL": 50,
}
# Get log level from environment variable, default to INFO if not set
# Add debugging output to see exactly what's being detected
env_log_level = os.environ.get('DISPATCHARR_LOG_LEVEL', '')
env_log_level = os.environ.get("DISPATCHARR_LOG_LEVEL", "")
print(f"Environment DISPATCHARR_LOG_LEVEL detected as: '{env_log_level}'")
if not env_log_level:
print("No DISPATCHARR_LOG_LEVEL found in environment, using default INFO")
LOG_LEVEL_NAME = 'INFO'
LOG_LEVEL_NAME = "INFO"
else:
LOG_LEVEL_NAME = env_log_level.upper()
print(f"Setting log level to: {LOG_LEVEL_NAME}")
@ -291,68 +284,68 @@ LOG_LEVEL = LOG_LEVEL_MAP.get(LOG_LEVEL_NAME, 20) # Default to INFO (20) if inv
# Add this to your existing LOGGING configuration or create one if it doesn't exist
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'formatters': {
'verbose': {
'format': '{asctime} {levelname} {name} {message}',
'style': '{',
"version": 1,
"disable_existing_loggers": False,
"formatters": {
"verbose": {
"format": "{asctime} {levelname} {name} {message}",
"style": "{",
},
},
'handlers': {
'console': {
'class': 'logging.StreamHandler',
'formatter': 'verbose',
'level': 5, # Always allow TRACE level messages through the handler
"handlers": {
"console": {
"class": "logging.StreamHandler",
"formatter": "verbose",
"level": 5, # Always allow TRACE level messages through the handler
},
},
'loggers': {
'core.tasks': {
'handlers': ['console'],
'level': LOG_LEVEL, # Use environment-configured level
'propagate': False, # Don't propagate to root logger to avoid duplicate logs
"loggers": {
"core.tasks": {
"handlers": ["console"],
"level": LOG_LEVEL, # Use environment-configured level
"propagate": False, # Don't propagate to root logger to avoid duplicate logs
},
'core.utils': {
'handlers': ['console'],
'level': LOG_LEVEL,
'propagate': False,
"core.utils": {
"handlers": ["console"],
"level": LOG_LEVEL,
"propagate": False,
},
'apps.proxy': {
'handlers': ['console'],
'level': LOG_LEVEL, # Use environment-configured level
'propagate': False, # Don't propagate to root logger
"apps.proxy": {
"handlers": ["console"],
"level": LOG_LEVEL, # Use environment-configured level
"propagate": False, # Don't propagate to root logger
},
# Add parent logger for all app modules
'apps': {
'handlers': ['console'],
'level': LOG_LEVEL,
'propagate': False,
"apps": {
"handlers": ["console"],
"level": LOG_LEVEL,
"propagate": False,
},
# Celery loggers to capture task execution messages
'celery': {
'handlers': ['console'],
'level': LOG_LEVEL, # Use configured log level for Celery logs
'propagate': False,
"celery": {
"handlers": ["console"],
"level": LOG_LEVEL, # Use configured log level for Celery logs
"propagate": False,
},
'celery.task': {
'handlers': ['console'],
'level': LOG_LEVEL, # Use configured log level for task-specific logs
'propagate': False,
"celery.task": {
"handlers": ["console"],
"level": LOG_LEVEL, # Use configured log level for task-specific logs
"propagate": False,
},
'celery.worker': {
'handlers': ['console'],
'level': LOG_LEVEL, # Use configured log level for worker logs
'propagate': False,
"celery.worker": {
"handlers": ["console"],
"level": LOG_LEVEL, # Use configured log level for worker logs
"propagate": False,
},
'celery.beat': {
'handlers': ['console'],
'level': LOG_LEVEL, # Use configured log level for scheduler logs
'propagate': False,
"celery.beat": {
"handlers": ["console"],
"level": LOG_LEVEL, # Use configured log level for scheduler logs
"propagate": False,
},
# Add any other loggers you need to capture TRACE logs from
},
'root': {
'handlers': ['console'],
'level': LOG_LEVEL, # Use user-configured level instead of hardcoded 'INFO'
"root": {
"handlers": ["console"],
"level": LOG_LEVEL, # Use user-configured level instead of hardcoded 'INFO'
},
}
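The console handler sits at numeric level 5 so TRACE messages can reach it when DISPATCHARR_LOG_LEVEL=TRACE, but the stdlib logging module has no built-in TRACE name. A hedged sketch of registering and emitting that level, assuming the project does something equivalent in its own logging helpers:

import logging

TRACE = 5
logging.addLevelName(TRACE, "TRACE")        # map numeric level 5 to the name "TRACE"

logger = logging.getLogger("apps.proxy")    # one of the loggers configured above
logger.log(TRACE, "very chatty diagnostic message")
logger.info("normal operational message")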

View file

@ -7,13 +7,14 @@ from rest_framework import permissions
from drf_yasg.views import get_schema_view
from drf_yasg import openapi
from .routing import websocket_urlpatterns
from apps.output.views import xc_player_api, xc_panel_api, xc_get, xc_xmltv
from apps.proxy.ts_proxy.views import stream_xc
# Define schema_view for Swagger
schema_view = get_schema_view(
openapi.Info(
title="Dispatcharr API",
default_version='v1',
default_version="v1",
description="API documentation for Dispatcharr",
terms_of_service="https://www.google.com/policies/terms/",
contact=openapi.Contact(email="contact@dispatcharr.local"),
@ -25,38 +26,48 @@ schema_view = get_schema_view(
urlpatterns = [
# API Routes
path('api/', include(('apps.api.urls', 'api'), namespace='api')),
path('api', RedirectView.as_view(url='/api/', permanent=True)),
path("api/", include(("apps.api.urls", "api"), namespace="api")),
path("api", RedirectView.as_view(url="/api/", permanent=True)),
# Admin
path('admin', RedirectView.as_view(url='/admin/', permanent=True)),
path('admin/', admin.site.urls),
path("admin", RedirectView.as_view(url="/admin/", permanent=True)),
path("admin/", admin.site.urls),
# Outputs
path('output', RedirectView.as_view(url='/output/', permanent=True)),
path('output/', include(('apps.output.urls', 'output'), namespace='output')),
path("output", RedirectView.as_view(url="/output/", permanent=True)),
path("output/", include(("apps.output.urls", "output"), namespace="output")),
# HDHR
path('hdhr', RedirectView.as_view(url='/hdhr/', permanent=True)),
path('hdhr/', include(('apps.hdhr.urls', 'hdhr'), namespace='hdhr')),
path("hdhr", RedirectView.as_view(url="/hdhr/", permanent=True)),
path("hdhr/", include(("apps.hdhr.urls", "hdhr"), namespace="hdhr")),
# Add proxy apps - Move these before the catch-all
path('proxy/', include(('apps.proxy.urls', 'proxy'), namespace='proxy')),
path('proxy', RedirectView.as_view(url='/proxy/', permanent=True)),
path("proxy/", include(("apps.proxy.urls", "proxy"), namespace="proxy")),
path("proxy", RedirectView.as_view(url="/proxy/", permanent=True)),
# xc
re_path("player_api.php", xc_player_api, name="xc_player_api"),
re_path("panel_api.php", xc_panel_api, name="xc_panel_api"),
re_path("get.php", xc_get, name="xc_get"),
re_path("xmltv.php", xc_xmltv, name="xc_xmltv"),
path(
"live/<str:username>/<str:password>/<str:channel_id>",
stream_xc,
name="xc_live_stream_endpoint",
),
path(
"<str:username>/<str:password>/<str:channel_id>",
stream_xc,
name="xc_stream_endpoint",
),
# Swagger UI
path('swagger/', schema_view.with_ui('swagger', cache_timeout=0), name='schema-swagger-ui'),
path(
"swagger/",
schema_view.with_ui("swagger", cache_timeout=0),
name="schema-swagger-ui",
),
# ReDoc UI
path('redoc/', schema_view.with_ui('redoc', cache_timeout=0), name='schema-redoc'),
path("redoc/", schema_view.with_ui("redoc", cache_timeout=0), name="schema-redoc"),
# Optionally, serve the raw Swagger JSON
path('swagger.json', schema_view.without_ui(cache_timeout=0), name='schema-json'),
path("swagger.json", schema_view.without_ui(cache_timeout=0), name="schema-json"),
# Catch-all routes should always be last
path('', TemplateView.as_view(template_name='index.html')), # React entry point
path('<path:unused_path>', TemplateView.as_view(template_name='index.html')),
path("", TemplateView.as_view(template_name="index.html")), # React entry point
path("<path:unused_path>", TemplateView.as_view(template_name="index.html")),
] + static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
urlpatterns += websocket_urlpatterns

View file

@ -1,23 +1,58 @@
# dispatcharr/utils.py
import json
import ipaddress
from django.http import JsonResponse
from django.core.exceptions import ValidationError
from core.models import CoreSettings, NETWORK_ACCESS
def json_error_response(message, status=400):
"""Return a standardized error JSON response."""
return JsonResponse({'success': False, 'error': message}, status=status)
return JsonResponse({"success": False, "error": message}, status=status)
def json_success_response(data=None, status=200):
"""Return a standardized success JSON response."""
response = {'success': True}
response = {"success": True}
if data is not None:
response.update(data)
return JsonResponse(response, status=status)
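Both helpers simply wrap JsonResponse with a consistent success flag, so views can return them directly. A minimal usage sketch; the view below is hypothetical and only exists for illustration:

def example_view(request):
    if request.method != "POST":
        return json_error_response("Only POST is allowed", status=405)
    return json_success_response({"detail": "saved"})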
def validate_logo_file(file):
"""Validate uploaded logo file size and MIME type."""
valid_mime_types = ['image/jpeg', 'image/png', 'image/gif']
valid_mime_types = ["image/jpeg", "image/png", "image/gif"]
if file.content_type not in valid_mime_types:
raise ValidationError('Unsupported file type. Allowed types: JPEG, PNG, GIF.')
raise ValidationError("Unsupported file type. Allowed types: JPEG, PNG, GIF.")
if file.size > 2 * 1024 * 1024:
raise ValidationError('File too large. Max 2MB.')
raise ValidationError("File too large. Max 2MB.")
def get_client_ip(request):
    real_ip = request.META.get("HTTP_X_REAL_IP")
    if real_ip:
        # X-Real-IP is set by the reverse proxy; take the first entry in case the
        # header ever arrives as a comma-separated list
        ip = real_ip.split(",")[0].strip()
    else:
        ip = request.META.get("REMOTE_ADDR")
    return ip
def network_access_allowed(request, settings_key):
network_access = json.loads(CoreSettings.objects.get(key=NETWORK_ACCESS).value)
cidrs = (
network_access[settings_key].split(",")
if settings_key in network_access
else ["0.0.0.0/0"]
)
network_allowed = False
client_ip = ipaddress.ip_address(get_client_ip(request))
for cidr in cidrs:
network = ipaddress.ip_network(cidr)
if client_ip in network:
network_allowed = True
break
return network_allowed
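The check reduces to standard-library ipaddress containment: each configured CIDR is parsed and the client address is tested against it, with 0.0.0.0/0 as the default when no CIDR list is stored for the settings key. A worked sketch of that containment logic in isolation:

import ipaddress

cidrs = ["192.168.1.0/24", "10.0.0.0/8"]            # example values for a settings key
client_ip = ipaddress.ip_address("192.168.1.42")

allowed = any(client_ip in ipaddress.ip_network(cidr) for cidr in cidrs)
print(allowed)  # True: 192.168.1.42 falls inside 192.168.1.0/24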

View file

@ -9,13 +9,16 @@ server {
proxy_read_timeout 300;
    client_max_body_size 0; # Disable the request body size limit (uploads of any size are allowed)
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_set_header X-Forwarded-Host $host:$server_port;
proxy_set_header X-Forwarded-Proto $scheme;
proxy_set_header Host $host;
# Serve Django via uWSGI
location / {
include uwsgi_params;
uwsgi_pass unix:/app/uwsgi.sock;
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_set_header Host $host;
}
location /assets/ {
@ -55,11 +58,6 @@ server {
location /hdhr {
include uwsgi_params;
uwsgi_pass unix:/app/uwsgi.sock;
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_set_header X-Forwarded-Host $host:$server_port;
proxy_set_header X-Forwarded-Proto $scheme;
proxy_set_header Host $host;
}
# Serve FFmpeg streams efficiently
@ -78,9 +76,6 @@ server {
proxy_http_version 1.1;
proxy_set_header Upgrade $http_upgrade;
proxy_set_header Connection "Upgrade";
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_set_header Host $host;
}
# Route TS proxy requests to the dedicated instance
@ -94,8 +89,5 @@ server {
proxy_read_timeout 300s;
proxy_send_timeout 300s;
client_max_body_size 0;
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_set_header Host $host;
}
}

View file

@ -12,25 +12,20 @@
"@dnd-kit/modifiers": "^9.0.0",
"@dnd-kit/sortable": "^10.0.0",
"@dnd-kit/utilities": "^3.2.2",
"@mantine/charts": "^7.17.2",
"@mantine/core": "^7.17.2",
"@mantine/dates": "^7.17.2",
"@mantine/dropzone": "^7.17.2",
"@mantine/form": "^7.17.3",
"@mantine/hooks": "^7.17.2",
"@mantine/notifications": "^7.17.2",
"@tabler/icons-react": "^3.31.0",
"@mantine/charts": "~8.0.1",
"@mantine/core": "~8.0.1",
"@mantine/dates": "~8.0.1",
"@mantine/dropzone": "~8.0.1",
"@mantine/form": "~8.0.1",
"@mantine/hooks": "~8.0.1",
"@mantine/notifications": "~8.0.1",
"@tanstack/react-table": "^8.21.2",
"allotment": "^1.20.3",
"axios": "^1.8.2",
"clsx": "^2.1.1",
"dayjs": "^1.11.13",
"formik": "^2.4.6",
"hls.js": "^1.5.20",
"lucide-react": "^0.479.0",
"mantine-react-table": "^2.0.0-beta.9",
"lucide-react": "^0.511.0",
"mpegts.js": "^1.8.0",
"prettier": "^3.5.3",
"react": "^19.0.0",
"react-dom": "^19.0.0",
"react-draggable": "^4.4.6",
@ -53,6 +48,7 @@
"eslint-plugin-react-hooks": "^5.1.0",
"eslint-plugin-react-refresh": "^0.4.19",
"globals": "^15.15.0",
"prettier": "^3.5.3",
"vite": "^6.2.0"
}
},
@ -753,72 +749,72 @@
"license": "Apache-2.0"
},
"node_modules/@mantine/charts": {
"version": "7.17.2",
"resolved": "https://registry.npmjs.org/@mantine/charts/-/charts-7.17.2.tgz",
"integrity": "sha512-ckB23pIqRjzysUz2EiWZD9AVyf7t0r7o7zfJbl01nzOezFgYq5RGeRoxvpcsfBC+YoSbB/43rjNcXtYhtA7QzA==",
"version": "8.0.1",
"resolved": "https://registry.npmjs.org/@mantine/charts/-/charts-8.0.1.tgz",
"integrity": "sha512-yntk4siXpQGSj83tDwftJw6fHTOBS6c/VWinjvTW29ptEdjBCxbKFfyyDc9UGVVuO7ovbdtpfCZBpuN2I7HPCA==",
"license": "MIT",
"peerDependencies": {
"@mantine/core": "7.17.2",
"@mantine/hooks": "7.17.2",
"@mantine/core": "8.0.1",
"@mantine/hooks": "8.0.1",
"react": "^18.x || ^19.x",
"react-dom": "^18.x || ^19.x",
"recharts": "^2.13.3"
}
},
"node_modules/@mantine/core": {
"version": "7.17.2",
"resolved": "https://registry.npmjs.org/@mantine/core/-/core-7.17.2.tgz",
"integrity": "sha512-R6MYhitJ0JEgrhadd31Nw9FhRaQwDHjXUs5YIlitKH/fTOz9gKSxKjzmNng3bEBQCcbEDOkZj3FRcBgTUh/F0Q==",
"version": "8.0.1",
"resolved": "https://registry.npmjs.org/@mantine/core/-/core-8.0.1.tgz",
"integrity": "sha512-4ezaxKjChSPtawamQ3KrJq+x506uTouXlL0Z5fP+t105KnyxMrAJUENhbh2ivD4pq9Zh1BFiD9IWzyu3IXFR8w==",
"license": "MIT",
"dependencies": {
"@floating-ui/react": "^0.26.28",
"clsx": "^2.1.1",
"react-number-format": "^5.4.3",
"react-remove-scroll": "^2.6.2",
"react-textarea-autosize": "8.5.6",
"react-textarea-autosize": "8.5.9",
"type-fest": "^4.27.0"
},
"peerDependencies": {
"@mantine/hooks": "7.17.2",
"@mantine/hooks": "8.0.1",
"react": "^18.x || ^19.x",
"react-dom": "^18.x || ^19.x"
}
},
"node_modules/@mantine/dates": {
"version": "7.17.2",
"resolved": "https://registry.npmjs.org/@mantine/dates/-/dates-7.17.2.tgz",
"integrity": "sha512-7bB992j8f+uEi280jab0/8i5yfsN/3oSrMDFwatZ+7XSDUwiP0YFib/FVX0pNSSqdFpbXhUmsZEECX71QtHw+Q==",
"version": "8.0.1",
"resolved": "https://registry.npmjs.org/@mantine/dates/-/dates-8.0.1.tgz",
"integrity": "sha512-YCmV5jiGE9Ts2uhNS217IA1Hd5kAa8oaEtfnU0bS1sL36zKEf2s6elmzY718XdF8tFil0jJWAj0jiCrA3/udMg==",
"license": "MIT",
"dependencies": {
"clsx": "^2.1.1"
},
"peerDependencies": {
"@mantine/core": "7.17.2",
"@mantine/hooks": "7.17.2",
"@mantine/core": "8.0.1",
"@mantine/hooks": "8.0.1",
"dayjs": ">=1.0.0",
"react": "^18.x || ^19.x",
"react-dom": "^18.x || ^19.x"
}
},
"node_modules/@mantine/dropzone": {
"version": "7.17.2",
"resolved": "https://registry.npmjs.org/@mantine/dropzone/-/dropzone-7.17.2.tgz",
"integrity": "sha512-NMQ1SDmnW0sf3GO6p1r/VIcg/xWqlRmfnWCr00/bGRbBEGbyaUwL3LSn+KYBJdY+3/jNGvGa+xflWDvnby5tzw==",
"version": "8.0.1",
"resolved": "https://registry.npmjs.org/@mantine/dropzone/-/dropzone-8.0.1.tgz",
"integrity": "sha512-8PH5yrtA/ebCIwjs0m4J9qOvEyS/P4XmNlHrw0E389/qq64Ol7+/ZH7Xtiq64IaY8kvsMW1XHaV0c+bdYrijiA==",
"license": "MIT",
"dependencies": {
"react-dropzone-esm": "15.2.0"
"react-dropzone": "14.3.8"
},
"peerDependencies": {
"@mantine/core": "7.17.2",
"@mantine/hooks": "7.17.2",
"@mantine/core": "8.0.1",
"@mantine/hooks": "8.0.1",
"react": "^18.x || ^19.x",
"react-dom": "^18.x || ^19.x"
}
},
"node_modules/@mantine/form": {
"version": "7.17.3",
"resolved": "https://registry.npmjs.org/@mantine/form/-/form-7.17.3.tgz",
"integrity": "sha512-ktERldD8f9lrjjz6wIbwMnNbAZq8XEWPx4K5WuFyjXaK0PI8D+gsXIGKMtA5rVrAUFHCWCdbK3yLgtjJNki8ew==",
"version": "8.0.1",
"resolved": "https://registry.npmjs.org/@mantine/form/-/form-8.0.1.tgz",
"integrity": "sha512-lQ94gn/9p60C+tKEW7psQ1tZHod58Q0bXLbRDadRKMwnqBb2WFoIuaQWPDo7ox+PqyOv28dtflgS+Lm95EbBhg==",
"license": "MIT",
"dependencies": {
"fast-deep-equal": "^3.1.3",
@ -829,34 +825,34 @@
}
},
"node_modules/@mantine/hooks": {
"version": "7.17.2",
"resolved": "https://registry.npmjs.org/@mantine/hooks/-/hooks-7.17.2.tgz",
"integrity": "sha512-tbErVcGZu0E4dSmE6N0k6Tv1y9R3SQmmQgwqorcc+guEgKMdamc36lucZGlJnSGUmGj+WLUgELkEQ0asdfYBDA==",
"version": "8.0.1",
"resolved": "https://registry.npmjs.org/@mantine/hooks/-/hooks-8.0.1.tgz",
"integrity": "sha512-GvLdM4Ro3QcDyIgqrdXsUZmeeKye2TNL/k3mEr9JhM5KacHQjr83JPp0u9eLobn7kiyBqpLTYmVYAbmjJdCxHw==",
"license": "MIT",
"peerDependencies": {
"react": "^18.x || ^19.x"
}
},
"node_modules/@mantine/notifications": {
"version": "7.17.2",
"resolved": "https://registry.npmjs.org/@mantine/notifications/-/notifications-7.17.2.tgz",
"integrity": "sha512-vg0L8cmihz0ODg4WJ9MAyK06WPt/6g67ksIUFxd4F8RfdJbIMLTsNG9yWoSfuhtXenUg717KaA917IWLjDSaqw==",
"version": "8.0.1",
"resolved": "https://registry.npmjs.org/@mantine/notifications/-/notifications-8.0.1.tgz",
"integrity": "sha512-7TX9OyAmUcok3qffnheS7gTAMKDczETy8XEYDr38Sy/XIoXLjM+3CwO+a/vfd1F9oW2LvkahkHT0Ey+vBOVd0Q==",
"license": "MIT",
"dependencies": {
"@mantine/store": "7.17.2",
"@mantine/store": "8.0.1",
"react-transition-group": "4.4.5"
},
"peerDependencies": {
"@mantine/core": "7.17.2",
"@mantine/hooks": "7.17.2",
"@mantine/core": "8.0.1",
"@mantine/hooks": "8.0.1",
"react": "^18.x || ^19.x",
"react-dom": "^18.x || ^19.x"
}
},
"node_modules/@mantine/store": {
"version": "7.17.2",
"resolved": "https://registry.npmjs.org/@mantine/store/-/store-7.17.2.tgz",
"integrity": "sha512-UoMUYQK/z58hMueCkpDIXc49gPgrVO/zcpb0k+B7MFU51EIUiFzHLxLFBmWrgCAM6rzJORqN8JjyCd/PB9j4aw==",
"version": "8.0.1",
"resolved": "https://registry.npmjs.org/@mantine/store/-/store-8.0.1.tgz",
"integrity": "sha512-3wfUDeiERXJEI+MGgRAbh+9aY35D9oE4UzquLqZh8cIiH5i5g64Y/eJx3PfjHgO5+Zeu6lbgTgL6k4lg4a2SBQ==",
"license": "MIT",
"peerDependencies": {
"react": "^18.x || ^19.x"
@ -990,48 +986,6 @@
"@swc/counter": "^0.1.3"
}
},
"node_modules/@tabler/icons": {
"version": "3.31.0",
"resolved": "https://registry.npmjs.org/@tabler/icons/-/icons-3.31.0.tgz",
"integrity": "sha512-dblAdeKY3+GA1U+Q9eziZ0ooVlZMHsE8dqP0RkwvRtEsAULoKOYaCUOcJ4oW1DjWegdxk++UAt2SlQVnmeHv+g==",
"license": "MIT",
"funding": {
"type": "github",
"url": "https://github.com/sponsors/codecalm"
}
},
"node_modules/@tabler/icons-react": {
"version": "3.31.0",
"resolved": "https://registry.npmjs.org/@tabler/icons-react/-/icons-react-3.31.0.tgz",
"integrity": "sha512-2rrCM5y/VnaVKnORpDdAua9SEGuJKVqPtWxeQ/vUVsgaUx30LDgBZph7/lterXxDY1IKR6NO//HDhWiifXTi3w==",
"license": "MIT",
"dependencies": {
"@tabler/icons": "3.31.0"
},
"funding": {
"type": "github",
"url": "https://github.com/sponsors/codecalm"
},
"peerDependencies": {
"react": ">= 16"
}
},
"node_modules/@tanstack/match-sorter-utils": {
"version": "8.19.4",
"resolved": "https://registry.npmjs.org/@tanstack/match-sorter-utils/-/match-sorter-utils-8.19.4.tgz",
"integrity": "sha512-Wo1iKt2b9OT7d+YGhvEPD3DXvPv2etTusIMhMUoG7fbhmxcXCtIjJDEygy91Y2JFlwGyjqiBPRozme7UD8hoqg==",
"license": "MIT",
"dependencies": {
"remove-accents": "0.5.0"
},
"engines": {
"node": ">=12"
},
"funding": {
"type": "github",
"url": "https://github.com/sponsors/tannerlinsley"
}
},
"node_modules/@tanstack/react-table": {
"version": "8.21.3",
"resolved": "https://registry.npmjs.org/@tanstack/react-table/-/react-table-8.21.3.tgz",
@ -1052,23 +1006,6 @@
"react-dom": ">=16.8"
}
},
"node_modules/@tanstack/react-virtual": {
"version": "3.11.2",
"resolved": "https://registry.npmjs.org/@tanstack/react-virtual/-/react-virtual-3.11.2.tgz",
"integrity": "sha512-OuFzMXPF4+xZgx8UzJha0AieuMihhhaWG0tCqpp6tDzlFwOmNBPYMuLOtMJ1Tr4pXLHmgjcWhG6RlknY2oNTdQ==",
"license": "MIT",
"dependencies": {
"@tanstack/virtual-core": "3.11.2"
},
"funding": {
"type": "github",
"url": "https://github.com/sponsors/tannerlinsley"
},
"peerDependencies": {
"react": "^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0",
"react-dom": "^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0"
}
},
"node_modules/@tanstack/table-core": {
"version": "8.21.3",
"resolved": "https://registry.npmjs.org/@tanstack/table-core/-/table-core-8.21.3.tgz",
@ -1082,22 +1019,6 @@
"url": "https://github.com/sponsors/tannerlinsley"
}
},
"node_modules/@tanstack/virtual-core": {
"version": "3.11.2",
"resolved": "https://registry.npmjs.org/@tanstack/virtual-core/-/virtual-core-3.11.2.tgz",
"integrity": "sha512-vTtpNt7mKCiZ1pwU9hfKPhpdVO2sVzFQsxoVBGtOSHxlrRRzYr8iQ2TlwbAcRYCcEiZ9ECAM8kBzH0v2+VzfKw==",
"license": "MIT",
"funding": {
"type": "github",
"url": "https://github.com/sponsors/tannerlinsley"
}
},
"node_modules/@types/cookie": {
"version": "0.6.0",
"resolved": "https://registry.npmjs.org/@types/cookie/-/cookie-0.6.0.tgz",
"integrity": "sha512-4Kh9a6B2bQciAhf7FSuMRRkUWecJgJu9nPnx3yzpsfXX/c50REIqpHY4C82bXP90qrLtXtkDxTZosYO3UpOwlA==",
"license": "MIT"
},
"node_modules/@types/d3-array": {
"version": "3.2.1",
"resolved": "https://registry.npmjs.org/@types/d3-array/-/d3-array-3.2.1.tgz",
@ -1386,21 +1307,13 @@
"dev": true,
"license": "Python-2.0"
},
"node_modules/asynckit": {
"version": "0.4.0",
"resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz",
"integrity": "sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==",
"license": "MIT"
},
"node_modules/axios": {
"version": "1.8.3",
"resolved": "https://registry.npmjs.org/axios/-/axios-1.8.3.tgz",
"integrity": "sha512-iP4DebzoNlP/YN2dpwCgb8zoCmhtkajzS48JvwmkSkXvPI3DHc7m+XYL5tGnSlJtR6nImXZmdCuN5aP8dh1d8A==",
"node_modules/attr-accept": {
"version": "2.2.5",
"resolved": "https://registry.npmjs.org/attr-accept/-/attr-accept-2.2.5.tgz",
"integrity": "sha512-0bDNnY/u6pPwHDMoF0FieU354oBi0a8rD9FcsLwzcGWbc8KS8KPIi7y+s13OlVY+gMWc/9xEMUgNE6Qm8ZllYQ==",
"license": "MIT",
"dependencies": {
"follow-redirects": "^1.15.6",
"form-data": "^4.0.0",
"proxy-from-env": "^1.1.0"
"engines": {
"node": ">=4"
}
},
"node_modules/babel-plugin-macros": {
@ -1436,19 +1349,6 @@
"concat-map": "0.0.1"
}
},
"node_modules/call-bind-apply-helpers": {
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/call-bind-apply-helpers/-/call-bind-apply-helpers-1.0.2.tgz",
"integrity": "sha512-Sp1ablJ0ivDkSzjcaJdxEunN5/XvksFJ2sMBFfq6x0ryhQV/2b/KwFe21cMpmHtPOSij8K99/wSfoEuTObmuMQ==",
"license": "MIT",
"dependencies": {
"es-errors": "^1.3.0",
"function-bind": "^1.1.2"
},
"engines": {
"node": ">= 0.4"
}
},
"node_modules/callsites": {
"version": "3.1.0",
"resolved": "https://registry.npmjs.org/callsites/-/callsites-3.1.0.tgz",
@ -1510,18 +1410,6 @@
"dev": true,
"license": "MIT"
},
"node_modules/combined-stream": {
"version": "1.0.8",
"resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz",
"integrity": "sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==",
"license": "MIT",
"dependencies": {
"delayed-stream": "~1.0.0"
},
"engines": {
"node": ">= 0.8"
}
},
"node_modules/concat-map": {
"version": "0.0.1",
"resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz",
@ -1756,15 +1644,6 @@
"node": ">=0.10.0"
}
},
"node_modules/delayed-stream": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz",
"integrity": "sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ==",
"license": "MIT",
"engines": {
"node": ">=0.4.0"
}
},
"node_modules/detect-node-es": {
"version": "1.1.0",
"resolved": "https://registry.npmjs.org/detect-node-es/-/detect-node-es-1.1.0.tgz",
@ -1786,20 +1665,6 @@
"resolved": "https://registry.npmjs.org/dom-walk/-/dom-walk-0.1.2.tgz",
"integrity": "sha512-6QvTW9mrGeIegrFXdtQi9pk7O/nSK6lSdXW2eqUspN5LWD7UTji2Fqw5V2YLjBpHEoU9Xl/eUWNpDeZvoyOv2w=="
},
"node_modules/dunder-proto": {
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/dunder-proto/-/dunder-proto-1.0.1.tgz",
"integrity": "sha512-KIN/nDJBQRcXw0MLVhZE9iQHmG68qAVIBg9CqmUYjmQIhgij9U5MFvrqkUL5FbtyyzZuOeOt0zdeRe4UY7ct+A==",
"license": "MIT",
"dependencies": {
"call-bind-apply-helpers": "^1.0.1",
"es-errors": "^1.3.0",
"gopd": "^1.2.0"
},
"engines": {
"node": ">= 0.4"
}
},
"node_modules/error-ex": {
"version": "1.3.2",
"resolved": "https://registry.npmjs.org/error-ex/-/error-ex-1.3.2.tgz",
@ -1809,51 +1674,6 @@
"is-arrayish": "^0.2.1"
}
},
"node_modules/es-define-property": {
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/es-define-property/-/es-define-property-1.0.1.tgz",
"integrity": "sha512-e3nRfgfUZ4rNGL232gUgX06QNyyez04KdjFrF+LTRoOXmrOgFKDg4BCdsjW8EnT69eqdYGmRpJwiPVYNrCaW3g==",
"license": "MIT",
"engines": {
"node": ">= 0.4"
}
},
"node_modules/es-errors": {
"version": "1.3.0",
"resolved": "https://registry.npmjs.org/es-errors/-/es-errors-1.3.0.tgz",
"integrity": "sha512-Zf5H2Kxt2xjTvbJvP2ZWLEICxA6j+hAmMzIlypy4xcBg1vKVnx89Wy0GbS+kf5cwCVFFzdCFh2XSCFNULS6csw==",
"license": "MIT",
"engines": {
"node": ">= 0.4"
}
},
"node_modules/es-object-atoms": {
"version": "1.1.1",
"resolved": "https://registry.npmjs.org/es-object-atoms/-/es-object-atoms-1.1.1.tgz",
"integrity": "sha512-FGgH2h8zKNim9ljj7dankFPcICIK9Cp5bm+c2gQSYePhpaG5+esrLODihIorn+Pe6FGJzWhXQotPv73jTaldXA==",
"license": "MIT",
"dependencies": {
"es-errors": "^1.3.0"
},
"engines": {
"node": ">= 0.4"
}
},
"node_modules/es-set-tostringtag": {
"version": "2.1.0",
"resolved": "https://registry.npmjs.org/es-set-tostringtag/-/es-set-tostringtag-2.1.0.tgz",
"integrity": "sha512-j6vWzfrGVfyXxge+O0x5sh6cvxAog0a/4Rdd2K36zCMV5eJ+/+tOAngRO8cODMNWbVRdVlmGZQL2YS3yR8bIUA==",
"license": "MIT",
"dependencies": {
"es-errors": "^1.3.0",
"get-intrinsic": "^1.2.6",
"has-tostringtag": "^1.0.2",
"hasown": "^2.0.2"
},
"engines": {
"node": ">= 0.4"
}
},
"node_modules/es6-promise": {
"version": "4.2.8",
"resolved": "https://registry.npmjs.org/es6-promise/-/es6-promise-4.2.8.tgz",
@ -2126,6 +1946,21 @@
"dev": true,
"license": "MIT"
},
"node_modules/fdir": {
"version": "6.4.4",
"resolved": "https://registry.npmjs.org/fdir/-/fdir-6.4.4.tgz",
"integrity": "sha512-1NZP+GK4GfuAv3PqKvxQRDMjdSRZjnkq7KfhlNrCNNlZ0ygQFpebfrnfnq/W7fpUnAv9aGWmY1zKx7FYL3gwhg==",
"dev": true,
"license": "MIT",
"peerDependencies": {
"picomatch": "^3 || ^4"
},
"peerDependenciesMeta": {
"picomatch": {
"optional": true
}
}
},
"node_modules/file-entry-cache": {
"version": "8.0.0",
"resolved": "https://registry.npmjs.org/file-entry-cache/-/file-entry-cache-8.0.0.tgz",
@ -2139,6 +1974,18 @@
"node": ">=16.0.0"
}
},
"node_modules/file-selector": {
"version": "2.1.2",
"resolved": "https://registry.npmjs.org/file-selector/-/file-selector-2.1.2.tgz",
"integrity": "sha512-QgXo+mXTe8ljeqUFaX3QVHc5osSItJ/Km+xpocx0aSqWGMSCf6qYs/VnzZgS864Pjn5iceMRFigeAV7AfTlaig==",
"license": "MIT",
"dependencies": {
"tslib": "^2.7.0"
},
"engines": {
"node": ">= 12"
}
},
"node_modules/find-root": {
"version": "1.1.0",
"resolved": "https://registry.npmjs.org/find-root/-/find-root-1.1.0.tgz",
@ -2183,41 +2030,6 @@
"dev": true,
"license": "ISC"
},
"node_modules/follow-redirects": {
"version": "1.15.9",
"resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.9.tgz",
"integrity": "sha512-gew4GsXizNgdoRyqmyfMHyAmXsZDk6mHkSxZFCzW9gwlbtOW44CDtYavM+y+72qD/Vq2l550kMF52DT8fOLJqQ==",
"funding": [
{
"type": "individual",
"url": "https://github.com/sponsors/RubenVerborgh"
}
],
"license": "MIT",
"engines": {
"node": ">=4.0"
},
"peerDependenciesMeta": {
"debug": {
"optional": true
}
}
},
"node_modules/form-data": {
"version": "4.0.2",
"resolved": "https://registry.npmjs.org/form-data/-/form-data-4.0.2.tgz",
"integrity": "sha512-hGfm/slu0ZabnNt4oaRZ6uREyfCj6P4fT/n6A1rGV+Z0VdGXjfOhVUpkn6qVQONHGIFwmveGXyDs75+nr6FM8w==",
"license": "MIT",
"dependencies": {
"asynckit": "^0.4.0",
"combined-stream": "^1.0.8",
"es-set-tostringtag": "^2.1.0",
"mime-types": "^2.1.12"
},
"engines": {
"node": ">= 6"
}
},
"node_modules/formik": {
"version": "2.4.6",
"resolved": "https://registry.npmjs.org/formik/-/formik-2.4.6.tgz",
@ -2243,6 +2055,21 @@
"react": ">=16.8.0"
}
},
"node_modules/fsevents": {
"version": "2.3.3",
"resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.3.tgz",
"integrity": "sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw==",
"dev": true,
"hasInstallScript": true,
"license": "MIT",
"optional": true,
"os": [
"darwin"
],
"engines": {
"node": "^8.16.0 || ^10.6.0 || >=11.0.0"
}
},
"node_modules/function-bind": {
"version": "1.1.2",
"resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.2.tgz",
@ -2252,30 +2079,6 @@
"url": "https://github.com/sponsors/ljharb"
}
},
"node_modules/get-intrinsic": {
"version": "1.3.0",
"resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.3.0.tgz",
"integrity": "sha512-9fSjSaos/fRIVIp+xSJlE6lfwhES7LNtKaCBIamHsjr2na1BiABJPo0mOjjz8GJDURarmCPGqaiVg5mfjb98CQ==",
"license": "MIT",
"dependencies": {
"call-bind-apply-helpers": "^1.0.2",
"es-define-property": "^1.0.1",
"es-errors": "^1.3.0",
"es-object-atoms": "^1.1.1",
"function-bind": "^1.1.2",
"get-proto": "^1.0.1",
"gopd": "^1.2.0",
"has-symbols": "^1.1.0",
"hasown": "^2.0.2",
"math-intrinsics": "^1.1.0"
},
"engines": {
"node": ">= 0.4"
},
"funding": {
"url": "https://github.com/sponsors/ljharb"
}
},
"node_modules/get-nonce": {
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/get-nonce/-/get-nonce-1.0.1.tgz",
@ -2285,19 +2088,6 @@
"node": ">=6"
}
},
"node_modules/get-proto": {
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/get-proto/-/get-proto-1.0.1.tgz",
"integrity": "sha512-sTSfBjoXBp89JvIKIefqw7U2CCebsc74kiY6awiGogKtoSGbgjYE/G/+l9sF3MWFPNc9IcoOC4ODfKHfxFmp0g==",
"license": "MIT",
"dependencies": {
"dunder-proto": "^1.0.1",
"es-object-atoms": "^1.0.0"
},
"engines": {
"node": ">= 0.4"
}
},
"node_modules/glob-parent": {
"version": "6.0.2",
"resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-6.0.2.tgz",
@ -2334,18 +2124,6 @@
"url": "https://github.com/sponsors/sindresorhus"
}
},
"node_modules/gopd": {
"version": "1.2.0",
"resolved": "https://registry.npmjs.org/gopd/-/gopd-1.2.0.tgz",
"integrity": "sha512-ZUKRh6/kUFoAiTAtTYPZJ3hw9wNxx+BIBOijnlG9PnrJsCcSjs1wyyD6vJpaYtgnzDrKYRSqf3OO6Rfa93xsRg==",
"license": "MIT",
"engines": {
"node": ">= 0.4"
},
"funding": {
"url": "https://github.com/sponsors/ljharb"
}
},
"node_modules/has-flag": {
"version": "4.0.0",
"resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz",
@ -2356,33 +2134,6 @@
"node": ">=8"
}
},
"node_modules/has-symbols": {
"version": "1.1.0",
"resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.1.0.tgz",
"integrity": "sha512-1cDNdwJ2Jaohmb3sg4OmKaMBwuC48sYni5HUw2DvsC8LjGTLK9h+eb1X6RyuOHe4hT0ULCW68iomhjUoKUqlPQ==",
"license": "MIT",
"engines": {
"node": ">= 0.4"
},
"funding": {
"url": "https://github.com/sponsors/ljharb"
}
},
"node_modules/has-tostringtag": {
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/has-tostringtag/-/has-tostringtag-1.0.2.tgz",
"integrity": "sha512-NqADB8VjPFLM2V0VvHUewwwsw0ZWBaIdgo+ieHtK3hasLz4qeCRjYcqfB6AQrBggRKppKF8L52/VqdVsO47Dlw==",
"license": "MIT",
"dependencies": {
"has-symbols": "^1.0.3"
},
"engines": {
"node": ">= 0.4"
},
"funding": {
"url": "https://github.com/sponsors/ljharb"
}
},
"node_modules/hasown": {
"version": "2.0.2",
"resolved": "https://registry.npmjs.org/hasown/-/hasown-2.0.2.tgz",
@ -2682,9 +2433,9 @@
}
},
"node_modules/lucide-react": {
"version": "0.479.0",
"resolved": "https://registry.npmjs.org/lucide-react/-/lucide-react-0.479.0.tgz",
"integrity": "sha512-aBhNnveRhorBOK7uA4gDjgaf+YlHMdMhQ/3cupk6exM10hWlEU+2QtWYOfhXhjAsmdb6LeKR+NZnow4UxRRiTQ==",
"version": "0.511.0",
"resolved": "https://registry.npmjs.org/lucide-react/-/lucide-react-0.511.0.tgz",
"integrity": "sha512-VK5a2ydJ7xm8GvBeKLS9mu1pVK6ucef9780JVUjw6bAjJL/QXnd4Y0p7SPeOUMC27YhzNCZvm5d/QX0Tp3rc0w==",
"license": "ISC",
"peerDependencies": {
"react": "^16.5.1 || ^17.0.0 || ^18.0.0 || ^19.0.0"
@ -2701,103 +2452,12 @@
"global": "^4.4.0"
}
},
"node_modules/mantine-react-table": {
"version": "2.0.0-beta.9",
"resolved": "https://registry.npmjs.org/mantine-react-table/-/mantine-react-table-2.0.0-beta.9.tgz",
"integrity": "sha512-ZdfcwebWaPERoDvAuk43VYcBCzamohARVclnbuepT0PHZ0wRcDPMBR+zgaocL+pFy8EXUGwvWTOKNh25ITpjNQ==",
"license": "MIT",
"dependencies": {
"@tanstack/match-sorter-utils": "8.19.4",
"@tanstack/react-table": "8.20.5",
"@tanstack/react-virtual": "3.11.2"
},
"engines": {
"node": ">=16"
},
"funding": {
"type": "github",
"url": "https://github.com/sponsors/kevinvandy"
},
"peerDependencies": {
"@mantine/core": "^7.9",
"@mantine/dates": "^7.9",
"@mantine/hooks": "^7.9",
"@tabler/icons-react": ">=2.23.0",
"clsx": ">=2",
"dayjs": ">=1.11",
"react": ">=18.0",
"react-dom": ">=18.0"
}
},
"node_modules/mantine-react-table/node_modules/@tanstack/react-table": {
"version": "8.20.5",
"resolved": "https://registry.npmjs.org/@tanstack/react-table/-/react-table-8.20.5.tgz",
"integrity": "sha512-WEHopKw3znbUZ61s9i0+i9g8drmDo6asTWbrQh8Us63DAk/M0FkmIqERew6P71HI75ksZ2Pxyuf4vvKh9rAkiA==",
"license": "MIT",
"dependencies": {
"@tanstack/table-core": "8.20.5"
},
"engines": {
"node": ">=12"
},
"funding": {
"type": "github",
"url": "https://github.com/sponsors/tannerlinsley"
},
"peerDependencies": {
"react": ">=16.8",
"react-dom": ">=16.8"
}
},
"node_modules/mantine-react-table/node_modules/@tanstack/table-core": {
"version": "8.20.5",
"resolved": "https://registry.npmjs.org/@tanstack/table-core/-/table-core-8.20.5.tgz",
"integrity": "sha512-P9dF7XbibHph2PFRz8gfBKEXEY/HJPOhym8CHmjF8y3q5mWpKx9xtZapXQUWCgkqvsK0R46Azuz+VaxD4Xl+Tg==",
"license": "MIT",
"engines": {
"node": ">=12"
},
"funding": {
"type": "github",
"url": "https://github.com/sponsors/tannerlinsley"
}
},
"node_modules/math-intrinsics": {
"version": "1.1.0",
"resolved": "https://registry.npmjs.org/math-intrinsics/-/math-intrinsics-1.1.0.tgz",
"integrity": "sha512-/IXtbwEk5HTPyEwyKX6hGkYXxM9nbj64B+ilVJnC/R6B0pH5G4V3b0pVbL7DBj4tkhBAppbQUlf6F6Xl9LHu1g==",
"license": "MIT",
"engines": {
"node": ">= 0.4"
}
},
"node_modules/memoize-one": {
"version": "5.2.1",
"resolved": "https://registry.npmjs.org/memoize-one/-/memoize-one-5.2.1.tgz",
"integrity": "sha512-zYiwtZUcYyXKo/np96AGZAckk+FWWsUdJ3cHGGmld7+AhvcWmQyGCYUh1hc4Q/pkOhb65dQR/pqCyK0cOaHz4Q==",
"license": "MIT"
},
"node_modules/mime-db": {
"version": "1.52.0",
"resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz",
"integrity": "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==",
"license": "MIT",
"engines": {
"node": ">= 0.6"
}
},
"node_modules/mime-types": {
"version": "2.1.35",
"resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz",
"integrity": "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==",
"license": "MIT",
"dependencies": {
"mime-db": "1.52.0"
},
"engines": {
"node": ">= 0.6"
}
},
"node_modules/min-document": {
"version": "2.19.0",
"resolved": "https://registry.npmjs.org/min-document/-/min-document-2.19.0.tgz",
@ -3023,6 +2683,19 @@
"integrity": "sha512-xceH2snhtb5M9liqDsmEw56le376mTZkEX/jEb/RxNFyegNul7eNslCXP9FDj/Lcu0X8KEyMceP2ntpaHrDEVA==",
"license": "ISC"
},
"node_modules/picomatch": {
"version": "4.0.2",
"resolved": "https://registry.npmjs.org/picomatch/-/picomatch-4.0.2.tgz",
"integrity": "sha512-M7BAV6Rlcy5u+m6oPhAPFgJTzAioX/6B0DxyvDlo9l8+T3nLKbrczg2WLUyzd45L8RqfUMyGPzekbMvX2Ldkwg==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">=12"
},
"funding": {
"url": "https://github.com/sponsors/jonschlinkert"
}
},
"node_modules/pkcs7": {
"version": "1.0.4",
"resolved": "https://registry.npmjs.org/pkcs7/-/pkcs7-1.0.4.tgz",
@ -3078,6 +2751,7 @@
"version": "3.5.3",
"resolved": "https://registry.npmjs.org/prettier/-/prettier-3.5.3.tgz",
"integrity": "sha512-QQtaxnoDJeAkDvDKWCLiwIXkTgRhwYDEQCghU9Z6q03iyek/rxRh/2lC3HB7P8sWT2xC/y5JDctPLBIGzHKbhw==",
"dev": true,
"license": "MIT",
"bin": {
"prettier": "bin/prettier.cjs"
@ -3121,12 +2795,6 @@
"integrity": "sha512-SVtmxhRE/CGkn3eZY1T6pC8Nln6Fr/lu1mKSgRud0eC73whjGfoAogbn78LkD8aFL0zz3bAFerKSnOl7NlErBA==",
"license": "MIT"
},
"node_modules/proxy-from-env": {
"version": "1.1.0",
"resolved": "https://registry.npmjs.org/proxy-from-env/-/proxy-from-env-1.1.0.tgz",
"integrity": "sha512-D+zkORCbA9f1tdWRK0RaCR3GPv50cMxcrz4X8k5LTSUD1Dkw47mKJEZQNunItRTkWwgtaUSo1RVFRIG9ZXiFYg==",
"license": "MIT"
},
"node_modules/punycode": {
"version": "2.3.1",
"resolved": "https://registry.npmjs.org/punycode/-/punycode-2.3.1.tgz",
@ -3181,12 +2849,14 @@
"node": ">=6"
}
},
"node_modules/react-dropzone-esm": {
"version": "15.2.0",
"resolved": "https://registry.npmjs.org/react-dropzone-esm/-/react-dropzone-esm-15.2.0.tgz",
"integrity": "sha512-pPwR8xWVL+tFLnbAb8KVH5f6Vtl397tck8dINkZ1cPMxHWH+l9dFmIgRWgbh7V7jbjIcuKXCsVrXbhQz68+dVA==",
"node_modules/react-dropzone": {
"version": "14.3.8",
"resolved": "https://registry.npmjs.org/react-dropzone/-/react-dropzone-14.3.8.tgz",
"integrity": "sha512-sBgODnq+lcA4P296DY4wacOZz3JFpD99fp+hb//iBO2HHnyeZU3FwWyXJ6salNpqQdsZrgMrotuko/BdJMV8Ug==",
"license": "MIT",
"dependencies": {
"attr-accept": "^2.2.4",
"file-selector": "^2.1.0",
"prop-types": "^15.8.1"
},
"engines": {
@ -3288,15 +2958,13 @@
}
},
"node_modules/react-router": {
"version": "7.3.0",
"resolved": "https://registry.npmjs.org/react-router/-/react-router-7.3.0.tgz",
"integrity": "sha512-466f2W7HIWaNXTKM5nHTqNxLrHTyXybm7R0eBlVSt0k/u55tTCDO194OIx/NrYD4TS5SXKTNekXfT37kMKUjgw==",
"version": "7.6.0",
"resolved": "https://registry.npmjs.org/react-router/-/react-router-7.6.0.tgz",
"integrity": "sha512-GGufuHIVCJDbnIAXP3P9Sxzq3UUsddG3rrI3ut1q6m0FI6vxVBF3JoPQ38+W/blslLH4a5Yutp8drkEpXoddGQ==",
"license": "MIT",
"dependencies": {
"@types/cookie": "^0.6.0",
"cookie": "^1.0.1",
"set-cookie-parser": "^2.6.0",
"turbo-stream": "2.4.0"
"set-cookie-parser": "^2.6.0"
},
"engines": {
"node": ">=20.0.0"
@ -3312,12 +2980,12 @@
}
},
"node_modules/react-router-dom": {
"version": "7.3.0",
"resolved": "https://registry.npmjs.org/react-router-dom/-/react-router-dom-7.3.0.tgz",
"integrity": "sha512-z7Q5FTiHGgQfEurX/FBinkOXhWREJIAB2RiU24lvcBa82PxUpwqvs/PAXb9lJyPjTs2jrl6UkLvCZVGJPeNuuQ==",
"version": "7.6.0",
"resolved": "https://registry.npmjs.org/react-router-dom/-/react-router-dom-7.6.0.tgz",
"integrity": "sha512-DYgm6RDEuKdopSyGOWZGtDfSm7Aofb8CCzgkliTjtu/eDuB0gcsv6qdFhhi8HdtmA+KHkt5MfZ5K2PdzjugYsA==",
"license": "MIT",
"dependencies": {
"react-router": "7.3.0"
"react-router": "7.6.0"
},
"engines": {
"node": ">=20.0.0"
@ -3365,9 +3033,9 @@
}
},
"node_modules/react-textarea-autosize": {
"version": "8.5.6",
"resolved": "https://registry.npmjs.org/react-textarea-autosize/-/react-textarea-autosize-8.5.6.tgz",
"integrity": "sha512-aT3ioKXMa8f6zHYGebhbdMD2L00tKeRX1zuVuDx9YQK/JLLRSaSxq3ugECEmUB9z2kvk6bFSIoRHLkkUv0RJiw==",
"version": "8.5.9",
"resolved": "https://registry.npmjs.org/react-textarea-autosize/-/react-textarea-autosize-8.5.9.tgz",
"integrity": "sha512-U1DGlIQN5AwgjTyOEnI1oCcMuEr1pv1qOtklB2l4nyMGbHzWrI0eFsYK0zos2YWqAolJyG0IWJaqWmWj5ETh0A==",
"license": "MIT",
"dependencies": {
"@babel/runtime": "^7.20.13",
@ -3495,12 +3163,6 @@
"integrity": "sha512-dYnhHh0nJoMfnkZs6GmmhFknAGRrLznOu5nc9ML+EJxGvrx6H7teuevqVqCuPcPK//3eDrrjQhehXVx9cnkGdw==",
"license": "MIT"
},
"node_modules/remove-accents": {
"version": "0.5.0",
"resolved": "https://registry.npmjs.org/remove-accents/-/remove-accents-0.5.0.tgz",
"integrity": "sha512-8g3/Otx1eJaVD12e31UbJj1YzdtVvzH85HV7t+9MJYk/u3XmkOUJ5Ys9wQrf9PCPK8+xn4ymzqYCiZl6QWKn+A==",
"license": "MIT"
},
"node_modules/resolve": {
"version": "1.22.10",
"resolved": "https://registry.npmjs.org/resolve/-/resolve-1.22.10.tgz",
@ -3691,6 +3353,23 @@
"integrity": "sha512-lBN9zLN/oAf68o3zNXYrdCt1kP8WsiGW8Oo2ka41b2IM5JL/S1CTyX1rW0mb/zSuJun0ZUrDxx4sqvYS2FWzPA==",
"license": "MIT"
},
"node_modules/tinyglobby": {
"version": "0.2.13",
"resolved": "https://registry.npmjs.org/tinyglobby/-/tinyglobby-0.2.13.tgz",
"integrity": "sha512-mEwzpUgrLySlveBwEVDMKk5B57bhLPYovRfPAXD5gA/98Opn0rCDj3GtLwFvCvH5RK9uPCExUROW5NjDwvqkxw==",
"dev": true,
"license": "MIT",
"dependencies": {
"fdir": "^6.4.4",
"picomatch": "^4.0.2"
},
"engines": {
"node": ">=12.0.0"
},
"funding": {
"url": "https://github.com/sponsors/SuperchupuDev"
}
},
"node_modules/toposort": {
"version": "2.0.2",
"resolved": "https://registry.npmjs.org/toposort/-/toposort-2.0.2.tgz",
@ -3703,12 +3382,6 @@
"integrity": "sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w==",
"license": "0BSD"
},
"node_modules/turbo-stream": {
"version": "2.4.0",
"resolved": "https://registry.npmjs.org/turbo-stream/-/turbo-stream-2.4.0.tgz",
"integrity": "sha512-FHncC10WpBd2eOmGwpmQsWLDoK4cqsA/UT/GqNoaKOQnT8uzhtCbg3EoUDMvqpOSAI0S26mr0rkjzbOO6S3v1g==",
"license": "ISC"
},
"node_modules/type-check": {
"version": "0.4.0",
"resolved": "https://registry.npmjs.org/type-check/-/type-check-0.4.0.tgz",
@ -3780,9 +3453,9 @@
}
},
"node_modules/use-isomorphic-layout-effect": {
"version": "1.2.0",
"resolved": "https://registry.npmjs.org/use-isomorphic-layout-effect/-/use-isomorphic-layout-effect-1.2.0.tgz",
"integrity": "sha512-q6ayo8DWoPZT0VdG4u3D3uxcgONP3Mevx2i2b0434cwWBoL+aelL1DzkXI6w3PhTZzUeR2kaVlZn70iCiseP6w==",
"version": "1.2.1",
"resolved": "https://registry.npmjs.org/use-isomorphic-layout-effect/-/use-isomorphic-layout-effect-1.2.1.tgz",
"integrity": "sha512-tpZZ+EX0gaghDAiFR37hj5MgY6ZN55kLiPkJsKxBMZ6GZdOSPJXiOzPM984oPYZ5AnehYx5WQp1+ME8I/P/pRA==",
"license": "MIT",
"peerDependencies": {
"react": "^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0"
@ -3906,15 +3579,18 @@
}
},
"node_modules/vite": {
"version": "6.2.1",
"resolved": "https://registry.npmjs.org/vite/-/vite-6.2.1.tgz",
"integrity": "sha512-n2GnqDb6XPhlt9B8olZPrgMD/es/Nd1RdChF6CBD/fHW6pUyUTt2sQW2fPRX5GiD9XEa6+8A6A4f2vT6pSsE7Q==",
"version": "6.3.5",
"resolved": "https://registry.npmjs.org/vite/-/vite-6.3.5.tgz",
"integrity": "sha512-cZn6NDFE7wdTpINgs++ZJ4N49W2vRp8LCKrn3Ob1kYNtOo21vfDoaV5GzBfLU4MovSAB8uNRm4jgzVQZ+mBzPQ==",
"dev": true,
"license": "MIT",
"dependencies": {
"esbuild": "^0.25.0",
"fdir": "^6.4.4",
"picomatch": "^4.0.2",
"postcss": "^8.5.3",
"rollup": "^4.30.1"
"rollup": "^4.34.9",
"tinyglobby": "^0.2.13"
},
"bin": {
"vite": "bin/vite.js"
@ -4008,21 +3684,6 @@
"node": ">=0.10.0"
}
},
"node_modules/yaml": {
"version": "2.7.0",
"resolved": "https://registry.npmjs.org/yaml/-/yaml-2.7.0.tgz",
"integrity": "sha512-+hSoy/QHluxmC9kCIJyL/uyFmLmc+e5CFR5Wa+bpIhIj85LVb9ZH2nVnqrHoSvKogwODv0ClqZkmiSSaIH5LTA==",
"dev": true,
"license": "ISC",
"optional": true,
"peer": true,
"bin": {
"yaml": "bin.mjs"
},
"engines": {
"node": ">= 14"
}
},
"node_modules/yocto-queue": {
"version": "0.1.0",
"resolved": "https://registry.npmjs.org/yocto-queue/-/yocto-queue-0.1.0.tgz",

View file

@ -14,25 +14,20 @@
"@dnd-kit/modifiers": "^9.0.0",
"@dnd-kit/sortable": "^10.0.0",
"@dnd-kit/utilities": "^3.2.2",
"@mantine/charts": "^7.17.2",
"@mantine/core": "^7.17.2",
"@mantine/dates": "^7.17.2",
"@mantine/dropzone": "^7.17.2",
"@mantine/form": "^7.17.3",
"@mantine/hooks": "^7.17.2",
"@mantine/notifications": "^7.17.2",
"@tabler/icons-react": "^3.31.0",
"@mantine/charts": "~8.0.1",
"@mantine/core": "~8.0.1",
"@mantine/dates": "~8.0.1",
"@mantine/dropzone": "~8.0.1",
"@mantine/form": "~8.0.1",
"@mantine/hooks": "~8.0.1",
"@mantine/notifications": "~8.0.1",
"@tanstack/react-table": "^8.21.2",
"allotment": "^1.20.3",
"axios": "^1.8.2",
"clsx": "^2.1.1",
"dayjs": "^1.11.13",
"formik": "^2.4.6",
"hls.js": "^1.5.20",
"lucide-react": "^0.479.0",
"mantine-react-table": "^2.0.0-beta.9",
"lucide-react": "^0.511.0",
"mpegts.js": "^1.8.0",
"prettier": "^3.5.3",
"react": "^19.0.0",
"react-dom": "^19.0.0",
"react-draggable": "^4.4.6",
@ -55,6 +50,7 @@
"eslint-plugin-react-hooks": "^5.1.0",
"eslint-plugin-react-refresh": "^0.4.19",
"globals": "^15.15.0",
"prettier": "^3.5.3",
"vite": "^6.2.0"
}
}

View file

@ -14,13 +14,13 @@ import Guide from './pages/Guide';
import Stats from './pages/Stats';
import DVR from './pages/DVR';
import Settings from './pages/Settings';
import Users from './pages/Users';
import useAuthStore from './store/auth';
import FloatingVideo from './components/FloatingVideo';
import { WebsocketProvider } from './WebSocket';
import { Box, AppShell, MantineProvider } from '@mantine/core';
import '@mantine/core/styles.css'; // Ensure Mantine global styles load
import '@mantine/notifications/styles.css';
import 'mantine-react-table/styles.css';
import '@mantine/dropzone/styles.css';
import '@mantine/dates/styles.css';
import './index.css';
@ -75,18 +75,17 @@ const App = () => {
const loggedIn = await initializeAuth();
if (loggedIn) {
await initData();
setIsAuthenticated(true);
} else {
await logout();
}
} catch (error) {
console.error("Auth check failed:", error);
console.error('Auth check failed:', error);
await logout();
}
};
checkAuth();
}, [initializeAuth, initData, setIsAuthenticated, logout]);
}, [initializeAuth, initData, logout]);
return (
<MantineProvider
@ -132,6 +131,7 @@ const App = () => {
<Route path="/guide" element={<Guide />} />
<Route path="/dvr" element={<DVR />} />
<Route path="/stats" element={<Stats />} />
<Route path="/users" element={<Users />} />
<Route path="/settings" element={<Settings />} />
</>
) : (

View file

@ -9,6 +9,7 @@ import useStreamProfilesStore from './store/streamProfiles';
import useSettingsStore from './store/settings';
import { notifications } from '@mantine/notifications';
import useChannelsTableStore from './store/channelsTable';
import useUsersStore from './store/users';
// If needed, you can set a base host or keep it empty if relative requests
const host = import.meta.env.DEV
@ -1084,6 +1085,21 @@ export default class API {
}
}
static async checkSetting(values) {
const { id, ...payload } = values;
try {
const response = await request(`${host}/api/core/settings/check/`, {
method: 'POST',
body: payload,
});
return response;
} catch (e) {
      errorNotification('Failed to check setting', e);
}
}
static async updateSetting(values) {
const { id, ...payload } = values;
@ -1392,4 +1408,59 @@ export default class API {
return null;
}
}
static async me() {
return await request(`${host}/api/accounts/users/me/`);
}
static async getUsers() {
try {
const response = await request(`${host}/api/accounts/users/`);
return response;
} catch (e) {
errorNotification('Failed to fetch users', e);
}
}
static async createUser(body) {
try {
const response = await request(`${host}/api/accounts/users/`, {
method: 'POST',
body,
});
useUsersStore.getState().addUser(response);
return response;
} catch (e) {
      errorNotification('Failed to create user', e);
}
}
static async updateUser(id, body) {
try {
const response = await request(`${host}/api/accounts/users/${id}/`, {
method: 'PATCH',
body,
});
useUsersStore.getState().updateUser(response);
return response;
} catch (e) {
      errorNotification('Failed to update user', e);
}
}
static async deleteUser(id) {
try {
await request(`${host}/api/accounts/users/${id}/`, {
method: 'DELETE',
});
useUsersStore.getState().removeUser(id);
} catch (e) {
errorNotification('Failed to delete user', e);
}
}
}

View file

@ -18,60 +18,60 @@ import useWarningsStore from '../store/warnings';
* @param {string} [props.size='md'] - Size of the modal
*/
const ConfirmationDialog = ({
opened,
onClose,
onConfirm,
title = 'Confirm Action',
message = 'Are you sure you want to proceed?',
confirmLabel = 'Confirm',
cancelLabel = 'Cancel',
actionKey,
onSuppressChange,
size = 'md', // Add default size parameter - md is a medium width
opened,
onClose,
onConfirm,
title = 'Confirm Action',
message = 'Are you sure you want to proceed?',
confirmLabel = 'Confirm',
cancelLabel = 'Cancel',
actionKey,
onSuppressChange,
size = 'md', // Add default size parameter - md is a medium width
}) => {
const suppressWarning = useWarningsStore((s) => s.suppressWarning);
const isWarningSuppressed = useWarningsStore((s) => s.isWarningSuppressed);
const [suppressChecked, setSuppressChecked] = useState(
isWarningSuppressed(actionKey)
);
const suppressWarning = useWarningsStore((s) => s.suppressWarning);
const isWarningSuppressed = useWarningsStore((s) => s.isWarningSuppressed);
const [suppressChecked, setSuppressChecked] = useState(
isWarningSuppressed(actionKey)
);
const handleToggleSuppress = (e) => {
setSuppressChecked(e.currentTarget.checked);
if (onSuppressChange) {
onSuppressChange(e.currentTarget.checked);
}
};
const handleToggleSuppress = (e) => {
setSuppressChecked(e.currentTarget.checked);
if (onSuppressChange) {
onSuppressChange(e.currentTarget.checked);
}
};
const handleConfirm = () => {
if (suppressChecked) {
suppressWarning(actionKey);
}
onConfirm();
};
const handleConfirm = () => {
if (suppressChecked) {
suppressWarning(actionKey);
}
onConfirm();
};
return (
<Modal opened={opened} onClose={onClose} title={title} size={size} centered>
<Box mb={20}>{message}</Box>
return (
<Modal opened={opened} onClose={onClose} title={title} size={size} centered>
<Box mb={20}>{message}</Box>
{actionKey && (
<Checkbox
label="Don't ask me again"
checked={suppressChecked}
onChange={handleToggleSuppress}
mb={20}
/>
)}
{actionKey && (
<Checkbox
label="Don't ask me again"
checked={suppressChecked}
onChange={handleToggleSuppress}
mb={20}
/>
)}
<Group justify="flex-end">
<Button variant="outline" onClick={onClose}>
{cancelLabel}
</Button>
<Button color="red" onClick={handleConfirm}>
{confirmLabel}
</Button>
</Group>
</Modal>
);
<Group justify="flex-end">
<Button variant="outline" onClick={onClose}>
{cancelLabel}
</Button>
<Button color="red" onClick={handleConfirm}>
{confirmLabel}
</Button>
</Group>
</Modal>
);
};
export default ConfirmationDialog;

View file

@ -73,72 +73,109 @@ export default function FloatingVideo() {
console.log("Attempting to play stream:", streamUrl);
try {
// If the browser supports MSE for live playback, initialize mpegts.js
if (mpegts.getFeatureList().mseLivePlayback) {
// Set loading flag
setIsLoading(true);
const player = mpegts.createPlayer({
type: 'mpegts', // MPEG-TS format
url: streamUrl,
isLive: true,
enableWorker: true,
enableStashBuffer: false, // Try disabling stash buffer for live streams
liveBufferLatencyChasing: true,
liveSync: true,
cors: true, // Enable CORS for cross-domain requests
// Add error recovery options
autoCleanupSourceBuffer: true,
autoCleanupMaxBackwardDuration: 10,
autoCleanupMinBackwardDuration: 5,
reuseRedirectedURL: true,
});
player.attachMediaElement(videoRef.current);
// Add events to track loading state
player.on(mpegts.Events.LOADING_COMPLETE, () => {
setIsLoading(false);
});
player.on(mpegts.Events.METADATA_ARRIVED, () => {
setIsLoading(false);
});
// Add error event handler
player.on(mpegts.Events.ERROR, (errorType, errorDetail) => {
setIsLoading(false);
// Filter out aborted errors
if (errorType !== 'NetworkError' || !errorDetail?.includes('aborted')) {
console.error('Player error:', errorType, errorDetail);
setLoadError(`Error: ${errorType}${errorDetail ? ` - ${errorDetail}` : ''}`);
}
});
player.load();
// Don't auto-play until we've loaded properly
player.on(mpegts.Events.MEDIA_INFO, () => {
setIsLoading(false);
try {
player.play().catch(e => {
console.log("Auto-play prevented:", e);
setLoadError("Auto-play was prevented. Click play to start.");
});
} catch (e) {
console.log("Error during play:", e);
setLoadError(`Playback error: ${e.message}`);
}
});
// Store player instance so we can clean up later
playerRef.current = player;
// Check for MSE support first
if (!mpegts.getFeatureList().mseLivePlayback) {
setIsLoading(false);
setLoadError("Your browser doesn't support live video streaming. Please try Chrome or Edge.");
return;
}
// Check for basic codec support
const video = document.createElement('video');
const h264Support = video.canPlayType('video/mp4; codecs="avc1.42E01E"');
const aacSupport = video.canPlayType('audio/mp4; codecs="mp4a.40.2"');
console.log("Browser codec support - H264:", h264Support, "AAC:", aacSupport);
// If the browser supports MSE for live playback, initialize mpegts.js
setIsLoading(true);
const player = mpegts.createPlayer({
type: 'mpegts',
url: streamUrl,
isLive: true,
enableWorker: true,
enableStashBuffer: false,
liveBufferLatencyChasing: true,
liveSync: true,
cors: true,
autoCleanupSourceBuffer: true,
autoCleanupMaxBackwardDuration: 10,
autoCleanupMinBackwardDuration: 5,
reuseRedirectedURL: true,
});
player.attachMediaElement(videoRef.current);
// Add events to track loading state
player.on(mpegts.Events.LOADING_COMPLETE, () => {
setIsLoading(false);
});
player.on(mpegts.Events.METADATA_ARRIVED, () => {
setIsLoading(false);
});
// Enhanced error event handler with codec-specific messages
player.on(mpegts.Events.ERROR, (errorType, errorDetail) => {
setIsLoading(false);
// Filter out aborted errors
if (errorType !== 'NetworkError' || !errorDetail?.includes('aborted')) {
console.error('Player error:', errorType, errorDetail);
// Provide specific error messages based on error type
let errorMessage = `Error: ${errorType}`;
if (errorType === 'MediaError') {
// Try to determine if it's an audio or video codec issue
const errorString = errorDetail?.toLowerCase() || '';
if (errorString.includes('audio') || errorString.includes('ac3') || errorString.includes('ac-3')) {
errorMessage = "Audio codec not supported by your browser. Try Chrome or Edge for better audio codec support.";
} else if (errorString.includes('video') || errorString.includes('h264') || errorString.includes('h.264')) {
errorMessage = "Video codec not supported by your browser. Try Chrome or Edge for better video codec support.";
} else if (errorString.includes('mse')) {
errorMessage = "Your browser doesn't support the codecs used in this stream. Try Chrome or Edge for better compatibility.";
} else {
errorMessage = "Media codec not supported by your browser. This may be due to unsupported audio (AC3) or video codecs. Try Chrome or Edge.";
}
} else if (errorDetail) {
errorMessage += ` - ${errorDetail}`;
}
setLoadError(errorMessage);
}
});
player.load();
// Don't auto-play until we've loaded properly
player.on(mpegts.Events.MEDIA_INFO, () => {
setIsLoading(false);
try {
player.play().catch(e => {
console.log("Auto-play prevented:", e);
setLoadError("Auto-play was prevented. Click play to start.");
});
} catch (e) {
console.log("Error during play:", e);
setLoadError(`Playback error: ${e.message}`);
}
});
// Store player instance so we can clean up later
playerRef.current = player;
} catch (error) {
setIsLoading(false);
setLoadError(`Initialization error: ${error.message}`);
console.error("Error initializing player:", error);
// Provide helpful error message based on the error
if (error.message?.includes('codec') || error.message?.includes('format')) {
setLoadError("Codec not supported by your browser. Please try a different browser (Chrome/Edge recommended).");
} else {
setLoadError(`Initialization error: ${error.message}`);
}
}
// Cleanup when component unmounts or streamUrl changes
@ -191,7 +228,7 @@ export default function FloatingVideo() {
style={{ width: '100%', height: '180px', backgroundColor: '#000' }}
/>
{/* Loading overlay */}
{/* Loading overlay - only show when loading */}
{isLoading && (
<Box
style={{
@ -214,31 +251,22 @@ export default function FloatingVideo() {
</Text>
</Box>
)}
{/* Error message overlay */}
{!isLoading && loadError && (
<Box
style={{
position: 'absolute',
top: 0,
left: 0,
width: '100%',
height: '100%',
backgroundColor: 'rgba(0, 0, 0, 0.7)',
display: 'flex',
alignItems: 'center',
justifyContent: 'center',
zIndex: 5,
padding: '0 10px',
textAlign: 'center',
}}
>
<Text color="red" size="sm">
{loadError}
</Text>
</Box>
)}
</Box>
{/* Error message below video - doesn't block controls */}
{!isLoading && loadError && (
<Box
style={{
padding: '10px',
backgroundColor: '#2d1b2e',
borderTop: '1px solid #444',
}}
>
<Text color="red" size="xs" style={{ textAlign: 'center' }}>
{loadError}
</Text>
</Box>
)}
</div>
</Draggable>
);
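The rewritten player setup above leans on two capability checks: mpegts.js must report MSE live playback, and `canPlayType` gives a rough hint for H.264/AAC support. A minimal sketch of that probe pulled into a helper (the helper name and return shape are illustrative, and `mpegts` is assumed to be imported as it is in this file):

// Summarize browser support for the codecs this player cares about.
function probeCodecSupport() {
  const video = document.createElement('video');
  return {
    // mpegts.js needs Media Source Extensions for live playback
    mseLive: mpegts.getFeatureList().mseLivePlayback,
    // canPlayType returns 'probably', 'maybe', or '' (empty string means no support)
    h264: video.canPlayType('video/mp4; codecs="avc1.42E01E"') !== '',
    aac: video.canPlayType('audio/mp4; codecs="mp4a.40.2"') !== '',
  };
}

// Example: bail out before creating the player when MSE is unavailable.
// const support = probeCodecSupport();
// if (!support.mseLive) { setLoadError("Live streaming is not supported in this browser."); }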

View file

@ -2,13 +2,13 @@
import React, { useEffect, useState } from 'react';
import usePlaylistsStore from '../store/playlists';
import { notifications } from '@mantine/notifications';
import { IconCheck } from '@tabler/icons-react';
import useStreamsStore from '../store/streams';
import useChannelsStore from '../store/channels';
import useEPGsStore from '../store/epgs';
import { Stack, Button, Group } from '@mantine/core';
import API from '../api';
import { useNavigate } from 'react-router-dom';
import { CircleCheck } from 'lucide-react';
export default function M3URefreshNotification() {
const playlists = usePlaylistsStore((s) => s.playlists);
@ -40,7 +40,7 @@ export default function M3URefreshNotification() {
});
// Special handling for pending setup status
if (data.status === "pending_setup") {
if (data.status === 'pending_setup') {
fetchChannelGroups();
fetchPlaylists();
@ -48,7 +48,8 @@ export default function M3URefreshNotification() {
title: `M3U Setup: ${playlist.name}`,
message: (
<Stack>
{data.message || "M3U groups loaded. Please select groups or refresh M3U to complete setup."}
{data.message ||
'M3U groups loaded. Please select groups or refresh M3U to complete setup.'}
<Group grow>
<Button
size="xs"
@ -77,21 +78,21 @@ export default function M3URefreshNotification() {
</Stack>
),
color: 'orange.5',
autoClose: 5000, // Keep visible a bit longer
autoClose: 5000, // Keep visible a bit longer
});
return;
}
// Check for error status FIRST before doing anything else
if (data.status === "error") {
if (data.status === 'error') {
// Only show the error notification if we have a complete task (progress=100)
// or if it's explicitly flagged as an error
if (data.progress === 100) {
notifications.show({
title: `M3U Processing: ${playlist.name}`,
message: `${data.action || 'Processing'} failed: ${data.error || "Unknown error"}`,
message: `${data.action || 'Processing'} failed: ${data.error || 'Unknown error'}`,
color: 'red',
autoClose: 5000, // Keep error visible a bit longer
autoClose: 5000, // Keep error visible a bit longer
});
}
return; // Exit early for any error status
@ -99,7 +100,7 @@ export default function M3URefreshNotification() {
// Check if we already have an error stored for this account, and if so, don't show further notifications
const currentStatus = notificationStatus[data.account];
if (currentStatus && currentStatus.status === "error") {
if (currentStatus && currentStatus.status === 'error') {
// Don't show any other notifications once we've hit an error
return;
}
@ -147,18 +148,18 @@ export default function M3URefreshNotification() {
message,
loading: taskProgress == 0,
autoClose: 2000,
icon: taskProgress == 100 ? <IconCheck /> : null,
icon: taskProgress == 100 ? <CircleCheck /> : null,
});
};
useEffect(() => {
// Reset notificationStatus when playlists change to prevent stale data
if (playlists.length > 0 && Object.keys(notificationStatus).length > 0) {
const validIds = playlists.map(p => p.id);
const validIds = playlists.map((p) => p.id);
const currentIds = Object.keys(notificationStatus).map(Number);
// If we have notification statuses for playlists that no longer exist, reset the state
if (!currentIds.every(id => validIds.includes(id))) {
if (!currentIds.every((id) => validIds.includes(id))) {
setNotificationStatus({});
}
}
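For reference, a stripped-down sketch of the progress-to-toast mapping this component uses, assuming Mantine's `notifications.show` and the `CircleCheck` icon are imported as they are in this file:

// Show a loading toast at 0%, a plain toast mid-way, and a check-marked toast at 100%.
const notifyProgress = (title, message, taskProgress) => {
  notifications.show({
    title,
    message,
    loading: taskProgress === 0, // spinner while no progress has been reported yet
    icon: taskProgress === 100 ? <CircleCheck /> : null, // check mark once complete
    autoClose: 2000,
  });
};

// e.g. notifyProgress('M3U Processing: My Playlist', 'Parsing groups...', 100);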

View file

@ -1,11 +0,0 @@
// ...existing imports...
const menuItems = [
// existing items go here,
{
key: 'proxy',
label: 'Proxy Manager',
icon: <ApiOutlined />,
path: '/proxy',
},
];

View file

@ -1,82 +0,0 @@
import React, { useState } from 'react';
import { Button, Form, Input, Select, message } from 'antd';
import axios from 'axios';
const { Option } = Select;
const ProxyManager = () => {
const [form] = Form.useForm();
const [loading, setLoading] = useState(false);
const handleSubmit = async (values) => {
setLoading(true);
try {
const { action, ...data } = values;
await axios.post(`/proxy/api/proxy/${action}/`, data);
message.success(`Proxy ${action} successful`);
form.resetFields();
} catch (error) {
message.error(error.response?.data?.error || 'An error occurred');
} finally {
setLoading(false);
}
};
return (
<div className="proxy-manager">
<h2>Proxy Manager</h2>
<Form form={form} onFinish={handleSubmit} layout="vertical">
<Form.Item
name="type"
label="Proxy Type"
rules={[{ required: true }]}
>
<Select>
<Option value="hls">HLS</Option>
<Option value="ts">TS</Option>
</Select>
</Form.Item>
<Form.Item
name="channel"
label="Channel ID"
rules={[{ required: true }]}
>
<Input />
</Form.Item>
<Form.Item
name="url"
label="Stream URL"
rules={[{ required: true, type: 'url' }]}
>
<Input />
</Form.Item>
<Form.Item>
<Button.Group>
<Button
type="primary"
onClick={() => form.submit()}
loading={loading}
>
Start Proxy
</Button>
<Button
danger
onClick={() => {
form.setFieldsValue({ action: 'stop' });
form.submit();
}}
loading={loading}
>
Stop Proxy
</Button>
</Button.Group>
</Form.Item>
</Form>
</div>
);
};
export default ProxyManager;

View file

@ -10,6 +10,9 @@ import {
Copy,
ChartLine,
Video,
Ellipsis,
LogOut,
User,
} from 'lucide-react';
import {
Avatar,
@ -21,6 +24,7 @@ import {
UnstyledButton,
TextInput,
ActionIcon,
Menu,
} from '@mantine/core';
import logo from '../images/logo.png';
import useChannelsStore from '../store/channels';
@ -28,6 +32,8 @@ import './sidebar.css';
import useSettingsStore from '../store/settings';
import useAuthStore from '../store/auth'; // Add this import
import API from '../api';
import { USER_LEVELS } from '../constants';
import UserForm from './forms/User';
const NavLink = ({ item, isActive, collapsed }) => {
return (
@ -63,11 +69,66 @@ const NavLink = ({ item, isActive, collapsed }) => {
const Sidebar = ({ collapsed, toggleDrawer, drawerWidth, miniDrawerWidth }) => {
const location = useLocation();
const channels = useChannelsStore((s) => s.channels);
const environment = useSettingsStore((s) => s.environment);
const isAuthenticated = useAuthStore((s) => s.isAuthenticated);
const authUser = useAuthStore((s) => s.user);
const logout = useAuthStore((s) => s.logout);
const publicIPRef = useRef(null);
const [appVersion, setAppVersion] = useState({ version: '', timestamp: null });
const [appVersion, setAppVersion] = useState({
version: '',
timestamp: null,
});
const [userFormOpen, setUserFormOpen] = useState(false);
const closeUserForm = () => setUserFormOpen(false);
// Navigation Items
const navItems =
authUser && authUser.user_level == USER_LEVELS.ADMIN
? [
{
label: 'Channels',
icon: <ListOrdered size={20} />,
path: '/channels',
badge: `(${Object.keys(channels).length})`,
},
{
label: 'M3U & EPG Manager',
icon: <Play size={20} />,
path: '/sources',
},
{ label: 'TV Guide', icon: <LayoutGrid size={20} />, path: '/guide' },
{ label: 'DVR', icon: <Video size={20} />, path: '/dvr' },
{ label: 'Stats', icon: <ChartLine size={20} />, path: '/stats' },
{
label: 'Users',
icon: <User size={20} />,
path: '/users',
},
{
label: 'Settings',
icon: <LucideSettings size={20} />,
path: '/settings',
},
]
: [
{
label: 'Channels',
icon: <ListOrdered size={20} />,
path: '/channels',
badge: `(${Object.keys(channels).length})`,
},
{ label: 'TV Guide', icon: <LayoutGrid size={20} />, path: '/guide' },
{
label: 'Settings',
icon: <LucideSettings size={20} />,
path: '/settings',
},
];
// Fetch environment settings including version on component mount
useEffect(() => {
@ -99,24 +160,6 @@ const Sidebar = ({ collapsed, toggleDrawer, drawerWidth, miniDrawerWidth }) => {
fetchVersion();
}, []);
// Navigation Items
const navItems = [
{
label: 'Channels',
icon: <ListOrdered size={20} />,
path: '/channels',
badge: `(${Object.keys(channels).length})`,
},
{ label: 'M3U & EPG Manager', icon: <Play size={20} />, path: '/sources' },
{ label: 'TV Guide', icon: <LayoutGrid size={20} />, path: '/guide' },
{ label: 'DVR', icon: <Video size={20} />, path: '/dvr' },
{ label: 'Stats', icon: <ChartLine size={20} />, path: '/stats' },
{
label: 'Settings',
icon: <LucideSettings size={20} />,
path: '/settings',
},
];
const copyPublicIP = async () => {
try {
@ -135,6 +178,11 @@ const Sidebar = ({ collapsed, toggleDrawer, drawerWidth, miniDrawerWidth }) => {
}
};
const onLogout = () => {
logout();
window.location.reload();
};
return (
<AppShell.Navbar
width={{ base: collapsed ? miniDrawerWidth : drawerWidth }}
@ -243,7 +291,7 @@ const Sidebar = ({ collapsed, toggleDrawer, drawerWidth, miniDrawerWidth }) => {
)}
<Avatar src="https://via.placeholder.com/40" radius="xl" />
{!collapsed && (
{!collapsed && authUser && (
<Group
style={{
flex: 1,
@ -251,12 +299,13 @@ const Sidebar = ({ collapsed, toggleDrawer, drawerWidth, miniDrawerWidth }) => {
whiteSpace: 'nowrap',
}}
>
<Text size="sm" color="white">
John Doe
</Text>
<Text size="sm" color="white">
</Text>
<UnstyledButton onClick={() => setUserFormOpen(true)}>
{authUser.username}
</UnstyledButton>
<ActionIcon variant="transparent" color="white" size="sm">
<LogOut onClick={logout} />
</ActionIcon>
</Group>
)}
</Group>
@ -270,6 +319,8 @@ const Sidebar = ({ collapsed, toggleDrawer, drawerWidth, miniDrawerWidth }) => {
{appVersion?.timestamp ? `-${appVersion.timestamp}` : ''}
</Text>
)}
<UserForm user={authUser} isOpen={userFormOpen} onClose={closeUserForm} />
</AppShell.Navbar>
);
};

View file

@ -0,0 +1,88 @@
import React, { useState, useEffect, useRef } from 'react';
import API from '../../api';
import {
Button,
Modal,
Text,
Group,
Flex,
useMantineTheme,
NumberInput,
} from '@mantine/core';
import { ListOrdered } from 'lucide-react';
import { useForm } from '@mantine/form';
import { notifications } from '@mantine/notifications';
const AssignChannelNumbers = ({ channelIds, isOpen, onClose }) => {
const theme = useMantineTheme();
const form = useForm({
mode: 'uncontrolled',
initialValues: {
starting_number: 1,
},
});
const onSubmit = async () => {
const { starting_number } = form.getValues();
try {
const result = await API.assignChannelNumbers(
channelIds,
starting_number
);
notifications.show({
title: result.message || 'Channels assigned',
color: 'green.5',
});
API.requeryChannels();
onClose();
} catch (err) {
console.error(err);
notifications.show({
title: 'Failed to assign channels',
color: 'red.5',
});
}
};
if (!isOpen) {
return <></>;
}
return (
<Modal
opened={isOpen}
onClose={onClose}
size="xs"
title={
<Group gap="5">
<ListOrdered size="20" />
<Text>Assign Channel #s</Text>
</Group>
}
styles={{ content: { '--mantine-color-body': '#27272A' } }}
>
<form onSubmit={form.onSubmit(onSubmit)}>
<NumberInput
placeholder="Starting #"
mb="xs"
size="xs"
{...form.getInputProps('starting_number')}
key={form.key('starting_number')}
/>
<Flex mih={50} gap="xs" justify="flex-end" align="flex-end">
<Button type="submit" variant="default" disabled={form.submitting}>
Submit
</Button>
</Flex>
</form>
</Modal>
);
};
export default AssignChannelNumbers;
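A small usage sketch of the submit path above, assuming `API.assignChannelNumbers(channelIds, startingNumber)` resolves to an object that may carry a `message` (which is how the form treats it):

// Hypothetical standalone call outside the modal:
async function renumberSelection(channelIds, startingNumber = 1) {
  try {
    const result = await API.assignChannelNumbers(channelIds, startingNumber);
    notifications.show({ title: result.message || 'Channels assigned', color: 'green.5' });
    await API.requeryChannels(); // refresh the table so the new numbers show up
  } catch (err) {
    console.error(err);
    notifications.show({ title: 'Failed to assign channels', color: 'red.5' });
  }
}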

View file

@ -5,7 +5,6 @@ import useChannelsStore from '../../store/channels';
import API from '../../api';
import useStreamProfilesStore from '../../store/streamProfiles';
import useStreamsStore from '../../store/streams';
import { MantineReactTable, useMantineReactTable } from 'mantine-react-table';
import ChannelGroupForm from './ChannelGroup';
import usePlaylistsStore from '../../store/playlists';
import logo from '../../images/logo.png';
@ -36,6 +35,7 @@ import { ListOrdered, SquarePlus, SquareX, X } from 'lucide-react';
import useEPGsStore from '../../store/epgs';
import { Dropzone } from '@mantine/dropzone';
import { FixedSizeList as List } from 'react-window';
import { USER_LEVELS, USER_LEVEL_LABELS } from '../../constants';
const ChannelForm = ({ channel = null, isOpen, onClose }) => {
const theme = useMantineTheme();
@ -94,13 +94,17 @@ const ChannelForm = ({ channel = null, isOpen, onClose }) => {
const formik = useFormik({
initialValues: {
name: '',
channel_number: '', // Change from 0 to empty string for consistency
channel_group_id: Object.keys(channelGroups).length > 0 ? Object.keys(channelGroups)[0] : '',
channel_number: '', // Change from 0 to empty string for consistency
channel_group_id:
Object.keys(channelGroups).length > 0
? Object.keys(channelGroups)[0]
: '',
stream_profile_id: '0',
tvg_id: '',
tvc_guide_stationid: '',
epg_data_id: '',
logo_id: '',
user_level: '0',
},
validationSchema: Yup.object({
name: Yup.string().required('Name is required'),
@ -124,7 +128,8 @@ const ChannelForm = ({ channel = null, isOpen, onClose }) => {
formattedValues.tvg_id = formattedValues.tvg_id || null;
// Ensure tvc_guide_stationid is properly included (no empty strings)
formattedValues.tvc_guide_stationid = formattedValues.tvc_guide_stationid || null;
formattedValues.tvc_guide_stationid =
formattedValues.tvc_guide_stationid || null;
if (channel) {
// If there's an EPG to set, use our enhanced endpoint
@ -183,7 +188,8 @@ const ChannelForm = ({ channel = null, isOpen, onClose }) => {
formik.setValues({
name: channel.name || '',
channel_number: channel.channel_number !== null ? channel.channel_number : '',
channel_number:
channel.channel_number !== null ? channel.channel_number : '',
channel_group_id: channel.channel_group_id
? `${channel.channel_group_id}`
: '',
@ -194,6 +200,7 @@ const ChannelForm = ({ channel = null, isOpen, onClose }) => {
tvc_guide_stationid: channel.tvc_guide_stationid || '',
epg_data_id: channel.epg_data_id ?? '',
logo_id: channel.logo_id ? `${channel.logo_id}` : '',
user_level: `${channel.user_level}`,
});
setChannelStreams(channel.streams || []);
@ -216,134 +223,6 @@ const ChannelForm = ({ channel = null, isOpen, onClose }) => {
);
};
// const activeStreamsTable = useMantineReactTable({
// data: channelStreams,
// columns: useMemo(
// () => [
// {
// header: 'Name',
// accessorKey: 'name',
// Cell: ({ cell }) => (
// <div
// style={{
// whiteSpace: 'nowrap',
// overflow: 'hidden',
// textOverflow: 'ellipsis',
// }}
// >
// {cell.getValue()}
// </div>
// ),
// },
// {
// header: 'M3U',
// accessorKey: 'group_name',
// Cell: ({ cell }) => (
// <div
// style={{
// whiteSpace: 'nowrap',
// overflow: 'hidden',
// textOverflow: 'ellipsis',
// }}
// >
// {cell.getValue()}
// </div>
// ),
// },
// ],
// []
// ),
// enableSorting: false,
// enableBottomToolbar: false,
// enableTopToolbar: false,
// columnFilterDisplayMode: 'popover',
// enablePagination: false,
// enableRowVirtualization: true,
// enableRowOrdering: true,
// rowVirtualizerOptions: { overscan: 5 }, //optionally customize the row virtualizer
// initialState: {
// density: 'compact',
// },
// enableRowActions: true,
// positionActionsColumn: 'last',
// renderRowActions: ({ row }) => (
// <>
// <IconButton
// size="small" // Makes the button smaller
// color="error" // Red color for delete actions
// onClick={() => removeStream(row.original)}
// >
// <RemoveIcon fontSize="small" /> {/* Small icon size */}
// </IconButton>
// </>
// ),
// mantineTableContainerProps: {
// style: {
// height: '200px',
// },
// },
// mantineRowDragHandleProps: ({ table }) => ({
// onDragEnd: () => {
// const { draggingRow, hoveredRow } = table.getState();
// if (hoveredRow && draggingRow) {
// channelStreams.splice(
// hoveredRow.index,
// 0,
// channelStreams.splice(draggingRow.index, 1)[0]
// );
// setChannelStreams([...channelStreams]);
// }
// },
// }),
// });
// const availableStreamsTable = useMantineReactTable({
// data: streams,
// columns: useMemo(
// () => [
// {
// header: 'Name',
// accessorKey: 'name',
// },
// {
// header: 'M3U',
// accessorFn: (row) =>
// playlists.find((playlist) => playlist.id === row.m3u_account)?.name,
// },
// ],
// []
// ),
// enableBottomToolbar: false,
// enableTopToolbar: false,
// columnFilterDisplayMode: 'popover',
// enablePagination: false,
// enableRowVirtualization: true,
// rowVirtualizerOptions: { overscan: 5 }, //optionally customize the row virtualizer
// initialState: {
// density: 'compact',
// },
// enableRowActions: true,
// renderRowActions: ({ row }) => (
// <>
// <IconButton
// size="small" // Makes the button smaller
// color="success" // Red color for delete actions
// onClick={() => addStream(row.original)}
// >
// <AddIcon fontSize="small" /> {/* Small icon size */}
// </IconButton>
// </>
// ),
// positionActionsColumn: 'last',
// mantineTableContainerProps: {
// style: {
// height: '200px',
// },
// },
// });
// Update the handler for when channel group modal is closed
const handleChannelGroupModalClose = (newGroup) => {
setChannelGroupModalOpen(false);
@ -353,7 +232,7 @@ const ChannelForm = ({ channel = null, isOpen, onClose }) => {
// Preserve all current form values while updating just the channel_group_id
formik.setValues({
...formik.values,
channel_group_id: `${newGroup.id}`
channel_group_id: `${newGroup.id}`,
});
}
};
@ -542,6 +421,23 @@ const ChannelForm = ({ channel = null, isOpen, onClose }) => {
)}
size="xs"
/>
<Select
label="User Level Access"
data={Object.entries(USER_LEVELS).map(([label, value]) => {
return {
label: USER_LEVEL_LABELS[value],
value: `${value}`,
};
})}
value={formik.values.user_level}
onChange={(value) => {
formik.setFieldValue('user_level', value);
}}
error={
formik.errors.user_level ? formik.touched.user_level : ''
}
/>
</Stack>
<Divider size="sm" orientation="vertical" />
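The `Object.entries(USER_LEVELS)` mapping used for the User Level Access select only works if the constants module exposes a name-to-number map plus a parallel number-to-label map. A hypothetical shape for `../../constants` that satisfies the code above (the actual names and values are an assumption, not taken from this diff):

// Assumed shape of src/constants (values are illustrative only):
export const USER_LEVELS = {
  STREAMER: 0,
  STANDARD: 1,
  ADMIN: 10,
};

export const USER_LEVEL_LABELS = {
  [USER_LEVELS.STREAMER]: 'Streamer',
  [USER_LEVELS.STANDARD]: 'Standard',
  [USER_LEVELS.ADMIN]: 'Admin',
};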
@ -667,9 +563,9 @@ const ChannelForm = ({ channel = null, isOpen, onClose }) => {
: ''
}
size="xs"
step={0.1} // Add step prop to allow decimal inputs
precision={1} // Specify decimal precision
removeTrailingZeros // Optional: remove trailing zeros for cleaner display
step={0.1} // Add step prop to allow decimal inputs
precision={1} // Specify decimal precision
removeTrailingZeros // Optional: remove trailing zeros for cleaner display
/>
<TextInput
@ -688,7 +584,11 @@ const ChannelForm = ({ channel = null, isOpen, onClose }) => {
label="Gracenote StationId"
value={formik.values.tvc_guide_stationid}
onChange={formik.handleChange}
error={formik.errors.tvc_guide_stationid ? formik.touched.tvc_guide_stationid : ''}
error={
formik.errors.tvc_guide_stationid
? formik.touched.tvc_guide_stationid
: ''
}
size="xs"
/>
@ -810,18 +710,6 @@ const ChannelForm = ({ channel = null, isOpen, onClose }) => {
</Stack>
</Group>
{/* <Grid gap={2}>
<Grid.Col span={6}>
<Typography>Active Streams</Typography>
<MantineReactTable table={activeStreamsTable} />
</Grid.Col>
<Grid.Col span={6}>
<Typography>Available Streams</Typography>
<MantineReactTable table={availableStreamsTable} />
</Grid.Col>
</Grid> */}
<Flex mih={50} gap="xs" justify="flex-end" align="flex-end">
<Button
type="submit"

View file

@ -0,0 +1,294 @@
import React, { useState, useEffect, useRef } from 'react';
import useChannelsStore from '../../store/channels';
import API from '../../api';
import useStreamProfilesStore from '../../store/streamProfiles';
import ChannelGroupForm from './ChannelGroup';
import {
Box,
Button,
Modal,
TextInput,
Text,
Group,
ActionIcon,
Flex,
Select,
Stack,
useMantineTheme,
Popover,
ScrollArea,
Tooltip,
UnstyledButton,
Center,
} from '@mantine/core';
import { ListOrdered, SquarePlus, SquareX, X } from 'lucide-react';
import { FixedSizeList as List } from 'react-window';
import { useForm } from '@mantine/form';
import { USER_LEVELS, USER_LEVEL_LABELS } from '../../constants';
const ChannelBatchForm = ({ channelIds, isOpen, onClose }) => {
const theme = useMantineTheme();
const groupListRef = useRef(null);
const channelGroups = useChannelsStore((s) => s.channelGroups);
const streamProfiles = useStreamProfilesStore((s) => s.profiles);
const [channelGroupModelOpen, setChannelGroupModalOpen] = useState(false);
const [selectedChannelGroup, setSelectedChannelGroup] = useState('');
const [groupPopoverOpened, setGroupPopoverOpened] = useState(false);
const [groupFilter, setGroupFilter] = useState('');
const groupOptions = Object.values(channelGroups);
const form = useForm({
mode: 'uncontrolled',
initialValues: {
channel_group: '',
stream_profile_id: '0',
user_level: '-1',
},
});
const onSubmit = async () => {
const values = {
...form.getValues(),
channel_group_id: selectedChannelGroup,
};
if (!values.stream_profile_id || values.stream_profile_id === '0') {
values.stream_profile_id = null;
}
if (!values.channel_group_id) {
delete values.channel_group_id;
}
if (values.user_level == '-1') {
delete values.user_level;
}
await API.batchUpdateChannels({
ids: channelIds,
values,
});
};
// useEffect(() => {
// // const sameStreamProfile = channels.every(
// // (channel) => channel.stream_profile_id == channels[0].stream_profile_id
// // );
// // const sameChannelGroup = channels.every(
// // (channel) => channel.channel_group_id == channels[0].channel_group_id
// // );
// // const sameUserLevel = channels.every(
// // (channel) => channel.user_level == channels[0].user_level
// // );
// // form.setValues({
// // ...(sameStreamProfile && {
// // stream_profile_id: `${channels[0].stream_profile_id}`,
// // }),
// // ...(sameChannelGroup && {
// // channel_group_id: `${channels[0].channel_group_id}`,
// // }),
// // ...(sameUserLevel && {
// // user_level: `${channels[0].user_level}`,
// // }),
// // });
// }, [channelIds, streamProfiles, channelGroups]);
const handleChannelGroupModalClose = (newGroup) => {
setChannelGroupModalOpen(false);
if (newGroup && newGroup.id) {
setSelectedChannelGroup(newGroup.id);
form.setValues({
channel_group: `${newGroup.name}`,
});
}
};
const filteredGroups = groupOptions.filter((group) =>
group.name.toLowerCase().includes(groupFilter.toLowerCase())
);
if (!isOpen) {
return <></>;
}
return (
<>
<Modal
opened={isOpen}
onClose={onClose}
size="xs"
title={
<Group gap="5">
<ListOrdered size="20" />
<Text>Channels</Text>
</Group>
}
styles={{ content: { '--mantine-color-body': '#27272A' } }}
>
<form onSubmit={form.onSubmit(onSubmit)}>
<Group justify="space-between" align="top">
<Stack gap="5" style={{ flex: 1 }}>
<Popover
opened={groupPopoverOpened}
onChange={setGroupPopoverOpened}
// position="bottom-start"
withArrow
>
<Popover.Target>
<Group style={{ width: '100%' }} align="flex-end">
<TextInput
id="channel_group"
name="channel_group"
label="Channel Group"
readOnly
{...form.getInputProps('channel_group')}
key={form.key('channel_group')}
onClick={() => setGroupPopoverOpened(true)}
size="xs"
style={{ flex: 1 }}
/>
<ActionIcon
color={theme.tailwind.green[5]}
onClick={() => setChannelGroupModalOpen(true)}
title="Create new group"
size="small"
variant="transparent"
style={{ marginBottom: 5 }}
>
<SquarePlus size="20" />
</ActionIcon>
</Group>
</Popover.Target>
<Popover.Dropdown onMouseDown={(e) => e.stopPropagation()}>
<Group style={{ width: '100%' }} spacing="xs">
<TextInput
placeholder="Filter"
value={groupFilter}
onChange={(event) =>
setGroupFilter(event.currentTarget.value)
}
mb="xs"
size="xs"
style={{ flex: 1 }}
/>
<ActionIcon
color={theme.tailwind.green[5]}
onClick={() => setChannelGroupModalOpen(true)}
title="Create new group"
size="small"
variant="transparent"
style={{ marginBottom: 5 }}
>
<SquarePlus size="20" />
</ActionIcon>
</Group>
<ScrollArea style={{ height: 200 }}>
<List
height={200} // Set max height for visible items
itemCount={filteredGroups.length}
itemSize={20} // Adjust row height for each item
width={200}
ref={groupListRef}
>
{({ index, style }) => (
<Box
style={{ ...style, height: 20, overflow: 'hidden' }}
>
<Tooltip
openDelay={500}
label={filteredGroups[index].name}
size="xs"
>
<UnstyledButton
onClick={() => {
setSelectedChannelGroup(
filteredGroups[index].id
);
form.setValues({
channel_group: filteredGroups[index].name,
});
setGroupPopoverOpened(false);
}}
>
<Text
size="xs"
style={{
whiteSpace: 'nowrap',
overflow: 'hidden',
textOverflow: 'ellipsis',
}}
>
{filteredGroups[index].name}
</Text>
</UnstyledButton>
</Tooltip>
</Box>
)}
</List>
</ScrollArea>
</Popover.Dropdown>
</Popover>
<Select
id="stream_profile_id"
label="Stream Profile"
name="stream_profile_id"
{...form.getInputProps('stream_profile_id')}
key={form.key('stream_profile_id')}
data={[{ value: '0', label: '(use default)' }].concat(
streamProfiles.map((option) => ({
value: `${option.id}`,
label: option.name,
}))
)}
size="xs"
/>
<Select
size="xs"
label="User Level Access"
{...form.getInputProps('user_level')}
key={form.key('user_level')}
data={[
{
value: '-1',
label: '(no change)',
},
].concat(
Object.entries(USER_LEVELS).map(([label, value]) => {
return {
label: USER_LEVEL_LABELS[value],
value: `${value}`,
};
})
)}
/>
</Stack>
</Group>
<Flex mih={50} gap="xs" justify="flex-end" align="flex-end">
<Button type="submit" variant="default" disabled={form.submitting}>
Submit
</Button>
</Flex>
</form>
</Modal>
<ChannelGroupForm
isOpen={channelGroupModelOpen}
onClose={handleChannelGroupModalClose}
/>
</>
);
};
export default ChannelBatchForm;
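A short sketch of the batch update this form submits, assuming `API.batchUpdateChannels` takes `{ ids, values }` and that keys omitted from `values` are left untouched server-side (which is why the form deletes the '(no change)' selections before sending):

// Hypothetical direct call: move three channels into group 7 without touching anything else.
const moveToGroup = async (channelIds, groupId) =>
  API.batchUpdateChannels({
    ids: channelIds,
    values: {
      channel_group_id: groupId,
      // stream_profile_id and user_level omitted, so they are assumed to stay as-is
    },
  });

// e.g. moveToGroup([12, 13, 14], 7);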

View file

@ -0,0 +1,703 @@
import React, { useState, useEffect, useRef } from 'react';
import { useFormik } from 'formik';
import * as Yup from 'yup';
import useChannelsStore from '../../store/channels';
import API from '../../api';
import useStreamProfilesStore from '../../store/streamProfiles';
import useStreamsStore from '../../store/streams';
import ChannelGroupForm from './ChannelGroup';
import usePlaylistsStore from '../../store/playlists';
import logo from '../../images/logo.png';
import {
Box,
Button,
Modal,
TextInput,
NativeSelect,
Text,
Group,
ActionIcon,
Center,
Grid,
Flex,
Select,
Divider,
Stack,
useMantineTheme,
Popover,
ScrollArea,
Tooltip,
NumberInput,
Image,
UnstyledButton,
} from '@mantine/core';
import { ListOrdered, SquarePlus, SquareX, X } from 'lucide-react';
import useEPGsStore from '../../store/epgs';
import { Dropzone } from '@mantine/dropzone';
import { FixedSizeList as List } from 'react-window';
const ChannelsForm = ({ channel = null, isOpen, onClose }) => {
const theme = useMantineTheme();
const listRef = useRef(null);
const logoListRef = useRef(null);
const groupListRef = useRef(null);
const channelGroups = useChannelsStore((s) => s.channelGroups);
const logos = useChannelsStore((s) => s.logos);
const fetchLogos = useChannelsStore((s) => s.fetchLogos);
const streams = useStreamsStore((state) => state.streams);
const streamProfiles = useStreamProfilesStore((s) => s.profiles);
const playlists = usePlaylistsStore((s) => s.playlists);
const epgs = useEPGsStore((s) => s.epgs);
const tvgs = useEPGsStore((s) => s.tvgs);
const tvgsById = useEPGsStore((s) => s.tvgsById);
const [logoPreview, setLogoPreview] = useState(null);
const [channelStreams, setChannelStreams] = useState([]);
const [channelGroupModelOpen, setChannelGroupModalOpen] = useState(false);
const [epgPopoverOpened, setEpgPopoverOpened] = useState(false);
const [logoPopoverOpened, setLogoPopoverOpened] = useState(false);
const [selectedEPG, setSelectedEPG] = useState('');
const [tvgFilter, setTvgFilter] = useState('');
const [logoFilter, setLogoFilter] = useState('');
const [logoOptions, setLogoOptions] = useState([]);
const [groupPopoverOpened, setGroupPopoverOpened] = useState(false);
const [groupFilter, setGroupFilter] = useState('');
const groupOptions = Object.values(channelGroups);
const addStream = (stream) => {
const streamSet = new Set(channelStreams);
streamSet.add(stream);
setChannelStreams(Array.from(streamSet));
};
const removeStream = (stream) => {
const streamSet = new Set(channelStreams);
streamSet.delete(stream);
setChannelStreams(Array.from(streamSet));
};
const handleLogoChange = async (files) => {
if (files.length === 1) {
const retval = await API.uploadLogo(files[0]);
await fetchLogos();
setLogoPreview(retval.cache_url);
formik.setFieldValue('logo_id', retval.id);
} else {
setLogoPreview(null);
}
};
const formik = useFormik({
initialValues: {
name: '',
channel_number: '', // Change from 0 to empty string for consistency
channel_group_id:
Object.keys(channelGroups).length > 0
? Object.keys(channelGroups)[0]
: '',
stream_profile_id: '0',
tvg_id: '',
tvc_guide_stationid: '',
epg_data_id: '',
logo_id: '',
},
validationSchema: Yup.object({
name: Yup.string().required('Name is required'),
channel_group_id: Yup.string().required('Channel group is required'),
}),
onSubmit: async (values, { setSubmitting }) => {
let response;
try {
const formattedValues = { ...values };
// Convert empty or "0" stream_profile_id to null for the API
if (
!formattedValues.stream_profile_id ||
formattedValues.stream_profile_id === '0'
) {
formattedValues.stream_profile_id = null;
}
// Ensure tvg_id is properly included (no empty strings)
formattedValues.tvg_id = formattedValues.tvg_id || null;
// Ensure tvc_guide_stationid is properly included (no empty strings)
formattedValues.tvc_guide_stationid =
formattedValues.tvc_guide_stationid || null;
if (channel) {
// If there's an EPG to set, use our enhanced endpoint
if (values.epg_data_id !== (channel.epg_data_id ?? '')) {
// Use the special endpoint to set EPG and trigger refresh
const epgResponse = await API.setChannelEPG(
channel.id,
values.epg_data_id
);
// Remove epg_data_id from values since we've handled it separately
const { epg_data_id, ...otherValues } = formattedValues;
// Update other channel fields if needed
if (Object.keys(otherValues).length > 0) {
response = await API.updateChannel({
id: channel.id,
...otherValues,
streams: channelStreams.map((stream) => stream.id),
});
}
} else {
// No EPG change, regular update
response = await API.updateChannel({
id: channel.id,
...formattedValues,
streams: channelStreams.map((stream) => stream.id),
});
}
} else {
// New channel creation - use the standard method
response = await API.addChannel({
...formattedValues,
streams: channelStreams.map((stream) => stream.id),
});
}
} catch (error) {
console.error('Error saving channel:', error);
}
formik.resetForm();
API.requeryChannels();
setSubmitting(false);
setTvgFilter('');
setLogoFilter('');
onClose();
},
});
useEffect(() => {
if (channel) {
if (channel.epg_data_id) {
const epgSource = epgs[tvgsById[channel.epg_data_id]?.epg_source];
setSelectedEPG(epgSource ? `${epgSource.id}` : '');
}
formik.setValues({
name: channel.name || '',
channel_number:
channel.channel_number !== null ? channel.channel_number : '',
channel_group_id: channel.channel_group_id
? `${channel.channel_group_id}`
: '',
stream_profile_id: channel.stream_profile_id
? `${channel.stream_profile_id}`
: '0',
tvg_id: channel.tvg_id || '',
tvc_guide_stationid: channel.tvc_guide_stationid || '',
epg_data_id: channel.epg_data_id ?? '',
logo_id: channel.logo_id ? `${channel.logo_id}` : '',
});
setChannelStreams(channel.streams || []);
} else {
formik.resetForm();
setTvgFilter('');
setLogoFilter('');
}
}, [channel, tvgsById, channelGroups]);
useEffect(() => {
setLogoOptions([{ id: '0', name: 'Default' }].concat(Object.values(logos)));
}, [logos]);
const renderLogoOption = ({ option, checked }) => {
return (
<Center style={{ width: '100%' }}>
<img src={logos[option.value].cache_url} width="30" />
</Center>
);
};
// Update the handler for when channel group modal is closed
const handleChannelGroupModalClose = (newGroup) => {
setChannelGroupModalOpen(false);
// If a new group was created and returned, update the form with it
if (newGroup && newGroup.id) {
// Preserve all current form values while updating just the channel_group_id
formik.setValues({
...formik.values,
channel_group_id: `${newGroup.id}`,
});
}
};
if (!isOpen) {
return <></>;
}
const filteredTvgs = tvgs
.filter((tvg) => tvg.epg_source == selectedEPG)
.filter(
(tvg) =>
tvg.name.toLowerCase().includes(tvgFilter.toLowerCase()) ||
tvg.tvg_id.toLowerCase().includes(tvgFilter.toLowerCase())
);
const filteredLogos = logoOptions.filter((logo) =>
logo.name.toLowerCase().includes(logoFilter.toLowerCase())
);
const filteredGroups = groupOptions.filter((group) =>
group.name.toLowerCase().includes(groupFilter.toLowerCase())
);
return (
<Modal
opened={isOpen}
onClose={onClose}
size={1000}
title={
<Group gap="5">
<ListOrdered size="20" />
<Text>Channels</Text>
</Group>
}
styles={{ content: { '--mantine-color-body': '#27272A' } }}
>
<form onSubmit={formik.handleSubmit}>
<Group justify="space-between" align="top">
<Stack gap="5" style={{ flex: 1 }}>
<TextInput
id="name"
name="name"
label="Channel Name"
value={formik.values.name}
onChange={formik.handleChange}
error={formik.errors.name ? formik.touched.name : ''}
size="xs"
/>
<Flex gap="sm">
<Popover
opened={groupPopoverOpened}
onChange={setGroupPopoverOpened}
// position="bottom-start"
withArrow
>
<Popover.Target>
<TextInput
id="channel_group_id"
name="channel_group_id"
label="Channel Group"
readOnly
value={
channelGroups[formik.values.channel_group_id]
? channelGroups[formik.values.channel_group_id].name
: ''
}
onClick={() => setGroupPopoverOpened(true)}
size="xs"
/>
</Popover.Target>
<Popover.Dropdown onMouseDown={(e) => e.stopPropagation()}>
<Group>
<TextInput
placeholder="Filter"
value={groupFilter}
onChange={(event) =>
setGroupFilter(event.currentTarget.value)
}
mb="xs"
size="xs"
/>
</Group>
<ScrollArea style={{ height: 200 }}>
<List
height={200} // Set max height for visible items
itemCount={filteredGroups.length}
itemSize={20} // Adjust row height for each item
width={200}
ref={groupListRef}
>
{({ index, style }) => (
<Box
style={{ ...style, height: 20, overflow: 'hidden' }}
>
<Tooltip
openDelay={500}
label={filteredGroups[index].name}
size="xs"
>
<UnstyledButton
onClick={() => {
formik.setFieldValue(
'channel_group_id',
filteredGroups[index].id
);
setGroupPopoverOpened(false);
}}
>
<Text
size="xs"
style={{
whiteSpace: 'nowrap',
overflow: 'hidden',
textOverflow: 'ellipsis',
}}
>
{filteredGroups[index].name}
</Text>
</UnstyledButton>
</Tooltip>
</Box>
)}
</List>
</ScrollArea>
</Popover.Dropdown>
</Popover>
{/* <Select
id="channel_group_id"
name="channel_group_id"
label="Channel Group"
value={formik.values.channel_group_id}
searchable
onChange={(value) => {
formik.setFieldValue('channel_group_id', value); // Update Formik's state with the new value
}}
error={
formik.errors.channel_group_id
? formik.touched.channel_group_id
: ''
}
data={Object.values(channelGroups).map((option, index) => ({
value: `${option.id}`,
label: option.name,
}))}
size="xs"
style={{ flex: 1 }}
/> */}
<Flex align="flex-end">
<ActionIcon
color={theme.tailwind.green[5]}
onClick={() => setChannelGroupModalOpen(true)}
title="Create new group"
size="small"
variant="transparent"
style={{ marginBottom: 5 }}
>
<SquarePlus size="20" />
</ActionIcon>
</Flex>
</Flex>
<Select
id="stream_profile_id"
label="Stream Profile"
name="stream_profile_id"
value={formik.values.stream_profile_id}
onChange={(value) => {
formik.setFieldValue('stream_profile_id', value); // Update Formik's state with the new value
}}
error={
formik.errors.stream_profile_id
? formik.touched.stream_profile_id
: ''
}
data={[{ value: '0', label: '(use default)' }].concat(
streamProfiles.map((option) => ({
value: `${option.id}`,
label: option.name,
}))
)}
size="xs"
/>
</Stack>
<Divider size="sm" orientation="vertical" />
<Stack justify="flex-start" style={{ flex: 1 }}>
<Group justify="space-between">
<Popover
opened={logoPopoverOpened}
onChange={setLogoPopoverOpened}
// position="bottom-start"
withArrow
>
<Popover.Target>
<TextInput
id="logo_id"
name="logo_id"
label="Logo"
readOnly
value={logos[formik.values.logo_id]?.name || 'Default'}
onClick={() => setLogoPopoverOpened(true)}
size="xs"
/>
</Popover.Target>
<Popover.Dropdown onMouseDown={(e) => e.stopPropagation()}>
<Group>
<TextInput
placeholder="Filter"
value={logoFilter}
onChange={(event) =>
setLogoFilter(event.currentTarget.value)
}
mb="xs"
size="xs"
/>
</Group>
<ScrollArea style={{ height: 200 }}>
<List
height={200} // Set max height for visible items
itemCount={filteredLogos.length}
itemSize={20} // Adjust row height for each item
width="100%"
ref={logoListRef}
>
{({ index, style }) => (
<div style={style}>
<Center>
<img
src={filteredLogos[index].cache_url || logo}
height="20"
style={{ maxWidth: 80 }}
onClick={() => {
formik.setFieldValue(
'logo_id',
filteredLogos[index].id
);
}}
/>
</Center>
</div>
)}
</List>
</ScrollArea>
</Popover.Dropdown>
</Popover>
<img
src={
logos[formik.values.logo_id]
? logos[formik.values.logo_id].cache_url
: logo
}
height="40"
/>
</Group>
<Group>
<Divider size="xs" style={{ flex: 1 }} />
<Text size="xs" c="dimmed">
OR
</Text>
<Divider size="xs" style={{ flex: 1 }} />
</Group>
<Stack>
<Text size="sm">Upload Logo</Text>
<Dropzone
onDrop={handleLogoChange}
onReject={(files) => console.log('rejected files', files)}
maxSize={5 * 1024 ** 2}
>
<Group
justify="center"
gap="xl"
mih={40}
style={{ pointerEvents: 'none' }}
>
<Text size="sm" inline>
Drag images here or click to select files
</Text>
</Group>
</Dropzone>
<Center></Center>
</Stack>
</Stack>
<Divider size="sm" orientation="vertical" />
<Stack gap="5" style={{ flex: 1 }} justify="flex-start">
<NumberInput
id="channel_number"
name="channel_number"
label="Channel # (blank to auto-assign)"
value={formik.values.channel_number}
onChange={(value) =>
formik.setFieldValue('channel_number', value)
}
error={
formik.errors.channel_number
? formik.touched.channel_number
: ''
}
size="xs"
/>
<TextInput
id="tvg_id"
name="tvg_id"
label="TVG-ID"
value={formik.values.tvg_id}
onChange={formik.handleChange}
error={formik.errors.tvg_id ? formik.touched.tvg_id : ''}
size="xs"
/>
<TextInput
id="tvc_guide_stationid"
name="tvc_guide_stationid"
label="Gracenote StationId"
value={formik.values.tvc_guide_stationid}
onChange={formik.handleChange}
error={
formik.errors.tvc_guide_stationid
? formik.touched.tvc_guide_stationid
: ''
}
size="xs"
/>
<Popover
opened={epgPopoverOpened}
onChange={setEpgPopoverOpened}
// position="bottom-start"
withArrow
>
<Popover.Target>
<TextInput
id="epg_data_id"
name="epg_data_id"
label={
<Group style={{ width: '100%' }}>
<Box>EPG</Box>
<Button
size="xs"
variant="transparent"
onClick={() =>
formik.setFieldValue('epg_data_id', null)
}
>
Use Dummy
</Button>
</Group>
}
readOnly
value={
formik.values.epg_data_id
? tvgsById[formik.values.epg_data_id].name
: 'Dummy'
}
onClick={() => setEpgPopoverOpened(true)}
size="xs"
rightSection={
<Tooltip label="Use dummy EPG">
<ActionIcon
// color={theme.tailwind.green[5]}
color="white"
onClick={(e) => {
e.stopPropagation();
formik.setFieldValue('epg_data_id', null);
}}
title="Create new group"

size="small"
variant="transparent"
>
<X size="20" />
</ActionIcon>
</Tooltip>
}
/>
</Popover.Target>
<Popover.Dropdown onMouseDown={(e) => e.stopPropagation()}>
<Group>
<Select
label="Source"
value={selectedEPG}
onChange={setSelectedEPG}
data={Object.values(epgs).map((epg) => ({
value: `${epg.id}`,
label: epg.name,
}))}
size="xs"
mb="xs"
/>
{/* Filter Input */}
<TextInput
label="Filter"
value={tvgFilter}
onChange={(event) =>
setTvgFilter(event.currentTarget.value)
}
mb="xs"
size="xs"
/>
</Group>
<ScrollArea style={{ height: 200 }}>
<List
height={200} // Set max height for visible items
itemCount={filteredTvgs.length}
itemSize={40} // Adjust row height for each item
width="100%"
ref={listRef}
>
{({ index, style }) => (
<div style={style}>
<Button
key={filteredTvgs[index].id}
variant="subtle"
color="gray"
fullWidth
justify="left"
size="xs"
onClick={() => {
if (filteredTvgs[index].id == '0') {
formik.setFieldValue('epg_data_id', null);
} else {
formik.setFieldValue(
'epg_data_id',
filteredTvgs[index].id
);
}
setEpgPopoverOpened(false);
}}
>
{filteredTvgs[index].tvg_id}
</Button>
</div>
)}
</List>
</ScrollArea>
</Popover.Dropdown>
</Popover>
</Stack>
</Group>
<Flex mih={50} gap="xs" justify="flex-end" align="flex-end">
<Button
type="submit"
variant="default"
disabled={formik.isSubmitting}
>
Submit
</Button>
</Flex>
</form>
</Modal>
);
};
export default ChannelsForm;
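The group, logo, and EPG pickers in this form all follow one pattern: a read-only TextInput opens a Popover holding a filter box and a react-window FixedSizeList, so large option lists stay cheap to render. A minimal standalone sketch of that pattern (component and prop names here are illustrative):

import React from 'react';
import { FixedSizeList as List } from 'react-window';

// Renders only the rows that are visible inside the 200px viewport.
function PickerList({ items, onPick }) {
  return (
    <List height={200} itemCount={items.length} itemSize={20} width={200}>
      {({ index, style }) => (
        // `style` positions each row absolutely; it must be forwarded for virtualization to work.
        <div style={style} onClick={() => onPick(items[index])}>
          {items[index].name}
        </div>
      )}
    </List>
  );
}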

View file

@ -22,7 +22,6 @@ import {
Box,
} from '@mantine/core';
import { isNotEmpty, useForm } from '@mantine/form';
import { IconUpload } from '@tabler/icons-react';
const EPG = ({ epg = null, isOpen, onClose }) => {
const epgs = useEPGsStore((state) => state.epgs);
@ -123,7 +122,9 @@ const EPG = ({ epg = null, isOpen, onClose }) => {
value: 'schedules_direct',
},
]}
onChange={(event) => handleSourceTypeChange(event.currentTarget.value)}
onChange={(event) =>
handleSourceTypeChange(event.currentTarget.value)
}
/>
<NumberInput
@ -160,14 +161,20 @@ const EPG = ({ epg = null, isOpen, onClose }) => {
{/* Put checkbox at the same level as Refresh Interval */}
<Box style={{ marginTop: 0 }}>
<Text size="sm" fw={500} mb={3}>Status</Text>
<Text size="xs" c="dimmed" mb={12}>When enabled, this EPG source will auto update.</Text>
<Box style={{
display: 'flex',
alignItems: 'center',
height: '30px', // Reduced height
marginTop: '-4px' // Slight negative margin to move it up
}}>
<Text size="sm" fw={500} mb={3}>
Status
</Text>
<Text size="xs" c="dimmed" mb={12}>
When enabled, this EPG source will auto update.
</Text>
<Box
style={{
display: 'flex',
alignItems: 'center',
height: '30px', // Reduced height
marginTop: '-4px', // Slight negative margin to move it up
}}
>
<Checkbox
id="is_active"
name="is_active"
@ -185,12 +192,10 @@ const EPG = ({ epg = null, isOpen, onClose }) => {
<Divider my="sm" />
<Group justify="end" mt="xl">
<Button variant="outline" onClick={onClose}>Cancel</Button>
<Button
type="submit"
variant="filled"
disabled={form.submitting}
>
<Button variant="outline" onClick={onClose}>
Cancel
</Button>
<Button type="submit" variant="filled" disabled={form.submitting}>
{epg?.id ? 'Update' : 'Create'} EPG Source
</Button>
</Group>

View file

@ -5,17 +5,18 @@ import { Paper, Title, TextInput, Button, Center, Stack } from '@mantine/core';
const LoginForm = () => {
const login = useAuthStore((s) => s.login);
const logout = useAuthStore((s) => s.logout);
const isAuthenticated = useAuthStore((s) => s.isAuthenticated);
const initData = useAuthStore((s) => s.initData);
const navigate = useNavigate(); // Hook to navigate to other routes
const [formData, setFormData] = useState({ username: '', password: '' });
useEffect(() => {
if (isAuthenticated) {
navigate('/channels');
}
}, [isAuthenticated, navigate]);
// useEffect(() => {
// if (isAuthenticated) {
// navigate('/channels');
// }
// }, [isAuthenticated, navigate]);
const handleInputChange = (e) => {
setFormData({
@ -27,8 +28,13 @@ const LoginForm = () => {
const handleSubmit = async (e) => {
e.preventDefault();
await login(formData);
initData();
navigate('/channels'); // Or any other route you'd like
try {
await initData();
navigate('/channels');
} catch (e) {
console.log(`Failed to login: ${e}`);
}
};
return (
@ -60,7 +66,7 @@ const LoginForm = () => {
name="password"
value={formData.password}
onChange={handleInputChange}
required
// required
/>
<Button type="submit" mt="sm">

View file

@ -15,7 +15,6 @@ import {
} from '@mantine/core';
import { useWebSocket } from '../../WebSocket';
import usePlaylistsStore from '../../store/playlists';
import { useDebounce } from '../../utils';
const RegexFormAndView = ({ profile = null, m3u, isOpen, onClose }) => {
const [websocketReady, sendMessage] = useWebSocket();
@ -139,7 +138,10 @@ const RegexFormAndView = ({ profile = null, m3u, isOpen, onClose }) => {
if (!searchPattern || !sampleInput) return sampleInput;
try {
const regex = new RegExp(searchPattern, 'g');
return sampleInput.replace(regex, match => `<mark style="background-color: #ffee58;">${match}</mark>`);
return sampleInput.replace(
regex,
(match) => `<mark style="background-color: #ffee58;">${match}</mark>`
);
} catch (e) {
return sampleInput;
}
@ -213,10 +215,14 @@ const RegexFormAndView = ({ profile = null, m3u, isOpen, onClose }) => {
</Flex>
</form>
<Title order={4} mt={15} mb={10}>Live Regex Demonstration</Title>
<Title order={4} mt={15} mb={10}>
Live Regex Demonstration
</Title>
<Paper shadow="sm" p="xs" radius="md" withBorder mb={8}>
<Text size="sm" weight={500} mb={3}>Sample Text</Text>
<Text size="sm" weight={500} mb={3}>
Sample Text
</Text>
<TextInput
value={sampleInput}
onChange={handleSampleInputChange}
@ -228,7 +234,12 @@ const RegexFormAndView = ({ profile = null, m3u, isOpen, onClose }) => {
<Grid gutter="xs">
<Grid.Col span={12}>
<Paper shadow="sm" p="xs" radius="md" withBorder>
<Text size="sm" weight={500} mb={3}>Matched Text <Badge size="xs" color="yellow">highlighted</Badge></Text>
<Text size="sm" weight={500} mb={3}>
Matched Text{' '}
<Badge size="xs" color="yellow">
highlighted
</Badge>
</Text>
<Text
size="sm"
dangerouslySetInnerHTML={{
@ -241,8 +252,13 @@ const RegexFormAndView = ({ profile = null, m3u, isOpen, onClose }) => {
<Grid.Col span={12}>
<Paper shadow="sm" p="xs" radius="md" withBorder>
<Text size="sm" weight={500} mb={3}>Result After Replace</Text>
<Text size="sm" sx={{ whiteSpace: 'pre-wrap', wordBreak: 'break-all' }}>
<Text size="sm" weight={500} mb={3}>
Result After Replace
</Text>
<Text
size="sm"
sx={{ whiteSpace: 'pre-wrap', wordBreak: 'break-all' }}
>
{getLocalReplaceResult()}
</Text>
</Paper>
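The live demonstration builds a global RegExp from whatever the user has typed and falls back to the raw sample while the pattern is still invalid. A worked example of that behaviour (this mirrors the component's helper, it is not a new API):

const highlight = (pattern, sample) => {
  if (!pattern || !sample) return sample;
  try {
    const regex = new RegExp(pattern, 'g');
    return sample.replace(
      regex,
      (match) => `<mark style="background-color: #ffee58;">${match}</mark>`
    );
  } catch (e) {
    return sample; // half-typed patterns like '[' throw, so show the sample unchanged
  }
};

// highlight('US\\|', 'US| ESPN HD') -> '<mark style="background-color: #ffee58;">US|</mark> ESPN HD'
// highlight('[', 'US| ESPN HD')     -> 'US| ESPN HD' (invalid pattern, left as-is)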

View file

@ -42,6 +42,9 @@ const DVR = ({ recording = null, channel = null, isOpen, onClose }) => {
const onSubmit = async () => {
const { channel_id, ...values } = form.getValues();
console.log(values);
await API.createRecording({
...values,
channel: channel_id,

View file

@ -0,0 +1,256 @@
// Modal.js
import React, { useState, useEffect } from 'react';
import API from '../../api';
import useEPGsStore from '../../store/epgs';
import {
LoadingOverlay,
TextInput,
Button,
Checkbox,
Modal,
Flex,
NativeSelect,
NumberInput,
Space,
Select,
PasswordInput,
Box,
Group,
Stack,
MultiSelect,
Switch,
Text,
Center,
ActionIcon,
} from '@mantine/core';
import { RotateCcwKey, X } from 'lucide-react';
import { isNotEmpty, useForm } from '@mantine/form';
import useChannelsStore from '../../store/channels';
import { USER_LEVELS, USER_LEVEL_LABELS } from '../../constants';
import useAuthStore from '../../store/auth';
const User = ({ user = null, isOpen, onClose }) => {
const profiles = useChannelsStore((s) => s.profiles);
const authUser = useAuthStore((s) => s.user);
const setUser = useAuthStore((s) => s.setUser);
const [enableXC, setEnableXC] = useState(false);
const [selectedProfiles, setSelectedProfiles] = useState(new Set());
const form = useForm({
mode: 'uncontrolled',
initialValues: {
username: '',
email: '',
user_level: '0',
password: '',
xc_password: '',
channel_profiles: [],
},
validate: (values) => ({
username: !values.username
? 'Username is required'
: values.user_level == USER_LEVELS.STREAMER &&
!values.username.match(/^[a-z0-9]+$/i)
? 'Streamer username must be alphanumeric'
: null,
password:
!user && !values.password && values.user_level != USER_LEVELS.STREAMER
? 'Password is required'
: null,
xc_password:
values.xc_password && !values.xc_password.match(/^[a-z0-9]+$/i)
? 'XC password must be alphanumeric'
: null,
}),
});
const onChannelProfilesChange = (values) => {
let newValues = new Set(values);
if (selectedProfiles.has('0')) {
newValues.delete('0');
} else if (newValues.has('0')) {
newValues = new Set(['0']);
}
setSelectedProfiles(newValues);
form.setFieldValue('channel_profiles', [...newValues]);
};
const onSubmit = async () => {
const values = form.getValues();
const { xc_password, ...customProps } = JSON.parse(
user?.custom_properties || '{}'
);
if (values.xc_password) {
customProps.xc_password = values.xc_password;
}
delete values.xc_password;
values.custom_properties = JSON.stringify(customProps);
// If 'All' is included, clear this and we assume access to all channels
if (values.channel_profiles.includes('0')) {
values.channel_profiles = [];
}
if (!user && values.user_level == USER_LEVELS.STREAMER) {
// Generate random password - they can't log in, but user can't be created without a password
values.password = Math.random().toString(36).slice(2);
}
if (!user) {
await API.createUser(values);
} else {
if (!values.password) {
delete values.password;
}
const response = await API.updateUser(user.id, values);
if (user.id == authUser.id) {
setUser(response);
}
}
form.reset();
onClose();
};
useEffect(() => {
if (user?.id) {
const customProps = JSON.parse(user.custom_properties || '{}');
form.setValues({
username: user.username,
email: user.email,
user_level: `${user.user_level}`,
channel_profiles:
user.channel_profiles.length > 0
? user.channel_profiles.map((id) => `${id}`)
: ['0'],
xc_password: customProps.xc_password || '',
});
if (customProps.xc_password) {
setEnableXC(true);
}
} else {
form.reset();
}
}, [user]);
const generateXCPassword = () => {
form.setValues({
xc_password: Math.random().toString(36).slice(2),
});
};
if (!isOpen) {
return <></>;
}
const showPermissions =
authUser.user_level == USER_LEVELS.ADMIN && authUser.id !== user?.id;
return (
<Modal opened={isOpen} onClose={onClose} title="User" size="xl">
<form onSubmit={form.onSubmit(onSubmit)}>
<Group justify="space-between" align="top">
<Stack gap="xs" style={{ flex: 1 }}>
<TextInput
id="username"
name="username"
label="Username"
{...form.getInputProps('username')}
key={form.key('username')}
/>
<PasswordInput
label="Password"
description="Used for UI authentication"
{...form.getInputProps('password')}
key={form.key('password')}
disabled={form.getValues().user_level == USER_LEVELS.STREAMER}
/>
{showPermissions && (
<Select
label="User Level"
data={Object.entries(USER_LEVELS).map(([label, value]) => {
return {
label: USER_LEVEL_LABELS[value],
value: `${value}`,
};
})}
{...form.getInputProps('user_level')}
key={form.key('user_level')}
/>
)}
</Stack>
<Stack gap="xs" style={{ flex: 1 }}>
<TextInput
id="email"
name="email"
label="E-Mail"
{...form.getInputProps('email')}
key={form.key('email')}
/>
<Group align="flex-end">
<TextInput
label="XC Password"
description="Clear to disable XC API"
{...form.getInputProps('xc_password')}
key={form.key('xc_password')}
style={{ flex: 1 }}
rightSectionWidth={30}
rightSection={
<ActionIcon
variant="transparent"
size="sm"
color="white"
onClick={generateXCPassword}
>
<RotateCcwKey />
</ActionIcon>
}
/>
</Group>
{showPermissions && (
<MultiSelect
label="Channel Profiles"
{...form.getInputProps('channel_profiles')}
key={form.key('channel_profiles')}
onChange={onChannelProfilesChange}
data={Object.values(profiles).map((profile) => ({
label: profile.name,
value: `${profile.id}`,
}))}
/>
)}
</Stack>
</Group>
<Flex mih={50} gap="xs" justify="flex-end" align="flex-end">
<Button
type="submit"
variant="contained"
disabled={form.submitting}
size="small"
>
Save
</Button>
</Flex>
</form>
</Modal>
);
};
export default User;
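Both the placeholder password for streamer accounts and the generated XC password use the same idiom; a short note on what it actually produces (mirroring the code above, not a new helper in this diff):

// Math.random().toString(36) gives something like '0.h1k3v9q2xaz'; slicing off the
// leading '0.' leaves a short lowercase alphanumeric token.
const randomToken = () => Math.random().toString(36).slice(2);

// e.g. randomToken() -> 'h1k3v9q2xaz' (length varies, and it is not cryptographically
// secure, so it is only suitable as a placeholder or convenience credential)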

View file

@ -14,6 +14,7 @@ import {
FileInput,
Space,
} from '@mantine/core';
import { NETWORK_ACCESS_OPTIONS } from '../../constants';
const UserAgent = ({ userAgent = null, isOpen, onClose }) => {
const formik = useFormik({

View file

@ -36,6 +36,8 @@ import {
import { useSortable } from '@dnd-kit/sortable';
import { CSS } from '@dnd-kit/utilities';
import { shallow } from 'zustand/shallow';
import useAuthStore from '../../store/auth';
import { USER_LEVELS } from '../../constants';
const RowDragHandleCell = ({ rowId }) => {
const { attributes, listeners, setNodeRef } = useDraggable({
@ -120,6 +122,7 @@ const ChannelStreams = ({ channel, isExpanded }) => {
shallow
);
const playlists = usePlaylistsStore((s) => s.playlists);
const authUser = useAuthStore((s) => s.user);
const [data, setData] = useState(channelStreams || []);
@ -168,6 +171,7 @@ const ChannelStreams = ({ channel, isExpanded }) => {
<SquareMinus
color={theme.tailwind.red[6]}
onClick={() => removeStream(row.original)}
disabled={authUser.user_level != USER_LEVELS.ADMIN}
/>
</ActionIcon>
</Center>
@ -192,7 +196,11 @@ const ChannelStreams = ({ channel, isExpanded }) => {
getCoreRowModel: getCoreRowModel(),
});
function handleDragEnd(event) {
const handleDragEnd = (event) => {
if (authUser.user_level != USER_LEVELS.ADMIN) {
return;
}
const { active, over } = event;
if (active && over && active.id !== over.id) {
setData((data) => {
@ -211,7 +219,7 @@ const ChannelStreams = ({ channel, isExpanded }) => {
return retval; //this is just a splice util
});
}
}
};
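// A self-contained sketch of the reorder performed inside setData above: take the dragged
// row out and re-insert it at the hovered index. splice mutates, so work on a copy.
// (Helper name is illustrative; the component inlines this logic.)
const reorderRows = (rows, fromIndex, toIndex) => {
  const next = [...rows];
  next.splice(toIndex, 0, next.splice(fromIndex, 1)[0]);
  return next;
};
// e.g. reorderRows(['a', 'b', 'c'], 0, 2) -> ['b', 'c', 'a']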
const sensors = useSensors(
useSensor(MouseSensor, {}),

View file

@ -3,6 +3,7 @@ import useChannelsStore from '../../store/channels';
import { notifications } from '@mantine/notifications';
import API from '../../api';
import ChannelForm from '../forms/Channel';
import ChannelBatchForm from '../forms/ChannelBatch';
import RecordingForm from '../forms/Recording';
import { useDebounce, copyToClipboard } from '../../utils';
import logo from '../../images/logo.png';
@ -51,6 +52,8 @@ import ChannelsTableOnboarding from './ChannelsTable/ChannelsTableOnboarding';
import ChannelTableHeader from './ChannelsTable/ChannelTableHeader';
import useWarningsStore from '../../store/warnings';
import ConfirmationDialog from '../ConfirmationDialog';
import useAuthStore from '../../store/auth';
import { USER_LEVELS } from '../../constants';
const m3uUrlBase = `${window.location.protocol}//${window.location.host}/output/m3u`;
const epgUrlBase = `${window.location.protocol}//${window.location.host}/output/epg`;
@ -108,6 +111,8 @@ const ChannelRowActions = React.memo(
const channelUuid = row.original.uuid;
const [tableSize, _] = useLocalStorage('table-size', 'default');
const authUser = useAuthStore((s) => s.user);
const onEdit = useCallback(() => {
// Use the ID directly to avoid issues with filtered tables
console.log(`Editing channel ID: ${channelId}`);
@ -141,6 +146,7 @@ const ChannelRowActions = React.memo(
variant="transparent"
color={theme.tailwind.yellow[3]}
onClick={onEdit}
disabled={authUser.user_level != USER_LEVELS.ADMIN}
>
<SquarePen size="18" />
</ActionIcon>
@ -150,6 +156,7 @@ const ChannelRowActions = React.memo(
variant="transparent"
color={theme.tailwind.red[6]}
onClick={onDelete}
disabled={authUser.user_level != USER_LEVELS.ADMIN}
>
<SquareMinus size="18" />
</ActionIcon>
@ -181,6 +188,7 @@ const ChannelRowActions = React.memo(
</Menu.Item>
<Menu.Item
onClick={onRecord}
disabled={authUser.user_level != USER_LEVELS.ADMIN}
leftSection={
<div
style={{
@ -203,7 +211,7 @@ const ChannelRowActions = React.memo(
}
);
const ChannelsTable = ({ }) => {
const ChannelsTable = ({}) => {
const theme = useMantineTheme();
/**
@ -261,6 +269,7 @@ const ChannelsTable = ({ }) => {
*/
const [channel, setChannel] = useState(null);
const [channelModalOpen, setChannelModalOpen] = useState(false);
const [channelBatchModalOpen, setChannelBatchModalOpen] = useState(false);
const [recordingModalOpen, setRecordingModalOpen] = useState(false);
const [selectedProfile, setSelectedProfile] = useState(
profiles[selectedProfileId]
@ -291,7 +300,12 @@ const ChannelsTable = ({ }) => {
const groupOptions = Object.values(channelGroups)
.filter((group) => activeGroupIds.has(group.id))
.map((group) => group.name);
const debouncedFilters = useDebounce(filters, 500);
const debouncedFilters = useDebounce(filters, 500, () => {
setPagination({
...pagination,
pageIndex: 0,
});
});
/**
* Functions
@ -329,14 +343,8 @@ const ChannelsTable = ({ }) => {
e.stopPropagation();
}, []);
// Remove useCallback to ensure we're using the latest setPagination function
const handleFilterChange = (e) => {
const { name, value } = e.target;
// First reset pagination to page 0
setPagination({
...pagination,
pageIndex: 0,
});
// Then update filters
setFilters((prev) => ({
...prev,
@ -345,11 +353,6 @@ const ChannelsTable = ({ }) => {
};
const handleGroupChange = (value) => {
// First reset pagination to page 0
setPagination({
...pagination,
pageIndex: 0,
});
// Then update filters
setFilters((prev) => ({
...prev,
@ -358,8 +361,12 @@ const ChannelsTable = ({ }) => {
};
const editChannel = async (ch = null) => {
setChannel(ch);
setChannelModalOpen(true);
if (selectedChannelIds.length > 0) {
setChannelBatchModalOpen(true);
} else {
setChannel(ch);
setChannelModalOpen(true);
}
};
const deleteChannel = async (id) => {
@ -473,6 +480,10 @@ const ChannelsTable = ({ }) => {
});
};
const closeChannelBatchForm = () => {
setChannelBatchModalOpen(false);
};
const closeChannelForm = () => {
setChannel(null);
setChannelModalOpen(false);
@ -596,8 +607,12 @@ const ChannelsTable = ({ }) => {
cell: ({ getValue }) => {
const value = getValue();
// Format as integer if no decimal component
const formattedValue = value !== null && value !== undefined ?
(value === Math.floor(value) ? Math.floor(value) : value) : '';
const formattedValue =
value !== null && value !== undefined
? value === Math.floor(value)
? Math.floor(value)
: value
: '';
return (
<Flex justify="flex-end" style={{ width: '100%' }}>
@ -797,8 +812,8 @@ const ChannelsTable = ({ }) => {
return hasStreams
? {} // Default style for channels with streams
: {
className: 'no-streams-row', // Add a class instead of background color
};
className: 'no-streams-row', // Add a class instead of background color
};
},
});
@ -1030,6 +1045,12 @@ const ChannelsTable = ({ }) => {
onClose={closeChannelForm}
/>
<ChannelBatchForm
channelIds={selectedChannelIds}
isOpen={channelBatchModalOpen}
onClose={closeChannelBatchForm}
/>
<RecordingForm
channel={channel}
isOpen={recordingModalOpen}

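Two behavioural changes above are easy to miss: editChannel now opens the batch form whenever multiple channels are selected, and the pageIndex reset moved out of the individual filter handlers into a callback passed to useDebounce. The project's hook is imported from ../../utils; the sketch below is only an assumed-compatible shape that matches how the call sites use it, not its actual implementation.

import { useEffect, useState } from 'react';

// Assumed signature: useDebounce(value, delayMs, onDebounced). The callback
// fires once per settled value, which is where the tables reset pageIndex to 0
// so a filter change never requests an out-of-range page.
export function useDebounce(value, delay, onDebounced) {
  const [debounced, setDebounced] = useState(value);

  useEffect(() => {
    const timer = setTimeout(() => {
      setDebounced(value);
      if (onDebounced) {
        onDebounced(value);
      }
    }, delay);
    return () => clearTimeout(timer);
    // onDebounced is deliberately left out of the deps so a new callback
    // identity does not restart the timer.
  }, [value, delay]);

  return debounced;
}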
View file

@ -5,12 +5,14 @@ import {
Button,
Flex,
Group,
Menu,
NumberInput,
Popover,
Select,
Text,
TextInput,
Tooltip,
UnstyledButton,
useMantineTheme,
} from '@mantine/core';
import {
@ -18,18 +20,26 @@ import {
Binary,
Check,
CircleCheck,
Ellipsis,
EllipsisVertical,
SquareMinus,
SquarePen,
SquarePlus,
} from 'lucide-react';
import API from '../../../api';
import { notifications } from '@mantine/notifications';
import useChannelsStore from '../../../store/channels';
import useAuthStore from '../../../store/auth';
import { USER_LEVELS } from '../../../constants';
import AssignChannelNumbersForm from '../../forms/AssignChannelNumbers';
const CreateProfilePopover = React.memo(() => {
const [opened, setOpened] = useState(false);
const [name, setName] = useState('');
const theme = useMantineTheme();
const authUser = useAuthStore((s) => s.user);
const setOpen = () => {
setName('');
setOpened(!opened);
@ -54,6 +64,7 @@ const CreateProfilePopover = React.memo(() => {
variant="transparent"
color={theme.tailwind.green[5]}
onClick={setOpen}
disabled={authUser.user_level != USER_LEVELS.ADMIN}
>
<SquarePlus />
</ActionIcon>
@ -91,10 +102,16 @@ const ChannelTableHeader = ({
const theme = useMantineTheme();
const [channelNumAssignmentStart, setChannelNumAssignmentStart] = useState(1);
const [assignNumbersModalOpen, setAssignNumbersModalOpen] = useState(false);
const profiles = useChannelsStore((s) => s.profiles);
const selectedProfileId = useChannelsStore((s) => s.selectedProfileId);
const setSelectedProfileId = useChannelsStore((s) => s.setSelectedProfileId);
const authUser = useAuthStore((s) => s.user);
const closeAssignChannelNumbersModal = () => {
setAssignNumbersModalOpen(false);
};
const deleteProfile = async (id) => {
await API.deleteChannelProfile(id);
@ -152,6 +169,7 @@ const ChannelTableHeader = ({
e.stopPropagation();
deleteProfile(option.value);
}}
disabled={authUser.user_level != USER_LEVELS.ADMIN}
>
<SquareMinus />
</ActionIcon>
@ -188,80 +206,92 @@ const ChannelTableHeader = ({
}}
>
<Flex gap={6}>
<Button
leftSection={<SquarePen size={18} />}
variant="default"
size="xs"
onClick={editChannel}
disabled={
selectedTableIds.length == 0 ||
authUser.user_level != USER_LEVELS.ADMIN
}
>
Edit
</Button>
<Button
leftSection={<SquareMinus size={18} />}
variant="default"
size="xs"
onClick={deleteChannels}
disabled={selectedTableIds.length == 0}
disabled={
selectedTableIds.length == 0 ||
authUser.user_level != USER_LEVELS.ADMIN
}
>
Remove
Delete
</Button>
<Tooltip label="Assign Channel #s">
<Popover withArrow shadow="md">
<Popover.Target>
<Button
leftSection={<ArrowDown01 size={18} />}
variant="default"
size="xs"
p={5}
disabled={selectedTableIds.length == 0}
>
Assign
</Button>
</Popover.Target>
<Popover.Dropdown>
<Group>
<Text>Start #</Text>
<NumberInput
value={channelNumAssignmentStart}
onChange={setChannelNumAssignmentStart}
size="small"
style={{ width: 50 }}
/>
<ActionIcon
size="xs"
color={theme.tailwind.green[5]}
variant="transparent"
onClick={assignChannels}
>
<Check />
</ActionIcon>
</Group>
</Popover.Dropdown>
</Popover>
</Tooltip>
<Tooltip label="Auto-Match EPG">
<Button
leftSection={<Binary size={18} />}
variant="default"
size="xs"
onClick={matchEpg}
p={5}
>
Auto-Match
</Button>
</Tooltip>
<Button
leftSection={<SquarePlus size={18} />}
variant="light"
size="xs"
onClick={() => editChannel()}
disabled={authUser.user_level != USER_LEVELS.ADMIN}
p={5}
color={theme.tailwind.green[5]}
style={{
borderWidth: '1px',
borderColor: theme.tailwind.green[5],
color: 'white',
...(authUser.user_level == USER_LEVELS.ADMIN && {
borderWidth: '1px',
borderColor: theme.tailwind.green[5],
color: 'white',
}),
}}
>
Add
</Button>
<Menu>
<Menu.Target>
<ActionIcon variant="default" size={30}>
<EllipsisVertical size={18} />
</ActionIcon>
</Menu.Target>
<Menu.Dropdown>
<Menu.Item
leftSection={<ArrowDown01 size={18} />}
disabled={
selectedTableIds.length == 0 ||
authUser.user_level != USER_LEVELS.ADMIN
}
>
<UnstyledButton
size="xs"
onClick={() => setAssignNumbersModalOpen(true)}
>
<Text size="xs">Assign #s</Text>
</UnstyledButton>
</Menu.Item>
<Menu.Item
leftSection={<Binary size={18} />}
disabled={authUser.user_level != USER_LEVELS.ADMIN}
>
<UnstyledButton size="xs" onClick={matchEpg}>
<Text size="xs">Auto-Match</Text>
</UnstyledButton>
</Menu.Item>
</Menu.Dropdown>
</Menu>
</Flex>
</Box>
<AssignChannelNumbersForm
channelIds={selectedTableIds}
isOpen={assignNumbersModalOpen}
onClose={closeAssignChannelNumbersModal}
/>
</Group>
);
};

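The header controls above repeat the same authUser.user_level != USER_LEVELS.ADMIN check on every button and menu item. If that check keeps spreading, a small selector hook is one way to centralize it; this is a suggestion sketched against the stores already imported in this file, not something the commit adds.

import useAuthStore from '../../../store/auth';
import { USER_LEVELS } from '../../../constants';

// Selects only the derived boolean so unrelated auth updates don't re-render.
export function useIsAdmin() {
  return useAuthStore((s) => s.user?.user_level === USER_LEVELS.ADMIN);
}

// Usage inside ChannelTableHeader:
//   const isAdmin = useIsAdmin();
//   <Button onClick={deleteChannels}
//           disabled={selectedTableIds.length === 0 || !isAdmin}>
//     Delete
//   </Button>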
View file

@ -27,6 +27,7 @@ const CustomTable = ({ table }) => {
table.onSelectAllChange ? table.onSelectAllChange : null
}
selectedTableIds={table.selectedTableIds}
tableCellProps={table.tableCellProps}
/>
<CustomTableBody
getRowModel={table.getRowModel}
@ -36,6 +37,8 @@ const CustomTable = ({ table }) => {
renderBodyCell={table.renderBodyCell}
getExpandedRowHeight={table.getExpandedRowHeight}
getRowStyles={table.getRowStyles} // Pass the getRowStyles function
tableBodyProps={table.tableBodyProps}
tableCellProps={table.tableCellProps}
/>
</Box>
);

View file

@ -1,6 +1,7 @@
import { Box, Flex } from '@mantine/core';
import { VariableSizeList as List } from 'react-window';
import AutoSizer from 'react-virtualized-auto-sizer';
import table from '../../../helpers/table';
const CustomTableBody = ({
getRowModel,
@ -9,6 +10,8 @@ const CustomTableBody = ({
renderBodyCell,
getExpandedRowHeight,
getRowStyles, // Add this prop to receive row styles
tableBodyProps,
tableCellProps,
}) => {
const renderExpandedRow = (row) => {
if (expandedRowRenderer) {
@ -25,7 +28,10 @@ const CustomTableBody = ({
if (virtualized) {
return (
<Box className="tbody" style={{ flex: 1 }}>
<Box
className="tbody"
style={{ flex: 1, ...(tableBodyProps && tableBodyProps()) }}
>
<AutoSizer disableWidth>
{({ height }) => {
const getItemSize = (index) => {
@ -105,6 +111,7 @@ const CustomTableBody = ({
? cell.column.getSize()
: undefined,
minWidth: 0,
...(tableCellProps && tableCellProps({ cell })),
}}
>
<Flex align="center" style={{ height: '100%' }}>

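CustomTableBody now accepts optional tableBodyProps and tableCellProps callbacks and spreads whatever they return into the inline styles of the body wrapper and each cell. The contract, as the EPG and M3U tables use it later in this diff, is roughly the following; the per-column branch is illustrative.

// Hedged sketch of the callbacks consumed above. Both are optional, return
// plain style objects, and tableCellProps receives the tanstack cell so
// callers can vary styling per column or per row.
const tableSize = 'default'; // normally read via useLocalStorage('table-size')

const tableBodyProps = () => ({
  fontSize:
    tableSize === 'compact'
      ? 'var(--mantine-font-size-xs)'
      : 'var(--mantine-font-size-sm)',
});

const tableCellProps = ({ cell }) => ({
  padding: tableSize === 'compact' ? '2px 8px' : '4px 10px',
  textAlign: cell.column.id === 'is_active' ? 'center' : 'left',
});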
View file

@ -8,6 +8,7 @@ const CustomTableHeader = ({
selectedTableIds,
headerCellRenderFns,
onSelectAllChange,
tableCellProps,
}) => {
const renderHeaderCell = (header) => {
if (headerCellRenderFns[header.id]) {
@ -66,6 +67,7 @@ const CustomTableHeader = ({
? header.getSize()
: undefined,
minWidth: 0,
// ...(tableCellProps && tableCellProps({ cell: header })),
}}
>
<Flex

View file

@ -1,5 +1,4 @@
import { useEffect, useMemo, useRef, useState } from 'react';
import { MantineReactTable, useMantineReactTable } from 'mantine-react-table';
import React, { useEffect, useMemo, useRef, useState } from 'react';
import API from '../../api';
import useEPGsStore from '../../store/epgs';
import EPGForm from '../forms/EPG';
@ -17,15 +16,24 @@ import {
Badge,
Progress,
Stack,
Group,
} from '@mantine/core';
import { notifications } from '@mantine/notifications';
import { IconSquarePlus } from '@tabler/icons-react';
import { RefreshCcw, SquareMinus, SquarePen } from 'lucide-react';
import {
ArrowDownWideNarrow,
ArrowUpDown,
ArrowUpNarrowWide,
RefreshCcw,
SquareMinus,
SquarePen,
SquarePlus,
} from 'lucide-react';
import dayjs from 'dayjs';
import useSettingsStore from '../../store/settings';
import useLocalStorage from '../../hooks/useLocalStorage';
import ConfirmationDialog from '../../components/ConfirmationDialog';
import useWarningsStore from '../../store/warnings';
import { CustomTable, useTable } from './CustomTable';
// Helper function to format status text
const formatStatusText = (status) => {
@ -36,15 +44,59 @@ const formatStatusText = (status) => {
// Helper function to get status text color
const getStatusColor = (status) => {
switch (status) {
case 'idle': return 'gray.5';
case 'fetching': return 'blue.5';
case 'parsing': return 'indigo.5';
case 'error': return 'red.5';
case 'success': return 'green.5';
default: return 'gray.5';
case 'idle':
return 'gray.5';
case 'fetching':
return 'blue.5';
case 'parsing':
return 'indigo.5';
case 'error':
return 'red.5';
case 'success':
return 'green.5';
default:
return 'gray.5';
}
};
const RowActions = ({ tableSize, row, editEPG, deleteEPG, refreshEPG }) => {
const iconSize =
tableSize == 'default' ? 'sm' : tableSize == 'compact' ? 'xs' : 'md';
return (
<>
<ActionIcon
variant="transparent"
size={iconSize} // Use standardized icon size
color="yellow.5" // Red color for delete actions
onClick={() => editEPG(row.original)}
>
<SquarePen size={tableSize === 'compact' ? 16 : 18} />{' '}
{/* Small icon size */}
</ActionIcon>
<ActionIcon
variant="transparent"
size={iconSize} // Use standardized icon size
color="red.9" // Red color for delete actions
onClick={() => deleteEPG(row.original.id)}
>
<SquareMinus size={tableSize === 'compact' ? 16 : 18} />{' '}
{/* Small icon size */}
</ActionIcon>
<ActionIcon
variant="transparent"
size={iconSize} // Use standardized icon size
color="blue.5" // Red color for delete actions
onClick={() => refreshEPG(row.original.id)}
disabled={!row.original.is_active}
>
<RefreshCcw size={tableSize === 'compact' ? 16 : 18} />{' '}
{/* Small icon size */}
</ActionIcon>
</>
);
};
const EPGsTable = () => {
const [epg, setEPG] = useState(null);
const [epgModalOpen, setEPGModalOpen] = useState(false);
@ -52,6 +104,7 @@ const EPGsTable = () => {
const [confirmDeleteOpen, setConfirmDeleteOpen] = useState(false);
const [deleteTarget, setDeleteTarget] = useState(null);
const [epgToDelete, setEpgToDelete] = useState(null);
const [data, setData] = useState([]);
const epgs = useEPGsStore((s) => s.epgs);
const refreshProgress = useEPGsStore((s) => s.refreshProgress);
@ -61,10 +114,12 @@ const EPGsTable = () => {
const [tableSize] = useLocalStorage('table-size', 'default');
// Get proper size for action icons to match ChannelsTable
const iconSize = tableSize === 'compact' ? 'xs' : tableSize === 'large' ? 'md' : 'sm';
const iconSize =
tableSize === 'compact' ? 'xs' : tableSize === 'large' ? 'md' : 'sm';
// Calculate density for Mantine Table
const tableDensity = tableSize === 'compact' ? 'xs' : tableSize === 'large' ? 'xl' : 'md';
const tableDensity =
tableSize === 'compact' ? 'xs' : tableSize === 'large' ? 'xl' : 'md';
const isWarningSuppressed = useWarningsStore((s) => s.isWarningSuppressed);
const suppressWarning = useWarningsStore((s) => s.suppressWarning);
@ -72,10 +127,13 @@ const EPGsTable = () => {
const toggleActive = async (epg) => {
try {
// Send only the is_active field to trigger our special handling
await API.updateEPG({
id: epg.id,
is_active: !epg.is_active,
}, true); // Add a new parameter to indicate this is just a toggle
await API.updateEPG(
{
id: epg.id,
is_active: !epg.is_active,
},
true
); // Add a new parameter to indicate this is just a toggle
} catch (error) {
console.error('Error toggling active state:', error);
}
@ -103,36 +161,46 @@ const EPGsTable = () => {
return (
<Stack spacing={2}>
<Text size="xs">{label}: {parseInt(progress.progress)}%</Text>
<Progress value={parseInt(progress.progress)} size="xs" style={{ margin: '2px 0' }} />
{progress.speed && <Text size="xs">Speed: {parseInt(progress.speed)} KB/s</Text>}
<Text size="xs">
{label}: {parseInt(progress.progress)}%
</Text>
<Progress
value={parseInt(progress.progress)}
size="xs"
style={{ margin: '2px 0' }}
/>
{progress.speed && (
<Text size="xs">Speed: {parseInt(progress.speed)} KB/s</Text>
)}
</Stack>
);
};
console.log(epgs);
const columns = useMemo(
//column definitions...
() => [
{
header: 'Name',
accessorKey: 'name',
size: 150,
minSize: 100,
size: 200,
},
{
header: 'Source Type',
accessorKey: 'source_type',
size: 120,
minSize: 100,
size: 150,
},
{
header: 'URL / API Key / File Path',
accessorKey: 'url',
size: 200,
minSize: 120,
enableSorting: false,
Cell: ({ cell, row }) => {
const value = cell.getValue() || row.original.api_key || row.original.file_path || '';
cell: ({ cell, row }) => {
const value =
cell.getValue() ||
row.original.api_key ||
row.original.file_path ||
'';
return (
<Tooltip label={value} disabled={!value}>
<div
@ -152,18 +220,13 @@ const EPGsTable = () => {
{
header: 'Status',
accessorKey: 'status',
size: 100,
minSize: 80,
Cell: ({ row }) => {
size: 150,
cell: ({ row }) => {
const data = row.original;
// Always show status text, even when there's progress happening
return (
<Text
size="sm"
fw={500}
c={getStatusColor(data.status)}
>
<Text size="sm" fw={500} c={getStatusColor(data.status)}>
{formatStatusText(data.status)}
</Text>
);
@ -172,14 +235,15 @@ const EPGsTable = () => {
{
header: 'Status Message',
accessorKey: 'last_message',
size: 250,
minSize: 150,
enableSorting: false,
Cell: ({ row }) => {
cell: ({ row }) => {
const data = row.original;
// Check if there's an active progress for this EPG - show progress first if active
if (refreshProgress[data.id] && refreshProgress[data.id].progress < 100) {
if (
refreshProgress[data.id] &&
refreshProgress[data.id].progress < 100
) {
return buildProgressDisplay(data);
}
@ -187,7 +251,12 @@ const EPGsTable = () => {
if (data.status === 'error' && data.last_message) {
return (
<Tooltip label={data.last_message} multiline width={300}>
<Text c="dimmed" size="xs" lineClamp={2} style={{ color: theme.colors.red[6], lineHeight: 1.3 }}>
<Text
c="dimmed"
size="xs"
lineClamp={2}
style={{ color: theme.colors.red[6], lineHeight: 1.3 }}
>
{data.last_message}
</Text>
</Tooltip>
@ -197,7 +266,11 @@ const EPGsTable = () => {
// Show success message for successful sources
if (data.status === 'success') {
return (
<Text c="dimmed" size="xs" style={{ color: theme.colors.green[6], lineHeight: 1.3 }}>
<Text
c="dimmed"
size="xs"
style={{ color: theme.colors.green[6], lineHeight: 1.3 }}
>
EPG data refreshed successfully
</Text>
);
@ -210,24 +283,26 @@ const EPGsTable = () => {
{
header: 'Updated',
accessorKey: 'updated_at',
size: 180,
minSize: 100,
size: 175,
enableSorting: false,
Cell: ({ cell }) => {
cell: ({ cell }) => {
const value = cell.getValue();
return value ? dayjs(value).format('MMMM D, YYYY h:mma') : 'Never';
return value ? (
<Text size="xs">{new Date(value).toLocaleString()}</Text>
) : (
<Text size="xs">Never</Text>
);
},
},
{
header: 'Active',
accessorKey: 'is_active',
size: 80,
minSize: 60,
size: 50,
sortingFn: 'basic',
mantineTableBodyCellProps: {
align: 'left',
},
Cell: ({ row, cell }) => (
cell: ({ row, cell }) => (
<Box sx={{ display: 'flex', justifyContent: 'center' }}>
<Switch
size="xs"
@ -237,13 +312,15 @@ const EPGsTable = () => {
</Box>
),
},
{
id: 'actions',
header: 'Actions',
size: tableSize == 'compact' ? 75 : 100,
},
],
[refreshProgress]
);
//optionally access the underlying virtualizer instance
const rowVirtualizerInstanceRef = useRef(null);
const [isLoading, setIsLoading] = useState(true);
const [sorting, setSorting] = useState([]);
@ -286,118 +363,133 @@ const EPGsTable = () => {
};
useEffect(() => {
if (typeof window !== 'undefined') {
setIsLoading(false);
}
}, []);
useEffect(() => {
//scroll to the top of the table when the sorting changes
try {
rowVirtualizerInstanceRef.current?.scrollToIndex?.(0);
} catch (error) {
console.error(error);
}
}, [sorting]);
const table = useMantineReactTable({
...TableHelper.defaultProperties,
columns,
// Sort data before passing to table: active first, then by name
data: Object.values(epgs)
.sort((a, b) => {
setData(
Object.values(epgs).sort((a, b) => {
// First sort by active status (active items first)
if (a.is_active !== b.is_active) {
return a.is_active ? -1 : 1;
}
// Then sort by name (case-insensitive)
return a.name.toLowerCase().localeCompare(b.name.toLowerCase());
}),
})
);
}, [epgs]);
const renderBodyCell = ({ cell, row }) => {
switch (cell.column.id) {
case 'actions':
return (
<RowActions
tableSize={tableSize}
row={row}
editEPG={editEPG}
deleteEPG={deleteEPG}
refreshEPG={refreshEPG}
/>
);
}
};
const renderHeaderCell = (header) => {
let sortingIcon = ArrowUpDown;
if (sorting[0]?.id == header.id) {
if (sorting[0].desc === false) {
sortingIcon = ArrowUpNarrowWide;
} else {
sortingIcon = ArrowDownWideNarrow;
}
}
switch (header.id) {
default:
return (
<Group>
<Text size="sm" name={header.id}>
{header.column.columnDef.header}
</Text>
{header.column.columnDef.sortable && (
<Center>
{React.createElement(sortingIcon, {
onClick: () => onSortingChange(header.id),
size: 14,
})}
</Center>
)}
</Group>
);
}
};
const onSortingChange = (column) => {
console.log(column);
const sortField = sorting[0]?.id;
const sortDirection = sorting[0]?.desc;
const newSorting = [];
if (sortField == column) {
if (sortDirection == false) {
newSorting[0] = {
id: column,
desc: true,
};
}
} else {
newSorting[0] = {
id: column,
desc: false,
};
}
setSorting(newSorting);
if (newSorting.length > 0) {
const compareColumn = newSorting[0].id;
const compareDesc = newSorting[0].desc;
setData(
        Object.values(epgs).sort((a, b) => {
          if (a[compareColumn] === b[compareColumn]) {
            return 0;
          }
          const result = a[compareColumn] > b[compareColumn] ? 1 : -1;
          return compareDesc ? -result : result;
        })
);
}
};
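  // The handler above implements a single-column, tri-state toggle: the first
  // click on a header sorts ascending, a second click flips to descending, and
  // a third click clears the sort. As a pure helper (sketch only, not code
  // added by this commit):
  function nextSorting(current, columnId) {
    const [active] = current;
    if (!active || active.id !== columnId) {
      return [{ id: columnId, desc: false }]; // first click: ascending
    }
    if (active.desc === false) {
      return [{ id: columnId, desc: true }]; // second click: descending
    }
    return []; // third click: clear the sort
  }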
const table = useTable({
columns,
data,
allRowIds: data.map((epg) => epg.id),
enablePagination: false,
enableRowVirtualization: true,
enableRowSelection: false,
renderTopToolbar: false,
onRowSelectionChange: setRowSelection,
onSortingChange: setSorting,
state: {
isLoading,
sorting,
rowSelection,
density: tableDensity,
manualSorting: true,
bodyCellRenderFns: {
actions: renderBodyCell,
},
rowVirtualizerInstanceRef, //optional
rowVirtualizerOptions: { overscan: 5 }, //optionally customize the row virtualizer
initialState: {
density: tableDensity,
},
enableRowActions: true,
positionActionsColumn: 'last',
displayColumnDefOptions: {
'mrt-row-actions': {
size: 120, // Make action column wider
minSize: 120, // Ensure minimum width for action buttons
},
},
renderRowActions: ({ row }) => (
<>
<ActionIcon
variant="transparent"
size={iconSize} // Use standardized icon size
color="yellow.5" // Red color for delete actions
onClick={() => editEPG(row.original)}
>
<SquarePen size={tableSize === 'compact' ? 16 : 18} /> {/* Small icon size */}
</ActionIcon>
<ActionIcon
variant="transparent"
size={iconSize} // Use standardized icon size
color="red.9" // Red color for delete actions
onClick={() => deleteEPG(row.original.id)}
>
<SquareMinus size={tableSize === 'compact' ? 16 : 18} /> {/* Small icon size */}
</ActionIcon>
<ActionIcon
variant="transparent"
size={iconSize} // Use standardized icon size
color="blue.5" // Red color for delete actions
onClick={() => refreshEPG(row.original.id)}
disabled={!row.original.is_active}
>
<RefreshCcw size={tableSize === 'compact' ? 16 : 18} /> {/* Small icon size */}
</ActionIcon>
</>
),
mantineTableContainerProps: {
style: {
height: 'calc(40vh - 10px)',
overflowX: 'auto', // Ensure horizontal scrolling works
},
},
mantineTableProps: {
...TableHelper.defaultProperties.mantineTableProps,
className: `table-size-${tableSize}`,
headerCellRenderFns: {
name: renderHeaderCell,
source_type: renderHeaderCell,
url: renderHeaderCell,
status: renderHeaderCell,
last_message: renderHeaderCell,
updated_at: renderHeaderCell,
is_active: renderHeaderCell,
actions: renderHeaderCell,
},
// Add custom cell styles to match CustomTable's sizing
mantineTableBodyCellProps: ({ cell }) => {
// Check if this is a status message cell with active progress
const progressData = cell.column.id === 'last_message' &&
refreshProgress[cell.row.original.id] &&
refreshProgress[cell.row.original.id].progress < 100 ?
refreshProgress[cell.row.original.id] : null;
// Only expand height for certain actions that need more space
const needsExpandedHeight = progressData &&
['downloading', 'parsing_channels', 'parsing_programs'].includes(progressData.action);
tableCellProps: ({ cell }) => {
return {
style: {
// Apply taller height for progress cells (except initializing), otherwise use standard height
height: needsExpandedHeight ? '80px' : (
tableSize === 'compact' ? '28px' : tableSize === 'large' ? '48px' : '40px'
),
fontSize: tableSize === 'compact' ? 'var(--mantine-font-size-xs)' : 'var(--mantine-font-size-sm)',
padding: tableSize === 'compact' ? '2px 8px' : '4px 10px'
}
// Apply taller height for progress cells (except initializing), otherwise use standard height
fontSize:
tableSize === 'compact'
? 'var(--mantine-font-size-xs)'
: 'var(--mantine-font-size-sm)',
padding: tableSize === 'compact' ? '2px 8px' : '4px 10px',
};
},
});
@ -448,7 +540,7 @@ const EPGsTable = () => {
<Flex gap={6}>
<Tooltip label="Assign">
<Button
leftSection={<IconSquarePlus size={18} />}
leftSection={<SquarePlus size={18} />}
variant="light"
size="xs"
onClick={() => editEPG()}
@ -467,7 +559,26 @@ const EPGsTable = () => {
</Box>
</Paper>
<MantineReactTable table={table} />
<Box
style={{
display: 'flex',
flexDirection: 'column',
height: 'calc(40vh - 10px)',
}}
>
<Box
style={{
flex: 1,
overflowY: 'auto',
overflowX: 'hidden',
border: 'solid 1px rgb(68,68,68)',
borderRadius: 'var(--mantine-radius-default)',
}}
>
<CustomTable table={table} />
</Box>
</Box>
<EPGForm epg={epg} isOpen={epgModalOpen} onClose={closeEPGForm} />
<ConfirmationDialog
@ -482,9 +593,15 @@ const EPGsTable = () => {
Name: ${epgToDelete.name}
Source Type: ${epgToDelete.source_type}
${epgToDelete.url ? `URL: ${epgToDelete.url}` :
epgToDelete.api_key ? `API Key: ${epgToDelete.api_key}` :
epgToDelete.file_path ? `File Path: ${epgToDelete.file_path}` : ''}
${
epgToDelete.url
? `URL: ${epgToDelete.url}`
: epgToDelete.api_key
? `API Key: ${epgToDelete.api_key}`
: epgToDelete.file_path
? `File Path: ${epgToDelete.file_path}`
: ''
}
This will remove all related program information and channel associations.
This action cannot be undone.`}

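Taken together, the EPG table has moved off mantine-react-table onto the shared useTable/CustomTable pair: plain column defs with lower-case cell renderers, a bodyCellRenderFns map for the actions column, a headerCellRenderFns map for header text and sort icons, and tableCellProps for sizing. A trimmed, illustrative config of that shape (option names taken from this diff; column set and values abbreviated):

const table = useTable({
  columns: [
    { header: 'Name', accessorKey: 'name', sortable: true, size: 200 },
    { id: 'actions', header: 'Actions', size: 100 },
  ],
  data,
  allRowIds: data.map((row) => row.id),
  enablePagination: false,
  enableRowVirtualization: true,
  enableRowSelection: false,
  renderTopToolbar: false,
  manualSorting: true,
  bodyCellRenderFns: {
    actions: ({ row }) => <RowActions row={row} />,
  },
  headerCellRenderFns: {
    name: renderHeaderCell,
    actions: renderHeaderCell,
  },
  tableCellProps: () => ({ padding: '4px 10px' }),
});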
View file

@ -1,5 +1,10 @@
import { useEffect, useMemo, useRef, useState } from 'react';
import { MantineReactTable, useMantineReactTable } from 'mantine-react-table';
import React, {
useEffect,
useMemo,
useRef,
useState,
useCallback,
} from 'react';
import API from '../../api';
import usePlaylistsStore from '../../store/playlists';
import M3UForm from '../forms/M3U';
@ -18,41 +23,111 @@ import {
Stack,
Badge,
Group,
Center,
} from '@mantine/core';
import { SquareMinus, SquarePen, RefreshCcw, Check, X } from 'lucide-react';
import { IconSquarePlus } from '@tabler/icons-react'; // Import custom icons
import {
SquareMinus,
SquarePen,
RefreshCcw,
Check,
X,
ArrowUpDown,
ArrowUpNarrowWide,
ArrowDownWideNarrow,
SquarePlus,
} from 'lucide-react';
import dayjs from 'dayjs';
import useSettingsStore from '../../store/settings';
import useLocalStorage from '../../hooks/useLocalStorage';
import ConfirmationDialog from '../../components/ConfirmationDialog';
import useWarningsStore from '../../store/warnings';
import { CustomTable, useTable } from './CustomTable';
// Helper function to format status text
const formatStatusText = (status) => {
switch (status) {
case 'idle': return 'Idle';
case 'fetching': return 'Fetching';
case 'parsing': return 'Parsing';
case 'error': return 'Error';
case 'success': return 'Success';
case 'pending_setup': return 'Pending Setup';
default: return status ? status.charAt(0).toUpperCase() + status.slice(1) : 'Unknown';
case 'idle':
return 'Idle';
case 'fetching':
return 'Fetching';
case 'parsing':
return 'Parsing';
case 'error':
return 'Error';
case 'success':
return 'Success';
case 'pending_setup':
return 'Pending Setup';
default:
return status
? status.charAt(0).toUpperCase() + status.slice(1)
: 'Unknown';
}
};
// Helper function to get status text color
const getStatusColor = (status) => {
switch (status) {
case 'idle': return 'gray.5';
case 'fetching': return 'blue.5';
case 'parsing': return 'indigo.5';
case 'error': return 'red.5';
case 'success': return 'green.5';
case 'pending_setup': return 'orange.5'; // Orange to indicate action needed
default: return 'gray.5';
case 'idle':
return 'gray.5';
case 'fetching':
return 'blue.5';
case 'parsing':
return 'indigo.5';
case 'error':
return 'red.5';
case 'success':
return 'green.5';
case 'pending_setup':
return 'orange.5'; // Orange to indicate action needed
default:
return 'gray.5';
}
};
const RowActions = ({
tableSize,
editPlaylist,
deletePlaylist,
row,
refreshPlaylist,
}) => {
const iconSize =
tableSize == 'default' ? 'sm' : tableSize == 'compact' ? 'xs' : 'md';
return (
<>
<ActionIcon
variant="transparent"
size={iconSize}
color="yellow.5"
onClick={() => {
editPlaylist(row.original);
}}
>
<SquarePen size={tableSize === 'compact' ? 16 : 18} />
</ActionIcon>
<ActionIcon
variant="transparent"
size={iconSize}
color="red.9"
onClick={() => deletePlaylist(row.original.id)}
>
<SquareMinus size={tableSize === 'compact' ? 16 : 18} />
</ActionIcon>
<ActionIcon
variant="transparent"
size={iconSize}
color="blue.5"
onClick={() => refreshPlaylist(row.original.id)}
disabled={!row.original.is_active}
>
<RefreshCcw size={tableSize === 'compact' ? 16 : 18} />
</ActionIcon>
</>
);
};
const M3UTable = () => {
const [playlist, setPlaylist] = useState(null);
const [playlistModalOpen, setPlaylistModalOpen] = useState(false);
@ -63,6 +138,8 @@ const M3UTable = () => {
const [confirmDeleteOpen, setConfirmDeleteOpen] = useState(false);
const [deleteTarget, setDeleteTarget] = useState(null);
const [playlistToDelete, setPlaylistToDelete] = useState(null);
const [data, setData] = useState([]);
  const [sorting, setSorting] = useState([{ id: 'name', desc: false }]);
const playlists = usePlaylistsStore((s) => s.playlists);
const refreshProgress = usePlaylistsStore((s) => s.refreshProgress);
@ -94,7 +171,9 @@ const M3UTable = () => {
return buildParsingStats(data);
default:
return data.status === 'error' ? buildErrorStats(data) : `${data.action || 'Processing'}...`;
return data.status === 'error'
? buildErrorStats(data)
: `${data.action || 'Processing'}...`;
}
};
@ -108,28 +187,35 @@ const M3UTable = () => {
}
// Format time remaining in minutes:seconds
const timeRemaining = data.time_remaining ?
`${Math.floor(data.time_remaining / 60)}:${String(Math.floor(data.time_remaining % 60)).padStart(2, '0')}` :
'calculating...';
const timeRemaining = data.time_remaining
? `${Math.floor(data.time_remaining / 60)}:${String(Math.floor(data.time_remaining % 60)).padStart(2, '0')}`
: 'calculating...';
// Format speed with appropriate unit (KB/s or MB/s)
const speed = data.speed >= 1024 ?
`${(data.speed / 1024).toFixed(2)} MB/s` :
`${Math.round(data.speed)} KB/s`;
const speed =
data.speed >= 1024
? `${(data.speed / 1024).toFixed(2)} MB/s`
: `${Math.round(data.speed)} KB/s`;
return (
<Box>
<Flex direction="column" gap={2}>
<Flex justify="space-between" align="center">
<Text size="xs" fw={500}>Downloading:</Text>
<Text size="xs" fw={500}>
Downloading:
</Text>
<Text size="xs">{parseInt(data.progress)}%</Text>
</Flex>
<Flex justify="space-between" align="center">
<Text size="xs" fw={500}>Speed:</Text>
<Text size="xs" fw={500}>
Speed:
</Text>
<Text size="xs">{speed}</Text>
</Flex>
<Flex justify="space-between" align="center">
<Text size="xs" fw={500}>Time left:</Text>
<Text size="xs" fw={500}>
Time left:
</Text>
<Text size="xs">{timeRemaining}</Text>
</Flex>
</Flex>
@ -147,26 +233,32 @@ const M3UTable = () => {
}
// Format time displays if available
const elapsedTime = data.elapsed_time ?
`${Math.floor(data.elapsed_time / 60)}:${String(Math.floor(data.elapsed_time % 60)).padStart(2, '0')}` :
null;
const elapsedTime = data.elapsed_time
? `${Math.floor(data.elapsed_time / 60)}:${String(Math.floor(data.elapsed_time % 60)).padStart(2, '0')}`
: null;
return (
<Box>
<Flex direction="column" gap={2}>
<Flex justify="space-between" align="center">
<Text size="xs" fw={500}>Processing groups:</Text>
<Text size="xs" fw={500}>
Processing groups:
</Text>
<Text size="xs">{parseInt(data.progress)}%</Text>
</Flex>
{elapsedTime && (
<Flex justify="space-between" align="center">
<Text size="xs" fw={500}>Elapsed:</Text>
<Text size="xs" fw={500}>
Elapsed:
</Text>
<Text size="xs">{elapsedTime}</Text>
</Flex>
)}
{data.groups_processed && (
<Flex justify="space-between" align="center">
<Text size="xs" fw={500}>Groups:</Text>
<Text size="xs" fw={500}>
Groups:
</Text>
<Text size="xs">{data.groups_processed}</Text>
</Flex>
)}
@ -180,9 +272,13 @@ const M3UTable = () => {
<Box>
<Flex direction="column" gap={2}>
<Flex align="center">
<Text size="xs" fw={500} color="red">Error:</Text>
<Text size="xs" fw={500} color="red">
Error:
</Text>
</Flex>
<Text size="xs" color="red" style={{ lineHeight: 1.3 }}>{data.error || "Unknown error occurred"}</Text>
<Text size="xs" color="red" style={{ lineHeight: 1.3 }}>
{data.error || 'Unknown error occurred'}
</Text>
</Flex>
</Box>
);
@ -198,36 +294,44 @@ const M3UTable = () => {
}
// Format time displays
const timeRemaining = data.time_remaining ?
`${Math.floor(data.time_remaining / 60)}:${String(Math.floor(data.time_remaining % 60)).padStart(2, '0')}` :
'calculating...';
const timeRemaining = data.time_remaining
? `${Math.floor(data.time_remaining / 60)}:${String(Math.floor(data.time_remaining % 60)).padStart(2, '0')}`
: 'calculating...';
const elapsedTime = data.elapsed_time ?
`${Math.floor(data.elapsed_time / 60)}:${String(Math.floor(data.elapsed_time % 60)).padStart(2, '0')}` :
'0:00';
const elapsedTime = data.elapsed_time
? `${Math.floor(data.elapsed_time / 60)}:${String(Math.floor(data.elapsed_time % 60)).padStart(2, '0')}`
: '0:00';
return (
<Box>
<Flex direction="column" gap={2}>
<Flex justify="space-between" align="center">
<Text size="xs" fw={500} style={{ width: '80px' }}>Parsing:</Text>
<Text size="xs" fw={500} style={{ width: '80px' }}>
Parsing:
</Text>
<Text size="xs">{parseInt(data.progress)}%</Text>
</Flex>
{data.elapsed_time && (
<Flex justify="space-between" align="center">
<Text size="xs" fw={500} style={{ width: '80px' }}>Elapsed:</Text>
<Text size="xs" fw={500} style={{ width: '80px' }}>
Elapsed:
</Text>
<Text size="xs">{elapsedTime}</Text>
</Flex>
)}
{data.time_remaining && (
<Flex justify="space-between" align="center">
<Text size="xs" fw={500} style={{ width: '60px' }}>Remaining:</Text>
<Text size="xs" fw={500} style={{ width: '60px' }}>
Remaining:
</Text>
<Text size="xs">{timeRemaining}</Text>
</Flex>
)}
{data.streams_processed && (
<Flex justify="space-between" align="center">
<Text size="xs" fw={500} style={{ width: '80px' }}>Streams:</Text>
<Text size="xs" fw={500} style={{ width: '80px' }}>
Streams:
</Text>
<Text size="xs">{data.streams_processed}</Text>
</Flex>
)}
@ -241,7 +345,9 @@ const M3UTable = () => {
<Box>
<Flex direction="column" gap={2}>
<Flex align="center">
<Text size="xs" fw={500}>Initializing refresh...</Text>
<Text size="xs" fw={500}>
Initializing refresh...
</Text>
</Flex>
</Flex>
</Box>
@ -261,7 +367,7 @@ const M3UTable = () => {
action: 'initializing',
progress: 0,
account: id,
type: 'm3u_refresh'
type: 'm3u_refresh',
});
try {
@ -275,14 +381,14 @@ const M3UTable = () => {
account: id,
type: 'm3u_refresh',
error: 'Failed to start refresh task',
status: 'error'
status: 'error',
});
}
};
const deletePlaylist = async (id) => {
// Get playlist details for the confirmation dialog
const playlist = playlists.find(p => p.id === id);
const playlist = playlists.find((p) => p.id === id);
setPlaylistToDelete(playlist);
setDeleteTarget(id);
@ -304,10 +410,13 @@ const M3UTable = () => {
const toggleActive = async (playlist) => {
try {
// Send only the is_active field to trigger our special handling
await API.updatePlaylist({
id: playlist.id,
is_active: !playlist.is_active,
}, true); // Add a new parameter to indicate this is just a toggle
await API.updatePlaylist(
{
id: playlist.id,
is_active: !playlist.is_active,
},
true
); // Add a new parameter to indicate this is just a toggle
} catch (error) {
console.error('Error toggling active state:', error);
}
@ -318,14 +427,15 @@ const M3UTable = () => {
{
header: 'Name',
accessorKey: 'name',
size: 150,
minSize: 100, // Minimum width
size: 200,
sortable: true,
},
{
header: 'Account Type',
accessorKey: 'account_type',
size: 100,
Cell: ({ cell }) => {
sortable: true,
size: 150,
cell: ({ cell }) => {
const value = cell.getValue();
return value === 'XC' ? 'XC' : 'M3U';
},
@ -333,9 +443,7 @@ const M3UTable = () => {
{
header: 'URL / File',
accessorKey: 'server_url',
size: 200,
minSize: 120,
Cell: ({ cell, row }) => {
cell: ({ cell, row }) => {
const value = cell.getValue() || row.original.file_path || '';
return (
<Tooltip label={value} disabled={!value}>
@ -356,13 +464,14 @@ const M3UTable = () => {
{
header: 'Max Streams',
accessorKey: 'max_streams',
size: 100,
sortable: true,
size: 150,
},
{
header: 'Status',
accessorKey: 'status',
size: 100,
Cell: ({ cell }) => {
size: 150,
cell: ({ cell }) => {
const value = cell.getValue();
if (!value) return null;
@ -377,10 +486,7 @@ const M3UTable = () => {
{
header: 'Status Message',
accessorKey: 'last_message',
size: 250, // Increase default size
minSize: 200, // Set minimum size
maxSize: 400, // Allow expansion up to this size
Cell: ({ cell, row }) => {
cell: ({ cell, row }) => {
const value = cell.getValue();
const data = row.original;
@ -391,16 +497,18 @@ const M3UTable = () => {
// If we have active progress data for this account, show that instead
if (progressData && progressData.progress < 100) {
return (
<Box style={{
// Use full height of the cell with proper spacing
height: '100%',
width: '100%',
display: 'flex',
alignItems: 'center',
justifyContent: 'flex-start',
// Add some padding to give content room to breathe
padding: '4px 0'
}}>
<Box
style={{
// Use full height of the cell with proper spacing
height: '100%',
width: '100%',
display: 'flex',
alignItems: 'center',
justifyContent: 'flex-start',
// Add some padding to give content room to breathe
padding: '4px 0',
}}
>
{generateStatusString(progressData)}
</Box>
);
@ -413,7 +521,12 @@ const M3UTable = () => {
if (data.status === 'error') {
return (
<Tooltip label={value} multiline width={300}>
<Text c="dimmed" size="xs" lineClamp={2} style={{ color: theme.colors.red[6], lineHeight: 1.3 }}>
<Text
c="dimmed"
size="xs"
lineClamp={2}
style={{ color: theme.colors.red[6], lineHeight: 1.3 }}
>
{value}
</Text>
</Tooltip>
@ -424,7 +537,17 @@ const M3UTable = () => {
if (data.status === 'success') {
return (
<Tooltip label={value} multiline width={300}>
<Text c="dimmed" size="xs" style={{ color: theme.colors.green[6], lineHeight: 1.1 }}>
<Text
c="dimmed"
size="xs"
style={{
color: theme.colors.green[6],
lineHeight: 1.3,
whiteSpace: 'nowrap',
overflow: 'hidden',
textOverflow: 'ellipsis',
}}
>
{value}
</Text>
</Tooltip>
@ -434,7 +557,12 @@ const M3UTable = () => {
// For all other status values, just use dimmed text
return (
<Tooltip label={value} multiline width={300}>
<Text c="dimmed" size="xs" lineClamp={2} style={{ lineHeight: 1.1 }}>
<Text
c="dimmed"
size="xs"
lineClamp={2}
style={{ lineHeight: 1.1 }}
>
{value}
</Text>
</Tooltip>
@ -444,22 +572,21 @@ const M3UTable = () => {
{
header: 'Updated',
accessorKey: 'updated_at',
size: 120,
Cell: ({ cell }) => {
size: 175,
cell: ({ cell }) => {
const value = cell.getValue();
return value ? <Text size="xs">{new Date(value).toLocaleString()}</Text> : <Text size="xs">Never</Text>;
return value ? (
<Text size="xs">{new Date(value).toLocaleString()}</Text>
) : (
<Text size="xs">Never</Text>
);
},
},
{
header: 'Active',
accessorKey: 'is_active',
size: 80,
minSize: 60,
sortingFn: 'basic',
mantineTableBodyCellProps: {
align: 'left',
},
Cell: ({ cell, row }) => {
size: 50,
cell: ({ cell, row }) => {
return (
<Box sx={{ display: 'flex', justifyContent: 'center' }}>
<Switch
@ -471,7 +598,11 @@ const M3UTable = () => {
);
},
},
// Remove the custom Actions column here
{
id: 'actions',
header: 'Actions',
size: tableSize == 'compact' ? 75 : 100,
},
],
[refreshPlaylist, editPlaylist, deletePlaylist, toggleActive]
);
@ -480,7 +611,6 @@ const M3UTable = () => {
const rowVirtualizerInstanceRef = useRef(null);
const [isLoading, setIsLoading] = useState(true);
const [sorting, setSorting] = useState([]);
const closeModal = (newPlaylist = null) => {
if (newPlaylist) {
@ -493,19 +623,6 @@ const M3UTable = () => {
}
};
const deletePlaylists = async (ids) => {
const selected = table
.getRowModel()
.rows.filter((row) => row.getIsSelected());
// await API.deleteStreams(selected.map(stream => stream.original.id))
};
useEffect(() => {
if (typeof window !== 'undefined') {
setIsLoading(false);
}
}, []);
useEffect(() => {
//scroll to the top of the table when the sorting changes
try {
@ -517,8 +634,21 @@ const M3UTable = () => {
// Listen for edit playlist requests from notifications
useEffect(() => {
setData(
playlists
.filter((playlist) => playlist.locked === false)
.sort((a, b) => {
// First sort by active status (active items first)
if (a.is_active !== b.is_active) {
return a.is_active ? -1 : 1;
}
// Then sort by name (case-insensitive)
return a.name.toLowerCase().localeCompare(b.name.toLowerCase());
})
);
if (editPlaylistId) {
const playlistToEdit = playlists.find(p => p.id === editPlaylistId);
const playlistToEdit = playlists.find((p) => p.id === editPlaylistId);
if (playlistToEdit) {
editPlaylist(playlistToEdit);
// Reset the ID after handling
@ -527,80 +657,120 @@ const M3UTable = () => {
}
}, [editPlaylistId, playlists]);
const tableDensity = tableSize === 'compact' ? 'xs' : tableSize === 'large' ? 'xl' : 'md';
const onSortingChange = (column) => {
console.log(column);
const sortField = sorting[0]?.id;
const sortDirection = sorting[0]?.desc;
const table = useMantineReactTable({
...TableHelper.defaultProperties,
const newSorting = [];
if (sortField == column) {
if (sortDirection == false) {
newSorting[0] = {
id: column,
desc: true,
};
}
} else {
newSorting[0] = {
id: column,
desc: false,
};
}
setSorting(newSorting);
if (newSorting.length > 0) {
const compareColumn = newSorting[0].id;
const compareDesc = newSorting[0].desc;
setData(
playlists
.filter((playlist) => playlist.locked === false)
          .sort((a, b) => {
            if (a[compareColumn] === b[compareColumn]) {
              return 0;
            }
            const result = a[compareColumn] > b[compareColumn] ? 1 : -1;
            return compareDesc ? -result : result;
          })
);
}
};
const renderHeaderCell = (header) => {
let sortingIcon = ArrowUpDown;
if (sorting[0]?.id == header.id) {
if (sorting[0].desc === false) {
sortingIcon = ArrowUpNarrowWide;
} else {
sortingIcon = ArrowDownWideNarrow;
}
}
switch (header.id) {
default:
return (
<Group>
<Text size="sm" name={header.id}>
{header.column.columnDef.header}
</Text>
{header.column.columnDef.sortable && (
<Center>
{React.createElement(sortingIcon, {
onClick: () => onSortingChange(header.id),
size: 14,
})}
</Center>
)}
</Group>
);
}
};
const renderBodyCell = useCallback(({ cell, row }) => {
switch (cell.column.id) {
case 'actions':
return (
<RowActions
tableSize={tableSize}
editPlaylist={editPlaylist}
deletePlaylist={deletePlaylist}
row={row}
refreshPlaylist={refreshPlaylist}
/>
);
}
}, []);
const table = useTable({
columns,
// Sort data before passing to table: active first, then by name
data: playlists
.filter((playlist) => playlist.locked === false)
.sort((a, b) => {
// First sort by active status (active items first)
if (a.is_active !== b.is_active) {
return a.is_active ? -1 : 1;
}
// Then sort by name (case-insensitive)
return a.name.toLowerCase().localeCompare(b.name.toLowerCase());
}),
data,
allRowIds: data.map((playlist) => playlist.id),
enablePagination: false,
enableRowVirtualization: true,
enableRowSelection: false,
onRowSelectionChange: setRowSelection,
renderTopToolbar: false,
onSortingChange: setSorting,
state: {
isLoading,
sorting,
rowSelection,
// Use density directly from tableSize
density: tableDensity,
},
sorting,
manualSorting: true,
rowVirtualizerInstanceRef, //optional
rowVirtualizerOptions: { overscan: 5 }, //optionally customize the row virtualizer
initialState: {
// Use density directly from tableSize
density: tableDensity,
bodyCellRenderFns: {
actions: renderBodyCell,
},
enableRowActions: true, // Enable row actions
positionActionsColumn: 'last',
displayColumnDefOptions: {
'mrt-row-actions': {
size: 120, // Make action column wider
minSize: 120, // Ensure minimum width for action buttons
},
headerCellRenderFns: {
name: renderHeaderCell,
account_type: renderHeaderCell,
server_url: renderHeaderCell,
max_streams: renderHeaderCell,
status: renderHeaderCell,
last_message: renderHeaderCell,
updated_at: renderHeaderCell,
is_active: renderHeaderCell,
actions: renderHeaderCell,
},
renderRowActions: ({ row }) => (
<>
<ActionIcon
variant="transparent"
size={tableSize === 'compact' ? 'xs' : tableSize === 'large' ? 'md' : 'sm'} // Use standardized icon size
color="yellow.5"
onClick={() => {
editPlaylist(row.original);
}}
>
<SquarePen size={tableSize === 'compact' ? 16 : 18} />
</ActionIcon>
<ActionIcon
variant="transparent"
size={tableSize === 'compact' ? 'xs' : tableSize === 'large' ? 'md' : 'sm'} // Use standardized icon size
color="red.9"
onClick={() => deletePlaylist(row.original.id)}
>
<SquareMinus size={tableSize === 'compact' ? 16 : 18} />
</ActionIcon>
<ActionIcon
variant="transparent"
size={tableSize === 'compact' ? 'xs' : tableSize === 'large' ? 'md' : 'sm'} // Use standardized icon size
color="blue.5"
onClick={() => refreshPlaylist(row.original.id)}
disabled={!row.original.is_active}
>
<RefreshCcw size={tableSize === 'compact' ? 16 : 18} />
</ActionIcon>
</>
),
mantineTableContainerProps: {
style: {
height: 'calc(40vh - 10px)',
@ -612,34 +782,22 @@ const M3UTable = () => {
className: `table-size-${tableSize}`,
},
// Add custom cell styles to match CustomTable's sizing
mantineTableBodyCellProps: ({ cell }) => {
// Check if this is a status message cell with active progress
const progressData = cell.column.id === 'last_message' &&
refreshProgress[cell.row.original.id] &&
refreshProgress[cell.row.original.id].progress < 100 ?
refreshProgress[cell.row.original.id] : null;
// Only expand height for certain actions that need more space
const needsExpandedHeight = progressData &&
['downloading', 'parsing', 'processing_groups'].includes(progressData.action);
tableCellProps: ({ cell }) => {
return {
style: {
// Apply taller height for progress cells (except initializing), otherwise use standard height
height: needsExpandedHeight ? '80px' : (
tableSize === 'compact' ? '28px' : tableSize === 'large' ? '48px' : '40px'
),
fontSize: tableSize === 'compact' ? 'var(--mantine-font-size-xs)' : 'var(--mantine-font-size-sm)',
padding: tableSize === 'compact' ? '2px 8px' : '4px 10px'
}
fontSize:
tableSize === 'compact'
? 'var(--mantine-font-size-xs)'
: 'var(--mantine-font-size-sm)',
padding: tableSize === 'compact' ? '2px 8px' : '4px 10px',
};
},
// Additional text styling to match ChannelsTable
mantineTableBodyProps: {
style: {
fontSize: tableSize === 'compact' ? 'var(--mantine-font-size-xs)' : 'var(--mantine-font-size-sm)',
}
},
tableBodyProps: () => ({
fontSize:
tableSize === 'compact'
? 'var(--mantine-font-size-xs)'
: 'var(--mantine-font-size-sm)',
}),
});
return (
@ -684,7 +842,7 @@ const M3UTable = () => {
<Flex gap={6}>
<Tooltip label="Assign">
<Button
leftSection={<IconSquarePlus size={14} />}
leftSection={<SquarePlus size={14} />}
variant="light"
size="xs"
onClick={() => editPlaylist()}
@ -696,13 +854,32 @@ const M3UTable = () => {
color: 'white',
}}
>
Add
Add M3U
</Button>
</Tooltip>
</Flex>
</Box>
</Paper>
<MantineReactTable table={table} />
<Box
style={{
display: 'flex',
flexDirection: 'column',
height: 'calc(40vh - 10px)',
}}
>
<Box
style={{
flex: 1,
overflowY: 'auto',
overflowX: 'hidden',
border: 'solid 1px rgb(68,68,68)',
borderRadius: 'var(--mantine-radius-default)',
}}
>
<CustomTable table={table} />
</Box>
</Box>
<M3UForm
m3uAccount={playlist}

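The download and parsing progress cells above repeat the same inline formatting several times: seconds rendered as m:ss and KB/s promoted to MB/s above 1024. Extracted as standalone helpers (sketch only; the component keeps the expressions inline):

function formatDuration(totalSeconds) {
  if (totalSeconds == null) return 'calculating...';
  const minutes = Math.floor(totalSeconds / 60);
  const seconds = String(Math.floor(totalSeconds % 60)).padStart(2, '0');
  return `${minutes}:${seconds}`;
}

function formatSpeed(kilobytesPerSecond) {
  return kilobytesPerSecond >= 1024
    ? `${(kilobytesPerSecond / 1024).toFixed(2)} MB/s`
    : `${Math.round(kilobytesPerSecond)} KB/s`;
}

// formatDuration(95)  -> '1:35'
// formatSpeed(2048)   -> '2.00 MB/s'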
View file

@ -1,5 +1,4 @@
import { useEffect, useMemo, useRef, useState } from 'react';
import { MantineReactTable, useMantineReactTable } from 'mantine-react-table';
import API from '../../api';
import StreamProfileForm from '../forms/StreamProfile';
import useStreamProfilesStore from '../../store/streamProfiles';
@ -19,59 +18,112 @@ import {
Switch,
Stack,
} from '@mantine/core';
import { IconSquarePlus } from '@tabler/icons-react';
import { SquareMinus, SquarePen, Check, X, Eye, EyeOff } from 'lucide-react';
import {
SquareMinus,
SquarePen,
Check,
X,
Eye,
EyeOff,
SquarePlus,
} from 'lucide-react';
import { CustomTable, useTable } from './CustomTable';
import useLocalStorage from '../../hooks/useLocalStorage';
const RowActions = ({ row, editStreamProfile, deleteStreamProfile }) => {
return (
<>
<ActionIcon
variant="transparent"
color="yellow.5"
size="sm"
disabled={row.original.locked}
onClick={() => editStreamProfile(row.original)}
>
<SquarePen size="18" /> {/* Small icon size */}
</ActionIcon>
<ActionIcon
variant="transparent"
size="sm"
color="red.9"
disabled={row.original.locked}
onClick={() => deleteStreamProfile(row.original.id)}
>
<SquareMinus fontSize="small" /> {/* Small icon size */}
</ActionIcon>
</>
);
};
const StreamProfiles = () => {
const [profile, setProfile] = useState(null);
const [profileModalOpen, setProfileModalOpen] = useState(false);
const [rowSelection, setRowSelection] = useState([]);
const [activeFilterValue, setActiveFilterValue] = useState('all');
const [hideInactive, setHideInactive] = useState(false);
const [data, setData] = useState([]);
const streamProfiles = useStreamProfilesStore((state) => state.profiles);
const settings = useSettingsStore((s) => s.settings);
const [tableSize] = useLocalStorage('table-size', 'default');
const theme = useMantineTheme();
const columns = useMemo(
//column definitions...
() => [
{
header: 'Name',
accessorKey: 'name',
size: 50,
size: 150,
cell: ({ cell }) => (
<div
style={{
whiteSpace: 'nowrap',
overflow: 'hidden',
textOverflow: 'ellipsis',
}}
>
{cell.getValue()}
</div>
),
},
{
header: 'Command',
accessorKey: 'command',
size: 100,
size: 150,
cell: ({ cell }) => (
<div
style={{
whiteSpace: 'nowrap',
overflow: 'hidden',
textOverflow: 'ellipsis',
}}
>
{cell.getValue()}
</div>
),
},
{
header: 'Parameters',
accessorKey: 'parameters',
enableSorting: false,
mantineTableBodyCellProps: {
style: {
whiteSpace: 'nowrap',
// maxWidth: 400,
paddingLeft: 10,
paddingRight: 10,
},
},
// size: 200,
cell: ({ cell }) => (
<Tooltip label={cell.getValue()}>
<div
style={{
whiteSpace: 'nowrap',
overflow: 'hidden',
textOverflow: 'ellipsis',
}}
>
{cell.getValue()}
</div>
</Tooltip>
),
},
{
header: 'Active',
accessorKey: 'is_active',
size: 10,
enableSorting: false,
mantineTableHeadCellProps: {
align: 'right',
},
mantineTableBodyCellProps: {
align: 'right',
},
Cell: ({ row, cell }) => (
size: 50,
cell: ({ row, cell }) => (
<Center>
<Switch
size="xs"
@ -81,24 +133,11 @@ const StreamProfiles = () => {
/>
</Center>
),
Filter: ({ column }) => (
<Box>
<Select
size="small"
value={activeFilterValue}
onChange={(e) => {
setActiveFilterValue(e.target.value);
column.setFilterValue(e.target.value);
}}
displayEmpty
data={['All', 'Active', 'Inactive']}
/>
</Box>
),
filterFn: (row, _columnId, filterValue) => {
if (filterValue == 'all') return true;
return String(row.getValue('is_active')) === filterValue;
},
},
{
id: 'actions',
header: 'Actions',
size: tableSize == 'compact' ? 75 : 100,
},
],
[]
@ -159,63 +198,48 @@ const StreamProfiles = () => {
});
};
const filteredData = streamProfiles.filter((profile) =>
hideInactive && !profile.is_active ? false : true
);
useEffect(() => {
setData(
streamProfiles.filter((profile) =>
hideInactive && !profile.is_active ? false : true
)
);
}, [streamProfiles, hideInactive]);
const table = useMantineReactTable({
...TableHelper.defaultProperties,
const renderHeaderCell = (header) => {
return (
<Text size="sm" name={header.id}>
{header.column.columnDef.header}
</Text>
);
};
const renderBodyCell = ({ cell, row }) => {
switch (cell.column.id) {
case 'actions':
return (
<RowActions
row={row}
            editStreamProfile={editStreamProfile}
            deleteStreamProfile={deleteStreamProfile}
/>
);
}
};
const table = useTable({
columns,
data: filteredData,
enablePagination: false,
enableRowVirtualization: true,
// enableRowSelection: true,
renderTopToolbar: false,
// onRowSelectionChange: setRowSelection,
onSortingChange: setSorting,
state: {
isLoading,
sorting,
// rowSelection,
data,
allRowIds: data.map((d) => d.id),
bodyCellRenderFns: {
actions: renderBodyCell,
},
rowVirtualizerInstanceRef, //optional
rowVirtualizerOptions: { overscan: 5 }, //optionally customize the row virtualizer
initialState: {
density: 'compact',
},
displayColumnDefOptions: {
'mrt-row-actions': {
size: 10,
},
},
enableRowActions: true,
renderRowActions: ({ row }) => (
<>
<ActionIcon
variant="transparent"
color="yellow.5"
size="sm"
disabled={row.original.locked}
onClick={() => editStreamProfile(row.original)}
>
<SquarePen size="18" /> {/* Small icon size */}
</ActionIcon>
<ActionIcon
variant="transparent"
size="sm"
color="red.9"
disabled={row.original.locked}
onClick={() => deleteStreamProfile(row.original.id)}
>
<SquareMinus fontSize="small" /> {/* Small icon size */}
</ActionIcon>
</>
),
mantineTableContainerProps: {
style: {
// height: 'calc(60vh - 100px)',
overflowY: 'auto',
},
headerCellRenderFns: {
name: renderHeaderCell,
command: renderHeaderCell,
parameters: renderHeaderCell,
is_active: renderHeaderCell,
actions: renderHeaderCell,
},
});
@ -256,7 +280,7 @@ const StreamProfiles = () => {
</Tooltip>
<Tooltip label="Assign">
<Button
leftSection={<IconSquarePlus size={18} />}
leftSection={<SquarePlus size={18} />}
variant="light"
size="xs"
onClick={() => editStreamProfile()}
@ -275,7 +299,25 @@ const StreamProfiles = () => {
</Box>
</Paper>
<MantineReactTable table={table} />
<Box
style={{
display: 'flex',
flexDirection: 'column',
maxHeight: 300,
}}
>
<Box
style={{
flex: 1,
overflowY: 'auto',
overflowX: 'hidden',
border: 'solid 1px rgb(68,68,68)',
borderRadius: 'var(--mantine-radius-default)',
}}
>
<CustomTable table={table} />
</Box>
</Box>
<StreamProfileForm
profile={profile}

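StreamProfiles now derives its rows from the store plus the hideInactive flag inside a useEffect and mirrors them into local state, matching the other tables. For reference, the same derivation can be written without the intermediate state as a memo; this is an equivalent sketch, not what the commit does:

import { useMemo } from 'react';

// streamProfiles comes from the zustand store, hideInactive from local state.
const data = useMemo(
  () =>
    streamProfiles.filter((profile) =>
      hideInactive ? profile.is_active : true
    ),
  [streamProfiles, hideInactive]
);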
View file

@ -36,8 +36,9 @@ import {
MultiSelect,
useMantineTheme,
UnstyledButton,
LoadingOverlay,
Skeleton,
} from '@mantine/core';
import { IconSquarePlus } from '@tabler/icons-react';
import { useNavigate } from 'react-router-dom';
import useSettingsStore from '../../store/settings';
import useVideoStore from '../../store/useVideoStore';
@ -197,7 +198,13 @@ const StreamsTable = ({}) => {
channel_group: '',
m3u_account: '',
});
const debouncedFilters = useDebounce(filters, 500);
const debouncedFilters = useDebounce(filters, 500, () => {
// Reset to first page whenever filters change to avoid "Invalid page" errors
setPagination((prev) => ({
...prev,
pageIndex: 0,
}));
});
// Add state to track if stream groups are loaded
const [groupsLoaded, setGroupsLoaded] = useState(false);
@ -306,12 +313,6 @@ const StreamsTable = ({}) => {
...prev,
[name]: value,
}));
// Reset to first page whenever filters change to avoid "Invalid page" errors
setPagination((prev) => ({
...prev,
pageIndex: 0,
}));
};
const handleGroupChange = (value) => {
@ -319,12 +320,6 @@ const StreamsTable = ({}) => {
...prev,
channel_group: value ? value : '',
}));
// Reset to first page whenever filters change to avoid "Invalid page" errors
setPagination((prev) => ({
...prev,
pageIndex: 0,
}));
};
const handleM3UChange = (value) => {
@ -332,12 +327,6 @@ const StreamsTable = ({}) => {
...prev,
m3u_account: value ? value : '',
}));
// Reset to first page whenever filters change to avoid "Invalid page" errors
setPagination((prev) => ({
...prev,
pageIndex: 0,
}));
};
const fetchData = useCallback(async () => {
@ -672,7 +661,7 @@ const StreamsTable = ({}) => {
<Group justify="space-between" style={{ paddingLeft: 10 }}>
<Box>
<Button
leftSection={<IconSquarePlus size={18} />}
leftSection={<SquarePlus size={18} />}
variant={
selectedStreamIds.length > 0 && selectedChannelIds.length === 1
? 'light'
@ -725,7 +714,7 @@ const StreamsTable = ({}) => {
</Button>
<Button
leftSection={<IconSquarePlus size={18} />}
leftSection={<SquarePlus size={18} />}
variant="default"
size="xs"
onClick={createChannelsFromStreams}
@ -736,7 +725,7 @@ const StreamsTable = ({}) => {
</Button>
<Button
leftSection={<IconSquarePlus size={18} />}
leftSection={<SquarePlus size={18} />}
variant="light"
size="xs"
onClick={() => editStream()}
@ -824,6 +813,7 @@ const StreamsTable = ({}) => {
borderRadius: 'var(--mantine-radius-default)',
}}
>
<LoadingOverlay visible={isLoading} />
<CustomTable table={table} />
</Box>

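The LoadingOverlay added above sits inside the scrollable Box that wraps CustomTable. Mantine's LoadingOverlay stretches over its nearest positioned ancestor, so that wrapper generally needs position: relative for the overlay to pin to the table area rather than the page; a minimal usage sketch (the full styles of this Box are not visible in the hunk):

import { Box, LoadingOverlay } from '@mantine/core';

// isLoading and children are assumed props for the sake of the sketch.
function TableContainer({ isLoading, children }) {
  return (
    <Box style={{ position: 'relative', flex: 1, overflowY: 'auto' }}>
      <LoadingOverlay visible={isLoading} />
      {children}
    </Box>
  );
}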
View file

@ -1,5 +1,4 @@
import { useEffect, useMemo, useRef, useState } from 'react';
import { MantineReactTable, useMantineReactTable } from 'mantine-react-table';
import API from '../../api';
import useUserAgentsStore from '../../store/userAgents';
import UserAgentForm from '../forms/UserAgent';
@ -18,17 +17,43 @@ import {
Button,
Stack,
} from '@mantine/core';
import { IconSquarePlus } from '@tabler/icons-react';
import { SquareMinus, SquarePen, Check, X } from 'lucide-react';
import { SquareMinus, SquarePen, Check, X, SquarePlus } from 'lucide-react';
import { CustomTable, useTable } from './CustomTable';
import useLocalStorage from '../../hooks/useLocalStorage';
const RowActions = ({ row, editUserAgent, deleteUserAgent }) => {
return (
<>
<ActionIcon
variant="transparent"
size="sm" // Makes the button smaller
color="yellow.5" // Red color for delete actions
onClick={() => {
editUserAgent(row.original);
}}
>
<SquarePen size="18" /> {/* Small icon size */}
</ActionIcon>
<ActionIcon
variant="transparent"
size="sm"
color="red.9" // Red color for delete actions
onClick={() => deleteUserAgent(row.original.id)}
>
<SquareMinus size="18" /> {/* Small icon size */}
</ActionIcon>
</>
);
};
const UserAgentsTable = () => {
const [userAgent, setUserAgent] = useState(null);
const [userAgentModalOpen, setUserAgentModalOpen] = useState(false);
const [rowSelection, setRowSelection] = useState([]);
const [activeFilterValue, setActiveFilterValue] = useState('all');
const userAgents = useUserAgentsStore((state) => state.userAgents);
const settings = useSettingsStore((s) => s.settings);
const [tableSize] = useLocalStorage('table-size', 'default');
const columns = useMemo(
//column definitions...
@ -36,13 +61,12 @@ const UserAgentsTable = () => {
{
header: 'Name',
accessorKey: 'name',
size: 100,
},
{
header: 'User-Agent',
accessorKey: 'user_agent',
enableSorting: false,
Cell: ({ cell }) => (
cell: ({ cell }) => (
<div
style={{
whiteSpace: 'nowrap',
@ -58,7 +82,7 @@ const UserAgentsTable = () => {
header: 'Description',
accessorKey: 'description',
enableSorting: false,
Cell: ({ cell }) => (
cell: ({ cell }) => (
<div
style={{
whiteSpace: 'nowrap',
@ -73,57 +97,23 @@ const UserAgentsTable = () => {
{
header: 'Active',
accessorKey: 'is_active',
size: 10,
sortingFn: 'basic',
enableSorting: false,
mantineTableHeadCellProps: {
align: 'right',
},
mantineTableBodyCellProps: {
align: 'right',
},
Cell: ({ cell }) => (
cell: ({ cell }) => (
<Center>
{cell.getValue() ? <Check color="green" /> : <X color="red" />}
</Center>
),
Filter: ({ column }) => (
<Select
size="small"
value={activeFilterValue}
onChange={(e) => {
setActiveFilterValue(e.target.value);
column.setFilterValue(e.target.value);
}}
displayEmpty
data={[
{
value: 'all',
label: 'All',
},
{
value: 'active',
label: 'Active',
},
{
value: 'inactive',
label: 'Inactive',
},
]}
/>
),
filterFn: (row, _columnId, activeFilterValue) => {
if (activeFilterValue == 'all') return true; // Show all if no filter
return String(row.getValue('is_active')) === activeFilterValue;
},
},
{
id: 'actions',
header: 'Actions',
size: tableSize == 'compact' ? 75 : 100,
},
],
[tableSize]
);
//optionally access the underlying virtualizer instance
const rowVirtualizerInstanceRef = useRef(null);
const [isLoading, setIsLoading] = useState(true);
const [sorting, setSorting] = useState([]);
@ -167,101 +157,60 @@ const UserAgentsTable = () => {
}
}, []);
useEffect(() => {
//scroll to the top of the table when the sorting changes
try {
rowVirtualizerInstanceRef.current?.scrollToIndex?.(0);
} catch (error) {
console.error(error);
const renderHeaderCell = (header) => {
switch (header.id) {
default:
return (
<Text size="sm" name={header.id}>
{header.column.columnDef.header}
</Text>
);
}
}, [sorting]);
};
const table = useMantineReactTable({
...TableHelper.defaultProperties,
const renderBodyCell = ({ cell, row }) => {
switch (cell.column.id) {
case 'actions':
return (
<RowActions
row={row}
editUserAgent={editUserAgent}
deleteUserAgent={deleteUserAgent}
/>
);
}
};
const table = useTable({
columns,
data: userAgents,
enablePagination: false,
enableRowVirtualization: true,
// enableRowSelection: true,
renderTopToolbar: false,
// onRowSelectionChange: setRowSelection,
onSortingChange: setSorting,
state: {
isLoading,
sorting,
// rowSelection,
allRowIds: userAgents.map((ua) => ua.id),
bodyCellRenderFns: {
actions: renderBodyCell,
},
rowVirtualizerInstanceRef, //optional
rowVirtualizerOptions: { overscan: 5 }, //optionally customize the row virtualizer
initialState: {
density: 'compact',
},
enableRowActions: true,
renderRowActions: ({ row }) => (
<>
<ActionIcon
variant="transparent"
size="sm" // Makes the button smaller
color="yellow.5" // Red color for delete actions
onClick={() => {
editUserAgent(row.original);
}}
>
<SquarePen size="18" /> {/* Small icon size */}
</ActionIcon>
<ActionIcon
variant="transparent"
size="sm"
color="red.9" // Red color for delete actions
onClick={() => deleteUserAgent(row.original.id)}
>
<SquareMinus size="18" /> {/* Small icon size */}
</ActionIcon>
</>
),
mantineTableContainerProps: {
style: {
maxHeight: 300,
overflowY: 'auto',
// margin: 5,
},
},
displayColumnDefOptions: {
'mrt-row-actions': {
size: 10,
},
headerCellRenderFns: {
name: renderHeaderCell,
user_agent: renderHeaderCell,
description: renderHeaderCell,
is_active: renderHeaderCell,
actions: renderHeaderCell,
},
});
return (
<Stack gap={0} style={{ padding: 0 }}>
<Paper
style={
{
// bgcolor: theme.palette.background.paper,
// borderRadius: 2,
// overflow: 'hidden',
// height: 'calc(100vh - 75px)',
// display: 'flex',
// flexDirection: 'column',
}
}
>
{/* Top toolbar with Remove, Assign, Auto-match, and Add buttons */}
<Paper>
<Box
style={{
display: 'flex',
// alignItems: 'center',
// backgroundColor: theme.palette.background.paper,
justifyContent: 'flex-end',
padding: 10,
// gap: 1,
}}
>
<Flex gap={6}>
<Tooltip label="Assign">
<Button
leftSection={<IconSquarePlus size={18} />}
leftSection={<SquarePlus size={18} />}
variant="light"
size="xs"
onClick={() => editUserAgent()}
@ -280,7 +229,26 @@ const UserAgentsTable = () => {
</Box>
</Paper>
<MantineReactTable table={table} />
<Box
style={{
display: 'flex',
flexDirection: 'column',
maxHeight: 300,
}}
>
<Box
style={{
flex: 1,
overflowY: 'auto',
overflowX: 'hidden',
border: 'solid 1px rgb(68,68,68)',
borderRadius: 'var(--mantine-radius-default)',
}}
>
<CustomTable table={table} />
</Box>
</Box>
<UserAgentForm
userAgent={userAgent}
isOpen={userAgentModalOpen}

View file

@ -49,17 +49,17 @@ html {
}
.divTable.table-size-compact .td {
height: 28px;
min-height: 28px;
font-size: var(--mantine-font-size-sm);
}
.divTable.table-size-default .td {
height: 40px;
min-height: 40px;
font-size: var(--mantine-font-size-md);
}
.divTable.table-size-large .td {
height: 48px;
min-height: 48px;
font-size: var(--mantine-font-size-md);
}

31
frontend/src/constants.js Normal file
View file

@ -0,0 +1,31 @@
export const USER_LEVELS = {
STREAMER: 0,
STANDARD: 1,
ADMIN: 10,
};
export const USER_LEVEL_LABELS = {
[USER_LEVELS.STREAMER]: 'Streamer',
[USER_LEVELS.STANDARD]: 'Standard User',
[USER_LEVELS.ADMIN]: 'Admin',
};
export const NETWORK_ACCESS_OPTIONS = {
M3U_EPG: {
label: 'M3U / EPG Endpoints',
description: 'Limit access to M3U, EPG, and HDHR URLs',
},
STREAMS: {
label: 'Stream Endpoints',
description:
'Limit network access to stream URLs, including XC stream URLs',
},
XC_API: {
label: 'XC API',
description: 'Limit access to the XC API',
},
UI: {
label: 'UI',
description: 'Limit access to the Dispatcharr UI',
},
};
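For reference, a minimal sketch of how these constants might be consumed; the helper names below are illustrative and not part of this file:

import { USER_LEVELS, USER_LEVEL_LABELS, NETWORK_ACCESS_OPTIONS } from '../constants';

// Gate admin-only UI by comparing against the numeric level
const isAdmin = (user) => user.user_level === USER_LEVELS.ADMIN;

// Render a friendly label for a user's level, falling back when unknown
const userLevelLabel = (user) => USER_LEVEL_LABELS[user.user_level] ?? 'Unknown';

// Each network-access option becomes one labeled CIDR field on the settings page
const networkAccessFields = Object.entries(NETWORK_ACCESS_OPTIONS).map(
  ([key, { label, description }]) => ({ key, label, description })
);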

View file

@ -1,15 +0,0 @@
import React from 'react';
import { Allotment } from 'allotment';
import { Box, Container } from '@mantine/core';
import 'allotment/dist/style.css';
const ChannelsPage = () => {
return (
<Allotment>
<div>Pane 1</div>
<div>Pane 1</div>
</Allotment>
);
};
export default ChannelsPage;

View file

@ -3,8 +3,24 @@ import ChannelsTable from '../components/tables/ChannelsTable';
import StreamsTable from '../components/tables/StreamsTable';
import { Box } from '@mantine/core';
import { Allotment } from 'allotment';
import { USER_LEVELS } from '../constants';
import useAuthStore from '../store/auth';
const ChannelsPage = () => {
const authUser = useAuthStore((s) => s.user);
if (!authUser.id) {
return <></>;
}
if (authUser.user_level <= USER_LEVELS.STANDARD) {
return (
<Box style={{ padding: 10 }}>
<ChannelsTable />
</Box>
);
}
return (
<div style={{ height: '100vh', width: '100%', display: 'flex' }}>
<Allotment

View file

@ -38,11 +38,13 @@ dayjs.extend(relativeTime);
const RecordingCard = ({ recording }) => {
const channels = useChannelsStore((s) => s.channels);
console.log(recording);
const deleteRecording = (id) => {
API.deleteRecording(id);
};
const customProps = JSON.parse(recording.custom_properties);
const customProps = JSON.parse(recording.custom_properties || '{}');
let recordingName = 'Custom Recording';
if (customProps.program) {
recordingName = customProps.program.title;
@ -87,13 +89,13 @@ const RecordingCard = ({ recording }) => {
<Group justify="space-between">
<Text size="sm">Start:</Text>
<Text size="sm">
{dayjs(new Date(recording.start_time)).format('MMMM D, YYYY h:MMa')}
{dayjs(new Date(recording.start_time)).format('MMMM D, YYYY h:mma')}
</Text>
</Group>
<Group justify="space-between">
<Text size="sm">End:</Text>
<Text size="sm">
{dayjs(new Date(recording.end_time)).format('MMMM D, YYYY h:MMa')}
{dayjs(new Date(recording.end_time)).format('MMMM D, YYYY h:mma')}
</Text>
</Group>
</Card>

View file

@ -1,10 +1,11 @@
import React, { useEffect } from 'react';
import React, { useEffect, useState } from 'react';
import API from '../api';
import useSettingsStore from '../store/settings';
import useUserAgentsStore from '../store/userAgents';
import useStreamProfilesStore from '../store/streamProfiles';
import {
Accordion,
Alert,
Box,
Button,
Center,
@ -12,18 +13,32 @@ import {
Group,
MultiSelect,
Select,
Stack,
Switch,
Text,
TextInput,
} from '@mantine/core';
import { isNotEmpty, useForm } from '@mantine/form';
import UserAgentsTable from '../components/tables/UserAgentsTable';
import StreamProfilesTable from '../components/tables/StreamProfilesTable';
import useLocalStorage from '../hooks/useLocalStorage';
import useAuthStore from '../store/auth';
import { USER_LEVELS, NETWORK_ACCESS_OPTIONS } from '../constants';
import ConfirmationDialog from '../components/ConfirmationDialog';
const SettingsPage = () => {
const settings = useSettingsStore((s) => s.settings);
const userAgents = useUserAgentsStore((s) => s.userAgents);
const streamProfiles = useStreamProfilesStore((s) => s.profiles);
const authUser = useAuthStore((s) => s.user);
const [accordionValue, setAccordionValue] = useState(null);
const [networkAccessSaved, setNetworkAccessSaved] = useState(false);
const [networkAccessError, setNetworkAccessError] = useState(null);
const [networkAccessConfirmOpen, setNetworkAccessConfirmOpen] =
useState(false);
const [networkAccessConfirmCIDRs, setNetworkAccessConfirmCIDRs] =
useState([]);
// UI / local storage settings
const [tableSize, setTableSize] = useLocalStorage('table-size', 'default');
@ -296,9 +311,31 @@ const SettingsPage = () => {
},
});
const networkAccessForm = useForm({
mode: 'uncontrolled',
initialValues: Object.keys(NETWORK_ACCESS_OPTIONS).reduce((acc, key) => {
acc[key] = '0.0.0.0/0';
return acc;
}, {}),
validate: Object.keys(NETWORK_ACCESS_OPTIONS).reduce((acc, key) => {
acc[key] = (value) => {
const cidrs = value.split(',');
for (const cidr of cidrs) {
if (cidr.match(/^([0-9]{1,3}\.){3}[0-9]{1,3}\/\d+$/)) {
continue;
}
return 'Invalid CIDR range';
}
return null;
};
return acc;
}, {}),
});
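// The regex above only checks the "a.b.c.d/len" shape. A stricter validator
// (a sketch only, not wired into the form) would also bound each octet to
// 0-255 and the prefix length to 0-32:
const isValidCidr = (cidr) => {
  const match = cidr
    .trim()
    .match(/^(\d{1,3})\.(\d{1,3})\.(\d{1,3})\.(\d{1,3})\/(\d{1,2})$/);
  if (!match) return false;
  const octets = match.slice(1, 5).map(Number);
  const prefix = Number(match[5]);
  return octets.every((o) => o <= 255) && prefix <= 32;
};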
useEffect(() => {
if (settings) {
console.log(settings);
const formValues = Object.entries(settings).reduce(
(acc, [key, value]) => {
// Modify each value based on its own properties
@ -326,8 +363,18 @@ const SettingsPage = () => {
},
{}
);
console.log(formValues);
form.setValues(formValues);
const networkAccessSettings = JSON.parse(
settings['network-access'].value || '{}'
);
networkAccessForm.setValues(
Object.keys(NETWORK_ACCESS_OPTIONS).reduce((acc, key) => {
acc[key] = networkAccessSettings[key] || '0.0.0.0/0';
return acc;
}, {})
);
}
}, [settings]);
@ -350,6 +397,47 @@ const SettingsPage = () => {
}
};
const onNetworkAccessSubmit = async () => {
setNetworkAccessSaved(false);
setNetworkAccessError(null);
const check = await API.checkSetting({
...settings['network-access'],
value: JSON.stringify(networkAccessForm.getValues()),
});
if (check.error && check.message) {
setNetworkAccessError(`${check.message}: ${check.data}`);
return;
}
// For now, only warn if we're blocking the UI
const blockedAccess = check.UI;
if (blockedAccess.length == 0) {
return saveNetworkAccess();
}
setNetworkAccessConfirmCIDRs(blockedAccess);
setNetworkAccessConfirmOpen(true);
};
const saveNetworkAccess = async () => {
setNetworkAccessSaved(false);
try {
await API.updateSetting({
...settings['network-access'],
value: JSON.stringify(networkAccessForm.getValues()),
});
setNetworkAccessSaved(true);
setNetworkAccessConfirmOpen(false);
} catch (e) {
const errors = {};
for (const key in e.body.value) {
errors[key] = `Invalid CIDR(s): ${e.body.value[key]}`;
}
networkAccessForm.setErrors(errors);
}
};
const onUISettingsChange = (name, value) => {
switch (name) {
case 'table-size':
@ -365,140 +453,271 @@ const SettingsPage = () => {
}}
>
<Box style={{ width: '100%', maxWidth: 800 }}>
<Accordion variant="separated" defaultValue="ui-settings">
<Accordion.Item value="ui-settings">
<Accordion.Control>UI Settings</Accordion.Control>
<Accordion.Panel>
<Select
label="Table Size"
value={tableSize}
onChange={(val) => onUISettingsChange('table-size', val)}
data={[
{
value: 'default',
label: 'Default',
},
{
value: 'compact',
label: 'Compact',
},
{
value: 'large',
label: 'Large',
},
]}
/>
</Accordion.Panel>
</Accordion.Item>
<Accordion.Item value="stream-settings">
<Accordion.Control>Stream Settings</Accordion.Control>
<Accordion.Panel>
<form onSubmit={form.onSubmit(onSubmit)}>
<Accordion
variant="separated"
defaultValue="ui-settings"
onChange={setAccordionValue}
>
{[
<Accordion.Item value="ui-settings">
<Accordion.Control>UI Settings</Accordion.Control>
<Accordion.Panel>
<Select
searchable
{...form.getInputProps('default-user-agent')}
key={form.key('default-user-agent')}
id={settings['default-user-agent']?.id || 'default-user-agent'}
name={settings['default-user-agent']?.key || 'default-user-agent'}
label={settings['default-user-agent']?.name || 'Default User Agent'}
data={userAgents.map((option) => ({
value: `${option.id}`,
label: option.name,
}))}
/>
<Select
searchable
{...form.getInputProps('default-stream-profile')}
key={form.key('default-stream-profile')}
id={settings['default-stream-profile']?.id || 'default-stream-profile'}
name={settings['default-stream-profile']?.key || 'default-stream-profile'}
label={settings['default-stream-profile']?.name || 'Default Stream Profile'}
data={streamProfiles.map((option) => ({
value: `${option.id}`,
label: option.name,
}))}
/>
<Select
searchable
{...form.getInputProps('preferred-region')}
key={form.key('preferred-region')}
id={settings['preferred-region']?.id || 'preferred-region'}
name={settings['preferred-region']?.key || 'preferred-region'}
label={settings['preferred-region']?.name || 'Preferred Region'}
data={regionChoices.map((r) => ({
label: r.label,
value: `${r.value}`,
}))}
/>
<Group justify="space-between" style={{ paddingTop: 5 }}>
<Text size="sm" fw={500}>
Auto-Import Mapped Files
</Text>
<Switch
{...form.getInputProps('auto-import-mapped-files', {
type: 'checkbox',
})}
key={form.key('auto-import-mapped-files')}
id={
settings['auto-import-mapped-files']?.id ||
'auto-import-mapped-files'
}
/>
</Group>
<MultiSelect
id="m3u-hash-key"
name="m3u-hash-key"
label="M3U Hash Key"
label="Table Size"
value={tableSize}
onChange={(val) => onUISettingsChange('table-size', val)}
data={[
{
value: 'name',
label: 'Name',
value: 'default',
label: 'Default',
},
{
value: 'url',
label: 'URL',
value: 'compact',
label: 'Compact',
},
{
value: 'tvg_id',
label: 'TVG-ID',
value: 'large',
label: 'Large',
},
]}
{...form.getInputProps('m3u-hash-key')}
key={form.key('m3u-hash-key')}
/>
</Accordion.Panel>
</Accordion.Item>,
].concat(
authUser.user_level == USER_LEVELS.ADMIN
? [
<Accordion.Item value="stream-settings">
<Accordion.Control>Stream Settings</Accordion.Control>
<Accordion.Panel>
<form onSubmit={form.onSubmit(onSubmit)}>
<Select
searchable
{...form.getInputProps('default-user-agent')}
key={form.key('default-user-agent')}
id={
settings['default-user-agent']?.id ||
'default-user-agent'
}
name={
settings['default-user-agent']?.key ||
'default-user-agent'
}
label={
settings['default-user-agent']?.name ||
'Default User Agent'
}
data={userAgents.map((option) => ({
value: `${option.id}`,
label: option.name,
}))}
/>
<Flex mih={50} gap="xs" justify="flex-end" align="flex-end">
<Button
type="submit"
disabled={form.submitting}
variant="default"
>
Save
</Button>
</Flex>
</form>
</Accordion.Panel>
</Accordion.Item>
<Select
searchable
{...form.getInputProps('default-stream-profile')}
key={form.key('default-stream-profile')}
id={
settings['default-stream-profile']?.id ||
'default-stream-profile'
}
name={
settings['default-stream-profile']?.key ||
'default-stream-profile'
}
label={
settings['default-stream-profile']?.name ||
'Default Stream Profile'
}
data={streamProfiles.map((option) => ({
value: `${option.id}`,
label: option.name,
}))}
/>
<Select
searchable
{...form.getInputProps('preferred-region')}
key={form.key('preferred-region')}
id={
settings['preferred-region']?.id ||
'preferred-region'
}
name={
settings['preferred-region']?.key ||
'preferred-region'
}
label={
settings['preferred-region']?.name ||
'Preferred Region'
}
data={regionChoices.map((r) => ({
label: r.label,
value: `${r.value}`,
}))}
/>
<Accordion.Item value="user-agents">
<Accordion.Control>User-Agents</Accordion.Control>
<Accordion.Panel>
<UserAgentsTable />
</Accordion.Panel>
</Accordion.Item>
<Group
justify="space-between"
style={{ paddingTop: 5 }}
>
<Text size="sm" fw={500}>
Auto-Import Mapped Files
</Text>
<Switch
{...form.getInputProps('auto-import-mapped-files', {
type: 'checkbox',
})}
key={form.key('auto-import-mapped-files')}
id={
settings['auto-import-mapped-files']?.id ||
'auto-import-mapped-files'
}
/>
</Group>
<Accordion.Item value="stream-profiles">
<Accordion.Control>Stream Profiles</Accordion.Control>
<Accordion.Panel>
<StreamProfilesTable />
</Accordion.Panel>
</Accordion.Item>
<MultiSelect
id="m3u-hash-key"
name="m3u-hash-key"
label="M3U Hash Key"
data={[
{
value: 'name',
label: 'Name',
},
{
value: 'url',
label: 'URL',
},
{
value: 'tvg_id',
label: 'TVG-ID',
},
]}
{...form.getInputProps('m3u-hash-key')}
key={form.key('m3u-hash-key')}
/>
<Flex
mih={50}
gap="xs"
justify="flex-end"
align="flex-end"
>
<Button
type="submit"
disabled={form.submitting}
variant="default"
>
Save
</Button>
</Flex>
</form>
</Accordion.Panel>
</Accordion.Item>,
<Accordion.Item value="user-agents">
<Accordion.Control>User-Agents</Accordion.Control>
<Accordion.Panel>
<UserAgentsTable />
</Accordion.Panel>
</Accordion.Item>,
<Accordion.Item value="stream-profiles">
<Accordion.Control>Stream Profiles</Accordion.Control>
<Accordion.Panel>
<StreamProfilesTable />
</Accordion.Panel>
</Accordion.Item>,
<Accordion.Item value="network-access">
<Accordion.Control>
<Box>Network Access</Box>
{accordionValue == 'network-access' && (
<Box>
<Text size="sm">Comma-Delimited CIDR ranges</Text>
</Box>
)}
</Accordion.Control>
<Accordion.Panel>
<form
onSubmit={networkAccessForm.onSubmit(
onNetworkAccessSubmit
)}
>
<Stack gap="sm">
{networkAccessSaved && (
<Alert
variant="light"
color="green"
title="Saved Successfully"
></Alert>
)}
{networkAccessError && (
<Alert
variant="light"
color="red"
title={networkAccessError}
></Alert>
)}
{Object.entries(NETWORK_ACCESS_OPTIONS).map(
([key, config]) => {
return (
<TextInput
label={config.label}
{...networkAccessForm.getInputProps(key)}
key={networkAccessForm.key(key)}
description={config.description}
/>
);
}
)}
<Flex
mih={50}
gap="xs"
justify="flex-end"
align="flex-end"
>
<Button
type="submit"
disabled={networkAccessForm.submitting}
variant="default"
>
Save
</Button>
</Flex>
</Stack>
</form>
</Accordion.Panel>
</Accordion.Item>,
]
: []
)}
</Accordion>
</Box>
<ConfirmationDialog
opened={networkAccessConfirmOpen}
onClose={() => setNetworkAccessConfirmOpen(false)}
onConfirm={saveNetworkAccess}
title={`Confirm Network Access Blocks`}
message={
<>
<Text>
Your client is not included in the allowed networks for the web
UI. Are you sure you want to proceed?
</Text>
<ul>
{networkAccessConfirmCIDRs.map((cidr) => (
<li key={cidr}>{cidr}</li>
))}
</ul>
</>
}
confirmLabel="Save"
cancelLabel="Cancel"
size="md"
/>
</Center>
);
};

View file

@ -16,7 +16,6 @@ import {
Select,
Badge,
} from '@mantine/core';
import { MantineReactTable, useMantineReactTable } from 'mantine-react-table';
import { TableHelper } from '../helpers';
import API from '../api';
import useChannelsStore from '../store/channels';
@ -38,6 +37,8 @@ import useStreamProfilesStore from '../store/streamProfiles';
import usePlaylistsStore from '../store/playlists'; // Add this import
import { useLocation } from 'react-router-dom';
import { notifications } from '@mantine/notifications';
import { CustomTable, useTable } from '../components/tables/CustomTable';
import useLocalStorage from '../hooks/useLocalStorage';
dayjs.extend(duration);
dayjs.extend(relativeTime);
@ -79,21 +80,30 @@ const getStartDate = (uptime) => {
};
// Create a separate component for each channel card to properly handle the hook
const ChannelCard = ({ channel, clients, stopClient, stopChannel, logos, channelsByUUID }) => {
const ChannelCard = ({
channel,
clients,
stopClient,
stopChannel,
logos,
channelsByUUID,
}) => {
const location = useLocation();
const [availableStreams, setAvailableStreams] = useState([]);
const [isLoadingStreams, setIsLoadingStreams] = useState(false);
const [activeStreamId, setActiveStreamId] = useState(null);
const [currentM3UProfile, setCurrentM3UProfile] = useState(null); // Add state for current M3U profile
const [currentM3UProfile, setCurrentM3UProfile] = useState(null); // Add state for current M3U profile
const [data, setData] = useState([]);
// Get M3U account data from the playlists store
const m3uAccounts = usePlaylistsStore((s) => s.playlists);
const [tableSize] = useLocalStorage('table-size', 'default');
// Create a map of M3U account IDs to names for quick lookup
const m3uAccountsMap = useMemo(() => {
const map = {};
if (m3uAccounts && Array.isArray(m3uAccounts)) {
m3uAccounts.forEach(account => {
m3uAccounts.forEach((account) => {
if (account.id) {
map[account.id] = account.name;
}
@ -102,17 +112,15 @@ const ChannelCard = ({ channel, clients, stopClient, stopChannel, logos, channel
return map;
}, [m3uAccounts]);
// Safety check - if channel doesn't have required data, don't render
if (!channel || !channel.channel_id) {
return null;
}
// Update M3U profile information when channel data changes
useEffect(() => {
// If the channel data includes M3U profile information, update our state
if (channel.m3u_profile || channel.m3u_profile_name) {
setCurrentM3UProfile({
name: channel.m3u_profile?.name || channel.m3u_profile_name || 'Default M3U'
name:
channel.m3u_profile?.name ||
channel.m3u_profile_name ||
'Default M3U',
});
}
}, [channel.m3u_profile, channel.m3u_profile_name, channel.stream_id]);
@ -133,8 +141,10 @@ const ChannelCard = ({ channel, clients, stopClient, stopChannel, logos, channel
// If we have a channel URL, try to find the matching stream
if (channel.url && streamData.length > 0) {
// Try to find matching stream based on URL
const matchingStream = streamData.find(stream =>
channel.url.includes(stream.url) || stream.url.includes(channel.url)
const matchingStream = streamData.find(
(stream) =>
channel.url.includes(stream.url) ||
stream.url.includes(channel.url)
);
if (matchingStream) {
@ -148,7 +158,7 @@ const ChannelCard = ({ channel, clients, stopClient, stopChannel, logos, channel
}
}
} catch (error) {
console.error("Error fetching streams:", error);
console.error('Error fetching streams:', error);
} finally {
setIsLoadingStreams(false);
}
@ -157,17 +167,70 @@ const ChannelCard = ({ channel, clients, stopClient, stopChannel, logos, channel
fetchStreams();
}, [channel.channel_id, channel.url, channelsByUUID]);
useEffect(() => {
setData(
clients
.filter((client) => client.channel.channel_id === channel.channel_id)
.map((client) => ({
id: client.client_id,
...client,
}))
);
}, [clients]);
const renderHeaderCell = (header) => {
switch (header.id) {
default:
return (
<Group>
<Text size="sm" name={header.id}>
{header.column.columnDef.header}
</Text>
</Group>
);
}
};
const renderBodyCell = ({ cell, row }) => {
switch (cell.column.id) {
case 'actions':
return (
<Box sx={{ justifyContent: 'right' }}>
<Center>
<Tooltip label="Disconnect client">
<ActionIcon
size="sm"
variant="transparent"
color="red.9"
onClick={() =>
stopClient(
row.original.channel.uuid,
row.original.client_id
)
}
>
<SquareX size="18" />
</ActionIcon>
</Tooltip>
</Center>
</Box>
);
}
};
// Handle stream switching
const handleStreamChange = async (streamId) => {
try {
console.log("Switching to stream ID:", streamId);
console.log('Switching to stream ID:', streamId);
// Find the selected stream in availableStreams for debugging
const selectedStream = availableStreams.find(s => s.id.toString() === streamId);
console.log("Selected stream details:", selectedStream);
const selectedStream = availableStreams.find(
(s) => s.id.toString() === streamId
);
console.log('Selected stream details:', selectedStream);
// Make sure we're passing the correct ID to the API
const response = await API.switchStream(channel.channel_id, streamId);
console.log("Stream switch API response:", response);
console.log('Stream switch API response:', response);
// Update the local active stream ID immediately
setActiveStreamId(streamId);
@ -193,21 +256,22 @@ const ChannelCard = ({ channel, clients, stopClient, stopChannel, logos, channel
const channelId = channelsByUUID[channel.channel_id];
if (channelId) {
const updatedStreamData = await API.getChannelStreams(channelId);
console.log("Channel streams after switch:", updatedStreamData);
console.log('Channel streams after switch:', updatedStreamData);
// Update current stream information with fresh data
const updatedStream = updatedStreamData.find(s => s.id.toString() === streamId);
const updatedStream = updatedStreamData.find(
(s) => s.id.toString() === streamId
);
if (updatedStream && updatedStream.m3u_profile) {
setCurrentM3UProfile(updatedStream.m3u_profile);
}
}
} catch (error) {
console.error("Error checking streams after switch:", error);
console.error('Error checking streams after switch:', error);
}
}, 2000);
} catch (error) {
console.error("Stream switch error:", error);
console.error('Stream switch error:', error);
notifications.show({
title: 'Error switching stream',
message: error.toString(),
@ -215,23 +279,31 @@ const ChannelCard = ({ channel, clients, stopClient, stopChannel, logos, channel
});
}
};
console.log(data);
const clientsColumns = useMemo(
() => [
{
id: 'expand',
size: 20,
},
{
header: 'IP Address',
accessorKey: 'ip_address',
size: 50,
},
// Updated Connected column with tooltip
{
id: 'connected',
header: 'Connected',
accessorFn: (row) => {
// Check for connected_since (which is seconds since connection)
if (row.connected_since) {
// Calculate the actual connection time by subtracting the seconds from current time
const currentTime = dayjs();
const connectedTime = currentTime.subtract(row.connected_since, 'second');
const connectedTime = currentTime.subtract(
row.connected_since,
'second'
);
return connectedTime.format('MM/DD HH:mm:ss');
}
@ -243,15 +315,21 @@ const ChannelCard = ({ channel, clients, stopClient, stopChannel, logos, channel
return 'Unknown';
},
Cell: ({ cell }) => (
<Tooltip label={cell.getValue() !== 'Unknown' ? `Connected at ${cell.getValue()}` : 'Unknown connection time'}>
cell: ({ cell }) => (
<Tooltip
label={
cell.getValue() !== 'Unknown'
? `Connected at ${cell.getValue()}`
: 'Unknown connection time'
}
>
<Text size="xs">{cell.getValue()}</Text>
</Tooltip>
),
size: 50,
},
// Update Duration column with tooltip showing exact seconds
{
id: 'duration',
header: 'Duration',
accessorFn: (row) => {
if (row.connected_since) {
@ -259,72 +337,74 @@ const ChannelCard = ({ channel, clients, stopClient, stopChannel, logos, channel
}
if (row.connection_duration) {
return dayjs.duration(row.connection_duration, 'seconds').humanize();
return dayjs
.duration(row.connection_duration, 'seconds')
.humanize();
}
return '-';
},
Cell: ({ cell, row }) => {
const exactDuration = row.original.connected_since || row.original.connection_duration;
cell: ({ cell, row }) => {
const exactDuration =
row.original.connected_since || row.original.connection_duration;
return (
<Tooltip label={exactDuration ? `${exactDuration.toFixed(1)} seconds` : 'Unknown duration'}>
<Tooltip
label={
exactDuration
? `${exactDuration.toFixed(1)} seconds`
: 'Unknown duration'
}
>
<Text size="xs">{cell.getValue()}</Text>
</Tooltip>
);
},
size: 50,
}
},
{
id: 'actions',
header: 'Actions',
size: tableSize == 'compact' ? 75 : 100,
},
],
[tableSize]
);
// This hook is now at the top level of this component
const channelClientsTable = useMantineReactTable({
const channelClientsTable = useTable({
...TableHelper.defaultProperties,
columns: clientsColumns,
data: clients.filter(
(client) => client.channel.channel_id === channel.channel_id
),
enablePagination: false,
enableTopToolbar: false,
enableBottomToolbar: false,
enableRowSelection: false,
enableColumnFilters: false,
mantineTableBodyCellProps: {
style: {
padding: 4,
borderColor: '#444',
color: '#E0E0E0',
fontSize: '0.85rem',
},
data,
allRowIds: data.map((client) => client.id),
tableCellProps: () => ({
padding: 4,
borderColor: '#444',
color: '#E0E0E0',
fontSize: '0.85rem',
}),
headerCellRenderFns: {
ip_address: renderHeaderCell,
connected: renderHeaderCell,
duration: renderHeaderCell,
actions: renderHeaderCell,
},
bodyCellRenderFns: {
actions: renderBodyCell,
},
getExpandedRowHeight: (row) => {
return 20 + 28 * row.original.streams.length;
},
expandedRowRenderer: ({ row }) => {
return (
<Box p="xs">
<Group spacing="xs" align="flex-start">
<Text size="xs" fw={500} color="dimmed">
User Agent:
</Text>
<Text size="xs">{row.original.user_agent || 'Unknown'}</Text>
</Group>
</Box>
);
},
enableRowActions: true,
renderRowActions: ({ row }) => (
<Box sx={{ justifyContent: 'right' }}>
<Center>
<Tooltip label="Disconnect client">
<ActionIcon
size="sm"
variant="transparent"
color="red.9"
onClick={() =>
stopClient(row.original.channel.uuid, row.original.client_id)
}
>
<SquareX size="18" />
</ActionIcon>
</Tooltip>
</Center>
</Box>
),
renderDetailPanel: ({ row }) => (
<Box p="xs">
<Group spacing="xs" align="flex-start">
<Text size="xs" fw={500} color="dimmed">User Agent:</Text>
<Text size="xs">{row.original.user_agent || "Unknown"}</Text>
</Group>
</Box>
),
mantineExpandButtonProps: ({ row, table }) => ({
size: 'xs',
style: {
@ -343,13 +423,11 @@ const ChannelCard = ({ channel, clients, stopClient, stopChannel, logos, channel
},
});
if (location.pathname != '/stats') {
return <></>;
}
// Get logo URL from the logos object if available
const logoUrl = channel.logo_id && logos && logos[channel.logo_id] ?
logos[channel.logo_id].cache_url : null;
const logoUrl =
channel.logo_id && logos && logos[channel.logo_id]
? logos[channel.logo_id].cache_url
: null;
// Ensure these values exist to prevent errors
const channelName = channel.name || 'Unnamed Channel';
@ -361,19 +439,21 @@ const ChannelCard = ({ channel, clients, stopClient, stopChannel, logos, channel
const streamProfileName = channel.stream_profile?.name || 'Unknown Profile';
// Use currentM3UProfile if available, otherwise fall back to channel data
const m3uProfileName = currentM3UProfile?.name ||
const m3uProfileName =
currentM3UProfile?.name ||
channel.m3u_profile?.name ||
channel.m3u_profile_name ||
'Unknown M3U Profile';
// Create select options for available streams
const streamOptions = availableStreams.map(stream => {
const streamOptions = availableStreams.map((stream) => {
// Get account name from our mapping if it exists
const accountName = stream.m3u_account && m3uAccountsMap[stream.m3u_account]
? m3uAccountsMap[stream.m3u_account]
: stream.m3u_account
? `M3U #${stream.m3u_account}`
: 'Unknown M3U';
const accountName =
stream.m3u_account && m3uAccountsMap[stream.m3u_account]
? m3uAccountsMap[stream.m3u_account]
: stream.m3u_account
? `M3U #${stream.m3u_account}`
: 'Unknown M3U';
return {
value: stream.id.toString(),
@ -381,6 +461,15 @@ const ChannelCard = ({ channel, clients, stopClient, stopChannel, logos, channel
};
});
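// The early returns below now come after every hook call above, which keeps the
// hook call order identical on every render (required by React's rules of hooks).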
if (location.pathname != '/stats') {
return <></>;
}
// Safety check - if channel doesn't have required data, don't render
if (!channel || !channel.channel_id) {
return null;
}
return (
<Card
key={channel.channel_id}
@ -395,19 +484,21 @@ const ChannelCard = ({ channel, clients, stopClient, stopChannel, logos, channel
>
<Stack style={{ position: 'relative' }}>
<Group justify="space-between">
<Box style={{
width: '100px',
height: '50px',
display: 'flex',
alignItems: 'center',
justifyContent: 'center'
}}>
<Box
style={{
width: '100px',
height: '50px',
display: 'flex',
alignItems: 'center',
justifyContent: 'center',
}}
>
<img
src={logoUrl || logo}
style={{
maxWidth: '100%',
maxHeight: '100%',
objectFit: 'contain'
objectFit: 'contain',
}}
alt="channel logo"
/>
@ -465,7 +556,9 @@ const ChannelCard = ({ channel, clients, stopClient, stopChannel, logos, channel
<Select
size="xs"
label="Active Stream"
placeholder={isLoadingStreams ? "Loading streams..." : "Select stream"}
placeholder={
isLoadingStreams ? 'Loading streams...' : 'Select stream'
}
data={streamOptions}
value={activeStreamId || channel.stream_id?.toString() || null}
onChange={handleStreamChange}
@ -534,7 +627,9 @@ const ChannelCard = ({ channel, clients, stopClient, stopChannel, logos, channel
<Group justify="space-between">
<Group gap={4}>
<Tooltip label={`Current bitrate: ${formatSpeed(bitrates.at(-1) || 0)}`}>
<Tooltip
label={`Current bitrate: ${formatSpeed(bitrates.at(-1) || 0)}`}
>
<Group gap={4} style={{ cursor: 'help' }}>
<Gauge style={{ paddingRight: 5 }} size="22" />
<Text size="sm">{formatSpeed(bitrates.at(-1) || 0)}</Text>
@ -543,7 +638,9 @@ const ChannelCard = ({ channel, clients, stopClient, stopChannel, logos, channel
</Group>
<Tooltip label={`Average bitrate: ${avgBitrate}`}>
<Text size="sm" style={{ cursor: 'help' }}>Avg: {avgBitrate}</Text>
<Text size="sm" style={{ cursor: 'help' }}>
Avg: {avgBitrate}
</Text>
</Tooltip>
<Group gap={4}>
@ -556,7 +653,9 @@ const ChannelCard = ({ channel, clients, stopClient, stopChannel, logos, channel
</Group>
<Group gap={5}>
<Tooltip label={`${clientCount} active client${clientCount !== 1 ? 's' : ''}`}>
<Tooltip
label={`${clientCount} active client${clientCount !== 1 ? 's' : ''}`}
>
<Group gap={4} style={{ cursor: 'help' }}>
<Users size="18" />
<Text size="sm">{clientCount}</Text>
@ -565,7 +664,7 @@ const ChannelCard = ({ channel, clients, stopClient, stopChannel, logos, channel
</Group>
</Group>
<MantineReactTable table={channelClientsTable} />
<CustomTable table={channelClientsTable} />
</Stack>
</Card>
);
@ -590,7 +689,7 @@ const ChannelsPage = () => {
header: 'Logo',
accessorKey: 'logo_url',
size: 50,
Cell: ({ cell }) => (
cell: ({ cell }) => (
<Center>
<img src={cell.getValue() || logo} width="20" alt="channel logo" />
</Center>
@ -600,7 +699,7 @@ const ChannelsPage = () => {
id: 'name',
header: 'Name',
accessorKey: 'name',
Cell: ({ cell }) => (
cell: ({ cell }) => (
<div
style={{
whiteSpace: 'nowrap',
@ -674,8 +773,13 @@ const ChannelsPage = () => {
// The main clientsTable is no longer needed since each channel card has its own table
useEffect(() => {
if (!channelStats || !channelStats.channels || !Array.isArray(channelStats.channels) || channelStats.channels.length === 0) {
console.log("No channel stats available:", channelStats);
if (
!channelStats ||
!channelStats.channels ||
!Array.isArray(channelStats.channels) ||
channelStats.channels.length === 0
) {
console.log('No channel stats available:', channelStats);
// Clear active channels when there are no stats
if (Object.keys(activeChannels).length > 0) {
setActiveChannels({});
@ -689,13 +793,13 @@ const ChannelsPage = () => {
// Track which channels are currently active according to channelStats
const currentActiveChannelIds = new Set(
channelStats.channels.map(ch => ch.channel_id).filter(Boolean)
channelStats.channels.map((ch) => ch.channel_id).filter(Boolean)
);
channelStats.channels.forEach(ch => {
channelStats.channels.forEach((ch) => {
// Make sure we have a valid channel_id
if (!ch.channel_id) {
console.warn("Found channel without channel_id:", ch);
console.warn('Found channel without channel_id:', ch);
return;
}
@ -714,12 +818,14 @@ const ChannelsPage = () => {
}
// Find corresponding channel data
const channelData = channelsByUUID && ch.channel_id ?
channels[channelsByUUID[ch.channel_id]] : null;
const channelData =
channelsByUUID && ch.channel_id
? channels[channelsByUUID[ch.channel_id]]
: null;
// Find stream profile
const streamProfile = streamProfiles.find(
profile => profile.id == parseInt(ch.stream_profile)
(profile) => profile.id == parseInt(ch.stream_profile)
);
stats[ch.channel_id] = {
@ -732,7 +838,7 @@ const ChannelsPage = () => {
};
});
console.log("Processed active channels:", stats);
console.log('Processed active channels:', stats);
setActiveChannels(stats);
const clientStats = Object.values(stats).reduce((acc, ch) => {
@ -762,8 +868,16 @@ const ChannelsPage = () => {
verticalSpacing="lg"
>
{Object.keys(activeChannels).length === 0 ? (
<Box style={{ gridColumn: '1 / -1', textAlign: 'center', padding: '40px' }}>
<Text size="xl" color="dimmed">No active channels currently streaming</Text>
<Box
style={{
gridColumn: '1 / -1',
textAlign: 'center',
padding: '40px',
}}
>
<Text size="xl" color="dimmed">
No active channels currently streaming
</Text>
</Box>
) : (
Object.values(activeChannels).map((channel) => (

View file

@ -0,0 +1,132 @@
import React, { useState } from 'react';
import useUsersStore from '../store/users';
import {
ActionIcon,
Box,
Button,
Center,
Divider,
Group,
Paper,
Select,
Stack,
Text,
useMantineTheme,
} from '@mantine/core';
import { SquareMinus, SquarePen, SquarePlus } from 'lucide-react';
import UserForm from '../components/forms/User';
import useAuthStore from '../store/auth';
import API from '../api';
import { USER_LEVELS, USER_LEVEL_LABELS } from '../constants';
const UsersPage = () => {
const theme = useMantineTheme();
const authUser = useAuthStore((s) => s.user);
const users = useUsersStore((s) => s.users);
const [selectedUser, setSelectedUser] = useState(null);
const [userModalOpen, setUserModalOpen] = useState(false);
console.log(authUser);
const closeUserModal = () => {
setSelectedUser(null);
setUserModalOpen(false);
};
const editUser = (user) => {
setSelectedUser(user);
setUserModalOpen(true);
};
const deleteUser = (id) => {
API.deleteUser(id);
};
return (
<>
<Center>
<Paper
style={{
minWidth: 600,
padding: 10,
margin: 20,
}}
>
<Stack>
<Box>
<Button
leftSection={<SquarePlus size={18} />}
variant="light"
size="xs"
onClick={() => editUser()}
p={5}
color="green"
style={{
borderWidth: '1px',
borderColor: 'green',
color: 'white',
}}
>
Add User
</Button>
</Box>
{[...users]
.sort((a, b) => a.id - b.id)
.map((user) => {
if (!user) {
return null;
}
return (
<Group justify="space-between" key={user.id}>
<Box flex={1} style={{ alignContent: 'flex-start' }}>
{user.username}
</Box>
<Box flex={1} style={{ alignContent: 'flex-start' }}>
{user.email}
</Box>
{authUser.user_level == USER_LEVELS.ADMIN && (
<Group>
<Text>{USER_LEVEL_LABELS[user.user_level]}</Text>
<ActionIcon
size={18}
variant="transparent"
color={theme.tailwind.yellow[3]}
onClick={() => editUser(user)}
>
<SquarePen size="18" />
</ActionIcon>
<ActionIcon
size={18}
variant="transparent"
color={theme.tailwind.red[6]}
onClick={() => deleteUser(user.id)}
disabled={authUser.id === user.id}
>
<SquareMinus size="18" />
</ActionIcon>
</Group>
)}
</Group>
);
})}
</Stack>
</Paper>
</Center>
<UserForm
user={selectedUser}
isOpen={userModalOpen}
onClose={closeUserModal}
/>
</>
);
};
export default UsersPage;

View file

@ -1,14 +0,0 @@
import ProxyManager from './components/ProxyManager';
// ...existing code...
const routes = [
...existingRoutes,
{
path: '/proxy',
element: <ProxyManager />,
name: 'Proxy Manager',
},
];
export default routes;

View file

@ -6,6 +6,9 @@ import usePlaylistsStore from './playlists';
import useEPGsStore from './epgs';
import useStreamProfilesStore from './streamProfiles';
import useUserAgentsStore from './userAgents';
import useUsersStore from './users';
import API from '../api';
import { USER_LEVELS } from '../constants';
const decodeToken = (token) => {
if (!token) return null;
@ -26,11 +29,19 @@ const useAuthStore = create((set, get) => ({
user: {
username: '',
email: '',
user_level: '',
},
isLoading: false,
error: null,
setUser: (user) => set({ user }),
initData: async () => {
const user = await API.me();
if (user.user_level <= USER_LEVELS.STREAMER) {
throw new Error('Unauthorized');
}
// Ensure settings are loaded first
await useSettingsStore.getState().fetchSettings();
@ -47,8 +58,14 @@ const useAuthStore = create((set, get) => ({
useStreamProfilesStore.getState().fetchProfiles(),
useUserAgentsStore.getState().fetchUserAgents(),
]);
if (user.user_level >= USER_LEVELS.ADMIN) {
await Promise.all([useUsersStore.getState().fetchUsers()]);
}
set({ user, isAuthenticated: true });
} catch (error) {
console.error("Error initializing data:", error);
console.error('Error initializing data:', error);
}
},
@ -83,7 +100,6 @@ const useAuthStore = create((set, get) => ({
accessToken: response.access,
refreshToken: response.refresh,
tokenExpiration: expiration, // 1 hour from now
isAuthenticated: true,
});
// Store in localStorage
localStorage.setItem('accessToken', response.access);
@ -128,6 +144,7 @@ const useAuthStore = create((set, get) => ({
refreshToken: null,
tokenExpiration: null,
isAuthenticated: false,
user: null,
});
localStorage.removeItem('accessToken');
localStorage.removeItem('refreshToken');

View file

@ -0,0 +1,41 @@
import { create } from 'zustand';
import api from '../api';
const useUsersStore = create((set) => ({
users: [],
isLoading: false,
error: null,
fetchUsers: async () => {
set({ isLoading: true, error: null });
try {
const users = await api.getUsers();
set({
users,
isLoading: false,
});
} catch (error) {
console.error('Failed to fetch users:', error);
set({ error: 'Failed to load users.', isLoading: false });
}
},
addUser: (user) =>
set((state) => ({
users: state.users.concat([user]),
})),
updateUser: (updatedUser) =>
set((state) => ({
users: state.users.map((user) =>
user.id === updatedUser.id ? updatedUser : user
),
})),
removeUser: (userId) =>
set((state) => ({
users: state.users.filter((user) => user.id !== userId),
})),
}));
export default useUsersStore;
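A minimal consumer sketch, assuming the same import path conventions as the other stores; the component name is hypothetical:

import { useEffect } from 'react';
import useUsersStore from '../store/users';

const UserCount = () => {
  const users = useUsersStore((s) => s.users);
  const fetchUsers = useUsersStore((s) => s.fetchUsers);

  // Store actions are stable references, so this effect only runs once on mount
  useEffect(() => {
    fetchUsers();
  }, [fetchUsers]);

  return <span>{users.length} users</span>;
};

export default UserCount;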

View file

@ -38,12 +38,15 @@ export default {
};
// Custom debounce hook
export function useDebounce(value, delay = 500) {
export function useDebounce(value, delay = 500, callback = null) {
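// callback (optional): runs in the same debounce timeout, right after the
// debounced value updates, e.g. to reset pagination alongside a debounced
// search term. Illustrative usage (names below are hypothetical):
//   const debouncedSearch = useDebounce(search, 300, () =>
//     setPagination((prev) => ({ ...prev, pageIndex: 0 }))
//   );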
const [debouncedValue, setDebouncedValue] = useState(value);
useEffect(() => {
const handler = setTimeout(() => {
setDebouncedValue(value);
if (callback) {
callback();
}
}, delay);
return () => clearTimeout(handler); // Cleanup timeout on unmount or value change

View file

@ -18,10 +18,11 @@ django-cors-headers
djangorestframework-simplejwt
m3u8
rapidfuzz==3.12.1
tzlocal
# PyTorch dependencies (CPU only)
--extra-index-url https://download.pytorch.org/whl/cpu/
torch==2.6.0+cpu
tzlocal
# ML/NLP dependencies
sentence-transformers==3.4.1