Mirror of https://github.com/Dispatcharr/Dispatcharr.git (synced 2026-01-23 02:35:14 +00:00)
Merge pull request #336 from Dispatcharr/vod-relationtest

Implement Video on Demand (VOD) functionality

Commit 5c6d1fe6fd: 70 changed files with 13909 additions and 2386 deletions
@@ -22,6 +22,7 @@ Dispatcharr has officially entered **BETA**, bringing powerful new features and

📊 **Real-Time Stats Dashboard** — Live insights into stream health and client activity\
🧠 **EPG Auto-Match** — Match program data to channels automatically\
⚙️ **Streamlink + FFmpeg Support** — Flexible backend options for streaming and recording\
🎬 **VOD Management** — Full Video on Demand support with movies and TV series\
🧼 **UI & UX Enhancements** — Smoother, faster, more responsive interface\
🛁 **Output Compatibility** — HDHomeRun, M3U, and XMLTV EPG support for Plex, Jellyfin, and more

@@ -31,6 +32,7 @@ Dispatcharr has officially entered **BETA**, bringing powerful new features and

✅ **Full IPTV Control** — Import, organize, proxy, and monitor IPTV streams on your own terms\
✅ **Smart Playlist Handling** — M3U import, filtering, grouping, and failover support\
✅ **VOD Content Management** — Organize movies and TV series with metadata and streaming\
✅ **Reliable EPG Integration** — Match and manage TV guide data with ease\
✅ **Clean & Responsive Interface** — Modern design that gets out of your way\
✅ **Fully Self-Hosted** — Total control, zero reliance on third-party services

@@ -25,6 +25,7 @@ urlpatterns = [
    path('hdhr/', include(('apps.hdhr.api_urls', 'hdhr'), namespace='hdhr')),
    path('m3u/', include(('apps.m3u.api_urls', 'm3u'), namespace='m3u')),
    path('core/', include(('core.api_urls', 'core'), namespace='core')),
    path('vod/', include(('apps.vod.api_urls', 'vod'), namespace='vod')),
    # path('output/', include(('apps.output.api_urls', 'output'), namespace='output')),
    #path('player/', include(('apps.player.api_urls', 'player'), namespace='player')),
    #path('settings/', include(('apps.settings.api_urls', 'settings'), namespace='settings')),

@@ -44,6 +44,7 @@ import django_filters
from django_filters.rest_framework import DjangoFilterBackend
from rest_framework.filters import SearchFilter, OrderingFilter
from apps.epg.models import EPGData
from apps.vod.models import Movie, Series
from django.db.models import Q
from django.http import StreamingHttpResponse, FileResponse, Http404
import mimetypes

@@ -195,7 +196,7 @@ class ChannelGroupViewSet(viewsets.ModelViewSet):
        from django.db.models import Count
        return ChannelGroup.objects.annotate(
            channel_count=Count('channels', distinct=True),
            m3u_account_count=Count('m3u_account', distinct=True)
            m3u_account_count=Count('m3u_accounts', distinct=True)
        )

    def update(self, request, *args, **kwargs):

@@ -237,7 +238,7 @@ class ChannelGroupViewSet(viewsets.ModelViewSet):
        # Find groups with no channels and no M3U account associations
        unused_groups = ChannelGroup.objects.annotate(
            channel_count=Count('channels', distinct=True),
            m3u_account_count=Count('m3u_account', distinct=True)
            m3u_account_count=Count('m3u_accounts', distinct=True)
        ).filter(
            channel_count=0,
            m3u_account_count=0

@@ -1206,7 +1207,7 @@ class CleanupUnusedLogosAPIView(APIView):
        return [Authenticated()]

    @swagger_auto_schema(
        operation_description="Delete all logos that are not used by any channels",
        operation_description="Delete all logos that are not used by any channels, movies, or series",
        request_body=openapi.Schema(
            type=openapi.TYPE_OBJECT,
            properties={

@@ -1220,10 +1221,24 @@ class CleanupUnusedLogosAPIView(APIView):
        responses={200: "Cleanup completed"},
    )
    def post(self, request):
        """Delete all logos with no channel associations"""
        """Delete all logos with no channel, movie, or series associations"""
        delete_files = request.data.get("delete_files", False)

        unused_logos = Logo.objects.filter(channels__isnull=True)
        # Find logos that are not used by channels, movies, or series
        filter_conditions = Q(channels__isnull=True)

        # Add VOD conditions if models are available
        try:
            filter_conditions &= Q(movie__isnull=True)
        except:
            pass

        try:
            filter_conditions &= Q(series__isnull=True)
        except:
            pass

        unused_logos = Logo.objects.filter(filter_conditions)
        deleted_count = unused_logos.count()
        logo_names = list(unused_logos.values_list('name', flat=True))
        local_files_deleted = 0

@@ -1259,9 +1274,23 @@ class CleanupUnusedLogosAPIView(APIView):
        })


class LogoPagination(PageNumberPagination):
    page_size = 50  # Default page size to match frontend default
    page_size_query_param = "page_size"  # Allow clients to specify page size
    max_page_size = 1000  # Prevent excessive page sizes

    def paginate_queryset(self, queryset, request, view=None):
        # Check if pagination should be disabled for specific requests
        if request.query_params.get('no_pagination') == 'true':
            return None  # disables pagination, returns full queryset

        return super().paginate_queryset(queryset, request, view)


class LogoViewSet(viewsets.ModelViewSet):
    queryset = Logo.objects.all()
    serializer_class = LogoSerializer
    pagination_class = LogoPagination
    parser_classes = (MultiPartParser, FormParser, JSONParser)

    def get_permissions(self):

@@ -1278,8 +1307,16 @@ class LogoViewSet(viewsets.ModelViewSet):

    def get_queryset(self):
        """Optimize queryset with prefetch and add filtering"""
        # Start with basic prefetch for channels
        queryset = Logo.objects.prefetch_related('channels').order_by('name')

        # Try to prefetch VOD relations if available
        try:
            queryset = queryset.prefetch_related('movie', 'series')
        except:
            # VOD app might not be available, continue without VOD prefetch
            pass

        # Filter by specific IDs
        ids = self.request.query_params.getlist('ids')
        if ids:

@@ -1292,12 +1329,62 @@ class LogoViewSet(viewsets.ModelViewSet):
                pass  # Invalid IDs, return empty queryset
                queryset = Logo.objects.none()

        # Filter by usage
        # Filter by usage - now includes VOD content
        used_filter = self.request.query_params.get('used', None)
        if used_filter == 'true':
            queryset = queryset.filter(channels__isnull=False).distinct()
            # Logo is used if it has any channels, movies, or series
            filter_conditions = Q(channels__isnull=False)

            # Add VOD conditions if models are available
            try:
                filter_conditions |= Q(movie__isnull=False)
            except:
                pass

            try:
                filter_conditions |= Q(series__isnull=False)
            except:
                pass

            queryset = queryset.filter(filter_conditions).distinct()

        elif used_filter == 'false':
            queryset = queryset.filter(channels__isnull=True)
            # Logo is unused if it has no channels, movies, or series
            filter_conditions = Q(channels__isnull=True)

            # Add VOD conditions if models are available
            try:
                filter_conditions &= Q(movie__isnull=True)
            except:
                pass

            try:
                filter_conditions &= Q(series__isnull=True)
            except:
                pass

            queryset = queryset.filter(filter_conditions)

        # Filter for channel assignment (unused + channel-used, exclude VOD-only)
        channel_assignable = self.request.query_params.get('channel_assignable', None)
        if channel_assignable == 'true':
            # Include logos that are either:
            # 1. Completely unused, OR
            # 2. Used by channels (but may also be used by VOD)
            # Exclude logos that are ONLY used by VOD content

            unused_condition = Q(channels__isnull=True)
            channel_used_condition = Q(channels__isnull=False)

            # Add VOD conditions if models are available
            try:
                unused_condition &= Q(movie__isnull=True) & Q(series__isnull=True)
            except:
                pass

            # Combine: unused OR used by channels
            filter_conditions = unused_condition | channel_used_condition
            queryset = queryset.filter(filter_conditions).distinct()

        # Filter by name
        name_filter = self.request.query_params.get('name', None)
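
Note: the logo endpoint now understands the used, channel_assignable, and no_pagination query parameters added above. A minimal sketch of exercising them from a client, assuming the viewset is mounted under /api/channels/logos/ and token auth is in use (both assumptions, not shown in this diff):

import requests

BASE = "http://localhost:9191/api/channels/logos/"   # assumed route for LogoViewSet
HEADERS = {"Authorization": "Bearer <token>"}         # assumed auth scheme

# All unused logos (no channel, movie, or series references), without pagination
unused = requests.get(
    BASE, params={"used": "false", "no_pagination": "true"}, headers=HEADERS
).json()

# Logos that are safe to offer for channel assignment: unused or channel-used,
# excluding logos referenced only by VOD content
assignable = requests.get(
    BASE, params={"channel_assignable": "true", "page_size": 100}, headers=HEADERS
).json()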
@@ -0,0 +1,19 @@
# Generated by Django 5.2.4 on 2025-08-22 20:14

import django.db.models.deletion
from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('dispatcharr_channels', '0023_stream_stream_stats_stream_stream_stats_updated_at'),
    ]

    operations = [
        migrations.AlterField(
            model_name='channelgroupm3uaccount',
            name='channel_group',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='m3u_accounts', to='dispatcharr_channels.channelgroup'),
        ),
    ]

@@ -95,7 +95,7 @@ class Stream(models.Model):
    )
    last_seen = models.DateTimeField(db_index=True, default=datetime.now)
    custom_properties = models.TextField(null=True, blank=True)

    # Stream statistics fields
    stream_stats = models.JSONField(
        null=True,

@@ -560,7 +560,7 @@ class ChannelStream(models.Model):

class ChannelGroupM3UAccount(models.Model):
    channel_group = models.ForeignKey(
        ChannelGroup, on_delete=models.CASCADE, related_name="m3u_account"
        ChannelGroup, on_delete=models.CASCADE, related_name="m3u_accounts"
    )
    m3u_account = models.ForeignKey(
        M3UAccount, on_delete=models.CASCADE, related_name="channel_group"
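
Note: the related_name rename from m3u_account to m3u_accounts is what the Count('m3u_accounts', ...) annotations and the m3u_accounts__enabled filters elsewhere in this commit rely on. A small sketch of the reverse accessor after migration 0024 (illustrative only):

from apps.channels.models import ChannelGroup

group = ChannelGroup.objects.first()

# Reverse accessor now uses the plural name introduced by this migration
enabled_links = group.m3u_accounts.filter(enabled=True)    # ChannelGroupM3UAccount rows
accounts = [link.m3u_account for link in enabled_links]    # the linked M3UAccount objects

# The same name is used when spanning the relation in filters and annotations
groups_with_active_links = ChannelGroup.objects.filter(m3u_accounts__enabled=True).distinct()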
@@ -1,3 +1,4 @@
import json
from rest_framework import serializers
from .models import (
    Stream,

@@ -60,19 +61,81 @@ class LogoSerializer(serializers.ModelSerializer):
        return reverse("api:channels:logo-cache", args=[obj.id])

    def get_channel_count(self, obj):
        """Get the number of channels using this logo"""
        return obj.channels.count()
        """Get the number of channels, movies, and series using this logo"""
        channel_count = obj.channels.count()

        # Safely get movie count
        try:
            movie_count = obj.movie.count() if hasattr(obj, 'movie') else 0
        except AttributeError:
            movie_count = 0

        # Safely get series count
        try:
            series_count = obj.series.count() if hasattr(obj, 'series') else 0
        except AttributeError:
            series_count = 0

        return channel_count + movie_count + series_count

    def get_is_used(self, obj):
        """Check if this logo is used by any channels"""
        return obj.channels.exists()
        """Check if this logo is used by any channels, movies, or series"""
        # Check if used by channels
        if obj.channels.exists():
            return True

        # Check if used by movies (handle case where VOD app might not be available)
        try:
            if hasattr(obj, 'movie') and obj.movie.exists():
                return True
        except AttributeError:
            pass

        # Check if used by series (handle case where VOD app might not be available)
        try:
            if hasattr(obj, 'series') and obj.series.exists():
                return True
        except AttributeError:
            pass

        return False

    def get_channel_names(self, obj):
        """Get the names of channels using this logo (limited to first 5)"""
        """Get the names of channels, movies, and series using this logo (limited to first 5)"""
        names = []

        # Get channel names
        channels = obj.channels.all()[:5]
        names = [channel.name for channel in channels]
        if obj.channels.count() > 5:
            names.append(f"...and {obj.channels.count() - 5} more")
        for channel in channels:
            names.append(f"Channel: {channel.name}")

        # Get movie names (only if we haven't reached limit)
        if len(names) < 5:
            try:
                if hasattr(obj, 'movie'):
                    remaining_slots = 5 - len(names)
                    movies = obj.movie.all()[:remaining_slots]
                    for movie in movies:
                        names.append(f"Movie: {movie.name}")
            except AttributeError:
                pass

        # Get series names (only if we haven't reached limit)
        if len(names) < 5:
            try:
                if hasattr(obj, 'series'):
                    remaining_slots = 5 - len(names)
                    series = obj.series.all()[:remaining_slots]
                    for series_item in series:
                        names.append(f"Series: {series_item.name}")
            except AttributeError:
                pass

        # Calculate total count for "more" message
        total_count = self.get_channel_count(obj)
        if total_count > 5:
            names.append(f"...and {total_count - 5} more")

        return names
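
Note: with the serializer methods above, channel_count now aggregates channels, movies, and series, and channel_names prefixes each entry with its type. Roughly the shape a client sees for a logo used by one channel and two movies (ids and names are invented; only the field semantics come from the code above):

# Illustrative LogoSerializer output, not captured from a real instance
example_logo_representation = {
    "id": 42,
    "name": "studio-logo",
    "channel_count": 3,          # channels + movies + series combined
    "is_used": True,
    "channel_names": [
        "Channel: Example TV",
        "Movie: Example Movie",
        "Movie: Example Movie II",
    ],
}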
@@ -134,16 +197,54 @@ class StreamSerializer(serializers.ModelSerializer):
        return fields


class ChannelGroupM3UAccountSerializer(serializers.ModelSerializer):
    m3u_accounts = serializers.IntegerField(source="m3u_accounts.id", read_only=True)
    enabled = serializers.BooleanField()
    auto_channel_sync = serializers.BooleanField(default=False)
    auto_sync_channel_start = serializers.FloatField(allow_null=True, required=False)
    custom_properties = serializers.JSONField(required=False)

    class Meta:
        model = ChannelGroupM3UAccount
        fields = ["m3u_accounts", "channel_group", "enabled", "auto_channel_sync", "auto_sync_channel_start", "custom_properties"]

    def to_representation(self, instance):
        data = super().to_representation(instance)

        custom_props = {}
        if instance.custom_properties:
            try:
                custom_props = json.loads(instance.custom_properties)
            except (json.JSONDecodeError, TypeError):
                custom_props = {}

        return data

    def to_internal_value(self, data):
        # Accept both dict and JSON string for custom_properties
        val = data.get("custom_properties")
        if isinstance(val, str):
            try:
                data["custom_properties"] = json.loads(val)
            except Exception:
                pass

        return super().to_internal_value(data)

#
# Channel Group
#
class ChannelGroupSerializer(serializers.ModelSerializer):
    channel_count = serializers.IntegerField(read_only=True)
    m3u_account_count = serializers.IntegerField(read_only=True)
    m3u_accounts = ChannelGroupM3UAccountSerializer(
        many=True,
        read_only=True
    )

    class Meta:
        model = ChannelGroup
        fields = ["id", "name", "channel_count", "m3u_account_count"]
        fields = ["id", "name", "channel_count", "m3u_account_count", "m3u_accounts"]


class ChannelProfileSerializer(serializers.ModelSerializer):

@@ -347,40 +448,6 @@ class ChannelSerializer(serializers.ModelSerializer):
        return None


class ChannelGroupM3UAccountSerializer(serializers.ModelSerializer):
    enabled = serializers.BooleanField()
    auto_channel_sync = serializers.BooleanField(default=False)
    auto_sync_channel_start = serializers.FloatField(allow_null=True, required=False)
    custom_properties = serializers.JSONField(required=False)

    class Meta:
        model = ChannelGroupM3UAccount
        fields = ["id", "channel_group", "enabled", "auto_channel_sync", "auto_sync_channel_start", "custom_properties"]

    def to_representation(self, instance):
        ret = super().to_representation(instance)
        # Ensure custom_properties is always a dict or None
        val = ret.get("custom_properties")
        if isinstance(val, str):
            import json
            try:
                ret["custom_properties"] = json.loads(val)
            except Exception:
                ret["custom_properties"] = None
        return ret

    def to_internal_value(self, data):
        # Accept both dict and JSON string for custom_properties
        val = data.get("custom_properties")
        if isinstance(val, str):
            import json
            try:
                data["custom_properties"] = json.loads(val)
            except Exception:
                pass
        return super().to_internal_value(data)


class RecordingSerializer(serializers.ModelSerializer):
    class Meta:
        model = Recording
@@ -1,6 +1,7 @@
from django.contrib import admin
from django.utils.html import format_html
from .models import M3UAccount, M3UFilter, ServerGroup, UserAgent
import json


class M3UFilterInline(admin.TabularInline):

@@ -17,6 +18,7 @@ class M3UAccountAdmin(admin.ModelAdmin):
        "server_url",
        "server_group",
        "max_streams",
        "priority",
        "is_active",
        "user_agent_display",
        "uploaded_file_link",

@@ -38,6 +40,18 @@ class M3UAccountAdmin(admin.ModelAdmin):

    user_agent_display.short_description = "User Agent(s)"

    def vod_enabled_display(self, obj):
        """Display whether VOD is enabled for this account"""
        if obj.custom_properties:
            try:
                custom_props = json.loads(obj.custom_properties)
                return "Yes" if custom_props.get('enable_vod', False) else "No"
            except (json.JSONDecodeError, TypeError):
                pass
        return "No"
    vod_enabled_display.short_description = "VOD Enabled"
    vod_enabled_display.boolean = True

    def uploaded_file_link(self, obj):
        if obj.uploaded_file:
            return format_html(
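
Note: throughout this commit the VOD switch is not a dedicated model field; it is a JSON key stored inside M3UAccount.custom_properties (a TextField), which is why the admin, form, serializer, and tasks all parse it with json.loads. A sketch of the stored value (other keys may coexist alongside it):

import json

# What the code above reads back from M3UAccount.custom_properties
account_custom_properties = json.dumps({"enable_vod": True})

props = json.loads(account_custom_properties)
vod_enabled = props.get("enable_vod", False)
assert vod_enabled is True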
@@ -21,6 +21,7 @@ from .models import M3UAccount, M3UFilter, ServerGroup, M3UAccountProfile
from core.models import UserAgent
from apps.channels.models import ChannelGroupM3UAccount
from core.serializers import UserAgentSerializer
from apps.vod.models import M3UVODCategoryRelation

from .serializers import (
    M3UAccountSerializer,

@@ -30,8 +31,7 @@ from .serializers import (
)

from .tasks import refresh_single_m3u_account, refresh_m3u_accounts
from django.core.files.storage import default_storage
from django.core.files.base import ContentFile
import json


class M3UAccountViewSet(viewsets.ModelViewSet):

@@ -78,15 +78,33 @@ class M3UAccountViewSet(viewsets.ModelViewSet):
        # Now call super().create() to create the instance
        response = super().create(request, *args, **kwargs)

        print(response.data.get("account_type"))
        if response.data.get("account_type") == M3UAccount.Types.XC:
            refresh_m3u_groups(response.data.get("id"))
        account_type = response.data.get("account_type")
        account_id = response.data.get("id")

        if account_type == M3UAccount.Types.XC:
            refresh_m3u_groups(account_id)

            # Check if VOD is enabled
            enable_vod = request.data.get("enable_vod", False)
            if enable_vod:
                from apps.vod.tasks import refresh_categories

                refresh_categories(account_id)

        # After the instance is created, return the response
        return response

    def update(self, request, *args, **kwargs):
        instance = self.get_object()
        old_vod_enabled = False

        # Check current VOD setting
        if instance.custom_properties:
            try:
                custom_props = json.loads(instance.custom_properties)
                old_vod_enabled = custom_props.get("enable_vod", False)
            except (json.JSONDecodeError, TypeError):
                pass

        # Handle file upload first, if any
        file_path = None

@@ -122,6 +140,18 @@ class M3UAccountViewSet(viewsets.ModelViewSet):
        # Now call super().update() to update the instance
        response = super().update(request, *args, **kwargs)

        # Check if VOD setting changed and trigger refresh if needed
        new_vod_enabled = request.data.get("enable_vod", old_vod_enabled)

        if (
            instance.account_type == M3UAccount.Types.XC
            and not old_vod_enabled
            and new_vod_enabled
        ):
            from apps.vod.tasks import refresh_vod_content

            refresh_vod_content.delay(instance.id)

        # After the instance is updated, return the response
        return response

@@ -143,11 +173,52 @@ class M3UAccountViewSet(viewsets.ModelViewSet):
        # Continue with regular partial update
        return super().partial_update(request, *args, **kwargs)

    @action(detail=True, methods=["post"], url_path="refresh-vod")
    def refresh_vod(self, request, pk=None):
        """Trigger VOD content refresh for XtreamCodes accounts"""
        account = self.get_object()

        if account.account_type != M3UAccount.Types.XC:
            return Response(
                {"error": "VOD refresh is only available for XtreamCodes accounts"},
                status=status.HTTP_400_BAD_REQUEST,
            )

        # Check if VOD is enabled
        vod_enabled = False
        if account.custom_properties:
            try:
                custom_props = json.loads(account.custom_properties)
                vod_enabled = custom_props.get("enable_vod", False)
            except (json.JSONDecodeError, TypeError):
                pass

        if not vod_enabled:
            return Response(
                {"error": "VOD is not enabled for this account"},
                status=status.HTTP_400_BAD_REQUEST,
            )

        try:
            from apps.vod.tasks import refresh_vod_content

            refresh_vod_content.delay(account.id)
            return Response(
                {"message": f"VOD refresh initiated for account {account.name}"},
                status=status.HTTP_202_ACCEPTED,
            )
        except Exception as e:
            return Response(
                {"error": f"Failed to initiate VOD refresh: {str(e)}"},
                status=status.HTTP_500_INTERNAL_SERVER_ERROR,
            )

    @action(detail=True, methods=["patch"], url_path="group-settings")
    def update_group_settings(self, request, pk=None):
        """Update auto channel sync settings for M3U account groups"""
        account = self.get_object()
        group_settings = request.data.get("group_settings", [])
        category_settings = request.data.get("category_settings", [])

        try:
            for setting in group_settings:

@@ -173,6 +244,25 @@ class M3UAccountViewSet(viewsets.ModelViewSet):
                        },
                    )

            for setting in category_settings:
                category_id = setting.get("id")
                enabled = setting.get("enabled", True)
                custom_properties = setting.get("custom_properties", {})

                if category_id:
                    M3UVODCategoryRelation.objects.update_or_create(
                        category_id=category_id,
                        m3u_account=account,
                        defaults={
                            "enabled": enabled,
                            "custom_properties": (
                                custom_properties
                                if isinstance(custom_properties, str)
                                else json.dumps(custom_properties)
                            ),
                        },
                    )

            return Response({"message": "Group settings updated successfully"})

        except Exception as e:
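
Note: the refresh-vod detail action can be triggered over HTTP once an account has VOD enabled. A sketch of the call, assuming the viewset is mounted under /api/m3u/accounts/ and token auth (both assumptions, not shown in this diff):

import requests

BASE = "http://localhost:9191/api/m3u/accounts"   # assumed mount point of M3UAccountViewSet
HEADERS = {"Authorization": "Bearer <token>"}      # assumed auth scheme
account_id = 3                                     # hypothetical XtreamCodes account

resp = requests.post(f"{BASE}/{account_id}/refresh-vod/", headers=HEADERS)

# 202 with a confirmation message when VOD is enabled on the account,
# 400 when the account is not XtreamCodes or VOD is disabled
print(resp.status_code, resp.json())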
@@ -4,6 +4,13 @@ from .models import M3UAccount, M3UFilter
import re

class M3UAccountForm(forms.ModelForm):
    enable_vod = forms.BooleanField(
        required=False,
        initial=False,
        label="Enable VOD Content",
        help_text="Parse and import VOD (movies/series) content for XtreamCodes accounts"
    )

    class Meta:
        model = M3UAccount
        fields = [

@@ -13,8 +20,44 @@ class M3UAccountForm(forms.ModelForm):
            'server_group',
            'max_streams',
            'is_active',
            'enable_vod',
        ]

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)

        # Set initial value for enable_vod from custom_properties
        if self.instance and self.instance.custom_properties:
            try:
                import json
                custom_props = json.loads(self.instance.custom_properties)
                self.fields['enable_vod'].initial = custom_props.get('enable_vod', False)
            except (json.JSONDecodeError, TypeError):
                pass

    def save(self, commit=True):
        instance = super().save(commit=False)

        # Handle enable_vod field
        enable_vod = self.cleaned_data.get('enable_vod', False)

        # Parse existing custom_properties
        custom_props = {}
        if instance.custom_properties:
            try:
                import json
                custom_props = json.loads(instance.custom_properties)
            except (json.JSONDecodeError, TypeError):
                custom_props = {}

        # Update VOD preference
        custom_props['enable_vod'] = enable_vod
        instance.custom_properties = json.dumps(custom_props)

        if commit:
            instance.save()
        return instance

    def clean_uploaded_file(self):
        uploaded_file = self.cleaned_data.get('uploaded_file')
        if uploaded_file:
apps/m3u/migrations/0016_m3uaccount_priority.py (new file, 18 lines)

@@ -0,0 +1,18 @@
# Generated by Django 5.2.4 on 2025-08-20 22:35

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('m3u', '0015_alter_m3ufilter_options_m3ufilter_custom_properties'),
    ]

    operations = [
        migrations.AddField(
            model_name='m3uaccount',
            name='priority',
            field=models.PositiveIntegerField(default=0, help_text='Priority for VOD provider selection (higher numbers = higher priority). Used when multiple providers offer the same content.'),
        ),
    ]

@@ -94,6 +94,10 @@ class M3UAccount(models.Model):
        default=7,
        help_text="Number of days after which a stream will be removed if not seen in the M3U source.",
    )
    priority = models.PositiveIntegerField(
        default=0,
        help_text="Priority for VOD provider selection (higher numbers = higher priority). Used when multiple providers offer the same content.",
    )

    def __str__(self):
        return self.name
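
Note: the new priority field drives provider selection when several XtreamCodes accounts carry the same content; the output views later in this diff resolve it with an order_by on the relation queryset. A condensed sketch of that selection (the movie id is made up):

from apps.vod.models import M3UMovieRelation

# Highest-priority active provider for a given movie; ties broken by relation id
best_relation = (
    M3UMovieRelation.objects
    .filter(movie_id=123, m3u_account__is_active=True)   # 123 is a hypothetical movie id
    .select_related("m3u_account")
    .order_by("-m3u_account__priority", "id")
    .first()
)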
@@ -1,5 +1,5 @@
from core.utils import validate_flexible_url
from rest_framework import serializers
from rest_framework import serializers, status
from rest_framework.response import Response
from .models import M3UAccount, M3UFilter, ServerGroup, M3UAccountProfile
from core.models import UserAgent

@@ -8,6 +8,7 @@ from apps.channels.serializers import (
    ChannelGroupM3UAccountSerializer,
)
import logging
import json

logger = logging.getLogger(__name__)

@@ -87,6 +88,7 @@ class M3UAccountSerializer(serializers.ModelSerializer):
        allow_null=True,
        validators=[validate_flexible_url],
    )
    enable_vod = serializers.BooleanField(required=False, write_only=True)

    class Meta:
        model = M3UAccount

@@ -111,8 +113,10 @@ class M3UAccountSerializer(serializers.ModelSerializer):
            "username",
            "password",
            "stale_stream_days",
            "priority",
            "status",
            "last_message",
            "enable_vod",
        ]
        extra_kwargs = {
            "password": {

@@ -121,7 +125,37 @@ class M3UAccountSerializer(serializers.ModelSerializer):
            },
        }

    def to_representation(self, instance):
        data = super().to_representation(instance)

        # Parse custom_properties to get VOD preference
        custom_props = {}
        if instance.custom_properties:
            try:
                custom_props = json.loads(instance.custom_properties)
            except (json.JSONDecodeError, TypeError):
                custom_props = {}

        data["enable_vod"] = custom_props.get("enable_vod", False)
        return data

    def update(self, instance, validated_data):
        # Handle enable_vod preference
        enable_vod = validated_data.pop("enable_vod", None)

        if enable_vod is not None:
            # Parse existing custom_properties
            custom_props = {}
            if instance.custom_properties:
                try:
                    custom_props = json.loads(instance.custom_properties)
                except (json.JSONDecodeError, TypeError):
                    custom_props = {}

            # Update VOD preference
            custom_props["enable_vod"] = enable_vod
            validated_data["custom_properties"] = json.dumps(custom_props)

        # Pop out channel group memberships so we can handle them manually
        channel_group_data = validated_data.pop("channel_group", [])

@@ -153,6 +187,24 @@ class M3UAccountSerializer(serializers.ModelSerializer):

        return instance

    def create(self, validated_data):
        # Handle enable_vod preference during creation
        enable_vod = validated_data.pop("enable_vod", False)

        # Parse existing custom_properties or create new
        custom_props = {}
        if validated_data.get("custom_properties"):
            try:
                custom_props = json.loads(validated_data["custom_properties"])
            except (json.JSONDecodeError, TypeError):
                custom_props = {}

        # Set VOD preference
        custom_props["enable_vod"] = enable_vod
        validated_data["custom_properties"] = json.dumps(custom_props)

        return super().create(validated_data)

    def get_filters(self, obj):
        filters = obj.filters.order_by("order")
        return M3UFilterSerializer(filters, many=True).data
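
Note: because enable_vod is a write_only serializer field folded into custom_properties, clients toggle VOD with a plain boolean in the request body and read it back from the representation. A sketch of that round trip, assuming the accounts endpoint lives at /api/m3u/accounts/ and token auth (assumptions):

import requests

BASE = "http://localhost:9191/api/m3u/accounts"   # assumed route for the account viewset
HEADERS = {"Authorization": "Bearer <token>"}      # assumed auth scheme

# Toggle VOD parsing on an existing XtreamCodes account
resp = requests.patch(f"{BASE}/3/", json={"enable_vod": True}, headers=HEADERS)

# The representation surfaces the flag read back from custom_properties
print(resp.json().get("enable_vod"))   # expected: True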
@@ -663,8 +663,8 @@ def process_m3u_batch(account_id, batch, groups, hash_keys):
def cleanup_streams(account_id, scan_start_time=timezone.now):
    account = M3UAccount.objects.get(id=account_id, is_active=True)
    existing_groups = ChannelGroup.objects.filter(
        m3u_account__m3u_account=account,
        m3u_account__enabled=True,
        m3u_accounts__m3u_account=account,
        m3u_accounts__enabled=True,
    ).values_list("id", flat=True)
    logger.info(
        f"Found {len(existing_groups)} active groups for M3U account {account_id}"

@@ -1613,7 +1613,19 @@ def refresh_single_m3u_account(account_id):

        # Set status to fetching
        account.status = M3UAccount.Status.FETCHING
        account.save(update_fields=["status"])
        account.save(update_fields=['status'])

        filters = list(account.filters.all())

        # Check if VOD is enabled for this account
        vod_enabled = False
        if account.custom_properties:
            try:
                custom_props = json.loads(account.custom_properties)
                vod_enabled = custom_props.get('enable_vod', False)
            except (json.JSONDecodeError, TypeError):
                vod_enabled = False

    except M3UAccount.DoesNotExist:
        # The M3U account doesn't exist, so delete the periodic task if it exists
        logger.warning(

@@ -1742,8 +1754,8 @@ def refresh_single_m3u_account(account_id):
    existing_groups = {
        group.name: group.id
        for group in ChannelGroup.objects.filter(
            m3u_account__m3u_account=account,  # Filter by the M3UAccount
            m3u_account__enabled=True,  # Filter by the enabled flag in the join table
            m3u_accounts__m3u_account=account,  # Filter by the M3UAccount
            m3u_accounts__enabled=True,  # Filter by the enabled flag in the join table
        )
    }

@@ -1946,6 +1958,16 @@ def refresh_single_m3u_account(account_id):
            message=account.last_message,
        )

        # Trigger VOD refresh if enabled and account is XtreamCodes type
        if vod_enabled and account.account_type == M3UAccount.Types.XC:
            logger.info(f"VOD is enabled for account {account_id}, triggering VOD refresh")
            try:
                from apps.vod.tasks import refresh_vod_content
                refresh_vod_content.delay(account_id)
                logger.info(f"VOD refresh task queued for account {account_id}")
            except Exception as e:
                logger.error(f"Failed to queue VOD refresh for account {account_id}: {str(e)}")

    except Exception as e:
        logger.error(f"Error processing M3U for account {account_id}: {str(e)}")
        account.status = M3UAccount.Status.ERROR
@@ -3,6 +3,7 @@ from django.views import View
from django.utils.decorators import method_decorator
from django.contrib.auth.decorators import login_required
from django.views.decorators.csrf import csrf_exempt
from django.http import JsonResponse
from apps.m3u.models import M3UAccount
import json

@@ -1,5 +1,5 @@
from django.urls import path, re_path, include
from .views import m3u_endpoint, epg_endpoint, xc_get
from .views import m3u_endpoint, epg_endpoint, xc_get, xc_movie_stream, xc_series_stream
from core.views import stream_view

app_name = "output"
@@ -18,6 +18,10 @@ import time  # Add this import for keep-alive delays
from tzlocal import get_localzone
from urllib.parse import urlparse
import base64
import logging
import os

logger = logging.getLogger(__name__)

def m3u_endpoint(request, profile_name=None, user=None):
    if not network_access_allowed(request, "M3U_EPG"):

@@ -117,7 +121,7 @@ def generate_m3u(request, profile_name=None, user=None):
        if channel.logo:
            if use_cached_logos:
                # Use cached logo as before
                tvg_logo = request.build_absolute_uri(reverse('api:channels:logo-cache', args=[channel.logo.id]))
                tvg_logo = build_absolute_uri_with_port(request, reverse('api:channels:logo-cache', args=[channel.logo.id]))
            else:
                # Try to find direct logo URL from channel's streams
                direct_logo = channel.logo.url if channel.logo.url.startswith(('http://', 'https://')) else None

@@ -125,7 +129,7 @@ def generate_m3u(request, profile_name=None, user=None):
                if direct_logo:
                    tvg_logo = direct_logo
                else:
                    tvg_logo = request.build_absolute_uri(reverse('api:channels:logo-cache', args=[channel.logo.id]))
                    tvg_logo = build_absolute_uri_with_port(request, reverse('api:channels:logo-cache', args=[channel.logo.id]))

        # create possible gracenote id insertion
        tvc_guide_stationid = ""

@@ -369,7 +373,7 @@ def generate_epg(request, profile_name=None, user=None):
        if channel.logo:
            if use_cached_logos:
                # Use cached logo as before
                tvg_logo = request.build_absolute_uri(reverse('api:channels:logo-cache', args=[channel.logo.id]))
                tvg_logo = build_absolute_uri_with_port(request, reverse('api:channels:logo-cache', args=[channel.logo.id]))
            else:
                # Try to find direct logo URL from channel's streams
                direct_logo = channel.logo.url if channel.logo.url.startswith(('http://', 'https://')) else None

@@ -377,7 +381,7 @@ def generate_epg(request, profile_name=None, user=None):
                if direct_logo:
                    tvg_logo = direct_logo
                else:
                    tvg_logo = request.build_absolute_uri(reverse('api:channels:logo-cache', args=[channel.logo.id]))
                    tvg_logo = build_absolute_uri_with_port(request, reverse('api:channels:logo-cache', args=[channel.logo.id]))
        display_name = channel.name
        xml_lines.append(f'  <channel id="{channel_id}">')
        xml_lines.append(f'    <display-name>{html.escape(display_name)}</display-name>')
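
Note: build_absolute_uri_with_port is referenced in these hunks but defined outside this diff. Purely as an assumption of its intent, a helper along these lines would rebuild scheme://host:port explicitly so cached-logo URLs keep a non-standard port behind a reverse proxy; the real implementation in the codebase may differ:

def build_absolute_uri_with_port(request, path):
    """Hypothetical sketch only; the real helper lives in code outside this diff."""
    scheme = "https" if request.is_secure() else "http"
    host = request.get_host()          # may already contain the port
    if ":" not in host:
        port = request.get_port()
        if port not in ("80", "443"):
            host = f"{host}:{port}"
    return f"{scheme}://{host}{path}"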
@@ -789,7 +793,20 @@ def xc_player_api(request, full=False):
        "get_series_info",
        "get_vod_info",
    ]:
        return JsonResponse([], safe=False)
        if action == "get_vod_categories":
            return JsonResponse(xc_get_vod_categories(user), safe=False)
        elif action == "get_vod_streams":
            return JsonResponse(xc_get_vod_streams(request, user, request.GET.get("category_id")), safe=False)
        elif action == "get_series_categories":
            return JsonResponse(xc_get_series_categories(user), safe=False)
        elif action == "get_series":
            return JsonResponse(xc_get_series(request, user, request.GET.get("category_id")), safe=False)
        elif action == "get_series_info":
            return JsonResponse(xc_get_series_info(request, user, request.GET.get("series_id")), safe=False)
        elif action == "get_vod_info":
            return JsonResponse(xc_get_vod_info(request, user, request.GET.get("vod_id")), safe=False)
        else:
            return JsonResponse([], safe=False)

    raise Http404()
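
Note: with this dispatch in place, any XtreamCodes-compatible player can browse Dispatcharr's VOD catalogue through the standard player_api actions. A sketch of the request shape, assuming the usual player_api.php route with username/password query parameters (the route itself is not part of this hunk, and the credentials are hypothetical):

import requests

BASE = "http://localhost:9191"                      # assumed Dispatcharr host/port
CREDS = {"username": "user", "password": "pass"}    # hypothetical XtreamCodes credentials

categories = requests.get(
    f"{BASE}/player_api.php",
    params={**CREDS, "action": "get_vod_categories"},
).json()

# Movies within the first category returned above
movies = requests.get(
    f"{BASE}/player_api.php",
    params={**CREDS, "action": "get_vod_streams", "category_id": categories[0]["category_id"]},
).json()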
@@ -901,7 +918,8 @@ def xc_get_live_streams(request, user, category_id=None):
            "stream_icon": (
                None
                if not channel.logo
                else request.build_absolute_uri(
                else build_absolute_uri_with_port(
                    request,
                    reverse("api:channels:logo-cache", args=[channel.logo.id])
                )
            ),
@@ -986,3 +1004,734 @@ def xc_get_epg(request, user, short=False):
            output['epg_listings'].append(program_output)

    return output


def xc_get_vod_categories(user):
    """Get VOD categories for XtreamCodes API"""
    from apps.vod.models import VODCategory, M3UMovieRelation

    response = []

    # Filter categories based on user's M3U accounts
    if user.user_level == 0:
        # For regular users, get categories from their accessible M3U accounts
        if user.channel_profiles.count() > 0:
            channel_profiles = user.channel_profiles.all()
            # Get M3U accounts accessible through user's profiles
            from apps.m3u.models import M3UAccount
            m3u_accounts = M3UAccount.objects.filter(
                is_active=True,
                profiles__in=channel_profiles
            ).distinct()
        else:
            m3u_accounts = []

        # Get categories that have movie relations with these accounts
        categories = VODCategory.objects.filter(
            category_type='movie',
            m3umovierelation__m3u_account__in=m3u_accounts
        ).distinct()
    else:
        # Admins can see all categories that have active movie relations
        categories = VODCategory.objects.filter(
            category_type='movie',
            m3umovierelation__m3u_account__is_active=True
        ).distinct()

    for category in categories:
        response.append({
            "category_id": str(category.id),
            "category_name": category.name,
            "parent_id": 0,
        })

    return response


def xc_get_vod_streams(request, user, category_id=None):
    """Get VOD streams (movies) for XtreamCodes API"""
    from apps.vod.models import Movie

    streams = []

    # Build filters for movies based on user access
    filters = {"m3u_relations__m3u_account__is_active": True}

    if user.user_level == 0:
        # For regular users, filter by accessible M3U accounts
        if user.channel_profiles.count() > 0:
            channel_profiles = user.channel_profiles.all()
            from apps.m3u.models import M3UAccount
            m3u_accounts = M3UAccount.objects.filter(
                is_active=True,
                profiles__in=channel_profiles
            ).distinct()
            filters["m3u_relations__m3u_account__in"] = m3u_accounts
        else:
            return []  # No accessible accounts

    if category_id:
        filters["m3u_relations__category_id"] = category_id

    # Get movies directly with their relations
    movies = Movie.objects.filter(**filters).select_related('logo').distinct()

    for movie in movies:
        # Get the highest priority relation for this movie (for metadata like container_extension)
        relation = movie.m3u_relations.filter(
            m3u_account__is_active=True
        ).select_related('m3u_account').order_by('-m3u_account__priority', 'id').first()

        if relation:
            relation_custom = relation.custom_properties or {}
            relation_info = relation_custom.get('basic_data', {})
            streams.append({
                "num": movie.id,
                "name": movie.name,
                "stream_type": "movie",
                "stream_id": movie.id,
                "stream_icon": (
                    None if not movie.logo
                    else build_absolute_uri_with_port(
                        request,
                        reverse("api:channels:logo-cache", args=[movie.logo.id])
                    )
                ),
                #'stream_icon': movie.logo.url if movie.logo else '',
                "rating": movie.rating or "0",
                "rating_5based": round(float(movie.rating or 0) / 2, 2) if movie.rating else 0,
                "added": str(movie.created_at.timestamp()),
                "is_adult": 0,
                "tmdb_id": movie.tmdb_id or "",
                "imdb_id": movie.imdb_id or "",
                "trailer": (movie.custom_properties or {}).get('youtube_trailer') or relation_info.get('youtube_trailer') or relation_info.get('trailer', ''),
                "category_id": str(relation.category.id) if relation.category else "0",
                "category_ids": [int(relation.category.id)] if relation.category else [],
                "container_extension": relation.container_extension or "mp4",
                "custom_sid": None,
                "direct_source": "",
            })

    return streams


def xc_get_series_categories(user):
    """Get series categories for XtreamCodes API"""
    from apps.vod.models import VODCategory, M3USeriesRelation

    response = []

    # Similar filtering as VOD categories but for series
    if user.user_level == 0:
        if user.channel_profiles.count() > 0:
            channel_profiles = user.channel_profiles.all()
            from apps.m3u.models import M3UAccount
            m3u_accounts = M3UAccount.objects.filter(
                is_active=True,
                profiles__in=channel_profiles
            ).distinct()
        else:
            m3u_accounts = []

        # Get categories that have series relations with these accounts
        categories = VODCategory.objects.filter(
            category_type='series',
            m3useriesrelation__m3u_account__in=m3u_accounts
        ).distinct()
    else:
        categories = VODCategory.objects.filter(
            category_type='series',
            m3useriesrelation__m3u_account__is_active=True
        ).distinct()

    for category in categories:
        response.append({
            "category_id": str(category.id),
            "category_name": category.name,
            "parent_id": 0,
        })

    return response


def xc_get_series(request, user, category_id=None):
    """Get series list for XtreamCodes API"""
    from apps.vod.models import M3USeriesRelation

    series_list = []

    # Build filters based on user access
    filters = {"m3u_account__is_active": True}

    if user.user_level == 0:
        if user.channel_profiles.count() > 0:
            channel_profiles = user.channel_profiles.all()
            from apps.m3u.models import M3UAccount
            m3u_accounts = M3UAccount.objects.filter(
                is_active=True,
                profiles__in=channel_profiles
            ).distinct()
            filters["m3u_account__in"] = m3u_accounts
        else:
            return []

    if category_id:
        filters["category_id"] = category_id

    # Get series relations instead of series directly
    series_relations = M3USeriesRelation.objects.filter(**filters).select_related(
        'series', 'series__logo', 'category', 'm3u_account'
    )

    for relation in series_relations:
        series = relation.series
        series_list.append({
            "num": relation.id,  # Use relation ID
            "name": series.name,
            "series_id": relation.id,  # Use relation ID
            "cover": (
                None if not series.logo
                else build_absolute_uri_with_port(
                    request,
                    reverse("api:channels:logo-cache", args=[series.logo.id])
                )
            ),
            "plot": series.description or "",
            "cast": series.custom_properties.get('cast', '') if series.custom_properties else "",
            "director": series.custom_properties.get('director', '') if series.custom_properties else "",
            "genre": series.genre or "",
            "release_date": series.custom_properties.get('release_date', str(series.year) if series.year else "") if series.custom_properties else (str(series.year) if series.year else ""),
            "releaseDate": series.custom_properties.get('release_date', str(series.year) if series.year else "") if series.custom_properties else (str(series.year) if series.year else ""),
            "last_modified": str(int(relation.updated_at.timestamp())),
            "rating": str(series.rating or "0"),
            "rating_5based": str(round(float(series.rating or 0) / 2, 2)) if series.rating else "0",
            "backdrop_path": series.custom_properties.get('backdrop_path', []) if series.custom_properties else [],
            "youtube_trailer": series.custom_properties.get('youtube_trailer', '') if series.custom_properties else "",
            "episode_run_time": series.custom_properties.get('episode_run_time', '') if series.custom_properties else "",
            "category_id": str(relation.category.id) if relation.category else "0",
            "category_ids": [int(relation.category.id)] if relation.category else [],
        })

    return series_list


def xc_get_series_info(request, user, series_id):
    """Get detailed series information including episodes"""
    from apps.vod.models import M3USeriesRelation, M3UEpisodeRelation

    if not series_id:
        raise Http404()

    # Get series relation with user access filtering
    filters = {"id": series_id, "m3u_account__is_active": True}

    if user.user_level == 0:
        if user.channel_profiles.count() > 0:
            channel_profiles = user.channel_profiles.all()
            from apps.m3u.models import M3UAccount
            m3u_accounts = M3UAccount.objects.filter(
                is_active=True,
                profiles__in=channel_profiles
            ).distinct()
            filters["m3u_account__in"] = m3u_accounts
        else:
            raise Http404()

    try:
        series_relation = M3USeriesRelation.objects.select_related('series', 'series__logo').get(**filters)
        series = series_relation.series
    except M3USeriesRelation.DoesNotExist:
        raise Http404()

    # Check if we need to refresh detailed info (similar to vod api_views pattern)
    try:
        should_refresh = (
            not series_relation.last_episode_refresh or
            series_relation.last_episode_refresh < timezone.now() - timedelta(hours=24)
        )

        # Check if detailed data has been fetched
        custom_props = series_relation.custom_properties or {}
        episodes_fetched = custom_props.get('episodes_fetched', False)
        detailed_fetched = custom_props.get('detailed_fetched', False)

        # Force refresh if episodes/details have never been fetched or time interval exceeded
        if not episodes_fetched or not detailed_fetched or should_refresh:
            from apps.vod.tasks import refresh_series_episodes
            account = series_relation.m3u_account
            if account and account.is_active:
                refresh_series_episodes(account, series, series_relation.external_series_id)
                # Refresh objects from database after task completion
                series.refresh_from_db()
                series_relation.refresh_from_db()

    except Exception as e:
        logger.error(f"Error refreshing series data for relation {series_relation.id}: {str(e)}")

    # Get episodes for this series from the same M3U account
    episode_relations = M3UEpisodeRelation.objects.filter(
        episode__series=series,
        m3u_account=series_relation.m3u_account
    ).select_related('episode').order_by('episode__season_number', 'episode__episode_number')

    # Group episodes by season
    seasons = {}
    for relation in episode_relations:
        episode = relation.episode
        season_num = episode.season_number or 1
        if season_num not in seasons:
            seasons[season_num] = []

        # Try to get the highest priority related M3UEpisodeRelation for this episode (for video/audio/bitrate)
        from apps.vod.models import M3UEpisodeRelation
        first_relation = M3UEpisodeRelation.objects.filter(
            episode=episode
        ).select_related('m3u_account').order_by('-m3u_account__priority', 'id').first()
        video = audio = bitrate = None
        if first_relation and first_relation.custom_properties:
            info = first_relation.custom_properties.get('info')
            if info and isinstance(info, dict):
                info_info = info.get('info')
                if info_info and isinstance(info_info, dict):
                    video = info_info.get('video', {})
                    audio = info_info.get('audio', {})
                    bitrate = info_info.get('bitrate', 0)
        if video is None:
            video = episode.custom_properties.get('video', {}) if episode.custom_properties else {}
        if audio is None:
            audio = episode.custom_properties.get('audio', {}) if episode.custom_properties else {}
        if bitrate is None:
            bitrate = episode.custom_properties.get('bitrate', 0) if episode.custom_properties else 0

        seasons[season_num].append({
            "id": episode.id,
            "season": season_num,
            "episode_num": episode.episode_number or 0,
            "title": episode.name,
            "container_extension": relation.container_extension or "mp4",
            "added": str(int(relation.created_at.timestamp())),
            "custom_sid": None,
            "direct_source": "",
            "info": {
                "id": int(episode.id),
                "name": episode.name,
                "overview": episode.description or "",
                "crew": str(episode.custom_properties.get('crew', "") if episode.custom_properties else ""),
                "directed_by": episode.custom_properties.get('director', '') if episode.custom_properties else "",
                "imdb_id": episode.imdb_id or "",
                "air_date": f"{episode.air_date}" if episode.air_date else "",
                "backdrop_path": episode.custom_properties.get('backdrop_path', []) if episode.custom_properties else [],
                "movie_image": episode.custom_properties.get('movie_image', '') if episode.custom_properties else "",
                "rating": float(episode.rating or 0),
                "release_date": f"{episode.air_date}" if episode.air_date else "",
                "duration_secs": (episode.duration_secs or 0),
                "duration": format_duration_hms(episode.duration_secs),
                "video": video,
                "audio": audio,
                "bitrate": bitrate,
            }
        })

    # Build response using potentially refreshed data
    series_data = {
        'name': series.name,
        'description': series.description or '',
        'year': series.year,
        'genre': series.genre or '',
        'rating': series.rating or '0',
        'cast': '',
        'director': '',
        'youtube_trailer': '',
        'episode_run_time': '',
        'backdrop_path': [],
    }

    # Add detailed info from custom_properties if available
    try:
        if series.custom_properties:
            custom_data = series.custom_properties
            series_data.update({
                'cast': custom_data.get('cast', ''),
                'director': custom_data.get('director', ''),
                'youtube_trailer': custom_data.get('youtube_trailer', ''),
                'episode_run_time': custom_data.get('episode_run_time', ''),
                'backdrop_path': custom_data.get('backdrop_path', []),
            })

        # Check relation custom_properties for detailed_info
        if series_relation.custom_properties and 'detailed_info' in series_relation.custom_properties:
            detailed_info = series_relation.custom_properties['detailed_info']

            # Override with detailed_info values where available
            for key in ['name', 'description', 'year', 'genre', 'rating']:
                if detailed_info.get(key):
                    series_data[key] = detailed_info[key]

            # Handle plot vs description
            if detailed_info.get('plot'):
                series_data['description'] = detailed_info['plot']
            elif detailed_info.get('description'):
                series_data['description'] = detailed_info['description']

            # Update additional fields from detailed info
            series_data.update({
                'cast': detailed_info.get('cast', series_data['cast']),
                'director': detailed_info.get('director', series_data['director']),
                'youtube_trailer': detailed_info.get('youtube_trailer', series_data['youtube_trailer']),
                'episode_run_time': detailed_info.get('episode_run_time', series_data['episode_run_time']),
                'backdrop_path': detailed_info.get('backdrop_path', series_data['backdrop_path']),
            })

    except Exception as e:
        logger.error(f"Error parsing series custom_properties: {str(e)}")

    seasons_list = [
        {"season_number": int(season_num), "name": f"Season {season_num}"}
        for season_num in sorted(seasons.keys(), key=lambda x: int(x))
    ]

    info = {
        'seasons': seasons_list,
        "info": {
            "name": series_data['name'],
            "cover": (
                None if not series.logo
                else build_absolute_uri_with_port(
                    request,
                    reverse("api:channels:logo-cache", args=[series.logo.id])
                )
            ),
            "plot": series_data['description'],
            "cast": series_data['cast'],
            "director": series_data['director'],
            "genre": series_data['genre'],
            "release_date": series.custom_properties.get('release_date', str(series.year) if series.year else "") if series.custom_properties else (str(series.year) if series.year else ""),
            "releaseDate": series.custom_properties.get('release_date', str(series.year) if series.year else "") if series.custom_properties else (str(series.year) if series.year else ""),
            "added": str(int(series_relation.created_at.timestamp())),
            "last_modified": str(int(series_relation.updated_at.timestamp())),
            "rating": str(series_data['rating']),
            "rating_5based": str(round(float(series_data['rating'] or 0) / 2, 2)) if series_data['rating'] else "0",
            "backdrop_path": series_data['backdrop_path'],
            "youtube_trailer": series_data['youtube_trailer'],
            "imdb": str(series.imdb_id) if series.imdb_id else "",
            "tmdb": str(series.tmdb_id) if series.tmdb_id else "",
            "episode_run_time": str(series_data['episode_run_time']),
            "category_id": str(series_relation.category.id) if series_relation.category else "0",
            "category_ids": [int(series_relation.category.id)] if series_relation.category else [],
        },
        "episodes": dict(seasons)
    }
    return info


def xc_get_vod_info(request, user, vod_id):
    """Get detailed VOD (movie) information"""
    from apps.vod.models import M3UMovieRelation
    from django.utils import timezone
    from datetime import timedelta

    if not vod_id:
        raise Http404()

    # Get movie relation with user access filtering - use movie ID instead of relation ID
    filters = {"movie_id": vod_id, "m3u_account__is_active": True}

    if user.user_level == 0:
        if user.channel_profiles.count() > 0:
            channel_profiles = user.channel_profiles.all()
            from apps.m3u.models import M3UAccount
            m3u_accounts = M3UAccount.objects.filter(
                is_active=True,
                profiles__in=channel_profiles
            ).distinct()
            filters["m3u_account__in"] = m3u_accounts
        else:
            raise Http404()

    try:
        movie_relation = M3UMovieRelation.objects.select_related('movie', 'movie__logo').get(**filters)
        movie = movie_relation.movie
    except M3UMovieRelation.DoesNotExist:
        raise Http404()

    # Initialize basic movie data first
    movie_data = {
        'name': movie.name,
        'description': movie.description or '',
        'year': movie.year,
        'genre': movie.genre or '',
        'rating': movie.rating or 0,
        'tmdb_id': movie.tmdb_id or '',
        'imdb_id': movie.imdb_id or '',
        'director': '',
        'actors': '',
        'country': '',
        'release_date': '',
        'youtube_trailer': '',
        'backdrop_path': [],
        'cover_big': '',
        'bitrate': 0,
        'video': {},
        'audio': {},
    }

    # Duplicate the provider_info logic for detailed information
    try:
        # Check if we need to refresh detailed info (same logic as provider_info)
        should_refresh = (
            not movie_relation.last_advanced_refresh or
            movie_relation.last_advanced_refresh < timezone.now() - timedelta(hours=24)
        )

        if should_refresh:
            # Trigger refresh of detailed info
            from apps.vod.tasks import refresh_movie_advanced_data
            refresh_movie_advanced_data(movie_relation.id)
            # Refresh objects from database after task completion
            movie.refresh_from_db()
            movie_relation.refresh_from_db()

        # Add detailed info from custom_properties if available
        if movie.custom_properties:
            try:
                if isinstance(movie.custom_properties, dict):
                    custom_data = movie.custom_properties
                else:
                    custom_data = json.loads(movie.custom_properties)

                # Extract detailed info
                #detailed_info = custom_data.get('detailed_info', {})
                detailed_info = movie_relation.custom_properties.get('detailed_info', {})
                # Update movie_data with detailed info
                movie_data.update({
                    'director': custom_data.get('director') or detailed_info.get('director', ''),
                    'actors': custom_data.get('actors') or detailed_info.get('actors', ''),
                    'country': custom_data.get('country') or detailed_info.get('country', ''),
                    'release_date': custom_data.get('release_date') or detailed_info.get('release_date') or detailed_info.get('releasedate', ''),
                    'youtube_trailer': custom_data.get('youtube_trailer') or detailed_info.get('youtube_trailer') or detailed_info.get('trailer', ''),
                    'backdrop_path': custom_data.get('backdrop_path') or detailed_info.get('backdrop_path', []),
                    'cover_big': detailed_info.get('cover_big', ''),
                    'bitrate': detailed_info.get('bitrate', 0),
                    'video': detailed_info.get('video', {}),
                    'audio': detailed_info.get('audio', {}),
                })

                # Override with detailed_info values where available
                for key in ['name', 'description', 'year', 'genre', 'rating', 'tmdb_id', 'imdb_id']:
                    if detailed_info.get(key):
                        movie_data[key] = detailed_info[key]

                # Handle plot vs description
                if detailed_info.get('plot'):
                    movie_data['description'] = detailed_info['plot']
                elif detailed_info.get('description'):
                    movie_data['description'] = detailed_info['description']

            except (json.JSONDecodeError, AttributeError, TypeError) as e:
                logger.warning(f"Error parsing custom_properties for movie {movie.id}: {e}")

    except Exception as e:
        logger.error(f"Failed to process movie data: {e}")

    # Transform API response to XtreamCodes format
    info = {
        "info": {
            "name": movie_data.get('name', movie.name),
            "o_name": movie_data.get('name', movie.name),
            "cover_big": (
                None if not movie.logo
                else build_absolute_uri_with_port(
                    request,
                    reverse("api:channels:logo-cache", args=[movie.logo.id])
                )
            ),
            "movie_image": (
                None if not movie.logo
                else build_absolute_uri_with_port(
                    request,
                    reverse("api:channels:logo-cache", args=[movie.logo.id])
                )
            ),
            'description': movie_data.get('description', ''),
|
||||
'plot': movie_data.get('description', ''),
|
||||
'year': movie_data.get('year', ''),
|
||||
'release_date': movie_data.get('release_date', ''),
|
||||
'genre': movie_data.get('genre', ''),
|
||||
'director': movie_data.get('director', ''),
|
||||
'actors': movie_data.get('actors', ''),
|
||||
'cast': movie_data.get('actors', ''),
|
||||
'country': movie_data.get('country', ''),
|
||||
'rating': movie_data.get('rating', 0),
|
||||
'imdb_id': movie_data.get('imdb_id', ''),
|
||||
"tmdb_id": movie_data.get('tmdb_id', ''),
|
||||
'youtube_trailer': movie_data.get('youtube_trailer', ''),
|
||||
'backdrop_path': movie_data.get('backdrop_path', []),
|
||||
'cover': movie_data.get('cover_big', ''),
|
||||
'bitrate': movie_data.get('bitrate', 0),
|
||||
'video': movie_data.get('video', {}),
|
||||
'audio': movie_data.get('audio', {}),
|
||||
},
|
||||
"movie_data": {
|
||||
"stream_id": movie.id,
|
||||
"name": movie.name,
|
||||
"added": int(movie_relation.created_at.timestamp()),
|
||||
"category_id": str(movie_relation.category.id) if movie_relation.category else "0",
|
||||
"category_ids": [int(movie_relation.category.id)] if movie_relation.category else [],
|
||||
"container_extension": movie_relation.container_extension or "mp4",
|
||||
"custom_sid": None,
|
||||
"direct_source": "",
|
||||
}
|
||||
}
|
||||
|
||||
return info
|
||||
|
||||
|
||||
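A minimal client-side sketch of fetching this movie info. The player_api endpoint name and the base URL are assumptions based on the usual XtreamCodes convention; the actual routing is defined elsewhere in this PR.

import requests

BASE = "http://dispatcharr.example:9191"          # placeholder host
params = {
    "username": "user",
    "password": "xc_password",                    # the xc_password stored on the user
    "action": "get_vod_info",
    "vod_id": 42,                                 # Movie.id, as used by xc_get_vod_info
}
info = requests.get(f"{BASE}/player_api.php", params=params, timeout=10).json()
print(info["info"]["name"], info["movie_data"]["container_extension"])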
def xc_movie_stream(request, username, password, stream_id, extension):
    """Handle XtreamCodes movie streaming requests"""
    from apps.vod.models import M3UMovieRelation

    user = get_object_or_404(User, username=username)

    custom_properties = (
        json.loads(user.custom_properties) if user.custom_properties else {}
    )

    if "xc_password" not in custom_properties:
        return JsonResponse({"error": "Invalid credentials"}, status=401)

    if custom_properties["xc_password"] != password:
        return JsonResponse({"error": "Invalid credentials"}, status=401)

    # Get movie relation based on user access level - use movie ID instead of relation ID
    filters = {"movie_id": stream_id, "m3u_account__is_active": True}

    if user.user_level < 10:
        # For regular users, filter by accessible M3U accounts
        if user.channel_profiles.count() > 0:
            channel_profiles = user.channel_profiles.all()
            from apps.m3u.models import M3UAccount
            m3u_accounts = M3UAccount.objects.filter(
                is_active=True,
                profiles__in=channel_profiles
            ).distinct()
            filters["m3u_account__in"] = m3u_accounts
        else:
            return JsonResponse({"error": "No accessible content"}, status=403)

    try:
        movie_relation = M3UMovieRelation.objects.select_related('movie').get(**filters)
    except M3UMovieRelation.DoesNotExist:
        return JsonResponse({"error": "Movie not found"}, status=404)

    # Redirect to the VOD proxy endpoint
    from django.http import HttpResponseRedirect
    from django.urls import reverse

    vod_url = reverse('proxy:vod_proxy:vod_stream', kwargs={
        'content_type': 'movie',
        'content_id': movie_relation.movie.uuid
    })

    return HttpResponseRedirect(vod_url)

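The view above only authenticates the user and resolves the movie, then hands playback off to the VOD proxy. A sketch of the typical XtreamCodes-style request it serves; the exact URL wiring is not part of this hunk, so treat the path as an assumption.

# Hypothetical client URL; Dispatcharr answers with a redirect to
# /proxy/vod/movie/<movie uuid>, which the VOD proxy then streams.
username, password, stream_id, ext = "user", "xc_password", 42, "mp4"
url = f"http://dispatcharr.example:9191/movie/{username}/{password}/{stream_id}.{ext}"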
def xc_series_stream(request, username, password, stream_id, extension):
    """Handle XtreamCodes series/episode streaming requests"""
    from apps.vod.models import M3UEpisodeRelation

    user = get_object_or_404(User, username=username)

    custom_properties = (
        json.loads(user.custom_properties) if user.custom_properties else {}
    )

    if "xc_password" not in custom_properties:
        return JsonResponse({"error": "Invalid credentials"}, status=401)

    if custom_properties["xc_password"] != password:
        return JsonResponse({"error": "Invalid credentials"}, status=401)

    # Get episode relation based on user access level - use episode ID instead of stream_id
    filters = {"episode_id": stream_id, "m3u_account__is_active": True}

    if user.user_level < 10:
        # For regular users, filter by accessible M3U accounts
        if user.channel_profiles.count() > 0:
            channel_profiles = user.channel_profiles.all()
            from apps.m3u.models import M3UAccount
            m3u_accounts = M3UAccount.objects.filter(
                is_active=True,
                profiles__in=channel_profiles
            ).distinct()
            filters["m3u_account__in"] = m3u_accounts
        else:
            return JsonResponse({"error": "No accessible content"}, status=403)

    try:
        episode_relation = M3UEpisodeRelation.objects.select_related('episode').get(**filters)
    except M3UEpisodeRelation.DoesNotExist:
        return JsonResponse({"error": "Episode not found"}, status=404)

    # Redirect to the VOD proxy endpoint
    from django.http import HttpResponseRedirect
    from django.urls import reverse

    vod_url = reverse('proxy:vod_proxy:vod_stream', kwargs={
        'content_type': 'episode',
        'content_id': episode_relation.episode.uuid
    })

    return HttpResponseRedirect(vod_url)

def get_host_and_port(request):
    """
    Returns (host, port) for building absolute URIs.
    - Prefers X-Forwarded-Host/X-Forwarded-Port (nginx).
    - Falls back to Host header.
    - In dev, if missing, uses 5656 or 8000 as a guess.
    """
    # 1. Try X-Forwarded-Host (may include port)
    xfh = request.META.get("HTTP_X_FORWARDED_HOST")
    if xfh:
        if ":" in xfh:
            host, port = xfh.split(":", 1)
        else:
            host = xfh
            port = request.META.get("HTTP_X_FORWARDED_PORT")
        if port:
            return host, port

    # 2. Try Host header
    raw_host = request.get_host()
    if ":" in raw_host:
        host, port = raw_host.split(":", 1)
        return host, port
    else:
        host = raw_host

    # 3. Try X-Forwarded-Port
    port = request.META.get("HTTP_X_FORWARDED_PORT")
    if port:
        return host, port

    # 4. Dev fallback: guess port
    if os.environ.get("DISPATCHARR_ENV") == "dev" or host in ("localhost", "127.0.0.1"):
        guess = "5656"
        return host, guess

    # 5. Fallback to scheme default
    port = "443" if request.is_secure() else "9191"
    return host, port


def build_absolute_uri_with_port(request, path):
    host, port = get_host_and_port(request)
    scheme = request.scheme
    return f"{scheme}://{host}:{port}{path}"


def format_duration_hms(seconds):
    """
    Format a duration in seconds as HH:MM:SS zero-padded string.
    """
    seconds = int(seconds or 0)
    return f"{seconds//3600:02}:{(seconds%3600)//60:02}:{seconds%60:02}"
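A quick illustration of how these helpers behave; the request object and the path are assumed to come from an ordinary Django view.

# Inside a view, with `request` in scope:
build_absolute_uri_with_port(request, "/api/some/path/")
# -> e.g. "http://dispatcharr.example:9191/api/some/path/",
#    depending on X-Forwarded-Host/X-Forwarded-Port or the Host header

format_duration_hms(3725)   # -> "01:02:05"
format_duration_hms(None)   # -> "00:00:00"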

@@ -10,6 +10,7 @@ import gc  # Add import for garbage collection
from core.utils import RedisClient
from apps.proxy.ts_proxy.channel_status import ChannelStatus
from core.utils import send_websocket_update
from apps.proxy.vod_proxy.connection_manager import get_connection_manager

logger = logging.getLogger(__name__)


@@ -59,3 +60,13 @@ def fetch_channel_stats():
    # Explicitly clean up large data structures
    all_channels = None
    gc.collect()

@shared_task
def cleanup_vod_connections():
    """Clean up stale VOD connections"""
    try:
        connection_manager = get_connection_manager()
        connection_manager.cleanup_stale_connections(max_age_seconds=3600)  # 1 hour
        logger.info("VOD connection cleanup completed")
    except Exception as e:
        logger.error(f"Error in VOD connection cleanup: {e}", exc_info=True)


@@ -5,4 +5,5 @@ app_name = 'proxy'
urlpatterns = [
    path('ts/', include('apps.proxy.ts_proxy.urls')),
    path('hls/', include('apps.proxy.hls_proxy.urls')),
    path('vod/', include('apps.proxy.vod_proxy.urls')),
]
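cleanup_vod_connections is a plain shared task; if it is meant to run on a timer, one option is a Celery beat entry. The snippet below is only a sketch: the task's dotted path and the interval are assumptions, and this PR may register the schedule elsewhere.

# settings.py (sketch)
from celery.schedules import crontab

CELERY_BEAT_SCHEDULE = {
    "cleanup-vod-connections": {
        "task": "apps.proxy.ts_proxy.tasks.cleanup_vod_connections",  # assumed dotted path
        "schedule": crontab(minute="*/30"),                           # every 30 minutes
    },
}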
apps/proxy/vod_proxy/__init__.py (new file, 0 lines)
apps/proxy/vod_proxy/connection_manager.py (new file, 1444 lines; diff suppressed because it is too large)
apps/proxy/vod_proxy/multi_worker_connection_manager.py (new file, 1025 lines; diff suppressed because it is too large)
apps/proxy/vod_proxy/urls.py (new file, 21 lines)
@@ -0,0 +1,21 @@
from django.urls import path
from . import views

app_name = 'vod_proxy'

urlpatterns = [
    # Generic VOD streaming with session ID in path (for compatibility)
    path('<str:content_type>/<uuid:content_id>/<str:session_id>', views.VODStreamView.as_view(), name='vod_stream_with_session'),
    path('<str:content_type>/<uuid:content_id>/<str:session_id>/<int:profile_id>/', views.VODStreamView.as_view(), name='vod_stream_with_session_and_profile'),

    # Generic VOD streaming (supports movies, episodes, series) - legacy patterns
    path('<str:content_type>/<uuid:content_id>', views.VODStreamView.as_view(), name='vod_stream'),
    path('<str:content_type>/<uuid:content_id>/<int:profile_id>/', views.VODStreamView.as_view(), name='vod_stream_with_profile'),

    # VOD playlist generation
    path('playlist/', views.VODPlaylistView.as_view(), name='vod_playlist'),
    path('playlist/<int:profile_id>/', views.VODPlaylistView.as_view(), name='vod_playlist_with_profile'),

    # Position tracking
    path('position/<uuid:content_id>/', views.VODPositionView.as_view(), name='vod_position'),
]
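These route names are referenced elsewhere in the PR through the 'proxy:vod_proxy' namespace; a small sketch of building a stream URL from them (the UUID is a placeholder):

from django.urls import reverse
from uuid import uuid4

movie_uuid = uuid4()  # placeholder; normally Movie.uuid
reverse("proxy:vod_proxy:vod_stream",
        kwargs={"content_type": "movie", "content_id": movie_uuid})
# -> "/proxy/vod/movie/<uuid>" (prefix depends on where apps.proxy.urls is mounted)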
apps/proxy/vod_proxy/utils.py (new file, 58 lines)
@@ -0,0 +1,58 @@
"""
Utility functions for VOD proxy operations.
"""

import logging
from django.http import HttpResponse

logger = logging.getLogger(__name__)


def get_client_info(request):
    """
    Extract client IP and User-Agent from request.

    Args:
        request: Django HttpRequest object

    Returns:
        tuple: (client_ip, user_agent)
    """
    # Get client IP, checking for proxy headers
    client_ip = request.META.get('HTTP_X_FORWARDED_FOR')
    if client_ip:
        # Take the first IP if there are multiple (comma-separated)
        client_ip = client_ip.split(',')[0].strip()
    else:
        client_ip = request.META.get('HTTP_X_REAL_IP') or request.META.get('REMOTE_ADDR', 'unknown')

    # Get User-Agent
    user_agent = request.META.get('HTTP_USER_AGENT', 'unknown')

    return client_ip, user_agent


def create_vod_response(content, content_type='video/mp4', filename=None):
    """
    Create a streaming HTTP response for VOD content.

    Args:
        content: Content to stream (file-like object or bytes)
        content_type: MIME type of the content
        filename: Optional filename for Content-Disposition header

    Returns:
        HttpResponse: Configured HTTP response for streaming
    """
    response = HttpResponse(content, content_type=content_type)

    if filename:
        response['Content-Disposition'] = f'attachment; filename="{filename}"'

    # Add headers for streaming
    response['Accept-Ranges'] = 'bytes'
    response['Cache-Control'] = 'no-cache, no-store, must-revalidate'
    response['Pragma'] = 'no-cache'
    response['Expires'] = '0'

    return response
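A short usage sketch for the two helpers, using Django's RequestFactory; all values are illustrative.

from django.test import RequestFactory

request = RequestFactory().get(
    "/proxy/vod/movie/x/",
    HTTP_X_FORWARDED_FOR="203.0.113.7, 10.0.0.1",
    HTTP_USER_AGENT="VLC/3.0.20",
)
client_ip, user_agent = get_client_info(request)
# client_ip == "203.0.113.7", user_agent == "VLC/3.0.20"

response = create_vod_response(b"", filename="movie.mp4")
# response["Accept-Ranges"] == "bytes"; Content-Disposition is set because a filename was given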
apps/proxy/vod_proxy/views.py (new file, 666 lines)
@@ -0,0 +1,666 @@
"""
|
||||
VOD (Video on Demand) proxy views for handling movie and series streaming.
|
||||
Supports M3U profiles for authentication and URL transformation.
|
||||
"""
|
||||
|
||||
import time
|
||||
import random
|
||||
import logging
|
||||
import requests
|
||||
from django.http import StreamingHttpResponse, JsonResponse, Http404, HttpResponse
|
||||
from django.shortcuts import get_object_or_404
|
||||
from django.views.decorators.csrf import csrf_exempt
|
||||
from django.utils.decorators import method_decorator
|
||||
from django.views import View
|
||||
from apps.vod.models import Movie, Series, Episode
|
||||
from apps.m3u.models import M3UAccount, M3UAccountProfile
|
||||
from apps.proxy.vod_proxy.connection_manager import VODConnectionManager
|
||||
from apps.proxy.vod_proxy.multi_worker_connection_manager import MultiWorkerVODConnectionManager
|
||||
from .utils import get_client_info, create_vod_response
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
@method_decorator(csrf_exempt, name='dispatch')
|
||||
class VODStreamView(View):
|
||||
"""Handle VOD streaming requests with M3U profile support"""
|
||||
|
||||
def get(self, request, content_type, content_id, session_id=None, profile_id=None):
|
||||
"""
|
||||
Stream VOD content (movies or series episodes) with session-based connection reuse
|
||||
|
||||
Args:
|
||||
content_type: 'movie', 'series', or 'episode'
|
||||
content_id: ID of the content
|
||||
session_id: Optional session ID from URL path (for persistent connections)
|
||||
profile_id: Optional M3U profile ID for authentication
|
||||
"""
|
||||
logger.info(f"[VOD-REQUEST] Starting VOD stream request: {content_type}/{content_id}, session: {session_id}, profile: {profile_id}")
|
||||
logger.info(f"[VOD-REQUEST] Full request path: {request.get_full_path()}")
|
||||
logger.info(f"[VOD-REQUEST] Request method: {request.method}")
|
||||
logger.info(f"[VOD-REQUEST] Request headers: {dict(request.headers)}")
|
||||
|
||||
try:
|
||||
client_ip, user_agent = get_client_info(request)
|
||||
|
||||
# Extract timeshift parameters from query string
|
||||
# Support multiple timeshift parameter formats
|
||||
utc_start = request.GET.get('utc_start') or request.GET.get('start') or request.GET.get('playliststart')
|
||||
utc_end = request.GET.get('utc_end') or request.GET.get('end') or request.GET.get('playlistend')
|
||||
offset = request.GET.get('offset') or request.GET.get('seek') or request.GET.get('t')
|
||||
|
||||
# VLC specific timeshift parameters
|
||||
if not utc_start and not offset:
|
||||
# Check for VLC-style timestamp parameters
|
||||
if 'timestamp' in request.GET:
|
||||
offset = request.GET.get('timestamp')
|
||||
elif 'time' in request.GET:
|
||||
offset = request.GET.get('time')
|
||||
|
||||
# Session ID now comes from URL path parameter
|
||||
# Remove legacy query parameter extraction since we're using path-based routing
|
||||
|
||||
# Extract Range header for seeking support
|
||||
range_header = request.META.get('HTTP_RANGE')
|
||||
|
||||
logger.info(f"[VOD-TIMESHIFT] Timeshift params - utc_start: {utc_start}, utc_end: {utc_end}, offset: {offset}")
|
||||
logger.info(f"[VOD-SESSION] Session ID: {session_id}")
|
||||
|
||||
# Log all query parameters for debugging
|
||||
if request.GET:
|
||||
logger.debug(f"[VOD-PARAMS] All query params: {dict(request.GET)}")
|
||||
|
||||
if range_header:
|
||||
logger.info(f"[VOD-RANGE] Range header: {range_header}")
|
||||
|
||||
# Parse the range to understand what position VLC is seeking to
|
||||
try:
|
||||
if 'bytes=' in range_header:
|
||||
range_part = range_header.replace('bytes=', '')
|
||||
if '-' in range_part:
|
||||
start_byte, end_byte = range_part.split('-', 1)
|
||||
if start_byte:
|
||||
start_pos_mb = int(start_byte) / (1024 * 1024)
|
||||
logger.info(f"[VOD-SEEK] Seeking to byte position: {start_byte} (~{start_pos_mb:.1f} MB)")
|
||||
if int(start_byte) > 0:
|
||||
logger.info(f"[VOD-SEEK] *** ACTUAL SEEK DETECTED *** Position: {start_pos_mb:.1f} MB")
|
||||
else:
|
||||
logger.info(f"[VOD-SEEK] Open-ended range request (from start)")
|
||||
if end_byte:
|
||||
end_pos_mb = int(end_byte) / (1024 * 1024)
|
||||
logger.info(f"[VOD-SEEK] End position: {end_byte} bytes (~{end_pos_mb:.1f} MB)")
|
||||
except Exception as e:
|
||||
logger.warning(f"[VOD-SEEK] Could not parse range header: {e}")
|
||||
|
||||
# Simple seek detection - track rapid requests
|
||||
current_time = time.time()
|
||||
request_key = f"{client_ip}:{content_type}:{content_id}"
|
||||
|
||||
if not hasattr(self.__class__, '_request_times'):
|
||||
self.__class__._request_times = {}
|
||||
|
||||
if request_key in self.__class__._request_times:
|
||||
time_diff = current_time - self.__class__._request_times[request_key]
|
||||
if time_diff < 5.0:
|
||||
logger.info(f"[VOD-SEEK] Rapid request detected ({time_diff:.1f}s) - likely seeking")
|
||||
|
||||
self.__class__._request_times[request_key] = current_time
|
||||
else:
|
||||
logger.info(f"[VOD-RANGE] No Range header - full content request")
|
||||
|
||||
logger.info(f"[VOD-CLIENT] Client info - IP: {client_ip}, User-Agent: {user_agent[:50]}...")
|
||||
|
||||
# If no session ID, create one and redirect to path-based URL
|
||||
if not session_id:
|
||||
new_session_id = f"vod_{int(time.time() * 1000)}_{random.randint(1000, 9999)}"
|
||||
logger.info(f"[VOD-SESSION] Creating new session: {new_session_id}")
|
||||
|
||||
# Build redirect URL with session ID in path, preserve query parameters
|
||||
path_parts = request.path.rstrip('/').split('/')
|
||||
|
||||
# Construct new path: /vod/movie/UUID/SESSION_ID or /vod/movie/UUID/SESSION_ID/PROFILE_ID/
|
||||
if profile_id:
|
||||
new_path = f"{'/'.join(path_parts)}/{new_session_id}/{profile_id}/"
|
||||
else:
|
||||
new_path = f"{'/'.join(path_parts)}/{new_session_id}"
|
||||
|
||||
# Preserve any query parameters (except session_id)
|
||||
query_params = dict(request.GET)
|
||||
query_params.pop('session_id', None) # Remove if present
|
||||
|
||||
if query_params:
|
||||
from urllib.parse import urlencode
|
||||
query_string = urlencode(query_params, doseq=True)
|
||||
redirect_url = f"{new_path}?{query_string}"
|
||||
else:
|
||||
redirect_url = new_path
|
||||
|
||||
logger.info(f"[VOD-SESSION] Redirecting to path-based URL: {redirect_url}")
|
||||
|
||||
return HttpResponse(
|
||||
status=301,
|
||||
headers={'Location': redirect_url}
|
||||
)
|
||||
|
||||
# Extract preferred M3U account ID and stream ID from query parameters
|
||||
preferred_m3u_account_id = request.GET.get('m3u_account_id')
|
||||
preferred_stream_id = request.GET.get('stream_id')
|
||||
|
||||
if preferred_m3u_account_id:
|
||||
try:
|
||||
preferred_m3u_account_id = int(preferred_m3u_account_id)
|
||||
except (ValueError, TypeError):
|
||||
logger.warning(f"[VOD-PARAM] Invalid m3u_account_id parameter: {preferred_m3u_account_id}")
|
||||
preferred_m3u_account_id = None
|
||||
|
||||
if preferred_stream_id:
|
||||
logger.info(f"[VOD-PARAM] Preferred stream ID: {preferred_stream_id}")
|
||||
|
||||
# Get the content object and its relation
|
||||
content_obj, relation = self._get_content_and_relation(content_type, content_id, preferred_m3u_account_id, preferred_stream_id)
|
||||
if not content_obj or not relation:
|
||||
logger.error(f"[VOD-ERROR] Content or relation not found: {content_type} {content_id}")
|
||||
raise Http404(f"Content not found: {content_type} {content_id}")
|
||||
|
||||
logger.info(f"[VOD-CONTENT] Found content: {getattr(content_obj, 'name', 'Unknown')}")
|
||||
|
||||
# Get M3U account from relation
|
||||
m3u_account = relation.m3u_account
|
||||
logger.info(f"[VOD-ACCOUNT] Using M3U account: {m3u_account.name}")
|
||||
|
||||
# Get stream URL from relation
|
||||
stream_url = self._get_stream_url_from_relation(relation)
|
||||
logger.info(f"[VOD-CONTENT] Content URL: {stream_url or 'No URL found'}")
|
||||
|
||||
if not stream_url:
|
||||
logger.error(f"[VOD-ERROR] No stream URL available for {content_type} {content_id}")
|
||||
return HttpResponse("No stream URL available", status=503)
|
||||
|
||||
# Get M3U profile
|
||||
m3u_profile = self._get_m3u_profile(m3u_account, profile_id, user_agent)
|
||||
|
||||
if not m3u_profile:
|
||||
logger.error(f"[VOD-ERROR] No suitable M3U profile found for {content_type} {content_id}")
|
||||
return HttpResponse("No available stream", status=503)
|
||||
|
||||
logger.info(f"[VOD-PROFILE] Using M3U profile: {m3u_profile.id} (max_streams: {m3u_profile.max_streams}, current: {m3u_profile.current_viewers})")
|
||||
|
||||
# Connection tracking is handled by the connection manager
|
||||
# Transform URL based on profile
|
||||
final_stream_url = self._transform_url(stream_url, m3u_profile)
|
||||
logger.info(f"[VOD-URL] Final stream URL: {final_stream_url}")
|
||||
|
||||
# Validate stream URL
|
||||
if not final_stream_url or not final_stream_url.startswith(('http://', 'https://')):
|
||||
logger.error(f"[VOD-ERROR] Invalid stream URL: {final_stream_url}")
|
||||
return HttpResponse("Invalid stream URL", status=500)
|
||||
|
||||
# Get connection manager (Redis-backed for multi-worker support)
|
||||
connection_manager = MultiWorkerVODConnectionManager.get_instance()
|
||||
|
||||
|
||||
# Stream the content with session-based connection reuse
|
||||
logger.info("[VOD-STREAM] Calling connection manager to stream content")
|
||||
response = connection_manager.stream_content_with_session(
|
||||
session_id=session_id,
|
||||
content_obj=content_obj,
|
||||
stream_url=final_stream_url,
|
||||
m3u_profile=m3u_profile,
|
||||
client_ip=client_ip,
|
||||
user_agent=user_agent,
|
||||
request=request,
|
||||
utc_start=utc_start,
|
||||
utc_end=utc_end,
|
||||
offset=offset,
|
||||
range_header=range_header
|
||||
)
|
||||
|
||||
logger.info(f"[VOD-SUCCESS] Stream response created successfully, type: {type(response)}")
|
||||
return response
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"[VOD-EXCEPTION] Error streaming {content_type} {content_id}: {e}", exc_info=True)
|
||||
return HttpResponse(f"Streaming error: {str(e)}", status=500)
|
||||
|
||||
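In practice the GET flow above means a client's first request carries no session segment; Dispatcharr answers 301 with a session-scoped Location, and all subsequent range requests reuse that URL. A hedged sketch with requests, where the host and UUID are placeholders:

import requests

url = "http://dispatcharr.example:9191/proxy/vod/movie/0f8fad5b-d9cb-469f-a165-70867728950e"
with requests.get(url, stream=True, timeout=30) as resp:
    if resp.history:                        # the 301 hop that added the session segment
        print(resp.history[0].status_code)  # 301
    print(resp.url)                         # .../movie/<uuid>/vod_<millis>_<rand>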
def head(self, request, content_type, content_id, session_id=None, profile_id=None):
|
||||
"""
|
||||
Handle HEAD requests for FUSE filesystem integration
|
||||
|
||||
Returns content length and session URL header for subsequent GET requests
|
||||
"""
|
||||
logger.info(f"[VOD-HEAD] HEAD request: {content_type}/{content_id}, session: {session_id}, profile: {profile_id}")
|
||||
|
||||
try:
|
||||
# Get client info for M3U profile selection
|
||||
client_ip, user_agent = get_client_info(request)
|
||||
logger.info(f"[VOD-HEAD] Client info - IP: {client_ip}, User-Agent: {user_agent[:50] if user_agent else 'None'}...")
|
||||
|
||||
# If no session ID, create one (same logic as GET)
|
||||
if not session_id:
|
||||
new_session_id = f"vod_{int(time.time() * 1000)}_{random.randint(1000, 9999)}"
|
||||
logger.info(f"[VOD-HEAD] Creating new session for HEAD: {new_session_id}")
|
||||
|
||||
# Build session URL for response header
|
||||
path_parts = request.path.rstrip('/').split('/')
|
||||
if profile_id:
|
||||
session_url = f"{'/'.join(path_parts)}/{new_session_id}/{profile_id}/"
|
||||
else:
|
||||
session_url = f"{'/'.join(path_parts)}/{new_session_id}"
|
||||
|
||||
session_id = new_session_id
|
||||
else:
|
||||
# Session already in URL, construct the current session URL
|
||||
session_url = request.path
|
||||
logger.info(f"[VOD-HEAD] Using existing session: {session_id}")
|
||||
|
||||
# Extract preferred M3U account ID and stream ID from query parameters
|
||||
preferred_m3u_account_id = request.GET.get('m3u_account_id')
|
||||
preferred_stream_id = request.GET.get('stream_id')
|
||||
|
||||
if preferred_m3u_account_id:
|
||||
try:
|
||||
preferred_m3u_account_id = int(preferred_m3u_account_id)
|
||||
except (ValueError, TypeError):
|
||||
logger.warning(f"[VOD-HEAD] Invalid m3u_account_id parameter: {preferred_m3u_account_id}")
|
||||
preferred_m3u_account_id = None
|
||||
|
||||
if preferred_stream_id:
|
||||
logger.info(f"[VOD-HEAD] Preferred stream ID: {preferred_stream_id}")
|
||||
|
||||
# Get content and relation (same as GET)
|
||||
content_obj, relation = self._get_content_and_relation(content_type, content_id, preferred_m3u_account_id, preferred_stream_id)
|
||||
if not content_obj or not relation:
|
||||
logger.error(f"[VOD-HEAD] Content or relation not found: {content_type} {content_id}")
|
||||
return HttpResponse("Content not found", status=404)
|
||||
|
||||
# Get M3U account and stream URL
|
||||
m3u_account = relation.m3u_account
|
||||
stream_url = self._get_stream_url_from_relation(relation)
|
||||
if not stream_url:
|
||||
logger.error(f"[VOD-HEAD] No stream URL available for {content_type} {content_id}")
|
||||
return HttpResponse("No stream URL available", status=503)
|
||||
|
||||
# Get M3U profile
|
||||
m3u_profile = self._get_m3u_profile(m3u_account, profile_id, user_agent)
|
||||
if not m3u_profile:
|
||||
logger.error(f"[VOD-HEAD] No M3U profile found")
|
||||
return HttpResponse("Profile not found", status=404)
|
||||
|
||||
# Transform URL if needed
|
||||
final_stream_url = self._transform_url(stream_url, m3u_profile)
|
||||
|
||||
# Make a small range GET request to get content length since providers don't support HEAD
|
||||
# We'll use a tiny range to minimize data transfer but get the headers we need
|
||||
headers = {
|
||||
'User-Agent': user_agent or 'Dispatcharr/1.0',
|
||||
'Accept': '*/*',
|
||||
'Range': 'bytes=0-1' # Request only first 2 bytes
|
||||
}
|
||||
|
||||
logger.info(f"[VOD-HEAD] Making small range GET request to provider: {final_stream_url}")
|
||||
response = requests.get(final_stream_url, headers=headers, timeout=30, allow_redirects=True, stream=True)
|
||||
|
||||
# Check for range support - should be 206 for partial content
|
||||
if response.status_code == 206:
|
||||
# Parse Content-Range header to get total file size
|
||||
content_range = response.headers.get('Content-Range', '')
|
||||
if content_range:
|
||||
# Content-Range: bytes 0-1/1234567890
|
||||
total_size = content_range.split('/')[-1]
|
||||
logger.info(f"[VOD-HEAD] Got file size from Content-Range: {total_size}")
|
||||
else:
|
||||
logger.warning(f"[VOD-HEAD] No Content-Range header in 206 response")
|
||||
total_size = response.headers.get('Content-Length', '0')
|
||||
elif response.status_code == 200:
|
||||
# Server doesn't support range requests, use Content-Length from full response
|
||||
total_size = response.headers.get('Content-Length', '0')
|
||||
logger.info(f"[VOD-HEAD] Server doesn't support ranges, got Content-Length: {total_size}")
|
||||
else:
|
||||
logger.error(f"[VOD-HEAD] Provider GET request failed: {response.status_code}")
|
||||
return HttpResponse("Provider error", status=response.status_code)
|
||||
|
||||
# Close the small range request - we don't need to keep this connection
|
||||
response.close()
|
||||
|
||||
# Store the total content length in Redis for the persistent connection to use
|
||||
try:
|
||||
import redis
|
||||
r = redis.StrictRedis(host='localhost', port=6379, db=0, decode_responses=True)
|
||||
content_length_key = f"vod_content_length:{session_id}"
|
||||
r.set(content_length_key, total_size, ex=1800) # Store for 30 minutes
|
||||
logger.info(f"[VOD-HEAD] Stored total content length {total_size} for session {session_id}")
|
||||
except Exception as e:
|
||||
logger.error(f"[VOD-HEAD] Failed to store content length in Redis: {e}")
|
||||
|
||||
# Now create a persistent connection for the session (if one doesn't exist)
|
||||
# This ensures the FUSE GET requests will reuse the same connection
|
||||
|
||||
connection_manager = MultiWorkerVODConnectionManager.get_instance()
|
||||
|
||||
logger.info(f"[VOD-HEAD] Pre-creating persistent connection for session: {session_id}")
|
||||
|
||||
# We don't actually stream content here, just ensure connection is ready
|
||||
# The actual GET requests from FUSE will use the persistent connection
|
||||
|
||||
# Use the total_size we extracted from the range response
|
||||
content_type_header = response.headers.get('Content-Type', 'video/mp4')
|
||||
|
||||
logger.info(f"[VOD-HEAD] Provider response - Total Size: {total_size}, Type: {content_type_header}")
|
||||
|
||||
# Create response with content length and session URL header
|
||||
head_response = HttpResponse()
|
||||
head_response['Content-Length'] = total_size
|
||||
head_response['Content-Type'] = content_type_header
|
||||
head_response['Accept-Ranges'] = 'bytes'
|
||||
|
||||
# Custom header with session URL for FUSE
|
||||
head_response['X-Session-URL'] = session_url
|
||||
head_response['X-Dispatcharr-Session'] = session_id
|
||||
|
||||
logger.info(f"[VOD-HEAD] Returning HEAD response with session URL: {session_url}")
|
||||
return head_response
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"[VOD-HEAD] Error in HEAD request: {e}", exc_info=True)
|
||||
return HttpResponse(f"HEAD error: {str(e)}", status=500)
|
||||
|
||||
def _get_content_and_relation(self, content_type, content_id, preferred_m3u_account_id=None, preferred_stream_id=None):
|
||||
"""Get the content object and its M3U relation"""
|
||||
try:
|
||||
logger.info(f"[CONTENT-LOOKUP] Looking up {content_type} with UUID {content_id}")
|
||||
if preferred_m3u_account_id:
|
||||
logger.info(f"[CONTENT-LOOKUP] Preferred M3U account ID: {preferred_m3u_account_id}")
|
||||
if preferred_stream_id:
|
||||
logger.info(f"[CONTENT-LOOKUP] Preferred stream ID: {preferred_stream_id}")
|
||||
|
||||
if content_type == 'movie':
|
||||
content_obj = get_object_or_404(Movie, uuid=content_id)
|
||||
logger.info(f"[CONTENT-FOUND] Movie: {content_obj.name} (ID: {content_obj.id})")
|
||||
|
||||
# Filter by preferred stream ID first (most specific)
|
||||
relations_query = content_obj.m3u_relations.filter(m3u_account__is_active=True)
|
||||
if preferred_stream_id:
|
||||
specific_relation = relations_query.filter(stream_id=preferred_stream_id).first()
|
||||
if specific_relation:
|
||||
logger.info(f"[STREAM-SELECTED] Using specific stream: {specific_relation.stream_id} from provider: {specific_relation.m3u_account.name}")
|
||||
return content_obj, specific_relation
|
||||
else:
|
||||
logger.warning(f"[STREAM-FALLBACK] Preferred stream ID {preferred_stream_id} not found, falling back to account/priority selection")
|
||||
|
||||
# Filter by preferred M3U account if specified
|
||||
if preferred_m3u_account_id:
|
||||
specific_relation = relations_query.filter(m3u_account__id=preferred_m3u_account_id).first()
|
||||
if specific_relation:
|
||||
logger.info(f"[PROVIDER-SELECTED] Using preferred provider: {specific_relation.m3u_account.name}")
|
||||
return content_obj, specific_relation
|
||||
else:
|
||||
logger.warning(f"[PROVIDER-FALLBACK] Preferred M3U account {preferred_m3u_account_id} not found, using highest priority")
|
||||
|
||||
# Get the highest priority active relation (fallback or default)
|
||||
relation = relations_query.select_related('m3u_account').order_by('-m3u_account__priority', 'id').first()
|
||||
|
||||
if relation:
|
||||
logger.info(f"[PROVIDER-SELECTED] Using provider: {relation.m3u_account.name} (priority: {relation.m3u_account.priority})")
|
||||
|
||||
return content_obj, relation
|
||||
|
||||
elif content_type == 'episode':
|
||||
content_obj = get_object_or_404(Episode, uuid=content_id)
|
||||
logger.info(f"[CONTENT-FOUND] Episode: {content_obj.name} (ID: {content_obj.id}, Series: {content_obj.series.name})")
|
||||
|
||||
# Filter by preferred stream ID first (most specific)
|
||||
relations_query = content_obj.m3u_relations.filter(m3u_account__is_active=True)
|
||||
if preferred_stream_id:
|
||||
specific_relation = relations_query.filter(stream_id=preferred_stream_id).first()
|
||||
if specific_relation:
|
||||
logger.info(f"[STREAM-SELECTED] Using specific stream: {specific_relation.stream_id} from provider: {specific_relation.m3u_account.name}")
|
||||
return content_obj, specific_relation
|
||||
else:
|
||||
logger.warning(f"[STREAM-FALLBACK] Preferred stream ID {preferred_stream_id} not found, falling back to account/priority selection")
|
||||
|
||||
# Filter by preferred M3U account if specified
|
||||
if preferred_m3u_account_id:
|
||||
specific_relation = relations_query.filter(m3u_account__id=preferred_m3u_account_id).first()
|
||||
if specific_relation:
|
||||
logger.info(f"[PROVIDER-SELECTED] Using preferred provider: {specific_relation.m3u_account.name}")
|
||||
return content_obj, specific_relation
|
||||
else:
|
||||
logger.warning(f"[PROVIDER-FALLBACK] Preferred M3U account {preferred_m3u_account_id} not found, using highest priority")
|
||||
|
||||
# Get the highest priority active relation (fallback or default)
|
||||
relation = relations_query.select_related('m3u_account').order_by('-m3u_account__priority', 'id').first()
|
||||
|
||||
if relation:
|
||||
logger.info(f"[PROVIDER-SELECTED] Using provider: {relation.m3u_account.name} (priority: {relation.m3u_account.priority})")
|
||||
|
||||
return content_obj, relation
|
||||
|
||||
elif content_type == 'series':
|
||||
# For series, get the first episode
|
||||
series = get_object_or_404(Series, uuid=content_id)
|
||||
logger.info(f"[CONTENT-FOUND] Series: {series.name} (ID: {series.id})")
|
||||
episode = series.episodes.first()
|
||||
if not episode:
|
||||
logger.error(f"[CONTENT-ERROR] No episodes found for series {series.name}")
|
||||
return None, None
|
||||
|
||||
logger.info(f"[CONTENT-FOUND] First episode: {episode.name} (ID: {episode.id})")
|
||||
|
||||
# Filter by preferred stream ID first (most specific)
|
||||
relations_query = episode.m3u_relations.filter(m3u_account__is_active=True)
|
||||
if preferred_stream_id:
|
||||
specific_relation = relations_query.filter(stream_id=preferred_stream_id).first()
|
||||
if specific_relation:
|
||||
logger.info(f"[STREAM-SELECTED] Using specific stream: {specific_relation.stream_id} from provider: {specific_relation.m3u_account.name}")
|
||||
return episode, specific_relation
|
||||
else:
|
||||
logger.warning(f"[STREAM-FALLBACK] Preferred stream ID {preferred_stream_id} not found, falling back to account/priority selection")
|
||||
|
||||
# Filter by preferred M3U account if specified
|
||||
if preferred_m3u_account_id:
|
||||
specific_relation = relations_query.filter(m3u_account__id=preferred_m3u_account_id).first()
|
||||
if specific_relation:
|
||||
logger.info(f"[PROVIDER-SELECTED] Using preferred provider: {specific_relation.m3u_account.name}")
|
||||
return episode, specific_relation
|
||||
else:
|
||||
logger.warning(f"[PROVIDER-FALLBACK] Preferred M3U account {preferred_m3u_account_id} not found, using highest priority")
|
||||
|
||||
# Get the highest priority active relation (fallback or default)
|
||||
relation = relations_query.select_related('m3u_account').order_by('-m3u_account__priority', 'id').first()
|
||||
|
||||
if relation:
|
||||
logger.info(f"[PROVIDER-SELECTED] Using provider: {relation.m3u_account.name} (priority: {relation.m3u_account.priority})")
|
||||
|
||||
return episode, relation
|
||||
else:
|
||||
logger.error(f"[CONTENT-ERROR] Invalid content type: {content_type}")
|
||||
return None, None
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Error getting content object: {e}")
|
||||
return None, None
|
||||
|
||||
def _get_stream_url_from_relation(self, relation):
|
||||
"""Get stream URL from the M3U relation"""
|
||||
try:
|
||||
# Log the relation type and available attributes
|
||||
logger.info(f"[VOD-URL] Relation type: {type(relation).__name__}")
|
||||
logger.info(f"[VOD-URL] Account type: {relation.m3u_account.account_type}")
|
||||
logger.info(f"[VOD-URL] Stream ID: {getattr(relation, 'stream_id', 'N/A')}")
|
||||
|
||||
# First try the get_stream_url method (this should build URLs dynamically)
|
||||
if hasattr(relation, 'get_stream_url'):
|
||||
url = relation.get_stream_url()
|
||||
if url:
|
||||
logger.info(f"[VOD-URL] Built URL from get_stream_url(): {url}")
|
||||
return url
|
||||
else:
|
||||
logger.warning(f"[VOD-URL] get_stream_url() returned None")
|
||||
|
||||
logger.error(f"[VOD-URL] Relation has no get_stream_url method or it failed")
|
||||
return None
|
||||
except Exception as e:
|
||||
logger.error(f"[VOD-URL] Error getting stream URL from relation: {e}", exc_info=True)
|
||||
return None
|
||||
|
||||
def _get_m3u_profile(self, m3u_account, profile_id, user_agent):
|
||||
"""Get appropriate M3U profile for streaming"""
|
||||
try:
|
||||
# If specific profile requested, try to use it
|
||||
if profile_id:
|
||||
try:
|
||||
profile = M3UAccountProfile.objects.get(
|
||||
id=profile_id,
|
||||
m3u_account=m3u_account,
|
||||
is_active=True
|
||||
)
|
||||
if profile.current_viewers < profile.max_streams or profile.max_streams == 0:
|
||||
return profile
|
||||
except M3UAccountProfile.DoesNotExist:
|
||||
pass
|
||||
|
||||
# Find available profile based on user agent matching
|
||||
profiles = M3UAccountProfile.objects.filter(
|
||||
m3u_account=m3u_account,
|
||||
is_active=True
|
||||
).order_by('current_viewers')
|
||||
|
||||
for profile in profiles:
|
||||
# Check if profile matches user agent pattern
|
||||
if self._matches_user_agent_pattern(profile, user_agent):
|
||||
if profile.current_viewers < profile.max_streams or profile.max_streams == 0:
|
||||
return profile
|
||||
|
||||
# Fallback to default profile
|
||||
return profiles.filter(is_default=True).first()
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Error getting M3U profile: {e}")
|
||||
return None
|
||||
|
||||
def _matches_user_agent_pattern(self, profile, user_agent):
|
||||
"""Check if user agent matches profile pattern"""
|
||||
try:
|
||||
import re
|
||||
pattern = profile.search_pattern
|
||||
if pattern and user_agent:
|
||||
return bool(re.search(pattern, user_agent, re.IGNORECASE))
|
||||
return True # If no pattern, match all
|
||||
except Exception:
|
||||
return True
|
||||
|
||||
def _transform_url(self, original_url, m3u_profile):
|
||||
"""Transform URL based on M3U profile settings"""
|
||||
try:
|
||||
import re
|
||||
|
||||
if not original_url:
|
||||
return None
|
||||
|
||||
search_pattern = m3u_profile.search_pattern
|
||||
replace_pattern = m3u_profile.replace_pattern
|
||||
safe_replace_pattern = re.sub(r'\$(\d+)', r'\\\1', replace_pattern)
|
||||
|
||||
if search_pattern and replace_pattern:
|
||||
transformed_url = re.sub(search_pattern, safe_replace_pattern, original_url)
|
||||
return transformed_url
|
||||
|
||||
return original_url
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Error transforming URL: {e}")
|
||||
return original_url
|
||||
|
||||
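A worked example of the substitution above, showing why '$1'-style groups in the profile's replace pattern are rewritten to backslash references before the final re.sub; the profile values are hypothetical.

import re

search_pattern  = r"^http://provider\.example/(.*)$"
replace_pattern = "http://user:pass@provider.example/$1"      # as stored on the profile

safe_replace = re.sub(r"\$(\d+)", r"\\\1", replace_pattern)   # "$1" becomes "\1"
re.sub(search_pattern, safe_replace, "http://provider.example/movie/42.mp4")
# -> "http://user:pass@provider.example/movie/42.mp4"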
@method_decorator(csrf_exempt, name='dispatch')
|
||||
class VODPlaylistView(View):
|
||||
"""Generate M3U playlists for VOD content"""
|
||||
|
||||
def get(self, request, profile_id=None):
|
||||
"""Generate VOD playlist"""
|
||||
try:
|
||||
# Get profile if specified
|
||||
m3u_profile = None
|
||||
if profile_id:
|
||||
try:
|
||||
m3u_profile = M3UAccountProfile.objects.get(
|
||||
id=profile_id,
|
||||
is_active=True
|
||||
)
|
||||
except M3UAccountProfile.DoesNotExist:
|
||||
return HttpResponse("Profile not found", status=404)
|
||||
|
||||
# Generate playlist content
|
||||
playlist_content = self._generate_playlist(m3u_profile)
|
||||
|
||||
response = HttpResponse(playlist_content, content_type='application/vnd.apple.mpegurl')
|
||||
response['Content-Disposition'] = 'attachment; filename="vod_playlist.m3u8"'
|
||||
return response
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Error generating VOD playlist: {e}")
|
||||
return HttpResponse("Playlist generation error", status=500)
|
||||
|
||||
def _generate_playlist(self, m3u_profile=None):
|
||||
"""Generate M3U playlist content for VOD"""
|
||||
lines = ["#EXTM3U"]
|
||||
|
||||
# Add movies
|
||||
movies = Movie.objects.filter(is_active=True)
|
||||
if m3u_profile:
|
||||
movies = movies.filter(m3u_account=m3u_profile.m3u_account)
|
||||
|
||||
for movie in movies:
|
||||
profile_param = f"?profile={m3u_profile.id}" if m3u_profile else ""
|
||||
lines.append(f'#EXTINF:-1 tvg-id="{movie.tmdb_id}" group-title="Movies",{movie.title}')
|
||||
lines.append(f'/proxy/vod/movie/{movie.uuid}/{profile_param}')
|
||||
|
||||
# Add series
|
||||
series_list = Series.objects.filter(is_active=True)
|
||||
if m3u_profile:
|
||||
series_list = series_list.filter(m3u_account=m3u_profile.m3u_account)
|
||||
|
||||
for series in series_list:
|
||||
for episode in series.episodes.all():
|
||||
profile_param = f"?profile={m3u_profile.id}" if m3u_profile else ""
|
||||
episode_title = f"{series.title} - S{episode.season_number:02d}E{episode.episode_number:02d}"
|
||||
lines.append(f'#EXTINF:-1 tvg-id="{series.tmdb_id}" group-title="Series",{episode_title}')
|
||||
lines.append(f'/proxy/vod/episode/{episode.uuid}/{profile_param}')
|
||||
|
||||
return '\n'.join(lines)
|
||||
|
||||
|
||||
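The generated playlist is plain extended M3U; a trimmed example of the output, with made-up titles, IDs and UUIDs:

#EXTM3U
#EXTINF:-1 tvg-id="603" group-title="Movies",Example Movie
/proxy/vod/movie/0f8fad5b-d9cb-469f-a165-70867728950e/
#EXTINF:-1 tvg-id="1399" group-title="Series",Example Show - S01E01
/proxy/vod/episode/7c9e6679-7425-40de-944b-e07fc1f90ae7/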
@method_decorator(csrf_exempt, name='dispatch')
|
||||
class VODPositionView(View):
|
||||
"""Handle VOD position updates"""
|
||||
|
||||
def post(self, request, content_id):
|
||||
"""Update playback position for VOD content"""
|
||||
try:
|
||||
import json
|
||||
data = json.loads(request.body)
|
||||
client_id = data.get('client_id')
|
||||
position = data.get('position', 0)
|
||||
|
||||
# Find the content object
|
||||
content_obj = None
|
||||
try:
|
||||
content_obj = Movie.objects.get(uuid=content_id)
|
||||
except Movie.DoesNotExist:
|
||||
try:
|
||||
content_obj = Episode.objects.get(uuid=content_id)
|
||||
except Episode.DoesNotExist:
|
||||
return JsonResponse({'error': 'Content not found'}, status=404)
|
||||
|
||||
# Here you could store the position in a model or cache
|
||||
# For now, just return success
|
||||
logger.info(f"Position update for {content_obj.__class__.__name__} {content_id}: {position}s")
|
||||
|
||||
return JsonResponse({
|
||||
'success': True,
|
||||
'content_id': str(content_id),
|
||||
'position': position
|
||||
})
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Error updating VOD position: {e}")
|
||||
return JsonResponse({'error': str(e)}, status=500)
|
||||
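A sketch of the position update call this view accepts; the host and UUID are placeholders, and positions are currently only logged rather than persisted.

import requests

requests.post(
    "http://dispatcharr.example:9191/proxy/vod/position/0f8fad5b-d9cb-469f-a165-70867728950e/",
    json={"client_id": "living-room-tv", "position": 1325},
    timeout=10,
)
# -> {"success": true, "content_id": "...", "position": 1325}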
apps/vod/__init__.py (new file, 0 lines)
apps/vod/admin.py (new file, 67 lines)
@@ -0,0 +1,67 @@
from django.contrib import admin
from .models import (
    Series, VODCategory, Movie, Episode,
    M3USeriesRelation, M3UMovieRelation, M3UEpisodeRelation
)


@admin.register(VODCategory)
class VODCategoryAdmin(admin.ModelAdmin):
    list_display = ['name', 'category_type', 'created_at']
    list_filter = ['category_type', 'created_at']
    search_fields = ['name']


@admin.register(Series)
class SeriesAdmin(admin.ModelAdmin):
    list_display = ['name', 'year', 'genre', 'created_at']
    list_filter = ['year', 'created_at']
    search_fields = ['name', 'description', 'tmdb_id', 'imdb_id']
    readonly_fields = ['uuid', 'created_at', 'updated_at']


@admin.register(Movie)
class MovieAdmin(admin.ModelAdmin):
    list_display = ['name', 'year', 'genre', 'duration_secs', 'created_at']
    list_filter = ['year', 'created_at']
    search_fields = ['name', 'description', 'tmdb_id', 'imdb_id']
    readonly_fields = ['uuid', 'created_at', 'updated_at']

    def get_queryset(self, request):
        return super().get_queryset(request).select_related('logo')


@admin.register(Episode)
class EpisodeAdmin(admin.ModelAdmin):
    list_display = ['name', 'series', 'season_number', 'episode_number', 'duration_secs', 'created_at']
    list_filter = ['series', 'season_number', 'created_at']
    search_fields = ['name', 'description', 'series__name']
    readonly_fields = ['uuid', 'created_at', 'updated_at']

    def get_queryset(self, request):
        return super().get_queryset(request).select_related('series')


@admin.register(M3UMovieRelation)
class M3UMovieRelationAdmin(admin.ModelAdmin):
    list_display = ['movie', 'm3u_account', 'category', 'stream_id', 'created_at']
    list_filter = ['m3u_account', 'category', 'created_at']
    search_fields = ['movie__name', 'm3u_account__name', 'stream_id']
    readonly_fields = ['created_at', 'updated_at']


@admin.register(M3USeriesRelation)
class M3USeriesRelationAdmin(admin.ModelAdmin):
    list_display = ['series', 'm3u_account', 'category', 'external_series_id', 'created_at']
    list_filter = ['m3u_account', 'category', 'created_at']
    search_fields = ['series__name', 'm3u_account__name', 'external_series_id']
    readonly_fields = ['created_at', 'updated_at']


@admin.register(M3UEpisodeRelation)
class M3UEpisodeRelationAdmin(admin.ModelAdmin):
    list_display = ['episode', 'm3u_account', 'stream_id', 'created_at']
    list_filter = ['m3u_account', 'created_at']
    search_fields = ['episode__name', 'episode__series__name', 'm3u_account__name', 'stream_id']
    readonly_fields = ['created_at', 'updated_at']
apps/vod/api_urls.py (new file, 18 lines)
@@ -0,0 +1,18 @@
from django.urls import path, include
from rest_framework.routers import DefaultRouter
from .api_views import (
    MovieViewSet,
    EpisodeViewSet,
    SeriesViewSet,
    VODCategoryViewSet,
)

app_name = 'vod'

router = DefaultRouter()
router.register(r'movies', MovieViewSet, basename='movie')
router.register(r'episodes', EpisodeViewSet, basename='episode')
router.register(r'series', SeriesViewSet, basename='series')
router.register(r'categories', VODCategoryViewSet, basename='vodcategory')

urlpatterns = router.urls
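With the router registered, the API exposes the standard list and detail routes. A sketch of a list call, assuming the router is mounted under /api/vod/ (the prefix depends on the project URL configuration) and that a valid auth token is supplied:

import requests

resp = requests.get(
    "http://dispatcharr.example:9191/api/vod/movies/",
    params={"search": "matrix", "ordering": "-year", "page_size": 50},
    headers={"Authorization": "Bearer <token>"},   # auth scheme depends on deployment
    timeout=10,
)
movies = resp.json()["results"]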
apps/vod/api_views.py (new file, 471 lines)
|
|
@ -0,0 +1,471 @@
|
|||
from rest_framework import viewsets, status
|
||||
from rest_framework.response import Response
|
||||
from rest_framework.decorators import action
|
||||
from rest_framework.filters import SearchFilter, OrderingFilter
|
||||
from rest_framework.pagination import PageNumberPagination
|
||||
from django_filters.rest_framework import DjangoFilterBackend
|
||||
from django.shortcuts import get_object_or_404
|
||||
import django_filters
|
||||
import logging
|
||||
from apps.accounts.permissions import (
|
||||
Authenticated,
|
||||
permission_classes_by_action,
|
||||
)
|
||||
from .models import (
|
||||
Series, VODCategory, Movie, Episode,
|
||||
M3USeriesRelation, M3UMovieRelation, M3UEpisodeRelation
|
||||
)
|
||||
from .serializers import (
|
||||
MovieSerializer,
|
||||
EpisodeSerializer,
|
||||
SeriesSerializer,
|
||||
VODCategorySerializer,
|
||||
M3UMovieRelationSerializer,
|
||||
M3USeriesRelationSerializer,
|
||||
M3UEpisodeRelationSerializer
|
||||
)
|
||||
from .tasks import refresh_series_episodes, refresh_movie_advanced_data
|
||||
from django.utils import timezone
|
||||
from datetime import timedelta
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class VODPagination(PageNumberPagination):
|
||||
page_size = 20 # Default page size to match frontend default
|
||||
page_size_query_param = "page_size" # Allow clients to specify page size
|
||||
max_page_size = 100 # Prevent excessive page sizes for VOD content
|
||||
|
||||
|
||||
class MovieFilter(django_filters.FilterSet):
|
||||
name = django_filters.CharFilter(lookup_expr="icontains")
|
||||
m3u_account = django_filters.NumberFilter(field_name="m3u_relations__m3u_account__id")
|
||||
category = django_filters.CharFilter(method='filter_category')
|
||||
year = django_filters.NumberFilter()
|
||||
year_gte = django_filters.NumberFilter(field_name="year", lookup_expr="gte")
|
||||
year_lte = django_filters.NumberFilter(field_name="year", lookup_expr="lte")
|
||||
|
||||
class Meta:
|
||||
model = Movie
|
||||
fields = ['name', 'm3u_account', 'category', 'year']
|
||||
|
||||
def filter_category(self, queryset, name, value):
|
||||
"""Custom category filter that handles 'name|type' format"""
|
||||
if not value:
|
||||
return queryset
|
||||
|
||||
# Handle the format 'category_name|category_type'
|
||||
if '|' in value:
|
||||
category_name, category_type = value.split('|', 1)
|
||||
return queryset.filter(
|
||||
m3u_relations__category__name=category_name,
|
||||
m3u_relations__category__category_type=category_type
|
||||
)
|
||||
else:
|
||||
# Fallback: treat as category name only
|
||||
return queryset.filter(m3u_relations__category__name=value)
|
||||
|
||||
|
||||
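The 'name|type' convention above maps a single query parameter onto two related-category fields; for illustration, ?category=Action|movie (values hypothetical) is roughly equivalent to:

Movie.objects.filter(
    m3u_relations__category__name="Action",
    m3u_relations__category__category_type="movie",
)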
class MovieViewSet(viewsets.ReadOnlyModelViewSet):
|
||||
"""ViewSet for Movie content"""
|
||||
queryset = Movie.objects.all()
|
||||
serializer_class = MovieSerializer
|
||||
pagination_class = VODPagination
|
||||
|
||||
filter_backends = [DjangoFilterBackend, SearchFilter, OrderingFilter]
|
||||
filterset_class = MovieFilter
|
||||
search_fields = ['name', 'description', 'genre']
|
||||
ordering_fields = ['name', 'year', 'created_at']
|
||||
ordering = ['name']
|
||||
|
||||
def get_permissions(self):
|
||||
try:
|
||||
return [perm() for perm in permission_classes_by_action[self.action]]
|
||||
except KeyError:
|
||||
return [Authenticated()]
|
||||
|
||||
def get_queryset(self):
|
||||
# Only return movies that have active M3U relations
|
||||
return Movie.objects.filter(
|
||||
m3u_relations__m3u_account__is_active=True
|
||||
).distinct().select_related('logo').prefetch_related('m3u_relations__m3u_account')

    @action(detail=True, methods=['get'], url_path='providers')
    def get_providers(self, request, pk=None):
        """Get all providers (M3U accounts) that have this movie"""
        movie = self.get_object()
        relations = M3UMovieRelation.objects.filter(
            movie=movie,
            m3u_account__is_active=True
        ).select_related('m3u_account', 'category')

        serializer = M3UMovieRelationSerializer(relations, many=True)
        return Response(serializer.data)

    @action(detail=True, methods=['get'], url_path='provider-info')
    def provider_info(self, request, pk=None):
        """Get detailed movie information from the original provider, throttled to 24h."""
        movie = self.get_object()

        # Get the highest priority active relation
        relation = M3UMovieRelation.objects.filter(
            movie=movie,
            m3u_account__is_active=True
        ).select_related('m3u_account').order_by('-m3u_account__priority', 'id').first()

        if not relation:
            return Response(
                {'error': 'No active M3U account associated with this movie'},
                status=status.HTTP_400_BAD_REQUEST
            )

        force_refresh = request.query_params.get('force_refresh', 'false').lower() == 'true'
        now = timezone.now()
        needs_refresh = (
            force_refresh or
            not relation.last_advanced_refresh or
            (now - relation.last_advanced_refresh).total_seconds() > 86400
        )

        if needs_refresh:
            # Trigger advanced data refresh
            logger.debug(f"Refreshing advanced data for movie {movie.id} (relation ID: {relation.id})")
            refresh_movie_advanced_data(relation.id, force_refresh=force_refresh)

            # Refresh objects from database after task completion
            movie.refresh_from_db()
            relation.refresh_from_db()

        # Use refreshed data from database
        custom_props = relation.custom_properties or {}
        info = custom_props.get('detailed_info', {})
        movie_data = custom_props.get('movie_data', {})

        # Build response with available data
        response_data = {
            'id': movie.id,
            'uuid': movie.uuid,
            'stream_id': relation.stream_id,
            'name': info.get('name', movie.name),
            'o_name': info.get('o_name', ''),
            'description': info.get('description', info.get('plot', movie.description)),
            'plot': info.get('plot', info.get('description', movie.description)),
            'year': movie.year or info.get('year'),
            'release_date': (movie.custom_properties or {}).get('release_date') or info.get('release_date') or info.get('releasedate', ''),
            'genre': movie.genre or info.get('genre', ''),
            'director': (movie.custom_properties or {}).get('director') or info.get('director', ''),
            'actors': (movie.custom_properties or {}).get('actors') or info.get('actors', ''),
            'country': (movie.custom_properties or {}).get('country') or info.get('country', ''),
            'rating': movie.rating or info.get('rating', movie.rating or 0),
            'tmdb_id': movie.tmdb_id or info.get('tmdb_id', ''),
            'imdb_id': movie.imdb_id or info.get('imdb_id', ''),
            'youtube_trailer': (movie.custom_properties or {}).get('youtube_trailer') or info.get('youtube_trailer') or info.get('trailer', ''),
            'duration_secs': movie.duration_secs or info.get('duration_secs'),
            'age': info.get('age', ''),
            'backdrop_path': (movie.custom_properties or {}).get('backdrop_path') or info.get('backdrop_path', []),
            'cover': info.get('cover_big', ''),
            'cover_big': info.get('cover_big', ''),
            'movie_image': movie.logo.url if movie.logo else info.get('movie_image', ''),
            'bitrate': info.get('bitrate', 0),
            'video': info.get('video', {}),
            'audio': info.get('audio', {}),
            'container_extension': movie_data.get('container_extension', 'mp4'),
            'direct_source': movie_data.get('direct_source', ''),
            'category_id': movie_data.get('category_id', ''),
            'added': movie_data.get('added', ''),
            'm3u_account': {
                'id': relation.m3u_account.id,
                'name': relation.m3u_account.name,
                'account_type': relation.m3u_account.account_type
            }
        }
        return Response(response_data)

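A quick sanity check of the throttled provider-info endpoint. This sketch assumes the VOD router is mounted under /api/vod/ and that JWT bearer auth is in use; both the base URL and the token handling are assumptions about the deployment, not guaranteed by this diff:

    import requests

    BASE = "http://localhost:9191"                       # assumed Dispatcharr host/port
    headers = {"Authorization": "Bearer <token>"}        # assumed JWT access token

    # Cached provider data is reused for 24h; force_refresh=true bypasses the throttle.
    resp = requests.get(f"{BASE}/api/vod/movies/42/provider-info/",
                        params={"force_refresh": "false"}, headers=headers)
    resp.raise_for_status()
    print(resp.json()["name"], resp.json()["container_extension"])
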
class EpisodeFilter(django_filters.FilterSet):
    name = django_filters.CharFilter(lookup_expr="icontains")
    series = django_filters.NumberFilter(field_name="series__id")
    m3u_account = django_filters.NumberFilter(field_name="m3u_account__id")
    season_number = django_filters.NumberFilter()
    episode_number = django_filters.NumberFilter()

    class Meta:
        model = Episode
        fields = ['name', 'series', 'm3u_account', 'season_number', 'episode_number']


class SeriesFilter(django_filters.FilterSet):
    name = django_filters.CharFilter(lookup_expr="icontains")
    m3u_account = django_filters.NumberFilter(field_name="m3u_relations__m3u_account__id")
    category = django_filters.CharFilter(method='filter_category')
    year = django_filters.NumberFilter()
    year_gte = django_filters.NumberFilter(field_name="year", lookup_expr="gte")
    year_lte = django_filters.NumberFilter(field_name="year", lookup_expr="lte")

    class Meta:
        model = Series
        fields = ['name', 'm3u_account', 'category', 'year']

    def filter_category(self, queryset, name, value):
        """Custom category filter that handles 'name|type' format"""
        if not value:
            return queryset

        # Handle the format 'category_name|category_type'
        if '|' in value:
            category_name, category_type = value.split('|', 1)
            return queryset.filter(
                m3u_relations__category__name=category_name,
                m3u_relations__category__category_type=category_type
            )
        else:
            # Fallback: treat as category name only
            return queryset.filter(m3u_relations__category__name=value)

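The category filter accepts either a bare category name or the combined 'name|type' form handled by filter_category above. A minimal illustration, assuming the series list endpoint is exposed at /api/vod/series/ (the mount point is an assumption):

    import requests

    # Series in the "Action" category of type "series"; requests URL-encodes the '|' separator.
    requests.get("http://localhost:9191/api/vod/series/",
                 params={"category": "Action|series"})

    # Fallback: name-only lookup, matching any category_type
    requests.get("http://localhost:9191/api/vod/series/",
                 params={"category": "Action"})
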
class EpisodeViewSet(viewsets.ReadOnlyModelViewSet):
    """ViewSet for Episode content"""
    queryset = Episode.objects.all()
    serializer_class = EpisodeSerializer
    pagination_class = VODPagination

    filter_backends = [DjangoFilterBackend, SearchFilter, OrderingFilter]
    filterset_class = EpisodeFilter
    search_fields = ['name', 'description']
    ordering_fields = ['name', 'season_number', 'episode_number', 'created_at']
    ordering = ['series__name', 'season_number', 'episode_number']

    def get_permissions(self):
        try:
            return [perm() for perm in permission_classes_by_action[self.action]]
        except KeyError:
            return [Authenticated()]

    def get_queryset(self):
        return Episode.objects.select_related(
            'series', 'm3u_account'
        ).filter(m3u_account__is_active=True)


class SeriesViewSet(viewsets.ReadOnlyModelViewSet):
    """ViewSet for Series management"""
    queryset = Series.objects.all()
    serializer_class = SeriesSerializer
    pagination_class = VODPagination

    filter_backends = [DjangoFilterBackend, SearchFilter, OrderingFilter]
    filterset_class = SeriesFilter
    search_fields = ['name', 'description', 'genre']
    ordering_fields = ['name', 'year', 'created_at']
    ordering = ['name']

    def get_permissions(self):
        try:
            return [perm() for perm in permission_classes_by_action[self.action]]
        except KeyError:
            return [Authenticated()]

    def get_queryset(self):
        # Only return series that have active M3U relations
        return Series.objects.filter(
            m3u_relations__m3u_account__is_active=True
        ).distinct().select_related('logo').prefetch_related('episodes', 'm3u_relations__m3u_account')

    @action(detail=True, methods=['get'], url_path='providers')
    def get_providers(self, request, pk=None):
        """Get all providers (M3U accounts) that have this series"""
        series = self.get_object()
        relations = M3USeriesRelation.objects.filter(
            series=series,
            m3u_account__is_active=True
        ).select_related('m3u_account', 'category')

        serializer = M3USeriesRelationSerializer(relations, many=True)
        return Response(serializer.data)

    @action(detail=True, methods=['get'], url_path='episodes')
    def get_episodes(self, request, pk=None):
        """Get episodes for this series with provider information"""
        series = self.get_object()
        episodes = Episode.objects.filter(series=series).prefetch_related(
            'm3u_relations__m3u_account'
        ).order_by('season_number', 'episode_number')

        episodes_data = []
        for episode in episodes:
            episode_serializer = EpisodeSerializer(episode)
            episode_data = episode_serializer.data

            # Add provider information
            relations = M3UEpisodeRelation.objects.filter(
                episode=episode,
                m3u_account__is_active=True
            ).select_related('m3u_account')

            episode_data['providers'] = M3UEpisodeRelationSerializer(relations, many=True).data
            episodes_data.append(episode_data)

        return Response(episodes_data)

    @action(detail=True, methods=['get'], url_path='provider-info')
    def series_info(self, request, pk=None):
        """Get detailed series information, refreshing from provider if needed"""
        logger.debug(f"SeriesViewSet.series_info called for series ID: {pk}")
        series = self.get_object()
        logger.debug(f"Retrieved series: {series.name} (ID: {series.id})")

        # Get the highest priority active relation
        relation = M3USeriesRelation.objects.filter(
            series=series,
            m3u_account__is_active=True
        ).select_related('m3u_account').order_by('-m3u_account__priority', 'id').first()

        if not relation:
            return Response(
                {'error': 'No active M3U account associated with this series'},
                status=status.HTTP_400_BAD_REQUEST
            )

        try:
            # Check if we should refresh data (optional force refresh parameter)
            force_refresh = request.query_params.get('force_refresh', 'false').lower() == 'true'
            refresh_interval_hours = int(request.query_params.get("refresh_interval", 24))  # Default to 24 hours

            now = timezone.now()
            last_refreshed = relation.last_episode_refresh

            # Check if detailed data has been fetched
            custom_props = relation.custom_properties or {}
            episodes_fetched = custom_props.get('episodes_fetched', False)
            detailed_fetched = custom_props.get('detailed_fetched', False)

            # Force refresh if episodes have never been fetched or if forced
            if not episodes_fetched or not detailed_fetched or force_refresh:
                force_refresh = True
                logger.debug(f"Series {series.id} needs detailed/episode refresh, forcing refresh")
            elif last_refreshed is None or (now - last_refreshed) > timedelta(hours=refresh_interval_hours):
                force_refresh = True
                logger.debug(f"Series {series.id} refresh interval exceeded or never refreshed, forcing refresh")

            if force_refresh:
                logger.debug(f"Refreshing series {series.id} data from provider")
                # Use existing refresh logic with external_series_id
                from .tasks import refresh_series_episodes
                account = relation.m3u_account
                if account and account.is_active:
                    refresh_series_episodes(account, series, relation.external_series_id)
                    series.refresh_from_db()  # Reload from database after refresh
                    relation.refresh_from_db()  # Reload relation too

            # Return the database data (which should now be fresh)
            custom_props = relation.custom_properties or {}
            response_data = {
                'id': series.id,
                'series_id': relation.external_series_id,
                'name': series.name,
                'description': series.description,
                'year': series.year,
                'genre': series.genre,
                'rating': series.rating,
                'tmdb_id': series.tmdb_id,
                'imdb_id': series.imdb_id,
                'category_id': relation.category.id if relation.category else None,
                'category_name': relation.category.name if relation.category else None,
                'cover': {
                    'id': series.logo.id,
                    'url': series.logo.url,
                    'name': series.logo.name,
                } if series.logo else None,
                'last_refreshed': series.updated_at,
                'custom_properties': series.custom_properties,
                'm3u_account': {
                    'id': relation.m3u_account.id,
                    'name': relation.m3u_account.name,
                    'account_type': relation.m3u_account.account_type
                },
                'episodes_fetched': custom_props.get('episodes_fetched', False),
                'detailed_fetched': custom_props.get('detailed_fetched', False)
            }

            # Always include episodes for series info if they've been fetched
            include_episodes = request.query_params.get('include_episodes', 'true').lower() == 'true'
            if include_episodes and custom_props.get('episodes_fetched', False):
                logger.debug(f"Including episodes for series {series.id}")
                episodes_by_season = {}
                for episode in series.episodes.all().order_by('season_number', 'episode_number'):
                    season_key = str(episode.season_number or 0)
                    if season_key not in episodes_by_season:
                        episodes_by_season[season_key] = []

                    # Get episode relation for additional data
                    episode_relation = M3UEpisodeRelation.objects.filter(
                        episode=episode,
                        m3u_account=relation.m3u_account
                    ).first()

                    episode_data = {
                        'id': episode.id,
                        'uuid': episode.uuid,
                        'name': episode.name,
                        'title': episode.name,
                        'episode_number': episode.episode_number,
                        'season_number': episode.season_number,
                        'description': episode.description,
                        'air_date': episode.air_date,
                        'plot': episode.description,
                        'duration_secs': episode.duration_secs,
                        'rating': episode.rating,
                        'tmdb_id': episode.tmdb_id,
                        'imdb_id': episode.imdb_id,
                        'movie_image': episode.custom_properties.get('movie_image', '') if episode.custom_properties else '',
                        'container_extension': episode_relation.container_extension if episode_relation else 'mp4',
                        'type': 'episode',
                        'series': {
                            'id': series.id,
                            'name': series.name
                        }
                    }
                    episodes_by_season[season_key].append(episode_data)

                response_data['episodes'] = episodes_by_season
                logger.debug(f"Added {len(episodes_by_season)} seasons of episodes to response")
            elif include_episodes:
                # Episodes not yet fetched, include empty episodes list
                response_data['episodes'] = {}

            logger.debug(f"Returning series info response for series {series.id}")
            return Response(response_data)

        except Exception as e:
            logger.error(f"Error fetching series info for series {pk}: {str(e)}")
            return Response(
                {'error': f'Failed to fetch series information: {str(e)}'},
                status=status.HTTP_500_INTERNAL_SERVER_ERROR
            )

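For reference, a hedged example of the series provider-info action and its optional query parameters; the /api/vod/ prefix and the auth header are assumptions about how the app is mounted:

    import requests

    resp = requests.get(
        "http://localhost:9191/api/vod/series/7/provider-info/",
        params={
            "force_refresh": "false",     # set to "true" to bypass the refresh-interval check
            "refresh_interval": "24",     # hours between automatic provider refreshes
            "include_episodes": "true",   # attach episodes grouped by season
        },
        headers={"Authorization": "Bearer <token>"},
    )
    seasons = resp.json().get("episodes", {})
    print({season: len(eps) for season, eps in seasons.items()})
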
class VODCategoryFilter(django_filters.FilterSet):
    name = django_filters.CharFilter(lookup_expr="icontains")
    category_type = django_filters.ChoiceFilter(choices=VODCategory.CATEGORY_TYPE_CHOICES)
    m3u_account = django_filters.NumberFilter(field_name="m3u_account__id")

    class Meta:
        model = VODCategory
        fields = ['name', 'category_type', 'm3u_account']


class VODCategoryViewSet(viewsets.ReadOnlyModelViewSet):
    """ViewSet for VOD Categories"""
    queryset = VODCategory.objects.all()
    serializer_class = VODCategorySerializer

    filter_backends = [DjangoFilterBackend, SearchFilter, OrderingFilter]
    filterset_class = VODCategoryFilter
    search_fields = ['name']
    ordering = ['name']

    def get_permissions(self):
        try:
            return [perm() for perm in permission_classes_by_action[self.action]]
        except KeyError:
            return [Authenticated()]


apps/vod/apps.py (new file, 12 lines)
@ -0,0 +1,12 @@
from django.apps import AppConfig


class VODConfig(AppConfig):
    default_auto_field = 'django.db.models.BigAutoField'
    name = 'apps.vod'
    verbose_name = 'Video on Demand'

    def ready(self):
        """Initialize VOD app when Django is ready"""
        # Import models to ensure they're registered
        from . import models


apps/vod/migrations/0001_initial.py (new file, 201 lines)
@ -0,0 +1,201 @@
# Generated by Django 5.2.4 on 2025-08-28 18:16

import django.db.models.deletion
import uuid
from django.db import migrations, models


class Migration(migrations.Migration):

    initial = True

    dependencies = [
        ('dispatcharr_channels', '0024_alter_channelgroupm3uaccount_channel_group'),
        ('m3u', '0016_m3uaccount_priority'),
    ]

    operations = [
        migrations.CreateModel(
            name='Movie',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('uuid', models.UUIDField(default=uuid.uuid4, editable=False, unique=True)),
                ('name', models.CharField(max_length=255)),
                ('description', models.TextField(blank=True, null=True)),
                ('year', models.IntegerField(blank=True, null=True)),
                ('rating', models.CharField(blank=True, max_length=10, null=True)),
                ('genre', models.CharField(blank=True, max_length=255, null=True)),
                ('duration_secs', models.IntegerField(blank=True, help_text='Duration in seconds', null=True)),
                ('tmdb_id', models.CharField(blank=True, help_text='TMDB ID for metadata', max_length=50, null=True, unique=True)),
                ('imdb_id', models.CharField(blank=True, help_text='IMDB ID for metadata', max_length=50, null=True, unique=True)),
                ('custom_properties', models.JSONField(blank=True, help_text='Additional metadata and properties for the movie', null=True)),
                ('created_at', models.DateTimeField(auto_now_add=True)),
                ('updated_at', models.DateTimeField(auto_now=True)),
                ('logo', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='movie', to='dispatcharr_channels.logo')),
            ],
            options={
                'verbose_name': 'Movie',
                'verbose_name_plural': 'Movies',
                'ordering': ['name'],
            },
        ),
        migrations.CreateModel(
            name='Series',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('uuid', models.UUIDField(default=uuid.uuid4, editable=False, unique=True)),
                ('name', models.CharField(max_length=255)),
                ('description', models.TextField(blank=True, null=True)),
                ('year', models.IntegerField(blank=True, null=True)),
                ('rating', models.CharField(blank=True, max_length=10, null=True)),
                ('genre', models.CharField(blank=True, max_length=255, null=True)),
                ('tmdb_id', models.CharField(blank=True, help_text='TMDB ID for metadata', max_length=50, null=True, unique=True)),
                ('imdb_id', models.CharField(blank=True, help_text='IMDB ID for metadata', max_length=50, null=True, unique=True)),
                ('custom_properties', models.JSONField(blank=True, help_text='Additional metadata and properties for the series', null=True)),
                ('created_at', models.DateTimeField(auto_now_add=True)),
                ('updated_at', models.DateTimeField(auto_now=True)),
                ('logo', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='series', to='dispatcharr_channels.logo')),
            ],
            options={
                'verbose_name': 'Series',
                'verbose_name_plural': 'Series',
                'ordering': ['name'],
            },
        ),
        migrations.CreateModel(
            name='Episode',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('uuid', models.UUIDField(default=uuid.uuid4, editable=False, unique=True)),
                ('name', models.CharField(max_length=255)),
                ('description', models.TextField(blank=True, null=True)),
                ('air_date', models.DateField(blank=True, null=True)),
                ('rating', models.CharField(blank=True, max_length=10, null=True)),
                ('duration_secs', models.IntegerField(blank=True, help_text='Duration in seconds', null=True)),
                ('season_number', models.IntegerField(blank=True, null=True)),
                ('episode_number', models.IntegerField(blank=True, null=True)),
                ('tmdb_id', models.CharField(blank=True, db_index=True, help_text='TMDB ID for metadata', max_length=50, null=True)),
                ('imdb_id', models.CharField(blank=True, db_index=True, help_text='IMDB ID for metadata', max_length=50, null=True)),
                ('custom_properties', models.JSONField(blank=True, help_text='Custom properties for this episode', null=True)),
                ('created_at', models.DateTimeField(auto_now_add=True)),
                ('updated_at', models.DateTimeField(auto_now=True)),
                ('series', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='episodes', to='vod.series')),
            ],
            options={
                'verbose_name': 'Episode',
                'verbose_name_plural': 'Episodes',
                'ordering': ['series__name', 'season_number', 'episode_number'],
            },
        ),
        migrations.CreateModel(
            name='VODCategory',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=255)),
                ('category_type', models.CharField(choices=[('movie', 'Movie'), ('series', 'Series')], default='movie', help_text='Type of content this category contains', max_length=10)),
                ('created_at', models.DateTimeField(auto_now_add=True)),
                ('updated_at', models.DateTimeField(auto_now=True)),
            ],
            options={
                'verbose_name': 'VOD Category',
                'verbose_name_plural': 'VOD Categories',
                'ordering': ['name'],
                'unique_together': {('name', 'category_type')},
            },
        ),
        migrations.CreateModel(
            name='M3UVODCategoryRelation',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('enabled', models.BooleanField(default=False, help_text='Set to false to deactivate this category for the M3U account')),
                ('custom_properties', models.JSONField(blank=True, help_text='Provider-specific data like quality, language, etc.', null=True)),
                ('created_at', models.DateTimeField(auto_now_add=True)),
                ('updated_at', models.DateTimeField(auto_now=True)),
                ('m3u_account', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='category_relations', to='m3u.m3uaccount')),
                ('category', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='m3u_relations', to='vod.vodcategory')),
            ],
            options={
                'verbose_name': 'M3U VOD Category Relation',
                'verbose_name_plural': 'M3U VOD Category Relations',
            },
        ),
        migrations.CreateModel(
            name='M3USeriesRelation',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('external_series_id', models.CharField(help_text='External series ID from M3U provider', max_length=255)),
                ('custom_properties', models.JSONField(blank=True, help_text='Provider-specific data', null=True)),
                ('created_at', models.DateTimeField(auto_now_add=True)),
                ('updated_at', models.DateTimeField(auto_now=True)),
                ('last_episode_refresh', models.DateTimeField(blank=True, help_text='Last time episodes were refreshed', null=True)),
                ('m3u_account', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='series_relations', to='m3u.m3uaccount')),
                ('series', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='m3u_relations', to='vod.series')),
                ('category', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to='vod.vodcategory')),
            ],
            options={
                'verbose_name': 'M3U Series Relation',
                'verbose_name_plural': 'M3U Series Relations',
            },
        ),
        migrations.CreateModel(
            name='M3UMovieRelation',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('stream_id', models.CharField(help_text='External stream ID from M3U provider', max_length=255)),
                ('container_extension', models.CharField(blank=True, max_length=10, null=True)),
                ('custom_properties', models.JSONField(blank=True, help_text='Provider-specific data like quality, language, etc.', null=True)),
                ('created_at', models.DateTimeField(auto_now_add=True)),
                ('updated_at', models.DateTimeField(auto_now=True)),
                ('last_advanced_refresh', models.DateTimeField(blank=True, help_text='Last time advanced data was fetched from provider', null=True)),
                ('m3u_account', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='movie_relations', to='m3u.m3uaccount')),
                ('movie', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='m3u_relations', to='vod.movie')),
                ('category', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to='vod.vodcategory')),
            ],
            options={
                'verbose_name': 'M3U Movie Relation',
                'verbose_name_plural': 'M3U Movie Relations',
            },
        ),
        migrations.CreateModel(
            name='M3UEpisodeRelation',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('stream_id', models.CharField(help_text='External stream ID from M3U provider', max_length=255)),
                ('container_extension', models.CharField(blank=True, max_length=10, null=True)),
                ('custom_properties', models.JSONField(blank=True, help_text='Provider-specific data like quality, language, etc.', null=True)),
                ('created_at', models.DateTimeField(auto_now_add=True)),
                ('updated_at', models.DateTimeField(auto_now=True)),
                ('episode', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='m3u_relations', to='vod.episode')),
                ('m3u_account', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='episode_relations', to='m3u.m3uaccount')),
            ],
            options={
                'verbose_name': 'M3U Episode Relation',
                'verbose_name_plural': 'M3U Episode Relations',
                'unique_together': {('m3u_account', 'stream_id')},
            },
        ),
        migrations.AddConstraint(
            model_name='movie',
            constraint=models.UniqueConstraint(condition=models.Q(('tmdb_id__isnull', True), ('imdb_id__isnull', True)), fields=('name', 'year'), name='unique_movie_name_year_no_external_id'),
        ),
        migrations.AddConstraint(
            model_name='series',
            constraint=models.UniqueConstraint(condition=models.Q(('tmdb_id__isnull', True), ('imdb_id__isnull', True)), fields=('name', 'year'), name='unique_series_name_year_no_external_id'),
        ),
        migrations.AlterUniqueTogether(
            name='episode',
            unique_together={('series', 'season_number', 'episode_number')},
        ),
        migrations.AlterUniqueTogether(
            name='m3uvodcategoryrelation',
            unique_together={('m3u_account', 'category')},
        ),
        migrations.AlterUniqueTogether(
            name='m3useriesrelation',
            unique_together={('m3u_account', 'external_series_id')},
        ),
        migrations.AlterUniqueTogether(
            name='m3umovierelation',
            unique_together={('m3u_account', 'stream_id')},
        ),
    ]


apps/vod/migrations/__init__.py (new, empty file)

apps/vod/models.py (new file, 303 lines)
@ -0,0 +1,303 @@
from django.db import models
from django.db.models import Q
from django.utils import timezone
from django.contrib.contenttypes.fields import GenericForeignKey
from django.contrib.contenttypes.models import ContentType
from apps.m3u.models import M3UAccount
from apps.channels.models import Logo
import uuid


class VODCategory(models.Model):
    """Categories for organizing VODs (e.g., Action, Comedy, Drama)"""

    CATEGORY_TYPE_CHOICES = [
        ('movie', 'Movie'),
        ('series', 'Series'),
    ]

    name = models.CharField(max_length=255)
    category_type = models.CharField(
        max_length=10,
        choices=CATEGORY_TYPE_CHOICES,
        default='movie',
        help_text="Type of content this category contains"
    )
    created_at = models.DateTimeField(auto_now_add=True)
    updated_at = models.DateTimeField(auto_now=True)

    class Meta:
        verbose_name = 'VOD Category'
        verbose_name_plural = 'VOD Categories'
        ordering = ['name']
        unique_together = [('name', 'category_type')]

    @classmethod
    def bulk_create_and_fetch(cls, objects, ignore_conflicts=False):
        # Perform the bulk create operation
        cls.objects.bulk_create(objects, ignore_conflicts=ignore_conflicts)

        # Use the unique fields to fetch the created objects
        # Since we have unique_together on ('name', 'category_type'), we need both fields
        filter_conditions = []
        for obj in objects:
            filter_conditions.append(
                Q(name=obj.name, category_type=obj.category_type)
            )

        if filter_conditions:
            # Combine all conditions with OR
            combined_condition = filter_conditions[0]
            for condition in filter_conditions[1:]:
                combined_condition |= condition

            created_objects = cls.objects.filter(combined_condition)
        else:
            created_objects = cls.objects.none()

        return created_objects

    def __str__(self):
        return f"{self.name} ({self.get_category_type_display()})"

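A minimal sketch of how bulk_create_and_fetch can be used during an M3U sync to upsert categories and get persisted rows (with primary keys) back in one pass; the category names below are made-up sample data:

    pending = [
        VODCategory(name="Action", category_type="movie"),
        VODCategory(name="Documentary", category_type="series"),
    ]
    # ignore_conflicts=True lets existing (name, category_type) pairs pass silently,
    # and the follow-up filter returns both the new and the pre-existing rows.
    categories = VODCategory.bulk_create_and_fetch(pending, ignore_conflicts=True)
    by_key = {(c.name, c.category_type): c for c in categories}
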
class Series(models.Model):
    """Series information for TV shows"""
    uuid = models.UUIDField(default=uuid.uuid4, editable=False, unique=True)
    name = models.CharField(max_length=255)
    description = models.TextField(blank=True, null=True)
    year = models.IntegerField(blank=True, null=True)
    rating = models.CharField(max_length=10, blank=True, null=True)
    genre = models.CharField(max_length=255, blank=True, null=True)
    logo = models.ForeignKey(Logo, on_delete=models.SET_NULL, null=True, blank=True, related_name='series')

    # Metadata IDs for deduplication - these should be globally unique when present
    tmdb_id = models.CharField(max_length=50, blank=True, null=True, unique=True, help_text="TMDB ID for metadata")
    imdb_id = models.CharField(max_length=50, blank=True, null=True, unique=True, help_text="IMDB ID for metadata")

    # Additional metadata and properties
    custom_properties = models.JSONField(blank=True, null=True, help_text='Additional metadata and properties for the series')

    created_at = models.DateTimeField(auto_now_add=True)
    updated_at = models.DateTimeField(auto_now=True)

    class Meta:
        verbose_name = 'Series'
        verbose_name_plural = 'Series'
        ordering = ['name']
        # Only enforce name+year uniqueness when no external IDs are present
        constraints = [
            models.UniqueConstraint(
                fields=['name', 'year'],
                condition=models.Q(tmdb_id__isnull=True) & models.Q(imdb_id__isnull=True),
                name='unique_series_name_year_no_external_id'
            ),
        ]

    def __str__(self):
        year_str = f" ({self.year})" if self.year else ""
        return f"{self.name}{year_str}"


class Movie(models.Model):
    """Movie content"""
    uuid = models.UUIDField(default=uuid.uuid4, editable=False, unique=True)
    name = models.CharField(max_length=255)
    description = models.TextField(blank=True, null=True)
    year = models.IntegerField(blank=True, null=True)
    rating = models.CharField(max_length=10, blank=True, null=True)
    genre = models.CharField(max_length=255, blank=True, null=True)
    duration_secs = models.IntegerField(blank=True, null=True, help_text="Duration in seconds")
    logo = models.ForeignKey(Logo, on_delete=models.SET_NULL, null=True, blank=True, related_name='movie')

    # Metadata IDs for deduplication - these should be globally unique when present
    tmdb_id = models.CharField(max_length=50, blank=True, null=True, unique=True, help_text="TMDB ID for metadata")
    imdb_id = models.CharField(max_length=50, blank=True, null=True, unique=True, help_text="IMDB ID for metadata")

    # Additional metadata and properties
    custom_properties = models.JSONField(blank=True, null=True, help_text='Additional metadata and properties for the movie')

    created_at = models.DateTimeField(auto_now_add=True)
    updated_at = models.DateTimeField(auto_now=True)

    class Meta:
        verbose_name = 'Movie'
        verbose_name_plural = 'Movies'
        ordering = ['name']
        # Only enforce name+year uniqueness when no external IDs are present
        constraints = [
            models.UniqueConstraint(
                fields=['name', 'year'],
                condition=models.Q(tmdb_id__isnull=True) & models.Q(imdb_id__isnull=True),
                name='unique_movie_name_year_no_external_id'
            ),
        ]

    def __str__(self):
        year_str = f" ({self.year})" if self.year else ""
        return f"{self.name}{year_str}"


class Episode(models.Model):
    """Episode content for TV series"""
    uuid = models.UUIDField(default=uuid.uuid4, editable=False, unique=True)
    name = models.CharField(max_length=255)
    description = models.TextField(blank=True, null=True)
    air_date = models.DateField(blank=True, null=True)
    rating = models.CharField(max_length=10, blank=True, null=True)
    duration_secs = models.IntegerField(blank=True, null=True, help_text="Duration in seconds")

    # Episode specific fields
    series = models.ForeignKey(Series, on_delete=models.CASCADE, related_name='episodes')
    season_number = models.IntegerField(blank=True, null=True)
    episode_number = models.IntegerField(blank=True, null=True)

    # Metadata IDs
    tmdb_id = models.CharField(max_length=50, blank=True, null=True, help_text="TMDB ID for metadata", db_index=True)
    imdb_id = models.CharField(max_length=50, blank=True, null=True, help_text="IMDB ID for metadata", db_index=True)

    # Custom properties for episode
    custom_properties = models.JSONField(blank=True, null=True, help_text="Custom properties for this episode")

    created_at = models.DateTimeField(auto_now_add=True)
    updated_at = models.DateTimeField(auto_now=True)

    class Meta:
        verbose_name = 'Episode'
        verbose_name_plural = 'Episodes'
        ordering = ['series__name', 'season_number', 'episode_number']
        unique_together = [
            ('series', 'season_number', 'episode_number'),
        ]

    def __str__(self):
        season_ep = f"S{self.season_number or 0:02d}E{self.episode_number or 0:02d}"
        return f"{self.series.name} - {season_ep} - {self.name}"


# New relation models to link M3U accounts with VOD content

class M3USeriesRelation(models.Model):
    """Links M3U accounts to Series with provider-specific information"""
    m3u_account = models.ForeignKey(M3UAccount, on_delete=models.CASCADE, related_name='series_relations')
    series = models.ForeignKey(Series, on_delete=models.CASCADE, related_name='m3u_relations')
    category = models.ForeignKey(VODCategory, on_delete=models.SET_NULL, null=True, blank=True)

    # Provider-specific fields - renamed to avoid clash with series ForeignKey
    external_series_id = models.CharField(max_length=255, help_text="External series ID from M3U provider")
    custom_properties = models.JSONField(blank=True, null=True, help_text="Provider-specific data")

    # Timestamps
    created_at = models.DateTimeField(auto_now_add=True)
    updated_at = models.DateTimeField(auto_now=True)
    last_episode_refresh = models.DateTimeField(blank=True, null=True, help_text="Last time episodes were refreshed")

    class Meta:
        verbose_name = 'M3U Series Relation'
        verbose_name_plural = 'M3U Series Relations'
        unique_together = [('m3u_account', 'external_series_id')]

    def __str__(self):
        return f"{self.m3u_account.name} - {self.series.name}"


class M3UMovieRelation(models.Model):
    """Links M3U accounts to Movies with provider-specific information"""
    m3u_account = models.ForeignKey(M3UAccount, on_delete=models.CASCADE, related_name='movie_relations')
    movie = models.ForeignKey(Movie, on_delete=models.CASCADE, related_name='m3u_relations')
    category = models.ForeignKey(VODCategory, on_delete=models.SET_NULL, null=True, blank=True)

    # Streaming information (provider-specific)
    stream_id = models.CharField(max_length=255, help_text="External stream ID from M3U provider")
    container_extension = models.CharField(max_length=10, blank=True, null=True)

    # Provider-specific data
    custom_properties = models.JSONField(blank=True, null=True, help_text="Provider-specific data like quality, language, etc.")

    # Timestamps
    created_at = models.DateTimeField(auto_now_add=True)
    updated_at = models.DateTimeField(auto_now=True)
    last_advanced_refresh = models.DateTimeField(blank=True, null=True, help_text="Last time advanced data was fetched from provider")

    class Meta:
        verbose_name = 'M3U Movie Relation'
        verbose_name_plural = 'M3U Movie Relations'
        unique_together = [('m3u_account', 'stream_id')]

    def __str__(self):
        return f"{self.m3u_account.name} - {self.movie.name}"

    def get_stream_url(self):
        """Get the full stream URL for this movie from this provider"""
        # Build URL dynamically for XtreamCodes accounts
        if self.m3u_account.account_type == 'XC':
            server_url = self.m3u_account.server_url.rstrip('/')
            username = self.m3u_account.username
            password = self.m3u_account.password
            return f"{server_url}/movie/{username}/{password}/{self.stream_id}.{self.container_extension or 'mp4'}"
        else:
            # For other account types, we would need another way to build URLs
            return None

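To illustrate the URL shape that get_stream_url produces for an XtreamCodes account (the server, credentials, and stream ID below are made-up placeholders):

    relation = M3UMovieRelation.objects.select_related('m3u_account').first()
    relation.get_stream_url()
    # e.g. 'http://provider.example.com/movie/myuser/mypass/12345.mkv'
    # Non-XC accounts currently return None, since only XtreamCodes URLs can be rebuilt from the stored fields.
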
class M3UEpisodeRelation(models.Model):
    """Links M3U accounts to Episodes with provider-specific information"""
    m3u_account = models.ForeignKey(M3UAccount, on_delete=models.CASCADE, related_name='episode_relations')
    episode = models.ForeignKey(Episode, on_delete=models.CASCADE, related_name='m3u_relations')

    # Streaming information (provider-specific)
    stream_id = models.CharField(max_length=255, help_text="External stream ID from M3U provider")
    container_extension = models.CharField(max_length=10, blank=True, null=True)

    # Provider-specific data
    custom_properties = models.JSONField(blank=True, null=True, help_text="Provider-specific data like quality, language, etc.")

    # Timestamps
    created_at = models.DateTimeField(auto_now_add=True)
    updated_at = models.DateTimeField(auto_now=True)

    class Meta:
        verbose_name = 'M3U Episode Relation'
        verbose_name_plural = 'M3U Episode Relations'
        unique_together = [('m3u_account', 'stream_id')]

    def __str__(self):
        return f"{self.m3u_account.name} - {self.episode}"

    def get_stream_url(self):
        """Get the full stream URL for this episode from this provider"""
        from core.xtream_codes import Client as XtreamCodesClient

        if self.m3u_account.account_type == 'XC':
            # For XtreamCodes accounts, build the URL dynamically
            server_url = self.m3u_account.server_url.rstrip('/')
            username = self.m3u_account.username
            password = self.m3u_account.password
            return f"{server_url}/series/{username}/{password}/{self.stream_id}.{self.container_extension or 'mp4'}"
        else:
            # We might support non XC accounts in the future
            # For now, return None
            return None


class M3UVODCategoryRelation(models.Model):
    """Links M3U accounts to categories with provider-specific information"""
    m3u_account = models.ForeignKey(M3UAccount, on_delete=models.CASCADE, related_name='category_relations')
    category = models.ForeignKey(VODCategory, on_delete=models.CASCADE, related_name='m3u_relations')

    enabled = models.BooleanField(
        default=False, help_text="Set to false to deactivate this category for the M3U account"
    )

    custom_properties = models.JSONField(blank=True, null=True, help_text="Provider-specific data like quality, language, etc.")

    # Timestamps
    created_at = models.DateTimeField(auto_now_add=True)
    updated_at = models.DateTimeField(auto_now=True)

    class Meta:
        verbose_name = 'M3U VOD Category Relation'
        verbose_name_plural = 'M3U VOD Category Relations'
        unique_together = [('m3u_account', 'category')]

    def __str__(self):
        return f"{self.m3u_account.name} - {self.category.name}"


apps/vod/serializers.py (new file, 237 lines)
@ -0,0 +1,237 @@
from rest_framework import serializers
from .models import (
    Series, VODCategory, Movie, Episode,
    M3USeriesRelation, M3UMovieRelation, M3UEpisodeRelation, M3UVODCategoryRelation
)
from apps.channels.serializers import LogoSerializer
from apps.m3u.serializers import M3UAccountSerializer


class M3UVODCategoryRelationSerializer(serializers.ModelSerializer):
    category = serializers.IntegerField(source="category.id")
    m3u_account = serializers.IntegerField(source="m3u_account.id")

    class Meta:
        model = M3UVODCategoryRelation
        fields = ["category", "m3u_account", "enabled"]


class VODCategorySerializer(serializers.ModelSerializer):
    category_type_display = serializers.CharField(source='get_category_type_display', read_only=True)
    m3u_accounts = M3UVODCategoryRelationSerializer(many=True, source="m3u_relations", read_only=True)

    class Meta:
        model = VODCategory
        fields = [
            "id",
            "name",
            "category_type",
            "category_type_display",
            "m3u_accounts",
        ]


class SeriesSerializer(serializers.ModelSerializer):
    logo = LogoSerializer(read_only=True)
    episode_count = serializers.SerializerMethodField()

    class Meta:
        model = Series
        fields = '__all__'

    def get_episode_count(self, obj):
        return obj.episodes.count()


class MovieSerializer(serializers.ModelSerializer):
    logo = LogoSerializer(read_only=True)

    class Meta:
        model = Movie
        fields = '__all__'


class EpisodeSerializer(serializers.ModelSerializer):
    series = SeriesSerializer(read_only=True)

    class Meta:
        model = Episode
        fields = '__all__'


class M3USeriesRelationSerializer(serializers.ModelSerializer):
    series = SeriesSerializer(read_only=True)
    category = VODCategorySerializer(read_only=True)
    m3u_account = M3UAccountSerializer(read_only=True)

    class Meta:
        model = M3USeriesRelation
        fields = '__all__'


class M3UMovieRelationSerializer(serializers.ModelSerializer):
    movie = MovieSerializer(read_only=True)
    category = VODCategorySerializer(read_only=True)
    m3u_account = M3UAccountSerializer(read_only=True)
    quality_info = serializers.SerializerMethodField()

    class Meta:
        model = M3UMovieRelation
        fields = '__all__'

    def get_quality_info(self, obj):
        """Extract quality information from various sources"""
        quality_info = {}

        # 1. Check custom_properties first
        if obj.custom_properties:
            if obj.custom_properties.get('quality'):
                quality_info['quality'] = obj.custom_properties['quality']
                return quality_info
            elif obj.custom_properties.get('resolution'):
                quality_info['resolution'] = obj.custom_properties['resolution']
                return quality_info

        # 2. Try to get detailed info from the movie if available
        movie = obj.movie
        if hasattr(movie, 'video') and movie.video:
            video_data = movie.video
            if isinstance(video_data, dict) and 'width' in video_data and 'height' in video_data:
                width = video_data['width']
                height = video_data['height']
                quality_info['resolution'] = f"{width}x{height}"

                # Convert to common quality names (prioritize width for ultrawide/cinematic content)
                if width >= 3840:
                    quality_info['quality'] = '4K'
                elif width >= 1920:
                    quality_info['quality'] = '1080p'
                elif width >= 1280:
                    quality_info['quality'] = '720p'
                elif width >= 854:
                    quality_info['quality'] = '480p'
                else:
                    quality_info['quality'] = f"{width}x{height}"
                return quality_info

        # 3. Extract from movie name/title
        if movie and movie.name:
            name = movie.name
            if '4K' in name or '2160p' in name:
                quality_info['quality'] = '4K'
                return quality_info
            elif '1080p' in name or 'FHD' in name:
                quality_info['quality'] = '1080p'
                return quality_info
            elif '720p' in name or 'HD' in name:
                quality_info['quality'] = '720p'
                return quality_info
            elif '480p' in name:
                quality_info['quality'] = '480p'
                return quality_info

        # 4. Try bitrate as last resort
        if hasattr(movie, 'bitrate') and movie.bitrate and movie.bitrate > 0:
            bitrate = movie.bitrate
            if bitrate >= 6000:
                quality_info['quality'] = '4K'
            elif bitrate >= 3000:
                quality_info['quality'] = '1080p'
            elif bitrate >= 1500:
                quality_info['quality'] = '720p'
            else:
                quality_info['bitrate'] = f"{round(bitrate/1000)}Mbps"
            return quality_info

        # 5. Fallback - no quality info available
        return None


class M3UEpisodeRelationSerializer(serializers.ModelSerializer):
    episode = EpisodeSerializer(read_only=True)
    m3u_account = M3UAccountSerializer(read_only=True)
    quality_info = serializers.SerializerMethodField()

    class Meta:
        model = M3UEpisodeRelation
        fields = '__all__'

    def get_quality_info(self, obj):
        """Extract quality information from various sources"""
        quality_info = {}

        # 1. Check custom_properties first
        if obj.custom_properties:
            if obj.custom_properties.get('quality'):
                quality_info['quality'] = obj.custom_properties['quality']
                return quality_info
            elif obj.custom_properties.get('resolution'):
                quality_info['resolution'] = obj.custom_properties['resolution']
                return quality_info

        # 2. Try to get detailed info from the episode if available
        episode = obj.episode
        if hasattr(episode, 'video') and episode.video:
            video_data = episode.video
            if isinstance(video_data, dict) and 'width' in video_data and 'height' in video_data:
                width = video_data['width']
                height = video_data['height']
                quality_info['resolution'] = f"{width}x{height}"

                # Convert to common quality names (prioritize width for ultrawide/cinematic content)
                if width >= 3840:
                    quality_info['quality'] = '4K'
                elif width >= 1920:
                    quality_info['quality'] = '1080p'
                elif width >= 1280:
                    quality_info['quality'] = '720p'
                elif width >= 854:
                    quality_info['quality'] = '480p'
                else:
                    quality_info['quality'] = f"{width}x{height}"
                return quality_info

        # 3. Extract from episode name/title
        if episode and episode.name:
            name = episode.name
            if '4K' in name or '2160p' in name:
                quality_info['quality'] = '4K'
                return quality_info
            elif '1080p' in name or 'FHD' in name:
                quality_info['quality'] = '1080p'
                return quality_info
            elif '720p' in name or 'HD' in name:
                quality_info['quality'] = '720p'
                return quality_info
            elif '480p' in name:
                quality_info['quality'] = '480p'
                return quality_info

        # 4. Try bitrate as last resort
        if hasattr(episode, 'bitrate') and episode.bitrate and episode.bitrate > 0:
            bitrate = episode.bitrate
            if bitrate >= 6000:
                quality_info['quality'] = '4K'
            elif bitrate >= 3000:
                quality_info['quality'] = '1080p'
            elif bitrate >= 1500:
                quality_info['quality'] = '720p'
            else:
                quality_info['bitrate'] = f"{round(bitrate/1000)}Mbps"
            return quality_info

        # 5. Fallback - no quality info available
        return None


class EnhancedSeriesSerializer(serializers.ModelSerializer):
    """Enhanced serializer for series with provider information"""
    logo = LogoSerializer(read_only=True)
    providers = M3USeriesRelationSerializer(source='m3u_relations', many=True, read_only=True)
    episode_count = serializers.SerializerMethodField()

    class Meta:
        model = Series
        fields = '__all__'

    def get_episode_count(self, obj):
        return obj.episodes.count()


apps/vod/tasks.py (new file, 1873 lines; diff suppressed because it is too large)

apps/vod/urls.py (new file, 16 lines)
@ -0,0 +1,16 @@
from django.urls import path, include
from rest_framework.routers import DefaultRouter
from .api_views import MovieViewSet, EpisodeViewSet, SeriesViewSet, VODCategoryViewSet, VODConnectionViewSet

app_name = 'vod'

router = DefaultRouter()
router.register(r'movies', MovieViewSet)
router.register(r'episodes', EpisodeViewSet)
router.register(r'series', SeriesViewSet)
router.register(r'categories', VODCategoryViewSet)
router.register(r'connections', VODConnectionViewSet)

urlpatterns = [
    path('api/', include(router.urls)),
]

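With DefaultRouter, each registration yields list and detail routes plus the extra @action routes defined on the viewsets. Relative to wherever this urlconf is included (the exact mount prefix is deployment-specific and not fixed by this file alone), the generated paths look roughly like:

    # movies/                          list (GET)
    # movies/{pk}/                     detail (GET)
    # movies/{pk}/providers/           MovieViewSet.get_providers
    # movies/{pk}/provider-info/       MovieViewSet.provider_info
    # series/{pk}/episodes/            SeriesViewSet.get_episodes
    # series/{pk}/provider-info/       SeriesViewSet.series_info
    # categories/                      VODCategoryViewSet list
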

@ -8,7 +8,6 @@ router = DefaultRouter()
router.register(r'useragents', UserAgentViewSet, basename='useragent')
router.register(r'streamprofiles', StreamProfileViewSet, basename='streamprofile')
router.register(r'settings', CoreSettingsViewSet, basename='coresettings')
router.register(r'settings', CoreSettingsViewSet, basename='settings')
urlpatterns = [
    path('settings/env/', environment, name='token_refresh'),
    path('version/', version, name='version'),

@ -1,4 +1,3 @@
# yourapp/tasks.py
from celery import shared_task
from channels.layers import get_channel_layer
from asgiref.sync import async_to_sync

@ -633,3 +632,17 @@ def rehash_streams(keys):
    for account_id in acquired_locks:
        release_task_lock('refresh_single_m3u_account', account_id)
    logger.info(f"Released M3U task locks for {len(acquired_locks)} accounts")


@shared_task
def cleanup_vod_persistent_connections():
    """Clean up stale VOD persistent connections"""
    try:
        from apps.proxy.vod_proxy.connection_manager import VODConnectionManager

        # Clean up connections older than 30 minutes
        VODConnectionManager.cleanup_stale_persistent_connections(max_age_seconds=1800)
        logger.info("VOD persistent connection cleanup completed")

    except Exception as e:
        logger.error(f"Error during VOD persistent connection cleanup: {e}")

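A sketch of wiring the cleanup task into Celery beat so stale VOD connections are reaped periodically; the schedule key, the 15-minute interval, and the dotted task path are illustrative assumptions, not part of this diff:

    # settings.py (hypothetical beat entry)
    from celery.schedules import crontab

    CELERY_BEAT_SCHEDULE = {
        "cleanup-vod-persistent-connections": {
            "task": "core.tasks.cleanup_vod_persistent_connections",  # assumed task path
            "schedule": crontab(minute="*/15"),                       # every 15 minutes
        },
    }
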

@ -73,7 +73,6 @@ def stream_view(request, channel_uuid):
    default_profile = next((obj for obj in m3u_profiles if obj.is_default), None)
    profiles = [obj for obj in m3u_profiles if not obj.is_default]

    # -- Loop through profiles and pick the first active one --
    for profile in [default_profile] + profiles:
        logger.debug(f'Checking profile {profile.name}...')

@ -174,7 +173,7 @@ def stream_view(request, channel_uuid):
    persistent_lock.release()
    logger.debug("Persistent lock released for channel ID=%s", channel.id)

    return StreamingHttpResponse(
        stream_generator(process, stream, persistent_lock),
        content_type="video/MP2T"
    )
    return StreamingHttpResponse(
        stream_generator(process, stream, persistent_lock),
        content_type="video/MP2T"
    )

@ -196,6 +196,184 @@ class Client:
|
|||
"""Get the playback URL for a stream"""
|
||||
return f"{self.server_url}/live/{self.username}/{self.password}/{stream_id}.ts"
|
||||
|
||||
def get_episode_stream_url(self, stream_id, container_extension='mp4'):
|
||||
"""Get the playback URL for an episode stream"""
|
||||
return f"{self.server_url}/series/{self.username}/{self.password}/{stream_id}.{container_extension}"
|
||||
|
||||
def get_vod_stream_url(self, stream_id, container_extension='mp4'):
|
||||
"""Get the playback URL for a VOD stream"""
|
||||
return f"{self.server_url}/movie/{self.username}/{self.password}/{stream_id}.{container_extension}"
|
||||
|
||||
def get_vod_categories(self):
|
||||
"""Get VOD categories"""
|
||||
try:
|
||||
if not self.server_info:
|
||||
self.authenticate()
|
||||
|
||||
endpoint = "player_api.php"
|
||||
params = {
|
||||
'username': self.username,
|
||||
'password': self.password,
|
||||
'action': 'get_vod_categories'
|
||||
}
|
||||
|
||||
categories = self._make_request(endpoint, params)
|
||||
|
||||
if not isinstance(categories, list):
|
||||
error_msg = f"Invalid VOD categories response: {categories}"
|
||||
logger.error(error_msg)
|
||||
raise ValueError(error_msg)
|
||||
|
||||
logger.info(f"Successfully retrieved {len(categories)} VOD categories")
|
||||
return categories
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to get VOD categories: {str(e)}")
|
||||
logger.error(traceback.format_exc())
|
||||
raise
|
||||
|
||||
def get_vod_streams(self, category_id=None):
|
||||
"""Get VOD streams for a specific category"""
|
||||
try:
|
||||
if not self.server_info:
|
||||
self.authenticate()
|
||||
|
||||
endpoint = "player_api.php"
|
||||
params = {
|
||||
'username': self.username,
|
||||
'password': self.password,
|
||||
'action': 'get_vod_streams'
|
||||
}
|
||||
|
||||
if category_id:
|
||||
params['category_id'] = category_id
|
||||
|
||||
streams = self._make_request(endpoint, params)
|
||||
|
||||
if not isinstance(streams, list):
|
||||
error_msg = f"Invalid VOD streams response for category {category_id}: {streams}"
|
||||
logger.error(error_msg)
|
||||
raise ValueError(error_msg)
|
||||
|
||||
logger.info(f"Successfully retrieved {len(streams)} VOD streams for category {category_id}")
|
||||
return streams
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to get VOD streams for category {category_id}: {str(e)}")
|
||||
logger.error(traceback.format_exc())
|
||||
raise
|
||||
|
||||
def get_vod_info(self, vod_id):
|
||||
"""Get detailed information for a specific VOD"""
|
||||
try:
|
||||
if not self.server_info:
|
||||
self.authenticate()
|
||||
|
||||
endpoint = "player_api.php"
|
||||
params = {
|
||||
                'username': self.username,
                'password': self.password,
                'action': 'get_vod_info',
                'vod_id': vod_id
            }

            vod_info = self._make_request(endpoint, params)

            if not isinstance(vod_info, dict):
                error_msg = f"Invalid VOD info response for vod_id {vod_id}: {vod_info}"
                logger.error(error_msg)
                raise ValueError(error_msg)

            logger.info(f"Successfully retrieved VOD info for vod_id {vod_id}")
            return vod_info
        except Exception as e:
            logger.error(f"Failed to get VOD info for vod_id {vod_id}: {str(e)}")
            logger.error(traceback.format_exc())
            raise

    def get_series_categories(self):
        """Get series categories"""
        try:
            if not self.server_info:
                self.authenticate()

            endpoint = "player_api.php"
            params = {
                'username': self.username,
                'password': self.password,
                'action': 'get_series_categories'
            }

            categories = self._make_request(endpoint, params)

            if not isinstance(categories, list):
                error_msg = f"Invalid series categories response: {categories}"
                logger.error(error_msg)
                raise ValueError(error_msg)

            logger.info(f"Successfully retrieved {len(categories)} series categories")
            return categories
        except Exception as e:
            logger.error(f"Failed to get series categories: {str(e)}")
            logger.error(traceback.format_exc())
            raise

    def get_series(self, category_id=None):
        """Get series for a specific category"""
        try:
            if not self.server_info:
                self.authenticate()

            endpoint = "player_api.php"
            params = {
                'username': self.username,
                'password': self.password,
                'action': 'get_series'
            }

            if category_id:
                params['category_id'] = category_id

            series = self._make_request(endpoint, params)

            if not isinstance(series, list):
                error_msg = f"Invalid series response for category {category_id}: {series}"
                logger.error(error_msg)
                raise ValueError(error_msg)

            logger.info(f"Successfully retrieved {len(series)} series for category {category_id}")
            return series
        except Exception as e:
            logger.error(f"Failed to get series for category {category_id}: {str(e)}")
            logger.error(traceback.format_exc())
            raise

    def get_series_info(self, series_id):
        """Get detailed information for a specific series including episodes"""
        try:
            if not self.server_info:
                self.authenticate()

            endpoint = "player_api.php"
            params = {
                'username': self.username,
                'password': self.password,
                'action': 'get_series_info',
                'series_id': series_id
            }

            series_info = self._make_request(endpoint, params)

            if not isinstance(series_info, dict):
                error_msg = f"Invalid series info response for series_id {series_id}: {series_info}"
                logger.error(error_msg)
                raise ValueError(error_msg)

            logger.info(f"Successfully retrieved series info for series_id {series_id}")
            return series_info
        except Exception as e:
            logger.error(f"Failed to get series info for series_id {series_id}: {str(e)}")
            logger.error(traceback.format_exc())
            raise

    def close(self):
        """Close the session and cleanup resources"""
        if hasattr(self, 'session') and self.session:
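
A minimal usage sketch of how the series and VOD helpers above chain together. The client class name and constructor arguments are placeholders (they are not part of this hunk); only the method names come from the diff, and the 'episodes' key is the usual Xtream Codes field but should be treated as provider-dependent:

    # Hypothetical illustration, assuming a client class exposing the methods above.
    client = XCClient(server_url="http://provider.example", username="user", password="secret")  # assumed
    try:
        for category in client.get_series_categories():
            series_list = client.get_series(category_id=category.get('category_id'))
            for series in series_list:
                info = client.get_series_info(series['series_id'])
                episodes = info.get('episodes', {})  # shape varies by provider
    finally:
        client.close()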
|
|
|
|||
|
|
@ -28,6 +28,7 @@ INSTALLED_APPS = [
    "apps.output",
    "apps.proxy.apps.ProxyConfig",
    "apps.proxy.ts_proxy",
    "apps.vod.apps.VODConfig",
    "core",
    "daphne",
    "drf_yasg",
|
|
|
|||
|
|
@ -9,6 +9,7 @@ from drf_yasg import openapi
from .routing import websocket_urlpatterns
from apps.output.views import xc_player_api, xc_panel_api, xc_get, xc_xmltv
from apps.proxy.ts_proxy.views import stream_xc
from apps.output.views import xc_movie_stream, xc_series_stream

# Define schema_view for Swagger
schema_view = get_schema_view(
|
|
@ -55,11 +56,25 @@ urlpatterns = [
        stream_xc,
        name="xc_stream_endpoint",
    ),
    # XC VOD endpoints
    path(
        "movie/<str:username>/<str:password>/<str:stream_id>.<str:extension>",
        xc_movie_stream,
        name="xc_movie_stream",
    ),
    path(
        "series/<str:username>/<str:password>/<str:stream_id>.<str:extension>",
        xc_series_stream,
        name="xc_series_stream",
    ),

    re_path(r"^swagger/?$", schema_view.with_ui("swagger", cache_timeout=0), name="schema-swagger-ui"),
    # ReDoc UI
    path("redoc/", schema_view.with_ui("redoc", cache_timeout=0), name="schema-redoc"),
    # Optionally, serve the raw Swagger JSON
    path("swagger.json", schema_view.without_ui(cache_timeout=0), name="schema-json"),

    # VOD proxy is now handled by the main proxy URLs above
    # Catch-all routes should always be last
    path("", TemplateView.as_view(template_name="index.html")),  # React entry point
    path("<path:unused_path>", TemplateView.as_view(template_name="index.html")),
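
The two new routes follow the Xtream Codes URL convention, so a player can fetch VOD content with a plain HTTP GET. A sketch under assumed host, credentials, and stream id (none of which appear in this hunk):

    # Hypothetical request against the new XC-compatible VOD endpoints.
    import requests

    base = "http://dispatcharr.local:9191"             # assumed host/port
    movie_url = f"{base}/movie/user/secret/123.mkv"    # movie/<username>/<password>/<stream_id>.<extension>
    series_url = f"{base}/series/user/secret/456.mkv"  # series/<username>/<password>/<stream_id>.<extension>

    with requests.get(movie_url, stream=True) as resp:
        resp.raise_for_status()
        with open("movie_123.mkv", "wb") as fh:
            for chunk in resp.iter_content(chunk_size=1024 * 1024):
                fh.write(chunk)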
|
|
|
|||
|
|
@ -118,6 +118,10 @@ postgres_pid=$(su - postgres -c "$PG_BINDIR/pg_ctl -D ${POSTGRES_DIR} status" |
|
|||
echo "✅ Postgres started with PID $postgres_pid"
|
||||
pids+=("$postgres_pid")
|
||||
|
||||
# Ensure database encoding is UTF8
|
||||
. /app/docker/init/02-postgres.sh
|
||||
ensure_utf8_encoding
|
||||
|
||||
if [[ "$DISPATCHARR_ENV" = "dev" ]]; then
|
||||
. /app/docker/init/99-init-dev.sh
|
||||
echo "Starting frontend dev environment"
|
||||
|
|
|
|||
|
|
@ -1,5 +1,4 @@
|
|||
#!/bin/bash
|
||||
|
||||
# Temporary migration from postgres in /data to $POSTGRES_DIR. Can likely remove
|
||||
# some time in the future.
|
||||
if [ -e "/data/postgresql.conf" ]; then
|
||||
|
|
@ -115,9 +114,8 @@ if [ -z "$(ls -A $POSTGRES_DIR)" ]; then
|
|||
if ! su - postgres -c "psql -p ${POSTGRES_PORT} -tAc \"SELECT 1 FROM pg_database WHERE datname = '$POSTGRES_DB';\"" | grep -q 1; then
|
||||
# Create PostgreSQL database
|
||||
echo "Creating PostgreSQL database..."
|
||||
su - postgres -c "createdb -p ${POSTGRES_PORT} ${POSTGRES_DB}"
|
||||
|
||||
# Create user, set ownership, and grant privileges
|
||||
su - postgres -c "createdb -p ${POSTGRES_PORT} --encoding=UTF8 ${POSTGRES_DB}"
|
||||
# Create user, set ownership, and grant privileges
|
||||
echo "Creating PostgreSQL user..."
|
||||
su - postgres -c "psql -p ${POSTGRES_PORT} -d ${POSTGRES_DB}" <<EOF
|
||||
DO \$\$
|
||||
|
|
@ -140,3 +138,29 @@ EOF
        sleep 1
    done
fi

ensure_utf8_encoding() {
    # Check encoding of existing database
    CURRENT_ENCODING=$(su - postgres -c "psql -p ${POSTGRES_PORT} -tAc \"SELECT pg_encoding_to_char(encoding) FROM pg_database WHERE datname = '$POSTGRES_DB';\"" | tr -d ' ')
    if [ "$CURRENT_ENCODING" != "UTF8" ]; then
        echo "Database $POSTGRES_DB encoding is $CURRENT_ENCODING, converting to UTF8..."
        DUMP_FILE="/tmp/${POSTGRES_DB}_utf8_dump_$(date +%s).sql"
        # Dump database (include permissions and ownership)
        su - postgres -c "pg_dump -p ${POSTGRES_PORT} $POSTGRES_DB > $DUMP_FILE"
        # Drop and recreate database with UTF8 encoding using template0
        su - postgres -c "dropdb -p ${POSTGRES_PORT} $POSTGRES_DB"
        # Recreate database with UTF8 encoding
        su - postgres -c "createdb -p ${POSTGRES_PORT} --encoding=UTF8 --template=template0 ${POSTGRES_DB}"

        # Restore data
        su - postgres -c "psql -p ${POSTGRES_PORT} -d $POSTGRES_DB < $DUMP_FILE"
        #configure_db

        rm -f "$DUMP_FILE"
        echo "Database $POSTGRES_DB converted to UTF8 and permissions set."
    fi
}

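
The shell function above boils down to a single catalog query plus a dump/drop/recreate/restore cycle. The same encoding check can be expressed from inside the Django app for debugging; this is only an illustration of the query the script runs, not code that is part of this change:

    # Sketch: verify the active database encoding via Django's DB connection.
    from django.db import connection

    def current_db_encoding():
        with connection.cursor() as cur:
            cur.execute(
                "SELECT pg_encoding_to_char(encoding) "
                "FROM pg_database WHERE datname = current_database()"
            )
            return cur.fetchone()[0]

    # ensure_utf8_encoding only rebuilds the database when this is not 'UTF8'.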
|
|
|
|||
|
|
@ -7,13 +7,14 @@ server {
|
|||
proxy_connect_timeout 75;
|
||||
proxy_send_timeout 300;
|
||||
proxy_read_timeout 300;
|
||||
client_max_body_size 0; # Allow file uploads up to 128MB
|
||||
client_max_body_size 0;
|
||||
|
||||
proxy_set_header X-Real-IP $remote_addr;
|
||||
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||
proxy_set_header X-Forwarded-Host $host:$server_port;
|
||||
proxy_set_header X-Forwarded-Proto $scheme;
|
||||
proxy_set_header Host $host;
|
||||
proxy_set_header X-Forwarded-Port $server_port;
|
||||
|
||||
# Serve Django via uWSGI
|
||||
location / {
|
||||
|
|
|
|||
|
|
@ -16,7 +16,9 @@ import DVR from './pages/DVR';
|
|||
import Settings from './pages/Settings';
|
||||
import Users from './pages/Users';
|
||||
import LogosPage from './pages/Logos';
|
||||
import VODsPage from './pages/VODs';
|
||||
import useAuthStore from './store/auth';
|
||||
import useLogosStore from './store/logos';
|
||||
import FloatingVideo from './components/FloatingVideo';
|
||||
import { WebsocketProvider } from './WebSocket';
|
||||
import { Box, AppShell, MantineProvider } from '@mantine/core';
|
||||
|
|
@ -37,6 +39,8 @@ const defaultRoute = '/channels';
|
|||
|
||||
const App = () => {
|
||||
const [open, setOpen] = useState(true);
|
||||
const [backgroundLoadingStarted, setBackgroundLoadingStarted] =
|
||||
useState(false);
|
||||
const isAuthenticated = useAuthStore((s) => s.isAuthenticated);
|
||||
const setIsAuthenticated = useAuthStore((s) => s.setIsAuthenticated);
|
||||
const logout = useAuthStore((s) => s.logout);
|
||||
|
|
@ -76,6 +80,11 @@ const App = () => {
|
|||
const loggedIn = await initializeAuth();
|
||||
if (loggedIn) {
|
||||
await initData();
|
||||
// Start background logo loading after app is fully initialized (only once)
|
||||
if (!backgroundLoadingStarted) {
|
||||
setBackgroundLoadingStarted(true);
|
||||
useLogosStore.getState().startBackgroundLoading();
|
||||
}
|
||||
} else {
|
||||
await logout();
|
||||
}
|
||||
|
|
@ -86,7 +95,7 @@ const App = () => {
|
|||
};
|
||||
|
||||
checkAuth();
|
||||
}, [initializeAuth, initData, logout]);
|
||||
}, [initializeAuth, initData, logout, backgroundLoadingStarted]);
|
||||
|
||||
return (
|
||||
<MantineProvider
|
||||
|
|
@ -135,6 +144,7 @@ const App = () => {
|
|||
<Route path="/users" element={<Users />} />
|
||||
<Route path="/settings" element={<Settings />} />
|
||||
<Route path="/logos" element={<LogosPage />} />
|
||||
<Route path="/vods" element={<VODsPage />} />
|
||||
</>
|
||||
) : (
|
||||
<Route path="/login" element={<Login needsSuperuser />} />
|
||||
|
|
|
|||
|
|
@ -17,7 +17,7 @@ import API from './api';
|
|||
import useSettingsStore from './store/settings';
|
||||
import useAuthStore from './store/auth';
|
||||
|
||||
export const WebsocketContext = createContext([false, () => { }, null]);
|
||||
export const WebsocketContext = createContext([false, () => {}, null]);
|
||||
|
||||
export const WebsocketProvider = ({ children }) => {
|
||||
const [isReady, setIsReady] = useState(false);
|
||||
|
|
@ -215,7 +215,10 @@ export const WebsocketProvider = ({ children }) => {
|
|||
) {
|
||||
updateData.updated_at = new Date().toISOString();
|
||||
// Log successful completion for debugging
|
||||
console.log('M3U refresh completed successfully:', updateData);
|
||||
console.log(
|
||||
'M3U refresh completed successfully:',
|
||||
updateData
|
||||
);
|
||||
}
|
||||
|
||||
updatePlaylist(updateData);
|
||||
|
|
@ -225,7 +228,9 @@ export const WebsocketProvider = ({ children }) => {
|
|||
// Log when playlist can't be found for debugging purposes
|
||||
console.warn(
|
||||
`Received update for unknown playlist ID: ${parsedEvent.data.account}`,
|
||||
Array.isArray(playlists) ? 'playlists is array' : 'playlists is object',
|
||||
Array.isArray(playlists)
|
||||
? 'playlists is array'
|
||||
: 'playlists is object',
|
||||
Object.keys(playlists).length
|
||||
);
|
||||
}
|
||||
|
|
@ -500,7 +505,7 @@ export const WebsocketProvider = ({ children }) => {
|
|||
const setProfilePreview = usePlaylistsStore((s) => s.setProfilePreview);
|
||||
const fetchEPGData = useEPGsStore((s) => s.fetchEPGData);
|
||||
const fetchEPGs = useEPGsStore((s) => s.fetchEPGs);
|
||||
const fetchLogos = useLogosStore((s) => s.fetchLogos);
|
||||
const fetchLogos = useLogosStore((s) => s.fetchAllLogos);
|
||||
const fetchChannelProfiles = useChannelsStore((s) => s.fetchChannelProfiles);
|
||||
|
||||
const ret = useMemo(() => {
|
||||
|
|
|
|||
|
|
@ -735,13 +735,20 @@ export default class API {
|
|||
}
|
||||
}
|
||||
|
||||
static async updateM3UGroupSettings(playlistId, groupSettings) {
|
||||
static async updateM3UGroupSettings(
|
||||
playlistId,
|
||||
groupSettings = [],
|
||||
categorySettings = []
|
||||
) {
|
||||
try {
|
||||
const response = await request(
|
||||
`${host}/api/m3u/accounts/${playlistId}/group-settings/`,
|
||||
{
|
||||
method: 'PATCH',
|
||||
body: { group_settings: groupSettings },
|
||||
body: {
|
||||
group_settings: groupSettings,
|
||||
category_settings: categorySettings,
|
||||
},
|
||||
}
|
||||
);
|
||||
// Fetch the updated playlist and update the store
|
||||
|
|
@ -793,7 +800,6 @@ export default class API {
|
|||
errorNotification('Failed to refresh M3U account', e);
|
||||
}
|
||||
}
|
||||
|
||||
static async refreshAllPlaylist() {
|
||||
try {
|
||||
const response = await request(`${host}/api/m3u/refresh/`, {
|
||||
|
|
@ -805,6 +811,19 @@ export default class API {
|
|||
errorNotification('Failed to refresh all M3U accounts', e);
|
||||
}
|
||||
}
|
||||
static async refreshVODContent(accountId) {
|
||||
try {
|
||||
const response = await request(
|
||||
`${host}/api/m3u/accounts/${accountId}/refresh-vod/`,
|
||||
{
|
||||
method: 'POST',
|
||||
}
|
||||
);
|
||||
return response;
|
||||
} catch (e) {
|
||||
errorNotification('Failed to refresh VOD content', e);
|
||||
}
|
||||
}
|
||||
|
||||
static async deletePlaylist(id) {
|
||||
try {
|
||||
|
|
@ -1291,6 +1310,8 @@ export default class API {
|
|||
|
||||
const params = new URLSearchParams();
|
||||
logoIds.forEach(id => params.append('ids', id));
|
||||
// Disable pagination for ID-based queries to get all matching logos
|
||||
params.append('no_pagination', 'true');
|
||||
|
||||
const response = await request(
|
||||
`${host}/api/channels/logos/?${params.toString()}`
|
||||
|
|
@ -1777,4 +1798,105 @@ export default class API {
|
|||
errorNotification('Failed to retrieve streams by IDs', e);
|
||||
}
|
||||
}
|
||||
|
||||
// VOD Methods
|
||||
static async getMovies(params = new URLSearchParams()) {
|
||||
try {
|
||||
const response = await request(
|
||||
`${host}/api/vod/movies/?${params.toString()}`
|
||||
);
|
||||
return response;
|
||||
} catch (e) {
|
||||
errorNotification('Failed to retrieve movies', e);
|
||||
}
|
||||
}
|
||||
|
||||
static async getSeries(params = new URLSearchParams()) {
|
||||
try {
|
||||
const response = await request(
|
||||
`${host}/api/vod/series/?${params.toString()}`
|
||||
);
|
||||
return response;
|
||||
} catch (e) {
|
||||
errorNotification('Failed to retrieve series', e);
|
||||
}
|
||||
}
|
||||
|
||||
static async getMovieDetails(movieId) {
|
||||
try {
|
||||
const response = await request(`${host}/api/vod/movies/${movieId}/`);
|
||||
return response;
|
||||
} catch (e) {
|
||||
errorNotification('Failed to retrieve movie details', e);
|
||||
}
|
||||
}
|
||||
|
||||
static async getMovieProviderInfo(movieId) {
|
||||
try {
|
||||
const response = await request(
|
||||
`${host}/api/vod/movies/${movieId}/provider-info/`
|
||||
);
|
||||
return response;
|
||||
} catch (e) {
|
||||
errorNotification('Failed to retrieve movie provider info', e);
|
||||
}
|
||||
}
|
||||
|
||||
static async getMovieProviders(movieId) {
|
||||
try {
|
||||
const response = await request(
|
||||
`${host}/api/vod/movies/${movieId}/providers/`
|
||||
);
|
||||
return response;
|
||||
} catch (e) {
|
||||
errorNotification('Failed to retrieve movie providers', e);
|
||||
}
|
||||
}
|
||||
|
||||
static async getSeriesProviders(seriesId) {
|
||||
try {
|
||||
const response = await request(
|
||||
`${host}/api/vod/series/${seriesId}/providers/`
|
||||
);
|
||||
return response;
|
||||
} catch (e) {
|
||||
errorNotification('Failed to retrieve series providers', e);
|
||||
}
|
||||
}
|
||||
|
||||
static async getVODCategories() {
|
||||
try {
|
||||
const response = await request(`${host}/api/vod/categories/`);
|
||||
return response;
|
||||
} catch (e) {
|
||||
errorNotification('Failed to retrieve VOD categories', e);
|
||||
}
|
||||
}
|
||||
|
||||
static async getSeriesInfo(seriesId) {
|
||||
try {
|
||||
// Call the provider-info endpoint that includes episodes
|
||||
const response = await request(
|
||||
`${host}/api/vod/series/${seriesId}/provider-info/?include_episodes=true`
|
||||
);
|
||||
return response;
|
||||
} catch (e) {
|
||||
errorNotification('Failed to retrieve series info', e);
|
||||
}
|
||||
}
|
||||
|
||||
static async updateVODPosition(vodUuid, clientId, position) {
|
||||
try {
|
||||
const response = await request(
|
||||
`${host}/proxy/vod/stream/${vodUuid}/position/`,
|
||||
{
|
||||
method: 'POST',
|
||||
body: { client_id: clientId, position },
|
||||
}
|
||||
);
|
||||
return response;
|
||||
} catch (e) {
|
||||
errorNotification('Failed to update playback position', e);
|
||||
}
|
||||
}
|
||||
}
|
||||
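
The new frontend wrappers map one-to-one onto the VOD REST endpoints, so the same calls can be made from any HTTP client. A hedged sketch in Python: the host, bearer-token auth scheme, and the ids/uuids are assumptions, while the paths are the ones used by the methods above:

    # Sketch of consuming the VOD endpoints added in this change.
    import requests

    host = "http://localhost:9191"                    # assumed
    token = "<jwt>"                                   # auth scheme is an assumption
    headers = {"Authorization": f"Bearer {token}"}

    movies = requests.get(f"{host}/api/vod/movies/", headers=headers, params={"page": 1}).json()
    series = requests.get(f"{host}/api/vod/series/", headers=headers).json()

    # Same call the frontend's getSeriesInfo() makes, including episodes:
    info = requests.get(
        f"{host}/api/vod/series/42/provider-info/",
        headers=headers,
        params={"include_episodes": "true"},
    ).json()

    # Report playback position, mirroring updateVODPosition():
    requests.post(
        f"{host}/proxy/vod/stream/<stream-uuid>/position/",
        headers=headers,
        json={"client_id": "<client-id>", "position": 4215},
    )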
|
|
|
|||
|
|
@ -30,7 +30,7 @@ const ConfirmationDialog = ({
|
|||
size = 'md',
|
||||
zIndex = 1000,
|
||||
showDeleteFileOption = false,
|
||||
deleteFileLabel = "Also delete files from disk",
|
||||
deleteFileLabel = 'Also delete files from disk',
|
||||
}) => {
|
||||
const suppressWarning = useWarningsStore((s) => s.suppressWarning);
|
||||
const isWarningSuppressed = useWarningsStore((s) => s.isWarningSuppressed);
|
||||
|
|
|
|||
|
|
@ -8,88 +8,181 @@ import { CloseButton, Flex, Loader, Text, Box } from '@mantine/core';
|
|||
export default function FloatingVideo() {
|
||||
const isVisible = useVideoStore((s) => s.isVisible);
|
||||
const streamUrl = useVideoStore((s) => s.streamUrl);
|
||||
const contentType = useVideoStore((s) => s.contentType);
|
||||
const metadata = useVideoStore((s) => s.metadata);
|
||||
const hideVideo = useVideoStore((s) => s.hideVideo);
|
||||
const videoRef = useRef(null);
|
||||
const playerRef = useRef(null);
|
||||
const videoContainerRef = useRef(null);
|
||||
// Convert ref to state so we can use it for rendering
|
||||
const [isLoading, setIsLoading] = useState(false);
|
||||
const [loadError, setLoadError] = useState(null);
|
||||
const [showOverlay, setShowOverlay] = useState(true);
|
||||
const overlayTimeoutRef = useRef(null);
|
||||
|
||||
// Safely destroy the player to prevent errors
|
||||
// Safely destroy the mpegts player to prevent errors
|
||||
const safeDestroyPlayer = () => {
|
||||
try {
|
||||
if (playerRef.current) {
|
||||
// Set loading to false when destroying player
|
||||
setIsLoading(false);
|
||||
setLoadError(null);
|
||||
|
||||
// First unload the source to stop any in-progress fetches
|
||||
if (videoRef.current) {
|
||||
// Remove src attribute and force a load to clear any pending requests
|
||||
videoRef.current.removeAttribute('src');
|
||||
videoRef.current.load();
|
||||
}
|
||||
|
||||
// Pause the player first
|
||||
try {
|
||||
playerRef.current.pause();
|
||||
} catch (e) {
|
||||
// Ignore pause errors
|
||||
}
|
||||
|
||||
// Use a try-catch block specifically for the destroy call
|
||||
try {
|
||||
playerRef.current.destroy();
|
||||
} catch (error) {
|
||||
// Ignore expected abort errors
|
||||
if (error.name !== 'AbortError' && !error.message?.includes('aborted')) {
|
||||
console.log("Error during player destruction:", error.message);
|
||||
if (
|
||||
error.name !== 'AbortError' &&
|
||||
!error.message?.includes('aborted')
|
||||
) {
|
||||
console.log('Error during player destruction:', error.message);
|
||||
}
|
||||
} finally {
|
||||
playerRef.current = null;
|
||||
}
|
||||
}
|
||||
} catch (error) {
|
||||
console.log("Error during player cleanup:", error);
|
||||
console.log('Error during player cleanup:', error);
|
||||
playerRef.current = null;
|
||||
}
|
||||
|
||||
// Clear overlay timer
|
||||
if (overlayTimeoutRef.current) {
|
||||
clearTimeout(overlayTimeoutRef.current);
|
||||
overlayTimeoutRef.current = null;
|
||||
}
|
||||
};
|
||||
|
||||
useEffect(() => {
|
||||
if (!isVisible || !streamUrl) {
|
||||
safeDestroyPlayer();
|
||||
return;
|
||||
// Start overlay auto-hide timer
|
||||
const startOverlayTimer = () => {
|
||||
if (overlayTimeoutRef.current) {
|
||||
clearTimeout(overlayTimeoutRef.current);
|
||||
}
|
||||
overlayTimeoutRef.current = setTimeout(() => {
|
||||
setShowOverlay(false);
|
||||
}, 4000); // Hide after 4 seconds
|
||||
};
|
||||
|
||||
// Check if we have an existing player and clean it up
|
||||
safeDestroyPlayer();
|
||||
// Initialize VOD player (native HTML5 with enhanced controls)
|
||||
const initializeVODPlayer = () => {
|
||||
if (!videoRef.current || !streamUrl) return;
|
||||
|
||||
setIsLoading(true);
|
||||
setLoadError(null);
|
||||
setShowOverlay(true); // Show overlay initially
|
||||
|
||||
console.log('Initializing VOD player for:', streamUrl);
|
||||
|
||||
const video = videoRef.current;
|
||||
|
||||
// Enhanced video element configuration for VOD
|
||||
video.preload = 'metadata';
|
||||
video.crossOrigin = 'anonymous';
|
||||
|
||||
// Set up event listeners
|
||||
const handleLoadStart = () => setIsLoading(true);
|
||||
const handleLoadedData = () => setIsLoading(false);
|
||||
const handleCanPlay = () => {
|
||||
setIsLoading(false);
|
||||
// Auto-play for VOD content
|
||||
video.play().catch((e) => {
|
||||
console.log('Auto-play prevented:', e);
|
||||
setLoadError('Auto-play was prevented. Click play to start.');
|
||||
});
|
||||
// Start overlay timer when video is ready
|
||||
startOverlayTimer();
|
||||
};
|
||||
const handleError = (e) => {
|
||||
setIsLoading(false);
|
||||
const error = e.target.error;
|
||||
let errorMessage = 'Video playback error';
|
||||
|
||||
if (error) {
|
||||
switch (error.code) {
|
||||
case error.MEDIA_ERR_ABORTED:
|
||||
errorMessage = 'Video playback was aborted';
|
||||
break;
|
||||
case error.MEDIA_ERR_NETWORK:
|
||||
errorMessage = 'Network error while loading video';
|
||||
break;
|
||||
case error.MEDIA_ERR_DECODE:
|
||||
errorMessage = 'Video codec not supported by your browser';
|
||||
break;
|
||||
case error.MEDIA_ERR_SRC_NOT_SUPPORTED:
|
||||
errorMessage = 'Video format not supported by your browser';
|
||||
break;
|
||||
default:
|
||||
errorMessage = error.message || 'Unknown video error';
|
||||
}
|
||||
}
|
||||
|
||||
setLoadError(errorMessage);
|
||||
};
|
||||
|
||||
// Enhanced progress tracking for VOD
|
||||
const handleProgress = () => {
|
||||
if (video.buffered.length > 0) {
|
||||
const bufferedEnd = video.buffered.end(video.buffered.length - 1);
|
||||
const duration = video.duration;
|
||||
if (duration > 0) {
|
||||
const bufferedPercent = (bufferedEnd / duration) * 100;
|
||||
// You could emit this to a store for UI feedback
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
// Add event listeners
|
||||
video.addEventListener('loadstart', handleLoadStart);
|
||||
video.addEventListener('loadeddata', handleLoadedData);
|
||||
video.addEventListener('canplay', handleCanPlay);
|
||||
video.addEventListener('error', handleError);
|
||||
video.addEventListener('progress', handleProgress);
|
||||
|
||||
// Set the source
|
||||
video.src = streamUrl;
|
||||
video.load();
|
||||
|
||||
// Store cleanup function
|
||||
playerRef.current = {
|
||||
destroy: () => {
|
||||
video.removeEventListener('loadstart', handleLoadStart);
|
||||
video.removeEventListener('loadeddata', handleLoadedData);
|
||||
video.removeEventListener('canplay', handleCanPlay);
|
||||
video.removeEventListener('error', handleError);
|
||||
video.removeEventListener('progress', handleProgress);
|
||||
video.removeAttribute('src');
|
||||
video.load();
|
||||
},
|
||||
};
|
||||
};
|
||||
|
||||
// Initialize live stream player (mpegts.js)
|
||||
const initializeLivePlayer = () => {
|
||||
if (!videoRef.current || !streamUrl) return;
|
||||
|
||||
// Set loading state to true when starting a new stream
|
||||
setIsLoading(true);
|
||||
setLoadError(null);
|
||||
|
||||
// Debug log to help diagnose stream issues
|
||||
console.log("Attempting to play stream:", streamUrl);
|
||||
console.log('Initializing live stream player for:', streamUrl);
|
||||
|
||||
try {
|
||||
// Check for MSE support first
|
||||
if (!mpegts.getFeatureList().mseLivePlayback) {
|
||||
setIsLoading(false);
|
||||
setLoadError("Your browser doesn't support live video streaming. Please try Chrome or Edge.");
|
||||
setLoadError(
|
||||
"Your browser doesn't support live video streaming. Please try Chrome or Edge."
|
||||
);
|
||||
return;
|
||||
}
|
||||
|
||||
// Check for basic codec support
|
||||
const video = document.createElement('video');
|
||||
const h264Support = video.canPlayType('video/mp4; codecs="avc1.42E01E"');
|
||||
const aacSupport = video.canPlayType('audio/mp4; codecs="mp4a.40.2"');
|
||||
|
||||
console.log("Browser codec support - H264:", h264Support, "AAC:", aacSupport);
|
||||
|
||||
// If the browser supports MSE for live playback, initialize mpegts.js
|
||||
setIsLoading(true);
|
||||
|
||||
const player = mpegts.createPlayer({
|
||||
type: 'mpegts',
|
||||
url: streamUrl,
|
||||
|
|
@ -107,7 +200,6 @@ export default function FloatingVideo() {
|
|||
|
||||
player.attachMediaElement(videoRef.current);
|
||||
|
||||
// Add events to track loading state
|
||||
player.on(mpegts.Events.LOADING_COMPLETE, () => {
|
||||
setIsLoading(false);
|
||||
});
|
||||
|
|
@ -116,29 +208,37 @@ export default function FloatingVideo() {
|
|||
setIsLoading(false);
|
||||
});
|
||||
|
||||
// Enhanced error event handler with codec-specific messages
|
||||
player.on(mpegts.Events.ERROR, (errorType, errorDetail) => {
|
||||
setIsLoading(false);
|
||||
|
||||
// Filter out aborted errors
|
||||
if (errorType !== 'NetworkError' || !errorDetail?.includes('aborted')) {
|
||||
console.error('Player error:', errorType, errorDetail);
|
||||
|
||||
// Provide specific error messages based on error type
|
||||
let errorMessage = `Error: ${errorType}`;
|
||||
|
||||
if (errorType === 'MediaError') {
|
||||
// Try to determine if it's an audio or video codec issue
|
||||
const errorString = errorDetail?.toLowerCase() || '';
|
||||
|
||||
if (errorString.includes('audio') || errorString.includes('ac3') || errorString.includes('ac-3')) {
|
||||
errorMessage = "Audio codec not supported by your browser. Try Chrome or Edge for better audio codec support.";
|
||||
} else if (errorString.includes('video') || errorString.includes('h264') || errorString.includes('h.264')) {
|
||||
errorMessage = "Video codec not supported by your browser. Try Chrome or Edge for better video codec support.";
|
||||
if (
|
||||
errorString.includes('audio') ||
|
||||
errorString.includes('ac3') ||
|
||||
errorString.includes('ac-3')
|
||||
) {
|
||||
errorMessage =
|
||||
'Audio codec not supported by your browser. Try Chrome or Edge for better audio codec support.';
|
||||
} else if (
|
||||
errorString.includes('video') ||
|
||||
errorString.includes('h264') ||
|
||||
errorString.includes('h.264')
|
||||
) {
|
||||
errorMessage =
|
||||
'Video codec not supported by your browser. Try Chrome or Edge for better video codec support.';
|
||||
} else if (errorString.includes('mse')) {
|
||||
errorMessage = "Your browser doesn't support the codecs used in this stream. Try Chrome or Edge for better compatibility.";
|
||||
errorMessage =
|
||||
"Your browser doesn't support the codecs used in this stream. Try Chrome or Edge for better compatibility.";
|
||||
} else {
|
||||
errorMessage = "Media codec not supported by your browser. This may be due to unsupported audio (AC3) or video codecs. Try Chrome or Edge.";
|
||||
errorMessage =
|
||||
'Media codec not supported by your browser. This may be due to unsupported audio (AC3) or video codecs. Try Chrome or Edge.';
|
||||
}
|
||||
} else if (errorDetail) {
|
||||
errorMessage += ` - ${errorDetail}`;
|
||||
|
|
@ -150,49 +250,66 @@ export default function FloatingVideo() {
|
|||
|
||||
player.load();
|
||||
|
||||
// Don't auto-play until we've loaded properly
|
||||
player.on(mpegts.Events.MEDIA_INFO, () => {
|
||||
setIsLoading(false);
|
||||
try {
|
||||
player.play().catch(e => {
|
||||
console.log("Auto-play prevented:", e);
|
||||
setLoadError("Auto-play was prevented. Click play to start.");
|
||||
player.play().catch((e) => {
|
||||
console.log('Auto-play prevented:', e);
|
||||
setLoadError('Auto-play was prevented. Click play to start.');
|
||||
});
|
||||
} catch (e) {
|
||||
console.log("Error during play:", e);
|
||||
console.log('Error during play:', e);
|
||||
setLoadError(`Playback error: ${e.message}`);
|
||||
}
|
||||
});
|
||||
|
||||
// Store player instance so we can clean up later
|
||||
playerRef.current = player;
|
||||
} catch (error) {
|
||||
setIsLoading(false);
|
||||
console.error("Error initializing player:", error);
|
||||
console.error('Error initializing player:', error);
|
||||
|
||||
// Provide helpful error message based on the error
|
||||
if (error.message?.includes('codec') || error.message?.includes('format')) {
|
||||
setLoadError("Codec not supported by your browser. Please try a different browser (Chrome/Edge recommended).");
|
||||
if (
|
||||
error.message?.includes('codec') ||
|
||||
error.message?.includes('format')
|
||||
) {
|
||||
setLoadError(
|
||||
'Codec not supported by your browser. Please try a different browser (Chrome/Edge recommended).'
|
||||
);
|
||||
} else {
|
||||
setLoadError(`Initialization error: ${error.message}`);
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
useEffect(() => {
|
||||
if (!isVisible || !streamUrl) {
|
||||
safeDestroyPlayer();
|
||||
return;
|
||||
}
|
||||
|
||||
// Clean up any existing player
|
||||
safeDestroyPlayer();
|
||||
|
||||
// Initialize the appropriate player based on content type
|
||||
if (contentType === 'vod') {
|
||||
initializeVODPlayer();
|
||||
} else {
|
||||
initializeLivePlayer();
|
||||
}
|
||||
|
||||
// Cleanup when component unmounts or streamUrl changes
|
||||
return () => {
|
||||
safeDestroyPlayer();
|
||||
};
|
||||
}, [isVisible, streamUrl]);
|
||||
}, [isVisible, streamUrl, contentType]);
|
||||
|
||||
// Modified hideVideo handler to clean up player first
|
||||
const handleClose = (e) => {
|
||||
// Prevent event propagation to avoid triggering drag events
|
||||
if (e) {
|
||||
e.stopPropagation();
|
||||
e.preventDefault();
|
||||
}
|
||||
safeDestroyPlayer();
|
||||
// Small delay before hiding the video component to ensure cleanup is complete
|
||||
setTimeout(() => {
|
||||
hideVideo();
|
||||
}, 50);
|
||||
|
|
@ -223,7 +340,7 @@ export default function FloatingVideo() {
|
|||
<Flex
|
||||
justify="flex-end"
|
||||
style={{
|
||||
padding: 3
|
||||
padding: 3,
|
||||
}}
|
||||
>
|
||||
<CloseButton
|
||||
|
|
@ -235,20 +352,83 @@ export default function FloatingVideo() {
|
|||
minHeight: '32px',
|
||||
minWidth: '32px',
|
||||
cursor: 'pointer',
|
||||
touchAction: 'manipulation'
|
||||
touchAction: 'manipulation',
|
||||
}}
|
||||
/>
|
||||
</Flex>
|
||||
|
||||
{/* Video container with relative positioning for the overlay */}
|
||||
<Box style={{ position: 'relative' }}>
|
||||
{/* The <video> element used by mpegts.js */}
|
||||
<Box
|
||||
style={{ position: 'relative' }}
|
||||
onMouseEnter={() => {
|
||||
if (contentType === 'vod' && !isLoading) {
|
||||
setShowOverlay(true);
|
||||
if (overlayTimeoutRef.current) {
|
||||
clearTimeout(overlayTimeoutRef.current);
|
||||
}
|
||||
}
|
||||
}}
|
||||
onMouseLeave={() => {
|
||||
if (contentType === 'vod' && !isLoading) {
|
||||
startOverlayTimer();
|
||||
}
|
||||
}}
|
||||
>
|
||||
{/* Enhanced video element with better controls for VOD */}
|
||||
<video
|
||||
ref={videoRef}
|
||||
controls
|
||||
style={{ width: '100%', height: '180px', backgroundColor: '#000' }}
|
||||
style={{
|
||||
width: '100%',
|
||||
height: '180px',
|
||||
backgroundColor: '#000',
|
||||
// Better controls styling for VOD
|
||||
...(contentType === 'vod' && {
|
||||
controlsList: 'nodownload',
|
||||
playsInline: true,
|
||||
}),
|
||||
}}
|
||||
// Add poster for VOD if available
|
||||
{...(contentType === 'vod' && {
|
||||
poster: metadata?.logo?.url, // Use VOD poster if available
|
||||
})}
|
||||
/>
|
||||
|
||||
{/* VOD title overlay when not loading - auto-hides after 4 seconds */}
|
||||
{!isLoading && metadata && contentType === 'vod' && showOverlay && (
|
||||
<Box
|
||||
style={{
|
||||
position: 'absolute',
|
||||
top: 0,
|
||||
left: 0,
|
||||
right: 0,
|
||||
background: 'linear-gradient(rgba(0,0,0,0.8), transparent)',
|
||||
padding: '10px 10px 20px',
|
||||
color: 'white',
|
||||
pointerEvents: 'none', // Allow clicks to pass through to video controls
|
||||
transition: 'opacity 0.3s ease-in-out',
|
||||
opacity: showOverlay ? 1 : 0,
|
||||
}}
|
||||
>
|
||||
<Text
|
||||
size="sm"
|
||||
weight={500}
|
||||
style={{ textShadow: '1px 1px 2px rgba(0,0,0,0.8)' }}
|
||||
>
|
||||
{metadata.name}
|
||||
</Text>
|
||||
{metadata.year && (
|
||||
<Text
|
||||
size="xs"
|
||||
color="dimmed"
|
||||
style={{ textShadow: '1px 1px 2px rgba(0,0,0,0.8)' }}
|
||||
>
|
||||
{metadata.year}
|
||||
</Text>
|
||||
)}
|
||||
</Box>
|
||||
)}
|
||||
|
||||
{/* Loading overlay - only show when loading */}
|
||||
{isLoading && (
|
||||
<Box
|
||||
|
|
@ -268,7 +448,7 @@ export default function FloatingVideo() {
|
|||
>
|
||||
<Loader color="cyan" size="md" />
|
||||
<Text color="white" size="sm" mt={10}>
|
||||
Loading stream...
|
||||
Loading {contentType === 'vod' ? 'video' : 'stream'}...
|
||||
</Text>
|
||||
</Box>
|
||||
)}
|
||||
|
|
|
|||
|
|
@ -1,66 +1,122 @@
|
|||
import React, { useState, useEffect } from 'react';
|
||||
import React, { useState, useEffect, useRef } from 'react';
|
||||
import { Skeleton } from '@mantine/core';
|
||||
import useLogosStore from '../store/logos';
|
||||
import logo from '../images/logo.png'; // Default logo
|
||||
|
||||
// Global request queue to batch logo requests
|
||||
const logoRequestQueue = new Set();
|
||||
let logoRequestTimer = null;
|
||||
|
||||
const LazyLogo = ({
|
||||
logoId,
|
||||
alt = 'logo',
|
||||
style = { maxHeight: 18, maxWidth: 55 },
|
||||
fallbackSrc = logo,
|
||||
...props
|
||||
logoId,
|
||||
alt = 'logo',
|
||||
style = { maxHeight: 18, maxWidth: 55 },
|
||||
fallbackSrc = logo,
|
||||
...props
|
||||
}) => {
|
||||
const [isLoading, setIsLoading] = useState(false);
|
||||
const [hasError, setHasError] = useState(false);
|
||||
const logos = useLogosStore((s) => s.logos);
|
||||
const fetchLogosByIds = useLogosStore((s) => s.fetchLogosByIds);
|
||||
const [isLoading, setIsLoading] = useState(false);
|
||||
const [hasError, setHasError] = useState(false);
|
||||
const fetchAttempted = useRef(new Set()); // Track which IDs we've already tried to fetch
|
||||
const isMountedRef = useRef(true);
|
||||
|
||||
// Determine the logo source
|
||||
const logoData = logoId && logos[logoId];
|
||||
const logoSrc = logoData?.cache_url || (logoId ? `/api/channels/logos/${logoId}/cache/` : fallbackSrc);
|
||||
const logos = useLogosStore((s) => s.logos);
|
||||
const fetchLogosByIds = useLogosStore((s) => s.fetchLogosByIds);
|
||||
|
||||
useEffect(() => {
|
||||
// If we have a logoId but no logo data, try to fetch it
|
||||
if (logoId && !logoData && !isLoading && !hasError) {
|
||||
setIsLoading(true);
|
||||
fetchLogosByIds([logoId])
|
||||
.then(() => {
|
||||
setIsLoading(false);
|
||||
})
|
||||
.catch((error) => {
|
||||
console.warn(`Failed to load logo ${logoId}:`, error);
|
||||
setIsLoading(false);
|
||||
setHasError(true);
|
||||
});
|
||||
// Determine the logo source
|
||||
const logoData = logoId && logos[logoId];
|
||||
const logoSrc = logoData?.cache_url || fallbackSrc; // Only use cache URL if we have logo data
|
||||
|
||||
// Cleanup on unmount
|
||||
useEffect(() => {
|
||||
return () => {
|
||||
isMountedRef.current = false;
|
||||
};
|
||||
}, []);
|
||||
|
||||
useEffect(() => {
|
||||
// If we have a logoId but no logo data, add it to the batch request queue
|
||||
if (
|
||||
logoId &&
|
||||
!logoData &&
|
||||
!isLoading &&
|
||||
!hasError &&
|
||||
!fetchAttempted.current.has(logoId) &&
|
||||
isMountedRef.current
|
||||
) {
|
||||
setIsLoading(true);
|
||||
fetchAttempted.current.add(logoId); // Mark this ID as attempted
|
||||
logoRequestQueue.add(logoId);
|
||||
|
||||
// Clear existing timer and set new one to batch requests
|
||||
if (logoRequestTimer) {
|
||||
clearTimeout(logoRequestTimer);
|
||||
}
|
||||
|
||||
logoRequestTimer = setTimeout(async () => {
|
||||
if (logoRequestQueue.size > 0) {
|
||||
const idsToFetch = Array.from(logoRequestQueue);
|
||||
logoRequestQueue.clear();
|
||||
|
||||
try {
|
||||
await fetchLogosByIds(idsToFetch);
|
||||
} catch (error) {
|
||||
console.warn(`Failed to load logos:`, error);
|
||||
// Mark failed IDs so they can be retried
|
||||
idsToFetch.forEach((id) => {
|
||||
if (fetchAttempted.current.has(id)) {
|
||||
fetchAttempted.current.delete(id);
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
}, [logoId, logoData, fetchLogosByIds, isLoading, hasError]);
|
||||
|
||||
// Show skeleton while loading
|
||||
if (isLoading) {
|
||||
return (
|
||||
<Skeleton
|
||||
height={style.maxHeight || 18}
|
||||
width={style.maxWidth || 55}
|
||||
style={{ ...style, borderRadius: 4 }}
|
||||
/>
|
||||
);
|
||||
// Update loading state for all components
|
||||
if (isMountedRef.current) {
|
||||
setIsLoading(false);
|
||||
}
|
||||
}, 100); // Batch requests for 100ms
|
||||
}
|
||||
|
||||
// Show image (will use fallback if logo fails to load)
|
||||
// If we now have the logo data, stop loading
|
||||
if (logoData && isLoading && isMountedRef.current) {
|
||||
setIsLoading(false);
|
||||
}
|
||||
// eslint-disable-next-line react-hooks/exhaustive-deps
|
||||
}, [logoId, fetchLogosByIds, logoData]); // Include logoData to detect when it becomes available
|
||||
|
||||
// Reset error state when logoId changes
|
||||
useEffect(() => {
|
||||
if (logoId) {
|
||||
setHasError(false);
|
||||
}
|
||||
}, [logoId]);
|
||||
|
||||
// Show skeleton while loading
|
||||
if (isLoading && !logoData) {
|
||||
return (
|
||||
<img
|
||||
src={logoSrc}
|
||||
alt={alt}
|
||||
style={style}
|
||||
onError={(e) => {
|
||||
if (!hasError) {
|
||||
setHasError(true);
|
||||
e.target.src = fallbackSrc;
|
||||
}
|
||||
}}
|
||||
{...props}
|
||||
/>
|
||||
<Skeleton
|
||||
height={style.maxHeight || 18}
|
||||
width={style.maxWidth || 55}
|
||||
style={{ ...style, borderRadius: 4 }}
|
||||
/>
|
||||
);
|
||||
}
|
||||
|
||||
// Show image (will use fallback if logo fails to load)
|
||||
return (
|
||||
<img
|
||||
src={logoSrc}
|
||||
alt={alt}
|
||||
style={style}
|
||||
onError={(e) => {
|
||||
if (!hasError) {
|
||||
setHasError(true);
|
||||
e.target.src = fallbackSrc;
|
||||
}
|
||||
}}
|
||||
{...props}
|
||||
/>
|
||||
);
|
||||
};
|
||||
|
||||
export default LazyLogo;
|
||||
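
LazyLogo now funnels individual logo IDs through a module-level queue and flushes them in one request after a 100 ms debounce, instead of firing a separate fetch per component. The same pattern, sketched in Python for clarity (the fetch function is a stand-in; this is not code from the change):

    # Debounced batch fetcher: collect IDs briefly, then fetch them in one call.
    import threading

    class BatchFetcher:
        def __init__(self, fetch_many, delay=0.1):
            self.fetch_many = fetch_many      # callable taking a list of IDs (placeholder)
            self.delay = delay
            self._pending = set()
            self._timer = None
            self._lock = threading.Lock()

        def request(self, item_id):
            with self._lock:
                self._pending.add(item_id)
                if self._timer:
                    self._timer.cancel()      # restart the debounce window
                self._timer = threading.Timer(self.delay, self._flush)
                self._timer.start()

        def _flush(self):
            with self._lock:
                ids, self._pending = list(self._pending), set()
                self._timer = None
            if ids:
                self.fetch_many(ids)          # one batched call instead of many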
|
|
|
|||
|
|
@ -5,6 +5,7 @@ import { notifications } from '@mantine/notifications';
|
|||
import useStreamsStore from '../store/streams';
|
||||
import useChannelsStore from '../store/channels';
|
||||
import useEPGsStore from '../store/epgs';
|
||||
import useVODStore from '../store/useVODStore';
|
||||
import { Stack, Button, Group } from '@mantine/core';
|
||||
import API from '../api';
|
||||
import { useNavigate } from 'react-router-dom';
|
||||
|
|
@ -18,6 +19,7 @@ export default function M3URefreshNotification() {
|
|||
const fetchChannels = useChannelsStore((s) => s.fetchChannels);
|
||||
const fetchPlaylists = usePlaylistsStore((s) => s.fetchPlaylists);
|
||||
const fetchEPGData = useEPGsStore((s) => s.fetchEPGData);
|
||||
const fetchCategories = useVODStore((s) => s.fetchCategories);
|
||||
|
||||
const [notificationStatus, setNotificationStatus] = useState({});
|
||||
const navigate = useNavigate();
|
||||
|
|
@ -126,6 +128,10 @@ export default function M3URefreshNotification() {
|
|||
case 'processing_groups':
|
||||
message = 'Group parsing';
|
||||
break;
|
||||
|
||||
case 'vod_refresh':
|
||||
message = 'VOD content refresh';
|
||||
break;
|
||||
}
|
||||
|
||||
if (taskProgress == 0) {
|
||||
|
|
@ -143,6 +149,10 @@ export default function M3URefreshNotification() {
|
|||
fetchChannelGroups();
|
||||
fetchEPGData();
|
||||
fetchPlaylists();
|
||||
} else if (data.action == 'vod_refresh') {
|
||||
// VOD refresh completed, trigger VOD categories refresh
|
||||
fetchPlaylists(); // Refresh playlist data to show updated VOD info
|
||||
fetchCategories(); // Refresh VOD categories to make them visible
|
||||
}
|
||||
}
|
||||
|
||||
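
The notification handler now recognizes a `vod_refresh` action in the M3U refresh websocket updates and, when it reports completion, refetches playlists and VOD categories. The payload shape below is inferred from the fields the handler reads and is an assumption, not taken from the backend code:

    # Assumed shape of a VOD refresh progress event, as consumed above.
    vod_refresh_event = {
        "data": {
            "account": 3,             # M3U account id
            "action": "vod_refresh",  # new action handled in this change
            "progress": 100,          # 100 triggers fetchPlaylists() + fetchCategories()
        }
    }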
|
|
|
|||
|
|
@ -99,13 +99,18 @@ const Sidebar = ({ collapsed, toggleDrawer, drawerWidth, miniDrawerWidth }) => {
|
|||
path: '/channels',
|
||||
badge: `(${Object.keys(channels).length})`,
|
||||
},
|
||||
{
|
||||
label: 'VODs',
|
||||
path: '/vods',
|
||||
icon: <Video size={20} />,
|
||||
},
|
||||
{
|
||||
label: 'M3U & EPG Manager',
|
||||
icon: <Play size={20} />,
|
||||
path: '/sources',
|
||||
},
|
||||
{ label: 'TV Guide', icon: <LayoutGrid size={20} />, path: '/guide' },
|
||||
{ label: 'DVR', icon: <Video size={20} />, path: '/dvr' },
|
||||
{ label: 'DVR', icon: <Database size={20} />, path: '/dvr' },
|
||||
{ label: 'Stats', icon: <ChartLine size={20} />, path: '/stats' },
|
||||
{
|
||||
label: 'Users',
|
||||
|
|
|
|||
|
|
@ -1,15 +1,14 @@
|
|||
import React, { useState, useEffect, useRef } from 'react';
|
||||
import React, { useState, useEffect, useRef, useMemo } from 'react';
|
||||
import { useFormik } from 'formik';
|
||||
import * as Yup from 'yup';
|
||||
import useChannelsStore from '../../store/channels';
|
||||
import useLogosStore from '../../store/logos';
|
||||
import API from '../../api';
|
||||
import useStreamProfilesStore from '../../store/streamProfiles';
|
||||
import useStreamsStore from '../../store/streams';
|
||||
import ChannelGroupForm from './ChannelGroup';
|
||||
import usePlaylistsStore from '../../store/playlists';
|
||||
import logo from '../../images/logo.png';
|
||||
import { useLogoSelection } from '../../hooks/useSmartLogos';
|
||||
import { useChannelLogoSelection } from '../../hooks/useSmartLogos';
|
||||
import LazyLogo from '../LazyLogo';
|
||||
import {
|
||||
Box,
|
||||
|
|
@ -51,8 +50,11 @@ const ChannelForm = ({ channel = null, isOpen, onClose }) => {
|
|||
const channelGroups = useChannelsStore((s) => s.channelGroups);
|
||||
const canEditChannelGroup = useChannelsStore((s) => s.canEditChannelGroup);
|
||||
|
||||
const { logos, ensureLogosLoaded, isLoading: logosLoading } = useLogoSelection();
|
||||
const fetchLogos = useLogosStore((s) => s.fetchLogos);
|
||||
const {
|
||||
logos,
|
||||
ensureLogosLoaded,
|
||||
isLoading: logosLoading,
|
||||
} = useChannelLogoSelection();
|
||||
const streams = useStreamsStore((state) => state.streams);
|
||||
const streamProfiles = useStreamProfilesStore((s) => s.profiles);
|
||||
const playlists = usePlaylistsStore((s) => s.playlists);
|
||||
|
|
@ -68,7 +70,6 @@ const ChannelForm = ({ channel = null, isOpen, onClose }) => {
|
|||
const [selectedEPG, setSelectedEPG] = useState('');
|
||||
const [tvgFilter, setTvgFilter] = useState('');
|
||||
const [logoFilter, setLogoFilter] = useState('');
|
||||
const [logoOptions, setLogoOptions] = useState([]);
|
||||
|
||||
const [groupPopoverOpened, setGroupPopoverOpened] = useState(false);
|
||||
const [groupFilter, setGroupFilter] = useState('');
|
||||
|
|
@ -103,7 +104,7 @@ const ChannelForm = ({ channel = null, isOpen, onClose }) => {
|
|||
|
||||
try {
|
||||
const retval = await API.uploadLogo(file);
|
||||
await fetchLogos();
|
||||
// Note: API.uploadLogo already adds the logo to the store, no need to fetch
|
||||
setLogoPreview(retval.cache_url);
|
||||
formik.setFieldValue('logo_id', retval.id);
|
||||
} catch (error) {
|
||||
|
|
@ -239,9 +240,10 @@ const ChannelForm = ({ channel = null, isOpen, onClose }) => {
|
|||
}
|
||||
}, [channel, tvgsById, channelGroups]);
|
||||
|
||||
useEffect(() => {
|
||||
setLogoOptions([{ id: '0', name: 'Default' }].concat(Object.values(logos)));
|
||||
}, [logos]);
|
||||
// Memoize logo options to prevent infinite re-renders during background loading
|
||||
const logoOptions = useMemo(() => {
|
||||
return [{ id: '0', name: 'Default' }].concat(Object.values(logos));
|
||||
}, [logos]); // Only depend on logos object
|
||||
|
||||
const renderLogoOption = ({ option, checked }) => {
|
||||
return (
|
||||
|
|
|
|||
|
|
@ -1,13 +1,11 @@
|
|||
import React, { useState, useEffect, useRef } from 'react';
|
||||
import React, { useState, useEffect, useRef, useMemo } from 'react';
|
||||
import { useFormik } from 'formik';
|
||||
import * as Yup from 'yup';
|
||||
import useChannelsStore from '../../store/channels';
|
||||
import useLogosStore from '../../store/logos';
|
||||
import API from '../../api';
|
||||
import useStreamProfilesStore from '../../store/streamProfiles';
|
||||
import useStreamProfilesStore from '../../store/streamProfiles';
|
||||
import useStreamsStore from '../../store/streams';
|
||||
import { useLogoSelection } from '../../hooks/useSmartLogos';
|
||||
import { useChannelLogoSelection } from '../../hooks/useSmartLogos';
|
||||
import LazyLogo from '../LazyLogo';
|
||||
import ChannelGroupForm from './ChannelGroup';
|
||||
import usePlaylistsStore from '../../store/playlists';
|
||||
|
|
@ -49,8 +47,7 @@ const ChannelsForm = ({ channel = null, isOpen, onClose }) => {
|
|||
const groupListRef = useRef(null);
|
||||
|
||||
const channelGroups = useChannelsStore((s) => s.channelGroups);
|
||||
const { logos, ensureLogosLoaded } = useLogoSelection();
|
||||
const fetchLogos = useLogosStore((s) => s.fetchLogos);
|
||||
const { logos, ensureLogosLoaded } = useChannelLogoSelection();
|
||||
const streams = useStreamsStore((state) => state.streams);
|
||||
const streamProfiles = useStreamProfilesStore((s) => s.profiles);
|
||||
const playlists = usePlaylistsStore((s) => s.playlists);
|
||||
|
|
@ -66,7 +63,6 @@ const ChannelsForm = ({ channel = null, isOpen, onClose }) => {
|
|||
const [selectedEPG, setSelectedEPG] = useState('');
|
||||
const [tvgFilter, setTvgFilter] = useState('');
|
||||
const [logoFilter, setLogoFilter] = useState('');
|
||||
const [logoOptions, setLogoOptions] = useState([]);
|
||||
|
||||
const [groupPopoverOpened, setGroupPopoverOpened] = useState(false);
|
||||
const [groupFilter, setGroupFilter] = useState('');
|
||||
|
|
@ -101,7 +97,7 @@ const ChannelsForm = ({ channel = null, isOpen, onClose }) => {
|
|||
|
||||
try {
|
||||
const retval = await API.uploadLogo(file);
|
||||
await fetchLogos();
|
||||
// Note: API.uploadLogo already adds the logo to the store, no need to fetch
|
||||
setLogoPreview(retval.cache_url);
|
||||
formik.setFieldValue('logo_id', retval.id);
|
||||
} catch (error) {
|
||||
|
|
@ -235,9 +231,10 @@ const ChannelsForm = ({ channel = null, isOpen, onClose }) => {
|
|||
}
|
||||
}, [channel, tvgsById, channelGroups]);
|
||||
|
||||
useEffect(() => {
|
||||
setLogoOptions([{ id: '0', name: 'Default' }].concat(Object.values(logos)));
|
||||
}, [logos]);
|
||||
// Memoize logo options to prevent infinite re-renders during background loading
|
||||
const logoOptions = useMemo(() => {
|
||||
return [{ id: '0', name: 'Default' }].concat(Object.values(logos));
|
||||
}, [logos]); // Only depend on logos object
|
||||
|
||||
const renderLogoOption = ({ option, checked }) => {
|
||||
return (
|
||||
|
|
|
|||
717
frontend/src/components/forms/LiveGroupFilter.jsx
Normal file
|
|
@ -0,0 +1,717 @@
|
|||
// Modal.js
|
||||
import React, { useState, useEffect, forwardRef } from 'react';
|
||||
import {
|
||||
TextInput,
|
||||
Button,
|
||||
Checkbox,
|
||||
Flex,
|
||||
Select,
|
||||
Stack,
|
||||
Group,
|
||||
SimpleGrid,
|
||||
Text,
|
||||
NumberInput,
|
||||
Divider,
|
||||
Alert,
|
||||
Box,
|
||||
MultiSelect,
|
||||
Tooltip,
|
||||
} from '@mantine/core';
|
||||
import { Info } from 'lucide-react';
|
||||
import useChannelsStore from '../../store/channels';
|
||||
import { CircleCheck, CircleX } from 'lucide-react';
|
||||
|
||||
// Custom item component for MultiSelect with tooltip
|
||||
const OptionWithTooltip = forwardRef(
|
||||
({ label, description, ...others }, ref) => (
|
||||
<Tooltip label={description} withArrow>
|
||||
<div ref={ref} {...others}>
|
||||
{label}
|
||||
</div>
|
||||
</Tooltip>
|
||||
)
|
||||
);
|
||||
|
||||
const LiveGroupFilter = ({ playlist, groupStates, setGroupStates }) => {
|
||||
const channelGroups = useChannelsStore((s) => s.channelGroups);
|
||||
const profiles = useChannelsStore((s) => s.profiles);
|
||||
const [groupFilter, setGroupFilter] = useState('');
|
||||
|
||||
useEffect(() => {
|
||||
if (Object.keys(channelGroups).length === 0) {
|
||||
return;
|
||||
}
|
||||
|
||||
setGroupStates(
|
||||
playlist.channel_groups.map((group) => {
|
||||
// Parse custom_properties if present
|
||||
let customProps = {};
|
||||
if (group.custom_properties) {
|
||||
try {
|
||||
customProps =
|
||||
typeof group.custom_properties === 'string'
|
||||
? JSON.parse(group.custom_properties)
|
||||
: group.custom_properties;
|
||||
} catch (e) {
|
||||
customProps = {};
|
||||
}
|
||||
}
|
||||
return {
|
||||
...group,
|
||||
name: channelGroups[group.channel_group].name,
|
||||
auto_channel_sync: group.auto_channel_sync || false,
|
||||
auto_sync_channel_start: group.auto_sync_channel_start || 1.0,
|
||||
custom_properties: customProps,
|
||||
original_enabled: group.enabled,
|
||||
};
|
||||
})
|
||||
);
|
||||
}, [playlist, channelGroups]);
|
||||
|
||||
const toggleGroupEnabled = (id) => {
|
||||
setGroupStates(
|
||||
groupStates.map((state) => ({
|
||||
...state,
|
||||
enabled: state.channel_group == id ? !state.enabled : state.enabled,
|
||||
}))
|
||||
);
|
||||
};
|
||||
|
||||
const toggleAutoSync = (id) => {
|
||||
setGroupStates(
|
||||
groupStates.map((state) => ({
|
||||
...state,
|
||||
auto_channel_sync:
|
||||
state.channel_group == id
|
||||
? !state.auto_channel_sync
|
||||
: state.auto_channel_sync,
|
||||
}))
|
||||
);
|
||||
};
|
||||
|
||||
const updateChannelStart = (id, value) => {
|
||||
setGroupStates(
|
||||
groupStates.map((state) => ({
|
||||
...state,
|
||||
auto_sync_channel_start:
|
||||
state.channel_group == id ? value : state.auto_sync_channel_start,
|
||||
}))
|
||||
);
|
||||
};
|
||||
|
||||
// Toggle force_dummy_epg in custom_properties for a group
|
||||
const toggleForceDummyEPG = (id) => {
|
||||
setGroupStates(
|
||||
groupStates.map((state) => {
|
||||
if (state.channel_group == id) {
|
||||
const customProps = { ...(state.custom_properties || {}) };
|
||||
customProps.force_dummy_epg = !customProps.force_dummy_epg;
|
||||
return {
|
||||
...state,
|
||||
custom_properties: customProps,
|
||||
};
|
||||
}
|
||||
return state;
|
||||
})
|
||||
);
|
||||
};
|
||||
|
||||
const selectAll = () => {
|
||||
setGroupStates(
|
||||
groupStates.map((state) => ({
|
||||
...state,
|
||||
enabled: state.name.toLowerCase().includes(groupFilter.toLowerCase())
|
||||
? true
|
||||
: state.enabled,
|
||||
}))
|
||||
);
|
||||
};
|
||||
|
||||
const deselectAll = () => {
|
||||
setGroupStates(
|
||||
groupStates.map((state) => ({
|
||||
...state,
|
||||
enabled: state.name.toLowerCase().includes(groupFilter.toLowerCase())
|
||||
? false
|
||||
: state.enabled,
|
||||
}))
|
||||
);
|
||||
};
|
||||
|
||||
return (
|
||||
<Stack style={{ paddingTop: 10 }}>
|
||||
<Alert icon={<Info size={16} />} color="blue" variant="light">
|
||||
<Text size="sm">
|
||||
<strong>Auto Channel Sync:</strong> When enabled, channels will be
|
||||
automatically created for all streams in the group during M3U updates,
|
||||
and removed when streams are no longer present. Set a starting channel
|
||||
number for each group to organize your channels.
|
||||
</Text>
|
||||
</Alert>
|
||||
|
||||
<Flex gap="sm">
|
||||
<TextInput
|
||||
placeholder="Filter groups..."
|
||||
value={groupFilter}
|
||||
onChange={(event) => setGroupFilter(event.currentTarget.value)}
|
||||
style={{ flex: 1 }}
|
||||
size="xs"
|
||||
/>
|
||||
<Button variant="default" size="xs" onClick={selectAll}>
|
||||
Select Visible
|
||||
</Button>
|
||||
<Button variant="default" size="xs" onClick={deselectAll}>
|
||||
Deselect Visible
|
||||
</Button>
|
||||
</Flex>
|
||||
|
||||
<Divider label="Groups & Auto Sync Settings" labelPosition="center" />
|
||||
|
||||
<Box style={{ maxHeight: '50vh', overflowY: 'auto' }}>
|
||||
<SimpleGrid
|
||||
cols={{ base: 1, sm: 2, md: 3 }}
|
||||
spacing="xs"
|
||||
verticalSpacing="xs"
|
||||
>
|
||||
{groupStates
|
||||
.filter((group) =>
|
||||
group.name.toLowerCase().includes(groupFilter.toLowerCase())
|
||||
)
|
||||
.sort((a, b) => a.name.localeCompare(b.name))
|
||||
.map((group) => (
|
||||
<Group
|
||||
key={group.channel_group}
|
||||
spacing="xs"
|
||||
style={{
|
||||
padding: '8px',
|
||||
border: '1px solid #444',
|
||||
borderRadius: '8px',
|
||||
backgroundColor: group.enabled ? '#2A2A2E' : '#1E1E22',
|
||||
flexDirection: 'column',
|
||||
alignItems: 'stretch',
|
||||
}}
|
||||
>
|
||||
{/* Group Enable/Disable Button */}
|
||||
<Button
|
||||
color={group.enabled ? 'green' : 'gray'}
|
||||
variant="filled"
|
||||
onClick={() => toggleGroupEnabled(group.channel_group)}
|
||||
radius="md"
|
||||
size="xs"
|
||||
leftSection={
|
||||
group.enabled ? (
|
||||
<CircleCheck size={14} />
|
||||
) : (
|
||||
<CircleX size={14} />
|
||||
)
|
||||
}
|
||||
fullWidth
|
||||
>
|
||||
<Text size="xs" truncate>
|
||||
{group.name}
|
||||
</Text>
|
||||
</Button>
|
||||
|
||||
{/* Auto Sync Controls */}
|
||||
<Stack spacing="xs" style={{ '--stack-gap': '4px' }}>
|
||||
<Flex align="center" gap="xs">
|
||||
<Checkbox
|
||||
label="Auto Channel Sync"
|
||||
checked={group.auto_channel_sync && group.enabled}
|
||||
disabled={!group.enabled}
|
||||
onChange={() => toggleAutoSync(group.channel_group)}
|
||||
size="xs"
|
||||
/>
|
||||
</Flex>
|
||||
|
||||
{group.auto_channel_sync && group.enabled && (
|
||||
<>
|
||||
<NumberInput
|
||||
label="Start Channel #"
|
||||
value={group.auto_sync_channel_start}
|
||||
onChange={(value) =>
|
||||
updateChannelStart(group.channel_group, value)
|
||||
}
|
||||
min={1}
|
||||
step={1}
|
||||
size="xs"
|
||||
precision={1}
|
||||
/>
|
||||
|
||||
{/* Auto Channel Sync Options Multi-Select */}
|
||||
<MultiSelect
|
||||
label="Advanced Options"
|
||||
placeholder="Select options..."
|
||||
data={[
|
||||
{
|
||||
value: 'force_dummy_epg',
|
||||
label: 'Force Dummy EPG',
|
||||
description:
|
||||
'Assign a dummy EPG to all channels in this group if no EPG is matched',
|
||||
},
|
||||
{
|
||||
value: 'group_override',
|
||||
label: 'Override Channel Group',
|
||||
description:
|
||||
'Override the group assignment for all channels in this group',
|
||||
},
|
||||
{
|
||||
value: 'name_regex',
|
||||
label: 'Channel Name Find & Replace (Regex)',
|
||||
description:
|
||||
'Find and replace part of the channel name using a regex pattern',
|
||||
},
|
||||
{
|
||||
value: 'name_match_regex',
|
||||
label: 'Channel Name Filter (Regex)',
|
||||
description:
|
||||
'Only include channels whose names match this regex pattern',
|
||||
},
|
||||
{
|
||||
value: 'profile_assignment',
|
||||
label: 'Channel Profile Assignment',
|
||||
description:
|
||||
'Specify which channel profiles the auto-synced channels should be added to',
|
||||
},
|
||||
{
|
||||
value: 'channel_sort_order',
|
||||
label: 'Channel Sort Order',
|
||||
description:
|
||||
'Specify the order in which channels are created (name, tvg_id, updated_at)',
|
||||
},
|
||||
]}
|
||||
itemComponent={OptionWithTooltip}
|
||||
value={(() => {
|
||||
const selectedValues = [];
|
||||
if (group.custom_properties?.force_dummy_epg) {
|
||||
selectedValues.push('force_dummy_epg');
|
||||
}
|
||||
if (
|
||||
group.custom_properties?.group_override !==
|
||||
undefined
|
||||
) {
|
||||
selectedValues.push('group_override');
|
||||
}
|
||||
if (
|
||||
group.custom_properties?.name_regex_pattern !==
|
||||
undefined ||
|
||||
group.custom_properties?.name_replace_pattern !==
|
||||
undefined
|
||||
) {
|
||||
selectedValues.push('name_regex');
|
||||
}
|
||||
if (
|
||||
group.custom_properties?.name_match_regex !==
|
||||
undefined
|
||||
) {
|
||||
selectedValues.push('name_match_regex');
|
||||
}
|
||||
if (
|
||||
group.custom_properties?.channel_profile_ids !==
|
||||
undefined
|
||||
) {
|
||||
            selectedValues.push('profile_assignment');
          }
          if (group.custom_properties?.channel_sort_order !== undefined) {
            selectedValues.push('channel_sort_order');
          }
          return selectedValues;
        })()}
        onChange={(values) => {
          // MultiSelect always returns an array
          const selectedOptions = values || [];

          setGroupStates(
            groupStates.map((state) => {
              if (state.channel_group === group.channel_group) {
                let newCustomProps = { ...(state.custom_properties || {}) };

                // Handle force_dummy_epg
                if (selectedOptions.includes('force_dummy_epg')) {
                  newCustomProps.force_dummy_epg = true;
                } else {
                  delete newCustomProps.force_dummy_epg;
                }

                // Handle group_override
                if (selectedOptions.includes('group_override')) {
                  if (newCustomProps.group_override === undefined) {
                    newCustomProps.group_override = null;
                  }
                } else {
                  delete newCustomProps.group_override;
                }

                // Handle name_regex
                if (selectedOptions.includes('name_regex')) {
                  if (newCustomProps.name_regex_pattern === undefined) {
                    newCustomProps.name_regex_pattern = '';
                  }
                  if (newCustomProps.name_replace_pattern === undefined) {
                    newCustomProps.name_replace_pattern = '';
                  }
                } else {
                  delete newCustomProps.name_regex_pattern;
                  delete newCustomProps.name_replace_pattern;
                }

                // Handle name_match_regex
                if (selectedOptions.includes('name_match_regex')) {
                  if (newCustomProps.name_match_regex === undefined) {
                    newCustomProps.name_match_regex = '';
                  }
                } else {
                  delete newCustomProps.name_match_regex;
                }

                // Handle profile_assignment
                if (selectedOptions.includes('profile_assignment')) {
                  if (newCustomProps.channel_profile_ids === undefined) {
                    newCustomProps.channel_profile_ids = [];
                  }
                } else {
                  delete newCustomProps.channel_profile_ids;
                }

                // Handle channel_sort_order
                if (selectedOptions.includes('channel_sort_order')) {
                  if (newCustomProps.channel_sort_order === undefined) {
                    newCustomProps.channel_sort_order = '';
                  }
                  // Keep channel_sort_reverse if it exists
                  if (newCustomProps.channel_sort_reverse === undefined) {
                    newCustomProps.channel_sort_reverse = false;
                  }
                } else {
                  delete newCustomProps.channel_sort_order;
                  delete newCustomProps.channel_sort_reverse; // Remove reverse when sort is removed
                }

                return {
                  ...state,
                  custom_properties: newCustomProps,
                };
              }
              return state;
            })
          );
        }}
        clearable
        size="xs"
      />

      {/* Show only channel_sort_order if selected */}
      {group.custom_properties?.channel_sort_order !== undefined && (
        <>
          <Select
            label="Channel Sort Order"
            placeholder="Select sort order..."
            value={group.custom_properties?.channel_sort_order || ''}
            onChange={(value) => {
              setGroupStates(
                groupStates.map((state) => {
                  if (state.channel_group === group.channel_group) {
                    return {
                      ...state,
                      custom_properties: {
                        ...state.custom_properties,
                        channel_sort_order: value || '',
                      },
                    };
                  }
                  return state;
                })
              );
            }}
            data={[
              { value: '', label: 'Provider Order (Default)' },
              { value: 'name', label: 'Name' },
              { value: 'tvg_id', label: 'TVG ID' },
              { value: 'updated_at', label: 'Updated At' },
            ]}
            clearable
            searchable
            size="xs"
          />

          {/* Add reverse sort checkbox when sort order is selected (including default) */}
          {group.custom_properties?.channel_sort_order !== undefined && (
            <Flex align="center" gap="xs" mt="xs">
              <Checkbox
                label="Reverse Sort Order"
                checked={group.custom_properties?.channel_sort_reverse || false}
                onChange={(event) => {
                  setGroupStates(
                    groupStates.map((state) => {
                      if (state.channel_group === group.channel_group) {
                        return {
                          ...state,
                          custom_properties: {
                            ...state.custom_properties,
                            channel_sort_reverse: event.target.checked,
                          },
                        };
                      }
                      return state;
                    })
                  );
                }}
                size="xs"
              />
            </Flex>
          )}
        </>
      )}

      {/* Show profile selection only if profile_assignment is selected */}
      {group.custom_properties?.channel_profile_ids !== undefined && (
        <Tooltip
          label="Select which channel profiles the auto-synced channels should be added to. Leave empty to add to all profiles."
          withArrow
        >
          <MultiSelect
            label="Channel Profiles"
            placeholder="Select profiles..."
            value={group.custom_properties?.channel_profile_ids || []}
            onChange={(value) => {
              setGroupStates(
                groupStates.map((state) => {
                  if (state.channel_group === group.channel_group) {
                    return {
                      ...state,
                      custom_properties: {
                        ...state.custom_properties,
                        channel_profile_ids: value || [],
                      },
                    };
                  }
                  return state;
                })
              );
            }}
            data={Object.values(profiles).map((profile) => ({
              value: profile.id.toString(),
              label: profile.name,
            }))}
            clearable
            searchable
            size="xs"
          />
        </Tooltip>
      )}

      {/* Show group select only if group_override is selected */}
      {group.custom_properties?.group_override !== undefined && (
        <Tooltip
          label="Select a group to override the assignment for all channels in this group."
          withArrow
        >
          <Select
            label="Override Channel Group"
            placeholder="Choose group..."
            value={group.custom_properties?.group_override?.toString() || null}
            onChange={(value) => {
              const newValue = value ? parseInt(value) : null;
              setGroupStates(
                groupStates.map((state) => {
                  if (state.channel_group === group.channel_group) {
                    return {
                      ...state,
                      custom_properties: {
                        ...state.custom_properties,
                        group_override: newValue,
                      },
                    };
                  }
                  return state;
                })
              );
            }}
            data={Object.values(channelGroups).map((g) => ({
              value: g.id.toString(),
              label: g.name,
            }))}
            clearable
            searchable
            size="xs"
          />
        </Tooltip>
      )}

      {/* Show regex fields only if name_regex is selected */}
      {(group.custom_properties?.name_regex_pattern !== undefined ||
        group.custom_properties?.name_replace_pattern !== undefined) && (
        <>
          <Tooltip
            label="Regex pattern to find in the channel name. Example: ^.*? - PPV\\d+ - (.+)$"
            withArrow
          >
            <TextInput
              label="Channel Name Find (Regex)"
              placeholder="e.g. ^.*? - PPV\\d+ - (.+)$"
              value={group.custom_properties?.name_regex_pattern || ''}
              onChange={(e) => {
                const val = e.currentTarget.value;
                setGroupStates(
                  groupStates.map((state) =>
                    state.channel_group === group.channel_group
                      ? {
                          ...state,
                          custom_properties: {
                            ...state.custom_properties,
                            name_regex_pattern: val,
                          },
                        }
                      : state
                  )
                );
              }}
              size="xs"
            />
          </Tooltip>
          <Tooltip
            label="Replacement pattern for the channel name. Example: $1"
            withArrow
          >
            <TextInput
              label="Channel Name Replace"
              placeholder="e.g. $1"
              value={group.custom_properties?.name_replace_pattern || ''}
              onChange={(e) => {
                const val = e.currentTarget.value;
                setGroupStates(
                  groupStates.map((state) =>
                    state.channel_group === group.channel_group
                      ? {
                          ...state,
                          custom_properties: {
                            ...state.custom_properties,
                            name_replace_pattern: val,
                          },
                        }
                      : state
                  )
                );
              }}
              size="xs"
            />
          </Tooltip>
        </>
      )}

      {/* Show name_match_regex field only if selected */}
      {group.custom_properties?.name_match_regex !== undefined && (
        <Tooltip
          label="Only channels whose names match this regex will be included. Example: ^Sports.*"
          withArrow
        >
          <TextInput
            label="Channel Name Filter (Regex)"
            placeholder="e.g. ^Sports.*"
            value={group.custom_properties?.name_match_regex || ''}
            onChange={(e) => {
              const val = e.currentTarget.value;
              setGroupStates(
                groupStates.map((state) =>
                  state.channel_group === group.channel_group
                    ? {
                        ...state,
                        custom_properties: {
                          ...state.custom_properties,
                          name_match_regex: val,
                        },
                      }
                    : state
                )
              );
            }}
            size="xs"
          />
        </Tooltip>
      )}
    </>
  )}
</Stack>
</Group>
))}
</SimpleGrid>
</Box>
</Stack>
);
};

export default LiveGroupFilter;
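For context, LiveGroupFilter is consumed by M3UGroupFilter further down in this diff. A minimal sketch of that wiring, not part of the diff itself (the prop names come from the M3UGroupFilter changes below; the local state names are assumptions):

// Sketch: parent keeps the per-group state and hands it to LiveGroupFilter.
const [groupStates, setGroupStates] = useState([]);

<LiveGroupFilter
  playlist={playlist}
  groupStates={groupStates}
  setGroupStates={setGroupStates}
/>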
@ -2,319 +2,357 @@ import React, { useState, useEffect } from 'react';
|
|||
import { useFormik } from 'formik';
|
||||
import * as Yup from 'yup';
|
||||
import {
  Modal,
  TextInput,
  Button,
  Group,
  Stack,
  Image,
  Text,
  Center,
  Box,
  Divider,
} from '@mantine/core';
|
||||
import { Dropzone } from '@mantine/dropzone';
|
||||
import { Upload, FileImage, X } from 'lucide-react';
|
||||
import { notifications } from '@mantine/notifications';
|
||||
import API from '../../api';
|
||||
|
||||
const LogoForm = ({ logo = null, isOpen, onClose }) => {
|
||||
const [logoPreview, setLogoPreview] = useState(null);
|
||||
const [uploading, setUploading] = useState(false);
|
||||
const [selectedFile, setSelectedFile] = useState(null); // Store selected file
|
||||
const LogoForm = ({ logo = null, isOpen, onClose, onSuccess }) => {
|
||||
const [logoPreview, setLogoPreview] = useState(null);
|
||||
const [uploading, setUploading] = useState(false);
|
||||
const [selectedFile, setSelectedFile] = useState(null); // Store selected file
|
||||
|
||||
const formik = useFormik({
|
||||
initialValues: {
|
||||
name: '',
|
||||
url: '',
|
||||
},
|
||||
validationSchema: Yup.object({
|
||||
name: Yup.string().required('Name is required'),
|
||||
url: Yup.string()
|
||||
.required('URL is required')
|
||||
.test('valid-url-or-path', 'Must be a valid URL or local file path', (value) => {
|
||||
if (!value) return false;
|
||||
// Allow local file paths starting with /data/logos/
|
||||
if (value.startsWith('/data/logos/')) return true;
|
||||
// Allow valid URLs
|
||||
try {
|
||||
new URL(value);
|
||||
return true;
|
||||
} catch {
|
||||
return false;
|
||||
}
|
||||
}),
|
||||
}),
|
||||
onSubmit: async (values, { setSubmitting }) => {
|
||||
const formik = useFormik({
|
||||
initialValues: {
|
||||
name: '',
|
||||
url: '',
|
||||
},
|
||||
validationSchema: Yup.object({
|
||||
name: Yup.string().required('Name is required'),
|
||||
url: Yup.string()
|
||||
.required('URL is required')
|
||||
.test(
|
||||
'valid-url-or-path',
|
||||
'Must be a valid URL or local file path',
|
||||
(value) => {
|
||||
if (!value) return false;
|
||||
// Allow local file paths starting with /data/logos/
|
||||
if (value.startsWith('/data/logos/')) return true;
|
||||
// Allow valid URLs
|
||||
try {
|
||||
setUploading(true);
|
||||
|
||||
// If we have a selected file, upload it first
|
||||
if (selectedFile) {
|
||||
try {
|
||||
const uploadResponse = await API.uploadLogo(selectedFile, values.name);
|
||||
// Use the uploaded file data instead of form values
|
||||
values.name = uploadResponse.name;
|
||||
values.url = uploadResponse.url;
|
||||
} catch (uploadError) {
|
||||
let errorMessage = 'Failed to upload logo file';
|
||||
|
||||
if (uploadError.code === 'NETWORK_ERROR' || uploadError.message?.includes('timeout')) {
|
||||
errorMessage = 'Upload timed out. Please try again.';
|
||||
} else if (uploadError.status === 413) {
|
||||
errorMessage = 'File too large. Please choose a smaller file.';
|
||||
} else if (uploadError.body?.error) {
|
||||
errorMessage = uploadError.body.error;
|
||||
}
|
||||
|
||||
notifications.show({
|
||||
title: 'Upload Error',
|
||||
message: errorMessage,
|
||||
color: 'red',
|
||||
});
|
||||
return; // Don't proceed with creation if upload fails
|
||||
}
|
||||
}
|
||||
|
||||
// Now create or update the logo with the final values
|
||||
// Only proceed if we don't already have a logo from file upload
|
||||
if (logo) {
|
||||
await API.updateLogo(logo.id, values);
|
||||
notifications.show({
|
||||
title: 'Success',
|
||||
message: 'Logo updated successfully',
|
||||
color: 'green',
|
||||
});
|
||||
} else if (!selectedFile) {
|
||||
// Only create a new logo entry if we're not uploading a file
|
||||
// (file upload already created the logo entry)
|
||||
await API.createLogo(values);
|
||||
notifications.show({
|
||||
title: 'Success',
|
||||
message: 'Logo created successfully',
|
||||
color: 'green',
|
||||
});
|
||||
} else {
|
||||
// File was uploaded and logo was already created
|
||||
notifications.show({
|
||||
title: 'Success',
|
||||
message: 'Logo uploaded successfully',
|
||||
color: 'green',
|
||||
});
|
||||
}
|
||||
onClose();
|
||||
} catch (error) {
|
||||
let errorMessage = logo ? 'Failed to update logo' : 'Failed to create logo';
|
||||
|
||||
// Handle specific timeout errors
|
||||
if (error.code === 'NETWORK_ERROR' || error.message?.includes('timeout')) {
|
||||
errorMessage = 'Request timed out. Please try again.';
|
||||
} else if (error.response?.data?.error) {
|
||||
errorMessage = error.response.data.error;
|
||||
}
|
||||
|
||||
notifications.show({
|
||||
title: 'Error',
|
||||
message: errorMessage,
|
||||
color: 'red',
|
||||
});
|
||||
} finally {
|
||||
setSubmitting(false);
|
||||
setUploading(false);
|
||||
new URL(value);
|
||||
return true;
|
||||
} catch {
|
||||
return false;
|
||||
}
|
||||
},
|
||||
});
|
||||
}
|
||||
),
|
||||
}),
|
||||
onSubmit: async (values, { setSubmitting }) => {
|
||||
try {
|
||||
setUploading(true);
|
||||
let uploadResponse = null; // Store upload response for later use
|
||||
|
||||
useEffect(() => {
|
||||
if (logo) {
|
||||
formik.setValues({
|
||||
name: logo.name || '',
|
||||
url: logo.url || '',
|
||||
});
|
||||
setLogoPreview(logo.cache_url);
|
||||
} else {
|
||||
formik.resetForm();
|
||||
setLogoPreview(null);
|
||||
}
|
||||
// Clear any selected file when logo changes
|
||||
setSelectedFile(null);
|
||||
}, [logo, isOpen]);
|
||||
|
||||
const handleFileSelect = (files) => {
|
||||
if (files.length === 0) return;
|
||||
|
||||
const file = files[0];
|
||||
|
||||
// Validate file size on frontend first
|
||||
if (file.size > 5 * 1024 * 1024) { // 5MB
|
||||
notifications.show({
|
||||
title: 'Error',
|
||||
message: 'File too large. Maximum size is 5MB.',
|
||||
color: 'red',
|
||||
});
|
||||
return;
|
||||
}
|
||||
|
||||
// Store the file for later upload and create preview
|
||||
setSelectedFile(file);
|
||||
|
||||
// Generate a local preview URL
|
||||
const previewUrl = URL.createObjectURL(file);
|
||||
setLogoPreview(previewUrl);
|
||||
|
||||
// Auto-fill the name field if empty
|
||||
if (!formik.values.name) {
|
||||
const nameWithoutExtension = file.name.replace(/\.[^/.]+$/, "");
|
||||
formik.setFieldValue('name', nameWithoutExtension);
|
||||
}
|
||||
|
||||
// Set a placeholder URL (will be replaced after upload)
|
||||
formik.setFieldValue('url', 'file://pending-upload');
|
||||
};
|
||||
|
||||
const handleUrlChange = (event) => {
|
||||
const url = event.target.value;
|
||||
formik.setFieldValue('url', url);
|
||||
|
||||
// Clear any selected file when manually entering URL
|
||||
// If we have a selected file, upload it first
|
||||
if (selectedFile) {
|
||||
setSelectedFile(null);
|
||||
// Revoke the object URL to free memory
|
||||
if (logoPreview && logoPreview.startsWith('blob:')) {
|
||||
URL.revokeObjectURL(logoPreview);
|
||||
try {
|
||||
uploadResponse = await API.uploadLogo(selectedFile, values.name);
|
||||
// Use the uploaded file data instead of form values
|
||||
values.name = uploadResponse.name;
|
||||
values.url = uploadResponse.url;
|
||||
} catch (uploadError) {
|
||||
let errorMessage = 'Failed to upload logo file';
|
||||
|
||||
if (
|
||||
uploadError.code === 'NETWORK_ERROR' ||
|
||||
uploadError.message?.includes('timeout')
|
||||
) {
|
||||
errorMessage = 'Upload timed out. Please try again.';
|
||||
} else if (uploadError.status === 413) {
|
||||
errorMessage = 'File too large. Please choose a smaller file.';
|
||||
} else if (uploadError.body?.error) {
|
||||
errorMessage = uploadError.body.error;
|
||||
}
|
||||
|
||||
notifications.show({
|
||||
title: 'Upload Error',
|
||||
message: errorMessage,
|
||||
color: 'red',
|
||||
});
|
||||
return; // Don't proceed with creation if upload fails
|
||||
}
|
||||
}
|
||||
|
||||
// Update preview for remote URLs
|
||||
if (url && url.startsWith('http')) {
|
||||
setLogoPreview(url);
|
||||
} else if (!url) {
|
||||
setLogoPreview(null);
|
||||
// Now create or update the logo with the final values
|
||||
// Only proceed if we don't already have a logo from file upload
|
||||
if (logo) {
|
||||
const updatedLogo = await API.updateLogo(logo.id, values);
|
||||
notifications.show({
|
||||
title: 'Success',
|
||||
message: 'Logo updated successfully',
|
||||
color: 'green',
|
||||
});
|
||||
onSuccess?.({ type: 'update', logo: updatedLogo }); // Call onSuccess for updates
|
||||
} else if (!selectedFile) {
|
||||
// Only create a new logo entry if we're not uploading a file
|
||||
// (file upload already created the logo entry)
|
||||
const newLogo = await API.createLogo(values);
|
||||
notifications.show({
|
||||
title: 'Success',
|
||||
message: 'Logo created successfully',
|
||||
color: 'green',
|
||||
});
|
||||
onSuccess?.({ type: 'create', logo: newLogo }); // Call onSuccess for creates
|
||||
} else {
|
||||
// File was uploaded and logo was already created
|
||||
// Note: API.uploadLogo already calls addLogo() in the store, so no need to call onSuccess
|
||||
notifications.show({
|
||||
title: 'Success',
|
||||
message: 'Logo uploaded successfully',
|
||||
color: 'green',
|
||||
});
|
||||
// No onSuccess call needed - API.uploadLogo already updated the store
|
||||
}
|
||||
onClose();
|
||||
} catch (error) {
|
||||
let errorMessage = logo
|
||||
? 'Failed to update logo'
|
||||
: 'Failed to create logo';
|
||||
|
||||
// Handle specific timeout errors
|
||||
if (
|
||||
error.code === 'NETWORK_ERROR' ||
|
||||
error.message?.includes('timeout')
|
||||
) {
|
||||
errorMessage = 'Request timed out. Please try again.';
|
||||
} else if (error.response?.data?.error) {
|
||||
errorMessage = error.response.data.error;
|
||||
}
|
||||
|
||||
notifications.show({
|
||||
title: 'Error',
|
||||
message: errorMessage,
|
||||
color: 'red',
|
||||
});
|
||||
} finally {
|
||||
setSubmitting(false);
|
||||
setUploading(false);
|
||||
}
|
||||
},
|
||||
});
|
||||
|
||||
useEffect(() => {
|
||||
if (logo) {
|
||||
formik.setValues({
|
||||
name: logo.name || '',
|
||||
url: logo.url || '',
|
||||
});
|
||||
setLogoPreview(logo.cache_url);
|
||||
} else {
|
||||
formik.resetForm();
|
||||
setLogoPreview(null);
|
||||
}
|
||||
// Clear any selected file when logo changes
|
||||
setSelectedFile(null);
|
||||
}, [logo, isOpen]);
|
||||
|
||||
const handleFileSelect = (files) => {
|
||||
if (files.length === 0) return;
|
||||
|
||||
const file = files[0];
|
||||
|
||||
// Validate file size on frontend first
|
||||
if (file.size > 5 * 1024 * 1024) {
|
||||
// 5MB
|
||||
notifications.show({
|
||||
title: 'Error',
|
||||
message: 'File too large. Maximum size is 5MB.',
|
||||
color: 'red',
|
||||
});
|
||||
return;
|
||||
}
|
||||
|
||||
// Store the file for later upload and create preview
|
||||
setSelectedFile(file);
|
||||
|
||||
// Generate a local preview URL
|
||||
const previewUrl = URL.createObjectURL(file);
|
||||
setLogoPreview(previewUrl);
|
||||
|
||||
// Auto-fill the name field if empty
|
||||
if (!formik.values.name) {
|
||||
const nameWithoutExtension = file.name.replace(/\.[^/.]+$/, '');
|
||||
formik.setFieldValue('name', nameWithoutExtension);
|
||||
}
|
||||
|
||||
// Set a placeholder URL (will be replaced after upload)
|
||||
formik.setFieldValue('url', 'file://pending-upload');
|
||||
};
|
||||
|
||||
const handleUrlChange = (event) => {
|
||||
const url = event.target.value;
|
||||
formik.setFieldValue('url', url);
|
||||
|
||||
// Clear any selected file when manually entering URL
|
||||
if (selectedFile) {
|
||||
setSelectedFile(null);
|
||||
// Revoke the object URL to free memory
|
||||
if (logoPreview && logoPreview.startsWith('blob:')) {
|
||||
URL.revokeObjectURL(logoPreview);
|
||||
}
|
||||
}
|
||||
|
||||
// Update preview for remote URLs
|
||||
if (url && url.startsWith('http')) {
|
||||
setLogoPreview(url);
|
||||
} else if (!url) {
|
||||
setLogoPreview(null);
|
||||
}
|
||||
};
|
||||
|
||||
  // Clean up object URLs when component unmounts or preview changes
  useEffect(() => {
    return () => {
      if (logoPreview && logoPreview.startsWith('blob:')) {
        URL.revokeObjectURL(logoPreview);
      }
    };
  }, [logoPreview]);
|
||||
return (
|
||||
<Modal
|
||||
opened={isOpen}
|
||||
onClose={onClose}
|
||||
title={logo ? 'Edit Logo' : 'Add Logo'}
|
||||
size="md"
|
||||
>
|
||||
<form onSubmit={formik.handleSubmit}>
|
||||
<Stack spacing="md">
|
||||
{/* Logo Preview */}
|
||||
{logoPreview && (
|
||||
<Center>
|
||||
<Box>
|
||||
<Text size="sm" color="dimmed" mb="xs" ta="center">
|
||||
Preview
|
||||
</Text>
|
||||
<Image
|
||||
src={logoPreview}
|
||||
alt="Logo preview"
|
||||
width={100}
|
||||
height={75}
|
||||
fit="contain"
|
||||
fallbackSrc="/logo.png"
|
||||
style={{
|
||||
transition: 'transform 0.3s ease',
|
||||
cursor: 'pointer',
|
||||
':hover': {
|
||||
transform: 'scale(1.5)',
|
||||
},
|
||||
}}
|
||||
onMouseEnter={(e) => {
|
||||
e.target.style.transform = 'scale(1.5)';
|
||||
}}
|
||||
onMouseLeave={(e) => {
|
||||
e.target.style.transform = 'scale(1)';
|
||||
}}
|
||||
/>
|
||||
</Box>
|
||||
</Center>
|
||||
)}
|
||||
|
||||
return (
|
||||
<Modal
|
||||
opened={isOpen}
|
||||
onClose={onClose}
|
||||
title={logo ? 'Edit Logo' : 'Add Logo'}
|
||||
size="md"
|
||||
>
|
||||
<form onSubmit={formik.handleSubmit}>
|
||||
<Stack spacing="md">
|
||||
{/* Logo Preview */}
|
||||
{logoPreview && (
|
||||
<Center>
|
||||
<Box>
|
||||
<Text size="sm" color="dimmed" mb="xs" ta="center">
|
||||
Preview
|
||||
</Text>
|
||||
<Image
|
||||
src={logoPreview}
|
||||
alt="Logo preview"
|
||||
width={100}
|
||||
height={75}
|
||||
fit="contain"
|
||||
fallbackSrc="/logo.png"
|
||||
style={{
|
||||
transition: 'transform 0.3s ease',
|
||||
cursor: 'pointer',
|
||||
':hover': {
|
||||
transform: 'scale(1.5)'
|
||||
}
|
||||
}}
|
||||
onMouseEnter={(e) => {
|
||||
e.target.style.transform = 'scale(1.5)';
|
||||
}}
|
||||
onMouseLeave={(e) => {
|
||||
e.target.style.transform = 'scale(1)';
|
||||
}}
|
||||
/>
|
||||
</Box>
|
||||
</Center>
|
||||
)}
|
||||
{/* File Upload */}
|
||||
<Box>
|
||||
<Text size="sm" fw={500} mb="xs">
|
||||
Upload Logo File
|
||||
</Text>
|
||||
<Dropzone
|
||||
onDrop={handleFileSelect}
|
||||
loading={uploading}
|
||||
accept={{
|
||||
'image/*': [
|
||||
'.png',
|
||||
'.jpg',
|
||||
'.jpeg',
|
||||
'.gif',
|
||||
'.webp',
|
||||
'.bmp',
|
||||
'.svg',
|
||||
],
|
||||
}}
|
||||
multiple={false}
|
||||
maxSize={5 * 1024 * 1024} // 5MB limit
|
||||
>
|
||||
<Group
|
||||
justify="center"
|
||||
gap="xl"
|
||||
mih={120}
|
||||
style={{ pointerEvents: 'none' }}
|
||||
>
|
||||
<Dropzone.Accept>
|
||||
<Upload size={50} color="green" />
|
||||
</Dropzone.Accept>
|
||||
<Dropzone.Reject>
|
||||
<X size={50} color="red" />
|
||||
</Dropzone.Reject>
|
||||
<Dropzone.Idle>
|
||||
<FileImage size={50} />
|
||||
</Dropzone.Idle>
|
||||
|
||||
{/* File Upload */}
|
||||
<Box>
|
||||
<Text size="sm" fw={500} mb="xs">
|
||||
Upload Logo File
|
||||
</Text>
|
||||
<Dropzone
|
||||
onDrop={handleFileSelect}
|
||||
loading={uploading}
|
||||
accept={{ "image/*": [".png", ".jpg", ".jpeg", ".gif", ".webp", ".bmp", ".svg"] }}
|
||||
multiple={false}
|
||||
maxSize={5 * 1024 * 1024} // 5MB limit
|
||||
>
|
||||
<Group justify="center" gap="xl" mih={120} style={{ pointerEvents: 'none' }}>
|
||||
<Dropzone.Accept>
|
||||
<Upload size={50} color="green" />
|
||||
</Dropzone.Accept>
|
||||
<Dropzone.Reject>
|
||||
<X size={50} color="red" />
|
||||
</Dropzone.Reject>
|
||||
<Dropzone.Idle>
|
||||
<FileImage size={50} />
|
||||
</Dropzone.Idle>
|
||||
<div>
|
||||
<Text size="xl" inline>
|
||||
{selectedFile
|
||||
? `Selected: ${selectedFile.name}`
|
||||
: 'Drag image here or click to select'}
|
||||
</Text>
|
||||
<Text size="sm" color="dimmed" inline mt={7}>
|
||||
{selectedFile
|
||||
? 'File will be uploaded when you click Create/Update'
|
||||
: 'Supports PNG, JPEG, GIF, WebP, SVG files'}
|
||||
</Text>
|
||||
</div>
|
||||
</Group>
|
||||
</Dropzone>
|
||||
</Box>
|
||||
|
||||
<div>
|
||||
<Text size="xl" inline>
|
||||
{selectedFile ? `Selected: ${selectedFile.name}` : 'Drag image here or click to select'}
|
||||
</Text>
|
||||
<Text size="sm" color="dimmed" inline mt={7}>
|
||||
{selectedFile ? 'File will be uploaded when you click Create/Update' : 'Supports PNG, JPEG, GIF, WebP, SVG files'}
|
||||
</Text>
|
||||
</div>
|
||||
</Group>
|
||||
</Dropzone>
|
||||
</Box>
|
||||
<Divider label="OR" labelPosition="center" />
|
||||
|
||||
<Divider label="OR" labelPosition="center" />
|
||||
          {/* Manual URL Input */}
          <TextInput
            label="Logo URL"
            placeholder="https://example.com/logo.png"
            {...formik.getFieldProps('url')}
            onChange={handleUrlChange}
            error={formik.touched.url && formik.errors.url}
            disabled={!!selectedFile} // Disable when file is selected
          />

          <TextInput
            label="Name"
            placeholder="Enter logo name"
            {...formik.getFieldProps('name')}
            error={formik.touched.name && formik.errors.name}
          />

          {selectedFile && (
            <Text size="sm" color="blue">
              Selected file: {selectedFile.name} - will be uploaded when you
              submit
            </Text>
          )}

          <Group justify="flex-end" mt="md">
            <Button variant="light" onClick={onClose}>
              Cancel
            </Button>
            <Button type="submit" loading={formik.isSubmitting || uploading}>
              {logo ? 'Update' : 'Create'}
            </Button>
          </Group>
        </Stack>
      </form>
    </Modal>
  );
};

export default LogoForm;
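The reworked LogoForm adds an optional onSuccess callback that reports { type: 'create' | 'update', logo }; uploads skip the callback because API.uploadLogo already updates the store. A minimal sketch of how a parent might consume it (the parent state names and refreshLogos helper are assumptions, not part of the diff):

// Sketch (assumed parent component):
<LogoForm
  logo={selectedLogo}
  isOpen={logoFormOpen}
  onClose={() => setLogoFormOpen(false)}
  onSuccess={({ type, logo }) => {
    // e.g. patch or reload a local logo list after a create/update
    if (type === 'create' || type === 'update') {
      refreshLogos(); // assumed helper
    }
  }}
/>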
@ -27,6 +27,7 @@ import usePlaylistsStore from '../../store/playlists';
import { notifications } from '@mantine/notifications';
import { isNotEmpty, useForm } from '@mantine/form';
import useEPGsStore from '../../store/epgs';
import useVODStore from '../../store/useVODStore';
import M3UFilters from './M3UFilters';

const M3U = ({

@ -41,6 +42,7 @@ const M3U = ({
  const fetchChannelGroups = useChannelsStore((s) => s.fetchChannelGroups);
  const fetchPlaylists = usePlaylistsStore((s) => s.fetchPlaylists);
  const fetchEPGs = useEPGsStore((s) => s.fetchEPGs);
  const fetchCategories = useVODStore((s) => s.fetchCategories);

  const [playlist, setPlaylist] = useState(null);
  const [file, setFile] = useState(null);

@ -64,6 +66,8 @@ const M3U = ({
      username: '',
      password: '',
      stale_stream_days: 7,
      priority: 0,
      enable_vod: false,
    },

    validate: {

@ -92,6 +96,11 @@ const M3U = ({
          m3uAccount.stale_stream_days !== null
            ? m3uAccount.stale_stream_days
            : 7,
        priority:
          m3uAccount.priority !== undefined && m3uAccount.priority !== null
            ? m3uAccount.priority
            : 0,
        enable_vod: m3uAccount.enable_vod || false,
      });

      if (m3uAccount.account_type == 'XC') {

@ -164,6 +173,12 @@ const M3U = ({

      const updatedPlaylist = await API.getPlaylist(newPlaylist.id);
      await Promise.all([fetchChannelGroups(), fetchPlaylists(), fetchEPGs()]);

      // If this is an XC account with VOD enabled, also fetch VOD categories
      if (values.account_type === 'XC' && values.enable_vod) {
        fetchCategories();
      }

      console.log('opening group options');
      setPlaylist(updatedPlaylist);
      setGroupFilterModalOpen(true);

@ -272,6 +287,19 @@ const M3U = ({
            </Group>
          )}

          <Group justify="space-between">
            <Box>Enable VOD Scanning</Box>
            <Switch
              id="enable_vod"
              name="enable_vod"
              description="Scan and import VOD content (movies/series) from this Xtream account"
              key={form.key('enable_vod')}
              {...form.getInputProps('enable_vod', {
                type: 'checkbox',
              })}
            />
          </Group>

          <TextInput
            id="username"
            name="username"

@ -351,6 +379,15 @@ const M3U = ({
            {...form.getInputProps('stale_stream_days')}
          />

          <NumberInput
            min={0}
            max={999}
            label="VOD Priority"
            description="Priority for VOD provider selection (higher numbers = higher priority). Used when multiple providers offer the same content."
            {...form.getInputProps('priority')}
            key={form.key('priority')}
          />

          <Checkbox
            label="Is Active"
            description="Enable or disable this M3U account"

@ -374,7 +411,13 @@ const M3U = ({
            variant="filled"
            // color={theme.custom.colors.buttonPrimary}
            size="sm"
            onClick={() => setGroupFilterModalOpen(true)}
            onClick={() => {
              // If this is an XC account with VOD enabled, fetch VOD categories
              if (m3uAccount?.account_type === 'XC' && m3uAccount?.enable_vod) {
                fetchCategories();
              }
              setGroupFilterModalOpen(true);
            }}
          >
            Groups
          </Button>
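With the new enable_vod and priority fields, an Xtream Codes (XC) account submission carries flags like the following. This is an illustrative sketch only: the field names come from the form changes above, while the literal values are placeholders.

// Illustrative form values for an XC account with VOD scanning enabled:
const values = {
  account_type: 'XC',
  username: 'user',        // placeholder
  password: 'secret',      // placeholder
  stale_stream_days: 7,
  priority: 10,            // higher number = preferred VOD provider
  enable_vod: true,        // triggers fetchCategories() after the playlist refresh
};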
@ -27,27 +27,34 @@ import {
|
|||
Box,
|
||||
MultiSelect,
|
||||
Tooltip,
|
||||
Tabs,
|
||||
} from '@mantine/core';
|
||||
import { Info } from 'lucide-react';
|
||||
import useChannelsStore from '../../store/channels';
|
||||
import useVODStore from '../../store/useVODStore';
|
||||
import { CircleCheck, CircleX } from 'lucide-react';
|
||||
import { notifications } from '@mantine/notifications';
|
||||
import LiveGroupFilter from './LiveGroupFilter';
|
||||
import VODCategoryFilter from './VODCategoryFilter';
|
||||
|
||||
// Custom item component for MultiSelect with tooltip
|
||||
const OptionWithTooltip = forwardRef(({ label, description, ...others }, ref) => (
|
||||
<Tooltip label={description} withArrow>
|
||||
<div ref={ref} {...others}>
|
||||
{label}
|
||||
</div>
|
||||
</Tooltip>
|
||||
));
|
||||
const OptionWithTooltip = forwardRef(
|
||||
({ label, description, ...others }, ref) => (
|
||||
<Tooltip label={description} withArrow>
|
||||
<div ref={ref} {...others}>
|
||||
{label}
|
||||
</div>
|
||||
</Tooltip>
|
||||
)
|
||||
);
|
||||
|
||||
const M3UGroupFilter = ({ playlist = null, isOpen, onClose }) => {
|
||||
const channelGroups = useChannelsStore((s) => s.channelGroups);
|
||||
const profiles = useChannelsStore((s) => s.profiles);
|
||||
const fetchCategories = useVODStore((s) => s.fetchCategories);
|
||||
const [groupStates, setGroupStates] = useState([]);
|
||||
const [isLoading, setIsLoading] = useState(false);
|
||||
const [groupFilter, setGroupFilter] = useState('');
|
||||
const [movieCategoryStates, setMovieCategoryStates] = useState([]);
|
||||
const [seriesCategoryStates, setSeriesCategoryStates] = useState([]);
|
||||
|
||||
useEffect(() => {
|
||||
if (Object.keys(channelGroups).length === 0) {
|
||||
|
|
@ -60,9 +67,10 @@ const M3UGroupFilter = ({ playlist = null, isOpen, onClose }) => {
|
|||
let customProps = {};
|
||||
if (group.custom_properties) {
|
||||
try {
|
||||
customProps = typeof group.custom_properties === 'string'
|
||||
? JSON.parse(group.custom_properties)
|
||||
: group.custom_properties;
|
||||
customProps =
|
||||
typeof group.custom_properties === 'string'
|
||||
? JSON.parse(group.custom_properties)
|
||||
: group.custom_properties;
|
||||
} catch (e) {
|
||||
customProps = {};
|
||||
}
|
||||
|
|
@ -78,63 +86,47 @@ const M3UGroupFilter = ({ playlist = null, isOpen, onClose }) => {
|
|||
);
|
||||
}, [playlist, channelGroups]);
|
||||
|
||||
const toggleGroupEnabled = (id) => {
|
||||
setGroupStates(
|
||||
groupStates.map((state) => ({
|
||||
...state,
|
||||
enabled: state.channel_group == id ? !state.enabled : state.enabled,
|
||||
}))
|
||||
);
|
||||
};
|
||||
|
||||
const toggleAutoSync = (id) => {
|
||||
setGroupStates(
|
||||
groupStates.map((state) => ({
|
||||
...state,
|
||||
auto_channel_sync: state.channel_group == id ? !state.auto_channel_sync : state.auto_channel_sync,
|
||||
}))
|
||||
);
|
||||
};
|
||||
|
||||
const updateChannelStart = (id, value) => {
|
||||
setGroupStates(
|
||||
groupStates.map((state) => ({
|
||||
...state,
|
||||
auto_sync_channel_start: state.channel_group == id ? value : state.auto_sync_channel_start,
|
||||
}))
|
||||
);
|
||||
};
|
||||
|
||||
// Toggle force_dummy_epg in custom_properties for a group
|
||||
const toggleForceDummyEPG = (id) => {
|
||||
setGroupStates(
|
||||
groupStates.map((state) => {
|
||||
if (state.channel_group == id) {
|
||||
const customProps = { ...(state.custom_properties || {}) };
|
||||
customProps.force_dummy_epg = !customProps.force_dummy_epg;
|
||||
return {
|
||||
...state,
|
||||
custom_properties: customProps,
|
||||
};
|
||||
}
|
||||
return state;
|
||||
})
|
||||
);
|
||||
};
|
||||
// Fetch VOD categories when modal opens for XC accounts with VOD enabled
|
||||
useEffect(() => {
|
||||
if (
|
||||
isOpen &&
|
||||
playlist &&
|
||||
playlist.account_type === 'XC' &&
|
||||
playlist.enable_vod
|
||||
) {
|
||||
fetchCategories();
|
||||
}
|
||||
}, [isOpen, playlist, fetchCategories]);
|
||||
|
||||
const submit = async () => {
|
||||
setIsLoading(true);
|
||||
try {
|
||||
// Prepare groupStates for API: custom_properties must be stringified
|
||||
const payload = groupStates.map((state) => ({
|
||||
...state,
|
||||
custom_properties: state.custom_properties
|
||||
? JSON.stringify(state.custom_properties)
|
||||
: undefined,
|
||||
}));
|
||||
const groupSettings = groupStates
|
||||
.map((state) => ({
|
||||
...state,
|
||||
custom_properties: state.custom_properties
|
||||
? JSON.stringify(state.custom_properties)
|
||||
: undefined,
|
||||
}))
|
||||
.filter((group) => group.enabled !== group.original_enabled);
|
||||
|
||||
const categorySettings = movieCategoryStates
|
||||
.concat(seriesCategoryStates)
|
||||
.map((state) => ({
|
||||
...state,
|
||||
custom_properties: state.custom_properties
|
||||
? JSON.stringify(state.custom_properties)
|
||||
: undefined,
|
||||
}))
|
||||
.filter((state) => state.enabled !== state.original_enabled);
|
||||
|
||||
// Update group settings via API endpoint
|
||||
await API.updateM3UGroupSettings(playlist.id, payload);
|
||||
await API.updateM3UGroupSettings(
|
||||
playlist.id,
|
||||
groupSettings,
|
||||
categorySettings
|
||||
);
|
||||
|
||||
// Show notification about the refresh process
|
||||
notifications.show({
|
||||
|
|
@ -149,7 +141,8 @@ const M3UGroupFilter = ({ playlist = null, isOpen, onClose }) => {
|
|||
|
||||
notifications.show({
|
||||
title: 'M3U Refresh Started',
|
||||
message: 'The M3U account is being refreshed. Channel sync will occur automatically after parsing completes.',
|
||||
message:
|
||||
'The M3U account is being refreshed. Channel sync will occur automatically after parsing completes.',
|
||||
color: 'blue',
|
||||
autoClose: 5000,
|
||||
});
|
||||
|
|
@ -162,28 +155,6 @@ const M3UGroupFilter = ({ playlist = null, isOpen, onClose }) => {
|
|||
}
|
||||
};
|
||||
|
||||
const selectAll = () => {
|
||||
setGroupStates(
|
||||
groupStates.map((state) => ({
|
||||
...state,
|
||||
enabled: state.name.toLowerCase().includes(groupFilter.toLowerCase())
|
||||
? true
|
||||
: state.enabled,
|
||||
}))
|
||||
);
|
||||
};
|
||||
|
||||
const deselectAll = () => {
|
||||
setGroupStates(
|
||||
groupStates.map((state) => ({
|
||||
...state,
|
||||
enabled: state.name.toLowerCase().includes(groupFilter.toLowerCase())
|
||||
? false
|
||||
: state.enabled,
|
||||
}))
|
||||
);
|
||||
};
|
||||
|
||||
if (!isOpen) {
|
||||
return <></>;
|
||||
}
|
||||
|
|
@ -198,475 +169,39 @@ const M3UGroupFilter = ({ playlist = null, isOpen, onClose }) => {
|
|||
>
|
||||
<LoadingOverlay visible={isLoading} overlayBlur={2} />
|
||||
<Stack>
|
||||
<Alert icon={<Info size={16} />} color="blue" variant="light">
|
||||
<Text size="sm">
|
||||
<strong>Auto Channel Sync:</strong> When enabled, channels will be automatically created for all streams in the group during M3U updates,
|
||||
and removed when streams are no longer present. Set a starting channel number for each group to organize your channels.
|
||||
</Text>
|
||||
</Alert>
|
||||
<Tabs defaultValue="live">
|
||||
<Tabs.List>
|
||||
<Tabs.Tab value="live">Live</Tabs.Tab>
|
||||
<Tabs.Tab value="vod-movie">VOD - Movies</Tabs.Tab>
|
||||
<Tabs.Tab value="vod-series">VOD - Series</Tabs.Tab>
|
||||
</Tabs.List>
|
||||
|
||||
<Flex gap="sm">
|
||||
<TextInput
|
||||
placeholder="Filter groups..."
|
||||
value={groupFilter}
|
||||
onChange={(event) => setGroupFilter(event.currentTarget.value)}
|
||||
style={{ flex: 1 }}
|
||||
size="xs"
|
||||
/>
|
||||
<Button variant="default" size="xs" onClick={selectAll}>
|
||||
Select Visible
|
||||
</Button>
|
||||
<Button variant="default" size="xs" onClick={deselectAll}>
|
||||
Deselect Visible
|
||||
</Button>
|
||||
</Flex>
|
||||
<Tabs.Panel value="live">
|
||||
<LiveGroupFilter
|
||||
playlist={playlist}
|
||||
groupStates={groupStates}
|
||||
setGroupStates={setGroupStates}
|
||||
/>
|
||||
</Tabs.Panel>
|
||||
|
||||
<Divider label="Groups & Auto Sync Settings" labelPosition="center" />
|
||||
<Tabs.Panel value="vod-movie">
|
||||
<VODCategoryFilter
|
||||
playlist={playlist}
|
||||
categoryStates={movieCategoryStates}
|
||||
setCategoryStates={setMovieCategoryStates}
|
||||
type="movie"
|
||||
/>
|
||||
</Tabs.Panel>
|
||||
|
||||
<Box style={{ maxHeight: '50vh', overflowY: 'auto' }}>
|
||||
<SimpleGrid
|
||||
cols={{ base: 1, sm: 2, md: 3 }}
|
||||
spacing="xs"
|
||||
verticalSpacing="xs"
|
||||
>
|
||||
{groupStates
|
||||
.filter((group) =>
|
||||
group.name.toLowerCase().includes(groupFilter.toLowerCase())
|
||||
)
|
||||
.sort((a, b) => a.name.localeCompare(b.name))
|
||||
.map((group) => (
|
||||
<Group key={group.channel_group} spacing="xs" style={{
|
||||
padding: '8px',
|
||||
border: '1px solid #444',
|
||||
borderRadius: '8px',
|
||||
backgroundColor: group.enabled ? '#2A2A2E' : '#1E1E22',
|
||||
flexDirection: 'column',
|
||||
alignItems: 'stretch'
|
||||
}}>
|
||||
{/* Group Enable/Disable Button */}
|
||||
<Button
|
||||
color={group.enabled ? 'green' : 'gray'}
|
||||
variant="filled"
|
||||
onClick={() => toggleGroupEnabled(group.channel_group)}
|
||||
radius="md"
|
||||
size="xs"
|
||||
leftSection={
|
||||
group.enabled ? (
|
||||
<CircleCheck size={14} />
|
||||
) : (
|
||||
<CircleX size={14} />
|
||||
)
|
||||
}
|
||||
fullWidth
|
||||
>
|
||||
<Text size="xs" truncate>
|
||||
{group.name}
|
||||
</Text>
|
||||
</Button>
|
||||
|
||||
{/* Auto Sync Controls */}
|
||||
<Stack spacing="xs" style={{ '--stack-gap': '4px' }}>
|
||||
<Flex align="center" gap="xs">
|
||||
<Checkbox
|
||||
label="Auto Channel Sync"
|
||||
checked={group.auto_channel_sync && group.enabled}
|
||||
disabled={!group.enabled}
|
||||
onChange={() => toggleAutoSync(group.channel_group)}
|
||||
size="xs"
|
||||
/>
|
||||
</Flex>
|
||||
|
||||
{group.auto_channel_sync && group.enabled && (
|
||||
<>
|
||||
<NumberInput
|
||||
label="Start Channel #"
|
||||
value={group.auto_sync_channel_start}
|
||||
onChange={(value) => updateChannelStart(group.channel_group, value)}
|
||||
min={1}
|
||||
step={1}
|
||||
size="xs"
|
||||
precision={1}
|
||||
/>
|
||||
|
||||
{/* Auto Channel Sync Options Multi-Select */}
|
||||
<MultiSelect
|
||||
label="Advanced Options"
|
||||
placeholder="Select options..."
|
||||
data={[
|
||||
{
|
||||
value: 'force_dummy_epg',
|
||||
label: 'Force Dummy EPG',
|
||||
description: 'Assign a dummy EPG to all channels in this group if no EPG is matched',
|
||||
},
|
||||
{
|
||||
value: 'group_override',
|
||||
label: 'Override Channel Group',
|
||||
description: 'Override the group assignment for all channels in this group',
|
||||
},
|
||||
{
|
||||
value: 'name_regex',
|
||||
label: 'Channel Name Find & Replace (Regex)',
|
||||
description: 'Find and replace part of the channel name using a regex pattern',
|
||||
},
|
||||
{
|
||||
value: 'name_match_regex',
|
||||
label: 'Channel Name Filter (Regex)',
|
||||
description: 'Only include channels whose names match this regex pattern',
|
||||
},
|
||||
{
|
||||
value: 'profile_assignment',
|
||||
label: 'Channel Profile Assignment',
|
||||
description: 'Specify which channel profiles the auto-synced channels should be added to',
|
||||
},
|
||||
{
|
||||
value: 'channel_sort_order',
|
||||
label: 'Channel Sort Order',
|
||||
description: 'Specify the order in which channels are created (name, tvg_id, updated_at)',
|
||||
},
|
||||
]}
|
||||
itemComponent={OptionWithTooltip}
|
||||
value={(() => {
|
||||
const selectedValues = [];
|
||||
if (group.custom_properties?.force_dummy_epg) {
|
||||
selectedValues.push('force_dummy_epg');
|
||||
}
|
||||
if (group.custom_properties?.group_override !== undefined) {
|
||||
selectedValues.push('group_override');
|
||||
}
|
||||
if (
|
||||
group.custom_properties?.name_regex_pattern !== undefined ||
|
||||
group.custom_properties?.name_replace_pattern !== undefined
|
||||
) {
|
||||
selectedValues.push('name_regex');
|
||||
}
|
||||
if (group.custom_properties?.name_match_regex !== undefined) {
|
||||
selectedValues.push('name_match_regex');
|
||||
}
|
||||
if (group.custom_properties?.channel_profile_ids !== undefined) {
|
||||
selectedValues.push('profile_assignment');
|
||||
}
|
||||
if (group.custom_properties?.channel_sort_order !== undefined) {
|
||||
selectedValues.push('channel_sort_order');
|
||||
}
|
||||
return selectedValues;
|
||||
})()}
|
||||
onChange={(values) => {
|
||||
// MultiSelect always returns an array
|
||||
const selectedOptions = values || [];
|
||||
|
||||
setGroupStates(
|
||||
groupStates.map((state) => {
|
||||
if (state.channel_group === group.channel_group) {
|
||||
let newCustomProps = { ...(state.custom_properties || {}) };
|
||||
|
||||
// Handle force_dummy_epg
|
||||
if (selectedOptions.includes('force_dummy_epg')) {
|
||||
newCustomProps.force_dummy_epg = true;
|
||||
} else {
|
||||
delete newCustomProps.force_dummy_epg;
|
||||
}
|
||||
|
||||
// Handle group_override
|
||||
if (selectedOptions.includes('group_override')) {
|
||||
if (newCustomProps.group_override === undefined) {
|
||||
newCustomProps.group_override = null;
|
||||
}
|
||||
} else {
|
||||
delete newCustomProps.group_override;
|
||||
}
|
||||
|
||||
// Handle name_regex
|
||||
if (selectedOptions.includes('name_regex')) {
|
||||
if (newCustomProps.name_regex_pattern === undefined) {
|
||||
newCustomProps.name_regex_pattern = '';
|
||||
}
|
||||
if (newCustomProps.name_replace_pattern === undefined) {
|
||||
newCustomProps.name_replace_pattern = '';
|
||||
}
|
||||
} else {
|
||||
delete newCustomProps.name_regex_pattern;
|
||||
delete newCustomProps.name_replace_pattern;
|
||||
}
|
||||
|
||||
// Handle name_match_regex
|
||||
if (selectedOptions.includes('name_match_regex')) {
|
||||
if (newCustomProps.name_match_regex === undefined) {
|
||||
newCustomProps.name_match_regex = '';
|
||||
}
|
||||
} else {
|
||||
delete newCustomProps.name_match_regex;
|
||||
}
|
||||
|
||||
// Handle profile_assignment
|
||||
if (selectedOptions.includes('profile_assignment')) {
|
||||
if (newCustomProps.channel_profile_ids === undefined) {
|
||||
newCustomProps.channel_profile_ids = [];
|
||||
}
|
||||
} else {
|
||||
delete newCustomProps.channel_profile_ids;
|
||||
}
|
||||
// Handle channel_sort_order
|
||||
if (selectedOptions.includes('channel_sort_order')) {
|
||||
if (newCustomProps.channel_sort_order === undefined) {
|
||||
newCustomProps.channel_sort_order = '';
|
||||
}
|
||||
// Keep channel_sort_reverse if it exists
|
||||
if (newCustomProps.channel_sort_reverse === undefined) {
|
||||
newCustomProps.channel_sort_reverse = false;
|
||||
}
|
||||
} else {
|
||||
delete newCustomProps.channel_sort_order;
|
||||
delete newCustomProps.channel_sort_reverse; // Remove reverse when sort is removed
|
||||
}
|
||||
|
||||
return {
|
||||
...state,
|
||||
custom_properties: newCustomProps,
|
||||
};
|
||||
}
|
||||
return state;
|
||||
})
|
||||
);
|
||||
}}
|
||||
clearable
|
||||
size="xs"
|
||||
/>
|
||||
{/* Show only channel_sort_order if selected */}
|
||||
{group.custom_properties?.channel_sort_order !== undefined && (
|
||||
<>
|
||||
<Select
|
||||
label="Channel Sort Order"
|
||||
placeholder="Select sort order..."
|
||||
value={group.custom_properties?.channel_sort_order || ''}
|
||||
onChange={(value) => {
|
||||
setGroupStates(
|
||||
groupStates.map((state) => {
|
||||
if (state.channel_group === group.channel_group) {
|
||||
return {
|
||||
...state,
|
||||
custom_properties: {
|
||||
...state.custom_properties,
|
||||
channel_sort_order: value || '',
|
||||
},
|
||||
};
|
||||
}
|
||||
return state;
|
||||
})
|
||||
);
|
||||
}}
|
||||
data={[
|
||||
{ value: '', label: 'Provider Order (Default)' },
|
||||
{ value: 'name', label: 'Name' },
|
||||
{ value: 'tvg_id', label: 'TVG ID' },
|
||||
{ value: 'updated_at', label: 'Updated At' },
|
||||
]}
|
||||
clearable
|
||||
searchable
|
||||
size="xs"
|
||||
/>
|
||||
|
||||
{/* Add reverse sort checkbox when sort order is selected (including default) */}
|
||||
{group.custom_properties?.channel_sort_order !== undefined && (
|
||||
<Flex align="center" gap="xs" mt="xs">
|
||||
<Checkbox
|
||||
label="Reverse Sort Order"
|
||||
checked={group.custom_properties?.channel_sort_reverse || false}
|
||||
onChange={(event) => {
|
||||
setGroupStates(
|
||||
groupStates.map((state) => {
|
||||
if (state.channel_group === group.channel_group) {
|
||||
return {
|
||||
...state,
|
||||
custom_properties: {
|
||||
...state.custom_properties,
|
||||
channel_sort_reverse: event.target.checked,
|
||||
},
|
||||
};
|
||||
}
|
||||
return state;
|
||||
})
|
||||
);
|
||||
}}
|
||||
size="xs"
|
||||
/>
|
||||
</Flex>
|
||||
)}
|
||||
</>
|
||||
)}
|
||||
|
||||
{/* Show profile selection only if profile_assignment is selected */}
|
||||
{group.custom_properties?.channel_profile_ids !== undefined && (
|
||||
<Tooltip
|
||||
label="Select which channel profiles the auto-synced channels should be added to. Leave empty to add to all profiles."
|
||||
withArrow
|
||||
>
|
||||
<MultiSelect
|
||||
label="Channel Profiles"
|
||||
placeholder="Select profiles..."
|
||||
value={group.custom_properties?.channel_profile_ids || []}
|
||||
onChange={(value) => {
|
||||
setGroupStates(
|
||||
groupStates.map((state) => {
|
||||
if (state.channel_group === group.channel_group) {
|
||||
return {
|
||||
...state,
|
||||
custom_properties: {
|
||||
...state.custom_properties,
|
||||
channel_profile_ids: value || [],
|
||||
},
|
||||
};
|
||||
}
|
||||
return state;
|
||||
})
|
||||
);
|
||||
}}
|
||||
data={Object.values(profiles).map((profile) => ({
|
||||
value: profile.id.toString(),
|
||||
label: profile.name,
|
||||
}))}
|
||||
clearable
|
||||
searchable
|
||||
size="xs"
|
||||
/>
|
||||
</Tooltip>
|
||||
)}
|
||||
|
||||
{/* Show group select only if group_override is selected */}
|
||||
{group.custom_properties?.group_override !== undefined && (
|
||||
<Tooltip
|
||||
label="Select a group to override the assignment for all channels in this group."
|
||||
withArrow
|
||||
>
|
||||
<Select
|
||||
label="Override Channel Group"
|
||||
placeholder="Choose group..."
|
||||
value={group.custom_properties?.group_override?.toString() || null}
|
||||
onChange={(value) => {
|
||||
const newValue = value ? parseInt(value) : null;
|
||||
setGroupStates(
|
||||
groupStates.map((state) => {
|
||||
if (state.channel_group === group.channel_group) {
|
||||
return {
|
||||
...state,
|
||||
custom_properties: {
|
||||
...state.custom_properties,
|
||||
group_override: newValue,
|
||||
},
|
||||
};
|
||||
}
|
||||
return state;
|
||||
})
|
||||
);
|
||||
}}
|
||||
data={Object.values(channelGroups).map((g) => ({
|
||||
value: g.id.toString(),
|
||||
label: g.name,
|
||||
}))}
|
||||
clearable
|
||||
searchable
|
||||
size="xs"
|
||||
/>
|
||||
</Tooltip>
|
||||
)}
|
||||
|
||||
{/* Show regex fields only if name_regex is selected */}
|
||||
{(group.custom_properties?.name_regex_pattern !== undefined ||
|
||||
group.custom_properties?.name_replace_pattern !== undefined) && (
|
||||
<>
|
||||
<Tooltip
|
||||
label="Regex pattern to find in the channel name. Example: ^.*? - PPV\\d+ - (.+)$"
|
||||
withArrow
|
||||
>
|
||||
<TextInput
|
||||
label="Channel Name Find (Regex)"
|
||||
placeholder="e.g. ^.*? - PPV\\d+ - (.+)$"
|
||||
value={group.custom_properties?.name_regex_pattern || ''}
|
||||
onChange={e => {
|
||||
const val = e.currentTarget.value;
|
||||
setGroupStates(
|
||||
groupStates.map(state =>
|
||||
state.channel_group === group.channel_group
|
||||
? {
|
||||
...state,
|
||||
custom_properties: {
|
||||
...state.custom_properties,
|
||||
name_regex_pattern: val,
|
||||
},
|
||||
}
|
||||
: state
|
||||
)
|
||||
);
|
||||
}}
|
||||
size="xs"
|
||||
/>
|
||||
</Tooltip>
|
||||
<Tooltip
|
||||
label="Replacement pattern for the channel name. Example: $1"
|
||||
withArrow
|
||||
>
|
||||
<TextInput
|
||||
label="Channel Name Replace"
|
||||
placeholder="e.g. $1"
|
||||
value={group.custom_properties?.name_replace_pattern || ''}
|
||||
onChange={e => {
|
||||
const val = e.currentTarget.value;
|
||||
setGroupStates(
|
||||
groupStates.map(state =>
|
||||
state.channel_group === group.channel_group
|
||||
? {
|
||||
...state,
|
||||
custom_properties: {
|
||||
...state.custom_properties,
|
||||
name_replace_pattern: val,
|
||||
},
|
||||
}
|
||||
: state
|
||||
)
|
||||
);
|
||||
}}
|
||||
size="xs"
|
||||
/>
|
||||
</Tooltip>
|
||||
</>
|
||||
)}
|
||||
|
||||
{/* Show name_match_regex field only if selected */}
|
||||
{group.custom_properties?.name_match_regex !== undefined && (
|
||||
<Tooltip
|
||||
label="Only channels whose names match this regex will be included. Example: ^Sports.*"
|
||||
withArrow
|
||||
>
|
||||
<TextInput
|
||||
label="Channel Name Filter (Regex)"
|
||||
placeholder="e.g. ^Sports.*"
|
||||
value={group.custom_properties?.name_match_regex || ''}
|
||||
onChange={e => {
|
||||
const val = e.currentTarget.value;
|
||||
setGroupStates(
|
||||
groupStates.map(state =>
|
||||
state.channel_group === group.channel_group
|
||||
? {
|
||||
...state,
|
||||
custom_properties: {
|
||||
...state.custom_properties,
|
||||
name_match_regex: val,
|
||||
},
|
||||
}
|
||||
: state
|
||||
)
|
||||
);
|
||||
}}
|
||||
size="xs"
|
||||
/>
|
||||
</Tooltip>
|
||||
)}
|
||||
</>
|
||||
)}
|
||||
</Stack>
|
||||
</Group>
|
||||
))}
|
||||
</SimpleGrid>
|
||||
</Box>
|
||||
<Tabs.Panel value="vod-series">
|
||||
<VODCategoryFilter
|
||||
playlist={playlist}
|
||||
categoryStates={seriesCategoryStates}
|
||||
setCategoryStates={setSeriesCategoryStates}
|
||||
type="series"
|
||||
/>
|
||||
</Tabs.Panel>
|
||||
</Tabs>
|
||||
|
||||
<Flex mih={50} gap="xs" justify="flex-end" align="flex-end">
|
||||
<Button variant="default" onClick={onClose} size="xs">
|
||||
|
|
@ -687,4 +222,4 @@ const M3UGroupFilter = ({ playlist = null, isOpen, onClose }) => {
|
|||
);
|
||||
};
|
||||
|
||||
export default M3UGroupFilter;
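The new submit handler in M3UGroupFilter only sends entries whose enabled flag actually changed, with custom_properties stringified before the call. A rough sketch of the resulting request (the shapes follow the submit code above; the literal IDs and values are made up):

// Sketch of the payload shape passed to API.updateM3UGroupSettings (illustrative values):
const groupSettings = [
  {
    channel_group: 12,
    enabled: true,
    original_enabled: false,
    custom_properties: '{"force_dummy_epg":true}',
  },
];
const categorySettings = [
  { id: 34, category_type: 'movie', enabled: true, original_enabled: false },
];
await API.updateM3UGroupSettings(playlist.id, groupSettings, categorySettings);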
frontend/src/components/forms/VODCategoryFilter.jsx (new file, 156 lines)

@ -0,0 +1,156 @@
// Modal.js
import React, { useState, useEffect } from 'react';
import {
  TextInput,
  Button,
  Flex,
  Stack,
  Group,
  SimpleGrid,
  Text,
  Divider,
  Box,
} from '@mantine/core';
import { CircleCheck, CircleX } from 'lucide-react';
import useVODStore from '../../store/useVODStore';

const VODCategoryFilter = ({
  playlist = null,
  categoryStates,
  setCategoryStates,
  type,
}) => {
  const categories = useVODStore((s) => s.categories);
  const [filter, setFilter] = useState('');

  useEffect(() => {
    if (Object.keys(categories).length === 0) {
      return;
    }

    console.log(categories);

    setCategoryStates(
      Object.values(categories)
        .filter(
          (cat) =>
            cat.m3u_accounts.find((acc) => acc.m3u_account == playlist.id) &&
            cat.category_type == type
        )
        .map((cat) => {
          const match = cat.m3u_accounts.find(
            (acc) => acc.m3u_account == playlist.id
          );
          if (match) {
            return {
              ...cat,
              enabled: match.enabled || false, // Keep user's previous choice, default to false for new categories
              original_enabled: match.enabled,
            };
          }
        })
    );
  }, [categories, playlist.id, setCategoryStates, type]);

  const toggleEnabled = (id) => {
    setCategoryStates(
      categoryStates.map((state) => ({
        ...state,
        enabled: state.id == id ? !state.enabled : state.enabled,
      }))
    );
  };

  const selectAll = () => {
    setCategoryStates(
      categoryStates.map((state) => ({
        ...state,
        enabled: state.name.toLowerCase().includes(filter.toLowerCase())
          ? true
          : state.enabled,
      }))
    );
  };

  const deselectAll = () => {
    setCategoryStates(
      categoryStates.map((state) => ({
        ...state,
        enabled: state.name.toLowerCase().includes(filter.toLowerCase())
          ? false
          : state.enabled,
      }))
    );
  };

  return (
    <Stack style={{ paddingTop: 10 }}>
      <Flex gap="sm">
        <TextInput
          placeholder="Filter categories..."
          value={filter}
          onChange={(event) => setFilter(event.currentTarget.value)}
          style={{ flex: 1 }}
          size="xs"
        />
        <Button variant="default" size="xs" onClick={selectAll}>
          Select Visible
        </Button>
        <Button variant="default" size="xs" onClick={deselectAll}>
          Deselect Visible
        </Button>
      </Flex>

      <Box style={{ maxHeight: '50vh', overflowY: 'auto' }}>
        <SimpleGrid
          cols={{ base: 1, sm: 2, md: 3 }}
          spacing="xs"
          verticalSpacing="xs"
        >
          {categoryStates
            .filter((category) => {
              return category.name.toLowerCase().includes(filter.toLowerCase());
            })
            .sort((a, b) => a.name.localeCompare(b.name))
            .map((category) => (
              <Group
                key={category.id}
                spacing="xs"
                style={{
                  padding: '8px',
                  border: '1px solid #444',
                  borderRadius: '8px',
                  backgroundColor: category.enabled ? '#2A2A2E' : '#1E1E22',
                  flexDirection: 'column',
                  alignItems: 'stretch',
                }}
              >
                {/* Group Enable/Disable Button */}
                <Button
                  color={category.enabled ? 'green' : 'gray'}
                  variant="filled"
                  onClick={() => toggleEnabled(category.id)}
                  radius="md"
                  size="xs"
                  leftSection={
                    category.enabled ? (
                      <CircleCheck size={14} />
                    ) : (
                      <CircleX size={14} />
                    )
                  }
                  fullWidth
                >
                  <Text size="xs" truncate>
                    {category.name}
                  </Text>
                </Button>
              </Group>
            ))}
        </SimpleGrid>
      </Box>
    </Stack>
  );
};

export default VODCategoryFilter;
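Each entry in categoryStates keeps the provider relation's enabled flag plus an original_enabled snapshot so the parent dialog can submit only the categories that changed. An illustrative sketch of one entry, derived from the useEffect above (the literal values are made up):

// Illustrative shape of one categoryStates entry:
{
  id: 34,
  name: 'Action',
  category_type: 'movie',
  m3u_accounts: [{ m3u_account: 5, enabled: false }],
  enabled: false,            // user's current choice in the dialog
  original_enabled: false,   // snapshot used to detect changes on submit
}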
(File diff suppressed because it is too large.)
|
|
@ -6,424 +6,433 @@ import useAuthStore from '../../store/auth';
|
|||
import { USER_LEVELS, USER_LEVEL_LABELS } from '../../constants';
|
||||
import useWarningsStore from '../../store/warnings';
|
||||
import {
  SquarePlus,
  SquareMinus,
  SquarePen,
  EllipsisVertical,
  Eye,
  EyeOff,
} from 'lucide-react';
|
||||
import {
  ActionIcon,
  Box,
  Text,
  Paper,
  Button,
  Flex,
  Group,
  useMantineTheme,
  Menu,
  UnstyledButton,
  LoadingOverlay,
  Stack,
} from '@mantine/core';
|
||||
import { CustomTable, useTable } from './CustomTable';
|
||||
import ConfirmationDialog from '../ConfirmationDialog';
|
||||
import useLocalStorage from '../../hooks/useLocalStorage';
|
||||
|
||||
const UserRowActions = ({ theme, row, editUser, deleteUser }) => {
  const [tableSize, _] = useLocalStorage('table-size', 'default');
  const authUser = useAuthStore((s) => s.user);

  const onEdit = useCallback(() => {
    editUser(row.original);
  }, [row.original, editUser]);

  const onDelete = useCallback(() => {
    deleteUser(row.original.id);
  }, [row.original.id, deleteUser]);

  const iconSize =
    tableSize == 'default' ? 'sm' : tableSize == 'compact' ? 'xs' : 'md';

return (
|
||||
<Box style={{ width: '100%', justifyContent: 'left' }}>
|
||||
<Group gap={2} justify="center">
|
||||
<ActionIcon
|
||||
size={iconSize}
|
||||
variant="transparent"
|
||||
color={theme.tailwind.yellow[3]}
|
||||
onClick={onEdit}
|
||||
disabled={authUser.user_level !== USER_LEVELS.ADMIN}
|
||||
>
|
||||
<SquarePen size="18" />
|
||||
</ActionIcon>
|
||||
return (
|
||||
<Box style={{ width: '100%', justifyContent: 'left' }}>
|
||||
<Group gap={2} justify="center">
|
||||
<ActionIcon
|
||||
size={iconSize}
|
||||
variant="transparent"
|
||||
color={theme.tailwind.yellow[3]}
|
||||
onClick={onEdit}
|
||||
disabled={authUser.user_level !== USER_LEVELS.ADMIN}
|
||||
>
|
||||
<SquarePen size="18" />
|
||||
</ActionIcon>
|
||||
|
||||
<ActionIcon
|
||||
size={iconSize}
|
||||
variant="transparent"
|
||||
color={theme.tailwind.red[6]}
|
||||
onClick={onDelete}
|
||||
disabled={authUser.user_level !== USER_LEVELS.ADMIN || authUser.id === row.original.id}
|
||||
>
|
||||
<SquareMinus size="18" />
|
||||
</ActionIcon>
|
||||
</Group>
|
||||
</Box>
|
||||
);
|
||||
<ActionIcon
|
||||
size={iconSize}
|
||||
variant="transparent"
|
||||
color={theme.tailwind.red[6]}
|
||||
onClick={onDelete}
|
||||
disabled={
|
||||
authUser.user_level !== USER_LEVELS.ADMIN ||
|
||||
authUser.id === row.original.id
|
||||
}
|
||||
>
|
||||
<SquareMinus size="18" />
|
||||
</ActionIcon>
|
||||
</Group>
|
||||
</Box>
|
||||
);
|
||||
};
|
||||
|
||||
const UsersTable = () => {
|
||||
const theme = useMantineTheme();
|
||||
const theme = useMantineTheme();
|
||||
|
||||
/**
|
||||
* STORES
|
||||
*/
|
||||
const users = useUsersStore((s) => s.users);
|
||||
const authUser = useAuthStore((s) => s.user);
|
||||
const isWarningSuppressed = useWarningsStore((s) => s.isWarningSuppressed);
|
||||
const suppressWarning = useWarningsStore((s) => s.suppressWarning);
|
||||
/**
|
||||
* STORES
|
||||
*/
|
||||
const users = useUsersStore((s) => s.users);
|
||||
const authUser = useAuthStore((s) => s.user);
|
||||
const isWarningSuppressed = useWarningsStore((s) => s.isWarningSuppressed);
|
||||
const suppressWarning = useWarningsStore((s) => s.suppressWarning);
|
||||
|
||||
/**
|
||||
* useState
|
||||
*/
|
||||
const [selectedUser, setSelectedUser] = useState(null);
|
||||
const [userModalOpen, setUserModalOpen] = useState(false);
|
||||
const [confirmDeleteOpen, setConfirmDeleteOpen] = useState(false);
|
||||
const [deleteTarget, setDeleteTarget] = useState(null);
|
||||
const [userToDelete, setUserToDelete] = useState(null);
|
||||
const [isLoading, setIsLoading] = useState(false);
|
||||
const [visiblePasswords, setVisiblePasswords] = useState({});
|
||||
/**
|
||||
* useState
|
||||
*/
|
||||
const [selectedUser, setSelectedUser] = useState(null);
|
||||
const [userModalOpen, setUserModalOpen] = useState(false);
|
||||
const [confirmDeleteOpen, setConfirmDeleteOpen] = useState(false);
|
||||
const [deleteTarget, setDeleteTarget] = useState(null);
|
||||
const [userToDelete, setUserToDelete] = useState(null);
|
||||
const [isLoading, setIsLoading] = useState(false);
|
||||
const [visiblePasswords, setVisiblePasswords] = useState({});
|
||||
|
||||
/**
|
||||
* Functions
|
||||
*/
|
||||
const togglePasswordVisibility = useCallback((userId) => {
|
||||
setVisiblePasswords(prev => ({
|
||||
...prev,
|
||||
[userId]: !prev[userId]
|
||||
}));
|
||||
}, []);
|
||||
/**
|
||||
* Functions
|
||||
*/
|
||||
const togglePasswordVisibility = useCallback((userId) => {
|
||||
setVisiblePasswords((prev) => ({
|
||||
...prev,
|
||||
[userId]: !prev[userId],
|
||||
}));
|
||||
}, []);
|
||||
|
||||
const executeDeleteUser = useCallback(async (id) => {
|
||||
setIsLoading(true);
|
||||
await API.deleteUser(id);
|
||||
setIsLoading(false);
|
||||
setConfirmDeleteOpen(false);
|
||||
}, []);
|
||||
const executeDeleteUser = useCallback(async (id) => {
|
||||
setIsLoading(true);
|
||||
await API.deleteUser(id);
|
||||
setIsLoading(false);
|
||||
setConfirmDeleteOpen(false);
|
||||
}, []);
|
||||
|
||||
const editUser = useCallback(async (user = null) => {
|
||||
setSelectedUser(user);
|
||||
setUserModalOpen(true);
|
||||
}, []);
|
||||
const editUser = useCallback(async (user = null) => {
|
||||
setSelectedUser(user);
|
||||
setUserModalOpen(true);
|
||||
}, []);
|
||||
|
||||
const deleteUser = useCallback(async (id) => {
|
||||
const user = users.find((u) => u.id === id);
|
||||
setUserToDelete(user);
|
||||
setDeleteTarget(id);
|
||||
const deleteUser = useCallback(
|
||||
async (id) => {
|
||||
const user = users.find((u) => u.id === id);
|
||||
setUserToDelete(user);
|
||||
setDeleteTarget(id);
|
||||
|
||||
if (isWarningSuppressed('delete-user')) {
|
||||
return executeDeleteUser(id);
|
||||
}
|
||||
if (isWarningSuppressed('delete-user')) {
|
||||
return executeDeleteUser(id);
|
||||
}
|
||||
|
||||
setConfirmDeleteOpen(true);
|
||||
}, [users, isWarningSuppressed, executeDeleteUser]);
|
||||
setConfirmDeleteOpen(true);
|
||||
},
|
||||
[users, isWarningSuppressed, executeDeleteUser]
|
||||
);
|
||||
|
||||
/**
|
||||
* useMemo
|
||||
*/
|
||||
const columns = useMemo(
|
||||
() => [
|
||||
{
|
||||
header: 'User Level',
|
||||
accessorKey: 'user_level',
|
||||
size: 120,
|
||||
cell: ({ getValue }) => (
|
||||
<Text size="sm">
|
||||
{USER_LEVEL_LABELS[getValue()]}
|
||||
</Text>
|
||||
),
|
||||
},
|
||||
{
|
||||
header: 'Username',
|
||||
accessorKey: 'username',
|
||||
size: 150,
|
||||
cell: ({ getValue }) => (
|
||||
<Box
|
||||
style={{
|
||||
whiteSpace: 'nowrap',
|
||||
overflow: 'hidden',
|
||||
textOverflow: 'ellipsis',
|
||||
}}
|
||||
>
|
||||
{getValue()}
|
||||
</Box>
|
||||
),
|
||||
},
|
||||
{
|
||||
id: 'name',
|
||||
header: 'Name',
|
||||
accessorFn: (row) => `${row.first_name || ''} ${row.last_name || ''}`.trim(),
|
||||
cell: ({ getValue }) => (
|
||||
<Box
|
||||
style={{
|
||||
whiteSpace: 'nowrap',
|
||||
overflow: 'hidden',
|
||||
textOverflow: 'ellipsis',
|
||||
}}
|
||||
>
|
||||
{getValue() || '-'}
|
||||
</Box>
|
||||
),
|
||||
},
|
||||
{
|
||||
header: 'Email',
|
||||
accessorKey: 'email',
|
||||
cell: ({ getValue }) => (
|
||||
<Box
|
||||
style={{
|
||||
whiteSpace: 'nowrap',
|
||||
overflow: 'hidden',
|
||||
textOverflow: 'ellipsis',
|
||||
}}
|
||||
>
|
||||
{getValue()}
|
||||
</Box>
|
||||
),
|
||||
},
|
||||
{
|
||||
header: 'Date Joined',
|
||||
accessorKey: 'date_joined',
|
||||
size: 125,
|
||||
cell: ({ getValue }) => {
|
||||
const date = getValue();
|
||||
return (
|
||||
<Text size="sm">
|
||||
{date ? new Date(date).toLocaleDateString() : '-'}
|
||||
</Text>
|
||||
);
|
||||
},
|
||||
},
|
||||
{
|
||||
header: 'Last Login',
|
||||
accessorKey: 'last_login',
|
||||
size: 175,
|
||||
cell: ({ getValue }) => {
|
||||
const date = getValue();
|
||||
return (
|
||||
<Text size="sm">
|
||||
{date ? new Date(date).toLocaleString() : 'Never'}
|
||||
</Text>
|
||||
);
|
||||
},
|
||||
},
|
||||
{
|
||||
header: 'XC Password',
|
||||
accessorKey: 'custom_properties',
|
||||
size: 125,
|
||||
enableSorting: false,
|
||||
cell: ({ getValue, row }) => {
|
||||
const userId = row.original.id;
|
||||
const isVisible = visiblePasswords[userId];
|
||||
|
||||
// Parse custom_properties and extract xc_password
|
||||
let password = 'N/A';
|
||||
try {
|
||||
const customProps = JSON.parse(getValue() || '{}');
|
||||
password = customProps.xc_password || 'N/A';
|
||||
} catch {
|
||||
password = 'N/A';
|
||||
}
|
||||
|
||||
return (
|
||||
<Group gap={4} style={{ alignItems: 'center' }}>
|
||||
<Text size="sm" style={{ fontFamily: 'monospace', minWidth: '60px' }}>
|
||||
{password === 'N/A' ? 'N/A' : (isVisible ? password : '••••••••')}
|
||||
</Text>
|
||||
{password !== 'N/A' && (
|
||||
<ActionIcon
|
||||
size="xs"
|
||||
variant="transparent"
|
||||
color="gray"
|
||||
onClick={() => togglePasswordVisibility(userId)}
|
||||
>
|
||||
{isVisible ? <EyeOff size={12} /> : <Eye size={12} />}
|
||||
</ActionIcon>
|
||||
)}
|
||||
</Group>
|
||||
);
|
||||
},
|
||||
},
|
||||
{
|
||||
id: 'actions',
|
||||
size: 80,
|
||||
header: 'Actions',
|
||||
enableSorting: false,
|
||||
cell: ({ row }) => (
|
||||
<UserRowActions
|
||||
theme={theme}
|
||||
row={row}
|
||||
editUser={editUser}
|
||||
deleteUser={deleteUser}
|
||||
/>
|
||||
),
|
||||
},
|
||||
],
|
||||
[theme, editUser, deleteUser, visiblePasswords, togglePasswordVisibility]
|
||||
);
|
||||
|
||||
const closeUserForm = () => {
|
||||
setSelectedUser(null);
|
||||
setUserModalOpen(false);
|
||||
};
|
||||
|
||||
const data = useMemo(() => {
|
||||
return users.sort((a, b) => a.id - b.id);
|
||||
}, [users]);
|
||||
|
||||
const renderHeaderCell = (header) => {
|
||||
return (
|
||||
<Text size="sm" name={header.id}>
|
||||
{header.column.columnDef.header}
|
||||
/**
|
||||
* useMemo
|
||||
*/
|
||||
const columns = useMemo(
|
||||
() => [
|
||||
{
|
||||
header: 'User Level',
|
||||
accessorKey: 'user_level',
|
||||
size: 120,
|
||||
cell: ({ getValue }) => (
|
||||
<Text size="sm">{USER_LEVEL_LABELS[getValue()]}</Text>
|
||||
),
|
||||
},
|
||||
{
|
||||
header: 'Username',
|
||||
accessorKey: 'username',
|
||||
size: 150,
|
||||
cell: ({ getValue }) => (
|
||||
<Box
|
||||
style={{
|
||||
whiteSpace: 'nowrap',
|
||||
overflow: 'hidden',
|
||||
textOverflow: 'ellipsis',
|
||||
}}
|
||||
>
|
||||
{getValue()}
|
||||
</Box>
|
||||
),
|
||||
},
|
||||
{
|
||||
id: 'name',
|
||||
header: 'Name',
|
||||
accessorFn: (row) =>
|
||||
`${row.first_name || ''} ${row.last_name || ''}`.trim(),
|
||||
cell: ({ getValue }) => (
|
||||
<Box
|
||||
style={{
|
||||
whiteSpace: 'nowrap',
|
||||
overflow: 'hidden',
|
||||
textOverflow: 'ellipsis',
|
||||
}}
|
||||
>
|
||||
{getValue() || '-'}
|
||||
</Box>
|
||||
),
|
||||
},
|
||||
{
|
||||
header: 'Email',
|
||||
accessorKey: 'email',
|
||||
cell: ({ getValue }) => (
|
||||
<Box
|
||||
style={{
|
||||
whiteSpace: 'nowrap',
|
||||
overflow: 'hidden',
|
||||
textOverflow: 'ellipsis',
|
||||
}}
|
||||
>
|
||||
{getValue()}
|
||||
</Box>
|
||||
),
|
||||
},
|
||||
{
|
||||
header: 'Date Joined',
|
||||
accessorKey: 'date_joined',
|
||||
size: 125,
|
||||
cell: ({ getValue }) => {
|
||||
const date = getValue();
|
||||
return (
|
||||
<Text size="sm">
|
||||
{date ? new Date(date).toLocaleDateString() : '-'}
|
||||
</Text>
|
||||
);
|
||||
};
|
||||
|
||||
const table = useTable({
|
||||
columns,
|
||||
data,
|
||||
allRowIds: data.map((user) => user.id),
|
||||
enablePagination: false,
|
||||
enableRowSelection: false,
|
||||
enableRowVirtualization: false,
|
||||
renderTopToolbar: false,
|
||||
manualSorting: false,
|
||||
manualFiltering: false,
|
||||
manualPagination: false,
|
||||
headerCellRenderFns: {
|
||||
actions: renderHeaderCell,
|
||||
username: renderHeaderCell,
|
||||
name: renderHeaderCell,
|
||||
email: renderHeaderCell,
|
||||
user_level: renderHeaderCell,
|
||||
last_login: renderHeaderCell,
|
||||
date_joined: renderHeaderCell,
|
||||
custom_properties: renderHeaderCell,
|
||||
);
|
||||
},
|
||||
});
|
||||
},
|
||||
{
|
||||
header: 'Last Login',
|
||||
accessorKey: 'last_login',
|
||||
size: 175,
|
||||
cell: ({ getValue }) => {
|
||||
const date = getValue();
|
||||
return (
|
||||
<Text size="sm">
|
||||
{date ? new Date(date).toLocaleString() : 'Never'}
|
||||
</Text>
|
||||
);
|
||||
},
|
||||
},
|
||||
{
|
||||
header: 'XC Password',
|
||||
accessorKey: 'custom_properties',
|
||||
size: 125,
|
||||
enableSorting: false,
|
||||
cell: ({ getValue, row }) => {
|
||||
const userId = row.original.id;
|
||||
const isVisible = visiblePasswords[userId];
|
||||
|
||||
// Parse custom_properties and extract xc_password
|
||||
let password = 'N/A';
|
||||
try {
|
||||
const customProps = JSON.parse(getValue() || '{}');
|
||||
password = customProps.xc_password || 'N/A';
|
||||
} catch {
|
||||
password = 'N/A';
|
||||
}
|
||||
|
||||
return (
|
||||
<Group gap={4} style={{ alignItems: 'center' }}>
|
||||
<Text
|
||||
size="sm"
|
||||
style={{ fontFamily: 'monospace', minWidth: '60px' }}
|
||||
>
|
||||
{password === 'N/A' ? 'N/A' : isVisible ? password : '••••••••'}
|
||||
</Text>
|
||||
{password !== 'N/A' && (
|
||||
<ActionIcon
|
||||
size="xs"
|
||||
variant="transparent"
|
||||
color="gray"
|
||||
onClick={() => togglePasswordVisibility(userId)}
|
||||
>
|
||||
{isVisible ? <EyeOff size={12} /> : <Eye size={12} />}
|
||||
</ActionIcon>
|
||||
)}
|
||||
</Group>
|
||||
);
|
||||
},
|
||||
},
|
||||
{
|
||||
id: 'actions',
|
||||
size: 80,
|
||||
header: 'Actions',
|
||||
enableSorting: false,
|
||||
cell: ({ row }) => (
|
||||
<UserRowActions
|
||||
theme={theme}
|
||||
row={row}
|
||||
editUser={editUser}
|
||||
deleteUser={deleteUser}
|
||||
/>
|
||||
),
|
||||
},
|
||||
],
|
||||
[theme, editUser, deleteUser, visiblePasswords, togglePasswordVisibility]
|
||||
);
|
||||
|
||||
const closeUserForm = () => {
|
||||
setSelectedUser(null);
|
||||
setUserModalOpen(false);
|
||||
};
|
||||
|
||||
const data = useMemo(() => {
|
||||
return users.sort((a, b) => a.id - b.id);
|
||||
}, [users]);
|
||||
|
||||
const renderHeaderCell = (header) => {
|
||||
return (
|
||||
<>
|
||||
<Box
|
||||
style={{
|
||||
display: 'flex',
|
||||
justifyContent: 'center',
|
||||
padding: '0px',
|
||||
minHeight: '100vh',
|
||||
}}
|
||||
<Text size="sm" name={header.id}>
|
||||
{header.column.columnDef.header}
|
||||
</Text>
|
||||
);
|
||||
};
|
||||
|
||||
const table = useTable({
|
||||
columns,
|
||||
data,
|
||||
allRowIds: data.map((user) => user.id),
|
||||
enablePagination: false,
|
||||
enableRowSelection: false,
|
||||
enableRowVirtualization: false,
|
||||
renderTopToolbar: false,
|
||||
manualSorting: false,
|
||||
manualFiltering: false,
|
||||
manualPagination: false,
|
||||
headerCellRenderFns: {
|
||||
actions: renderHeaderCell,
|
||||
username: renderHeaderCell,
|
||||
name: renderHeaderCell,
|
||||
email: renderHeaderCell,
|
||||
user_level: renderHeaderCell,
|
||||
last_login: renderHeaderCell,
|
||||
date_joined: renderHeaderCell,
|
||||
custom_properties: renderHeaderCell,
|
||||
},
|
||||
});
|
||||
|
||||
return (
|
||||
<>
|
||||
<Box
|
||||
style={{
|
||||
display: 'flex',
|
||||
justifyContent: 'center',
|
||||
padding: '0px',
|
||||
minHeight: '100vh',
|
||||
}}
|
||||
>
|
||||
<Stack gap="md" style={{ maxWidth: '1200px', width: '100%' }}>
|
||||
<Flex style={{ alignItems: 'center', paddingBottom: 10 }} gap={15}>
|
||||
<Text
|
||||
style={{
|
||||
fontFamily: 'Inter, sans-serif',
|
||||
fontWeight: 500,
|
||||
fontSize: '20px',
|
||||
lineHeight: 1,
|
||||
letterSpacing: '-0.3px',
|
||||
color: 'gray.6',
|
||||
marginBottom: 0,
|
||||
}}
|
||||
>
|
||||
<Stack gap="md" style={{ maxWidth: '1200px', width: '100%' }}>
|
||||
<Flex style={{ alignItems: 'center', paddingBottom: 10 }} gap={15}>
|
||||
<Text
|
||||
style={{
|
||||
fontFamily: 'Inter, sans-serif',
|
||||
fontWeight: 500,
|
||||
fontSize: '20px',
|
||||
lineHeight: 1,
|
||||
letterSpacing: '-0.3px',
|
||||
color: 'gray.6',
|
||||
marginBottom: 0,
|
||||
}}
|
||||
>
|
||||
Users
|
||||
</Text>
|
||||
</Flex>
|
||||
Users
|
||||
</Text>
|
||||
</Flex>
|
||||
|
||||
<Paper
|
||||
style={{
|
||||
backgroundColor: '#27272A',
|
||||
border: '1px solid #3f3f46',
|
||||
borderRadius: 'var(--mantine-radius-md)',
|
||||
}}
|
||||
>
|
||||
{/* Top toolbar */}
|
||||
<Box
|
||||
style={{
|
||||
display: 'flex',
|
||||
justifyContent: 'flex-end',
|
||||
padding: '16px',
|
||||
borderBottom: '1px solid #3f3f46',
|
||||
}}
|
||||
>
|
||||
<Button
|
||||
leftSection={<SquarePlus size={18} />}
|
||||
variant="light"
|
||||
size="xs"
|
||||
onClick={() => editUser()}
|
||||
p={5}
|
||||
color={theme.tailwind.green[5]}
|
||||
style={{
|
||||
borderWidth: '1px',
|
||||
borderColor: theme.tailwind.green[5],
|
||||
color: 'white',
|
||||
}}
|
||||
disabled={authUser.user_level !== USER_LEVELS.ADMIN}
|
||||
>
|
||||
Add User
|
||||
</Button>
|
||||
</Box>
|
||||
|
||||
{/* Table container */}
|
||||
<Box
|
||||
style={{
|
||||
position: 'relative',
|
||||
overflow: 'auto',
|
||||
borderRadius: '0 0 var(--mantine-radius-md) var(--mantine-radius-md)',
|
||||
}}
|
||||
>
|
||||
<div style={{ minWidth: '900px' }}>
|
||||
<LoadingOverlay visible={isLoading} />
|
||||
<CustomTable table={table} />
|
||||
</div>
|
||||
</Box>
|
||||
</Paper>
|
||||
</Stack>
|
||||
<Paper
|
||||
style={{
|
||||
backgroundColor: '#27272A',
|
||||
border: '1px solid #3f3f46',
|
||||
borderRadius: 'var(--mantine-radius-md)',
|
||||
}}
|
||||
>
|
||||
{/* Top toolbar */}
|
||||
<Box
|
||||
style={{
|
||||
display: 'flex',
|
||||
justifyContent: 'flex-end',
|
||||
padding: '16px',
|
||||
borderBottom: '1px solid #3f3f46',
|
||||
}}
|
||||
>
|
||||
<Button
|
||||
leftSection={<SquarePlus size={18} />}
|
||||
variant="light"
|
||||
size="xs"
|
||||
onClick={() => editUser()}
|
||||
p={5}
|
||||
color={theme.tailwind.green[5]}
|
||||
style={{
|
||||
borderWidth: '1px',
|
||||
borderColor: theme.tailwind.green[5],
|
||||
color: 'white',
|
||||
}}
|
||||
disabled={authUser.user_level !== USER_LEVELS.ADMIN}
|
||||
>
|
||||
Add User
|
||||
</Button>
|
||||
</Box>
|
||||
|
||||
<UserForm
|
||||
user={selectedUser}
|
||||
isOpen={userModalOpen}
|
||||
onClose={closeUserForm}
|
||||
/>
|
||||
{/* Table container */}
|
||||
<Box
|
||||
style={{
|
||||
position: 'relative',
|
||||
overflow: 'auto',
|
||||
borderRadius:
|
||||
'0 0 var(--mantine-radius-md) var(--mantine-radius-md)',
|
||||
}}
|
||||
>
|
||||
<div style={{ minWidth: '900px' }}>
|
||||
<LoadingOverlay visible={isLoading} />
|
||||
<CustomTable table={table} />
|
||||
</div>
|
||||
</Box>
|
||||
</Paper>
|
||||
</Stack>
|
||||
</Box>
|
||||
|
||||
<ConfirmationDialog
|
||||
opened={confirmDeleteOpen}
|
||||
onClose={() => setConfirmDeleteOpen(false)}
|
||||
onConfirm={() => executeDeleteUser(deleteTarget)}
|
||||
title="Confirm User Deletion"
|
||||
message={
|
||||
userToDelete ? (
|
||||
<div style={{ whiteSpace: 'pre-line' }}>
|
||||
{`Are you sure you want to delete the following user?
|
||||
<UserForm
|
||||
user={selectedUser}
|
||||
isOpen={userModalOpen}
|
||||
onClose={closeUserForm}
|
||||
/>
|
||||
|
||||
<ConfirmationDialog
|
||||
opened={confirmDeleteOpen}
|
||||
onClose={() => setConfirmDeleteOpen(false)}
|
||||
onConfirm={() => executeDeleteUser(deleteTarget)}
|
||||
title="Confirm User Deletion"
|
||||
message={
|
||||
userToDelete ? (
|
||||
<div style={{ whiteSpace: 'pre-line' }}>
|
||||
{`Are you sure you want to delete the following user?
|
||||
|
||||
Username: ${userToDelete.username}
|
||||
Email: ${userToDelete.email}
|
||||
User Level: ${USER_LEVEL_LABELS[userToDelete.user_level]}
|
||||
|
||||
This action cannot be undone.`}
|
||||
</div>
|
||||
) : (
|
||||
'Are you sure you want to delete this user? This action cannot be undone.'
|
||||
)
|
||||
}
|
||||
confirmLabel="Delete"
|
||||
cancelLabel="Cancel"
|
||||
actionKey="delete-user"
|
||||
onSuppressChange={suppressWarning}
|
||||
size="md"
|
||||
/>
|
||||
</>
|
||||
);
|
||||
</div>
|
||||
) : (
|
||||
'Are you sure you want to delete this user? This action cannot be undone.'
|
||||
)
|
||||
}
|
||||
confirmLabel="Delete"
|
||||
cancelLabel="Cancel"
|
||||
actionKey="delete-user"
|
||||
onSuppressChange={suppressWarning}
|
||||
size="md"
|
||||
/>
|
||||
</>
|
||||
);
|
||||
};
|
||||
|
||||
export default UsersTable;
|
||||
export default UsersTable;
|
||||
|
|
|
|||
19
frontend/src/components/theme/Button.jsx
Normal file
19
frontend/src/components/theme/Button.jsx
Normal file
|
|
@ -0,0 +1,19 @@
import { Button as MantineButton } from '@mantine/core';

const Button = (props) => {
  return (
    <MantineButton
      {...props}
      style={{
        color: 'black',
        // fontWeight: '400',
        backgroundColor: '#14917E',
        '&:hover': {
          backgroundColor: '#14917E',
        },
      }}
    />
  );
};

export default Button;
@ -322,3 +322,25 @@ export const REGION_CHOICES = [
  { value: 'zm', label: 'ZM' },
  { value: 'zw', label: 'ZW' },
];

export const VOD_TYPES = {
  MOVIE: 'movie',
  EPISODE: 'episode'
};

export const VOD_FILTERS = {
  ALL: 'all',
  MOVIES: 'movies',
  SERIES: 'series'
};

export const VOD_SORT_OPTIONS = [
  { value: 'name', label: 'Name' },
  { value: 'year', label: 'Year' },
  { value: 'created_at', label: 'Date Added' },
  { value: 'rating', label: 'Rating' }
];

export const CONTAINER_EXTENSIONS = [
  'mp4', 'mkv', 'avi', 'mov', 'wmv', 'flv', 'webm', 'm4v', 'ts', 'mpg'
];
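A minimal sketch of how these constants might be consumed when rendering a VOD library; the item shape (`type`, `container_extension`) and the helper names are assumptions for illustration, not taken from this diff:

import { VOD_FILTERS, CONTAINER_EXTENSIONS } from '../constants';

// Hypothetical helper: narrow a mixed list of VOD items by the active filter
// and keep only container extensions listed above.
export function filterVodItems(items, filter) {
  return items.filter((item) => {
    const typeOk =
      filter === VOD_FILTERS.ALL ||
      (filter === VOD_FILTERS.MOVIES && item.type === 'movie') ||
      (filter === VOD_FILTERS.SERIES && item.type === 'series');
    const containerOk =
      !item.container_extension ||
      CONTAINER_EXTENSIONS.includes(item.container_extension.toLowerCase());
    return typeOk && containerOk;
  });
}

// VOD_SORT_OPTIONS values map directly onto item fields, so a generic sort works.
export function sortVodItems(items, sortBy = 'name') {
  return [...items].sort((a, b) => {
    const av = a[sortBy];
    const bv = b[sortBy];
    if (typeof av === 'number' && typeof bv === 'number') return av - bv;
    return String(av ?? '').localeCompare(String(bv ?? ''));
  });
}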
@ -1,4 +1,4 @@
import { useState, useEffect, useCallback, useMemo } from 'react';
import useLogosStore from '../store/logos';

/**
@ -6,62 +6,122 @@ import useLogosStore from '../store/logos';
 * Loads logos on-demand when the component is opened
 */
export const useLogoSelection = () => {
  const [isLoading, setIsLoading] = useState(false);
  const [isInitialized, setIsInitialized] = useState(false);

  const logos = useLogosStore((s) => s.logos);
  const fetchLogos = useLogosStore((s) => s.fetchLogos); // Check if we have a reasonable number of logos loaded
  const hasEnoughLogos = Object.keys(logos).length > 0;

  const ensureLogosLoaded = useCallback(async () => {
    if (isLoading || (hasEnoughLogos && isInitialized)) {
      return;
    }

    setIsLoading(true);
    try {
      await fetchLogos();
      setIsInitialized(true);
    } catch (error) {
      console.error('Failed to load logos for selection:', error);
    } finally {
      setIsLoading(false);
    }
  }, [isLoading, hasEnoughLogos, isInitialized, fetchLogos]);

  return {
    logos,
    isLoading,
    ensureLogosLoaded,
    hasLogos: hasEnoughLogos,
  };
};

/**
 * Hook for channel forms that need only channel-assignable logos
 * (unused + channel-used, excluding VOD-only logos)
 */
export const useChannelLogoSelection = () => {
  const [isInitialized, setIsInitialized] = useState(false);

  const channelLogos = useLogosStore((s) => s.channelLogos);
  const hasLoadedChannelLogos = useLogosStore((s) => s.hasLoadedChannelLogos);
  const backgroundLoading = useLogosStore((s) => s.backgroundLoading);
  const fetchChannelAssignableLogos = useLogosStore(
    (s) => s.fetchChannelAssignableLogos
  );

  const hasLogos = Object.keys(channelLogos).length > 0;

  const ensureLogosLoaded = useCallback(async () => {
    if (backgroundLoading || (hasLoadedChannelLogos && isInitialized)) {
      return;
    }

    try {
      await fetchChannelAssignableLogos();
      setIsInitialized(true);
    } catch (error) {
      console.error('Failed to load channel-assignable logos:', error);
    }
  }, [
    backgroundLoading,
    hasLoadedChannelLogos,
    isInitialized,
    fetchChannelAssignableLogos,
  ]);

  return {
    logos: channelLogos,
    isLoading: backgroundLoading,
    ensureLogosLoaded,
    hasLogos,
  };
};

/**
 * Hook for components that need specific logos by IDs
 */
export const useLogosById = (logoIds = []) => {
  const [isLoading, setIsLoading] = useState(false);
  const [loadedIds, setLoadedIds] = useState(new Set());

  const logos = useLogosStore((s) => s.logos);
  const fetchLogosByIds = useLogosStore((s) => s.fetchLogosByIds);

  // Memoize missing IDs calculation to prevent infinite loops
  const missingIds = useMemo(() => {
    return logoIds.filter((id) => id && !logos[id] && !loadedIds.has(id));
  }, [logoIds, logos, loadedIds]);

  // Stringify logoIds to prevent array reference issues
  const logoIdsString = logoIds.join(',');

  useEffect(() => {
    if (missingIds.length > 0 && !isLoading) {
      setIsLoading(true);

      // Track that we're loading these IDs to prevent re-requests
      setLoadedIds((prev) => new Set([...prev, ...missingIds]));

      fetchLogosByIds(missingIds)
        .then(() => setIsLoading(false))
        .catch((error) => {
          console.error('Failed to load logos by IDs:', error);
          // Remove failed IDs from loaded set so they can be retried
          setLoadedIds((prev) => {
            const newSet = new Set(prev);
            missingIds.forEach((id) => newSet.delete(id));
            return newSet;
          });
          setIsLoading(false);
        });
    }
  }, [logoIdsString, missingIds, isLoading, fetchLogosByIds]);

  return {
    logos,
    isLoading,
    missingLogos: missingIds.length,
  };
};
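A possible consumer of useLogosById; the component, import path, and logo field names are illustrative assumptions, not part of this diff:

import React from 'react';
import { useLogosById } from '../hooks/useLogoSelection'; // assumed path

// Renders a row of logos, fetching only the IDs not already cached in the store.
const LogoStrip = ({ logoIds }) => {
  const { logos, isLoading, missingLogos } = useLogosById(logoIds);

  if (isLoading && missingLogos > 0) {
    return <span>Loading logos...</span>;
  }

  return (
    <div style={{ display: 'flex', gap: 8 }}>
      {logoIds
        .filter((id) => logos[id])
        .map((id) => (
          // "url" and "name" are assumed logo fields for this sketch
          <img key={id} src={logos[id].url} alt={logos[id].name} height={24} />
        ))}
    </div>
  );
};

export default LogoStrip;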
@ -1,37 +1,47 @@
import React, { useEffect, useCallback } from 'react';
import { Box, Loader, Center, Text, Stack } from '@mantine/core';
import { notifications } from '@mantine/notifications';
import useLogosStore from '../store/logos';
import LogosTable from '../components/tables/LogosTable';

const LogosPage = () => {
  const { fetchAllLogos, isLoading, needsAllLogos } = useLogosStore();

  const loadLogos = useCallback(async () => {
    try {
      // Only fetch all logos if we haven't loaded them yet
      if (needsAllLogos()) {
        await fetchAllLogos();
      }
    } catch (err) {
      notifications.show({
        title: 'Error',
        message: 'Failed to load logos',
        color: 'red',
      });
      console.error('Failed to load logos:', err);
    }
  }, [fetchAllLogos, needsAllLogos]);

  useEffect(() => {
    loadLogos();
  }, [loadLogos]);

  return (
    <Box style={{ padding: 10 }}>
      {isLoading && (
        <Center style={{ marginBottom: 20 }}>
          <Stack align="center" spacing="sm">
            <Loader size="sm" />
            <Text size="sm" color="dimmed">
              Loading all logos...
            </Text>
          </Stack>
        </Center>
      )}
      <LogosTable />
    </Box>
  );
};

export default LogosPage;

2003 frontend/src/pages/VODs.jsx Normal file
File diff suppressed because it is too large
@ -2,12 +2,12 @@ import { create } from 'zustand';
import api from '../api';
import useSettingsStore from './settings';
import useChannelsStore from './channels';
import useLogosStore from './logos';
import usePlaylistsStore from './playlists';
import useEPGsStore from './epgs';
import useStreamProfilesStore from './streamProfiles';
import useUserAgentsStore from './userAgents';
import useUsersStore from './users';
import API from '../api';
import { USER_LEVELS } from '../constants';

@ -47,7 +47,7 @@ const useAuthStore = create((set, get) => ({
      await useSettingsStore.getState().fetchSettings();

      try {
        // Only after settings are loaded, fetch the essential data
        await Promise.all([
          useChannelsStore.getState().fetchChannels(),
          useChannelsStore.getState().fetchChannelGroups(),

@ -59,20 +59,11 @@ const useAuthStore = create((set, get) => ({
          useUserAgentsStore.getState().fetchUserAgents(),
        ]);

        // Load only logos that are currently used by channels (much faster)
        await useLogosStore.getState().fetchUsedLogos();

        if (user.user_level >= USER_LEVELS.ADMIN) {
          await Promise.all([useUsersStore.getState().fetchUsers()]);
        }

        set({ user, isAuthenticated: true });

        // Start background loading of remaining logos after login is complete
        setTimeout(() => {
          useLogosStore.getState().fetchLogosInBackground();
        }, 2000); // 2 second delay to let UI settle

      } catch (error) {
        console.error('Error initializing data:', error);
      }

@ -114,6 +105,8 @@ const useAuthStore = create((set, get) => ({
        localStorage.setItem('accessToken', response.access);
        localStorage.setItem('refreshToken', response.refresh);
        localStorage.setItem('tokenExpiration', expiration);

        // Don't start background loading here - let it happen after app initialization
      }
    } catch (error) {
      console.error('Login failed:', error);
@ -2,141 +2,354 @@ import { create } from 'zustand';
import api from '../api';

const useLogosStore = create((set, get) => ({
  logos: {},
  channelLogos: {}, // Separate state for channel-assignable logos
  isLoading: false,
  backgroundLoading: false,
  hasLoadedAll: false, // Track if we've loaded all logos
  hasLoadedChannelLogos: false, // Track if we've loaded channel-assignable logos
  error: null,

  // Basic CRUD operations
  setLogos: (logos) => {
    set({
      logos: logos.reduce((acc, logo) => {
        acc[logo.id] = { ...logo };
        return acc;
      }, {}),
    });
  },

  addLogo: (newLogo) =>
    set((state) => ({
      logos: {
        ...state.logos,
        [newLogo.id]: { ...newLogo },
      },
    })),

  updateLogo: (logo) =>
    set((state) => ({
      logos: {
        ...state.logos,
        [logo.id]: { ...logo },
      },
    })),

  removeLogo: (logoId) =>
    set((state) => {
      const newLogos = { ...state.logos };
      delete newLogos[logoId];
      return { logos: newLogos };
    }),

  // Smart loading methods
  fetchLogos: async (pageSize = 100) => {
    set({ isLoading: true, error: null });
    try {
      const response = await api.getLogos({ page_size: pageSize });

      // Handle both paginated and non-paginated responses
      const logos = Array.isArray(response) ? response : response.results || [];

      set({
        logos: logos.reduce((acc, logo) => {
          acc[logo.id] = { ...logo };
          return acc;
        }, {}),
        isLoading: false,
      });
      return response;
    } catch (error) {
      console.error('Failed to fetch logos:', error);
      set({ error: 'Failed to load logos.', isLoading: false });
      throw error;
    }
  },

  fetchAllLogos: async () => {
    const { isLoading, hasLoadedAll, logos } = get();

    // Prevent unnecessary reloading if we already have all logos
    if (isLoading || (hasLoadedAll && Object.keys(logos).length > 0)) {
      return Object.values(logos);
    }

    set({ isLoading: true, error: null });
    try {
      // Disable pagination to get all logos for management interface
      const response = await api.getLogos({ no_pagination: 'true' });

      // Handle both paginated and non-paginated responses
      const logosArray = Array.isArray(response)
        ? response
        : response.results || [];

      set({
        logos: logosArray.reduce((acc, logo) => {
          acc[logo.id] = { ...logo };
          return acc;
        }, {}),
        hasLoadedAll: true, // Mark that we've loaded all logos
        isLoading: false,
      });
      return logosArray;
    } catch (error) {
      console.error('Failed to fetch all logos:', error);
      set({ error: 'Failed to load all logos.', isLoading: false });
      throw error;
    }
  },

  fetchUsedLogos: async (pageSize = 100) => {
    set({ isLoading: true, error: null });
    try {
      // Load used logos with pagination for better performance
      const response = await api.getLogos({
        used: 'true',
        page_size: pageSize,
      });

      // Handle both paginated and non-paginated responses
      const logos = Array.isArray(response) ? response : response.results || [];

      set((state) => ({
        logos: {
          ...state.logos,
          ...logos.reduce((acc, logo) => {
            acc[logo.id] = { ...logo };
            return acc;
          }, {}),
        },
        isLoading: false,
      }));
      return response;
    } catch (error) {
      console.error('Failed to fetch used logos:', error);
      set({ error: 'Failed to load used logos.', isLoading: false });
      throw error;
    }
  },

  fetchChannelAssignableLogos: async () => {
    const { backgroundLoading, hasLoadedChannelLogos, channelLogos } = get();

    // Prevent concurrent calls
    if (
      backgroundLoading ||
      (hasLoadedChannelLogos && Object.keys(channelLogos).length > 0)
    ) {
      return Object.values(channelLogos);
    }

    set({ backgroundLoading: true, error: null });
    try {
      // Load logos suitable for channel assignment (unused + channel-used, exclude VOD-only)
      const response = await api.getLogos({
        channel_assignable: 'true',
        no_pagination: 'true', // Get all channel-assignable logos
      });

      // Handle both paginated and non-paginated responses
      const logos = Array.isArray(response) ? response : response.results || [];

      console.log(`Fetched ${logos.length} channel-assignable logos`);

      // Store in separate channelLogos state
      set({
        channelLogos: logos.reduce((acc, logo) => {
          acc[logo.id] = { ...logo };
          return acc;
        }, {}),
        hasLoadedChannelLogos: true,
        backgroundLoading: false,
      });

      return logos;
    } catch (error) {
      console.error('Failed to fetch channel-assignable logos:', error);
      set({
        error: 'Failed to load channel-assignable logos.',
        backgroundLoading: false,
      });
      throw error;
    }
  },

  fetchLogosByIds: async (logoIds) => {
    if (!logoIds || logoIds.length === 0) return [];

    try {
      // Filter out logos we already have
      const missingIds = logoIds.filter((id) => !get().logos[id]);
      if (missingIds.length === 0) return [];

      const response = await api.getLogosByIds(missingIds);

      // Handle both paginated and non-paginated responses
      const logos = Array.isArray(response) ? response : response.results || [];

      set((state) => ({
        logos: {
          ...state.logos,
          ...logos.reduce((acc, logo) => {
            acc[logo.id] = { ...logo };
            return acc;
          }, {}),
        },
      }));
      return logos;
    } catch (error) {
      console.error('Failed to fetch logos by IDs:', error);
      throw error;
    }
  },

  fetchLogosInBackground: async () => {
    set({ backgroundLoading: true });
    try {
      // Load logos in chunks using pagination for better performance
      let page = 1;
      const pageSize = 200;
      let hasMore = true;

      while (hasMore) {
        const response = await api.getLogos({ page, page_size: pageSize });

        set((state) => ({
          logos: {
            ...state.logos,
            ...response.results.reduce((acc, logo) => {
              acc[logo.id] = { ...logo };
              return acc;
            }, {}),
          },
        }));

        // Check if there are more pages
        hasMore = !!response.next;
        page++;

        // Add a small delay between chunks to avoid overwhelming the server
        if (hasMore) {
          await new Promise((resolve) => setTimeout(resolve, 100));
        }
      }
    } catch (error) {
      console.error('Background logo loading failed:', error);
      // Don't throw error for background loading
    } finally {
      set({ backgroundLoading: false });
    }
  },

  // Background loading specifically for all logos after login
  backgroundLoadAllLogos: async () => {
    const { backgroundLoading, hasLoadedAll } = get();

    // Don't start if already loading or if we already have all logos loaded
    if (backgroundLoading || hasLoadedAll) {
      return;
    }

    set({ backgroundLoading: true });

    // Use setTimeout to make this truly non-blocking
    setTimeout(async () => {
      try {
        // Use the API directly to avoid interfering with the main isLoading state
        const response = await api.getLogos({ no_pagination: 'true' });
        const logosArray = Array.isArray(response)
          ? response
          : response.results || [];

        // Process logos in smaller chunks to avoid blocking the main thread
        const chunkSize = 1000;
        const logoObject = {};

        for (let i = 0; i < logosArray.length; i += chunkSize) {
          const chunk = logosArray.slice(i, i + chunkSize);
          chunk.forEach((logo) => {
            logoObject[logo.id] = { ...logo };
          });

          // Yield control back to the main thread between chunks
          if (i + chunkSize < logosArray.length) {
            await new Promise((resolve) => setTimeout(resolve, 0));
          }
        }

        set({
          logos: logoObject,
          hasLoadedAll: true,
          backgroundLoading: false,
        });
      } catch (error) {
        console.error('Background all logos loading failed:', error);
        set({ backgroundLoading: false });
      }
    }, 0); // Execute immediately but asynchronously
  },

  // Background loading specifically for channel-assignable logos after login
  backgroundLoadChannelLogos: async () => {
    const { backgroundLoading, channelLogos, hasLoadedChannelLogos } = get();

    // Don't start if already loading or if we already have channel logos loaded
    if (
      backgroundLoading ||
      hasLoadedChannelLogos ||
      Object.keys(channelLogos).length > 100
    ) {
      return;
    }

    set({ backgroundLoading: true });
    try {
      console.log('Background loading channel-assignable logos...');
      await get().fetchChannelAssignableLogos();
      console.log(
        `Background loaded ${Object.keys(get().channelLogos).length} channel-assignable logos`
      );
    } catch (error) {
      console.error('Background channel logo loading failed:', error);
      // Don't throw error for background loading
    } finally {
      set({ backgroundLoading: false });
    }
  },

  // Start background loading after app is fully initialized
  startBackgroundLoading: () => {
    // Use a longer delay to ensure app is fully loaded
    setTimeout(() => {
      // Fire and forget - don't await this
      get()
        .backgroundLoadAllLogos()
        .catch((error) => {
          console.error('Background logo loading failed:', error);
        });
    }, 3000); // Wait 3 seconds after app initialization
  },

  // Helper methods
  getLogoById: (logoId) => {
    return get().logos[logoId] || null;
  },

  hasLogo: (logoId) => {
    return !!get().logos[logoId];
  },

  getLogosCount: () => {
    return Object.keys(get().logos).length;
  },

  // Check if we need to fetch all logos (haven't loaded them yet or store is empty)
  needsAllLogos: () => {
    const state = get();
    return !state.hasLoadedAll || Object.keys(state.logos).length === 0;
  },
}));

export default useLogosStore;
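One way the deferred loading could be wired up after app initialization; where exactly this runs is an assumption, while the store methods are the ones defined above:

import useLogosStore from './logos';

// After the essential data is in place, kick off the non-blocking full logo load.
// startBackgroundLoading() waits a few seconds and then calls backgroundLoadAllLogos().
export function scheduleLogoBackgroundLoad() {
  useLogosStore.getState().startBackgroundLoading();
}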
381 frontend/src/store/useVODStore.jsx Normal file
@ -0,0 +1,381 @@
import { create } from 'zustand';
import api from '../api';

const useVODStore = create((set, get) => ({
  movies: {},
  series: {},
  episodes: {},
  categories: {},
  loading: false,
  error: null,
  filters: {
    type: 'all', // 'all', 'movies', 'series'
    search: '',
    category: '',
  },
  currentPage: 1,
  totalCount: 0,
  pageSize: 20,

  setFilters: (newFilters) =>
    set((state) => ({
      filters: { ...state.filters, ...newFilters },
      currentPage: 1, // Reset to first page when filters change
    })),

  setPage: (page) =>
    set(() => ({
      currentPage: page,
    })),

  fetchMovies: async () => {
    try {
      set({ loading: true, error: null });
      const state = get();
      const params = new URLSearchParams();

      params.append('page', state.currentPage);
      params.append('page_size', state.pageSize);

      if (state.filters.search) {
        params.append('search', state.filters.search);
      }

      if (state.filters.category) {
        params.append('category', state.filters.category);
      }

      const response = await api.getMovies(params);

      // Handle both paginated and non-paginated responses
      const results = response.results || response;
      const count = response.count || results.length;

      set({
        movies: results.reduce((acc, movie) => {
          acc[movie.id] = movie;
          return acc;
        }, {}),
        totalCount: count,
        loading: false,
      });
    } catch (error) {
      console.error('Failed to fetch movies:', error);
      set({ error: 'Failed to load movies.', loading: false });
    }
  },

  fetchSeries: async () => {
    set({ loading: true, error: null });
    try {
      const state = get();
      const params = new URLSearchParams();

      params.append('page', state.currentPage);
      params.append('page_size', state.pageSize);

      if (state.filters.search) {
        params.append('search', state.filters.search);
      }

      if (state.filters.category) {
        params.append('category', state.filters.category);
      }

      const response = await api.getSeries(params);

      // Handle both paginated and non-paginated responses
      const results = response.results || response;
      const count = response.count || results.length;

      set({
        series: results.reduce((acc, series) => {
          acc[series.id] = series;
          return acc;
        }, {}),
        totalCount: count,
        loading: false,
      });
    } catch (error) {
      console.error('Failed to fetch series:', error);
      set({ error: 'Failed to load series.', loading: false });
    }
  },

  fetchSeriesEpisodes: async (seriesId) => {
    set({ loading: true, error: null });
    try {
      const response = await api.getSeriesEpisodes(seriesId);

      set((state) => ({
        episodes: {
          ...state.episodes,
          ...response.reduce((acc, episode) => {
            acc[episode.id] = episode;
            return acc;
          }, {}),
        },
        loading: false,
      }));

      return response;
    } catch (error) {
      console.error('Failed to fetch series episodes:', error);
      set({ error: 'Failed to load episodes.', loading: false });
      throw error; // Re-throw to allow calling component to handle
    }
  },

  fetchMovieDetails: async (movieId) => {
    set({ loading: true, error: null });
    try {
      const response = await api.getMovieDetails(movieId);

      // Transform the response data to match our expected format
      const movieDetails = {
        id: response.id || movieId,
        name: response.name || '',
        description: response.description || '',
        year: response.year || null,
        genre: response.genre || '',
        rating: response.rating || '',
        duration_secs: response.duration_secs || null,
        stream_url: response.url || '',
        logo: response.logo_url || null,
        type: 'movie',
        director: response.director || '',
        actors: response.actors || '',
        country: response.country || '',
        tmdb_id: response.tmdb_id || '',
        imdb_id: response.imdb_id || '',
        m3u_account: response.m3u_account || '',
      };
      console.log('Fetched Movie Details:', movieDetails);
      set((state) => ({
        movies: {
          ...state.movies,
          [movieDetails.id]: movieDetails,
        },
        loading: false,
      }));

      return movieDetails;
    } catch (error) {
      console.error('Failed to fetch movie details:', error);
      set({ error: 'Failed to load movie details.', loading: false });
      throw error;
    }
  },

  fetchMovieDetailsFromProvider: async (movieId) => {
    set({ loading: true, error: null });
    try {
      const response = await api.getMovieProviderInfo(movieId);

      // Transform the response data to match our expected format
      const movieDetails = {
        id: response.id || movieId,
        name: response.name || '',
        description: response.description || response.plot || '',
        year: response.year || null,
        genre: response.genre || '',
        rating: response.rating || '',
        duration_secs: response.duration_secs || null,
        stream_url: response.stream_url || '',
        logo: response.logo || response.cover || null,
        type: 'movie',
        director: response.director || '',
        actors: response.actors || response.cast || '',
        country: response.country || '',
        tmdb_id: response.tmdb_id || '',
        youtube_trailer: response.youtube_trailer || '',
        // Additional provider fields
        backdrop_path: response.backdrop_path || [],
        release_date: response.release_date || response.releasedate || '',
        movie_image: response.movie_image || null,
        o_name: response.o_name || '',
        age: response.age || '',
        episode_run_time: response.episode_run_time || null,
        bitrate: response.bitrate || 0,
        video: response.video || {},
        audio: response.audio || {},
      };

      set({ loading: false }); // Only update loading state

      // Do NOT merge or overwrite the store entry
      return movieDetails;
    } catch (error) {
      console.error('Failed to fetch movie details from provider:', error);
      set({ error: 'Failed to load movie details from provider.', loading: false });
      throw error;
    }
  },

  fetchMovieProviders: async (movieId) => {
    try {
      const response = await api.getMovieProviders(movieId);
      return response || [];
    } catch (error) {
      console.error('Failed to fetch movie providers:', error);
      throw error;
    }
  },

  fetchSeriesProviders: async (seriesId) => {
    try {
      const response = await api.getSeriesProviders(seriesId);
      return response || [];
    } catch (error) {
      console.error('Failed to fetch series providers:', error);
      throw error;
    }
  },

  fetchCategories: async () => {
    try {
      const response = await api.getVODCategories();
      // Handle both array and paginated responses
      const results = response.results || response;

      set({
        categories: results.reduce((acc, category) => {
          acc[category.id] = category;
          return acc;
        }, {}),
      });
    } catch (error) {
      console.error('Failed to fetch VOD categories:', error);
      set({ error: 'Failed to load categories.' });
    }
  },

  addMovie: (movie) =>
    set((state) => ({
      movies: { ...state.movies, [movie.id]: movie },
    })),

  updateMovie: (movie) =>
    set((state) => ({
      movies: { ...state.movies, [movie.id]: movie },
    })),

  removeMovie: (movieId) =>
    set((state) => {
      const updatedMovies = { ...state.movies };
      delete updatedMovies[movieId];
      return { movies: updatedMovies };
    }),

  addSeries: (series) =>
    set((state) => ({
      series: { ...state.series, [series.id]: series },
    })),

  updateSeries: (series) =>
    set((state) => ({
      series: { ...state.series, [series.id]: series },
    })),

  removeSeries: (seriesId) =>
    set((state) => {
      const updatedSeries = { ...state.series };
      delete updatedSeries[seriesId];
      return { series: updatedSeries };
    }),

  fetchSeriesInfo: async (seriesId) => {
    set({ loading: true, error: null });
    try {
      const response = await api.getSeriesInfo(seriesId);

      // Transform the response data to match our expected format
      const seriesInfo = {
        id: response.id || seriesId,
        name: response.name || '',
        description: response.description || response.custom_properties?.plot || '',
        year: response.year || null,
        genre: response.genre || '',
        rating: response.rating || '',
        logo: response.cover || null,
        type: 'series',
        director: response.custom_properties?.director || '',
        cast: response.custom_properties?.cast || '',
        country: response.country || '',
        tmdb_id: response.tmdb_id || '',
        imdb_id: response.imdb_id || '',
        episode_count: response.episode_count || 0,
        // Additional provider fields
        backdrop_path: response.custom_properties?.backdrop_path || [],
        release_date: response.release_date || '',
        series_image: response.series_image || null,
        o_name: response.o_name || '',
        age: response.age || '',
        m3u_account: response.m3u_account || '',
        youtube_trailer: response.custom_properties?.youtube_trailer || '',
      };

      let episodesData = {};

      // Handle episodes - check if they're in the response
      if (response.episodes) {
        Object.entries(response.episodes).forEach(([seasonNumber, seasonEpisodes]) => {
          seasonEpisodes.forEach((episode) => {
            const episodeData = {
              id: episode.id,
              stream_id: episode.id,
              name: episode.title || '',
              description: episode.plot || '',
              season_number: parseInt(seasonNumber) || 0,
              episode_number: episode.episode_number || 0,
              duration_secs: episode.duration_secs || null,
              rating: episode.rating || '',
              container_extension: episode.container_extension || '',
              series: {
                id: seriesInfo.id,
                name: seriesInfo.name,
              },
              type: 'episode',
              uuid: episode.id, // Use the stream ID as UUID for playback
              logo: episode.movie_image ? { url: episode.movie_image } : null,
              air_date: episode.air_date || null,
              movie_image: episode.movie_image || null,
              tmdb_id: episode.tmdb_id || '',
              imdb_id: episode.imdb_id || '',
            };
            episodesData[episode.id] = episodeData;
          });
        });

        // Update episodes in the store
        set((state) => ({
          episodes: {
            ...state.episodes,
            ...episodesData,
          },
        }));
      }

      set((state) => ({
        series: {
          ...state.series,
          [seriesInfo.id]: seriesInfo,
        },
        loading: false,
      }));

      // Return series info with episodes array for easy access
      return {
        ...seriesInfo,
        episodesList: Object.values(episodesData),
      };
    } catch (error) {
      console.error('Failed to fetch series info:', error);
      set({ error: 'Failed to load series details.', loading: false });
      throw error;
    }
  },
}));

export default useVODStore;
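A sketch of how a page could drive this store; the component name and rendering are assumptions, while the store selectors come from the file above:

import React, { useEffect } from 'react';
import useVODStore from '../store/useVODStore';

const MovieGrid = () => {
  const movies = useVODStore((s) => s.movies);
  const loading = useVODStore((s) => s.loading);
  const setFilters = useVODStore((s) => s.setFilters);
  const fetchMovies = useVODStore((s) => s.fetchMovies);

  useEffect(() => {
    // Changing filters resets currentPage to 1; fetchMovies reads both from the store.
    setFilters({ type: 'movies', search: '' });
    fetchMovies();
  }, [setFilters, fetchMovies]);

  if (loading) return <div>Loading...</div>;
  return (
    <ul>
      {Object.values(movies).map((movie) => (
        <li key={movie.id}>{movie.name}</li>
      ))}
    </ul>
  );
};

export default MovieGrid;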
@ -7,17 +7,23 @@ import { create } from 'zustand';
const useVideoStore = create((set) => ({
  isVisible: false,
  streamUrl: null,
  contentType: 'live', // 'live' for MPEG-TS streams, 'vod' for MP4/MKV files
  metadata: null, // Store additional metadata for VOD content

  showVideo: (url, type = 'live', metadata = null) =>
    set({
      isVisible: true,
      streamUrl: url,
      contentType: type,
      metadata: metadata,
    }),

  hideVideo: () =>
    set({
      isVisible: false,
      streamUrl: null,
      contentType: 'live',
      metadata: null,
    }),
}));
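Playback of a VOD item might then be triggered like this; the store path and the metadata fields passed along are assumptions:

import useVideoStore from '../store/useVideoStore'; // assumed path

// Open the floating player in VOD mode for a movie fetched from useVODStore.
export function playMovie(movie) {
  useVideoStore
    .getState()
    .showVideo(movie.stream_url, 'vod', { name: movie.name, type: 'movie' });
}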