Mirror of https://github.com/Dispatcharr/Dispatcharr.git, synced 2026-01-23 02:35:14 +00:00

Merge pull request #670 from Dispatcharr/dev
Commit 3e77259b2c
43 changed files with 2868 additions and 1288 deletions
@@ -435,8 +435,8 @@ class ChannelViewSet(viewsets.ModelViewSet):
     @action(detail=False, methods=["patch"], url_path="edit/bulk")
     def edit_bulk(self, request):
         """
-        Bulk edit channels.
-        Expects a list of channels with their updates.
+        Bulk edit channels efficiently.
+        Validates all updates first, then applies in a single transaction.
         """
         data = request.data
         if not isinstance(data, list):
@@ -445,63 +445,94 @@ class ChannelViewSet(viewsets.ModelViewSet):
                 status=status.HTTP_400_BAD_REQUEST,
             )

-        updated_channels = []
-        errors = []
+        # Extract IDs and validate presence
+        channel_updates = {}
+        missing_ids = []

-        for channel_data in data:
+        for i, channel_data in enumerate(data):
             channel_id = channel_data.get("id")
             if not channel_id:
-                errors.append({"error": "Channel ID is required"})
-                continue
+                missing_ids.append(f"Item {i}: Channel ID is required")
+            else:
+                channel_updates[channel_id] = channel_data

-            try:
-                channel = Channel.objects.get(id=channel_id)
+        if missing_ids:
+            return Response(
+                {"errors": missing_ids},
+                status=status.HTTP_400_BAD_REQUEST,
+            )

-                # Handle channel_group_id properly - convert string to integer if needed
-                if 'channel_group_id' in channel_data:
-                    group_id = channel_data['channel_group_id']
-                    if group_id is not None:
-                        try:
-                            channel_data['channel_group_id'] = int(group_id)
-                        except (ValueError, TypeError):
-                            channel_data['channel_group_id'] = None
+        # Fetch all channels at once (one query)
+        channels_dict = {
+            c.id: c for c in Channel.objects.filter(id__in=channel_updates.keys())
+        }

-                # Use the serializer to validate and update
-                serializer = ChannelSerializer(
-                    channel, data=channel_data, partial=True
-                )
+        # Validate and prepare updates
+        validated_updates = []
+        errors = []

-                if serializer.is_valid():
-                    updated_channel = serializer.save()
-                    updated_channels.append(updated_channel)
-                else:
-                    errors.append({
-                        "channel_id": channel_id,
-                        "errors": serializer.errors
-                    })
+        for channel_id, channel_data in channel_updates.items():
+            channel = channels_dict.get(channel_id)

-            except Channel.DoesNotExist:
+            if not channel:
                 errors.append({
                     "channel_id": channel_id,
                     "error": "Channel not found"
                 })
-            except Exception as e:
+                continue
+
+            # Handle channel_group_id conversion
+            if 'channel_group_id' in channel_data:
+                group_id = channel_data['channel_group_id']
+                if group_id is not None:
+                    try:
+                        channel_data['channel_group_id'] = int(group_id)
+                    except (ValueError, TypeError):
+                        channel_data['channel_group_id'] = None
+
+            # Validate with serializer
+            serializer = ChannelSerializer(
+                channel, data=channel_data, partial=True
+            )
+
+            if serializer.is_valid():
+                validated_updates.append((channel, serializer.validated_data))
+            else:
                 errors.append({
                     "channel_id": channel_id,
-                    "error": str(e)
+                    "errors": serializer.errors
                 })

         if errors:
             return Response(
-                {"errors": errors, "updated_count": len(updated_channels)},
+                {"errors": errors, "updated_count": len(validated_updates)},
                 status=status.HTTP_400_BAD_REQUEST,
             )

-        # Serialize the updated channels for response
-        serialized_channels = ChannelSerializer(updated_channels, many=True).data
+        # Apply all updates in a transaction
+        with transaction.atomic():
+            for channel, validated_data in validated_updates:
+                for key, value in validated_data.items():
+                    setattr(channel, key, value)
+
+            # Single bulk_update query instead of individual saves
+            channels_to_update = [channel for channel, _ in validated_updates]
+            if channels_to_update:
+                Channel.objects.bulk_update(
+                    channels_to_update,
+                    fields=list(validated_updates[0][1].keys()),
+                    batch_size=100
+                )
+
+        # Return the updated objects (already in memory)
+        serialized_channels = ChannelSerializer(
+            [channel for channel, _ in validated_updates],
+            many=True,
+            context=self.get_serializer_context()
+        ).data

         return Response({
-            "message": f"Successfully updated {len(updated_channels)} channels",
+            "message": f"Successfully updated {len(validated_updates)} channels",
            "channels": serialized_channels
         })
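The hunk above replaces per-row save() calls with a single bulk_update(). A minimal sketch of the same validate-first, write-once pattern outside the viewset (function and parameter names here are hypothetical, not from the repo):

    from django.db import transaction

    def apply_bulk(model, validated):
        # validated: list of (instance, {field: value}) pairs, already checked
        with transaction.atomic():
            for obj, data in validated:
                for field, value in data.items():
                    setattr(obj, field, value)
            if validated:
                model.objects.bulk_update(
                    [obj for obj, _ in validated],
                    # assumes every item in the batch updates the same fields,
                    # mirroring fields=list(validated_updates[0][1].keys()) above
                    fields=list(validated[0][1].keys()),
                    batch_size=100,
                )

Note the design tradeoff inherited from the hunk: taking the field list from the first item silently drops fields that appear only in later items, so mixed-field batches would need a union of keys instead.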
@@ -987,19 +1018,27 @@ class ChannelViewSet(viewsets.ModelViewSet):
         channel.epg_data = epg_data
         channel.save(update_fields=["epg_data"])

-        # Explicitly trigger program refresh for this EPG
-        from apps.epg.tasks import parse_programs_for_tvg_id
+        # Only trigger program refresh for non-dummy EPG sources
+        status_message = None
+        if epg_data.epg_source.source_type != 'dummy':
+            # Explicitly trigger program refresh for this EPG
+            from apps.epg.tasks import parse_programs_for_tvg_id

-        task_result = parse_programs_for_tvg_id.delay(epg_data.id)
+            task_result = parse_programs_for_tvg_id.delay(epg_data.id)

-        # Prepare response with task status info
-        status_message = "EPG refresh queued"
-        if task_result.result == "Task already running":
-            status_message = "EPG refresh already in progress"
+            # Prepare response with task status info
+            status_message = "EPG refresh queued"
+            if task_result.result == "Task already running":
+                status_message = "EPG refresh already in progress"
+
+        # Build response message
+        message = f"EPG data set to {epg_data.tvg_id} for channel {channel.name}"
+        if status_message:
+            message += f". {status_message}"

         return Response(
             {
-                "message": f"EPG data set to {epg_data.tvg_id} for channel {channel.name}. {status_message}.",
+                "message": message,
                 "channel": self.get_serializer(channel).data,
                 "task_status": status_message,
             }
@@ -1031,8 +1070,15 @@ class ChannelViewSet(viewsets.ModelViewSet):
     def batch_set_epg(self, request):
         """Efficiently associate multiple channels with EPG data at once."""
         associations = request.data.get("associations", [])
-        channels_updated = 0
-        programs_refreshed = 0

         if not associations:
             return Response(
                 {"error": "associations list is required"},
                 status=status.HTTP_400_BAD_REQUEST,
             )

+        # Extract channel IDs upfront
+        channel_updates = {}
+        unique_epg_ids = set()
+
         for assoc in associations:
@@ -1042,32 +1088,58 @@ class ChannelViewSet(viewsets.ModelViewSet):
             if not channel_id:
                 continue

-            try:
-                # Get the channel
-                channel = Channel.objects.get(id=channel_id)
+            channel_updates[channel_id] = epg_data_id
+            if epg_data_id:
+                unique_epg_ids.add(epg_data_id)

-                # Set the EPG data
-                channel.epg_data_id = epg_data_id
-                channel.save(update_fields=["epg_data"])
-                channels_updated += 1
+        # Batch fetch all channels (single query)
+        channels_dict = {
+            c.id: c for c in Channel.objects.filter(id__in=channel_updates.keys())
+        }

-                # Track unique EPG data IDs
-                if epg_data_id:
-                    unique_epg_ids.add(epg_data_id)
-
-            except Channel.DoesNotExist:
+        # Collect channels to update
+        channels_to_update = []
+        for channel_id, epg_data_id in channel_updates.items():
+            if channel_id not in channels_dict:
                 logger.error(f"Channel with ID {channel_id} not found")
-            except Exception as e:
-                logger.error(
-                    f"Error setting EPG data for channel {channel_id}: {str(e)}"
-                )
+                continue
+
+            channel = channels_dict[channel_id]
+            channel.epg_data_id = epg_data_id
+            channels_to_update.append(channel)
+
+        # Bulk update all channels (single query)
+        if channels_to_update:
+            with transaction.atomic():
+                Channel.objects.bulk_update(
+                    channels_to_update,
+                    fields=["epg_data_id"],
+                    batch_size=100
+                )

-        # Trigger program refresh for unique EPG data IDs
-        from apps.epg.tasks import parse_programs_for_tvg_id
+        channels_updated = len(channels_to_update)
+
+        # Trigger program refresh for unique EPG data IDs (skip dummy EPGs)
+        from apps.epg.tasks import parse_programs_for_tvg_id
+        from apps.epg.models import EPGData
+
+        # Batch fetch EPG data (single query)
+        epg_data_dict = {
+            epg.id: epg
+            for epg in EPGData.objects.filter(id__in=unique_epg_ids).select_related('epg_source')
+        }
+
+        programs_refreshed = 0
         for epg_id in unique_epg_ids:
-            parse_programs_for_tvg_id.delay(epg_id)
-            programs_refreshed += 1
+            epg_data = epg_data_dict.get(epg_id)
+            if not epg_data:
+                logger.error(f"EPGData with ID {epg_id} not found")
+                continue
+
+            # Only refresh non-dummy EPG sources
+            if epg_data.epg_source.source_type != 'dummy':
+                parse_programs_for_tvg_id.delay(epg_id)
+                programs_refreshed += 1

         return Response(
             {
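The two-query shape used above (one filter(id__in=...) fetch for channels, one select_related fetch for EPG rows) generalizes to any fan-out update. An illustrative sketch reusing the models named in the hunk (the literal IDs are made up):

    ids = [1, 2, 3]
    rows = {obj.id: obj for obj in Channel.objects.filter(id__in=ids)}
    missing = [i for i in ids if i not in rows]  # report instead of raising per row

    # select_related('epg_source') joins the FK in the same query, so
    # epg.epg_source.source_type costs no extra round trip per row
    epgs = EPGData.objects.filter(id__in=[5, 6]).select_related('epg_source')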
@@ -1232,7 +1304,7 @@ class CleanupUnusedLogosAPIView(APIView):
         return [Authenticated()]

     @swagger_auto_schema(
-        operation_description="Delete all logos that are not used by any channels, movies, or series",
+        operation_description="Delete all channel logos that are not used by any channels",
         request_body=openapi.Schema(
             type=openapi.TYPE_OBJECT,
             properties={
@@ -1246,24 +1318,11 @@ class CleanupUnusedLogosAPIView(APIView):
         responses={200: "Cleanup completed"},
     )
     def post(self, request):
-        """Delete all logos with no channel, movie, or series associations"""
+        """Delete all channel logos with no channel associations"""
         delete_files = request.data.get("delete_files", False)

-        # Find logos that are not used by channels, movies, or series
-        filter_conditions = Q(channels__isnull=True)
-
-        # Add VOD conditions if models are available
-        try:
-            filter_conditions &= Q(movie__isnull=True)
-        except:
-            pass
-
-        try:
-            filter_conditions &= Q(series__isnull=True)
-        except:
-            pass
-
-        unused_logos = Logo.objects.filter(filter_conditions)
+        # Find logos that are not used by any channels
+        unused_logos = Logo.objects.filter(channels__isnull=True)
         deleted_count = unused_logos.count()
         logo_names = list(unused_logos.values_list('name', flat=True))
         local_files_deleted = 0
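The simplified filter relies on Django's reverse-relation isnull lookup: channels__isnull=True keeps only logos with no related Channel rows. A quick illustration of both directions, as used in this and the later LogoViewSet hunk:

    # Unused logos: LEFT JOIN to channels, keep rows where the join came up empty
    unused = Logo.objects.filter(channels__isnull=True)

    # Used logos: the same join can duplicate a logo once per channel, hence distinct()
    used = Logo.objects.filter(channels__isnull=False).distinct()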
@@ -1335,13 +1394,6 @@ class LogoViewSet(viewsets.ModelViewSet):
         # Start with basic prefetch for channels
         queryset = Logo.objects.prefetch_related('channels').order_by('name')

-        # Try to prefetch VOD relations if available
-        try:
-            queryset = queryset.prefetch_related('movie', 'series')
-        except:
-            # VOD app might not be available, continue without VOD prefetch
-            pass
-
         # Filter by specific IDs
         ids = self.request.query_params.getlist('ids')
         if ids:
@@ -1354,62 +1406,14 @@ class LogoViewSet(viewsets.ModelViewSet):
                 pass  # Invalid IDs, return empty queryset
                 queryset = Logo.objects.none()

-        # Filter by usage - now includes VOD content
+        # Filter by usage
         used_filter = self.request.query_params.get('used', None)
         if used_filter == 'true':
-            # Logo is used if it has any channels, movies, or series
-            filter_conditions = Q(channels__isnull=False)
-
-            # Add VOD conditions if models are available
-            try:
-                filter_conditions |= Q(movie__isnull=False)
-            except:
-                pass
-
-            try:
-                filter_conditions |= Q(series__isnull=False)
-            except:
-                pass
-
-            queryset = queryset.filter(filter_conditions).distinct()
-
+            # Logo is used if it has any channels
+            queryset = queryset.filter(channels__isnull=False).distinct()
         elif used_filter == 'false':
-            # Logo is unused if it has no channels, movies, or series
-            filter_conditions = Q(channels__isnull=True)
-
-            # Add VOD conditions if models are available
-            try:
-                filter_conditions &= Q(movie__isnull=True)
-            except:
-                pass
-
-            try:
-                filter_conditions &= Q(series__isnull=True)
-            except:
-                pass
-
-            queryset = queryset.filter(filter_conditions)
-
-        # Filter for channel assignment (unused + channel-used, exclude VOD-only)
-        channel_assignable = self.request.query_params.get('channel_assignable', None)
-        if channel_assignable == 'true':
-            # Include logos that are either:
-            # 1. Completely unused, OR
-            # 2. Used by channels (but may also be used by VOD)
-            # Exclude logos that are ONLY used by VOD content
-
-            unused_condition = Q(channels__isnull=True)
-            channel_used_condition = Q(channels__isnull=False)
-
-            # Add VOD conditions if models are available
-            try:
-                unused_condition &= Q(movie__isnull=True) & Q(series__isnull=True)
-            except:
-                pass
-
-            # Combine: unused OR used by channels
-            filter_conditions = unused_condition | channel_used_condition
-            queryset = queryset.filter(filter_conditions).distinct()
+            # Logo is unused if it has no channels
+            queryset = queryset.filter(channels__isnull=True)

         # Filter by name
         name_filter = self.request.query_params.get('name', None)
@@ -64,47 +64,15 @@ class LogoSerializer(serializers.ModelSerializer):
         return reverse("api:channels:logo-cache", args=[obj.id])

     def get_channel_count(self, obj):
-        """Get the number of channels, movies, and series using this logo"""
-        channel_count = obj.channels.count()
-
-        # Safely get movie count
-        try:
-            movie_count = obj.movie.count() if hasattr(obj, 'movie') else 0
-        except AttributeError:
-            movie_count = 0
-
-        # Safely get series count
-        try:
-            series_count = obj.series.count() if hasattr(obj, 'series') else 0
-        except AttributeError:
-            series_count = 0
-
-        return channel_count + movie_count + series_count
+        """Get the number of channels using this logo"""
+        return obj.channels.count()

     def get_is_used(self, obj):
-        """Check if this logo is used by any channels, movies, or series"""
-        # Check if used by channels
-        if obj.channels.exists():
-            return True
-
-        # Check if used by movies (handle case where VOD app might not be available)
-        try:
-            if hasattr(obj, 'movie') and obj.movie.exists():
-                return True
-        except AttributeError:
-            pass
-
-        # Check if used by series (handle case where VOD app might not be available)
-        try:
-            if hasattr(obj, 'series') and obj.series.exists():
-                return True
-        except AttributeError:
-            pass
-
-        return False
+        """Check if this logo is used by any channels"""
+        return obj.channels.exists()

     def get_channel_names(self, obj):
-        """Get the names of channels, movies, and series using this logo (limited to first 5)"""
+        """Get the names of channels using this logo (limited to first 5)"""
         names = []

         # Get channel names
@@ -112,28 +80,6 @@ class LogoSerializer(serializers.ModelSerializer):
         for channel in channels:
             names.append(f"Channel: {channel.name}")

-        # Get movie names (only if we haven't reached limit)
-        if len(names) < 5:
-            try:
-                if hasattr(obj, 'movie'):
-                    remaining_slots = 5 - len(names)
-                    movies = obj.movie.all()[:remaining_slots]
-                    for movie in movies:
-                        names.append(f"Movie: {movie.name}")
-            except AttributeError:
-                pass
-
-        # Get series names (only if we haven't reached limit)
-        if len(names) < 5:
-            try:
-                if hasattr(obj, 'series'):
-                    remaining_slots = 5 - len(names)
-                    series = obj.series.all()[:remaining_slots]
-                    for series_item in series:
-                        names.append(f"Series: {series_item.name}")
-            except AttributeError:
-                pass
-
         # Calculate total count for "more" message
         total_count = self.get_channel_count(obj)
         if total_count > 5:
@@ -4,7 +4,7 @@ from .models import EPGSource, EPGData, ProgramData
 from apps.channels.models import Channel

 class EPGSourceSerializer(serializers.ModelSerializer):
-    epg_data_ids = serializers.SerializerMethodField()
+    epg_data_count = serializers.SerializerMethodField()
     read_only_fields = ['created_at', 'updated_at']
     url = serializers.CharField(
         required=False,
@@ -29,11 +29,12 @@ class EPGSourceSerializer(serializers.ModelSerializer):
             'created_at',
             'updated_at',
             'custom_properties',
-            'epg_data_ids'
+            'epg_data_count'
         ]

-    def get_epg_data_ids(self, obj):
-        return list(obj.epgs.values_list('id', flat=True))
+    def get_epg_data_count(self, obj):
+        """Return the count of EPG data entries instead of all IDs to prevent large payloads"""
+        return obj.epgs.count()

 class ProgramDataSerializer(serializers.ModelSerializer):
     class Meta:
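Returning a count instead of a full ID list is a payload-size fix; the DRF mechanics are just a SerializerMethodField whose get_<field> method runs one aggregate query. A minimal standalone sketch (class name hypothetical, 'epgs' is the reverse relation used above):

    from rest_framework import serializers

    class SourceSketchSerializer(serializers.Serializer):
        epg_data_count = serializers.SerializerMethodField()

        def get_epg_data_count(self, obj):
            return obj.epgs.count()  # one COUNT(*) query, no rows materialized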
@@ -1157,6 +1157,12 @@ def parse_programs_for_tvg_id(epg_id):
     epg = EPGData.objects.get(id=epg_id)
     epg_source = epg.epg_source

+    # Skip program parsing for dummy EPG sources - they don't have program data files
+    if epg_source.source_type == 'dummy':
+        logger.info(f"Skipping program parsing for dummy EPG source {epg_source.name} (ID: {epg_id})")
+        release_task_lock('parse_epg_programs', epg_id)
+        return
+
     if not Channel.objects.filter(epg_data=epg).exists():
         logger.info(f"No channels matched to EPG {epg.tvg_id}")
         release_task_lock('parse_epg_programs', epg_id)
@@ -29,6 +29,7 @@ from core.models import CoreSettings, UserAgent
 from asgiref.sync import async_to_sync
 from core.xtream_codes import Client as XCClient
 from core.utils import send_websocket_update
+from .utils import normalize_stream_url

 logger = logging.getLogger(__name__)

@@ -219,6 +220,10 @@ def fetch_m3u_lines(account, use_cache=False):
             # Has HTTP URLs, might be a simple M3U without headers
             is_valid_m3u = True
             logger.info("Content validated as M3U: contains HTTP URLs")
+        elif any(line.strip().startswith(('rtsp', 'rtp', 'udp')) for line in content_lines):
+            # Has RTSP/RTP/UDP URLs, might be a simple M3U without headers
+            is_valid_m3u = True
+            logger.info("Content validated as M3U: contains RTSP/RTP/UDP URLs")

     if not is_valid_m3u:
         # Log what we actually received for debugging
@@ -434,25 +439,51 @@ def get_case_insensitive_attr(attributes, key, default=""):
 def parse_extinf_line(line: str) -> dict:
     """
     Parse an EXTINF line from an M3U file.
-    This function removes the "#EXTINF:" prefix, then splits the remaining
-    string on the first comma that is not enclosed in quotes.
+    This function removes the "#EXTINF:" prefix, then extracts all key="value" attributes,
+    and treats everything after the last attribute as the display name.

     Returns a dictionary with:
       - 'attributes': a dict of attribute key/value pairs (e.g. tvg-id, tvg-logo, group-title)
-      - 'display_name': the text after the comma (the fallback display name)
+      - 'display_name': the text after the attributes (the fallback display name)
       - 'name': the value from tvg-name (if present) or the display name otherwise.
     """
     if not line.startswith("#EXTINF:"):
         return None
     content = line[len("#EXTINF:") :].strip()
-    # Split on the first comma that is not inside quotes.
-    parts = re.split(r',(?=(?:[^"]*"[^"]*")*[^"]*$)', content, maxsplit=1)
-    if len(parts) != 2:
-        return None
-    attributes_part, display_name = parts[0], parts[1].strip()
-    attrs = dict(re.findall(r'([^\s]+)="([^"]+)"', attributes_part) + re.findall(r"([^\s]+)='([^']+)'", attributes_part))
-    # Use tvg-name attribute if available; otherwise, use the display name.
-    name = get_case_insensitive_attr(attrs, "tvg-name", display_name)
+
+    # Single pass: extract all attributes AND track the last attribute position
+    # This regex matches both key="value" and key='value' patterns
+    attrs = {}
+    last_attr_end = 0
+
+    # Use a single regex that handles both quote types
+    for match in re.finditer(r'([^\s]+)=(["\'])([^\2]*?)\2', content):
+        key = match.group(1)
+        value = match.group(3)
+        attrs[key] = value
+        last_attr_end = match.end()
+
+    # Everything after the last attribute (skipping leading comma and whitespace) is the display name
+    if last_attr_end > 0:
+        remaining = content[last_attr_end:].strip()
+        # Remove leading comma if present
+        if remaining.startswith(','):
+            remaining = remaining[1:].strip()
+        display_name = remaining
+    else:
+        # No attributes found, try the old comma-split method as fallback
+        parts = content.split(',', 1)
+        if len(parts) == 2:
+            display_name = parts[1].strip()
+        else:
+            display_name = content.strip()
+
+    # Use tvg-name attribute if available; otherwise try tvc-guide-title, then fall back to display name.
+    name = get_case_insensitive_attr(attrs, "tvg-name", None)
+    if not name:
+        name = get_case_insensitive_attr(attrs, "tvc-guide-title", None)
+    if not name:
+        name = display_name
     return {"attributes": attrs, "display_name": display_name, "name": name}
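To see what the new attribute-first parser produces, here is a self-contained sketch of the same technique on a sample EXTINF line (the quote-handling regex is slightly simplified relative to the hunk):

    import re

    line = '#EXTINF:-1 tvg-id="abc.us" tvg-name="ABC HD" group-title="News",ABC HD'
    content = line[len("#EXTINF:"):].strip()

    attrs = {}
    last_end = 0
    for m in re.finditer(r'([^\s]+)=(["\'])(.*?)\2', content):
        attrs[m.group(1)] = m.group(3)
        last_end = m.end()

    display_name = content[last_end:].lstrip(', ').strip()
    print(attrs)         # {'tvg-id': 'abc.us', 'tvg-name': 'ABC HD', 'group-title': 'News'}
    print(display_name)  # ABC HD

Unlike the old first-unquoted-comma split, this approach survives channel names and attribute values that themselves contain commas.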
@@ -1186,52 +1217,14 @@ def refresh_m3u_groups(account_id, use_cache=False, full_refresh=False):
             auth_result = xc_client.authenticate()
             logger.debug(f"Authentication response: {auth_result}")

-            # Save account information to all active profiles
+            # Queue async profile refresh task to run in background
+            # This prevents any delay in the main refresh process
             try:
-                from apps.m3u.models import M3UAccountProfile
-
-                profiles = M3UAccountProfile.objects.filter(
-                    m3u_account=account,
-                    is_active=True
-                )
-
-                # Update each profile with account information using its own transformed credentials
-                for profile in profiles:
-                    try:
-                        # Get transformed credentials for this specific profile
-                        profile_url, profile_username, profile_password = get_transformed_credentials(account, profile)
-
-                        # Create a separate XC client for this profile's credentials
-                        with XCClient(
-                            profile_url,
-                            profile_username,
-                            profile_password,
-                            user_agent_string
-                        ) as profile_client:
-                            # Authenticate with this profile's credentials
-                            if profile_client.authenticate():
-                                # Get account information specific to this profile's credentials
-                                profile_account_info = profile_client.get_account_info()
-
-                                # Merge with existing custom_properties if they exist
-                                existing_props = profile.custom_properties or {}
-                                existing_props.update(profile_account_info)
-                                profile.custom_properties = existing_props
-                                profile.save(update_fields=['custom_properties'])
-
-                                logger.info(f"Updated account information for profile '{profile.name}' with transformed credentials")
-                            else:
-                                logger.warning(f"Failed to authenticate profile '{profile.name}' with transformed credentials")
-
-                    except Exception as profile_error:
-                        logger.error(f"Failed to update account information for profile '{profile.name}': {str(profile_error)}")
-                        # Continue with other profiles even if one fails
-
-                logger.info(f"Processed account information for {profiles.count()} profiles for account {account.name}")
-
-            except Exception as save_error:
-                logger.warning(f"Failed to process profile account information: {str(save_error)}")
-                # Don't fail the whole process if saving account info fails
+                logger.info(f"Queueing background profile refresh for account {account.name}")
+                refresh_account_profiles.delay(account.id)
+            except Exception as e:
+                logger.warning(f"Failed to queue profile refresh task: {str(e)}")
+                # Don't fail the main refresh if profile refresh can't be queued

         except Exception as e:
             error_msg = f"Failed to authenticate with XC server: {str(e)}"
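The refactor above swaps a long synchronous per-profile loop for a queued Celery task, so the main refresh returns immediately. A minimal sketch of this fire-and-forget pattern (task and function names here are hypothetical):

    from celery import shared_task

    @shared_task
    def slow_side_work(account_id):
        ...  # runs later in a Celery worker, off the refresh path

    def main_flow(account_id):
        try:
            slow_side_work.delay(account_id)  # enqueue and return immediately
        except Exception as e:
            # A queueing failure must never break the main flow
            print(f"could not queue side work: {e}")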
@@ -1373,10 +1366,12 @@ def refresh_m3u_groups(account_id, use_cache=False, full_refresh=False):
                 )
                 problematic_lines.append((line_index + 1, line[:200]))

-            elif extinf_data and line.startswith("http"):
+            elif extinf_data and (line.startswith("http") or line.startswith("rtsp") or line.startswith("rtp") or line.startswith("udp")):
                 url_count += 1
+                # Normalize UDP URLs only (e.g., remove VLC-specific @ prefix)
+                normalized_url = normalize_stream_url(line) if line.startswith("udp") else line
                 # Associate URL with the last EXTINF line
-                extinf_data[-1]["url"] = line
+                extinf_data[-1]["url"] = normalized_url
                 valid_stream_count += 1

             # Periodically log progress for large files
@@ -2236,6 +2231,106 @@ def get_transformed_credentials(account, profile=None):
     return base_url, base_username, base_password


+@shared_task
+def refresh_account_profiles(account_id):
+    """Refresh account information for all active profiles of an XC account.
+
+    This task runs asynchronously in the background after account refresh completes.
+    It includes rate limiting delays between profile authentications to prevent provider bans.
+    """
+    from django.conf import settings
+    import time
+
+    try:
+        account = M3UAccount.objects.get(id=account_id, is_active=True)
+
+        if account.account_type != M3UAccount.Types.XC:
+            logger.debug(f"Account {account_id} is not XC type, skipping profile refresh")
+            return f"Account {account_id} is not an XtreamCodes account"
+
+        from apps.m3u.models import M3UAccountProfile
+
+        profiles = M3UAccountProfile.objects.filter(
+            m3u_account=account,
+            is_active=True
+        )
+
+        if not profiles.exists():
+            logger.info(f"No active profiles found for account {account.name}")
+            return f"No active profiles for account {account_id}"
+
+        # Get user agent for this account
+        try:
+            user_agent_string = "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36"
+            if account.user_agent_id:
+                from core.models import UserAgent
+                ua_obj = UserAgent.objects.get(id=account.user_agent_id)
+                if ua_obj and hasattr(ua_obj, "user_agent") and ua_obj.user_agent:
+                    user_agent_string = ua_obj.user_agent
+        except Exception as e:
+            logger.warning(f"Error getting user agent, using fallback: {str(e)}")
+
+        logger.debug(f"Using user agent for profile refresh: {user_agent_string}")
+
+        # Get rate limiting delay from settings
+        profile_delay = getattr(settings, 'XC_PROFILE_REFRESH_DELAY', 2.5)
+
+        profiles_updated = 0
+        profiles_failed = 0
+
+        logger.info(f"Starting background refresh for {profiles.count()} profiles of account {account.name}")
+
+        for idx, profile in enumerate(profiles):
+            try:
+                # Add delay between profiles to prevent rate limiting (except for first profile)
+                if idx > 0:
+                    logger.info(f"Waiting {profile_delay}s before refreshing next profile to avoid rate limiting")
+                    time.sleep(profile_delay)
+
+                # Get transformed credentials for this specific profile
+                profile_url, profile_username, profile_password = get_transformed_credentials(account, profile)
+
+                # Create a separate XC client for this profile's credentials
+                with XCClient(
+                    profile_url,
+                    profile_username,
+                    profile_password,
+                    user_agent_string
+                ) as profile_client:
+                    # Authenticate with this profile's credentials
+                    if profile_client.authenticate():
+                        # Get account information specific to this profile's credentials
+                        profile_account_info = profile_client.get_account_info()
+
+                        # Merge with existing custom_properties if they exist
+                        existing_props = profile.custom_properties or {}
+                        existing_props.update(profile_account_info)
+                        profile.custom_properties = existing_props
+                        profile.save(update_fields=['custom_properties'])
+
+                        profiles_updated += 1
+                        logger.info(f"Updated account information for profile '{profile.name}' ({profiles_updated}/{profiles.count()})")
+                    else:
+                        profiles_failed += 1
+                        logger.warning(f"Failed to authenticate profile '{profile.name}' with transformed credentials")
+
+            except Exception as profile_error:
+                profiles_failed += 1
+                logger.error(f"Failed to update account information for profile '{profile.name}': {str(profile_error)}")
+                # Continue with other profiles even if one fails
+
+        result_msg = f"Profile refresh complete for account {account.name}: {profiles_updated} updated, {profiles_failed} failed"
+        logger.info(result_msg)
+        return result_msg
+
+    except M3UAccount.DoesNotExist:
+        error_msg = f"Account {account_id} not found"
+        logger.error(error_msg)
+        return error_msg
+    except Exception as e:
+        error_msg = f"Error refreshing profiles for account {account_id}: {str(e)}"
+        logger.error(error_msg)
+        return error_msg
+
+
 @shared_task
 def refresh_account_info(profile_id):
     """Refresh only the account information for a specific M3U profile."""
@@ -8,6 +8,34 @@ lock = threading.Lock()
 active_streams_map = {}
 logger = logging.getLogger(__name__)


+def normalize_stream_url(url):
+    """
+    Normalize stream URLs for compatibility with FFmpeg.
+
+    Handles VLC-specific syntax like udp://@239.0.0.1:1234 by removing the @ symbol.
+    FFmpeg doesn't recognize the @ prefix for multicast addresses.
+
+    Args:
+        url (str): The stream URL to normalize
+
+    Returns:
+        str: The normalized URL
+    """
+    if not url:
+        return url
+
+    # Handle VLC-style UDP multicast URLs: udp://@239.0.0.1:1234 -> udp://239.0.0.1:1234
+    # The @ symbol in VLC means "listen on all interfaces" but FFmpeg doesn't use this syntax
+    if url.startswith('udp://@'):
+        normalized = url.replace('udp://@', 'udp://', 1)
+        logger.debug(f"Normalized VLC-style UDP URL: {url} -> {normalized}")
+        return normalized
+
+    # Could add other normalizations here in the future (rtp://@, etc.)
+    return url
+
+
 def increment_stream_count(account):
     with lock:
         current_usage = active_streams_map.get(account.id, 0)
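A quick sanity check of the rewrite rule, runnable once the function is imported:

    assert normalize_stream_url('udp://@239.0.0.1:1234') == 'udp://239.0.0.1:1234'
    assert normalize_stream_url('udp://239.0.0.1:1234') == 'udp://239.0.0.1:1234'      # already clean
    assert normalize_stream_url('http://example.com/a.ts') == 'http://example.com/a.ts'  # untouched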
@@ -14,3 +14,26 @@ class OutputM3UTest(TestCase):
         self.assertEqual(response.status_code, 200)
         content = response.content.decode()
         self.assertIn("#EXTM3U", content)
+
+    def test_generate_m3u_response_post_empty_body(self):
+        """
+        Test that a POST request with an empty body returns 200 OK.
+        """
+        url = reverse('output:generate_m3u')
+
+        response = self.client.post(url, data=None, content_type='application/x-www-form-urlencoded')
+        content = response.content.decode()
+
+        self.assertEqual(response.status_code, 200, "POST with empty body should return 200 OK")
+        self.assertIn("#EXTM3U", content)
+
+    def test_generate_m3u_response_post_with_body(self):
+        """
+        Test that a POST request with a non-empty body returns 403 Forbidden.
+        """
+        url = reverse('output:generate_m3u')
+
+        response = self.client.post(url, data={'evilstring': 'muhahaha'})
+
+        self.assertEqual(response.status_code, 403, "POST with body should return 403 Forbidden")
+        self.assertIn("POST requests with body are not allowed, body is:", response.content.decode())
@@ -46,10 +46,12 @@ def generate_m3u(request, profile_name=None, user=None):
     The stream URL now points to the new stream_view that uses StreamProfile.
+    Supports both GET and POST methods for compatibility with IPTVSmarters.
     """
-    # Check if this is a POST request with data (which we don't want to allow)
+    logger.debug("Generating M3U for profile: %s, user: %s", profile_name, user.username if user else "Anonymous")
+    # Check if this is a POST request and the body is not empty (which we don't want to allow)
     if request.method == "POST" and request.body:
-        return HttpResponseForbidden("POST requests with content are not allowed")
+        if request.body.decode() != '{}':
+            return HttpResponseForbidden("POST requests with body are not allowed, body is: {}".format(request.body.decode()))

     if user is not None:
         if user.user_level == 0:
@@ -2115,7 +2117,7 @@ def xc_get_vod_streams(request, user, category_id=None):
                     None if not movie.logo
                     else build_absolute_uri_with_port(
                         request,
-                        reverse("api:channels:logo-cache", args=[movie.logo.id])
+                        reverse("api:vod:vodlogo-cache", args=[movie.logo.id])
                     )
                 ),
                 #'stream_icon': movie.logo.url if movie.logo else '',
@@ -2185,7 +2187,7 @@ def xc_get_series(request, user, category_id=None):
                     None if not series.logo
                     else build_absolute_uri_with_port(
                         request,
-                        reverse("api:channels:logo-cache", args=[series.logo.id])
+                        reverse("api:vod:vodlogo-cache", args=[series.logo.id])
                     )
                 ),
                 "plot": series.description or "",
@@ -2378,7 +2380,7 @@ def xc_get_series_info(request, user, series_id):
                 None if not series.logo
                 else build_absolute_uri_with_port(
                     request,
-                    reverse("api:channels:logo-cache", args=[series.logo.id])
+                    reverse("api:vod:vodlogo-cache", args=[series.logo.id])
                 )
             ),
             "plot": series_data['description'],
@@ -2506,14 +2508,14 @@ def xc_get_vod_info(request, user, vod_id):
                 None if not movie.logo
                 else build_absolute_uri_with_port(
                     request,
-                    reverse("api:channels:logo-cache", args=[movie.logo.id])
+                    reverse("api:vod:vodlogo-cache", args=[movie.logo.id])
                 )
             ),
             "movie_image": (
                 None if not movie.logo
                 else build_absolute_uri_with_port(
                     request,
-                    reverse("api:channels:logo-cache", args=[movie.logo.id])
+                    reverse("api:vod:vodlogo-cache", args=[movie.logo.id])
                 )
             ),
             'description': movie_data.get('description', ''),
@@ -2626,50 +2628,78 @@ def get_host_and_port(request):
     Returns (host, port) for building absolute URIs.
     - Prefers X-Forwarded-Host/X-Forwarded-Port (nginx).
     - Falls back to Host header.
-    - In dev, if missing, uses 5656 or 8000 as a guess.
+    - Returns None for port if using standard ports (80/443) to omit from URLs.
+    - In dev, uses 5656 as a guess if port cannot be determined.
     """
-    # 1. Try X-Forwarded-Host (may include port)
+    # Determine the scheme first - needed for standard port detection
+    scheme = request.META.get("HTTP_X_FORWARDED_PROTO", request.scheme)
+    standard_port = "443" if scheme == "https" else "80"
+
+    # 1. Try X-Forwarded-Host (may include port) - set by our nginx
     xfh = request.META.get("HTTP_X_FORWARDED_HOST")
     if xfh:
         if ":" in xfh:
             host, port = xfh.split(":", 1)
-            return host, port
+            # Omit standard ports from URLs, or omit if port doesn't match standard for scheme
+            # (e.g., HTTPS but port is 9191 = behind external reverse proxy)
+            if port == standard_port:
+                return host, None
+            # If port doesn't match standard and X-Forwarded-Proto is set, likely behind external RP
+            if request.META.get("HTTP_X_FORWARDED_PROTO"):
+                host = xfh.split(":")[0]  # Strip port, will check for proper port below
+            else:
+                return host, port
         else:
             host = xfh
-            port = request.META.get("HTTP_X_FORWARDED_PORT")
-            if port:
-                return host, port
+
+        # Check for X-Forwarded-Port header (if we didn't already find a valid port)
+        port = request.META.get("HTTP_X_FORWARDED_PORT")
+        if port:
+            # Omit standard ports from URLs
+            return host, None if port == standard_port else port
+        # If X-Forwarded-Proto is set but no valid port, assume standard
+        if request.META.get("HTTP_X_FORWARDED_PROTO"):
+            return host, None

     # 2. Try Host header
     raw_host = request.get_host()
     if ":" in raw_host:
         host, port = raw_host.split(":", 1)
-        return host, port
+        # Omit standard ports from URLs
+        return host, None if port == standard_port else port
     else:
         host = raw_host

-    # 3. Try X-Forwarded-Port
-    port = request.META.get("HTTP_X_FORWARDED_PORT")
-    if port:
-        return host, port
+    # 3. Check if we're behind a reverse proxy (X-Forwarded-Proto or X-Forwarded-For present)
+    # If so, assume standard port for the scheme (don't trust SERVER_PORT in this case)
+    if request.META.get("HTTP_X_FORWARDED_PROTO") or request.META.get("HTTP_X_FORWARDED_FOR"):
+        return host, None

-    # 4. Try SERVER_PORT from META
+    # 4. Try SERVER_PORT from META (only if NOT behind reverse proxy)
     port = request.META.get("SERVER_PORT")
     if port:
-        return host, port
+        # Omit standard ports from URLs
+        return host, None if port == standard_port else port

-    # 5. Dev fallback: guess port
+    # 5. Dev fallback: guess port 5656
     if os.environ.get("DISPATCHARR_ENV") == "dev" or host in ("localhost", "127.0.0.1"):
-        guess = "5656"
-        return host, guess
+        return host, "5656"

-    # 6. Fallback to scheme default
-    port = "443" if request.is_secure() else "9191"
-    return host, port
+    # 6. Final fallback: assume standard port for scheme (omit from URL)
+    return host, None

 def build_absolute_uri_with_port(request, path):
     """
     Build an absolute URI with optional port.
+    Port is omitted from URL if None (standard port for scheme).
     """
     host, port = get_host_and_port(request)
-    scheme = request.scheme
-    return f"{scheme}://{host}:{port}{path}"
+    scheme = request.META.get("HTTP_X_FORWARDED_PROTO", request.scheme)
+
+    if port:
+        return f"{scheme}://{host}:{port}{path}"
+    else:
+        return f"{scheme}://{host}{path}"

 def format_duration_hms(seconds):
     """
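An illustrative exercise of the new header logic using Django's RequestFactory (host name and path are made up; requires Django settings to be configured):

    from django.test import RequestFactory

    rf = RequestFactory()
    req = rf.get("/stream/5",
                 HTTP_X_FORWARDED_HOST="iptv.example.com:443",
                 HTTP_X_FORWARDED_PROTO="https")
    # get_host_and_port(req) -> ("iptv.example.com", None): 443 is HTTPS's standard port
    # build_absolute_uri_with_port(req, "/stream/5")
    #   -> "https://iptv.example.com/stream/5" (port correctly omitted)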
@@ -33,6 +33,8 @@ class EventType:

 # Stream types
 class StreamType:
     HLS = "hls"
+    RTSP = "rtsp"
+    UDP = "udp"
     TS = "ts"
     UNKNOWN = "unknown"
@@ -703,9 +703,10 @@ class ProxyServer:
                     state = metadata.get(b'state', b'unknown').decode('utf-8')
                     owner = metadata.get(b'owner', b'').decode('utf-8')

-                    # States that indicate the channel is running properly
+                    # States that indicate the channel is running properly or shutting down
                     valid_states = [ChannelState.ACTIVE, ChannelState.WAITING_FOR_CLIENTS,
-                                    ChannelState.CONNECTING, ChannelState.BUFFERING, ChannelState.INITIALIZING]
+                                    ChannelState.CONNECTING, ChannelState.BUFFERING, ChannelState.INITIALIZING,
+                                    ChannelState.STOPPING]

                     # If the channel is in a valid state, check if the owner is still active
                     if state in valid_states:
@@ -718,12 +719,24 @@ class ProxyServer:
                         else:
                             # This is a zombie channel - owner is gone but metadata still exists
                             logger.warning(f"Detected zombie channel {channel_id} - owner {owner} is no longer active")
+
+                            # Check if there are any clients connected
+                            client_set_key = RedisKeys.clients(channel_id)
+                            client_count = self.redis_client.scard(client_set_key) or 0
+
+                            if client_count > 0:
+                                logger.warning(f"Zombie channel {channel_id} has {client_count} clients - attempting ownership takeover")
+                                # Could potentially take ownership here in the future
+                                # For now, just clean it up to be safe
+                            else:
+                                logger.warning(f"Zombie channel {channel_id} has no clients - cleaning up")
+
                             self._clean_zombie_channel(channel_id, metadata)
                             return False
-                    elif state in [ChannelState.STOPPING, ChannelState.STOPPED, ChannelState.ERROR]:
-                        # These states indicate the channel should be reinitialized
-                        logger.info(f"Channel {channel_id} exists but in terminal state: {state}")
-                        return True
+                    elif state in [ChannelState.STOPPED, ChannelState.ERROR]:
+                        # These terminal states indicate the channel should be cleaned up and reinitialized
+                        logger.info(f"Channel {channel_id} in terminal state {state} - returning False to trigger cleanup")
+                        return False
                     else:
                         # Unknown or initializing state, check how long it's been in this state
                         if b'state_changed_at' in metadata:
@@ -939,6 +952,15 @@ class ProxyServer:
                     if channel_id in self.client_managers:
                         client_manager = self.client_managers[channel_id]
                         total_clients = client_manager.get_total_client_count()
+                    else:
+                        # This can happen during reconnection attempts or crashes
+                        # Check Redis directly for any connected clients
+                        if self.redis_client:
+                            client_set_key = RedisKeys.clients(channel_id)
+                            total_clients = self.redis_client.scard(client_set_key) or 0
+
+                            if total_clients == 0:
+                                logger.warning(f"Channel {channel_id} is missing client_manager but we're the owner with 0 clients - will trigger cleanup")

                     # Log client count periodically
                     if time.time() % 30 < 1:  # Every ~30 seconds
@@ -946,7 +968,7 @@ class ProxyServer:

                     # If in connecting or waiting_for_clients state, check grace period
                     if channel_state in [ChannelState.CONNECTING, ChannelState.WAITING_FOR_CLIENTS]:
-                        # Get connection ready time from metadata
+                        # Get connection_ready_time from metadata (indicates if channel reached ready state)
                         connection_ready_time = None
                         if metadata and b'connection_ready_time' in metadata:
                             try:
@@ -954,17 +976,60 @@ class ProxyServer:
                             except (ValueError, TypeError):
                                 pass

-                        # If still connecting, give it more time
-                        if channel_state == ChannelState.CONNECTING:
-                            logger.debug(f"Channel {channel_id} still connecting - not checking for clients yet")
-                            continue
+                        if total_clients == 0:
+                            # Check if we have a connection_attempt timestamp (set when CONNECTING starts)
+                            connection_attempt_time = None
+                            attempt_key = RedisKeys.connection_attempt(channel_id)
+                            if self.redis_client:
+                                attempt_value = self.redis_client.get(attempt_key)
+                                if attempt_value:
+                                    try:
+                                        connection_attempt_time = float(attempt_value.decode('utf-8'))
+                                    except (ValueError, TypeError):
+                                        pass

-                        # If waiting for clients, check grace period
-                        if connection_ready_time:
+                            # Also get init time as a fallback
+                            init_time = None
+                            if metadata and b'init_time' in metadata:
+                                try:
+                                    init_time = float(metadata[b'init_time'].decode('utf-8'))
+                                except (ValueError, TypeError):
+                                    pass
+
+                            # Use whichever timestamp we have (prefer connection_attempt as it's more recent)
+                            start_time = connection_attempt_time or init_time
+
+                            if start_time:
+                                # Check which timeout to apply based on channel lifecycle
+                                if connection_ready_time:
+                                    # Already reached ready - use shutdown_delay
+                                    time_since_ready = time.time() - connection_ready_time
+                                    shutdown_delay = ConfigHelper.channel_shutdown_delay()
+
+                                    if time_since_ready > shutdown_delay:
+                                        logger.warning(
+                                            f"Channel {channel_id} in {channel_state} state with 0 clients for {time_since_ready:.1f}s "
+                                            f"(after reaching ready, shutdown_delay: {shutdown_delay}s) - stopping channel"
+                                        )
+                                        self.stop_channel(channel_id)
+                                        continue
+                                else:
+                                    # Never reached ready - use grace_period timeout
+                                    time_since_start = time.time() - start_time
+                                    connecting_timeout = ConfigHelper.channel_init_grace_period()
+
+                                    if time_since_start > connecting_timeout:
+                                        logger.warning(
+                                            f"Channel {channel_id} stuck in {channel_state} state for {time_since_start:.1f}s "
+                                            f"with no clients (timeout: {connecting_timeout}s) - stopping channel due to upstream issues"
+                                        )
+                                        self.stop_channel(channel_id)
+                                        continue
+                        elif connection_ready_time:
+                            # We have clients now, but check grace period for state transition
                             grace_period = ConfigHelper.channel_init_grace_period()
                             time_since_ready = time.time() - connection_ready_time

                             # Add this debug log
                             logger.debug(f"GRACE PERIOD CHECK: Channel {channel_id} in {channel_state} state, "
                                          f"time_since_ready={time_since_ready:.1f}s, grace_period={grace_period}s, "
                                          f"total_clients={total_clients}")
@@ -973,16 +1038,9 @@ class ProxyServer:
                                 # Still within grace period
                                 logger.debug(f"Channel {channel_id} in grace period - {time_since_ready:.1f}s of {grace_period}s elapsed")
                                 continue
-                            elif total_clients == 0:
-                                # Grace period expired with no clients
-                                logger.info(f"Grace period expired ({time_since_ready:.1f}s > {grace_period}s) with no clients - stopping channel {channel_id}")
-                                self.stop_channel(channel_id)
                             else:
-                                # Grace period expired but we have clients - mark channel as active
+                                # Grace period expired with clients - mark channel as active
                                 logger.info(f"Grace period expired with {total_clients} clients - marking channel {channel_id} as active")
                                 old_state = "unknown"
                                 if metadata and b'state' in metadata:
                                     old_state = metadata[b'state'].decode('utf-8')
                                 if self.update_channel_state(channel_id, ChannelState.ACTIVE, {
                                     "grace_period_ended_at": str(time.time()),
                                     "clients_at_activation": str(total_clients)
@@ -1049,14 +1107,30 @@ class ProxyServer:
                         continue

                     # Check for local client count - if zero, clean up our local resources
-                    if self.client_managers[channel_id].get_client_count() == 0:
-                        # We're not the owner, and we have no local clients - clean up our resources
-                        logger.debug(f"Non-owner cleanup: Channel {channel_id} has no local clients, cleaning up local resources")
+                    if channel_id in self.client_managers:
+                        if self.client_managers[channel_id].get_client_count() == 0:
+                            # We're not the owner, and we have no local clients - clean up our resources
+                            logger.debug(f"Non-owner cleanup: Channel {channel_id} has no local clients, cleaning up local resources")
+                            self._cleanup_local_resources(channel_id)
+                    else:
+                        # This shouldn't happen, but clean up anyway
+                        logger.warning(f"Non-owner cleanup: Channel {channel_id} has no client_manager entry, cleaning up local resources")
                         self._cleanup_local_resources(channel_id)

             except Exception as e:
                 logger.error(f"Error in cleanup thread: {e}", exc_info=True)

+            # Periodically check for orphaned channels (every 30 seconds)
+            if hasattr(self, '_last_orphan_check'):
+                if time.time() - self._last_orphan_check > 30:
+                    try:
+                        self._check_orphaned_metadata()
+                        self._last_orphan_check = time.time()
+                    except Exception as orphan_error:
+                        logger.error(f"Error checking orphaned metadata: {orphan_error}", exc_info=True)
+            else:
+                self._last_orphan_check = time.time()
+
             gevent.sleep(ConfigHelper.cleanup_check_interval())  # REPLACE: time.sleep(ConfigHelper.cleanup_check_interval())

         thread = threading.Thread(target=cleanup_task, daemon=True)
@@ -1078,10 +1152,6 @@ class ProxyServer:
                 try:
                     channel_id = key.decode('utf-8').split(':')[2]

-                    # Skip channels we already have locally
-                    if channel_id in self.stream_buffers:
-                        continue
-
                     # Check if this channel has an owner
                     owner = self.get_channel_owner(channel_id)
@@ -1096,13 +1166,84 @@ class ProxyServer:
                     else:
                         # Orphaned channel with no clients - clean it up
                         logger.info(f"Cleaning up orphaned channel {channel_id}")
-                        self._clean_redis_keys(channel_id)
+
+                        # If we have it locally, stop it properly to clean up processes
+                        if channel_id in self.stream_managers or channel_id in self.client_managers:
+                            logger.info(f"Orphaned channel {channel_id} is local - calling stop_channel")
+                            self.stop_channel(channel_id)
+                        else:
+                            # Just clean up Redis keys for remote channels
+                            self._clean_redis_keys(channel_id)
                 except Exception as e:
                     logger.error(f"Error processing channel key {key}: {e}")

         except Exception as e:
             logger.error(f"Error checking orphaned channels: {e}")

+    def _check_orphaned_metadata(self):
+        """
+        Check for metadata entries that have no owner and no clients.
+        This catches zombie channels that weren't cleaned up properly.
+        """
+        if not self.redis_client:
+            return
+
+        try:
+            # Get all channel metadata keys
+            channel_pattern = "ts_proxy:channel:*:metadata"
+            channel_keys = self.redis_client.keys(channel_pattern)
+
+            for key in channel_keys:
+                try:
+                    channel_id = key.decode('utf-8').split(':')[2]
+
+                    # Get metadata first
+                    metadata = self.redis_client.hgetall(key)
+                    if not metadata:
+                        # Empty metadata - clean it up
+                        logger.warning(f"Found empty metadata for channel {channel_id} - cleaning up")
+                        # If we have it locally, stop it properly
+                        if channel_id in self.stream_managers or channel_id in self.client_managers:
+                            self.stop_channel(channel_id)
+                        else:
+                            self._clean_redis_keys(channel_id)
+                        continue
+
+                    # Get owner
+                    owner = metadata.get(b'owner', b'').decode('utf-8') if b'owner' in metadata else ''
+
+                    # Check if owner is still alive
+                    owner_alive = False
+                    if owner:
+                        owner_heartbeat_key = f"ts_proxy:worker:{owner}:heartbeat"
+                        owner_alive = self.redis_client.exists(owner_heartbeat_key)
+
+                    # Check client count
+                    client_set_key = RedisKeys.clients(channel_id)
+                    client_count = self.redis_client.scard(client_set_key) or 0
+
+                    # If no owner and no clients, clean it up
+                    if not owner_alive and client_count == 0:
+                        state = metadata.get(b'state', b'unknown').decode('utf-8') if b'state' in metadata else 'unknown'
+                        logger.warning(f"Found orphaned metadata for channel {channel_id} (state: {state}, owner: {owner}, clients: {client_count}) - cleaning up")
+
+                        # If we have it locally, stop it properly to clean up transcode/proxy processes
+                        if channel_id in self.stream_managers or channel_id in self.client_managers:
+                            logger.info(f"Channel {channel_id} is local - calling stop_channel to clean up processes")
+                            self.stop_channel(channel_id)
+                        else:
+                            # Just clean up Redis keys for remote channels
+                            self._clean_redis_keys(channel_id)
+                    elif not owner_alive and client_count > 0:
+                        # Owner is gone but clients remain - just log for now
+                        logger.warning(f"Found orphaned channel {channel_id} with {client_count} clients but no owner - may need ownership takeover")
+
+                except Exception as e:
+                    logger.error(f"Error processing metadata key {key}: {e}", exc_info=True)
+
+        except Exception as e:
+            logger.error(f"Error checking orphaned metadata: {e}", exc_info=True)
+
     def _clean_redis_keys(self, channel_id):
         """Clean up all Redis keys for a channel more efficiently"""
         # Release the channel, stream, and profile keys from the channel
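Stripped to its essentials, the orphan check above is a scan over Redis hashes plus two liveness probes. A standalone sketch with redis-py; the client-set key name is an assumption here (the real one comes from RedisKeys.clients()):

    import redis

    r = redis.Redis()
    for key in r.keys("ts_proxy:channel:*:metadata"):
        channel_id = key.decode().split(":")[2]
        owner = (r.hget(key, "owner") or b"").decode()
        owner_alive = bool(r.exists(f"ts_proxy:worker:{owner}:heartbeat"))
        clients = r.scard(f"ts_proxy:channel:{channel_id}:clients") or 0  # assumed key name
        if not owner_alive and clients == 0:
            print(f"{channel_id}: orphaned metadata, would be cleaned up")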
@@ -227,11 +227,12 @@ class StreamManager:
                     # Continue with normal flow

             # Check stream type before connecting
-            stream_type = detect_stream_type(self.url)
-            if self.transcode == False and stream_type == StreamType.HLS:
-                logger.info(f"Detected HLS stream: {self.url} for channel {self.channel_id}")
-                logger.info(f"HLS streams will be handled with FFmpeg for now - future version will support HLS natively for channel {self.channel_id}")
-                # Enable transcoding for HLS streams
+            self.stream_type = detect_stream_type(self.url)
+            if self.transcode == False and self.stream_type in (StreamType.HLS, StreamType.RTSP, StreamType.UDP):
+                stream_type_name = "HLS" if self.stream_type == StreamType.HLS else ("RTSP/RTP" if self.stream_type == StreamType.RTSP else "UDP")
+                logger.info(f"Detected {stream_type_name} stream: {self.url} for channel {self.channel_id}")
+                logger.info(f"{stream_type_name} streams require FFmpeg for channel {self.channel_id}")
+                # Enable transcoding for HLS, RTSP/RTP, and UDP streams
                 self.transcode = True
                 # We'll override the stream profile selection with ffmpeg in the transcoding section
                 self.force_ffmpeg = True
@@ -420,7 +421,7 @@ class StreamManager:
                 from core.models import StreamProfile
                 try:
                     stream_profile = StreamProfile.objects.get(name='ffmpeg', locked=True)
-                    logger.info("Using FFmpeg stream profile for HLS content")
+                    logger.info("Using FFmpeg stream profile for unsupported proxy content (HLS/RTSP/UDP)")
                 except StreamProfile.DoesNotExist:
                     # Fall back to channel's profile if FFmpeg not found
                     stream_profile = channel.get_stream_profile()
@@ -430,6 +431,13 @@ class StreamManager:

             # Build and start transcode command
             self.transcode_cmd = stream_profile.build_command(self.url, self.user_agent)
+
+            # For UDP streams, remove any user_agent parameters from the command
+            if hasattr(self, 'stream_type') and self.stream_type == StreamType.UDP:
+                # Filter out any arguments that contain the user_agent value or related headers
+                self.transcode_cmd = [arg for arg in self.transcode_cmd if self.user_agent not in arg and 'user-agent' not in arg.lower() and 'user_agent' not in arg.lower()]
+                logger.debug(f"Removed user_agent parameters from UDP stream command for channel: {self.channel_id}")
+
             logger.debug(f"Starting transcode process: {self.transcode_cmd} for channel: {self.channel_id}")

             # Modified to capture stderr instead of discarding it
@@ -948,10 +956,10 @@ class StreamManager:
                     logger.debug(f"Updated m3u profile for channel {self.channel_id} to use profile from stream {stream_id}")
                 else:
                     logger.warning(f"Failed to update stream profile for channel {self.channel_id}")
-

         except Exception as e:
             logger.error(f"Error updating stream profile for channel {self.channel_id}: {e}")
-

         finally:
             # Always close database connection after profile update
             try:
@@ -402,6 +402,9 @@ def validate_stream_url(url, user_agent=None, timeout=(5, 5)):
     """
     Validate if a stream URL is accessible without downloading the full content.

+    Note: UDP/RTP/RTSP streams are automatically considered valid as they cannot
+    be validated via HTTP methods.
+
     Args:
         url (str): The URL to validate
         user_agent (str): User agent to use for the request
@ -410,6 +413,12 @@ def validate_stream_url(url, user_agent=None, timeout=(5, 5)):
|
|||
Returns:
|
||||
tuple: (is_valid, final_url, status_code, message)
|
||||
"""
|
||||
# Check if URL uses non-HTTP protocols (UDP/RTP/RTSP)
|
||||
# These cannot be validated via HTTP methods, so we skip validation
|
||||
if url.startswith(('udp://', 'rtp://', 'rtsp://')):
|
||||
logger.info(f"Skipping HTTP validation for non-HTTP protocol: {url}")
|
||||
return True, url, 200, "Non-HTTP protocol (UDP/RTP/RTSP) - validation skipped"
|
||||
|
||||
try:
|
||||
# Create session with proper headers
|
||||
session = requests.Session()
|
||||
|
|
|
|||
|
|
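The early return above short-circuits all HTTP probing for these schemes. For example (the URL is illustrative):

# Non-HTTP URLs come back as valid without any network request being made
is_valid, final_url, status_code, message = validate_stream_url(
    "udp://239.0.0.1:1234", user_agent="VLC/3.0.20"
)
# is_valid is True, final_url is the input URL unchanged, status_code is 200,
# and message reads "Non-HTTP protocol (UDP/RTP/RTSP) - validation skipped".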
@ -7,19 +7,27 @@ logger = logging.getLogger("ts_proxy")

def detect_stream_type(url):
    """
    Detect if stream URL is HLS or TS format.
    Detect if stream URL is HLS, RTSP/RTP, UDP, or TS format.

    Args:
        url (str): The stream URL to analyze

    Returns:
        str: 'hls' or 'ts' depending on detected format
        str: 'hls', 'rtsp', 'udp', or 'ts' depending on detected format
    """
    if not url:
        return 'unknown'

    url_lower = url.lower()

    # Check for UDP streams (requires FFmpeg)
    if url_lower.startswith('udp://'):
        return 'udp'

    # Check for RTSP/RTP streams (requires FFmpeg)
    if url_lower.startswith('rtsp://') or url_lower.startswith('rtp://'):
        return 'rtsp'

    # Look for common HLS indicators
    if (url_lower.endswith('.m3u8') or
        '.m3u8?' in url_lower or
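A quick sanity check of the branches above (sample URLs are illustrative; the fall-through to 'ts' follows the docstring, since the tail of the function is not shown in this hunk):

for url in ("udp://239.0.0.1:1234",
            "rtp://239.1.1.1:5000",
            "rtsp://camera.local/stream1",
            "http://example.com/live/playlist.m3u8",
            "http://example.com/live/stream.ts"):
    print(url, "->", detect_stream_type(url))
# -> 'udp', 'rtsp', 'rtsp', 'hls', 'ts'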
@ -4,7 +4,7 @@ import time
import random
import re
import pathlib
from django.http import StreamingHttpResponse, JsonResponse, HttpResponseRedirect
from django.http import StreamingHttpResponse, JsonResponse, HttpResponseRedirect, HttpResponse
from django.views.decorators.csrf import csrf_exempt
from django.shortcuts import get_object_or_404
from apps.proxy.config import TSConfig as Config
@ -84,11 +84,18 @@ def stream_ts(request, channel_id):
if state_field in metadata:
    channel_state = metadata[state_field].decode("utf-8")

    if channel_state:
        # Channel is being initialized or already active - no need for reinitialization
        # Active/running states - channel is operational, don't reinitialize
        if channel_state in [
            ChannelState.ACTIVE,
            ChannelState.WAITING_FOR_CLIENTS,
            ChannelState.BUFFERING,
            ChannelState.INITIALIZING,
            ChannelState.CONNECTING,
            ChannelState.STOPPING,
        ]:
            needs_initialization = False
            logger.debug(
                f"[{client_id}] Channel {channel_id} already in state {channel_state}, skipping initialization"
                f"[{client_id}] Channel {channel_id} in state {channel_state}, skipping initialization"
            )

            # Special handling for initializing/connecting states
@ -98,19 +105,34 @@ def stream_ts(request, channel_id):
]:
    channel_initializing = True
    logger.debug(
        f"[{client_id}] Channel {channel_id} is still initializing, client will wait for completion"
        f"[{client_id}] Channel {channel_id} is still initializing, client will wait"
    )
# Terminal states - channel needs cleanup before reinitialization
elif channel_state in [
    ChannelState.ERROR,
    ChannelState.STOPPED,
]:
    needs_initialization = True
    logger.info(
        f"[{client_id}] Channel {channel_id} in terminal state {channel_state}, will reinitialize"
    )
# Unknown/empty state - check if owner is alive
else:
    # Only check for owner if channel is in a valid state
    owner_field = ChannelMetadataField.OWNER.encode("utf-8")
    if owner_field in metadata:
        owner = metadata[owner_field].decode("utf-8")
        owner_heartbeat_key = f"ts_proxy:worker:{owner}:heartbeat"
        if proxy_server.redis_client.exists(owner_heartbeat_key):
            # Owner is still active, so we don't need to reinitialize
            # Owner is still active with unknown state - don't reinitialize
            needs_initialization = False
            logger.debug(
                f"[{client_id}] Channel {channel_id} has active owner {owner}"
                f"[{client_id}] Channel {channel_id} has active owner {owner}, skipping init"
            )
        else:
            # Owner dead - needs reinitialization
            needs_initialization = True
            logger.warning(
                f"[{client_id}] Channel {channel_id} owner {owner} is dead, will reinitialize"
            )

# Start initialization if needed
@ -292,6 +314,15 @@ def stream_ts(request, channel_id):
logger.info(
    f"[{client_id}] Redirecting to validated URL: {final_url} ({message})"
)

# For non-HTTP protocols (RTSP/RTP/UDP), we need to manually create the redirect
# because Django's HttpResponseRedirect blocks them for security
if final_url.startswith(('rtsp://', 'rtp://', 'udp://')):
    logger.info(f"[{client_id}] Using manual redirect for non-HTTP protocol")
    response = HttpResponse(status=301)
    response['Location'] = final_url
    return response

return HttpResponseRedirect(final_url)
else:
    logger.error(
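For context, Django's redirect responses whitelist only the http, https and ftp schemes; anything else raises DisallowedRedirect, which is why the Location header is set by hand above. A minimal standalone demonstration:

import django
from django.conf import settings
from django.core.exceptions import DisallowedRedirect

settings.configure()  # just enough config to construct responses outside a project
from django.http import HttpResponseRedirect

try:
    HttpResponseRedirect('rtsp://camera.local/stream')
except DisallowedRedirect:
    print("rtsp:// is not in HttpResponseRedirect.allowed_schemes")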
@ -6,6 +6,7 @@ from .api_views import (
SeriesViewSet,
VODCategoryViewSet,
UnifiedContentViewSet,
VODLogoViewSet,
)

app_name = 'vod'

@ -16,5 +17,6 @@ router.register(r'episodes', EpisodeViewSet, basename='episode')
router.register(r'series', SeriesViewSet, basename='series')
router.register(r'categories', VODCategoryViewSet, basename='vodcategory')
router.register(r'all', UnifiedContentViewSet, basename='unified-content')
router.register(r'vodlogos', VODLogoViewSet, basename='vodlogo')

urlpatterns = router.urls
@ -3,16 +3,21 @@ from rest_framework.response import Response
from rest_framework.decorators import action
from rest_framework.filters import SearchFilter, OrderingFilter
from rest_framework.pagination import PageNumberPagination
from rest_framework.permissions import AllowAny
from django_filters.rest_framework import DjangoFilterBackend
from django.shortcuts import get_object_or_404
from django.http import StreamingHttpResponse, HttpResponse, FileResponse
from django.db.models import Q
import django_filters
import logging
import os
import requests
from apps.accounts.permissions import (
    Authenticated,
    permission_classes_by_action,
)
from .models import (
    Series, VODCategory, Movie, Episode,
    Series, VODCategory, Movie, Episode, VODLogo,
    M3USeriesRelation, M3UMovieRelation, M3UEpisodeRelation
)
from .serializers import (

@ -20,6 +25,7 @@ from .serializers import (
    EpisodeSerializer,
    SeriesSerializer,
    VODCategorySerializer,
    VODLogoSerializer,
    M3UMovieRelationSerializer,
    M3USeriesRelationSerializer,
    M3UEpisodeRelationSerializer
@ -564,7 +570,7 @@ class UnifiedContentViewSet(viewsets.ReadOnlyModelViewSet):
logo.url as logo_url,
'movie' as content_type
FROM vod_movie movies
LEFT JOIN dispatcharr_channels_logo logo ON movies.logo_id = logo.id
LEFT JOIN vod_vodlogo logo ON movies.logo_id = logo.id
WHERE {where_conditions[0]}

UNION ALL

@ -586,7 +592,7 @@ class UnifiedContentViewSet(viewsets.ReadOnlyModelViewSet):
logo.url as logo_url,
'series' as content_type
FROM vod_series series
LEFT JOIN dispatcharr_channels_logo logo ON series.logo_id = logo.id
LEFT JOIN vod_vodlogo logo ON series.logo_id = logo.id
WHERE {where_conditions[1]}
)
SELECT * FROM unified_content

@ -613,10 +619,10 @@ class UnifiedContentViewSet(viewsets.ReadOnlyModelViewSet):
'id': item_dict['logo_id'],
'name': item_dict['logo_name'],
'url': item_dict['logo_url'],
'cache_url': f"/media/logo_cache/{item_dict['logo_id']}.png" if item_dict['logo_id'] else None,
'channel_count': 0,  # We don't need this for VOD
'is_used': True,
'channel_names': []  # We don't need this for VOD
'cache_url': f"/api/vod/vodlogos/{item_dict['logo_id']}/cache/",
'movie_count': 0,  # We don't calculate this in raw SQL
'series_count': 0,  # We don't calculate this in raw SQL
'is_used': True
}

# Convert to the format expected by frontend
@ -668,4 +674,173 @@ class UnifiedContentViewSet(viewsets.ReadOnlyModelViewSet):
logger.error(f"Error in UnifiedContentViewSet.list(): {e}")
import traceback
logger.error(traceback.format_exc())
return Response({'error': str(e)}, status=500)
return Response({'error': str(e)}, status=500)


class VODLogoPagination(PageNumberPagination):
    page_size = 100
    page_size_query_param = "page_size"
    max_page_size = 1000


class VODLogoViewSet(viewsets.ModelViewSet):
    """ViewSet for VOD Logo management"""
    queryset = VODLogo.objects.all()
    serializer_class = VODLogoSerializer
    pagination_class = VODLogoPagination
    filter_backends = [SearchFilter, OrderingFilter]
    search_fields = ['name', 'url']
    ordering_fields = ['name', 'id']
    ordering = ['name']

    def get_permissions(self):
        try:
            return [perm() for perm in permission_classes_by_action[self.action]]
        except KeyError:
            if self.action == 'cache':
                return [AllowAny()]
            return [Authenticated()]

    def get_queryset(self):
        """Optimize queryset with prefetch and add filtering"""
        queryset = VODLogo.objects.prefetch_related('movie', 'series').order_by('name')

        # Filter by specific IDs
        ids = self.request.query_params.getlist('ids')
        if ids:
            try:
                id_list = [int(id_str) for id_str in ids if id_str.isdigit()]
                if id_list:
                    queryset = queryset.filter(id__in=id_list)
            except (ValueError, TypeError):
                queryset = VODLogo.objects.none()

        # Filter by usage
        used_filter = self.request.query_params.get('used', None)
        if used_filter == 'true':
            # Return logos that are used by movies OR series
            queryset = queryset.filter(
                Q(movie__isnull=False) | Q(series__isnull=False)
            ).distinct()
        elif used_filter == 'false':
            # Return logos that are NOT used by either
            queryset = queryset.filter(
                movie__isnull=True,
                series__isnull=True
            )
        elif used_filter == 'movies':
            # Return logos that are used by movies (may also be used by series)
            queryset = queryset.filter(movie__isnull=False).distinct()
        elif used_filter == 'series':
            # Return logos that are used by series (may also be used by movies)
            queryset = queryset.filter(series__isnull=False).distinct()

        # Filter by name
        name_query = self.request.query_params.get('name', None)
        if name_query:
            queryset = queryset.filter(name__icontains=name_query)

        # No pagination mode
        if self.request.query_params.get('no_pagination', 'false').lower() == 'true':
            self.pagination_class = None

        return queryset

    @action(detail=True, methods=["get"], permission_classes=[AllowAny])
    def cache(self, request, pk=None):
        """Streams the VOD logo file, whether it's local or remote."""
        logo = self.get_object()

        if not logo.url:
            return HttpResponse(status=404)

        # Check if this is a local file path
        if logo.url.startswith('/data/'):
            # It's a local file
            file_path = logo.url
            if not os.path.exists(file_path):
                logger.error(f"VOD logo file not found: {file_path}")
                return HttpResponse(status=404)

            try:
                return FileResponse(open(file_path, 'rb'), content_type='image/png')
            except Exception as e:
                logger.error(f"Error serving VOD logo file {file_path}: {str(e)}")
                return HttpResponse(status=500)
        else:
            # It's a remote URL - proxy it
            try:
                response = requests.get(logo.url, stream=True, timeout=10)
                response.raise_for_status()

                content_type = response.headers.get('Content-Type', 'image/png')

                return StreamingHttpResponse(
                    response.iter_content(chunk_size=8192),
                    content_type=content_type
                )
            except requests.exceptions.RequestException as e:
                logger.error(f"Error fetching remote VOD logo {logo.url}: {str(e)}")
                return HttpResponse(status=404)

    @action(detail=False, methods=["delete"], url_path="bulk-delete")
    def bulk_delete(self, request):
        """Delete multiple VOD logos at once"""
        logo_ids = request.data.get('logo_ids', [])

        if not logo_ids:
            return Response(
                {"error": "No logo IDs provided"},
                status=status.HTTP_400_BAD_REQUEST
            )

        try:
            # Get logos to delete
            logos = VODLogo.objects.filter(id__in=logo_ids)
            deleted_count = logos.count()

            # Delete them
            logos.delete()

            return Response({
                "deleted_count": deleted_count,
                "message": f"Successfully deleted {deleted_count} VOD logo(s)"
            })
        except Exception as e:
            logger.error(f"Error during bulk VOD logo deletion: {str(e)}")
            return Response(
                {"error": str(e)},
                status=status.HTTP_500_INTERNAL_SERVER_ERROR
            )

    @action(detail=False, methods=["post"])
    def cleanup(self, request):
        """Delete all VOD logos that are not used by any movies or series"""
        try:
            # Find unused logos
            unused_logos = VODLogo.objects.filter(
                movie__isnull=True,
                series__isnull=True
            )

            deleted_count = unused_logos.count()
            logo_names = list(unused_logos.values_list('name', flat=True))

            # Delete them
            unused_logos.delete()

            logger.info(f"Cleaned up {deleted_count} unused VOD logos: {logo_names}")

            return Response({
                "deleted_count": deleted_count,
                "deleted_logos": logo_names,
                "message": f"Successfully deleted {deleted_count} unused VOD logo(s)"
            })
        except Exception as e:
            logger.error(f"Error during VOD logo cleanup: {str(e)}")
            return Response(
                {"error": str(e)},
                status=status.HTTP_500_INTERNAL_SERVER_ERROR
            )
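A hedged client-side sketch of the cache endpoint registered above (the host, port and logo id are placeholders; the action is AllowAny, so no auth header is assumed):

import requests

# Fetch a logo via the streaming cache action and save it locally
resp = requests.get("http://localhost:9191/api/vod/vodlogos/42/cache/", timeout=10)
if resp.ok:
    with open("logo_42.png", "wb") as f:
        f.write(resp.content)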
@ -0,0 +1,264 @@
# Generated by Django 5.2.4 on 2025-11-06 23:01

import django.db.models.deletion
from django.db import migrations, models


def migrate_vod_logos_forward(apps, schema_editor):
    """
    Migrate VOD logos from the Logo table to the new VODLogo table.
    This copies all logos referenced by movies or series to VODLogo.
    Uses pure SQL for maximum performance.
    """
    from django.db import connection

    print("\n" + "="*80)
    print("Starting VOD logo migration...")
    print("="*80)

    with connection.cursor() as cursor:
        # Step 1: Copy unique logos from Logo table to VODLogo table
        # Only copy logos that are used by movies or series
        print("Copying logos to VODLogo table...")
        cursor.execute("""
            INSERT INTO vod_vodlogo (name, url)
            SELECT DISTINCT l.name, l.url
            FROM dispatcharr_channels_logo l
            WHERE l.id IN (
                SELECT DISTINCT logo_id FROM vod_movie WHERE logo_id IS NOT NULL
                UNION
                SELECT DISTINCT logo_id FROM vod_series WHERE logo_id IS NOT NULL
            )
            ON CONFLICT (url) DO NOTHING
        """)
        print(f"Created VODLogo entries")

        # Step 2: Update movies to point to VODLogo IDs using JOIN
        print("Updating movie references...")
        cursor.execute("""
            UPDATE vod_movie m
            SET logo_id = v.id
            FROM dispatcharr_channels_logo l
            INNER JOIN vod_vodlogo v ON l.url = v.url
            WHERE m.logo_id = l.id
            AND m.logo_id IS NOT NULL
        """)
        movie_count = cursor.rowcount
        print(f"Updated {movie_count} movies with new VOD logo references")

        # Step 3: Update series to point to VODLogo IDs using JOIN
        print("Updating series references...")
        cursor.execute("""
            UPDATE vod_series s
            SET logo_id = v.id
            FROM dispatcharr_channels_logo l
            INNER JOIN vod_vodlogo v ON l.url = v.url
            WHERE s.logo_id = l.id
            AND s.logo_id IS NOT NULL
        """)
        series_count = cursor.rowcount
        print(f"Updated {series_count} series with new VOD logo references")

    print("="*80)
    print("VOD logo migration completed successfully!")
    print(f"Summary: Updated {movie_count} movies and {series_count} series")
    print("="*80 + "\n")


def migrate_vod_logos_backward(apps, schema_editor):
    """
    Reverse migration - moves VODLogos back to Logo table.
    This recreates Logo entries for all VODLogos and updates Movie/Series references.
    """
    Logo = apps.get_model('dispatcharr_channels', 'Logo')
    VODLogo = apps.get_model('vod', 'VODLogo')
    Movie = apps.get_model('vod', 'Movie')
    Series = apps.get_model('vod', 'Series')

    print("\n" + "="*80)
    print("REVERSE: Moving VOD logos back to Logo table...")
    print("="*80)

    # Get all VODLogos
    vod_logos = VODLogo.objects.all()
    print(f"Found {vod_logos.count()} VOD logos to reverse migrate")

    # Create Logo entries for each VODLogo
    logos_to_create = []
    vod_to_logo_mapping = {}  # VODLogo ID -> Logo ID

    for vod_logo in vod_logos:
        # Check if a Logo with this URL already exists
        existing_logo = Logo.objects.filter(url=vod_logo.url).first()

        if existing_logo:
            # Logo already exists, just map to it
            vod_to_logo_mapping[vod_logo.id] = existing_logo.id
            print(f"Logo already exists for URL: {vod_logo.url[:50]}... (using existing)")
        else:
            # Create new Logo entry
            new_logo = Logo(name=vod_logo.name, url=vod_logo.url)
            logos_to_create.append(new_logo)

    # Bulk create new Logo entries
    if logos_to_create:
        print(f"Creating {len(logos_to_create)} new Logo entries...")
        Logo.objects.bulk_create(logos_to_create, ignore_conflicts=True)
        print("Logo entries created")

    # Get the created Logo instances with their IDs
    for vod_logo in vod_logos:
        if vod_logo.id not in vod_to_logo_mapping:
            try:
                logo = Logo.objects.get(url=vod_logo.url)
                vod_to_logo_mapping[vod_logo.id] = logo.id
            except Logo.DoesNotExist:
                print(f"Warning: Could not find Logo for URL: {vod_logo.url[:100]}...")

    print(f"Created mapping for {len(vod_to_logo_mapping)} VOD logos -> Logos")

    # Update movies to point back to Logo table
    movie_count = 0
    for movie in Movie.objects.exclude(logo__isnull=True):
        if movie.logo_id in vod_to_logo_mapping:
            movie.logo_id = vod_to_logo_mapping[movie.logo_id]
            movie.save(update_fields=['logo_id'])
            movie_count += 1
    print(f"Updated {movie_count} movies to use Logo table")

    # Update series to point back to Logo table
    series_count = 0
    for series in Series.objects.exclude(logo__isnull=True):
        if series.logo_id in vod_to_logo_mapping:
            series.logo_id = vod_to_logo_mapping[series.logo_id]
            series.save(update_fields=['logo_id'])
            series_count += 1
    print(f"Updated {series_count} series to use Logo table")

    # Delete VODLogos (they're now redundant)
    vod_logo_count = vod_logos.count()
    vod_logos.delete()
    print(f"Deleted {vod_logo_count} VOD logos")

    print("="*80)
    print("Reverse migration completed!")
    print(f"Summary: Created/reused {len(vod_to_logo_mapping)} logos, updated {movie_count} movies and {series_count} series")
    print("="*80 + "\n")


def cleanup_migrated_logos(apps, schema_editor):
    """
    Delete Logo entries that were successfully migrated to VODLogo.

    Uses efficient JOIN-based approach with LEFT JOIN to exclude channel usage.
    """
    from django.db import connection

    print("\n" + "="*80)
    print("Cleaning up migrated Logo entries...")
    print("="*80)

    with connection.cursor() as cursor:
        # Single efficient query using JOINs:
        # - JOIN with vod_vodlogo to find migrated logos
        # - LEFT JOIN with channels to find which aren't used
        cursor.execute("""
            DELETE FROM dispatcharr_channels_logo
            WHERE id IN (
                SELECT l.id
                FROM dispatcharr_channels_logo l
                INNER JOIN vod_vodlogo v ON l.url = v.url
                LEFT JOIN dispatcharr_channels_channel c ON c.logo_id = l.id
                WHERE c.id IS NULL
            )
        """)
        deleted_count = cursor.rowcount

    print(f"✓ Deleted {deleted_count} migrated Logo entries (not used by channels)")
    print("="*80 + "\n")


class Migration(migrations.Migration):

    dependencies = [
        ('vod', '0002_add_last_seen_with_default'),
        ('dispatcharr_channels', '0013_alter_logo_url'),  # Ensure Logo table exists
    ]

    operations = [
        # Step 1: Create the VODLogo model
        migrations.CreateModel(
            name='VODLogo',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=255)),
                ('url', models.TextField(unique=True)),
            ],
            options={
                'verbose_name': 'VOD Logo',
                'verbose_name_plural': 'VOD Logos',
            },
        ),

        # Step 2: Remove foreign key constraints temporarily (so we can change the IDs)
        # We need to find and drop the actual constraint names dynamically
        migrations.RunSQL(
            sql=[
                # Drop movie logo constraint (find it dynamically)
                """
                DO $$
                DECLARE
                    constraint_name text;
                BEGIN
                    SELECT conname INTO constraint_name
                    FROM pg_constraint
                    WHERE conrelid = 'vod_movie'::regclass
                    AND conname LIKE '%logo_id%fk%';

                    IF constraint_name IS NOT NULL THEN
                        EXECUTE 'ALTER TABLE vod_movie DROP CONSTRAINT ' || constraint_name;
                    END IF;
                END $$;
                """,
                # Drop series logo constraint (find it dynamically)
                """
                DO $$
                DECLARE
                    constraint_name text;
                BEGIN
                    SELECT conname INTO constraint_name
                    FROM pg_constraint
                    WHERE conrelid = 'vod_series'::regclass
                    AND conname LIKE '%logo_id%fk%';

                    IF constraint_name IS NOT NULL THEN
                        EXECUTE 'ALTER TABLE vod_series DROP CONSTRAINT ' || constraint_name;
                    END IF;
                END $$;
                """,
            ],
            reverse_sql=[
                # The AlterField operations will recreate the constraints pointing to VODLogo,
                # so we don't need to manually recreate them in reverse
                migrations.RunSQL.noop,
            ],
        ),

        # Step 3: Migrate the data (this copies logos and updates references)
        migrations.RunPython(migrate_vod_logos_forward, migrate_vod_logos_backward),

        # Step 4: Now we can safely alter the foreign keys to point to VODLogo
        migrations.AlterField(
            model_name='movie',
            name='logo',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='movie', to='vod.vodlogo'),
        ),
        migrations.AlterField(
            model_name='series',
            name='logo',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='series', to='vod.vodlogo'),
        ),

        # Step 5: Clean up migrated Logo entries
        migrations.RunPython(cleanup_migrated_logos, migrations.RunPython.noop),
    ]
@ -4,10 +4,22 @@ from django.utils import timezone
from django.contrib.contenttypes.fields import GenericForeignKey
from django.contrib.contenttypes.models import ContentType
from apps.m3u.models import M3UAccount
from apps.channels.models import Logo
import uuid


class VODLogo(models.Model):
    """Logo model specifically for VOD content (movies and series)"""
    name = models.CharField(max_length=255)
    url = models.TextField(unique=True)

    def __str__(self):
        return self.name

    class Meta:
        verbose_name = 'VOD Logo'
        verbose_name_plural = 'VOD Logos'


class VODCategory(models.Model):
    """Categories for organizing VODs (e.g., Action, Comedy, Drama)"""
@ -69,7 +81,7 @@ class Series(models.Model):
year = models.IntegerField(blank=True, null=True)
rating = models.CharField(max_length=10, blank=True, null=True)
genre = models.CharField(max_length=255, blank=True, null=True)
logo = models.ForeignKey(Logo, on_delete=models.SET_NULL, null=True, blank=True, related_name='series')
logo = models.ForeignKey(VODLogo, on_delete=models.SET_NULL, null=True, blank=True, related_name='series')

# Metadata IDs for deduplication - these should be globally unique when present
tmdb_id = models.CharField(max_length=50, blank=True, null=True, unique=True, help_text="TMDB ID for metadata")

@ -108,7 +120,7 @@ class Movie(models.Model):
rating = models.CharField(max_length=10, blank=True, null=True)
genre = models.CharField(max_length=255, blank=True, null=True)
duration_secs = models.IntegerField(blank=True, null=True, help_text="Duration in seconds")
logo = models.ForeignKey(Logo, on_delete=models.SET_NULL, null=True, blank=True, related_name='movie')
logo = models.ForeignKey(VODLogo, on_delete=models.SET_NULL, null=True, blank=True, related_name='movie')

# Metadata IDs for deduplication - these should be globally unique when present
tmdb_id = models.CharField(max_length=50, blank=True, null=True, unique=True, help_text="TMDB ID for metadata")
@ -1,12 +1,79 @@
from rest_framework import serializers
from django.urls import reverse
from .models import (
    Series, VODCategory, Movie, Episode,
    Series, VODCategory, Movie, Episode, VODLogo,
    M3USeriesRelation, M3UMovieRelation, M3UEpisodeRelation, M3UVODCategoryRelation
)
from apps.channels.serializers import LogoSerializer
from apps.m3u.serializers import M3UAccountSerializer


class VODLogoSerializer(serializers.ModelSerializer):
    cache_url = serializers.SerializerMethodField()
    movie_count = serializers.SerializerMethodField()
    series_count = serializers.SerializerMethodField()
    is_used = serializers.SerializerMethodField()
    item_names = serializers.SerializerMethodField()

    class Meta:
        model = VODLogo
        fields = ["id", "name", "url", "cache_url", "movie_count", "series_count", "is_used", "item_names"]

    def validate_url(self, value):
        """Validate that the URL is unique for creation or update"""
        if self.instance and self.instance.url == value:
            return value

        if VODLogo.objects.filter(url=value).exists():
            raise serializers.ValidationError("A VOD logo with this URL already exists.")

        return value

    def create(self, validated_data):
        """Handle logo creation with proper URL validation"""
        return VODLogo.objects.create(**validated_data)

    def update(self, instance, validated_data):
        """Handle logo updates"""
        for attr, value in validated_data.items():
            setattr(instance, attr, value)
        instance.save()
        return instance

    def get_cache_url(self, obj):
        request = self.context.get("request")
        if request:
            return request.build_absolute_uri(
                reverse("api:vod:vodlogo-cache", args=[obj.id])
            )
        return reverse("api:vod:vodlogo-cache", args=[obj.id])

    def get_movie_count(self, obj):
        """Get the number of movies using this logo"""
        return obj.movie.count() if hasattr(obj, 'movie') else 0

    def get_series_count(self, obj):
        """Get the number of series using this logo"""
        return obj.series.count() if hasattr(obj, 'series') else 0

    def get_is_used(self, obj):
        """Check if this logo is used by any movies or series"""
        return (hasattr(obj, 'movie') and obj.movie.exists()) or (hasattr(obj, 'series') and obj.series.exists())

    def get_item_names(self, obj):
        """Get the list of movies and series using this logo"""
        names = []

        if hasattr(obj, 'movie'):
            for movie in obj.movie.all()[:10]:  # Limit to 10 items for performance
                names.append(f"Movie: {movie.name}")

        if hasattr(obj, 'series'):
            for series in obj.series.all()[:10]:  # Limit to 10 items for performance
                names.append(f"Series: {series.name}")

        return names


class M3UVODCategoryRelationSerializer(serializers.ModelSerializer):
    category = serializers.IntegerField(source="category.id")
    m3u_account = serializers.IntegerField(source="m3u_account.id")

@ -31,7 +98,7 @@ class VODCategorySerializer(serializers.ModelSerializer):
]

class SeriesSerializer(serializers.ModelSerializer):
    logo = LogoSerializer(read_only=True)
    logo = VODLogoSerializer(read_only=True)
    episode_count = serializers.SerializerMethodField()

    class Meta:

@ -43,7 +110,7 @@ class SeriesSerializer(serializers.ModelSerializer):


class MovieSerializer(serializers.ModelSerializer):
    logo = LogoSerializer(read_only=True)
    logo = VODLogoSerializer(read_only=True)

    class Meta:
        model = Movie

@ -225,7 +292,7 @@ class M3UEpisodeRelationSerializer(serializers.ModelSerializer):

class EnhancedSeriesSerializer(serializers.ModelSerializer):
    """Enhanced serializer for series with provider information"""
    logo = LogoSerializer(read_only=True)
    logo = VODLogoSerializer(read_only=True)
    providers = M3USeriesRelationSerializer(source='m3u_relations', many=True, read_only=True)
    episode_count = serializers.SerializerMethodField()
@ -5,10 +5,9 @@ from django.db.models import Q
from apps.m3u.models import M3UAccount
from core.xtream_codes import Client as XtreamCodesClient
from .models import (
    VODCategory, Series, Movie, Episode,
    VODCategory, Series, Movie, Episode, VODLogo,
    M3USeriesRelation, M3UMovieRelation, M3UEpisodeRelation, M3UVODCategoryRelation
)
from apps.channels.models import Logo
from datetime import datetime
import logging
import json

@ -403,7 +402,7 @@ def process_movie_batch(account, batch, categories, relations, scan_start_time=N

# Get existing logos
existing_logos = {
    logo.url: logo for logo in Logo.objects.filter(url__in=logo_urls)
    logo.url: logo for logo in VODLogo.objects.filter(url__in=logo_urls)
} if logo_urls else {}

# Create missing logos

@ -411,20 +410,20 @@ def process_movie_batch(account, batch, categories, relations, scan_start_time=N
for logo_url in logo_urls:
    if logo_url not in existing_logos:
        movie_name = logo_url_to_name.get(logo_url, 'Unknown Movie')
        logos_to_create.append(Logo(url=logo_url, name=movie_name))
        logos_to_create.append(VODLogo(url=logo_url, name=movie_name))

if logos_to_create:
    try:
        Logo.objects.bulk_create(logos_to_create, ignore_conflicts=True)
        VODLogo.objects.bulk_create(logos_to_create, ignore_conflicts=True)
        # Refresh existing_logos with newly created ones
        new_logo_urls = [logo.url for logo in logos_to_create]
        newly_created = {
            logo.url: logo for logo in Logo.objects.filter(url__in=new_logo_urls)
            logo.url: logo for logo in VODLogo.objects.filter(url__in=new_logo_urls)
        }
        existing_logos.update(newly_created)
        logger.info(f"Created {len(newly_created)} new logos for movies")
        logger.info(f"Created {len(newly_created)} new VOD logos for movies")
    except Exception as e:
        logger.warning(f"Failed to create logos: {e}")
        logger.warning(f"Failed to create VOD logos: {e}")

# Get existing movies based on our keys
existing_movies = {}

@ -725,7 +724,7 @@ def process_series_batch(account, batch, categories, relations, scan_start_time=

# Get existing logos
existing_logos = {
    logo.url: logo for logo in Logo.objects.filter(url__in=logo_urls)
    logo.url: logo for logo in VODLogo.objects.filter(url__in=logo_urls)
} if logo_urls else {}

# Create missing logos

@ -733,20 +732,20 @@ def process_series_batch(account, batch, categories, relations, scan_start_time=
for logo_url in logo_urls:
    if logo_url not in existing_logos:
        series_name = logo_url_to_name.get(logo_url, 'Unknown Series')
        logos_to_create.append(Logo(url=logo_url, name=series_name))
        logos_to_create.append(VODLogo(url=logo_url, name=series_name))

if logos_to_create:
    try:
        Logo.objects.bulk_create(logos_to_create, ignore_conflicts=True)
        VODLogo.objects.bulk_create(logos_to_create, ignore_conflicts=True)
        # Refresh existing_logos with newly created ones
        new_logo_urls = [logo.url for logo in logos_to_create]
        newly_created = {
            logo.url: logo for logo in Logo.objects.filter(url__in=new_logo_urls)
            logo.url: logo for logo in VODLogo.objects.filter(url__in=new_logo_urls)
        }
        existing_logos.update(newly_created)
        logger.info(f"Created {len(newly_created)} new logos for series")
        logger.info(f"Created {len(newly_created)} new VOD logos for series")
    except Exception as e:
        logger.warning(f"Failed to create logos: {e}")
        logger.warning(f"Failed to create VOD logos: {e}")

# Get existing series based on our keys - same pattern as movies
existing_series = {}
@ -1424,21 +1423,21 @@ def cleanup_orphaned_vod_content(stale_days=0, scan_start_time=None, account_id=
stale_episode_count = stale_episode_relations.count()
stale_episode_relations.delete()

# Clean up movies with no relations (orphaned) - only if no account_id specified (global cleanup)
if not account_id:
    orphaned_movies = Movie.objects.filter(m3u_relations__isnull=True)
    orphaned_movie_count = orphaned_movies.count()
# Clean up movies with no relations (orphaned)
# Safe to delete even during account-specific cleanup because if ANY account
# has a relation, m3u_relations will not be null
orphaned_movies = Movie.objects.filter(m3u_relations__isnull=True)
orphaned_movie_count = orphaned_movies.count()
if orphaned_movie_count > 0:
    logger.info(f"Deleting {orphaned_movie_count} orphaned movies with no M3U relations")
    orphaned_movies.delete()

# Clean up series with no relations (orphaned) - only if no account_id specified (global cleanup)
orphaned_series = Series.objects.filter(m3u_relations__isnull=True)
orphaned_series_count = orphaned_series.count()
# Clean up series with no relations (orphaned)
orphaned_series = Series.objects.filter(m3u_relations__isnull=True)
orphaned_series_count = orphaned_series.count()
if orphaned_series_count > 0:
    logger.info(f"Deleting {orphaned_series_count} orphaned series with no M3U relations")
    orphaned_series.delete()
else:
    # When cleaning up for specific account, we don't remove orphaned content
    # as other accounts might still reference it
    orphaned_movie_count = 0
    orphaned_series_count = 0

# Episodes will be cleaned up via CASCADE when series are deleted
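The account-safety argument in the comments above can be checked directly: a movie with a relation from any account is excluded by m3u_relations__isnull=True. A small sketch (model imports come from this diff; the data setup is hypothetical):

from vod.models import Movie

orphans = Movie.objects.filter(m3u_relations__isnull=True)  # zero relations at all
still_referenced = Movie.objects.filter(m3u_relations__isnull=False).distinct()
# The two querysets are disjoint, so account-scoped cleanup cannot touch
# content that another account still references.
assert not orphans.filter(id__in=still_referenced.values('id')).exists()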
@ -1999,7 +1998,7 @@ def refresh_movie_advanced_data(m3u_movie_relation_id, force_refresh=False):

def validate_logo_reference(obj, obj_type="object"):
    """
    Validate that a logo reference exists in the database.
    Validate that a VOD logo reference exists in the database.
    If not, set it to None to prevent foreign key constraint violations.

    Args:

@ -2019,9 +2018,9 @@ def validate_logo_reference(obj, obj_type="object"):

try:
    # Verify the logo exists in the database
    Logo.objects.get(pk=obj.logo.pk)
    VODLogo.objects.get(pk=obj.logo.pk)
    return True
except Logo.DoesNotExist:
    logger.warning(f"Logo with ID {obj.logo.pk} does not exist in database for {obj_type} '{getattr(obj, 'name', 'Unknown')}', setting to None")
except VODLogo.DoesNotExist:
    logger.warning(f"VOD Logo with ID {obj.logo.pk} does not exist in database for {obj_type} '{getattr(obj, 'name', 'Unknown')}', setting to None")
    obj.logo = None
    return False
@ -377,12 +377,14 @@ def validate_flexible_url(value):
import re

# More flexible pattern for non-FQDN hostnames with paths
# Matches: http://hostname, http://hostname/, http://hostname:port/path/to/file.xml
non_fqdn_pattern = r'^https?://[a-zA-Z0-9]([a-zA-Z0-9\-]{0,61}[a-zA-Z0-9])?(\:[0-9]+)?(/[^\s]*)?$'
# Matches: http://hostname, https://hostname/, http://hostname:port/path/to/file.xml, rtp://192.168.2.1, rtsp://192.168.178.1, udp://239.0.0.1:1234
# Also matches FQDNs for rtsp/rtp/udp protocols: rtsp://FQDN/path?query=value
# Also supports authentication: rtsp://user:pass@hostname/path
non_fqdn_pattern = r'^(rts?p|https?|udp)://([a-zA-Z0-9_\-\.]+:[^\s@]+@)?([a-zA-Z0-9]([a-zA-Z0-9\-\.]{0,61}[a-zA-Z0-9])?|[0-9.]+)?(\:[0-9]+)?(/[^\s]*)?$'
non_fqdn_match = re.match(non_fqdn_pattern, value)

if non_fqdn_match:
    return  # Accept non-FQDN hostnames
    return  # Accept non-FQDN hostnames and rtsp/rtp/udp URLs with optional authentication

# If it doesn't match our flexible patterns, raise the original error
raise ValidationError("Enter a valid URL.")
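A quick self-check of the widened pattern (sample URLs are illustrative):

import re

non_fqdn_pattern = r'^(rts?p|https?|udp)://([a-zA-Z0-9_\-\.]+:[^\s@]+@)?([a-zA-Z0-9]([a-zA-Z0-9\-\.]{0,61}[a-zA-Z0-9])?|[0-9.]+)?(\:[0-9]+)?(/[^\s]*)?$'
for value in ("http://myserver:8080/playlist.m3u",
              "udp://239.0.0.1:1234",
              "rtsp://user:pass@camera.local/stream?ch=1",
              "not a url"):
    print(value, "->", bool(re.match(non_fqdn_pattern, value)))
# True, True, True, False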
@ -51,6 +51,11 @@ EPG_BATCH_SIZE = 1000  # Number of records to process in a batch
EPG_MEMORY_LIMIT = 512  # Memory limit in MB before forcing garbage collection
EPG_ENABLE_MEMORY_MONITORING = True  # Whether to monitor memory usage during processing

# XtreamCodes Rate Limiting Settings
# Delay between profile authentications when refreshing multiple profiles
# This prevents providers from temporarily banning users with many profiles
XC_PROFILE_REFRESH_DELAY = float(os.environ.get('XC_PROFILE_REFRESH_DELAY', '2.5'))  # seconds between profile refreshes

# Database optimization settings
DATABASE_STATEMENT_TIMEOUT = 300  # Seconds before timing out long-running queries
DATABASE_CONN_MAX_AGE = (
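A minimal sketch of how such a delay is typically consumed (the loop and refresh_profile are hypothetical; only the setting name comes from this diff):

import time
from django.conf import settings

for i, profile in enumerate(profiles_to_refresh):
    if i > 0:
        # Space out authentications so providers don't rate-limit or ban the account
        time.sleep(settings.XC_PROFILE_REFRESH_DELAY)
    refresh_profile(profile)  # placeholder for the actual XtreamCodes auth call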
@ -112,15 +112,21 @@ const App = () => {
  height: 0,
}}
navbar={{
  width: open ? drawerWidth : miniDrawerWidth,
  width: isAuthenticated
    ? open
      ? drawerWidth
      : miniDrawerWidth
    : 0,
}}
>
<Sidebar
  drawerWidth
  miniDrawerWidth
  collapsed={!open}
  toggleDrawer={toggleDrawer}
/>
{isAuthenticated && (
  <Sidebar
    drawerWidth={drawerWidth}
    miniDrawerWidth={miniDrawerWidth}
    collapsed={!open}
    toggleDrawer={toggleDrawer}
  />
)}

<AppShell.Main>
  <Box
@ -572,10 +572,10 @@ export const WebsocketProvider = ({ children }) => {
// Update the store with progress information
updateEPGProgress(parsedEvent.data);

// If we have source_id/account info, update the EPG source status
if (parsedEvent.data.source_id || parsedEvent.data.account) {
// If we have source/account info, update the EPG source status
if (parsedEvent.data.source || parsedEvent.data.account) {
  const sourceId =
    parsedEvent.data.source_id || parsedEvent.data.account;
    parsedEvent.data.source || parsedEvent.data.account;
  const epg = epgs[sourceId];

  if (epg) {
@ -462,7 +462,16 @@ export default class API {
  }
);

// Don't automatically update the store here - let the caller handle it
// Show success notification
if (response.message) {
  notifications.show({
    title: 'Channels Updated',
    message: response.message,
    color: 'green',
    autoClose: 4000,
  });
}

return response;
} catch (e) {
  errorNotification('Failed to update channels', e);

@ -1788,6 +1797,77 @@ export default class API {
  }
}

// VOD Logo Methods
static async getVODLogos(params = {}) {
  try {
    // Transform usage filter to match backend expectations
    const apiParams = { ...params };
    if (apiParams.usage === 'used') {
      apiParams.used = 'true';
      delete apiParams.usage;
    } else if (apiParams.usage === 'unused') {
      apiParams.used = 'false';
      delete apiParams.usage;
    } else if (apiParams.usage === 'movies') {
      apiParams.used = 'movies';
      delete apiParams.usage;
    } else if (apiParams.usage === 'series') {
      apiParams.used = 'series';
      delete apiParams.usage;
    }

    const queryParams = new URLSearchParams(apiParams);
    const response = await request(
      `${host}/api/vod/vodlogos/?${queryParams.toString()}`
    );

    return response;
  } catch (e) {
    errorNotification('Failed to retrieve VOD logos', e);
    throw e;
  }
}

static async deleteVODLogo(id) {
  try {
    await request(`${host}/api/vod/vodlogos/${id}/`, {
      method: 'DELETE',
    });

    return true;
  } catch (e) {
    errorNotification('Failed to delete VOD logo', e);
    throw e;
  }
}

static async deleteVODLogos(ids) {
  try {
    await request(`${host}/api/vod/vodlogos/bulk-delete/`, {
      method: 'DELETE',
      body: { logo_ids: ids },
    });

    return true;
  } catch (e) {
    errorNotification('Failed to delete VOD logos', e);
    throw e;
  }
}

static async cleanupUnusedVODLogos() {
  try {
    const response = await request(`${host}/api/vod/vodlogos/cleanup/`, {
      method: 'POST',
    });

    return response;
  } catch (e) {
    errorNotification('Failed to cleanup unused VOD logos', e);
    throw e;
  }
}

static async getChannelProfiles() {
  try {
    const response = await request(`${host}/api/channels/profiles/`);

@ -2132,9 +2212,15 @@ export default class API {

// If successful, requery channels to update UI
if (response.success) {
  // Build message based on whether EPG sources need refreshing
  let message = `Updated ${response.channels_updated} channel${response.channels_updated !== 1 ? 's' : ''}`;
  if (response.programs_refreshed > 0) {
    message += `, refreshing ${response.programs_refreshed} EPG source${response.programs_refreshed !== 1 ? 's' : ''}`;
  }

  notifications.show({
    title: 'EPG Association',
    message: `Updated ${response.channels_updated} channels, refreshing ${response.programs_refreshed} EPG sources.`,
    message: message,
    color: 'blue',
  });
@ -17,7 +17,9 @@ import {
  Table,
  Divider,
} from '@mantine/core';
import { Play } from 'lucide-react';
import { Play, Copy } from 'lucide-react';
import { notifications } from '@mantine/notifications';
import { copyToClipboard } from '../utils';
import useVODStore from '../store/useVODStore';
import useVideoStore from '../store/useVideoStore';
import useSettingsStore from '../store/settings';

@ -262,6 +264,39 @@ const SeriesModal = ({ series, opened, onClose }) => {
  showVideo(streamUrl, 'vod', episode);
};

const getEpisodeStreamUrl = (episode) => {
  let streamUrl = `/proxy/vod/episode/${episode.uuid}`;

  // Add selected provider as query parameter if available
  if (selectedProvider) {
    // Use stream_id for most specific selection, fallback to account_id
    if (selectedProvider.stream_id) {
      streamUrl += `?stream_id=${encodeURIComponent(selectedProvider.stream_id)}`;
    } else {
      streamUrl += `?m3u_account_id=${selectedProvider.m3u_account.id}`;
    }
  }

  if (env_mode === 'dev') {
    streamUrl = `${window.location.protocol}//${window.location.hostname}:5656${streamUrl}`;
  } else {
    streamUrl = `${window.location.origin}${streamUrl}`;
  }
  return streamUrl;
};

const handleCopyEpisodeLink = async (episode) => {
  const streamUrl = getEpisodeStreamUrl(episode);
  const success = await copyToClipboard(streamUrl);
  notifications.show({
    title: success ? 'Link Copied!' : 'Copy Failed',
    message: success
      ? 'Episode link copied to clipboard'
      : 'Failed to copy link to clipboard',
    color: success ? 'green' : 'red',
  });
};

const handleEpisodeRowClick = (episode) => {
  setExpandedEpisode(expandedEpisode === episode.id ? null : episode.id);
};

@ -611,20 +646,34 @@ const SeriesModal = ({ series, opened, onClose }) => {
  </Text>
</Table.Td>
<Table.Td>
  <ActionIcon
    variant="filled"
    color="blue"
    size="sm"
    disabled={
      providers.length > 0 && !selectedProvider
    }
    onClick={(e) => {
      e.stopPropagation();
      handlePlayEpisode(episode);
    }}
  >
    <Play size={12} />
  </ActionIcon>
  <Group spacing="xs">
    <ActionIcon
      variant="filled"
      color="blue"
      size="sm"
      disabled={
        providers.length > 0 &&
        !selectedProvider
      }
      onClick={(e) => {
        e.stopPropagation();
        handlePlayEpisode(episode);
      }}
    >
      <Play size={12} />
    </ActionIcon>
    <ActionIcon
      variant="outline"
      color="gray"
      size="sm"
      onClick={(e) => {
        e.stopPropagation();
        handleCopyEpisodeLink(episode);
      }}
    >
      <Copy size={12} />
    </ActionIcon>
  </Group>
</Table.Td>
</Table.Tr>
{expandedEpisode === episode.id && (
@ -13,7 +13,9 @@ import {
  Stack,
  Modal,
} from '@mantine/core';
import { Play } from 'lucide-react';
import { Play, Copy } from 'lucide-react';
import { notifications } from '@mantine/notifications';
import { copyToClipboard } from '../utils';
import useVODStore from '../store/useVODStore';
import useVideoStore from '../store/useVideoStore';
import useSettingsStore from '../store/settings';

@ -232,9 +234,9 @@ const VODModal = ({ vod, opened, onClose }) => {
  }
}, [opened]);

const handlePlayVOD = () => {
const getStreamUrl = () => {
  const vodToPlay = detailedVOD || vod;
  if (!vodToPlay) return;
  if (!vodToPlay) return null;

  let streamUrl = `/proxy/vod/movie/${vod.uuid}`;

@ -253,9 +255,29 @@ const VODModal = ({ vod, opened, onClose }) => {
  } else {
    streamUrl = `${window.location.origin}${streamUrl}`;
  }
  return streamUrl;
};

const handlePlayVOD = () => {
  const streamUrl = getStreamUrl();
  if (!streamUrl) return;
  const vodToPlay = detailedVOD || vod;
  showVideo(streamUrl, 'vod', vodToPlay);
};

const handleCopyLink = async () => {
  const streamUrl = getStreamUrl();
  if (!streamUrl) return;
  const success = await copyToClipboard(streamUrl);
  notifications.show({
    title: success ? 'Link Copied!' : 'Copy Failed',
    message: success
      ? 'Stream link copied to clipboard'
      : 'Failed to copy link to clipboard',
    color: success ? 'green' : 'red',
  });
};

// Helper to get embeddable YouTube URL
const getEmbedUrl = (url) => {
  if (!url) return '';

@ -486,6 +508,16 @@ const VODModal = ({ vod, opened, onClose }) => {
      Watch Trailer
    </Button>
  )}
  <Button
    leftSection={<Copy size={16} />}
    variant="outline"
    color="gray"
    size="sm"
    onClick={handleCopyLink}
    style={{ alignSelf: 'flex-start' }}
  >
    Copy Link
  </Button>
</Group>
</Stack>
</Flex>
@ -1048,8 +1048,10 @@ const ChannelForm = ({ channel = null, isOpen, onClose }) => {
  type="submit"
  variant="default"
  disabled={formik.isSubmitting}
  loading={formik.isSubmitting}
  loaderProps={{ type: 'dots' }}
>
  Submit
  {formik.isSubmitting ? 'Saving...' : 'Submit'}
</Button>
</Flex>
</form>
@ -55,6 +55,7 @@ const ChannelBatchForm = ({ channelIds, isOpen, onClose }) => {

const streamProfiles = useStreamProfilesStore((s) => s.profiles);
const epgs = useEPGsStore((s) => s.epgs);
const tvgs = useEPGsStore((s) => s.tvgs);
const fetchEPGs = useEPGsStore((s) => s.fetchEPGs);

const [channelGroupModelOpen, setChannelGroupModalOpen] = useState(false);

@ -267,17 +268,28 @@ const ChannelBatchForm = ({ channelIds, isOpen, onClose }) => {
} else {
  // Assign the selected dummy EPG
  const selectedEpg = epgs[selectedDummyEpgId];
  if (
    selectedEpg &&
    selectedEpg.epg_data_ids &&
    selectedEpg.epg_data_ids.length > 0
  ) {
    const epgDataId = selectedEpg.epg_data_ids[0];
    const associations = channelIds.map((id) => ({
      channel_id: id,
      epg_data_id: epgDataId,
    }));
    await API.batchSetEPG(associations);
  if (selectedEpg && selectedEpg.epg_data_count > 0) {
    // Convert to number for comparison since Select returns string
    const epgSourceId = parseInt(selectedDummyEpgId, 10);

    // Check if we already have EPG data loaded in the store
    let epgData = tvgs.find((data) => data.epg_source === epgSourceId);

    // If not in store, fetch it
    if (!epgData) {
      const epgDataList = await API.getEPGData();
      epgData = epgDataList.find(
        (data) => data.epg_source === epgSourceId
      );
    }

    if (epgData) {
      const associations = channelIds.map((id) => ({
        channel_id: id,
        epg_data_id: epgData.id,
      }));
      await API.batchSetEPG(associations);
    }
  }
}

@ -911,8 +923,14 @@ const ChannelBatchForm = ({ channelIds, isOpen, onClose }) => {
  </Stack>
</Group>
<Flex mih={50} gap="xs" justify="flex-end" align="flex-end">
  <Button type="submit" variant="default" disabled={isSubmitting}>
    Submit
  <Button
    type="submit"
    variant="default"
    disabled={isSubmitting}
    loading={isSubmitting}
    loaderProps={{ type: 'dots' }}
  >
    {isSubmitting ? 'Saving...' : 'Submit'}
  </Button>
</Flex>
</form>
@ -1,729 +0,0 @@
import React, { useState, useEffect, useRef, useMemo } from 'react';
import { useFormik } from 'formik';
import * as Yup from 'yup';
import useChannelsStore from '../../store/channels';
import API from '../../api';
import useStreamProfilesStore from '../../store/streamProfiles';
import useStreamsStore from '../../store/streams';
import { useChannelLogoSelection } from '../../hooks/useSmartLogos';
import LazyLogo from '../LazyLogo';
import ChannelGroupForm from './ChannelGroup';
import usePlaylistsStore from '../../store/playlists';
import logo from '../../images/logo.png';
import {
  Box,
  Button,
  Modal,
  TextInput,
  NativeSelect,
  Text,
  Group,
  ActionIcon,
  Center,
  Grid,
  Flex,
  Select,
  Divider,
  Stack,
  useMantineTheme,
  Popover,
  ScrollArea,
  Tooltip,
  NumberInput,
  Image,
  UnstyledButton,
} from '@mantine/core';
import { ListOrdered, SquarePlus, SquareX, X } from 'lucide-react';
import useEPGsStore from '../../store/epgs';
import { Dropzone } from '@mantine/dropzone';
import { notifications } from '@mantine/notifications';
import { FixedSizeList as List } from 'react-window';

const ChannelsForm = ({ channel = null, isOpen, onClose }) => {
  const theme = useMantineTheme();

  const listRef = useRef(null);
  const logoListRef = useRef(null);
  const groupListRef = useRef(null);

  const channelGroups = useChannelsStore((s) => s.channelGroups);
  const { logos, ensureLogosLoaded } = useChannelLogoSelection();
  const streams = useStreamsStore((state) => state.streams);
  const streamProfiles = useStreamProfilesStore((s) => s.profiles);
  const playlists = usePlaylistsStore((s) => s.playlists);
  const epgs = useEPGsStore((s) => s.epgs);
  const tvgs = useEPGsStore((s) => s.tvgs);
  const tvgsById = useEPGsStore((s) => s.tvgsById);

  const [logoPreview, setLogoPreview] = useState(null);
  const [channelStreams, setChannelStreams] = useState([]);
  const [channelGroupModelOpen, setChannelGroupModalOpen] = useState(false);
  const [epgPopoverOpened, setEpgPopoverOpened] = useState(false);
  const [logoPopoverOpened, setLogoPopoverOpened] = useState(false);
  const [selectedEPG, setSelectedEPG] = useState('');
  const [tvgFilter, setTvgFilter] = useState('');
  const [logoFilter, setLogoFilter] = useState('');

  const [groupPopoverOpened, setGroupPopoverOpened] = useState(false);
  const [groupFilter, setGroupFilter] = useState('');
  const groupOptions = Object.values(channelGroups);

  const addStream = (stream) => {
    const streamSet = new Set(channelStreams);
    streamSet.add(stream);
    setChannelStreams(Array.from(streamSet));
  };

  const removeStream = (stream) => {
    const streamSet = new Set(channelStreams);
    streamSet.delete(stream);
    setChannelStreams(Array.from(streamSet));
  };

  const handleLogoChange = async (files) => {
    if (files.length === 1) {
      const file = files[0];

      // Validate file size on frontend first
      if (file.size > 5 * 1024 * 1024) {
        // 5MB
        notifications.show({
          title: 'Error',
          message: 'File too large. Maximum size is 5MB.',
          color: 'red',
        });
        return;
      }

      try {
        const retval = await API.uploadLogo(file);
        // Note: API.uploadLogo already adds the logo to the store, no need to fetch
        setLogoPreview(retval.cache_url);
        formik.setFieldValue('logo_id', retval.id);
      } catch (error) {
        console.error('Logo upload failed:', error);
        // Error notification is already handled in API.uploadLogo
      }
    } else {
      setLogoPreview(null);
    }
  };

  const formik = useFormik({
    initialValues: {
      name: '',
      channel_number: '', // Change from 0 to empty string for consistency
      channel_group_id:
        Object.keys(channelGroups).length > 0
          ? Object.keys(channelGroups)[0]
          : '',
      stream_profile_id: '0',
      tvg_id: '',
      tvc_guide_stationid: '',
      epg_data_id: '',
      logo_id: '',
    },
    validationSchema: Yup.object({
      name: Yup.string().required('Name is required'),
      channel_group_id: Yup.string().required('Channel group is required'),
    }),
    onSubmit: async (values, { setSubmitting }) => {
      let response;

      try {
        const formattedValues = { ...values };

        // Convert empty or "0" stream_profile_id to null for the API
        if (
          !formattedValues.stream_profile_id ||
          formattedValues.stream_profile_id === '0'
        ) {
          formattedValues.stream_profile_id = null;
        }

        // Ensure tvg_id is properly included (no empty strings)
        formattedValues.tvg_id = formattedValues.tvg_id || null;

        // Ensure tvc_guide_stationid is properly included (no empty strings)
        formattedValues.tvc_guide_stationid =
          formattedValues.tvc_guide_stationid || null;

        if (channel) {
          // If there's an EPG to set, use our enhanced endpoint
          if (values.epg_data_id !== (channel.epg_data_id ?? '')) {
            // Use the special endpoint to set EPG and trigger refresh
            const epgResponse = await API.setChannelEPG(
              channel.id,
              values.epg_data_id
            );

            // Remove epg_data_id from values since we've handled it separately
            const { epg_data_id, ...otherValues } = formattedValues;

            // Update other channel fields if needed
            if (Object.keys(otherValues).length > 0) {
              response = await API.updateChannel({
                id: channel.id,
                ...otherValues,
                streams: channelStreams.map((stream) => stream.id),
              });
            }
          } else {
            // No EPG change, regular update
            response = await API.updateChannel({
              id: channel.id,
              ...formattedValues,
              streams: channelStreams.map((stream) => stream.id),
            });
          }
        } else {
          // New channel creation - use the standard method
          response = await API.addChannel({
            ...formattedValues,
            streams: channelStreams.map((stream) => stream.id),
          });
        }
      } catch (error) {
        console.error('Error saving channel:', error);
      }

      formik.resetForm();
      API.requeryChannels();

      // Refresh channel profiles to update the membership information
      useChannelsStore.getState().fetchChannelProfiles();

      setSubmitting(false);
      setTvgFilter('');
      setLogoFilter('');
      onClose();
    },
  });

  useEffect(() => {
    if (channel) {
      if (channel.epg_data_id) {
        const epgSource = epgs[tvgsById[channel.epg_data_id]?.epg_source];
        setSelectedEPG(epgSource ? `${epgSource.id}` : '');
      }

      formik.setValues({
        name: channel.name || '',
        channel_number:
          channel.channel_number !== null ? channel.channel_number : '',
|
||||
channel_group_id: channel.channel_group_id
|
||||
? `${channel.channel_group_id}`
|
||||
: '',
|
||||
stream_profile_id: channel.stream_profile_id
|
||||
? `${channel.stream_profile_id}`
|
||||
: '0',
|
||||
tvg_id: channel.tvg_id || '',
|
||||
tvc_guide_stationid: channel.tvc_guide_stationid || '',
|
||||
epg_data_id: channel.epg_data_id ?? '',
|
||||
logo_id: channel.logo_id ? `${channel.logo_id}` : '',
|
||||
});
|
||||
|
||||
setChannelStreams(channel.streams || []);
|
||||
} else {
|
||||
formik.resetForm();
|
||||
setTvgFilter('');
|
||||
setLogoFilter('');
|
||||
}
|
||||
}, [channel, tvgsById, channelGroups]);
|
||||
|
||||
// Memoize logo options to prevent infinite re-renders during background loading
|
||||
const logoOptions = useMemo(() => {
|
||||
return [{ id: '0', name: 'Default' }].concat(Object.values(logos));
|
||||
}, [logos]); // Only depend on logos object
|
||||
|
||||
const renderLogoOption = ({ option, checked }) => {
|
||||
return (
|
||||
<Center style={{ width: '100%' }}>
|
||||
<img src={logos[option.value].cache_url} width="30" />
|
||||
</Center>
|
||||
);
|
||||
};
|
||||
|
||||
// Update the handler for when channel group modal is closed
|
||||
const handleChannelGroupModalClose = (newGroup) => {
|
||||
setChannelGroupModalOpen(false);
|
||||
|
||||
// If a new group was created and returned, update the form with it
|
||||
if (newGroup && newGroup.id) {
|
||||
// Preserve all current form values while updating just the channel_group_id
|
||||
formik.setValues({
|
||||
...formik.values,
|
||||
channel_group_id: `${newGroup.id}`,
|
||||
});
|
||||
}
|
||||
};
|
||||
|
||||
if (!isOpen) {
|
||||
return <></>;
|
||||
}
|
||||
|
||||
const filteredTvgs = tvgs
|
||||
.filter((tvg) => tvg.epg_source == selectedEPG)
|
||||
.filter(
|
||||
(tvg) =>
|
||||
tvg.name.toLowerCase().includes(tvgFilter.toLowerCase()) ||
|
||||
tvg.tvg_id.toLowerCase().includes(tvgFilter.toLowerCase())
|
||||
);
|
||||
|
||||
const filteredLogos = logoOptions.filter((logo) =>
|
||||
logo.name.toLowerCase().includes(logoFilter.toLowerCase())
|
||||
);
|
||||
|
||||
const filteredGroups = groupOptions.filter((group) =>
|
||||
group.name.toLowerCase().includes(groupFilter.toLowerCase())
|
||||
);
|
||||
|
||||
return (
|
||||
<Modal
|
||||
opened={isOpen}
|
||||
onClose={onClose}
|
||||
size={1000}
|
||||
title={
|
||||
<Group gap="5">
|
||||
<ListOrdered size="20" />
|
||||
<Text>Channels</Text>
|
||||
</Group>
|
||||
}
|
||||
styles={{ content: { '--mantine-color-body': '#27272A' } }}
|
||||
>
|
||||
<form onSubmit={formik.handleSubmit}>
|
||||
<Group justify="space-between" align="top">
|
||||
<Stack gap="5" style={{ flex: 1 }}>
|
||||
<TextInput
|
||||
id="name"
|
||||
name="name"
|
||||
label="Channel Name"
|
||||
value={formik.values.name}
|
||||
onChange={formik.handleChange}
|
||||
error={formik.errors.name ? formik.touched.name : ''}
|
||||
size="xs"
|
||||
/>
|
||||
|
||||
<Flex gap="sm">
|
||||
<Popover
|
||||
opened={groupPopoverOpened}
|
||||
onChange={setGroupPopoverOpened}
|
||||
// position="bottom-start"
|
||||
withArrow
|
||||
>
|
||||
<Popover.Target>
|
||||
<TextInput
|
||||
id="channel_group_id"
|
||||
name="channel_group_id"
|
||||
label="Channel Group"
|
||||
readOnly
|
||||
value={
|
||||
channelGroups[formik.values.channel_group_id]
|
||||
? channelGroups[formik.values.channel_group_id].name
|
||||
: ''
|
||||
}
|
||||
onClick={() => setGroupPopoverOpened(true)}
|
||||
size="xs"
|
||||
/>
|
||||
</Popover.Target>
|
||||
|
||||
<Popover.Dropdown onMouseDown={(e) => e.stopPropagation()}>
|
||||
<Group>
|
||||
<TextInput
|
||||
placeholder="Filter"
|
||||
value={groupFilter}
|
||||
onChange={(event) =>
|
||||
setGroupFilter(event.currentTarget.value)
|
||||
}
|
||||
mb="xs"
|
||||
size="xs"
|
||||
/>
|
||||
</Group>
|
||||
|
||||
<ScrollArea style={{ height: 200 }}>
|
||||
<List
|
||||
height={200} // Set max height for visible items
|
||||
itemCount={filteredGroups.length}
|
||||
itemSize={20} // Adjust row height for each item
|
||||
width={200}
|
||||
ref={groupListRef}
|
||||
>
|
||||
{({ index, style }) => (
|
||||
<Box
|
||||
style={{ ...style, height: 20, overflow: 'hidden' }}
|
||||
>
|
||||
<Tooltip
|
||||
openDelay={500}
|
||||
label={filteredGroups[index].name}
|
||||
size="xs"
|
||||
>
|
||||
<UnstyledButton
|
||||
onClick={() => {
|
||||
formik.setFieldValue(
|
||||
'channel_group_id',
|
||||
filteredGroups[index].id
|
||||
);
|
||||
setGroupPopoverOpened(false);
|
||||
}}
|
||||
>
|
||||
<Text
|
||||
size="xs"
|
||||
style={{
|
||||
whiteSpace: 'nowrap',
|
||||
overflow: 'hidden',
|
||||
textOverflow: 'ellipsis',
|
||||
}}
|
||||
>
|
||||
{filteredGroups[index].name}
|
||||
</Text>
|
||||
</UnstyledButton>
|
||||
</Tooltip>
|
||||
</Box>
|
||||
)}
|
||||
</List>
|
||||
</ScrollArea>
|
||||
</Popover.Dropdown>
|
||||
</Popover>
|
||||
|
||||
{/* <Select
|
||||
id="channel_group_id"
|
||||
name="channel_group_id"
|
||||
label="Channel Group"
|
||||
value={formik.values.channel_group_id}
|
||||
searchable
|
||||
onChange={(value) => {
|
||||
formik.setFieldValue('channel_group_id', value); // Update Formik's state with the new value
|
||||
}}
|
||||
error={
|
||||
formik.errors.channel_group_id
|
||||
? formik.touched.channel_group_id
|
||||
: ''
|
||||
}
|
||||
data={Object.values(channelGroups).map((option, index) => ({
|
||||
value: `${option.id}`,
|
||||
label: option.name,
|
||||
}))}
|
||||
size="xs"
|
||||
style={{ flex: 1 }}
|
||||
/> */}
|
||||
<Flex align="flex-end">
|
||||
<ActionIcon
|
||||
color={theme.tailwind.green[5]}
|
||||
onClick={() => setChannelGroupModalOpen(true)}
|
||||
title="Create new group"
|
||||
size="small"
|
||||
variant="transparent"
|
||||
style={{ marginBottom: 5 }}
|
||||
>
|
||||
<SquarePlus size="20" />
|
||||
</ActionIcon>
|
||||
</Flex>
|
||||
</Flex>
|
||||
|
||||
<Select
|
||||
id="stream_profile_id"
|
||||
label="Stream Profile"
|
||||
name="stream_profile_id"
|
||||
value={formik.values.stream_profile_id}
|
||||
onChange={(value) => {
|
||||
formik.setFieldValue('stream_profile_id', value); // Update Formik's state with the new value
|
||||
}}
|
||||
error={
|
||||
formik.errors.stream_profile_id
|
||||
? formik.touched.stream_profile_id
|
||||
: ''
|
||||
}
|
||||
data={[{ value: '0', label: '(use default)' }].concat(
|
||||
streamProfiles.map((option) => ({
|
||||
value: `${option.id}`,
|
||||
label: option.name,
|
||||
}))
|
||||
)}
|
||||
size="xs"
|
||||
/>
|
||||
</Stack>
|
||||
|
||||
<Divider size="sm" orientation="vertical" />
|
||||
|
||||
<Stack justify="flex-start" style={{ flex: 1 }}>
|
||||
<Group justify="space-between">
|
||||
<Popover
|
||||
opened={logoPopoverOpened}
|
||||
onChange={(opened) => {
|
||||
setLogoPopoverOpened(opened);
|
||||
if (opened) {
|
||||
ensureLogosLoaded();
|
||||
}
|
||||
}}
|
||||
// position="bottom-start"
|
||||
withArrow
|
||||
>
|
||||
<Popover.Target>
|
||||
<TextInput
|
||||
id="logo_id"
|
||||
name="logo_id"
|
||||
label="Logo"
|
||||
readOnly
|
||||
value={logos[formik.values.logo_id]?.name || 'Default'}
|
||||
onClick={() => setLogoPopoverOpened(true)}
|
||||
size="xs"
|
||||
/>
|
||||
</Popover.Target>
|
||||
|
||||
<Popover.Dropdown onMouseDown={(e) => e.stopPropagation()}>
|
||||
<Group>
|
||||
<TextInput
|
||||
placeholder="Filter"
|
||||
value={logoFilter}
|
||||
onChange={(event) =>
|
||||
setLogoFilter(event.currentTarget.value)
|
||||
}
|
||||
mb="xs"
|
||||
size="xs"
|
||||
/>
|
||||
</Group>
|
||||
|
||||
<ScrollArea style={{ height: 200 }}>
|
||||
<List
|
||||
height={200} // Set max height for visible items
|
||||
itemCount={filteredLogos.length}
|
||||
itemSize={20} // Adjust row height for each item
|
||||
width="100%"
|
||||
ref={logoListRef}
|
||||
>
|
||||
{({ index, style }) => (
|
||||
<div style={style}>
|
||||
<Center>
|
||||
<img
|
||||
src={filteredLogos[index].cache_url || logo}
|
||||
height="20"
|
||||
style={{ maxWidth: 80 }}
|
||||
onClick={() => {
|
||||
formik.setFieldValue(
|
||||
'logo_id',
|
||||
filteredLogos[index].id
|
||||
);
|
||||
}}
|
||||
/>
|
||||
</Center>
|
||||
</div>
|
||||
)}
|
||||
</List>
|
||||
</ScrollArea>
|
||||
</Popover.Dropdown>
|
||||
</Popover>
|
||||
|
||||
<LazyLogo
|
||||
logoId={formik.values.logo_id}
|
||||
alt="channel logo"
|
||||
style={{ height: 40 }}
|
||||
/>
|
||||
</Group>
|
||||
|
||||
<Group>
|
||||
<Divider size="xs" style={{ flex: 1 }} />
|
||||
<Text size="xs" c="dimmed">
|
||||
OR
|
||||
</Text>
|
||||
<Divider size="xs" style={{ flex: 1 }} />
|
||||
</Group>
|
||||
|
||||
<Stack>
|
||||
<Text size="sm">Upload Logo</Text>
|
||||
<Dropzone
|
||||
onDrop={handleLogoChange}
|
||||
onReject={(files) => console.log('rejected files', files)}
|
||||
maxSize={5 * 1024 ** 2}
|
||||
>
|
||||
<Group
|
||||
justify="center"
|
||||
gap="xl"
|
||||
mih={40}
|
||||
style={{ pointerEvents: 'none' }}
|
||||
>
|
||||
<Text size="sm" inline>
|
||||
Drag images here or click to select files
|
||||
</Text>
|
||||
</Group>
|
||||
</Dropzone>
|
||||
|
||||
<Center></Center>
|
||||
</Stack>
|
||||
</Stack>
|
||||
|
||||
<Divider size="sm" orientation="vertical" />
|
||||
|
||||
<Stack gap="5" style={{ flex: 1 }} justify="flex-start">
|
||||
<NumberInput
|
||||
id="channel_number"
|
||||
name="channel_number"
|
||||
label="Channel # (blank to auto-assign)"
|
||||
value={formik.values.channel_number}
|
||||
onChange={(value) =>
|
||||
formik.setFieldValue('channel_number', value)
|
||||
}
|
||||
error={
|
||||
formik.errors.channel_number
|
||||
? formik.touched.channel_number
|
||||
: ''
|
||||
}
|
||||
size="xs"
|
||||
/>
|
||||
|
||||
<TextInput
|
||||
id="tvg_id"
|
||||
name="tvg_id"
|
||||
label="TVG-ID"
|
||||
value={formik.values.tvg_id}
|
||||
onChange={formik.handleChange}
|
||||
error={formik.errors.tvg_id ? formik.touched.tvg_id : ''}
|
||||
size="xs"
|
||||
/>
|
||||
|
||||
<TextInput
|
||||
id="tvc_guide_stationid"
|
||||
name="tvc_guide_stationid"
|
||||
label="Gracenote StationId"
|
||||
value={formik.values.tvc_guide_stationid}
|
||||
onChange={formik.handleChange}
|
||||
error={
|
||||
formik.errors.tvc_guide_stationid
|
||||
? formik.touched.tvc_guide_stationid
|
||||
: ''
|
||||
}
|
||||
size="xs"
|
||||
/>
|
||||
|
||||
<Popover
|
||||
opened={epgPopoverOpened}
|
||||
onChange={setEpgPopoverOpened}
|
||||
// position="bottom-start"
|
||||
withArrow
|
||||
>
|
||||
<Popover.Target>
|
||||
<TextInput
|
||||
id="epg_data_id"
|
||||
name="epg_data_id"
|
||||
label={
|
||||
<Group style={{ width: '100%' }}>
|
||||
<Box>EPG</Box>
|
||||
<Button
|
||||
size="xs"
|
||||
variant="transparent"
|
||||
onClick={() =>
|
||||
formik.setFieldValue('epg_data_id', null)
|
||||
}
|
||||
>
|
||||
Use Dummy
|
||||
</Button>
|
||||
</Group>
|
||||
}
|
||||
readOnly
|
||||
value={
|
||||
formik.values.epg_data_id
|
||||
? tvgsById[formik.values.epg_data_id].name
|
||||
: 'Dummy'
|
||||
}
|
||||
onClick={() => setEpgPopoverOpened(true)}
|
||||
size="xs"
|
||||
rightSection={
|
||||
<Tooltip label="Use dummy EPG">
|
||||
<ActionIcon
|
||||
// color={theme.tailwind.green[5]}
|
||||
color="white"
|
||||
onClick={(e) => {
|
||||
e.stopPropagation();
|
||||
formik.setFieldValue('epg_data_id', null);
|
||||
}}
|
||||
title="Create new group"
|
||||
size="small"
|
||||
variant="transparent"
|
||||
>
|
||||
<X size="20" />
|
||||
</ActionIcon>
|
||||
</Tooltip>
|
||||
}
|
||||
/>
|
||||
</Popover.Target>
|
||||
|
||||
<Popover.Dropdown onMouseDown={(e) => e.stopPropagation()}>
|
||||
<Group>
|
||||
<Select
|
||||
label="Source"
|
||||
value={selectedEPG}
|
||||
onChange={setSelectedEPG}
|
||||
data={Object.values(epgs).map((epg) => ({
|
||||
value: `${epg.id}`,
|
||||
label: epg.name,
|
||||
}))}
|
||||
size="xs"
|
||||
mb="xs"
|
||||
/>
|
||||
|
||||
{/* Filter Input */}
|
||||
<TextInput
|
||||
label="Filter"
|
||||
value={tvgFilter}
|
||||
onChange={(event) =>
|
||||
setTvgFilter(event.currentTarget.value)
|
||||
}
|
||||
mb="xs"
|
||||
size="xs"
|
||||
/>
|
||||
</Group>
|
||||
|
||||
<ScrollArea style={{ height: 200 }}>
|
||||
<List
|
||||
height={200} // Set max height for visible items
|
||||
itemCount={filteredTvgs.length}
|
||||
itemSize={40} // Adjust row height for each item
|
||||
width="100%"
|
||||
ref={listRef}
|
||||
>
|
||||
{({ index, style }) => (
|
||||
<div style={style}>
|
||||
<Button
|
||||
key={filteredTvgs[index].id}
|
||||
variant="subtle"
|
||||
color="gray"
|
||||
fullWidth
|
||||
justify="left"
|
||||
size="xs"
|
||||
onClick={() => {
|
||||
if (filteredTvgs[index].id == '0') {
|
||||
formik.setFieldValue('epg_data_id', null);
|
||||
} else {
|
||||
formik.setFieldValue(
|
||||
'epg_data_id',
|
||||
filteredTvgs[index].id
|
||||
);
|
||||
}
|
||||
setEpgPopoverOpened(false);
|
||||
}}
|
||||
>
|
||||
{filteredTvgs[index].tvg_id}
|
||||
</Button>
|
||||
</div>
|
||||
)}
|
||||
</List>
|
||||
</ScrollArea>
|
||||
</Popover.Dropdown>
|
||||
</Popover>
|
||||
</Stack>
|
||||
</Group>
|
||||
|
||||
<Flex mih={50} gap="xs" justify="flex-end" align="flex-end">
|
||||
<Button
|
||||
type="submit"
|
||||
variant="default"
|
||||
disabled={formik.isSubmitting}
|
||||
>
|
||||
Submit
|
||||
</Button>
|
||||
</Flex>
|
||||
</form>
|
||||
</Modal>
|
||||
);
|
||||
};
|
||||
|
||||
export default ChannelsForm;
|
||||
|
|
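A note on the pattern above: all three pickers (group, logo, EPG) render their options through react-window's FixedSizeList, so only the rows visible inside the 200px viewport are mounted. A minimal sketch of that pattern, with illustrative names that are not taken from this diff:

// Each row render receives { index, style }; the style prop must be
// spread onto the row element so react-window can absolutely position it.
import { FixedSizeList as List } from 'react-window';

const VirtualizedPicker = ({ items, onPick }) => (
  <List height={200} itemCount={items.length} itemSize={20} width={200}>
    {({ index, style }) => (
      <div style={style} onClick={() => onPick(items[index])}>
        {items[index].name}
      </div>
    )}
  </List>
);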
@@ -1172,19 +1172,21 @@ const LiveGroupFilter = ({
          }}
          data={[
            { value: '0', label: 'No EPG (Disabled)' },
            ...epgSources.map((source) => ({
              value: source.id.toString(),
              label: `${source.name} (${
                source.source_type === 'dummy'
                  ? 'Dummy'
                  : source.source_type === 'xmltv'
                    ? 'XMLTV'
                    : source.source_type === 'schedules_direct'
                      ? 'Schedules Direct'
                      : source.source_type
              })`,
            })),
            ...[...epgSources]
              .sort((a, b) => a.name.localeCompare(b.name))
              .map((source) => ({
                value: source.id.toString(),
                label: `${source.name} (${
                  source.source_type === 'dummy'
                    ? 'Dummy'
                    : source.source_type === 'xmltv'
                      ? 'XMLTV'
                      : source.source_type === 'schedules_direct'
                        ? 'Schedules Direct'
                        : source.source_type
                })`,
              })),
          ]}
          clearable
          searchable
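The copy-then-sort shape in the new mapping matters: Array.prototype.sort mutates in place, and epgSources comes from shared store state. A minimal illustration of the same idiom (names hypothetical):

// Spread into a fresh array first so the store's array is untouched.
const options = [...epgSources]
  .sort((a, b) => a.name.localeCompare(b.name))
  .map((s) => ({ value: String(s.id), label: s.name }));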
@@ -1,7 +1,24 @@
import React, { useState, useEffect } from 'react';
import { useNavigate } from 'react-router-dom';
import useAuthStore from '../../store/auth';
import { Paper, Title, TextInput, Button, Center, Stack } from '@mantine/core';
import API from '../../api';
import {
  Paper,
  Title,
  TextInput,
  Button,
  Center,
  Stack,
  Text,
  Image,
  Group,
  Divider,
  Modal,
  Anchor,
  Code,
  Checkbox,
} from '@mantine/core';
import logo from '../../assets/logo.png';

const LoginForm = () => {
  const login = useAuthStore((s) => s.login);

@@ -11,12 +28,69 @@ const LoginForm = () => {

  const navigate = useNavigate(); // Hook to navigate to other routes
  const [formData, setFormData] = useState({ username: '', password: '' });
  const [rememberMe, setRememberMe] = useState(false);
  const [savePassword, setSavePassword] = useState(false);
  const [forgotPasswordOpened, setForgotPasswordOpened] = useState(false);
  const [version, setVersion] = useState(null);
  const [isLoading, setIsLoading] = useState(false);

  // useEffect(() => {
  //   if (isAuthenticated) {
  //     navigate('/channels');
  //   }
  // }, [isAuthenticated, navigate]);
  // Simple base64 encoding/decoding for localStorage
  // Note: This is obfuscation, not encryption. Use browser's password manager for real security.
  const encodePassword = (password) => {
    try {
      return btoa(password);
    } catch (error) {
      console.error('Encoding error:', error);
      return null;
    }
  };

  const decodePassword = (encoded) => {
    try {
      return atob(encoded);
    } catch (error) {
      console.error('Decoding error:', error);
      return '';
    }
  };

  useEffect(() => {
    // Fetch version info
    API.getVersion().then((data) => {
      setVersion(data?.version);
    });
  }, []);

  useEffect(() => {
    // Load saved username if it exists
    const savedUsername = localStorage.getItem(
      'dispatcharr_remembered_username'
    );
    const savedPassword = localStorage.getItem('dispatcharr_saved_password');

    if (savedUsername) {
      setFormData((prev) => ({ ...prev, username: savedUsername }));
      setRememberMe(true);

      if (savedPassword) {
        try {
          const decrypted = decodePassword(savedPassword);
          if (decrypted) {
            setFormData((prev) => ({ ...prev, password: decrypted }));
            setSavePassword(true);
          }
        } catch {
          // If decoding fails, just skip
        }
      }
    }
  }, []);

  useEffect(() => {
    if (isAuthenticated) {
      navigate('/channels');
    }
  }, [isAuthenticated, navigate]);

  const handleInputChange = (e) => {
    setFormData({

@@ -27,13 +101,38 @@ const LoginForm = () => {

  const handleSubmit = async (e) => {
    e.preventDefault();
    await login(formData);
    setIsLoading(true);

    try {
      await login(formData);

      // Save username if remember me is checked
      if (rememberMe) {
        localStorage.setItem(
          'dispatcharr_remembered_username',
          formData.username
        );

        // Save password if save password is checked
        if (savePassword) {
          const encoded = encodePassword(formData.password);
          if (encoded) {
            localStorage.setItem('dispatcharr_saved_password', encoded);
          }
        } else {
          localStorage.removeItem('dispatcharr_saved_password');
        }
      } else {
        localStorage.removeItem('dispatcharr_remembered_username');
        localStorage.removeItem('dispatcharr_saved_password');
      }

      await initData();
      navigate('/channels');
      // Navigation will happen automatically via the useEffect or route protection
    } catch (e) {
      console.log(`Failed to login: ${e}`);
      await logout();
      setIsLoading(false);
    }
  };

@@ -45,11 +144,29 @@ const LoginForm = () => {

      >
        <Paper
          elevation={3}
          style={{ padding: 30, width: '100%', maxWidth: 400 }}
          style={{
            padding: 30,
            width: '100%',
            maxWidth: 500,
            position: 'relative',
          }}
        >
          <Title order={4} align="center">
            Login
          </Title>
          <Stack align="center" spacing="lg">
            <Image
              src={logo}
              alt="Dispatcharr Logo"
              width={120}
              height={120}
              fit="contain"
            />
            <Title order={2} align="center">
              Dispatcharr
            </Title>
            <Text size="sm" color="dimmed" align="center">
              Welcome back! Please log in to continue.
            </Text>
            <Divider style={{ width: '100%' }} />
          </Stack>
          <form onSubmit={handleSubmit}>
            <Stack>
              <TextInput

@@ -69,12 +186,124 @@ const LoginForm = () => {

                // required
              />

              <Button type="submit" mt="sm">
                Login
              <Group justify="space-between" align="center">
                <Group align="center" spacing="xs">
                  <Checkbox
                    label="Remember me"
                    checked={rememberMe}
                    onChange={(e) => setRememberMe(e.currentTarget.checked)}
                    size="sm"
                  />
                  {rememberMe && (
                    <Checkbox
                      label="Save password"
                      checked={savePassword}
                      onChange={(e) => setSavePassword(e.currentTarget.checked)}
                      size="sm"
                    />
                  )}
                </Group>
                <Anchor
                  size="sm"
                  component="button"
                  type="button"
                  onClick={(e) => {
                    e.preventDefault();
                    setForgotPasswordOpened(true);
                  }}
                >
                  Forgot password?
                </Anchor>
              </Group>

              <div
                style={{
                  position: 'relative',
                  height: '0',
                  overflow: 'visible',
                  marginBottom: '-4px',
                }}
              >
                {savePassword && (
                  <Text
                    size="xs"
                    color="red"
                    style={{
                      marginTop: '-10px',
                      marginBottom: '0',
                      lineHeight: '1.2',
                    }}
                  >
                    ⚠ Password will be stored locally without encryption. Only
                    use on trusted devices.
                  </Text>
                )}
              </div>

              <Button
                type="submit"
                fullWidth
                loading={isLoading}
                disabled={isLoading}
                loaderProps={{ type: 'dots' }}
              >
                {isLoading ? 'Logging you in...' : 'Login'}
              </Button>
            </Stack>
          </form>

          {version && (
            <Text
              size="xs"
              color="dimmed"
              style={{
                position: 'absolute',
                bottom: 6,
                right: 30,
              }}
            >
              v{version}
            </Text>
          )}
        </Paper>

        <Modal
          opened={forgotPasswordOpened}
          onClose={() => setForgotPasswordOpened(false)}
          title="Reset Your Password"
          centered
        >
          <Stack spacing="md">
            <Text>
              To reset your password, your administrator needs to run a Django
              management command:
            </Text>
            <div>
              <Text weight={500} size="sm" mb={8}>
                If running with Docker:
              </Text>
              <Code block>
                docker exec &lt;container_name&gt; python manage.py changepassword
                &lt;username&gt;
              </Code>
            </div>
            <div>
              <Text weight={500} size="sm" mb={8}>
                If running locally:
              </Text>
              <Code block>python manage.py changepassword &lt;username&gt;</Code>
            </div>
            <Text size="sm" color="dimmed">
              The command will prompt for a new password. Replace
              <code>&lt;container_name&gt;</code> with your Docker container name
              and <code>&lt;username&gt;</code> with the account username.
            </Text>
            <Text size="sm" color="dimmed" italic>
              Please contact your system administrator to perform a password
              reset.
            </Text>
          </Stack>
        </Modal>
      </Center>
  );
};
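For reference, the btoa/atob pair used in the login form only round-trips Latin-1 strings; anything outside that range throws, which the surrounding try/catch turns into a null or empty result. A small sketch of the round-trip:

// btoa encodes to base64; atob decodes it back. This hides the value
// from casual inspection but provides no cryptographic protection.
const encoded = btoa('hunter2'); // 'aHVudGVyMg=='
const decoded = atob(encoded);   // 'hunter2'
console.assert(decoded === 'hunter2');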
@@ -181,8 +181,6 @@ const EPGsTable = () => {
    );
  };

  console.log(epgs);

  const columns = useMemo(
    //column definitions...
    () => [
@@ -115,6 +115,7 @@ const LogosTable = () => {
    pageSize: pageSize,
  });
  const [paginationString, setPaginationString] = useState('');
  const tableRef = React.useRef(null);

  // Debounce the name filter
  useEffect(() => {

@@ -162,6 +163,14 @@ const LogosTable = () => {
  /**
   * Functions
   */
  const clearSelections = useCallback(() => {
    setSelectedRows(new Set());
    // Clear table's internal selection state if table is initialized
    if (tableRef.current?.setSelectedTableIds) {
      tableRef.current.setSelectedTableIds([]);
    }
  }, []);

  const executeDeleteLogo = useCallback(
    async (id, deleteFile = false) => {
      setIsLoading(true);

@@ -185,10 +194,10 @@ const LogosTable = () => {
        setDeleteTarget(null);
        setLogoToDelete(null);
        setIsBulkDelete(false);
        setSelectedRows(new Set()); // Clear selections
        clearSelections(); // Clear selections
      }
    },
    [fetchAllLogos]
    [fetchAllLogos, clearSelections]
  );

  const executeBulkDelete = useCallback(

@@ -215,10 +224,10 @@ const LogosTable = () => {
        setIsLoading(false);
        setConfirmDeleteOpen(false);
        setIsBulkDelete(false);
        setSelectedRows(new Set()); // Clear selections
        clearSelections(); // Clear selections
      }
    },
    [selectedRows, fetchAllLogos]
    [selectedRows, fetchAllLogos, clearSelections]
  );

  const executeCleanupUnused = useCallback(

@@ -226,7 +235,6 @@ const LogosTable = () => {
      setIsCleaningUp(true);
      try {
        const result = await API.cleanupUnusedLogos(deleteFiles);
        await fetchAllLogos(); // Refresh all logos to maintain full view

        let message = `Successfully deleted ${result.deleted_count} unused logos`;
        if (result.local_files_deleted > 0) {

@@ -238,6 +246,9 @@ const LogosTable = () => {
          message: message,
          color: 'green',
        });

        // Force refresh all logos after cleanup to maintain full view
        await fetchAllLogos(true);
      } catch (error) {
        notifications.show({
          title: 'Cleanup Failed',

@@ -247,10 +258,10 @@ const LogosTable = () => {
      } finally {
        setIsCleaningUp(false);
        setConfirmCleanupOpen(false);
        setSelectedRows(new Set()); // Clear selections after cleanup
        clearSelections(); // Clear selections after cleanup
      }
    },
    [fetchAllLogos]
    [fetchAllLogos, clearSelections]
  );

  const editLogo = useCallback(async (logo = null) => {

@@ -287,10 +298,10 @@ const LogosTable = () => {
      if (checked) {
        setSelectedRows(new Set(data.map((logo) => logo.id)));
      } else {
        setSelectedRows(new Set());
        clearSelections();
      }
    },
    [data]
    [data, clearSelections]
  );

  const deleteBulkLogos = useCallback(() => {

@@ -308,8 +319,8 @@ const LogosTable = () => {

  // Clear selections when logos data changes (e.g., after filtering)
  useEffect(() => {
    setSelectedRows(new Set());
  }, [data.length]);
    clearSelections();
  }, [data.length, clearSelections]);

  // Update pagination when pageSize changes
  useEffect(() => {

@@ -614,6 +625,11 @@ const LogosTable = () => {
    },
  });

  // Store table reference for clearing selections
  React.useEffect(() => {
    tableRef.current = table;
  }, [table]);

  return (
    <>
      <Box

@@ -626,25 +642,6 @@ const LogosTable = () => {
        }}
      >
        <Stack gap="md" style={{ maxWidth: '1200px', width: '100%' }}>
          <Flex style={{ alignItems: 'center', paddingBottom: 10 }} gap={15}>
            <Text
              style={{
                fontFamily: 'Inter, sans-serif',
                fontWeight: 500,
                fontSize: '20px',
                lineHeight: 1,
                letterSpacing: '-0.3px',
                color: 'gray.6',
                marginBottom: 0,
              }}
            >
              Logos
            </Text>
            <Text size="sm" c="dimmed">
              ({data.length} logo{data.length !== 1 ? 's' : ''})
            </Text>
          </Flex>

          <Paper
            style={{
              backgroundColor: '#27272A',
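The selection-state updates that this table consolidates follow the immutable-Set idiom: copy before mutating so React can detect the change. A minimal sketch of that shape (illustrative, not part of the diff):

// Never call add/delete on the Set held in state; clone it first so
// the state setter receives a new reference.
setSelectedRows((prev) => {
  const next = new Set(prev);
  checked ? next.add(id) : next.delete(id);
  return next;
});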
frontend/src/components/tables/VODLogosTable.jsx (new file, 659 lines)

@@ -0,0 +1,659 @@
import React, { useCallback, useEffect, useMemo, useState } from 'react';
import {
  ActionIcon,
  Badge,
  Box,
  Button,
  Center,
  Checkbox,
  Flex,
  Group,
  Image,
  LoadingOverlay,
  NativeSelect,
  Pagination,
  Paper,
  Select,
  Stack,
  Text,
  TextInput,
  Tooltip,
  useMantineTheme,
} from '@mantine/core';
import { ExternalLink, Search, Trash2, Trash, SquareMinus } from 'lucide-react';
import useVODLogosStore from '../../store/vodLogos';
import useLocalStorage from '../../hooks/useLocalStorage';
import { CustomTable, useTable } from './CustomTable';
import ConfirmationDialog from '../ConfirmationDialog';
import { notifications } from '@mantine/notifications';

const VODLogoRowActions = ({ theme, row, deleteLogo }) => {
  const [tableSize] = useLocalStorage('table-size', 'default');

  const onDelete = useCallback(() => {
    deleteLogo(row.original.id);
  }, [row.original.id, deleteLogo]);

  const iconSize =
    tableSize === 'default' ? 'sm' : tableSize === 'compact' ? 'xs' : 'md';

  return (
    <Box style={{ width: '100%', justifyContent: 'left' }}>
      <Group gap={2} justify="center">
        <ActionIcon
          size={iconSize}
          variant="transparent"
          color={theme.tailwind.red[6]}
          onClick={onDelete}
        >
          <SquareMinus size="18" />
        </ActionIcon>
      </Group>
    </Box>
  );
};

export default function VODLogosTable() {
  const theme = useMantineTheme();

  const {
    logos,
    totalCount,
    isLoading,
    fetchVODLogos,
    deleteVODLogo,
    deleteVODLogos,
    cleanupUnusedVODLogos,
  } = useVODLogosStore();

  const [currentPage, setCurrentPage] = useState(1);
  const [pageSize, setPageSize] = useState(25);
  const [nameFilter, setNameFilter] = useState('');
  const [usageFilter, setUsageFilter] = useState('all');
  const [selectedRows, setSelectedRows] = useState(new Set());
  const [confirmDeleteOpen, setConfirmDeleteOpen] = useState(false);
  const [deleteTarget, setDeleteTarget] = useState(null);
  const [confirmCleanupOpen, setConfirmCleanupOpen] = useState(false);
  const [paginationString, setPaginationString] = useState('');
  const [isCleaningUp, setIsCleaningUp] = useState(false);
  const tableRef = React.useRef(null);

  // Calculate unused logos count
  const unusedLogosCount = useMemo(() => {
    return logos.filter(
      (logo) => logo.movie_count === 0 && logo.series_count === 0
    ).length;
  }, [logos]);

  useEffect(() => {
    fetchVODLogos({
      page: currentPage,
      page_size: pageSize,
      name: nameFilter,
      usage: usageFilter === 'all' ? undefined : usageFilter,
    });
  }, [currentPage, pageSize, nameFilter, usageFilter, fetchVODLogos]);

  const handleSelectAll = useCallback(
    (checked) => {
      if (checked) {
        setSelectedRows(new Set(logos.map((logo) => logo.id)));
      } else {
        setSelectedRows(new Set());
      }
    },
    [logos]
  );

  const handleSelectRow = useCallback((id, checked) => {
    setSelectedRows((prev) => {
      const newSet = new Set(prev);
      if (checked) {
        newSet.add(id);
      } else {
        newSet.delete(id);
      }
      return newSet;
    });
  }, []);

  const deleteLogo = useCallback((id) => {
    setDeleteTarget([id]);
    setConfirmDeleteOpen(true);
  }, []);

  const handleDeleteSelected = useCallback(() => {
    setDeleteTarget(Array.from(selectedRows));
    setConfirmDeleteOpen(true);
  }, [selectedRows]);

  const onRowSelectionChange = useCallback((newSelection) => {
    setSelectedRows(new Set(newSelection));
  }, []);

  const clearSelections = useCallback(() => {
    setSelectedRows(new Set());
    // Clear table's internal selection state if table is initialized
    if (tableRef.current?.setSelectedTableIds) {
      tableRef.current.setSelectedTableIds([]);
    }
  }, []);

  const handleConfirmDelete = async () => {
    try {
      if (deleteTarget.length === 1) {
        await deleteVODLogo(deleteTarget[0]);
        notifications.show({
          title: 'Success',
          message: 'VOD logo deleted successfully',
          color: 'green',
        });
      } else {
        await deleteVODLogos(deleteTarget);
        notifications.show({
          title: 'Success',
          message: `${deleteTarget.length} VOD logos deleted successfully`,
          color: 'green',
        });
      }
    } catch (error) {
      notifications.show({
        title: 'Error',
        message: error.message || 'Failed to delete VOD logos',
        color: 'red',
      });
    } finally {
      // Always clear selections and close dialog, even on error
      clearSelections();
      setConfirmDeleteOpen(false);
      setDeleteTarget(null);
    }
  };

  const handleCleanupUnused = useCallback(() => {
    setConfirmCleanupOpen(true);
  }, []);

  const handleConfirmCleanup = async () => {
    setIsCleaningUp(true);
    try {
      const result = await cleanupUnusedVODLogos();
      notifications.show({
        title: 'Success',
        message: `Cleaned up ${result.deleted_count} unused VOD logos`,
        color: 'green',
      });
    } catch (error) {
      notifications.show({
        title: 'Error',
        message: error.message || 'Failed to cleanup unused VOD logos',
        color: 'red',
      });
    } finally {
      setIsCleaningUp(false);
      setConfirmCleanupOpen(false);
      clearSelections(); // Clear selections after cleanup
    }
  };

  // Clear selections only when filters change (not on every data fetch)
  useEffect(() => {
    clearSelections();
  }, [nameFilter, usageFilter, clearSelections]);

  useEffect(() => {
    const startItem = (currentPage - 1) * pageSize + 1;
    const endItem = Math.min(currentPage * pageSize, totalCount);
    setPaginationString(`${startItem} to ${endItem} of ${totalCount}`);
  }, [currentPage, pageSize, totalCount]);

  const pageCount = useMemo(() => {
    return Math.ceil(totalCount / pageSize);
  }, [totalCount, pageSize]);

  const columns = useMemo(
    () => [
      {
        id: 'select',
        header: () => (
          <Checkbox
            checked={
              selectedRows.size > 0 && selectedRows.size === logos.length
            }
            indeterminate={
              selectedRows.size > 0 && selectedRows.size < logos.length
            }
            onChange={(event) => handleSelectAll(event.currentTarget.checked)}
            size="sm"
          />
        ),
        cell: ({ row }) => (
          <Checkbox
            checked={selectedRows.has(row.original.id)}
            onChange={(event) =>
              handleSelectRow(row.original.id, event.currentTarget.checked)
            }
            size="sm"
          />
        ),
        size: 50,
        enableSorting: false,
      },
      {
        header: 'Preview',
        accessorKey: 'cache_url',
        size: 80,
        enableSorting: false,
        cell: ({ getValue, row }) => (
          <Center style={{ width: '100%', padding: '4px' }}>
            <Image
              src={getValue()}
              alt={row.original.name}
              width={40}
              height={30}
              fit="contain"
              fallbackSrc="/logo.png"
              style={{
                transition: 'transform 0.3s ease',
                cursor: 'pointer',
              }}
              onMouseEnter={(e) => {
                e.target.style.transform = 'scale(1.5)';
              }}
              onMouseLeave={(e) => {
                e.target.style.transform = 'scale(1)';
              }}
            />
          </Center>
        ),
      },
      {
        header: 'Name',
        accessorKey: 'name',
        size: 250,
        cell: ({ getValue }) => (
          <Text fw={500} size="sm">
            {getValue()}
          </Text>
        ),
      },
      {
        header: 'Usage',
        accessorKey: 'usage',
        size: 120,
        cell: ({ row }) => {
          const { movie_count, series_count, item_names } = row.original;
          const totalUsage = movie_count + series_count;

          if (totalUsage === 0) {
            return (
              <Badge size="sm" variant="light" color="gray">
                Unused
              </Badge>
            );
          }

          // Build usage description
          const usageParts = [];
          if (movie_count > 0) {
            usageParts.push(
              `${movie_count} movie${movie_count !== 1 ? 's' : ''}`
            );
          }
          if (series_count > 0) {
            usageParts.push(`${series_count} series`);
          }

          const label =
            usageParts.length === 1
              ? usageParts[0]
              : `${totalUsage} item${totalUsage !== 1 ? 's' : ''}`;

          return (
            <Tooltip
              label={
                <div>
                  <Text size="xs" fw={600}>
                    Used by {usageParts.join(' & ')}:
                  </Text>
                  {item_names &&
                    item_names.map((name, index) => (
                      <Text key={index} size="xs">
                        • {name}
                      </Text>
                    ))}
                </div>
              }
              multiline
              width={220}
            >
              <Badge size="sm" variant="light" color="blue">
                {label}
              </Badge>
            </Tooltip>
          );
        },
      },
      {
        header: 'URL',
        accessorKey: 'url',
        grow: true,
        cell: ({ getValue }) => (
          <Group gap={4} style={{ alignItems: 'center' }}>
            <Box
              style={{
                whiteSpace: 'nowrap',
                overflow: 'hidden',
                textOverflow: 'ellipsis',
                maxWidth: 300,
              }}
            >
              <Text size="sm" c="dimmed">
                {getValue()}
              </Text>
            </Box>
            {getValue()?.startsWith('http') && (
              <ActionIcon
                size="xs"
                variant="transparent"
                color="gray"
                onClick={() => window.open(getValue(), '_blank')}
              >
                <ExternalLink size={12} />
              </ActionIcon>
            )}
          </Group>
        ),
      },
      {
        id: 'actions',
        size: 80,
        header: 'Actions',
        enableSorting: false,
        cell: ({ row }) => (
          <VODLogoRowActions theme={theme} row={row} deleteLogo={deleteLogo} />
        ),
      },
    ],
    [theme, deleteLogo, selectedRows, handleSelectAll, handleSelectRow, logos]
  );

  const renderHeaderCell = (header) => {
    return (
      <Text size="sm" name={header.id}>
        {header.column.columnDef.header}
      </Text>
    );
  };

  const table = useTable({
    data: logos,
    columns,
    manualPagination: true,
    pageCount: pageCount,
    allRowIds: logos.map((logo) => logo.id),
    enablePagination: false,
    enableRowSelection: true,
    enableRowVirtualization: false,
    renderTopToolbar: false,
    manualSorting: false,
    manualFiltering: false,
    onRowSelectionChange: onRowSelectionChange,
    headerCellRenderFns: {
      actions: renderHeaderCell,
      cache_url: renderHeaderCell,
      name: renderHeaderCell,
      url: renderHeaderCell,
      usage: renderHeaderCell,
    },
  });

  // Store table reference for clearing selections
  React.useEffect(() => {
    tableRef.current = table;
  }, [table]);

  // Helper to get single logo when confirming single-delete
  const logoToDelete =
    deleteTarget && deleteTarget.length === 1
      ? logos.find((l) => l.id === deleteTarget[0])
      : null;

  return (
    <Box
      style={{
        display: 'flex',
        justifyContent: 'center',
        padding: '0px',
        minHeight: 'calc(100vh - 200px)',
        minWidth: '900px',
      }}
    >
      <Stack gap="md" style={{ maxWidth: '1200px', width: '100%' }}>
        <Paper
          style={{
            backgroundColor: '#27272A',
            border: '1px solid #3f3f46',
            borderRadius: 'var(--mantine-radius-md)',
          }}
        >
          {/* Top toolbar */}
          <Box
            style={{
              display: 'flex',
              justifyContent: 'space-between',
              alignItems: 'center',
              padding: '16px',
              borderBottom: '1px solid #3f3f46',
            }}
          >
            <Group gap="sm">
              <TextInput
                placeholder="Filter by name..."
                value={nameFilter}
                onChange={(event) => {
                  const value = event.target.value;
                  setNameFilter(value);
                }}
                size="xs"
                style={{ width: 200 }}
              />
              <Select
                placeholder="All"
                value={usageFilter}
                onChange={(value) => setUsageFilter(value)}
                data={[
                  { value: 'all', label: 'All logos' },
                  { value: 'used', label: 'Used only' },
                  { value: 'unused', label: 'Unused only' },
                  { value: 'movies', label: 'Movies logos' },
                  { value: 'series', label: 'Series logos' },
                ]}
                size="xs"
                style={{ width: 120 }}
              />
            </Group>

            <Group gap="sm">
              <Button
                leftSection={<Trash size={16} />}
                variant="light"
                size="xs"
                color="orange"
                onClick={handleCleanupUnused}
                loading={isCleaningUp}
                disabled={unusedLogosCount === 0}
              >
                Cleanup Unused{' '}
                {unusedLogosCount > 0 ? `(${unusedLogosCount})` : ''}
              </Button>

              <Button
                leftSection={<SquareMinus size={18} />}
                variant="default"
                size="xs"
                onClick={handleDeleteSelected}
                disabled={selectedRows.size === 0}
              >
                Delete {selectedRows.size > 0 ? `(${selectedRows.size})` : ''}
              </Button>
            </Group>
          </Box>

          {/* Table container */}
          <Box
            style={{
              position: 'relative',
              borderRadius:
                '0 0 var(--mantine-radius-md) var(--mantine-radius-md)',
            }}
          >
            <Box
              style={{
                overflow: 'auto',
                height: 'calc(100vh - 200px)',
              }}
            >
              <div>
                <LoadingOverlay visible={isLoading} />
                <CustomTable table={table} />
              </div>
            </Box>

            {/* Pagination Controls */}
            <Box
              style={{
                position: 'sticky',
                bottom: 0,
                zIndex: 3,
                backgroundColor: '#27272A',
                borderTop: '1px solid #3f3f46',
              }}
            >
              <Group
                gap={5}
                justify="center"
                style={{
                  padding: 8,
                }}
              >
                <Text size="xs">Page Size</Text>
                <NativeSelect
                  size="xxs"
                  value={String(pageSize)}
                  data={['25', '50', '100', '250']}
                  onChange={(event) => {
                    setPageSize(Number(event.target.value));
                    setCurrentPage(1);
                  }}
                  style={{ paddingRight: 20 }}
                />
                <Pagination
                  total={pageCount}
                  value={currentPage}
                  onChange={setCurrentPage}
                  size="xs"
                  withEdges
                  style={{ paddingRight: 20 }}
                />
                <Text size="xs">{paginationString}</Text>
              </Group>
            </Box>
          </Box>
        </Paper>
      </Stack>

      <ConfirmationDialog
        opened={confirmDeleteOpen}
        onClose={() => {
          setConfirmDeleteOpen(false);
          setDeleteTarget(null);
        }}
        onConfirm={(deleteFiles) => {
          // pass deleteFiles option through
          handleConfirmDelete(deleteFiles);
        }}
        title={
          deleteTarget && deleteTarget.length > 1
            ? 'Delete Multiple Logos'
            : 'Delete Logo'
        }
        message={
          deleteTarget && deleteTarget.length > 1 ? (
            <div>
              Are you sure you want to delete {deleteTarget.length} selected
              logos?
              <Text size="sm" c="dimmed" mt="xs">
                Any movies or series using these logos will have their logo
                removed.
              </Text>
              <Text size="sm" c="dimmed" mt="xs">
                This action cannot be undone.
              </Text>
            </div>
          ) : logoToDelete ? (
            <div>
              Are you sure you want to delete the logo "{logoToDelete.name}"?
              {logoToDelete.movie_count + logoToDelete.series_count > 0 && (
                <Text size="sm" c="orange" mt="xs">
                  This logo is currently used by{' '}
                  {logoToDelete.movie_count + logoToDelete.series_count} item
                  {logoToDelete.movie_count + logoToDelete.series_count !== 1
                    ? 's'
                    : ''}
                  . They will have their logo removed.
                </Text>
              )}
              <Text size="sm" c="dimmed" mt="xs">
                This action cannot be undone.
              </Text>
            </div>
          ) : (
            'Are you sure you want to delete this logo?'
          )
        }
        confirmLabel="Delete"
        cancelLabel="Cancel"
        size="md"
        showDeleteFileOption={
          deleteTarget && deleteTarget.length > 1
            ? Array.from(deleteTarget).some((id) => {
                const logo = logos.find((l) => l.id === id);
                return logo && logo.url && logo.url.startsWith('/data/logos');
              })
            : logoToDelete &&
              logoToDelete.url &&
              logoToDelete.url.startsWith('/data/logos')
        }
        deleteFileLabel={
          deleteTarget && deleteTarget.length > 1
            ? 'Also delete local logo files from disk'
            : 'Also delete logo file from disk'
        }
      />

      <ConfirmationDialog
        opened={confirmCleanupOpen}
        onClose={() => setConfirmCleanupOpen(false)}
        onConfirm={handleConfirmCleanup}
        title="Cleanup Unused Logos"
        message={
          <div>
            Are you sure you want to cleanup {unusedLogosCount} unused logo
            {unusedLogosCount !== 1 ? 's' : ''}?
            <Text size="sm" c="dimmed" mt="xs">
              This will permanently delete all logos that are not currently used
              by any series or movies.
            </Text>
            <Text size="sm" c="dimmed" mt="xs">
              This action cannot be undone.
            </Text>
          </div>
        }
        confirmLabel="Cleanup"
        cancelLabel="Cancel"
        size="md"
        showDeleteFileOption={true}
        deleteFileLabel="Also delete local logo files from disk"
      />
    </Box>
  );
}
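The pagination label in the new table is derived arithmetic rather than stored per-row state; a small worked example of the same computation:

// Server-side pagination bounds for the "X to Y of N" label.
const currentPage = 3, pageSize = 25, totalCount = 60;
const startItem = (currentPage - 1) * pageSize + 1;           // 51
const endItem = Math.min(currentPage * pageSize, totalCount); // 60
// -> "51 to 60 of 60"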
@@ -38,8 +38,7 @@ export const useLogoSelection = () => {
};

/**
 * Hook for channel forms that need only channel-assignable logos
 * (unused + channel-used, excluding VOD-only logos)
 * Hook for channel forms that need channel logos
 */
export const useChannelLogoSelection = () => {
  const [isInitialized, setIsInitialized] = useState(false);

@@ -65,7 +64,7 @@ export const useChannelLogoSelection = () => {
      await fetchChannelAssignableLogos();
      setIsInitialized(true);
    } catch (error) {
      console.error('Failed to load channel-assignable logos:', error);
      console.error('Failed to load channel logos:', error);
    }
  }, [
    backgroundLoading,
@@ -1,13 +1,10 @@
import React, { useState } from 'react';
import useUserAgentsStore from '../store/userAgents';
import M3UsTable from '../components/tables/M3UsTable';
import EPGsTable from '../components/tables/EPGsTable';
import { Box, Stack } from '@mantine/core';

const M3UPage = () => {
  const isLoading = useUserAgentsStore((state) => state.isLoading);
  const error = useUserAgentsStore((state) => state.error);
  if (isLoading) return <div>Loading...</div>;
  if (error) return <div>Error: {error}</div>;
  return (
    <Stack
@ -275,6 +275,7 @@ export default function TVChannelGuide({ startDate, endDate }) {
|
|||
const guideRef = useRef(null);
|
||||
const timelineRef = useRef(null); // New ref for timeline scrolling
|
||||
const listRef = useRef(null);
|
||||
const tvGuideRef = useRef(null); // Ref for the main tv-guide wrapper
|
||||
const isSyncingScroll = useRef(false);
|
||||
const guideScrollLeftRef = useRef(0);
|
||||
const {
|
||||
|
|
@ -506,37 +507,39 @@ export default function TVChannelGuide({ startDate, endDate }) {
|
|||
if (!node) return undefined;
|
||||
|
||||
const handleScroll = () => {
|
||||
const { scrollLeft } = node;
|
||||
if (scrollLeft === guideScrollLeftRef.current) {
|
||||
return;
|
||||
}
|
||||
|
||||
guideScrollLeftRef.current = scrollLeft;
|
||||
setGuideScrollLeft(scrollLeft);
|
||||
|
||||
if (isSyncingScroll.current) {
|
||||
return;
|
||||
}
|
||||
|
||||
const { scrollLeft } = node;
|
||||
|
||||
// Always sync if timeline is out of sync, even if ref matches
|
||||
if (
|
||||
timelineRef.current &&
|
||||
timelineRef.current.scrollLeft !== scrollLeft
|
||||
) {
|
||||
isSyncingScroll.current = true;
|
||||
timelineRef.current.scrollLeft = scrollLeft;
|
||||
guideScrollLeftRef.current = scrollLeft;
|
||||
setGuideScrollLeft(scrollLeft);
|
||||
requestAnimationFrame(() => {
|
||||
isSyncingScroll.current = false;
|
||||
});
|
||||
} else if (scrollLeft !== guideScrollLeftRef.current) {
|
||||
// Update ref even if timeline was already synced
|
||||
guideScrollLeftRef.current = scrollLeft;
|
||||
setGuideScrollLeft(scrollLeft);
|
||||
}
|
||||
};
|
||||
|
||||
node.addEventListener('scroll', handleScroll, { passive: true });
|
||||
|
||||
return () => {
|
||||
node.removeEventListener('scroll', handleScroll);
|
||||
};
|
||||
}, []);
|
||||
|
||||
// Update “now” every second
|
||||
// Update "now" every second
|
||||
useEffect(() => {
|
||||
const interval = setInterval(() => {
|
||||
setNow(dayjs());
|
||||
|
|
@ -544,13 +547,191 @@ export default function TVChannelGuide({ startDate, endDate }) {
|
|||
return () => clearInterval(interval);
|
||||
}, []);
|
||||
|
||||
// Pixel offset for the “now” vertical line
|
||||
// Pixel offset for the "now" vertical line
|
||||
const nowPosition = useMemo(() => {
|
||||
if (now.isBefore(start) || now.isAfter(end)) return -1;
|
||||
const minutesSinceStart = now.diff(start, 'minute');
|
||||
return (minutesSinceStart / MINUTE_INCREMENT) * MINUTE_BLOCK_WIDTH;
|
||||
}, [now, start, end]);
|
||||
|
||||
useEffect(() => {
|
||||
const tvGuide = tvGuideRef.current;
|
||||
|
||||
if (!tvGuide) return undefined;
|
||||
|
||||
const handleContainerWheel = (event) => {
|
||||
const guide = guideRef.current;
|
||||
const timeline = timelineRef.current;
|
||||
|
||||
if (!guide) {
|
||||
return;
|
||||
}
|
||||
|
||||
if (event.deltaX !== 0 || (event.shiftKey && event.deltaY !== 0)) {
|
||||
event.preventDefault();
|
||||
event.stopPropagation();
|
||||
|
||||
const delta = event.deltaX !== 0 ? event.deltaX : event.deltaY;
|
||||
const newScrollLeft = guide.scrollLeft + delta;
|
||||
|
||||
// Set both guide and timeline scroll positions
|
||||
if (typeof guide.scrollTo === 'function') {
|
||||
guide.scrollTo({ left: newScrollLeft, behavior: 'auto' });
|
||||
} else {
|
||||
guide.scrollLeft = newScrollLeft;
|
||||
}
|
||||
|
||||
// Also sync timeline immediately
|
||||
if (timeline) {
|
||||
if (typeof timeline.scrollTo === 'function') {
|
||||
timeline.scrollTo({ left: newScrollLeft, behavior: 'auto' });
|
||||
} else {
|
||||
timeline.scrollLeft = newScrollLeft;
|
||||
}
|
||||
}
|
||||
|
||||
// Update the ref to keep state in sync
|
||||
guideScrollLeftRef.current = newScrollLeft;
|
||||
setGuideScrollLeft(newScrollLeft);
|
||||
}
|
||||
};
|
||||
|
||||
tvGuide.addEventListener('wheel', handleContainerWheel, {
|
||||
passive: false,
|
||||
capture: true,
|
||||
});
|
||||
|
||||
return () => {
|
||||
tvGuide.removeEventListener('wheel', handleContainerWheel, {
|
||||
capture: true,
|
||||
});
|
||||
};
|
||||
}, []);
|
||||
|
||||
// Fallback: continuously monitor for any scroll changes
|
||||
useEffect(() => {
|
||||
let rafId = null;
|
||||
let lastCheck = 0;
|
||||
|
||||
const checkSync = (timestamp) => {
|
||||
// Throttle to check every 100ms instead of every frame
|
||||
if (timestamp - lastCheck > 100) {
|
||||
const guide = guideRef.current;
|
||||
const timeline = timelineRef.current;
|
||||
|
||||
if (guide && timeline && guide.scrollLeft !== timeline.scrollLeft) {
|
||||
timeline.scrollLeft = guide.scrollLeft;
|
||||
guideScrollLeftRef.current = guide.scrollLeft;
|
||||
setGuideScrollLeft(guide.scrollLeft);
|
||||
}
|
||||
lastCheck = timestamp;
|
||||
}
|
||||
|
||||
rafId = requestAnimationFrame(checkSync);
|
||||
};
|
||||
|
||||
rafId = requestAnimationFrame(checkSync);
|
||||
|
||||
return () => {
|
||||
if (rafId) cancelAnimationFrame(rafId);
|
||||
};
|
||||
}, []);

  useEffect(() => {
    const tvGuide = tvGuideRef.current;
    if (!tvGuide) return;

    let lastTouchX = null;
    let isTouching = false;
    let rafId = null;
    let lastScrollLeft = 0;
    let stableFrames = 0;

    const syncScrollPositions = () => {
      const guide = guideRef.current;
      const timeline = timelineRef.current;

      if (!guide || !timeline) return false;

      const currentScroll = guide.scrollLeft;

      // Check if scroll position has changed
      if (currentScroll !== lastScrollLeft) {
        timeline.scrollLeft = currentScroll;
        guideScrollLeftRef.current = currentScroll;
        setGuideScrollLeft(currentScroll);
        lastScrollLeft = currentScroll;
        stableFrames = 0;
        return true; // Still scrolling
      } else {
        stableFrames++;
        return stableFrames < 10; // Continue for 10 stable frames to catch late updates
      }
    };

    const startPolling = () => {
      if (rafId) return; // Already polling

      const poll = () => {
        const shouldContinue = isTouching || syncScrollPositions();

        if (shouldContinue) {
          rafId = requestAnimationFrame(poll);
        } else {
          rafId = null;
        }
      };

      rafId = requestAnimationFrame(poll);
    };

    const handleTouchStart = (e) => {
      if (e.touches.length === 1) {
        const guide = guideRef.current;
        if (guide) {
          lastTouchX = e.touches[0].clientX;
          lastScrollLeft = guide.scrollLeft;
          isTouching = true;
          stableFrames = 0;
          startPolling();
        }
      }
    };

    const handleTouchMove = (e) => {
      if (!isTouching || e.touches.length !== 1) return;
      const guide = guideRef.current;
      if (!guide) return;

      const touchX = e.touches[0].clientX;
      const deltaX = lastTouchX - touchX;
      lastTouchX = touchX;

      if (Math.abs(deltaX) > 0) {
        guide.scrollLeft += deltaX;
      }
    };

    const handleTouchEnd = () => {
      isTouching = false;
      lastTouchX = null;
      // Polling continues until scroll stabilizes
    };

    tvGuide.addEventListener('touchstart', handleTouchStart, { passive: true });
    tvGuide.addEventListener('touchmove', handleTouchMove, { passive: false });
    tvGuide.addEventListener('touchend', handleTouchEnd, { passive: true });
    tvGuide.addEventListener('touchcancel', handleTouchEnd, { passive: true });

    return () => {
      if (rafId) cancelAnimationFrame(rafId);
      tvGuide.removeEventListener('touchstart', handleTouchStart);
      tvGuide.removeEventListener('touchmove', handleTouchMove);
      tvGuide.removeEventListener('touchend', handleTouchEnd);
      tvGuide.removeEventListener('touchcancel', handleTouchEnd);
    };
  }, []);
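
Aside: polling continues after touchend because momentum scrolling keeps changing scrollLeft for a while; the loop only exits once the position has been stable for 10 frames. The settle-detection idea in isolation (sketch only, not part of this PR; read/apply are illustrative):

// Sketch only — not part of this diff.
function pollUntilSettled(read, apply, stableFramesNeeded = 10) {
  let last = read();
  let stable = 0;
  const step = () => {
    const next = read();
    if (next !== last) {
      apply(next);
      last = next;
      stable = 0; // still moving — reset the stability counter
    } else {
      stable++;
    }
    if (stable < stableFramesNeeded) requestAnimationFrame(step);
  };
  requestAnimationFrame(step);
}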

  const syncScrollLeft = useCallback((nextLeft, behavior = 'auto') => {
    const guideNode = guideRef.current;
    const timelineNode = timelineRef.current;

@@ -780,18 +961,18 @@ export default function TVChannelGuide({ startDate, endDate }) {
  }, [now, nowPosition, start, syncScrollLeft]);

  const handleTimelineScroll = useCallback(() => {
    if (!timelineRef.current) {
    if (!timelineRef.current || isSyncingScroll.current) {
      return;
    }

    const nextLeft = timelineRef.current.scrollLeft;
    guideScrollLeftRef.current = nextLeft;
    setGuideScrollLeft(nextLeft);

    if (isSyncingScroll.current) {
    if (nextLeft === guideScrollLeftRef.current) {
      return;
    }

    guideScrollLeftRef.current = nextLeft;
    setGuideScrollLeft(nextLeft);

    isSyncingScroll.current = true;
    if (guideRef.current) {
      if (typeof guideRef.current.scrollTo === 'function') {

@@ -1178,6 +1359,7 @@ export default function TVChannelGuide({ startDate, endDate }) {

  return (
    <Box
      ref={tvGuideRef}
      className="tv-guide"
      style={{
        overflow: 'hidden',
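
Aside: the added isSyncingScroll check is a re-entrancy guard — writing scrollLeft on one pane fires that pane's own scroll handler, which would otherwise write back and loop. The general pattern (sketch only, not part of this PR; names are illustrative):

// Sketch only — not part of this diff.
let isSyncing = false;
function mirrorScroll(sourceEl, targetEl) {
  if (isSyncing) return; // ignore the echo produced by our own write below
  isSyncing = true;
  targetEl.scrollLeft = sourceEl.scrollLeft; // fires targetEl's scroll event
  requestAnimationFrame(() => {
    isSyncing = false; // release once the echoed event has been processed
  });
}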
@@ -1,13 +1,20 @@
import React, { useEffect, useCallback } from 'react';
import { Box, Loader, Center, Text, Stack } from '@mantine/core';
import React, { useEffect, useCallback, useState } from 'react';
import { Box, Tabs, Flex, Text } from '@mantine/core';
import { notifications } from '@mantine/notifications';
import useLogosStore from '../store/logos';
import useVODLogosStore from '../store/vodLogos';
import LogosTable from '../components/tables/LogosTable';
import VODLogosTable from '../components/tables/VODLogosTable';

const LogosPage = () => {
  const { fetchAllLogos, isLoading, needsAllLogos } = useLogosStore();
  const { fetchAllLogos, needsAllLogos, logos } = useLogosStore();
  const { totalCount } = useVODLogosStore();
  const [activeTab, setActiveTab] = useState('channel');

  const loadLogos = useCallback(async () => {
  const channelLogosCount = Object.keys(logos).length;
  const vodLogosCount = totalCount;

  const loadChannelLogos = useCallback(async () => {
    try {
      // Only fetch all logos if we haven't loaded them yet
      if (needsAllLogos()) {
@@ -16,30 +23,74 @@ const LogosPage = () => {
    } catch (err) {
      notifications.show({
        title: 'Error',
        message: 'Failed to load logos',
        message: 'Failed to load channel logos',
        color: 'red',
      });
      console.error('Failed to load logos:', err);
      console.error('Failed to load channel logos:', err);
    }
  }, [fetchAllLogos, needsAllLogos]);

  useEffect(() => {
    loadLogos();
  }, [loadLogos]);
    // Always load channel logos on mount
    loadChannelLogos();
  }, [loadChannelLogos]);

  return (
    <Box style={{ padding: 10 }}>
      {isLoading && (
        <Center style={{ marginBottom: 20 }}>
          <Stack align="center" spacing="sm">
            <Loader size="sm" />
            <Text size="sm" color="dimmed">
              Loading all logos...
    <Box>
      {/* Header with title and tabs */}
      <Box
        style={{
          display: 'flex',
          justifyContent: 'center',
          padding: '10px 0',
        }}
      >
        <Flex
          style={{
            alignItems: 'center',
            justifyContent: 'space-between',
            width: '100%',
            maxWidth: '1200px',
            paddingBottom: 10,
          }}
        >
          <Flex gap={8} align="center">
            <Text
              style={{
                fontFamily: 'Inter, sans-serif',
                fontWeight: 500,
                fontSize: '20px',
                lineHeight: 1,
                letterSpacing: '-0.3px',
                color: 'gray.6',
                marginBottom: 0,
              }}
            >
              Logos
            </Text>
          </Stack>
        </Center>
      )}
      <LogosTable />
            <Text size="sm" c="dimmed">
              ({activeTab === 'channel' ? channelLogosCount : vodLogosCount}{' '}
              logo
              {(activeTab === 'channel' ? channelLogosCount : vodLogosCount) !== 1
                ? 's'
                : ''}
              )
            </Text>
          </Flex>

          <Tabs value={activeTab} onChange={setActiveTab} variant="pills">
            <Tabs.List>
              <Tabs.Tab value="channel">Channel Logos</Tabs.Tab>
              <Tabs.Tab value="vod">VOD Logos</Tabs.Tab>
            </Tabs.List>
          </Tabs>
        </Flex>
      </Box>

      {/* Content based on active tab */}
      {activeTab === 'channel' && <LogosTable />}
      {activeTab === 'vod' && <VODLogosTable />}
    </Box>
  );
};
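
Aside: because the tables are conditionally rendered rather than merely hidden, the VOD table (and whatever data it fetches) only mounts when its tab is first selected. The pattern in miniature (sketch only, not part of this PR; the panel components are illustrative props):

// Sketch only — not part of this diff.
import React, { useState } from 'react';
import { Tabs } from '@mantine/core';

function LazyTabs({ ChannelPanel, VodPanel }) {
  const [tab, setTab] = useState('channel');
  return (
    <>
      <Tabs value={tab} onChange={setTab} variant="pills">
        <Tabs.List>
          <Tabs.Tab value="channel">Channel</Tabs.Tab>
          <Tabs.Tab value="vod">VOD</Tabs.Tab>
        </Tabs.List>
      </Tabs>
      {tab === 'channel' && <ChannelPanel />}
      {tab === 'vod' && <VodPanel />}
    </>
  );
}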

@@ -70,11 +70,13 @@

/* Hide bottom horizontal scrollbar for the guide's virtualized list only */
.tv-guide .guide-list-outer {
  /* Prevent horizontal page scrollbar while preserving internal scroll behavior */
  overflow-x: hidden !important;
  /* Allow horizontal scrolling but hide the scrollbar visually */
  overflow-x: auto !important;
  scrollbar-width: none; /* Firefox */
  -ms-overflow-style: none; /* IE and Edge */
}

/* Also hide scrollbars visually across browsers for the outer container */
.tv-guide .guide-list-outer::-webkit-scrollbar {
  height: 0px;
  display: none; /* Chrome, Safari, Opera */
}
@@ -97,18 +97,29 @@ const useEPGsStore = create((set) => ({
          ? 'success' // Mark as success when progress is 100%
          : state.epgs[data.source]?.status || 'idle';

      // Create a new epgs object with the updated source status
      const newEpgs = {
        ...state.epgs,
        [data.source]: {
          ...state.epgs[data.source],
          status: sourceStatus,
          last_message:
            data.status === 'error'
              ? data.error || 'Unknown error'
              : state.epgs[data.source]?.last_message,
        },
      };
      // Only update epgs object if status or last_message actually changed
      // This prevents unnecessary re-renders on every progress update
      const currentEpg = state.epgs[data.source];
      const newLastMessage =
        data.status === 'error'
          ? data.error || 'Unknown error'
          : currentEpg?.last_message;

      let newEpgs = state.epgs;
      if (
        currentEpg &&
        (currentEpg.status !== sourceStatus ||
          currentEpg.last_message !== newLastMessage)
      ) {
        newEpgs = {
          ...state.epgs,
          [data.source]: {
            ...currentEpg,
            status: sourceStatus,
            last_message: newLastMessage,
          },
        };
      }

      return {
        refreshProgress: newRefreshProgress,
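
Aside: the point of reusing state.epgs when nothing changed is referential equality — zustand selectors that return the same reference skip notifying their subscribers, so progress ticks no longer re-render every EPG consumer. The pattern in isolation (sketch only, not part of this PR; the store shape is illustrative):

// Sketch only — not part of this diff.
import { create } from 'zustand';

const useStatusStore = create((set) => ({
  epgs: {},
  applyStatus: (id, status) =>
    set((state) => {
      const current = state.epgs[id];
      if (!current || current.status === status) {
        return {}; // no change: keep the existing `epgs` reference
      }
      return { epgs: { ...state.epgs, [id]: { ...current, status } } };
    }),
}));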
@@ -3,11 +3,11 @@ import api from '../api';

const useLogosStore = create((set, get) => ({
  logos: {},
  channelLogos: {}, // Keep this for simplicity, but we'll be more careful about when we populate it
  channelLogos: {}, // Separate cache for channel forms to avoid reloading
  isLoading: false,
  backgroundLoading: false,
  hasLoadedAll: false, // Track if we've loaded all logos
  hasLoadedChannelLogos: false, // Track if we've loaded channel-assignable logos
  hasLoadedChannelLogos: false, // Track if we've loaded channel logos
  error: null,

  // Basic CRUD operations
@@ -27,10 +27,9 @@ const useLogosStore = create((set, get) => ({
      ...state.logos,
      [newLogo.id]: { ...newLogo },
    };

    // Add to channelLogos if the user has loaded channel-assignable logos

    // Add to channelLogos if the user has loaded channel logos
    // This means they're using channel forms and the new logo should be available there
    // Newly created logos are channel-assignable (they start unused)
    let newChannelLogos = state.channelLogos;
    if (state.hasLoadedChannelLogos) {
      newChannelLogos = {
@@ -96,11 +95,14 @@ const useLogosStore = create((set, get) => ({
    }
  },

  fetchAllLogos: async () => {
  fetchAllLogos: async (force = false) => {
    const { isLoading, hasLoadedAll, logos } = get();

    // Prevent unnecessary reloading if we already have all logos
    if (isLoading || (hasLoadedAll && Object.keys(logos).length > 0)) {
    if (
      !force &&
      (isLoading || (hasLoadedAll && Object.keys(logos).length > 0))
    ) {
      return Object.values(logos);
    }

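Aside: with the new force flag, a caller can bypass the has-loaded guard when it needs fresh data (hypothetical usage, not from this diff):

// Sketch only — not part of this diff.
const logos = await useLogosStore.getState().fetchAllLogos(true); // force a refetch
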
@@ -173,16 +175,15 @@ const useLogosStore = create((set, get) => ({

    set({ backgroundLoading: true, error: null });
    try {
      // Load logos suitable for channel assignment (unused + channel-used, exclude VOD-only)
      // Load all channel logos (no special filtering needed - all Logo entries are for channels)
      const response = await api.getLogos({
        channel_assignable: 'true',
        no_pagination: 'true', // Get all channel-assignable logos
        no_pagination: 'true', // Get all channel logos
      });

      // Handle both paginated and non-paginated responses
      const logos = Array.isArray(response) ? response : response.results || [];

      console.log(`Fetched ${logos.length} channel-assignable logos`);
      console.log(`Fetched ${logos.length} channel logos`);

      // Store in both places, but this is intentional and only when specifically requested
      set({
@@ -203,9 +204,9 @@ const useLogosStore = create((set, get) => ({

      return logos;
    } catch (error) {
      console.error('Failed to fetch channel-assignable logos:', error);
      console.error('Failed to fetch channel logos:', error);
      set({
        error: 'Failed to load channel-assignable logos.',
        error: 'Failed to load channel logos.',
        backgroundLoading: false,
      });
      throw error;
@@ -327,7 +328,7 @@ const useLogosStore = create((set, get) => ({
    }, 0); // Execute immediately but asynchronously
  },

  // Background loading specifically for channel-assignable logos after login
  // Background loading for channel logos after login
  backgroundLoadChannelLogos: async () => {
    const { backgroundLoading, channelLogos, hasLoadedChannelLogos } = get();
@@ -342,10 +343,10 @@ const useLogosStore = create((set, get) => ({

    set({ backgroundLoading: true });
    try {
      console.log('Background loading channel-assignable logos...');
      console.log('Background loading channel logos...');
      await get().fetchChannelAssignableLogos();
      console.log(
        `Background loaded ${Object.keys(get().channelLogos).length} channel-assignable logos`
        `Background loaded ${Object.keys(get().channelLogos).length} channel logos`
      );
    } catch (error) {
      console.error('Background channel logo loading failed:', error);
130 frontend/src/store/vodLogos.jsx Normal file

@@ -0,0 +1,130 @@
import { create } from 'zustand';
import api from '../api';

const useVODLogosStore = create((set) => ({
  vodLogos: {},
  logos: [],
  isLoading: false,
  hasLoaded: false,
  error: null,
  totalCount: 0,
  currentPage: 1,
  pageSize: 25,

  setVODLogos: (logos, totalCount = 0) => {
    set({
      vodLogos: logos.reduce((acc, logo) => {
        acc[logo.id] = { ...logo };
        return acc;
      }, {}),
      totalCount,
      hasLoaded: true,
    });
  },

  removeVODLogo: (logoId) =>
    set((state) => {
      const newVODLogos = { ...state.vodLogos };
      delete newVODLogos[logoId];
      return {
        vodLogos: newVODLogos,
        totalCount: Math.max(0, state.totalCount - 1),
      };
    }),

  fetchVODLogos: async (params = {}) => {
    set({ isLoading: true, error: null });
    try {
      const response = await api.getVODLogos(params);

      // Handle both paginated and non-paginated responses
      const logos = Array.isArray(response) ? response : response.results || [];
      const total = response.count || logos.length;

      set({
        vodLogos: logos.reduce((acc, logo) => {
          acc[logo.id] = { ...logo };
          return acc;
        }, {}),
        logos: logos,
        totalCount: total,
        isLoading: false,
        hasLoaded: true,
      });
      return response;
    } catch (error) {
      console.error('Failed to fetch VOD logos:', error);
      set({ error: 'Failed to load VOD logos.', isLoading: false });
      throw error;
    }
  },

  deleteVODLogo: async (logoId) => {
    try {
      await api.deleteVODLogo(logoId);
      set((state) => {
        const newVODLogos = { ...state.vodLogos };
        delete newVODLogos[logoId];
        const newLogos = state.logos.filter((logo) => logo.id !== logoId);
        return {
          vodLogos: newVODLogos,
          logos: newLogos,
          totalCount: Math.max(0, state.totalCount - 1),
        };
      });
    } catch (error) {
      console.error('Failed to delete VOD logo:', error);
      throw error;
    }
  },

  deleteVODLogos: async (logoIds) => {
    try {
      await api.deleteVODLogos(logoIds);
      set((state) => {
        const newVODLogos = { ...state.vodLogos };
        logoIds.forEach((id) => delete newVODLogos[id]);
        const logoIdSet = new Set(logoIds);
        const newLogos = state.logos.filter((logo) => !logoIdSet.has(logo.id));
        return {
          vodLogos: newVODLogos,
          logos: newLogos,
          totalCount: Math.max(0, state.totalCount - logoIds.length),
        };
      });
    } catch (error) {
      console.error('Failed to delete VOD logos:', error);
      throw error;
    }
  },

  cleanupUnusedVODLogos: async () => {
    try {
      const result = await api.cleanupUnusedVODLogos();

      // Refresh the logos after cleanup
      const state = useVODLogosStore.getState();
      await state.fetchVODLogos({
        page: state.currentPage,
        page_size: state.pageSize,
      });

      return result;
    } catch (error) {
      console.error('Failed to cleanup unused VOD logos:', error);
      throw error;
    }
  },

  clearVODLogos: () => {
    set({
      vodLogos: {},
      logos: [],
      hasLoaded: false,
      totalCount: 0,
      error: null,
    });
  },
}));

export default useVODLogosStore;
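
Aside: a component might consume the new store like this (sketch only, not part of this PR; the selectors keep re-renders scoped to the fields actually used, and page/page_size mirror the params cleanupUnusedVODLogos passes above):

// Sketch only — not part of this diff.
import React, { useEffect } from 'react';
import { Text } from '@mantine/core';
import useVODLogosStore from '../store/vodLogos';

function VODLogoCount() {
  const totalCount = useVODLogosStore((s) => s.totalCount);
  const fetchVODLogos = useVODLogosStore((s) => s.fetchVODLogos);

  useEffect(() => {
    fetchVODLogos({ page: 1, page_size: 25 }).catch(() => {});
  }, [fetchVODLogos]);

  return <Text size="sm">{totalCount} VOD logos</Text>;
}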