Mirror of https://github.com/Dispatcharr/Dispatcharr.git (synced 2026-01-22 18:28:00 +00:00)

commit 993e88b49c: Merge remote-tracking branch 'origin/dev' into virtfs

27 changed files with 2627 additions and 134 deletions
@@ -0,0 +1,54 @@
+# Generated migration to backfill stream_hash for existing custom streams
+
+from django.db import migrations
+import hashlib
+
+
+def backfill_custom_stream_hashes(apps, schema_editor):
+    """
+    Generate stream_hash for all custom streams that don't have one.
+    Uses stream ID to create a stable hash that won't change when name/url is edited.
+    """
+    Stream = apps.get_model('dispatcharr_channels', 'Stream')
+
+    custom_streams_without_hash = Stream.objects.filter(
+        is_custom=True,
+        stream_hash__isnull=True
+    )
+
+    updated_count = 0
+    for stream in custom_streams_without_hash:
+        # Generate a stable hash using the stream's ID
+        # This ensures the hash never changes even if name/url is edited
+        unique_string = f"custom_stream_{stream.id}"
+        stream.stream_hash = hashlib.sha256(unique_string.encode()).hexdigest()
+        stream.save(update_fields=['stream_hash'])
+        updated_count += 1
+
+    if updated_count > 0:
+        print(f"Backfilled stream_hash for {updated_count} custom streams")
+    else:
+        print("No custom streams needed stream_hash backfill")
+
+
+def reverse_backfill(apps, schema_editor):
+    """
+    Reverse migration - clear stream_hash for custom streams.
+    Note: This will break preview functionality for custom streams.
+    """
+    Stream = apps.get_model('dispatcharr_channels', 'Stream')
+
+    custom_streams = Stream.objects.filter(is_custom=True)
+    count = custom_streams.update(stream_hash=None)
+    print(f"Cleared stream_hash for {count} custom streams")
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ('dispatcharr_channels', '0028_channel_created_at_channel_updated_at'),
+    ]
+
+    operations = [
+        migrations.RunPython(backfill_custom_stream_hashes, reverse_backfill),
+    ]
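The scheme hashes only the primary key, so the value survives any later edit to the stream's name or URL. A minimal sketch of that property (same hashlib call as the migration; the helper name is illustrative):

import hashlib

def custom_stream_hash(stream_id: int) -> str:
    # Depends only on the immutable primary key, never on name/url
    return hashlib.sha256(f"custom_stream_{stream_id}".encode()).hexdigest()

# Editing the stream later cannot change its hash:
assert custom_stream_hash(42) == custom_stream_hash(42)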
@@ -152,8 +152,14 @@ class Stream(models.Model):
         stream = cls.objects.create(**fields_to_update)
         return stream, True  # True means it was created

-    # @TODO: honor stream's stream profile
     def get_stream_profile(self):
+        """
+        Get the stream profile for this stream.
+        Uses the stream's own profile if set, otherwise returns the default.
+        """
+        if self.stream_profile:
+            return self.stream_profile
+
         stream_profile = StreamProfile.objects.get(
             id=CoreSettings.get_default_stream_profile_id()
         )
@@ -45,6 +45,20 @@ def set_default_m3u_account(sender, instance, **kwargs):
         else:
             raise ValueError("No default M3UAccount found.")

+@receiver(post_save, sender=Stream)
+def generate_custom_stream_hash(sender, instance, created, **kwargs):
+    """
+    Generate a stable stream_hash for custom streams after creation.
+    Uses the stream's ID to ensure the hash never changes even if name/url is edited.
+    """
+    if instance.is_custom and not instance.stream_hash and created:
+        import hashlib
+        # Use stream ID for a stable, unique hash that never changes
+        unique_string = f"custom_stream_{instance.id}"
+        instance.stream_hash = hashlib.sha256(unique_string.encode()).hexdigest()
+        # Use update to avoid triggering signals again
+        Stream.objects.filter(id=instance.id).update(stream_hash=instance.stream_hash)
+
 @receiver(post_save, sender=Channel)
 def refresh_epg_programs(sender, instance, created, **kwargs):
     """
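Why the second write goes through a queryset: calling instance.save() inside a post_save receiver would dispatch post_save again. A short sketch of the distinction (standard Django behavior; compute_hash is a hypothetical stand-in for the hashlib lines above):

# Inside a post_save receiver:
instance.stream_hash = compute_hash(instance)   # hypothetical helper
# instance.save()  -> would fire post_save again (recursion risk)
Stream.objects.filter(id=instance.id).update(   # single UPDATE, bypasses signals
    stream_hash=instance.stream_hash
)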
@@ -147,23 +147,37 @@ class EPGGridAPIView(APIView):
             f"EPGGridAPIView: Found {count} program(s), including recently ended, currently running, and upcoming shows."
         )

-        # Generate dummy programs for channels that have no EPG data
+        # Generate dummy programs for channels that have no EPG data OR dummy EPG sources
         from apps.channels.models import Channel
+        from apps.epg.models import EPGSource
         from django.db.models import Q

-        # Get channels with no EPG data
+        # Get channels with no EPG data at all (standard dummy)
         channels_without_epg = Channel.objects.filter(Q(epg_data__isnull=True))
-        channels_count = channels_without_epg.count()

-        # Log more detailed information about channels missing EPG data
-        if channels_count > 0:
+        # Get channels with custom dummy EPG sources (generate on-demand with patterns)
+        channels_with_custom_dummy = Channel.objects.filter(
+            epg_data__epg_source__source_type='dummy'
+        ).distinct()
+
+        # Log what we found
+        without_count = channels_without_epg.count()
+        custom_count = channels_with_custom_dummy.count()
+
+        if without_count > 0:
             channel_names = [f"{ch.name} (ID: {ch.id})" for ch in channels_without_epg]
-            logger.warning(
-                f"EPGGridAPIView: Missing EPG data for these channels: {', '.join(channel_names)}"
+            logger.debug(
+                f"EPGGridAPIView: Channels needing standard dummy EPG: {', '.join(channel_names)}"
             )

+        if custom_count > 0:
+            channel_names = [f"{ch.name} (ID: {ch.id})" for ch in channels_with_custom_dummy]
+            logger.debug(
+                f"EPGGridAPIView: Channels needing custom dummy EPG: {', '.join(channel_names)}"
+            )
+
         logger.debug(
-            f"EPGGridAPIView: Found {channels_count} channels with no EPG data."
+            f"EPGGridAPIView: Found {without_count} channels needing standard dummy, {custom_count} needing custom dummy EPG."
        )

         # Serialize the regular programs
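Note the .distinct() on the second queryset: filtering across the epg_data__epg_source join can yield one row per joined record, so without it a channel could be handled twice. A standalone sketch of the two disjoint sets (model names taken from the diff):

from django.db.models import Q
from apps.channels.models import Channel

channels_without_epg = Channel.objects.filter(Q(epg_data__isnull=True))
channels_with_custom_dummy = Channel.objects.filter(
    epg_data__epg_source__source_type='dummy'
).distinct()  # guard against join-induced duplicate rows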
@@ -205,12 +219,91 @@ class EPGGridAPIView(APIView):

         # Generate and append dummy programs
         dummy_programs = []
-        for channel in channels_without_epg:
-            # Use the channel UUID as tvg_id for dummy programs to match in the guide
+
+        # Import the function from output.views
+        from apps.output.views import generate_dummy_programs as gen_dummy_progs
+
+        # Handle channels with CUSTOM dummy EPG sources (with patterns)
+        for channel in channels_with_custom_dummy:
+            # For dummy EPGs, ALWAYS use channel UUID to ensure unique programs per channel
+            # This prevents multiple channels assigned to the same dummy EPG from showing identical data
+            # Each channel gets its own unique program data even if they share the same EPG source
             dummy_tvg_id = str(channel.uuid)

             try:
-                # Create programs every 4 hours for the next 24 hours
+                # Get the custom dummy EPG source
+                epg_source = channel.epg_data.epg_source if channel.epg_data else None
+
+                logger.debug(f"Generating custom dummy programs for channel: {channel.name} (ID: {channel.id})")
+
+                # Determine which name to parse based on custom properties
+                name_to_parse = channel.name
+                if epg_source and epg_source.custom_properties:
+                    custom_props = epg_source.custom_properties
+                    name_source = custom_props.get('name_source')
+
+                    if name_source == 'stream':
+                        # Get the stream index (1-based from user, convert to 0-based)
+                        stream_index = custom_props.get('stream_index', 1) - 1
+
+                        # Get streams ordered by channelstream order
+                        channel_streams = channel.streams.all().order_by('channelstream__order')
+
+                        if channel_streams.exists() and 0 <= stream_index < channel_streams.count():
+                            stream = list(channel_streams)[stream_index]
+                            name_to_parse = stream.name
+                            logger.debug(f"Using stream name for parsing: {name_to_parse} (stream index: {stream_index})")
+                        else:
+                            logger.warning(f"Stream index {stream_index} not found for channel {channel.name}, falling back to channel name")
+                    elif name_source == 'channel':
+                        logger.debug(f"Using channel name for parsing: {name_to_parse}")
+
+                # Generate programs using custom patterns from the dummy EPG source
+                # Use the same tvg_id that will be set in the program data
+                generated = gen_dummy_progs(
+                    channel_id=dummy_tvg_id,
+                    channel_name=name_to_parse,
+                    num_days=1,
+                    program_length_hours=4,
+                    epg_source=epg_source
+                )
+
+                # Custom dummy should always return data (either from patterns or fallback)
+                if generated:
+                    logger.debug(f"Generated {len(generated)} custom dummy programs for {channel.name}")
+                    # Convert generated programs to API format
+                    for program in generated:
+                        dummy_program = {
+                            "id": f"dummy-custom-{channel.id}-{program['start_time'].hour}",
+                            "epg": {"tvg_id": dummy_tvg_id, "name": channel.name},
+                            "start_time": program['start_time'].isoformat(),
+                            "end_time": program['end_time'].isoformat(),
+                            "title": program['title'],
+                            "description": program['description'],
+                            "tvg_id": dummy_tvg_id,
+                            "sub_title": None,
+                            "custom_properties": None,
+                        }
+                        dummy_programs.append(dummy_program)
+                else:
+                    logger.warning(f"No programs generated for custom dummy EPG channel: {channel.name}")
+
+            except Exception as e:
+                logger.error(
+                    f"Error creating custom dummy programs for channel {channel.name} (ID: {channel.id}): {str(e)}"
+                )
+
+        # Handle channels with NO EPG data (standard dummy with humorous descriptions)
+        for channel in channels_without_epg:
+            # For channels with no EPG, use UUID to ensure uniqueness (matches frontend logic)
+            # The frontend uses: tvgRecord?.tvg_id ?? channel.uuid
+            # Since there's no EPG data, it will fall back to UUID
+            dummy_tvg_id = str(channel.uuid)
+
+            try:
+                logger.debug(f"Generating standard dummy programs for channel: {channel.name} (ID: {channel.id})")
+
+                # Create programs every 4 hours for the next 24 hours with humorous descriptions
                 for hour_offset in range(0, 24, 4):
                     # Use timedelta for time arithmetic instead of replace() to avoid hour overflow
                     start_time = now + timedelta(hours=hour_offset)
@@ -238,7 +331,7 @@ class EPGGridAPIView(APIView):

                     # Create a dummy program in the same format as regular programs
                     dummy_program = {
-                        "id": f"dummy-{channel.id}-{hour_offset}",  # Create a unique ID
+                        "id": f"dummy-standard-{channel.id}-{hour_offset}",
                         "epg": {"tvg_id": dummy_tvg_id, "name": channel.name},
                         "start_time": start_time.isoformat(),
                         "end_time": end_time.isoformat(),
@@ -252,7 +345,7 @@ class EPGGridAPIView(APIView):

             except Exception as e:
                 logger.error(
-                    f"Error creating dummy programs for channel {channel.name} (ID: {channel.id}): {str(e)}"
+                    f"Error creating standard dummy programs for channel {channel.name} (ID: {channel.id}): {str(e)}"
                 )

         # Combine regular and dummy programs
@@ -284,7 +377,22 @@ class EPGImportAPIView(APIView):
     )
     def post(self, request, format=None):
         logger.info("EPGImportAPIView: Received request to import EPG data.")
-        refresh_epg_data.delay(request.data.get("id", None))  # Trigger Celery task
+        epg_id = request.data.get("id", None)
+
+        # Check if this is a dummy EPG source
+        try:
+            from .models import EPGSource
+            epg_source = EPGSource.objects.get(id=epg_id)
+            if epg_source.source_type == 'dummy':
+                logger.info(f"EPGImportAPIView: Skipping refresh for dummy EPG source {epg_id}")
+                return Response(
+                    {"success": False, "message": "Dummy EPG sources do not require refreshing."},
+                    status=status.HTTP_400_BAD_REQUEST,
+                )
+        except EPGSource.DoesNotExist:
+            pass  # Let the task handle the missing source
+
+        refresh_epg_data.delay(epg_id)  # Trigger Celery task
         logger.info("EPGImportAPIView: Task dispatched to refresh EPG data.")
         return Response(
             {"success": True, "message": "EPG data import initiated."},
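From a client's perspective, the guard turns a refresh request for a dummy source into an immediate 400. A hedged example with requests (the URL path and port are assumptions, not taken from this diff):

import requests

# Hypothetical endpoint and port; adjust to your deployment.
resp = requests.post("http://localhost:9191/api/epg/import/", json={"id": 5})
if resp.status_code == 400:
    print(resp.json()["message"])  # "Dummy EPG sources do not require refreshing."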
@@ -308,3 +416,4 @@ class EPGDataViewSet(viewsets.ReadOnlyModelViewSet):
             return [perm() for perm in permission_classes_by_action[self.action]]
         except KeyError:
             return [Authenticated()]
+
@@ -0,0 +1,23 @@
+# Generated by Django 5.2.4 on 2025-10-17 17:02
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ('epg', '0017_alter_epgsource_url'),
+    ]
+
+    operations = [
+        migrations.AddField(
+            model_name='epgsource',
+            name='custom_properties',
+            field=models.JSONField(blank=True, default=dict, help_text='Custom properties for dummy EPG configuration (regex patterns, timezone, duration, etc.)', null=True),
+        ),
+        migrations.AlterField(
+            model_name='epgsource',
+            name='source_type',
+            field=models.CharField(choices=[('xmltv', 'XMLTV URL'), ('schedules_direct', 'Schedules Direct API'), ('dummy', 'Custom Dummy EPG')], max_length=20),
+        ),
+    ]
@@ -8,6 +8,7 @@ class EPGSource(models.Model):
     SOURCE_TYPE_CHOICES = [
         ('xmltv', 'XMLTV URL'),
         ('schedules_direct', 'Schedules Direct API'),
+        ('dummy', 'Custom Dummy EPG'),
     ]

     STATUS_IDLE = 'idle'
@@ -38,6 +39,12 @@ class EPGSource(models.Model):
     refresh_task = models.ForeignKey(
         PeriodicTask, on_delete=models.SET_NULL, null=True, blank=True
     )
+    custom_properties = models.JSONField(
+        default=dict,
+        blank=True,
+        null=True,
+        help_text="Custom properties for dummy EPG configuration (regex patterns, timezone, duration, etc.)"
+    )
     status = models.CharField(
         max_length=20,
         choices=STATUS_CHOICES,
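With the new choice and field in place, a custom dummy source can be created directly; a minimal sketch (field names from the model above, property keys from the generator added later in this commit; other fields are assumed to take their defaults):

source = EPGSource.objects.create(
    name="Sports Events",
    source_type='dummy',
    custom_properties={
        "title_pattern": r"(?P<title>.+)",  # minimal pattern; see generate_custom_dummy_programs
        "timezone": "UTC",
        "program_duration": 180,
    },
)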
@@ -28,6 +28,7 @@ class EPGSourceSerializer(serializers.ModelSerializer):
             'last_message',
             'created_at',
             'updated_at',
+            'custom_properties',
             'epg_data_ids'
         ]
@@ -1,9 +1,9 @@
 from django.db.models.signals import post_save, post_delete, pre_save
 from django.dispatch import receiver
-from .models import EPGSource
+from .models import EPGSource, EPGData
 from .tasks import refresh_epg_data, delete_epg_refresh_task_by_id
 from django_celery_beat.models import PeriodicTask, IntervalSchedule
-from core.utils import is_protected_path
+from core.utils import is_protected_path, send_websocket_update
 import json
 import logging
 import os
@@ -12,15 +12,77 @@ logger = logging.getLogger(__name__)

 @receiver(post_save, sender=EPGSource)
 def trigger_refresh_on_new_epg_source(sender, instance, created, **kwargs):
-    # Trigger refresh only if the source is newly created and active
-    if created and instance.is_active:
+    # Trigger refresh only if the source is newly created, active, and not a dummy EPG
+    if created and instance.is_active and instance.source_type != 'dummy':
         refresh_epg_data.delay(instance.id)

+@receiver(post_save, sender=EPGSource)
+def create_dummy_epg_data(sender, instance, created, **kwargs):
+    """
+    Automatically create EPGData for dummy EPG sources when they are created.
+    This allows channels to be assigned to dummy EPGs immediately without
+    requiring a refresh first.
+    """
+    if instance.source_type == 'dummy':
+        # Ensure dummy EPGs always have idle status and no status message
+        if instance.status != EPGSource.STATUS_IDLE or instance.last_message:
+            instance.status = EPGSource.STATUS_IDLE
+            instance.last_message = None
+            instance.save(update_fields=['status', 'last_message'])
+
+        # Create a URL-friendly tvg_id from the dummy EPG name
+        # Replace spaces and special characters with underscores
+        friendly_tvg_id = instance.name.replace(' ', '_').replace('-', '_')
+        # Remove any characters that aren't alphanumeric or underscores
+        friendly_tvg_id = ''.join(c for c in friendly_tvg_id if c.isalnum() or c == '_')
+        # Convert to lowercase for consistency
+        friendly_tvg_id = friendly_tvg_id.lower()
+        # Prefix with 'dummy_' to make it clear this is a dummy EPG
+        friendly_tvg_id = f"dummy_{friendly_tvg_id}"
+
+        # Create or update the EPGData record
+        epg_data, data_created = EPGData.objects.get_or_create(
+            tvg_id=friendly_tvg_id,
+            epg_source=instance,
+            defaults={
+                'name': instance.name,
+                'icon_url': None
+            }
+        )
+
+        # Update name if it changed and record already existed
+        if not data_created and epg_data.name != instance.name:
+            epg_data.name = instance.name
+            epg_data.save(update_fields=['name'])
+
+        if data_created:
+            logger.info(f"Auto-created EPGData for dummy EPG source: {instance.name} (ID: {instance.id})")
+
+            # Send websocket update to notify frontend that EPG data has been created
+            # This allows the channel form to immediately show the new dummy EPG without refreshing
+            send_websocket_update('updates', 'update', {
+                'type': 'epg_data_created',
+                'source_id': instance.id,
+                'source_name': instance.name,
+                'epg_data_id': epg_data.id
+            })
+        else:
+            logger.debug(f"EPGData already exists for dummy EPG source: {instance.name} (ID: {instance.id})")
+
 @receiver(post_save, sender=EPGSource)
 def create_or_update_refresh_task(sender, instance, **kwargs):
     """
     Create or update a Celery Beat periodic task when an EPGSource is created/updated.
+    Skip creating tasks for dummy EPG sources as they don't need refreshing.
     """
+    # Skip task creation for dummy EPGs
+    if instance.source_type == 'dummy':
+        # If there's an existing task, disable it
+        if instance.refresh_task:
+            instance.refresh_task.enabled = False
+            instance.refresh_task.save(update_fields=['enabled'])
+        return
+
     task_name = f"epg_source-refresh-{instance.id}"
     interval, _ = IntervalSchedule.objects.get_or_create(
         every=int(instance.refresh_interval),
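The tvg_id slug is derived purely from the source name. A self-contained sketch of the same transformation with an example input:

def friendly_tvg_id(name: str) -> str:
    s = name.replace(' ', '_').replace('-', '_')
    s = ''.join(c for c in s if c.isalnum() or c == '_')
    return f"dummy_{s.lower()}"

print(friendly_tvg_id("My Sports-Events EPG"))  # -> dummy_my_sports_events_epg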
@@ -80,7 +142,14 @@ def delete_refresh_task(sender, instance, **kwargs):
 def update_status_on_active_change(sender, instance, **kwargs):
     """
     When an EPGSource's is_active field changes, update the status accordingly.
+    For dummy EPGs, always ensure status is idle and no status message.
     """
+    # Dummy EPGs should always be idle with no status message
+    if instance.source_type == 'dummy':
+        instance.status = EPGSource.STATUS_IDLE
+        instance.last_message = None
+        return
+
     if instance.pk:  # Only for existing records, not new ones
         try:
             # Get the current record from the database
@@ -133,8 +133,9 @@ def delete_epg_refresh_task_by_id(epg_id):
 @shared_task
 def refresh_all_epg_data():
     logger.info("Starting refresh_epg_data task.")
-    active_sources = EPGSource.objects.filter(is_active=True)
-    logger.debug(f"Found {active_sources.count()} active EPGSource(s).")
+    # Exclude dummy EPG sources from refresh - they don't need refreshing
+    active_sources = EPGSource.objects.filter(is_active=True).exclude(source_type='dummy')
+    logger.debug(f"Found {active_sources.count()} active EPGSource(s) (excluding dummy EPGs).")

     for source in active_sources:
         refresh_epg_data(source.id)
@@ -180,6 +181,13 @@ def refresh_epg_data(source_id):
         gc.collect()
         return

+    # Skip refresh for dummy EPG sources - they don't need refreshing
+    if source.source_type == 'dummy':
+        logger.info(f"Skipping refresh for dummy EPG source {source.name} (ID: {source_id})")
+        release_task_lock('refresh_epg_data', source_id)
+        gc.collect()
+        return
+
     # Continue with the normal processing...
     logger.info(f"Processing EPGSource: {source.name} (type: {source.source_type})")
     if source.source_type == 'xmltv':
@@ -1943,3 +1951,20 @@ def detect_file_format(file_path=None, content=None):

     # If we reach here, we couldn't reliably determine the format
     return format_type, is_compressed, file_extension
+
+
+def generate_dummy_epg(source):
+    """
+    DEPRECATED: This function is no longer used.
+
+    Dummy EPG programs are now generated on-demand when they are requested
+    (during XMLTV export or EPG grid display), rather than being pre-generated
+    and stored in the database.
+
+    See: apps/output/views.py - generate_custom_dummy_programs()
+
+    This function remains for backward compatibility but should not be called.
+    """
+    logger.warning(f"generate_dummy_epg() called for {source.name} but this function is deprecated. "
+                   f"Dummy EPG programs are now generated on-demand.")
+    return True
@@ -1548,7 +1548,7 @@ def sync_auto_channels(account_id, scan_start_time=None):

         # Get force_dummy_epg, group_override, and regex patterns from group custom_properties
         group_custom_props = {}
-        force_dummy_epg = False
+        force_dummy_epg = False  # Backward compatibility: legacy option to disable EPG
         override_group_id = None
         name_regex_pattern = None
         name_replace_pattern = None
@@ -1558,6 +1558,7 @@ def sync_auto_channels(account_id, scan_start_time=None):
         channel_sort_reverse = False
         stream_profile_id = None
         custom_logo_id = None
+        custom_epg_id = None  # New option: select specific EPG source (takes priority over force_dummy_epg)
         if group_relation.custom_properties:
             group_custom_props = group_relation.custom_properties
             force_dummy_epg = group_custom_props.get("force_dummy_epg", False)
@@ -1568,6 +1569,7 @@ def sync_auto_channels(account_id, scan_start_time=None):
             )
             name_match_regex = group_custom_props.get("name_match_regex")
             channel_profile_ids = group_custom_props.get("channel_profile_ids")
+            custom_epg_id = group_custom_props.get("custom_epg_id")
             channel_sort_order = group_custom_props.get("channel_sort_order")
             channel_sort_reverse = group_custom_props.get(
                 "channel_sort_reverse", False
@@ -1828,7 +1830,25 @@ def sync_auto_channels(account_id, scan_start_time=None):

             # Handle logo updates
             current_logo = None
-            if stream.logo_url:
+            if custom_logo_id:
+                # Use the custom logo specified in group settings
+                from apps.channels.models import Logo
+                try:
+                    current_logo = Logo.objects.get(id=custom_logo_id)
+                except Logo.DoesNotExist:
+                    logger.warning(
+                        f"Custom logo with ID {custom_logo_id} not found for existing channel, falling back to stream logo"
+                    )
+                    # Fall back to stream logo if custom logo not found
+                    if stream.logo_url:
+                        current_logo, _ = Logo.objects.get_or_create(
+                            url=stream.logo_url,
+                            defaults={
+                                "name": stream.name or stream.tvg_id or "Unknown"
+                            },
+                        )
+            elif stream.logo_url:
+                # No custom logo configured, use stream logo
                 from apps.channels.models import Logo

                 current_logo, _ = Logo.objects.get_or_create(
@@ -1844,10 +1864,42 @@ def sync_auto_channels(account_id, scan_start_time=None):

             # Handle EPG data updates
             current_epg_data = None
-            if stream.tvg_id and not force_dummy_epg:
+            if custom_epg_id:
+                # Use the custom EPG specified in group settings (e.g., a dummy EPG)
+                from apps.epg.models import EPGSource
+                try:
+                    epg_source = EPGSource.objects.get(id=custom_epg_id)
+                    # For dummy EPGs, select the first (and typically only) EPGData entry from this source
+                    if epg_source.source_type == 'dummy':
+                        current_epg_data = EPGData.objects.filter(
+                            epg_source=epg_source
+                        ).first()
+                        if not current_epg_data:
+                            logger.warning(
+                                f"No EPGData found for dummy EPG source {epg_source.name} (ID: {custom_epg_id})"
+                            )
+                    else:
+                        # For non-dummy sources, try to find existing EPGData by tvg_id
+                        if stream.tvg_id:
+                            current_epg_data = EPGData.objects.filter(
+                                tvg_id=stream.tvg_id,
+                                epg_source=epg_source
+                            ).first()
+                except EPGSource.DoesNotExist:
+                    logger.warning(
+                        f"Custom EPG source with ID {custom_epg_id} not found for existing channel, falling back to auto-match"
+                    )
+                    # Fall back to auto-match by tvg_id
+                    if stream.tvg_id and not force_dummy_epg:
+                        current_epg_data = EPGData.objects.filter(
+                            tvg_id=stream.tvg_id
+                        ).first()
+            elif stream.tvg_id and not force_dummy_epg:
+                # Auto-match EPG by tvg_id (original behavior)
                 current_epg_data = EPGData.objects.filter(
                     tvg_id=stream.tvg_id
                 ).first()
+            # If force_dummy_epg is True and no custom_epg_id, current_epg_data stays None

             if existing_channel.epg_data != current_epg_data:
                 existing_channel.epg_data = current_epg_data
@@ -1937,14 +1989,55 @@ def sync_auto_channels(account_id, scan_start_time=None):
             ChannelProfileMembership.objects.bulk_create(memberships)

             # Try to match EPG data
-            if stream.tvg_id and not force_dummy_epg:
+            if custom_epg_id:
+                # Use the custom EPG specified in group settings (e.g., a dummy EPG)
+                from apps.epg.models import EPGSource
+                try:
+                    epg_source = EPGSource.objects.get(id=custom_epg_id)
+                    # For dummy EPGs, select the first (and typically only) EPGData entry from this source
+                    if epg_source.source_type == 'dummy':
+                        epg_data = EPGData.objects.filter(
+                            epg_source=epg_source
+                        ).first()
+                        if epg_data:
+                            channel.epg_data = epg_data
+                            channel.save(update_fields=["epg_data"])
+                        else:
+                            logger.warning(
+                                f"No EPGData found for dummy EPG source {epg_source.name} (ID: {custom_epg_id})"
+                            )
+                    else:
+                        # For non-dummy sources, try to find existing EPGData by tvg_id
+                        if stream.tvg_id:
+                            epg_data = EPGData.objects.filter(
+                                tvg_id=stream.tvg_id,
+                                epg_source=epg_source
+                            ).first()
+                            if epg_data:
+                                channel.epg_data = epg_data
+                                channel.save(update_fields=["epg_data"])
+                except EPGSource.DoesNotExist:
+                    logger.warning(
+                        f"Custom EPG source with ID {custom_epg_id} not found, falling back to auto-match"
+                    )
+                    # Fall back to auto-match by tvg_id
+                    if stream.tvg_id and not force_dummy_epg:
+                        epg_data = EPGData.objects.filter(
+                            tvg_id=stream.tvg_id
+                        ).first()
+                        if epg_data:
+                            channel.epg_data = epg_data
+                            channel.save(update_fields=["epg_data"])
+            elif stream.tvg_id and not force_dummy_epg:
+                # Auto-match EPG by tvg_id (original behavior)
                 epg_data = EPGData.objects.filter(
                     tvg_id=stream.tvg_id
                 ).first()
                 if epg_data:
                     channel.epg_data = epg_data
                     channel.save(update_fields=["epg_data"])
-            elif stream.tvg_id and force_dummy_epg:
+            elif force_dummy_epg:
+                # Force dummy EPG with no custom EPG selected (set to None)
                 channel.epg_data = None
                 channel.save(update_fields=["epg_data"])
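Both hunks above implement the same precedence. Condensed as a pure function (illustrative only; no such helper exists in the codebase):

def resolve_epg_choice(custom_epg_id, tvg_id, force_dummy_epg):
    if custom_epg_id:
        return ('custom', custom_epg_id)   # explicit source wins, even a dummy one
    if tvg_id and not force_dummy_epg:
        return ('auto', tvg_id)            # original behavior: match by tvg_id
    return ('none', None)                  # force_dummy_epg: epg_data is cleared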
@@ -9,7 +9,7 @@ from apps.epg.models import ProgramData
 from apps.accounts.models import User
 from core.models import CoreSettings, NETWORK_ACCESS
 from dispatcharr.utils import network_access_allowed
-from django.utils import timezone
+from django.utils import timezone as django_timezone
 from django.shortcuts import get_object_or_404
 from datetime import datetime, timedelta
 import html  # Add this import for XML escaping
@@ -22,6 +22,7 @@ import logging
 from django.db.models.functions import Lower
 import os
 from apps.m3u.utils import calculate_tuner_count
+import regex

 logger = logging.getLogger(__name__)
@@ -186,12 +187,42 @@ def generate_m3u(request, profile_name=None, user=None):
     return response


-def generate_dummy_programs(channel_id, channel_name, num_days=1, program_length_hours=4):
+def generate_dummy_programs(channel_id, channel_name, num_days=1, program_length_hours=4, epg_source=None):
+    """
+    Generate dummy EPG programs for channels.
+
+    If epg_source is provided and it's a custom dummy EPG with patterns,
+    use those patterns to generate programs from the channel title.
+    Otherwise, generate default dummy programs.
+
+    Args:
+        channel_id: Channel ID for the programs
+        channel_name: Channel title/name
+        num_days: Number of days to generate programs for
+        program_length_hours: Length of each program in hours
+        epg_source: Optional EPGSource for custom dummy EPG with patterns
+
+    Returns:
+        List of program dictionaries
+    """
     # Get current time rounded to hour
-    now = timezone.now()
+    now = django_timezone.now()
     now = now.replace(minute=0, second=0, microsecond=0)

-    # Humorous program descriptions based on time of day
+    # Check if this is a custom dummy EPG with regex patterns
+    if epg_source and epg_source.source_type == 'dummy' and epg_source.custom_properties:
+        custom_programs = generate_custom_dummy_programs(
+            channel_id, channel_name, now, num_days,
+            epg_source.custom_properties
+        )
+        # If custom generation succeeded, return those programs
+        # If it returned empty (pattern didn't match), fall through to default
+        if custom_programs:
+            return custom_programs
+        else:
+            logger.info(f"Custom pattern didn't match for '{channel_name}', using default dummy EPG")
+
+    # Default humorous program descriptions based on time of day
     time_descriptions = {
         (0, 4): [
             f"Late Night with {channel_name} - Where insomniacs unite!",
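Callers can keep the old signature; passing an EPGSource opts in to pattern-based generation, with the humorous filler as fallback. A usage sketch (the dummy_source object is assumed to exist):

programs = generate_dummy_programs(
    channel_id="1001",
    channel_name="NHL 01: Bruins VS Maple Leafs @ 8:00PM ET",
    num_days=1,
    epg_source=dummy_source,  # EPGSource with source_type='dummy'; None keeps old behavior
)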
@@ -263,6 +294,579 @@ def generate_dummy_programs(channel_id, channel_name, num_days=1, program_length
     return programs


+def generate_custom_dummy_programs(channel_id, channel_name, now, num_days, custom_properties):
+    """
+    Generate programs using custom dummy EPG regex patterns.
+
+    Extracts information from channel title using regex patterns and generates
+    programs based on the extracted data.
+
+    TIMEZONE HANDLING:
+    ------------------
+    The timezone parameter specifies the timezone of the event times in your channel
+    titles using standard timezone names (e.g., 'US/Eastern', 'US/Pacific', 'Europe/London').
+    DST (Daylight Saving Time) is handled automatically by pytz.
+
+    Examples:
+    - Channel: "NHL 01: Bruins VS Maple Leafs @ 8:00PM ET"
+    - Set timezone = "US/Eastern"
+    - In October (DST): 8:00PM EDT → 12:00AM UTC (automatically uses UTC-4)
+    - In January (no DST): 8:00PM EST → 1:00AM UTC (automatically uses UTC-5)
+
+    Args:
+        channel_id: Channel ID for the programs
+        channel_name: Channel title to parse
+        now: Current datetime (in UTC)
+        num_days: Number of days to generate programs for
+        custom_properties: Dict with title_pattern, time_pattern, templates, etc.
+            - timezone: Timezone name (e.g., 'US/Eastern')
+
+    Returns:
+        List of program dictionaries with start_time/end_time in UTC
+    """
+    import pytz
+
+    logger.info(f"Generating custom dummy programs for channel: {channel_name}")
+
+    # Extract patterns from custom properties
+    title_pattern = custom_properties.get('title_pattern', '')
+    time_pattern = custom_properties.get('time_pattern', '')
+    date_pattern = custom_properties.get('date_pattern', '')
+
+    # Get timezone name (e.g., 'US/Eastern', 'US/Pacific', 'Europe/London')
+    timezone_value = custom_properties.get('timezone', 'UTC')
+    output_timezone_value = custom_properties.get('output_timezone', '')  # Optional: display times in different timezone
+    program_duration = custom_properties.get('program_duration', 180)  # Minutes
+    title_template = custom_properties.get('title_template', '')
+    description_template = custom_properties.get('description_template', '')
+
+    # Templates for upcoming/ended programs
+    upcoming_title_template = custom_properties.get('upcoming_title_template', '')
+    upcoming_description_template = custom_properties.get('upcoming_description_template', '')
+    ended_title_template = custom_properties.get('ended_title_template', '')
+    ended_description_template = custom_properties.get('ended_description_template', '')
+
+    # EPG metadata options
+    category_string = custom_properties.get('category', '')
+    # Split comma-separated categories and strip whitespace, filter out empty strings
+    categories = [cat.strip() for cat in category_string.split(',') if cat.strip()] if category_string else []
+    include_date = custom_properties.get('include_date', True)
+    include_live = custom_properties.get('include_live', False)
+
+    # Parse timezone name
+    try:
+        source_tz = pytz.timezone(timezone_value)
+        logger.debug(f"Using timezone: {timezone_value} (DST will be handled automatically)")
+    except pytz.exceptions.UnknownTimeZoneError:
+        logger.warning(f"Unknown timezone: {timezone_value}, defaulting to UTC")
+        source_tz = pytz.utc
+
+    # Parse output timezone if provided (for display purposes)
+    output_tz = None
+    if output_timezone_value:
+        try:
+            output_tz = pytz.timezone(output_timezone_value)
+            logger.debug(f"Using output timezone for display: {output_timezone_value}")
+        except pytz.exceptions.UnknownTimeZoneError:
+            logger.warning(f"Unknown output timezone: {output_timezone_value}, will use source timezone")
+            output_tz = None
+
+    if not title_pattern:
+        logger.warning(f"No title_pattern in custom_properties, falling back to default")
+        return []  # Return empty, will use default
+
+    logger.debug(f"Title pattern from DB: {repr(title_pattern)}")
+
+    # Convert PCRE/JavaScript named groups (?<name>) to Python format (?P<name>)
+    # This handles patterns created with JavaScript regex syntax
+    # Use negative lookahead to avoid matching lookbehind (?<=) and negative lookbehind (?<!)
+    title_pattern = regex.sub(r'\(\?<(?![=!])([^>]+)>', r'(?P<\1>', title_pattern)
+    logger.debug(f"Converted title pattern: {repr(title_pattern)}")
+
+    # Compile regex patterns using the enhanced regex module
+    # (supports variable-width lookbehinds like JavaScript)
+    try:
+        title_regex = regex.compile(title_pattern)
+    except Exception as e:
+        logger.error(f"Invalid title regex pattern after conversion: {e}")
+        logger.error(f"Pattern was: {repr(title_pattern)}")
+        return []
+
+    time_regex = None
+    if time_pattern:
+        # Convert PCRE/JavaScript named groups to Python format
+        # Use negative lookahead to avoid matching lookbehind (?<=) and negative lookbehind (?<!)
+        time_pattern = regex.sub(r'\(\?<(?![=!])([^>]+)>', r'(?P<\1>', time_pattern)
+        logger.debug(f"Converted time pattern: {repr(time_pattern)}")
+        try:
+            time_regex = regex.compile(time_pattern)
+        except Exception as e:
+            logger.warning(f"Invalid time regex pattern after conversion: {e}")
+            logger.warning(f"Pattern was: {repr(time_pattern)}")
+
+    # Compile date regex if provided
+    date_regex = None
+    if date_pattern:
+        # Convert PCRE/JavaScript named groups to Python format
+        # Use negative lookahead to avoid matching lookbehind (?<=) and negative lookbehind (?<!)
+        date_pattern = regex.sub(r'\(\?<(?![=!])([^>]+)>', r'(?P<\1>', date_pattern)
+        logger.debug(f"Converted date pattern: {repr(date_pattern)}")
+        try:
+            date_regex = regex.compile(date_pattern)
+        except Exception as e:
+            logger.warning(f"Invalid date regex pattern after conversion: {e}")
+            logger.warning(f"Pattern was: {repr(date_pattern)}")
+
+    # Try to match the channel name with the title pattern
+    # Use search() instead of match() to match JavaScript behavior where .match() searches anywhere in the string
+    title_match = title_regex.search(channel_name)
+    if not title_match:
+        logger.debug(f"Channel name '{channel_name}' doesn't match title pattern")
+        return []  # Return empty, will use default
+
+    groups = title_match.groupdict()
+    logger.debug(f"Title pattern matched. Groups: {groups}")
+
+    # Helper function to format template with matched groups
+    def format_template(template, groups):
+        """Replace {groupname} placeholders with matched group values"""
+        if not template:
+            return ''
+        result = template
+        for key, value in groups.items():
+            result = result.replace(f'{{{key}}}', str(value) if value else '')
+        return result
+
+    # Extract time from title if time pattern exists
+    time_info = None
+    time_groups = {}
+    if time_regex:
+        time_match = time_regex.search(channel_name)
+        if time_match:
+            time_groups = time_match.groupdict()
+            try:
+                hour = int(time_groups.get('hour'))
+                # Handle optional minute group - could be None if not captured
+                minute_value = time_groups.get('minute')
+                minute = int(minute_value) if minute_value is not None else 0
+                ampm = time_groups.get('ampm')
+                ampm = ampm.lower() if ampm else None
+
+                # Determine if this is 12-hour or 24-hour format
+                if ampm in ('am', 'pm'):
+                    # 12-hour format: convert to 24-hour
+                    if ampm == 'pm' and hour != 12:
+                        hour += 12
+                    elif ampm == 'am' and hour == 12:
+                        hour = 0
+                    logger.debug(f"Extracted time (12-hour): {hour}:{minute:02d} {ampm}")
+                else:
+                    # 24-hour format: hour is already in 24-hour format
+                    # Validate that it's actually a 24-hour time (0-23)
+                    if hour > 23:
+                        logger.warning(f"Invalid 24-hour time: {hour}. Must be 0-23.")
+                        hour = hour % 24  # Wrap around just in case
+                    logger.debug(f"Extracted time (24-hour): {hour}:{minute:02d}")
+
+                time_info = {'hour': hour, 'minute': minute}
+            except (ValueError, TypeError) as e:
+                logger.warning(f"Error parsing time: {e}")
+
+    # Extract date from title if date pattern exists
+    date_info = None
+    date_groups = {}
+    if date_regex:
+        date_match = date_regex.search(channel_name)
+        if date_match:
+            date_groups = date_match.groupdict()
+            try:
+                # Support various date group names: month, day, year
+                month_str = date_groups.get('month', '')
+                day = int(date_groups.get('day', 1))
+                year = int(date_groups.get('year', now.year))  # Default to current year if not provided
+
+                # Parse month - can be numeric (1-12) or text (Jan, January, etc.)
+                month = None
+                if month_str.isdigit():
+                    month = int(month_str)
+                else:
+                    # Try to parse text month names
+                    import calendar
+                    month_str_lower = month_str.lower()
+                    # Check full month names
+                    for i, month_name in enumerate(calendar.month_name):
+                        if month_name.lower() == month_str_lower:
+                            month = i
+                            break
+                    # Check abbreviated month names if not found
+                    if month is None:
+                        for i, month_abbr in enumerate(calendar.month_abbr):
+                            if month_abbr.lower() == month_str_lower:
+                                month = i
+                                break
+
+                if month and 1 <= month <= 12 and 1 <= day <= 31:
+                    date_info = {'year': year, 'month': month, 'day': day}
+                    logger.debug(f"Extracted date: {year}-{month:02d}-{day:02d}")
+                else:
+                    logger.warning(f"Invalid date values: month={month}, day={day}, year={year}")
+            except (ValueError, TypeError) as e:
+                logger.warning(f"Error parsing date: {e}")
+
+    # Merge title groups, time groups, and date groups for template formatting
+    all_groups = {**groups, **time_groups, **date_groups}
+
+    # Add formatted time strings for better display (handles minutes intelligently)
+    if time_info:
+        hour_24 = time_info['hour']
+        minute = time_info['minute']
+
+        # If output_timezone is specified, convert the display time to that timezone
+        if output_tz:
+            # Create a datetime in the source timezone
+            temp_date = datetime.now(source_tz).replace(hour=hour_24, minute=minute, second=0, microsecond=0)
+            # Convert to output timezone
+            temp_date_output = temp_date.astimezone(output_tz)
+            # Extract converted hour and minute for display
+            hour_24 = temp_date_output.hour
+            minute = temp_date_output.minute
+            logger.debug(f"Converted display time from {source_tz} to {output_tz}: {hour_24}:{minute:02d}")
+
+        # Format 24-hour time string - only include minutes if non-zero
+        if minute > 0:
+            all_groups['time24'] = f"{hour_24}:{minute:02d}"
+        else:
+            all_groups['time24'] = f"{hour_24:02d}:00"
+
+        # Convert 24-hour to 12-hour format for {time} placeholder
+        # Note: hour_24 is ALWAYS in 24-hour format at this point (converted earlier if needed)
+        ampm = 'AM' if hour_24 < 12 else 'PM'
+        hour_12 = hour_24
+        if hour_24 == 0:
+            hour_12 = 12
+        elif hour_24 > 12:
+            hour_12 = hour_24 - 12
+
+        # Format 12-hour time string - only include minutes if non-zero
+        if minute > 0:
+            all_groups['time'] = f"{hour_12}:{minute:02d} {ampm}"
+        else:
+            all_groups['time'] = f"{hour_12} {ampm}"
+
+    # Generate programs
+    programs = []
+
+    # If we have extracted time AND date, the event happens on a SPECIFIC date
+    # If we have time but NO date, generate for multiple days (existing behavior)
+    # All other days and times show "Upcoming" before or "Ended" after
+    event_happened = False
+
+    # Determine how many iterations we need
+    if date_info and time_info:
+        # Specific date extracted - only generate for that one date
+        iterations = 1
+        logger.debug(f"Date extracted, generating single event for specific date")
+    else:
+        # No specific date - use num_days (existing behavior)
+        iterations = num_days
+
+    for day in range(iterations):
+        # Start from current time (like standard dummy) instead of midnight
+        # This ensures programs appear in the guide's current viewing window
+        day_start = now + timedelta(days=day)
+        day_end = day_start + timedelta(days=1)
+
+        if time_info:
+            # We have an extracted event time - this is when the MAIN event starts
+            # The extracted time is in the SOURCE timezone (e.g., 8PM ET)
+            # We need to convert it to UTC for storage
+
+            # Determine which date to use
+            if date_info:
+                # Use the extracted date from the channel title
+                current_date = datetime(
+                    date_info['year'],
+                    date_info['month'],
+                    date_info['day']
+                ).date()
+                logger.debug(f"Using extracted date: {current_date}")
+            else:
+                # No date extracted, use day offset from current time in SOURCE timezone
+                # This ensures we calculate "today" in the event's timezone, not UTC
+                # For example: 8:30 PM Central (1:30 AM UTC next day) for a 10 PM ET event
+                # should use today's date in ET, not tomorrow's date in UTC
+                now_in_source_tz = now.astimezone(source_tz)
+                current_date = (now_in_source_tz + timedelta(days=day)).date()
+                logger.debug(f"No date extracted, using day offset in {source_tz}: {current_date}")
+
+            # Create a naive datetime (no timezone info) representing the event in source timezone
+            event_start_naive = datetime.combine(
+                current_date,
+                datetime.min.time().replace(
+                    hour=time_info['hour'],
+                    minute=time_info['minute']
+                )
+            )
+
+            # Use pytz to localize the naive datetime to the source timezone
+            # This automatically handles DST!
+            try:
+                event_start_local = source_tz.localize(event_start_naive)
+                # Convert to UTC
+                event_start_utc = event_start_local.astimezone(pytz.utc)
+                logger.debug(f"Converted {event_start_local} to UTC: {event_start_utc}")
+            except Exception as e:
+                logger.error(f"Error localizing time to {source_tz}: {e}")
+                # Fallback: treat as UTC
+                event_start_utc = django_timezone.make_aware(event_start_naive, pytz.utc)
+
+            event_end_utc = event_start_utc + timedelta(minutes=program_duration)
+
+            # Pre-generate the main event title and description for reuse
+            if title_template:
+                main_event_title = format_template(title_template, all_groups)
+            else:
+                title_parts = []
+                if 'league' in all_groups and all_groups['league']:
+                    title_parts.append(all_groups['league'])
+                if 'team1' in all_groups and 'team2' in all_groups:
+                    title_parts.append(f"{all_groups['team1']} vs {all_groups['team2']}")
+                elif 'title' in all_groups and all_groups['title']:
+                    title_parts.append(all_groups['title'])
+                main_event_title = ' - '.join(title_parts) if title_parts else channel_name
+
+            if description_template:
+                main_event_description = format_template(description_template, all_groups)
+            else:
+                main_event_description = main_event_title
+
+            # Determine if this day is before, during, or after the event
+            # Event only happens on day 0 (first day)
+            is_event_day = (day == 0)
+
+            if is_event_day and not event_happened:
+                # This is THE day the event happens
+                # Fill programs BEFORE the event
+                current_time = day_start
+
+                while current_time < event_start_utc:
+                    program_start_utc = current_time
+                    program_end_utc = min(current_time + timedelta(minutes=program_duration), event_start_utc)
+
+                    # Use custom upcoming templates if provided, otherwise use defaults
+                    if upcoming_title_template:
+                        upcoming_title = format_template(upcoming_title_template, all_groups)
+                    else:
+                        upcoming_title = main_event_title
+
+                    if upcoming_description_template:
+                        upcoming_description = format_template(upcoming_description_template, all_groups)
+                    else:
+                        upcoming_description = f"Upcoming: {main_event_description}"
+
+                    # Build custom_properties for upcoming programs (only date, no category/live)
+                    program_custom_properties = {}
+
+                    # Add date if requested (YYYY-MM-DD format from start time in event timezone)
+                    if include_date:
+                        # Convert UTC time to event timezone for date calculation
+                        local_time = program_start_utc.astimezone(source_tz)
+                        date_str = local_time.strftime('%Y-%m-%d')
+                        program_custom_properties['date'] = date_str
+
+                    programs.append({
+                        "channel_id": channel_id,
+                        "start_time": program_start_utc,
+                        "end_time": program_end_utc,
+                        "title": upcoming_title,
+                        "description": upcoming_description,
+                        "custom_properties": program_custom_properties,
+                    })
+
+                    current_time += timedelta(minutes=program_duration)
+
+                # Add the MAIN EVENT at the extracted time
+                # Build custom_properties for main event (includes category and live)
+                main_event_custom_properties = {}
+
+                # Add categories if provided
+                if categories:
+                    main_event_custom_properties['categories'] = categories
+
+                # Add date if requested (YYYY-MM-DD format from start time in event timezone)
+                if include_date:
+                    # Convert UTC time to event timezone for date calculation
+                    local_time = event_start_utc.astimezone(source_tz)
+                    date_str = local_time.strftime('%Y-%m-%d')
+                    main_event_custom_properties['date'] = date_str
+
+                # Add live flag if requested
+                if include_live:
+                    main_event_custom_properties['live'] = True
+
+                programs.append({
+                    "channel_id": channel_id,
+                    "start_time": event_start_utc,
+                    "end_time": event_end_utc,
+                    "title": main_event_title,
+                    "description": main_event_description,
+                    "custom_properties": main_event_custom_properties,
+                })
+
+                event_happened = True
+
+                # Fill programs AFTER the event until end of day
+                current_time = event_end_utc
+
+                while current_time < day_end:
+                    program_start_utc = current_time
+                    program_end_utc = min(current_time + timedelta(minutes=program_duration), day_end)
+
+                    # Use custom ended templates if provided, otherwise use defaults
+                    if ended_title_template:
+                        ended_title = format_template(ended_title_template, all_groups)
+                    else:
+                        ended_title = main_event_title
+
+                    if ended_description_template:
+                        ended_description = format_template(ended_description_template, all_groups)
+                    else:
+                        ended_description = f"Ended: {main_event_description}"
+
+                    # Build custom_properties for ended programs (only date, no category/live)
+                    program_custom_properties = {}
+
+                    # Add date if requested (YYYY-MM-DD format from start time in event timezone)
+                    if include_date:
+                        # Convert UTC time to event timezone for date calculation
+                        local_time = program_start_utc.astimezone(source_tz)
+                        date_str = local_time.strftime('%Y-%m-%d')
+                        program_custom_properties['date'] = date_str
+
+                    programs.append({
+                        "channel_id": channel_id,
+                        "start_time": program_start_utc,
+                        "end_time": program_end_utc,
+                        "title": ended_title,
+                        "description": ended_description,
+                        "custom_properties": program_custom_properties,
+                    })
+
+                    current_time += timedelta(minutes=program_duration)
+            else:
+                # This day is either before the event (future days) or after the event happened
+                # Fill entire day with appropriate message
+                current_time = day_start
+
+                # If event already happened, all programs show "Ended"
+                # If event hasn't happened yet (shouldn't occur with day 0 logic), show "Upcoming"
+                is_ended = event_happened
+
+                while current_time < day_end:
+                    program_start_utc = current_time
+                    program_end_utc = min(current_time + timedelta(minutes=program_duration), day_end)
+
+                    # Use custom templates based on whether event has ended or is upcoming
+                    if is_ended:
+                        if ended_title_template:
+                            program_title = format_template(ended_title_template, all_groups)
+                        else:
+                            program_title = main_event_title
+
+                        if ended_description_template:
+                            program_description = format_template(ended_description_template, all_groups)
+                        else:
+                            program_description = f"Ended: {main_event_description}"
+                    else:
+                        if upcoming_title_template:
+                            program_title = format_template(upcoming_title_template, all_groups)
+                        else:
+                            program_title = main_event_title
+
+                        if upcoming_description_template:
+                            program_description = format_template(upcoming_description_template, all_groups)
+                        else:
+                            program_description = f"Upcoming: {main_event_description}"
+
+                    # Build custom_properties (only date for upcoming/ended filler programs)
+                    program_custom_properties = {}
+
+                    # Add date if requested (YYYY-MM-DD format from start time in event timezone)
+                    if include_date:
+                        # Convert UTC time to event timezone for date calculation
+                        local_time = program_start_utc.astimezone(source_tz)
+                        date_str = local_time.strftime('%Y-%m-%d')
+                        program_custom_properties['date'] = date_str
+
+                    programs.append({
+                        "channel_id": channel_id,
+                        "start_time": program_start_utc,
+                        "end_time": program_end_utc,
+                        "title": program_title,
+                        "description": program_description,
+                        "custom_properties": program_custom_properties,
+                    })
+
+                    current_time += timedelta(minutes=program_duration)
+        else:
+            # No extracted time - fill entire day with regular intervals
+            # day_start and day_end are already in UTC, so no conversion needed
+            programs_per_day = max(1, int(24 / (program_duration / 60)))
+
+            for program_num in range(programs_per_day):
+                program_start_utc = day_start + timedelta(minutes=program_num * program_duration)
+                program_end_utc = program_start_utc + timedelta(minutes=program_duration)
+
+                if title_template:
+                    title = format_template(title_template, all_groups)
+                else:
+                    title_parts = []
+                    if 'league' in all_groups and all_groups['league']:
+                        title_parts.append(all_groups['league'])
+                    if 'team1' in all_groups and 'team2' in all_groups:
+                        title_parts.append(f"{all_groups['team1']} vs {all_groups['team2']}")
+                    elif 'title' in all_groups and all_groups['title']:
+                        title_parts.append(all_groups['title'])
+                    title = ' - '.join(title_parts) if title_parts else channel_name
+
+                if description_template:
+                    description = format_template(description_template, all_groups)
+                else:
+                    description = title
+
+                # Build custom_properties for this program
+                program_custom_properties = {}
+
+                # Add categories if provided
+                if categories:
+                    program_custom_properties['categories'] = categories
+
+                # Add date if requested (YYYY-MM-DD format from start time in event timezone)
+                if include_date:
+                    # Convert UTC time to event timezone for date calculation
+                    local_time = program_start_utc.astimezone(source_tz)
+                    date_str = local_time.strftime('%Y-%m-%d')
+                    program_custom_properties['date'] = date_str
+
+                # Add live flag if requested
+                if include_live:
+                    program_custom_properties['live'] = True
+
+                programs.append({
+                    "channel_id": channel_id,
+                    "start_time": program_start_utc,
+                    "end_time": program_end_utc,
+                    "title": title,
+                    "description": description,
+                    "custom_properties": program_custom_properties,
+                })
+
+    logger.info(f"Generated {len(programs)} custom dummy programs for {channel_name}")
+    return programs
+
+
 def generate_dummy_epg(
     channel_id, channel_name, xml_lines=None, num_days=1, program_length_hours=4
 ):
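A hedged end-to-end example of custom_properties for the docstring's NHL title (these patterns are illustrative, not shipped defaults); it also shows the JavaScript-to-Python named-group rewrite the generator applies before compiling:

import regex

custom_properties = {
    "title_pattern": r"(?<league>\w+) \d+: (?<team1>.+?) VS (?<team2>.+?) @",
    "time_pattern": r"@ (?<hour>\d{1,2}):(?<minute>\d{2})(?<ampm>AM|PM)",
    "timezone": "US/Eastern",
    "program_duration": 180,
    "title_template": "{league}: {team1} vs {team2}",
    "description_template": "Face-off at {time}",
}

# The same conversion the generator performs on stored patterns:
converted = regex.sub(r'\(\?<(?![=!])([^>]+)>', r'(?P<\1>',
                      custom_properties["title_pattern"])
m = regex.compile(converted).search("NHL 01: Bruins VS Maple Leafs @ 8:00PM ET")
print(m.groupdict())  # {'league': 'NHL', 'team1': 'Bruins', 'team2': 'Maple Leafs'}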
@@ -294,6 +898,23 @@ def generate_dummy_epg(
         )
         xml_lines.append(f"    <title>{html.escape(program['title'])}</title>")
         xml_lines.append(f"    <desc>{html.escape(program['description'])}</desc>")
+
+        # Add custom_properties if present
+        custom_data = program.get('custom_properties', {})
+
+        # Categories
+        if 'categories' in custom_data:
+            for cat in custom_data['categories']:
+                xml_lines.append(f"    <category>{html.escape(cat)}</category>")
+
+        # Date tag
+        if 'date' in custom_data:
+            xml_lines.append(f"    <date>{html.escape(custom_data['date'])}</date>")
+
+        # Live tag
+        if custom_data.get('live', False):
+            xml_lines.append(f"    <live />")
+
         xml_lines.append(f"  </programme>")

     return xml_lines
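For a main event with a category, a date, and the live flag enabled, the branch above appends three extra tags before </programme>. A standalone sketch with illustrative values:

import html

custom_data = {"categories": ["Sports"], "date": "2025-10-17", "live": True}
xml_lines = []
for cat in custom_data.get('categories', []):
    xml_lines.append(f"    <category>{html.escape(cat)}</category>")
if 'date' in custom_data:
    xml_lines.append(f"    <date>{html.escape(custom_data['date'])}</date>")
if custom_data.get('live', False):
    xml_lines.append("    <live />")
print("\n".join(xml_lines))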
@@ -367,7 +988,7 @@ def generate_epg(request, profile_name=None, user=None):
     dummy_days = num_days if num_days > 0 else 3

     # Calculate cutoff date for EPG data filtering (only if days > 0)
-    now = timezone.now()
+    now = django_timezone.now()
     cutoff_date = now + timedelta(days=num_days) if num_days > 0 else None

     # Process channels for the <channel> section
@@ -434,12 +1055,38 @@ def generate_epg(request, profile_name=None, user=None):
            # Default to channel number
            channel_id = str(formatted_channel_number) if formatted_channel_number != "" else str(channel.id)

        # Use EPG data name for display, but channel name for pattern matching
        display_name = channel.epg_data.name if channel.epg_data else channel.name
        # For dummy EPG pattern matching, determine which name to use
        pattern_match_name = channel.name

        # Check if we should use stream name instead of channel name
        if channel.epg_data and channel.epg_data.epg_source:
            epg_source = channel.epg_data.epg_source
            if epg_source.custom_properties:
                custom_props = epg_source.custom_properties
                name_source = custom_props.get('name_source')

                if name_source == 'stream':
                    stream_index = custom_props.get('stream_index', 1) - 1
                    channel_streams = channel.streams.all().order_by('channelstream__order')

                    if channel_streams.exists() and 0 <= stream_index < channel_streams.count():
                        stream = list(channel_streams)[stream_index]
                        pattern_match_name = stream.name
                        logger.debug(f"Using stream name for parsing: {pattern_match_name} (stream index: {stream_index})")
                    else:
                        logger.warning(f"Stream index {stream_index} not found for channel {channel.name}, falling back to channel name")

        if not channel.epg_data:
            # Use the enhanced dummy EPG generation function with defaults
            program_length_hours = 4  # Default to 4-hour program blocks
            dummy_programs = generate_dummy_programs(channel_id, display_name, num_days=dummy_days, program_length_hours=program_length_hours)
            dummy_programs = generate_dummy_programs(
                channel_id, pattern_match_name,
                num_days=dummy_days,
                program_length_hours=program_length_hours,
                epg_source=None
            )

            for program in dummy_programs:
                # Format times in XMLTV format
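The stream_index custom property is 1-based in configuration and converted to a 0-based list index above; a tiny standalone sketch of that selection rule, with hypothetical stream names standing in for a channel's ordered streams:

streams = ["primary feed", "backup feed", "mobile feed"]
custom_props = {"name_source": "stream", "stream_index": 2}  # user configures 1-based

stream_index = custom_props.get("stream_index", 1) - 1  # -> 1 (0-based)
if 0 <= stream_index < len(streams):
    pattern_match_name = streams[stream_index]  # -> "backup feed"
else:
    pattern_match_name = "channel name fallback"
print(pattern_match_name)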
@@ -450,9 +1097,68 @@ def generate_epg(request, profile_name=None, user=None):
                yield f' <programme start="{start_str}" stop="{stop_str}" channel="{channel_id}">\n'
                yield f" <title>{html.escape(program['title'])}</title>\n"
                yield f" <desc>{html.escape(program['description'])}</desc>\n"

                # Add custom_properties if present
                custom_data = program.get('custom_properties', {})

                # Categories
                if 'categories' in custom_data:
                    for cat in custom_data['categories']:
                        yield f" <category>{html.escape(cat)}</category>\n"

                # Date tag
                if 'date' in custom_data:
                    yield f" <date>{html.escape(custom_data['date'])}</date>\n"

                # Live tag
                if custom_data.get('live', False):
                    yield " <live />\n"

                yield " </programme>\n"

        else:
            # Check if this is a dummy EPG with no programs (generate on-demand)
            if channel.epg_data.epg_source and channel.epg_data.epg_source.source_type == 'dummy':
                # This is a custom dummy EPG - check if it has programs
                if not channel.epg_data.programs.exists():
                    # No programs stored, generate on-demand using custom patterns
                    # Use actual channel name for pattern matching
                    program_length_hours = 4
                    dummy_programs = generate_dummy_programs(
                        channel_id, pattern_match_name,
                        num_days=dummy_days,
                        program_length_hours=program_length_hours,
                        epg_source=channel.epg_data.epg_source
                    )

                    for program in dummy_programs:
                        start_str = program['start_time'].strftime("%Y%m%d%H%M%S %z")
                        stop_str = program['end_time'].strftime("%Y%m%d%H%M%S %z")

                        yield f' <programme start="{start_str}" stop="{stop_str}" channel="{channel_id}">\n'
                        yield f" <title>{html.escape(program['title'])}</title>\n"
                        yield f" <desc>{html.escape(program['description'])}</desc>\n"

                        # Add custom_properties if present
                        custom_data = program.get('custom_properties', {})

                        # Categories
                        if 'categories' in custom_data:
                            for cat in custom_data['categories']:
                                yield f" <category>{html.escape(cat)}</category>\n"

                        # Date tag
                        if 'date' in custom_data:
                            yield f" <date>{html.escape(custom_data['date'])}</date>\n"

                        # Live tag
                        if custom_data.get('live', False):
                            yield " <live />\n"

                        yield " </programme>\n"

                    continue  # Skip to next channel

            # For real EPG data - filter only if days parameter was specified
            if num_days > 0:
                programs_qs = channel.epg_data.programs.filter(
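The XMLTV timestamp format used for the start/stop attributes packs local time and UTC offset together; a quick check of what strftime produces for an aware datetime:

from datetime import datetime, timezone

dt = datetime(2025, 1, 1, 18, 0, tzinfo=timezone.utc)
print(dt.strftime("%Y%m%d%H%M%S %z"))  # -> "20250101180000 +0000"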
@@ -1013,14 +1719,34 @@ def xc_get_epg(request, user, short=False):

    limit = request.GET.get('limit', 4)
    if channel.epg_data:
        if short == False:
            programs = channel.epg_data.programs.filter(
                start_time__gte=timezone.now()
            ).order_by('start_time')
        # Check if this is a dummy EPG that generates on-demand
        if channel.epg_data.epg_source and channel.epg_data.epg_source.source_type == 'dummy':
            if not channel.epg_data.programs.exists():
                # Generate on-demand using custom patterns
                programs = generate_dummy_programs(
                    channel_id=channel_id,
                    channel_name=channel.name,
                    epg_source=channel.epg_data.epg_source
                )
            else:
                # Has stored programs, use them
                if short == False:
                    programs = channel.epg_data.programs.filter(
                        start_time__gte=django_timezone.now()
                    ).order_by('start_time')
                else:
                    programs = channel.epg_data.programs.all().order_by('start_time')[:limit]
        else:
            programs = channel.epg_data.programs.all().order_by('start_time')[:limit]
            # Regular EPG with stored programs
            if short == False:
                programs = channel.epg_data.programs.filter(
                    start_time__gte=django_timezone.now()
                ).order_by('start_time')
            else:
                programs = channel.epg_data.programs.all().order_by('start_time')[:limit]
    else:
        programs = generate_dummy_programs(channel_id=channel_id, channel_name=channel.name)
        # No EPG data assigned, generate default dummy
        programs = generate_dummy_programs(channel_id=channel_id, channel_name=channel.name, epg_source=None)

    output = {"epg_listings": []}
    for program in programs:
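Summarizing the branches above, a hedged sketch of the selection rule as a standalone helper; the boolean parameters are stand-ins for illustration, not the repo's API:

def select_program_source(has_epg: bool, is_dummy: bool, has_stored: bool) -> str:
    # Mirrors the xc_get_epg decision order shown in this hunk.
    if not has_epg:
        return "default dummy (generated on demand)"
    if is_dummy and not has_stored:
        return "custom dummy (generated on demand from patterns)"
    return "stored programs (filtered by start_time / limit)"

print(select_program_source(True, True, False))  # custom dummy ...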
@@ -1047,7 +1773,7 @@ def xc_get_epg(request, user, short=False):
        }

        if short == False:
            program_output["now_playing"] = 1 if start <= timezone.now() <= end else 0
            program_output["now_playing"] = 1 if start <= django_timezone.now() <= end else 0
            program_output["has_archive"] = "0"

        output['epg_listings'].append(program_output)

@@ -1232,7 +1958,7 @@ def xc_get_series_info(request, user, series_id):
    try:
        should_refresh = (
            not series_relation.last_episode_refresh or
            series_relation.last_episode_refresh < timezone.now() - timedelta(hours=24)
            series_relation.last_episode_refresh < django_timezone.now() - timedelta(hours=24)
        )

        # Check if detailed data has been fetched
@@ -8,7 +8,7 @@ from typing import Optional, Tuple, List
from django.shortcuts import get_object_or_404
from apps.channels.models import Channel, Stream
from apps.m3u.models import M3UAccount, M3UAccountProfile
from core.models import UserAgent, CoreSettings
from core.models import UserAgent, CoreSettings, StreamProfile
from .utils import get_logger
from uuid import UUID
import requests
@@ -26,16 +26,67 @@ def get_stream_object(id: str):

def generate_stream_url(channel_id: str) -> Tuple[str, str, bool, Optional[int]]:
    """
    Generate the appropriate stream URL for a channel based on its profile settings.
    Generate the appropriate stream URL for a channel or stream based on its profile settings.

    Args:
        channel_id: The UUID of the channel
        channel_id: The UUID of the channel or stream hash

    Returns:
        Tuple[str, str, bool, Optional[int]]: (stream_url, user_agent, transcode_flag, profile_id)
    """
    try:
        channel = get_stream_object(channel_id)
        channel_or_stream = get_stream_object(channel_id)

        # Handle direct stream preview (custom streams)
        if isinstance(channel_or_stream, Stream):
            stream = channel_or_stream
            logger.info(f"Previewing stream directly: {stream.id} ({stream.name})")

            # For custom streams, we need to get the M3U account and profile
            m3u_account = stream.m3u_account
            if not m3u_account:
                logger.error(f"Stream {stream.id} has no M3U account")
                return None, None, False, None

            # Get the default profile for this M3U account (custom streams use default)
            m3u_profiles = m3u_account.profiles.all()
            profile = next((obj for obj in m3u_profiles if obj.is_default), None)

            if not profile:
                logger.error(f"No default profile found for M3U account {m3u_account.id}")
                return None, None, False, None

            # Get the appropriate user agent
            stream_user_agent = m3u_account.get_user_agent().user_agent
            if stream_user_agent is None:
                stream_user_agent = UserAgent.objects.get(id=CoreSettings.get_default_user_agent_id())
                logger.debug(f"No user agent found for account, using default: {stream_user_agent}")

            # Get stream URL (no transformation for custom streams)
            stream_url = stream.url

            # Check if the stream has its own stream_profile set, otherwise use default
            if stream.stream_profile:
                stream_profile = stream.stream_profile
                logger.debug(f"Using stream's own stream profile: {stream_profile.name}")
            else:
                stream_profile = StreamProfile.objects.get(
                    id=CoreSettings.get_default_stream_profile_id()
                )
                logger.debug(f"Using default stream profile: {stream_profile.name}")

            # Proxy profiles redirect as-is; anything else needs transcoding.
            # (Both branches above set stream_profile, so no None check is needed.)
            transcode = not stream_profile.is_proxy()

            stream_profile_id = stream_profile.id

            return stream_url, stream_user_agent, transcode, stream_profile_id

        # Handle channel preview (existing logic)
        channel = channel_or_stream

        # Get stream and profile for this channel
        # Note: get_stream now returns 3 values (stream_id, profile_id, error_reason)
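A hedged usage sketch of the 4-tuple this function returns; the identifier and the handling shown are illustrative, not code from the repo:

# Hypothetical call site for generate_stream_url; per the docstring above the
# identifier may be a channel UUID or a custom stream's stream_hash.
stream_url, user_agent, transcode, profile_id = generate_stream_url(
    "3f2b1c9e-0000-0000-0000-000000000000"  # placeholder identifier
)
if stream_url is None:
    # Account or profile lookup failed; the caller should surface an error.
    raise RuntimeError("preview unavailable")
if transcode:
    # Hand off to the configured StreamProfile (profile_id) for transcoding.
    pass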
@@ -2,7 +2,7 @@

from django.urls import path, include
from rest_framework.routers import DefaultRouter
from .api_views import UserAgentViewSet, StreamProfileViewSet, CoreSettingsViewSet, environment, version, rehash_streams_endpoint
from .api_views import UserAgentViewSet, StreamProfileViewSet, CoreSettingsViewSet, environment, version, rehash_streams_endpoint, TimezoneListView

router = DefaultRouter()
router.register(r'useragents', UserAgentViewSet, basename='useragent')

@@ -12,5 +12,6 @@ urlpatterns = [
    path('settings/env/', environment, name='token_refresh'),
    path('version/', version, name='version'),
    path('rehash-streams/', rehash_streams_endpoint, name='rehash_streams'),
    path('timezones/', TimezoneListView.as_view(), name='timezones'),
    path('', include(router.urls)),
]
@@ -5,10 +5,12 @@ import ipaddress
import logging
from rest_framework import viewsets, status
from rest_framework.response import Response
from rest_framework.views import APIView
from django.shortcuts import get_object_or_404
from rest_framework.permissions import IsAuthenticated
from rest_framework.decorators import api_view, permission_classes, action
from drf_yasg.utils import swagger_auto_schema
from drf_yasg import openapi
from .models import (
    UserAgent,
    StreamProfile,
@@ -328,25 +330,69 @@ def rehash_streams_endpoint(request):
        # Get the current hash keys from settings
        hash_key_setting = CoreSettings.objects.get(key=STREAM_HASH_KEY)
        hash_keys = hash_key_setting.value.split(",")

        # Queue the rehash task
        task = rehash_streams.delay(hash_keys)

        return Response({
            "success": True,
            "message": "Stream rehashing task has been queued",
            "task_id": task.id
        }, status=status.HTTP_200_OK)

    except CoreSettings.DoesNotExist:
        return Response({
            "success": False,
            "message": "Hash key settings not found"
        }, status=status.HTTP_400_BAD_REQUEST)

    except Exception as e:
        logger.error(f"Error triggering rehash streams: {e}")
        return Response({
            "success": False,
            "message": "Failed to trigger rehash task"
        }, status=status.HTTP_500_INTERNAL_SERVER_ERROR)


# ─────────────────────────────
# Timezone List API
# ─────────────────────────────
class TimezoneListView(APIView):
    """
    API endpoint that returns all available timezones supported by pytz.
    Returns a list of timezone names grouped by region for easy selection.
    This is a general utility endpoint that can be used throughout the application.
    """

    def get_permissions(self):
        # Uses the IsAuthenticated permission class imported above
        return [IsAuthenticated()]

    @swagger_auto_schema(
        operation_description="Get list of all supported timezones",
        responses={200: openapi.Response('List of timezones with grouping by region')}
    )
    def get(self, request):
        import pytz

        # Get all common timezones (excludes deprecated ones)
        all_timezones = sorted(pytz.common_timezones)

        # Group by region for better UX
        grouped = {}
        for tz in all_timezones:
            if '/' in tz:
                region = tz.split('/')[0]
                if region not in grouped:
                    grouped[region] = []
                grouped[region].append(tz)
            else:
                # Handle special zones like UTC, GMT, etc.
                if 'Other' not in grouped:
                    grouped['Other'] = []
                grouped['Other'].append(tz)

        return Response({
            'timezones': all_timezones,
            'grouped': grouped,
            'count': len(all_timezones)
        })
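The grouping step is self-contained; a runnable sketch of the same region bucketing outside Django (requires pytz):

import pytz

grouped = {}
for tz in sorted(pytz.common_timezones):
    region = tz.split('/')[0] if '/' in tz else 'Other'
    grouped.setdefault(region, []).append(tz)

print(list(grouped)[:3])     # e.g. ['Africa', 'America', 'Antarctica']
print(grouped['Other'][:2])  # zones without a region, e.g. ['GMT', 'UTC']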
@@ -96,7 +96,16 @@ fi

chmod +x /etc/profile.d/dispatcharr.sh

pip install django-filter
# Ensure root's .bashrc sources the profile.d scripts for interactive non-login shells
if ! grep -q "profile.d/dispatcharr.sh" /root/.bashrc 2>/dev/null; then
    cat >> /root/.bashrc << 'EOF'

# Source Dispatcharr environment variables
if [ -f /etc/profile.d/dispatcharr.sh ]; then
    . /etc/profile.d/dispatcharr.sh
fi
EOF
fi

# Run init scripts
echo "Starting user setup..."
@@ -1,25 +1,60 @@
#!/bin/bash

mkdir -p /data/logos
mkdir -p /data/recordings
mkdir -p /data/uploads/m3us
mkdir -p /data/uploads/epgs
mkdir -p /data/m3us
mkdir -p /data/epgs
mkdir -p /data/plugins
mkdir -p /app/logo_cache
mkdir -p /app/media
# Define directories that need to exist and be owned by PUID:PGID
DATA_DIRS=(
    "/data/logos"
    "/data/recordings"
    "/data/uploads/m3us"
    "/data/uploads/epgs"
    "/data/m3us"
    "/data/epgs"
    "/data/plugins"
)

APP_DIRS=(
    "/app/logo_cache"
    "/app/media"
)

# Create all directories
for dir in "${DATA_DIRS[@]}" "${APP_DIRS[@]}"; do
    mkdir -p "$dir"
done

# Ensure /app itself is owned by PUID:PGID (needed for uwsgi socket creation)
if [ "$(id -u)" = "0" ] && [ -d "/app" ]; then
    if [ "$(stat -c '%u:%g' /app)" != "$PUID:$PGID" ]; then
        echo "Fixing ownership for /app (non-recursive)"
        chown $PUID:$PGID /app
    fi
fi

sed -i "s/NGINX_PORT/${DISPATCHARR_PORT}/g" /etc/nginx/sites-enabled/default

# NOTE: mac doesn't run as root, so only manage permissions
# if this script is running as root
if [ "$(id -u)" = "0" ]; then
    # Needs to own ALL of /data except db, we handle that below
    chown -R $PUID:$PGID /data
    chown -R $PUID:$PGID /app
    # Fix data directories (non-recursive to avoid touching user files)
    for dir in "${DATA_DIRS[@]}"; do
        if [ -d "$dir" ] && [ "$(stat -c '%u:%g' "$dir")" != "$PUID:$PGID" ]; then
            echo "Fixing ownership for $dir"
            chown $PUID:$PGID "$dir"
        fi
    done

    # Fix app directories (recursive since they're managed by the app)
    for dir in "${APP_DIRS[@]}"; do
        if [ -d "$dir" ] && [ "$(stat -c '%u:%g' "$dir")" != "$PUID:$PGID" ]; then
            echo "Fixing ownership for $dir (recursive)"
            chown -R $PUID:$PGID "$dir"
        fi
    done

    # Database permissions
    if [ -d /data/db ] && [ "$(stat -c '%u' /data/db)" != "$(id -u postgres)" ]; then
        echo "Fixing ownership for /data/db"
        chown -R postgres:postgres /data/db
    fi

    # Permissions
    chown -R postgres:postgres /data/db
    chmod +x /data
    fi
fi
@@ -642,6 +642,16 @@ export const WebsocketProvider = ({ children }) => {
        }
        break;

      case 'epg_data_created':
        // A new EPG data entry was created (e.g., for a dummy EPG)
        // Fetch EPG data so the channel form can immediately assign it
        try {
          await fetchEPGData();
        } catch (e) {
          console.warn('Failed to refresh EPG data after creation:', e);
        }
        break;

      case 'stream_rehash':
        // Handle stream rehash progress updates
        if (parsedEvent.data.action === 'starting') {
@@ -1118,6 +1118,21 @@ export default class API {
    }
  }

  static async getTimezones() {
    try {
      const response = await request(`${host}/api/core/timezones/`);
      return response;
    } catch (e) {
      errorNotification('Failed to retrieve timezones', e);
      // Return fallback data instead of throwing
      return {
        timezones: ['UTC', 'US/Eastern', 'US/Central', 'US/Mountain', 'US/Pacific'],
        grouped: {},
        count: 5
      };
    }
  }

  static async getStreamProfiles() {
    try {
      const response = await request(`${host}/api/core/streamprofiles/`);
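Outside the frontend, the same endpoint can be exercised directly; a hedged sketch using requests, where BASE_URL and TOKEN are placeholders for a real deployment's host and JWT access token:

import requests

BASE_URL = "http://localhost:9191"  # placeholder host
TOKEN = "..."                       # placeholder JWT access token

resp = requests.get(
    f"{BASE_URL}/api/core/timezones/",
    headers={"Authorization": f"Bearer {TOKEN}"},
    timeout=10,
)
resp.raise_for_status()
data = resp.json()
print(data["count"], data["timezones"][:3])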
frontend/src/components/forms/DummyEPG.jsx (new file, 1005 lines)
File diff suppressed because it is too large
@@ -1,31 +1,22 @@
// Modal.js
import React, { useState, useEffect } from 'react';
import API from '../../api';
import useEPGsStore from '../../store/epgs';
import {
  LoadingOverlay,
  TextInput,
  Button,
  Checkbox,
  Modal,
  Flex,
  NativeSelect,
  NumberInput,
  Space,
  Grid,
  Group,
  FileInput,
  Title,
  Text,
  Divider,
  Stack,
  Group,
  Divider,
  Box,
  Text,
} from '@mantine/core';
import { isNotEmpty, useForm } from '@mantine/form';

const EPG = ({ epg = null, isOpen, onClose }) => {
  const epgs = useEPGsStore((state) => state.epgs);
  // Remove the file state and handler since we're not supporting file uploads
  const [sourceType, setSourceType] = useState('xmltv');

  const form = useForm({

@@ -49,13 +40,9 @@ const EPG = ({ epg = null, isOpen, onClose }) => {
    const values = form.getValues();

    if (epg?.id) {
      // Remove file from API call
      await API.updateEPG({ id: epg.id, ...values });
    } else {
      // Remove file from API call
      await API.addEPG({
        ...values,
      });
      await API.addEPG(values);
    }

    form.reset();

@@ -73,11 +60,12 @@ const EPG = ({ epg = null, isOpen, onClose }) => {
      refresh_interval: epg.refresh_interval,
    };
    form.setValues(values);
    setSourceType(epg.source_type); // Update source type state
    setSourceType(epg.source_type);
  } else {
    form.reset();
    setSourceType('xmltv'); // Reset to xmltv
    setSourceType('xmltv');
  }
  // eslint-disable-next-line react-hooks/exhaustive-deps
}, [epg]);

// Function to handle source type changes

@@ -156,7 +144,7 @@ const EPG = ({ epg = null, isOpen, onClose }) => {
  description="API key for services that require authentication"
  {...form.getInputProps('api_key')}
  key={form.key('api_key')}
  disabled={sourceType !== 'schedules_direct'} // Use the state variable
  disabled={sourceType !== 'schedules_direct'}
/>

{/* Put checkbox at the same level as Refresh Interval */}

@@ -171,8 +159,8 @@ const EPG = ({ epg = null, isOpen, onClose }) => {
  style={{
    display: 'flex',
    alignItems: 'center',
    height: '30px', // Reduced height
    marginTop: '-4px', // Slight negative margin to move it up
    height: '30px',
    marginTop: '-4px',
  }}
>
  <Checkbox
@@ -29,6 +29,7 @@ import { FixedSizeList as List } from 'react-window';
import LazyLogo from '../LazyLogo';
import LogoForm from './Logo';
import logo from '../../images/logo.png';
import API from '../../api';

// Custom item component for MultiSelect with tooltip
const OptionWithTooltip = forwardRef(

@@ -53,6 +54,7 @@ const LiveGroupFilter = ({
  const streamProfiles = useStreamProfilesStore((s) => s.profiles);
  const fetchStreamProfiles = useStreamProfilesStore((s) => s.fetchProfiles);
  const [groupFilter, setGroupFilter] = useState('');
  const [epgSources, setEpgSources] = useState([]);

  // Logo selection functionality
  const {

@@ -75,6 +77,19 @@ const LiveGroupFilter = ({
  }
}, [streamProfiles.length, fetchStreamProfiles]);

// Fetch EPG sources when component mounts
useEffect(() => {
  const fetchEPGSources = async () => {
    try {
      const sources = await API.getEPGs();
      setEpgSources(sources || []);
    } catch (error) {
      console.error('Failed to fetch EPG sources:', error);
    }
  };
  fetchEPGSources();
}, []);

useEffect(() => {
  if (Object.keys(channelGroups).length === 0) {
    return;

@@ -298,10 +313,10 @@ const LiveGroupFilter = ({
  placeholder="Select options..."
  data={[
    {
      value: 'force_dummy_epg',
      label: 'Force Dummy EPG',
      value: 'force_epg',
      label: 'Force EPG Source',
      description:
        'Assign a dummy EPG to all channels in this group if no EPG is matched',
        'Force a specific EPG source for all auto-synced channels, or disable EPG assignment entirely',
    },
    {
      value: 'group_override',

@@ -349,8 +364,12 @@ const LiveGroupFilter = ({
  itemComponent={OptionWithTooltip}
  value={(() => {
    const selectedValues = [];
    if (group.custom_properties?.force_dummy_epg) {
      selectedValues.push('force_dummy_epg');
    if (
      group.custom_properties?.custom_epg_id !== undefined ||
      group.custom_properties?.force_dummy_epg
    ) {
      selectedValues.push('force_epg');
    }
    if (
      group.custom_properties?.group_override !==
@@ -409,13 +428,25 @@ const LiveGroupFilter = ({
    ...(state.custom_properties || {}),
  };

  // Handle force_dummy_epg
  if (
    selectedOptions.includes('force_dummy_epg')
  ) {
    newCustomProps.force_dummy_epg = true;
  // Handle force_epg
  if (selectedOptions.includes('force_epg')) {
    // Migrate from old force_dummy_epg if present
    if (
      newCustomProps.force_dummy_epg &&
      newCustomProps.custom_epg_id === undefined
    ) {
      // Migrate: force_dummy_epg=true becomes custom_epg_id=null
      newCustomProps.custom_epg_id = null;
      delete newCustomProps.force_dummy_epg;
    } else if (
      newCustomProps.custom_epg_id === undefined
    ) {
      // New configuration: initialize with null (no EPG/default dummy)
      newCustomProps.custom_epg_id = null;
    }
  } else {
    delete newCustomProps.force_dummy_epg;
    // Only remove custom_epg_id when deselected
    delete newCustomProps.custom_epg_id;
  }

  // Handle group_override
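The legacy-flag migration above is a small state transform; a Python mirror of the same rule, for illustration only (this is not the frontend code):

def migrate_group_props(props: dict, force_epg_selected: bool) -> dict:
    # Mirrors the force_dummy_epg -> custom_epg_id migration shown above.
    props = dict(props)  # copy; the UI builds a fresh object the same way
    if force_epg_selected:
        if props.get('force_dummy_epg') and 'custom_epg_id' not in props:
            props['custom_epg_id'] = None  # old flag becomes "no EPG/default dummy"
            props.pop('force_dummy_epg', None)
        elif 'custom_epg_id' not in props:
            props['custom_epg_id'] = None  # newly enabled: start with no EPG
    else:
        props.pop('force_dummy_epg', None)
        props.pop('custom_epg_id', None)
    return props

print(migrate_group_props({'force_dummy_epg': True}, True))  # {'custom_epg_id': None}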
@@ -1088,6 +1119,79 @@ const LiveGroupFilter = ({
      </Button>
    </Box>
  )}

  {/* Show EPG selector when force_epg is selected */}
  {(group.custom_properties?.custom_epg_id !== undefined ||
    group.custom_properties?.force_dummy_epg) && (
    <Tooltip
      label="Force a specific EPG source for all auto-synced channels in this group. For dummy EPGs, all channels will share the same EPG data. For regular EPG sources (XMLTV, Schedules Direct), channels will be matched by their tvg_id within that source. Select 'No EPG' to disable EPG assignment."
      withArrow
    >
      <Select
        label="EPG Source"
        placeholder="No EPG (Disabled)"
        value={(() => {
          // Handle migration from force_dummy_epg
          if (
            group.custom_properties?.custom_epg_id !== undefined
          ) {
            // Convert to string, use '0' for null/no EPG
            return group.custom_properties.custom_epg_id === null
              ? '0'
              : group.custom_properties.custom_epg_id.toString();
          } else if (
            group.custom_properties?.force_dummy_epg
          ) {
            // Show "No EPG" for old force_dummy_epg configs
            return '0';
          }
          return '0';
        })()}
        onChange={(value) => {
          // Convert back: '0' means no EPG (null)
          const newValue =
            value === '0' ? null : parseInt(value);
          setGroupStates(
            groupStates.map((state) => {
              if (
                state.channel_group === group.channel_group
              ) {
                return {
                  ...state,
                  custom_properties: {
                    ...state.custom_properties,
                    custom_epg_id: newValue,
                  },
                };
              }
              return state;
            })
          );
        }}
        data={[
          { value: '0', label: 'No EPG (Disabled)' },
          ...epgSources.map((source) => ({
            value: source.id.toString(),
            label: `${source.name} (${
              source.source_type === 'dummy'
                ? 'Dummy'
                : source.source_type === 'xmltv'
                  ? 'XMLTV'
                  : source.source_type === 'schedules_direct'
                    ? 'Schedules Direct'
                    : source.source_type
            })`,
          })),
        ]}
        clearable
        searchable
        size="xs"
      />
    </Tooltip>
  )}
</>
)}
</Stack>
@@ -25,10 +25,22 @@ const Stream = ({ stream = null, isOpen, onClose }) => {
  }),
  onSubmit: async (values, { setSubmitting, resetForm }) => {
    console.log(values);

    // Convert string IDs back to integers for the API
    const payload = {
      ...values,
      channel_group: values.channel_group
        ? parseInt(values.channel_group, 10)
        : null,
      stream_profile_id: values.stream_profile_id
        ? parseInt(values.stream_profile_id, 10)
        : null,
    };

    if (stream?.id) {
      await API.updateStream({ id: stream.id, ...values });
      await API.updateStream({ id: stream.id, ...payload });
    } else {
      await API.addStream(values);
      await API.addStream(payload);
    }

    resetForm();

@@ -42,12 +54,18 @@ const Stream = ({ stream = null, isOpen, onClose }) => {
  formik.setValues({
    name: stream.name,
    url: stream.url,
    channel_group: stream.channel_group,
    stream_profile_id: stream.stream_profile_id,
    // Convert IDs to strings to match Select component values
    channel_group: stream.channel_group
      ? String(stream.channel_group)
      : null,
    stream_profile_id: stream.stream_profile_id
      ? String(stream.stream_profile_id)
      : '',
  });
} else {
  formik.resetForm();
}
// eslint-disable-next-line react-hooks/exhaustive-deps
}, [stream]);

if (!isOpen) {
@@ -2,6 +2,7 @@ import React, { useEffect, useMemo, useRef, useState } from 'react';
import API from '../../api';
import useEPGsStore from '../../store/epgs';
import EPGForm from '../forms/EPG';
import DummyEPGForm from '../forms/DummyEPG';
import { TableHelper } from '../../helpers';
import {
  ActionIcon,

@@ -17,6 +18,7 @@
  Progress,
  Stack,
  Group,
  Menu,
} from '@mantine/core';
import { notifications } from '@mantine/notifications';
import {

@@ -27,6 +29,7 @@
  SquareMinus,
  SquarePen,
  SquarePlus,
  ChevronDown,
} from 'lucide-react';
import dayjs from 'dayjs';
import useSettingsStore from '../../store/settings';

@@ -62,6 +65,7 @@
const RowActions = ({ tableSize, row, editEPG, deleteEPG, refreshEPG }) => {
  const iconSize =
    tableSize == 'default' ? 'sm' : tableSize == 'compact' ? 'xs' : 'md';
  const isDummyEPG = row.original.source_type === 'dummy';

  return (
    <>

@@ -88,7 +92,7 @@
  size={iconSize} // Use standardized icon size
  color="blue.5"
  onClick={() => refreshEPG(row.original.id)}
  disabled={!row.original.is_active}
  disabled={!row.original.is_active || isDummyEPG}
>
  <RefreshCcw size={tableSize === 'compact' ? 16 : 18} />{' '}
  {/* Small icon size */}

@@ -100,6 +104,7 @@
const EPGsTable = () => {
  const [epg, setEPG] = useState(null);
  const [epgModalOpen, setEPGModalOpen] = useState(false);
  const [dummyEpgModalOpen, setDummyEpgModalOpen] = useState(false);
  const [rowSelection, setRowSelection] = useState([]);
  const [confirmDeleteOpen, setConfirmDeleteOpen] = useState(false);
  const [deleteTarget, setDeleteTarget] = useState(null);

@@ -224,11 +229,14 @@
  size: 100,
  cell: ({ row }) => {
    const data = row.original;
    const isDummyEPG = data.source_type === 'dummy';

    // Dummy EPGs always show idle status
    const displayStatus = isDummyEPG ? 'idle' : data.status;

    // Always show status text, even when there's progress happening
    return (
      <Text size="sm" fw={500} c={getStatusColor(data.status)}>
        {formatStatusText(data.status)}
      <Text size="sm" fw={500} c={getStatusColor(displayStatus)}>
        {formatStatusText(displayStatus)}
      </Text>
    );
  },

@@ -241,6 +249,12 @@
  grow: true,
  cell: ({ row }) => {
    const data = row.original;
    const isDummyEPG = data.source_type === 'dummy';

    // Dummy EPGs don't have status messages
    if (isDummyEPG) {
      return null;
    }

    // Check if there's an active progress for this EPG - show progress first if active
    if (

@@ -305,15 +319,19 @@
  mantineTableBodyCellProps: {
    align: 'left',
  },
  cell: ({ row, cell }) => (
    <Box sx={{ display: 'flex', justifyContent: 'center' }}>
      <Switch
        size="xs"
        checked={cell.getValue()}
        onChange={() => toggleActive(row.original)}
      />
    </Box>
  ),
  cell: ({ row, cell }) => {
    const isDummyEPG = row.original.source_type === 'dummy';
    return (
      <Box sx={{ display: 'flex', justifyContent: 'center' }}>
        <Switch
          size="xs"
          checked={cell.getValue()}
          onChange={() => toggleActive(row.original)}
          disabled={isDummyEPG}
        />
      </Box>
    );
  },
},
{
  id: 'actions',

@@ -329,9 +347,24 @@

const editEPG = async (epg = null) => {
  setEPG(epg);
  // Open the appropriate modal based on source type
  if (epg?.source_type === 'dummy') {
    setDummyEpgModalOpen(true);
  } else {
    setEPGModalOpen(true);
  }
};

const createStandardEPG = () => {
  setEPG(null);
  setEPGModalOpen(true);
};

const createDummyEPG = () => {
  setEPG(null);
  setDummyEpgModalOpen(true);
};

const deleteEPG = async (id) => {
  // Get EPG details for the confirmation dialog
  const epgObj = epgs[id];

@@ -365,6 +398,11 @@
  setEPGModalOpen(false);
};

const closeDummyEPGForm = () => {
  setEPG(null);
  setDummyEpgModalOpen(false);
};

useEffect(() => {
  setData(
    Object.values(epgs).sort((a, b) => {

@@ -522,21 +560,31 @@
>
  EPGs
</Text>
<Button
  leftSection={<SquarePlus size={18} />}
  variant="light"
  size="xs"
  onClick={() => editEPG()}
  p={5}
  color="green"
  style={{
    borderWidth: '1px',
    borderColor: 'green',
    color: 'white',
  }}
>
  Add EPG
</Button>
<Menu shadow="md" width={200}>
  <Menu.Target>
    <Button
      leftSection={<SquarePlus size={18} />}
      rightSection={<ChevronDown size={16} />}
      variant="light"
      size="xs"
      p={5}
      color="green"
      style={{
        borderWidth: '1px',
        borderColor: 'green',
        color: 'white',
      }}
    >
      Add EPG
    </Button>
  </Menu.Target>
  <Menu.Dropdown>
    <Menu.Item onClick={createStandardEPG}>
      Standard EPG Source
    </Menu.Item>
    <Menu.Item onClick={createDummyEPG}>Dummy EPG Source</Menu.Item>
  </Menu.Dropdown>
</Menu>
</Flex>

<Paper

@@ -579,6 +627,11 @@
</Box>

<EPGForm epg={epg} isOpen={epgModalOpen} onClose={closeEPGForm} />
<DummyEPGForm
  epg={epg}
  isOpen={dummyEpgModalOpen}
  onClose={closeDummyEPGForm}
/>

<ConfirmationDialog
  opened={confirmDeleteOpen}
@@ -250,6 +250,7 @@ export default function TVChannelGuide({ startDate, endDate }) {
  const logos = useLogosStore((s) => s.logos);

  const tvgsById = useEPGsStore((s) => s.tvgsById);
  const epgs = useEPGsStore((s) => s.epgs);

  const [programs, setPrograms] = useState([]);
  const [guideChannels, setGuideChannels] = useState([]);

@@ -400,8 +401,8 @@ export default function TVChannelGuide({ startDate, endDate }) {
    : defaultEnd;

  const channelIdByTvgId = useMemo(
    () => buildChannelIdMap(guideChannels, tvgsById),
    [guideChannels, tvgsById]
    () => buildChannelIdMap(guideChannels, tvgsById, epgs),
    [guideChannels, tvgsById, epgs]
  );

  const channelById = useMemo(() => {

@@ -1476,6 +1477,7 @@ export default function TVChannelGuide({ startDate, endDate }) {

  {filteredChannels.length > 0 ? (
    <VariableSizeList
      className="guide-list-outer"
      height={virtualizedHeight}
      width={virtualizedWidth}
      itemCount={filteredChannels.length}
@@ -67,3 +67,14 @@
.tv-guide {
  position: relative;
}

/* Hide bottom horizontal scrollbar for the guide's virtualized list only */
.tv-guide .guide-list-outer {
  /* Prevent horizontal page scrollbar while preserving internal scroll behavior */
  overflow-x: hidden !important;
}

/* Also hide scrollbars visually across browsers for the outer container */
.tv-guide .guide-list-outer::-webkit-scrollbar {
  height: 0px;
}
@@ -3,13 +3,30 @@ import dayjs from 'dayjs';
export const PROGRAM_HEIGHT = 90;
export const EXPANDED_PROGRAM_HEIGHT = 180;

export function buildChannelIdMap(channels, tvgsById) {
export function buildChannelIdMap(channels, tvgsById, epgs = {}) {
  const map = new Map();
  channels.forEach((channel) => {
    const tvgRecord = channel.epg_data_id
      ? tvgsById[channel.epg_data_id]
      : null;
    const tvgId = tvgRecord?.tvg_id ?? channel.uuid;

    // For dummy EPG sources, ALWAYS use channel UUID to ensure unique programs per channel
    // This prevents multiple channels with the same dummy EPG from showing identical data
    let tvgId;
    if (tvgRecord?.epg_source) {
      const epgSource = epgs[tvgRecord.epg_source];
      if (epgSource?.source_type === 'dummy') {
        // Dummy EPG: use channel UUID for uniqueness
        tvgId = channel.uuid;
      } else {
        // Regular EPG: use tvg_id from EPG data, or fall back to channel UUID
        tvgId = tvgRecord.tvg_id ?? channel.uuid;
      }
    } else {
      // No EPG data: use channel UUID
      tvgId = channel.uuid;
    }

    if (tvgId) {
      const tvgKey = String(tvgId);
      if (!map.has(tvgKey)) {
@@ -16,6 +16,7 @@ django-cors-headers
djangorestframework-simplejwt
m3u8
rapidfuzz==3.13.0
regex # Required by transformers but also used for advanced regex features
tzlocal

# PyTorch dependencies (CPU only)