diff --git a/apps/channels/migrations/0029_backfill_custom_stream_hashes.py b/apps/channels/migrations/0029_backfill_custom_stream_hashes.py new file mode 100644 index 00000000..3e270be2 --- /dev/null +++ b/apps/channels/migrations/0029_backfill_custom_stream_hashes.py @@ -0,0 +1,54 @@ +# Generated migration to backfill stream_hash for existing custom streams + +from django.db import migrations +import hashlib + + +def backfill_custom_stream_hashes(apps, schema_editor): + """ + Generate stream_hash for all custom streams that don't have one. + Uses stream ID to create a stable hash that won't change when name/url is edited. + """ + Stream = apps.get_model('dispatcharr_channels', 'Stream') + + custom_streams_without_hash = Stream.objects.filter( + is_custom=True, + stream_hash__isnull=True + ) + + updated_count = 0 + for stream in custom_streams_without_hash: + # Generate a stable hash using the stream's ID + # This ensures the hash never changes even if name/url is edited + unique_string = f"custom_stream_{stream.id}" + stream.stream_hash = hashlib.sha256(unique_string.encode()).hexdigest() + stream.save(update_fields=['stream_hash']) + updated_count += 1 + + if updated_count > 0: + print(f"Backfilled stream_hash for {updated_count} custom streams") + else: + print("No custom streams needed stream_hash backfill") + + +def reverse_backfill(apps, schema_editor): + """ + Reverse migration - clear stream_hash for custom streams. + Note: This will break preview functionality for custom streams. 
+ """ + Stream = apps.get_model('dispatcharr_channels', 'Stream') + + custom_streams = Stream.objects.filter(is_custom=True) + count = custom_streams.update(stream_hash=None) + print(f"Cleared stream_hash for {count} custom streams") + + +class Migration(migrations.Migration): + + dependencies = [ + ('dispatcharr_channels', '0028_channel_created_at_channel_updated_at'), + ] + + operations = [ + migrations.RunPython(backfill_custom_stream_hashes, reverse_backfill), + ] diff --git a/apps/channels/models.py b/apps/channels/models.py index 238bdb33..3b7ed6e3 100644 --- a/apps/channels/models.py +++ b/apps/channels/models.py @@ -152,8 +152,14 @@ class Stream(models.Model): stream = cls.objects.create(**fields_to_update) return stream, True # True means it was created - # @TODO: honor stream's stream profile def get_stream_profile(self): + """ + Get the stream profile for this stream. + Uses the stream's own profile if set, otherwise returns the default. + """ + if self.stream_profile: + return self.stream_profile + stream_profile = StreamProfile.objects.get( id=CoreSettings.get_default_stream_profile_id() ) diff --git a/apps/channels/signals.py b/apps/channels/signals.py index d7a7414d..27b361ba 100644 --- a/apps/channels/signals.py +++ b/apps/channels/signals.py @@ -45,6 +45,20 @@ def set_default_m3u_account(sender, instance, **kwargs): else: raise ValueError("No default M3UAccount found.") +@receiver(post_save, sender=Stream) +def generate_custom_stream_hash(sender, instance, created, **kwargs): + """ + Generate a stable stream_hash for custom streams after creation. + Uses the stream's ID to ensure the hash never changes even if name/url is edited. 
+ """ + if instance.is_custom and not instance.stream_hash and created: + import hashlib + # Use stream ID for a stable, unique hash that never changes + unique_string = f"custom_stream_{instance.id}" + instance.stream_hash = hashlib.sha256(unique_string.encode()).hexdigest() + # Use update to avoid triggering signals again + Stream.objects.filter(id=instance.id).update(stream_hash=instance.stream_hash) + @receiver(post_save, sender=Channel) def refresh_epg_programs(sender, instance, created, **kwargs): """ diff --git a/apps/epg/api_views.py b/apps/epg/api_views.py index f3248677..2fc5a743 100644 --- a/apps/epg/api_views.py +++ b/apps/epg/api_views.py @@ -147,23 +147,37 @@ class EPGGridAPIView(APIView): f"EPGGridAPIView: Found {count} program(s), including recently ended, currently running, and upcoming shows." ) - # Generate dummy programs for channels that have no EPG data + # Generate dummy programs for channels that have no EPG data OR dummy EPG sources from apps.channels.models import Channel + from apps.epg.models import EPGSource from django.db.models import Q - # Get channels with no EPG data + # Get channels with no EPG data at all (standard dummy) channels_without_epg = Channel.objects.filter(Q(epg_data__isnull=True)) - channels_count = channels_without_epg.count() - # Log more detailed information about channels missing EPG data - if channels_count > 0: + # Get channels with custom dummy EPG sources (generate on-demand with patterns) + channels_with_custom_dummy = Channel.objects.filter( + epg_data__epg_source__source_type='dummy' + ).distinct() + + # Log what we found + without_count = channels_without_epg.count() + custom_count = channels_with_custom_dummy.count() + + if without_count > 0: channel_names = [f"{ch.name} (ID: {ch.id})" for ch in channels_without_epg] - logger.warning( - f"EPGGridAPIView: Missing EPG data for these channels: {', '.join(channel_names)}" + logger.debug( + f"EPGGridAPIView: Channels needing standard dummy EPG: {', 
'.join(channel_names)}" + ) + + if custom_count > 0: + channel_names = [f"{ch.name} (ID: {ch.id})" for ch in channels_with_custom_dummy] + logger.debug( + f"EPGGridAPIView: Channels needing custom dummy EPG: {', '.join(channel_names)}" ) logger.debug( - f"EPGGridAPIView: Found {channels_count} channels with no EPG data." + f"EPGGridAPIView: Found {without_count} channels needing standard dummy, {custom_count} needing custom dummy EPG." ) # Serialize the regular programs @@ -205,12 +219,91 @@ class EPGGridAPIView(APIView): # Generate and append dummy programs dummy_programs = [] - for channel in channels_without_epg: - # Use the channel UUID as tvg_id for dummy programs to match in the guide + + # Import the function from output.views + from apps.output.views import generate_dummy_programs as gen_dummy_progs + + # Handle channels with CUSTOM dummy EPG sources (with patterns) + for channel in channels_with_custom_dummy: + # For dummy EPGs, ALWAYS use channel UUID to ensure unique programs per channel + # This prevents multiple channels assigned to the same dummy EPG from showing identical data + # Each channel gets its own unique program data even if they share the same EPG source dummy_tvg_id = str(channel.uuid) try: - # Create programs every 4 hours for the next 24 hours + # Get the custom dummy EPG source + epg_source = channel.epg_data.epg_source if channel.epg_data else None + + logger.debug(f"Generating custom dummy programs for channel: {channel.name} (ID: {channel.id})") + + # Determine which name to parse based on custom properties + name_to_parse = channel.name + if epg_source and epg_source.custom_properties: + custom_props = epg_source.custom_properties + name_source = custom_props.get('name_source') + + if name_source == 'stream': + # Get the stream index (1-based from user, convert to 0-based) + stream_index = custom_props.get('stream_index', 1) - 1 + + # Get streams ordered by channelstream order + channel_streams = 
channel.streams.all().order_by('channelstream__order') + + if channel_streams.exists() and 0 <= stream_index < channel_streams.count(): + stream = list(channel_streams)[stream_index] + name_to_parse = stream.name + logger.debug(f"Using stream name for parsing: {name_to_parse} (stream index: {stream_index})") + else: + logger.warning(f"Stream index {stream_index} not found for channel {channel.name}, falling back to channel name") + elif name_source == 'channel': + logger.debug(f"Using channel name for parsing: {name_to_parse}") + + # Generate programs using custom patterns from the dummy EPG source + # Use the same tvg_id that will be set in the program data + generated = gen_dummy_progs( + channel_id=dummy_tvg_id, + channel_name=name_to_parse, + num_days=1, + program_length_hours=4, + epg_source=epg_source + ) + + # Custom dummy should always return data (either from patterns or fallback) + if generated: + logger.debug(f"Generated {len(generated)} custom dummy programs for {channel.name}") + # Convert generated programs to API format + for program in generated: + dummy_program = { + "id": f"dummy-custom-{channel.id}-{program['start_time'].hour}", + "epg": {"tvg_id": dummy_tvg_id, "name": channel.name}, + "start_time": program['start_time'].isoformat(), + "end_time": program['end_time'].isoformat(), + "title": program['title'], + "description": program['description'], + "tvg_id": dummy_tvg_id, + "sub_title": None, + "custom_properties": None, + } + dummy_programs.append(dummy_program) + else: + logger.warning(f"No programs generated for custom dummy EPG channel: {channel.name}") + + except Exception as e: + logger.error( + f"Error creating custom dummy programs for channel {channel.name} (ID: {channel.id}): {str(e)}" + ) + + # Handle channels with NO EPG data (standard dummy with humorous descriptions) + for channel in channels_without_epg: + # For channels with no EPG, use UUID to ensure uniqueness (matches frontend logic) + # The frontend uses: tvgRecord?.tvg_id 
?? channel.uuid + # Since there's no EPG data, it will fall back to UUID + dummy_tvg_id = str(channel.uuid) + + try: + logger.debug(f"Generating standard dummy programs for channel: {channel.name} (ID: {channel.id})") + + # Create programs every 4 hours for the next 24 hours with humorous descriptions for hour_offset in range(0, 24, 4): # Use timedelta for time arithmetic instead of replace() to avoid hour overflow start_time = now + timedelta(hours=hour_offset) @@ -238,7 +331,7 @@ class EPGGridAPIView(APIView): # Create a dummy program in the same format as regular programs dummy_program = { - "id": f"dummy-{channel.id}-{hour_offset}", # Create a unique ID + "id": f"dummy-standard-{channel.id}-{hour_offset}", "epg": {"tvg_id": dummy_tvg_id, "name": channel.name}, "start_time": start_time.isoformat(), "end_time": end_time.isoformat(), @@ -252,7 +345,7 @@ class EPGGridAPIView(APIView): except Exception as e: logger.error( - f"Error creating dummy programs for channel {channel.name} (ID: {channel.id}): {str(e)}" + f"Error creating standard dummy programs for channel {channel.name} (ID: {channel.id}): {str(e)}" ) # Combine regular and dummy programs @@ -284,7 +377,22 @@ class EPGImportAPIView(APIView): ) def post(self, request, format=None): logger.info("EPGImportAPIView: Received request to import EPG data.") - refresh_epg_data.delay(request.data.get("id", None)) # Trigger Celery task + epg_id = request.data.get("id", None) + + # Check if this is a dummy EPG source + try: + from .models import EPGSource + epg_source = EPGSource.objects.get(id=epg_id) + if epg_source.source_type == 'dummy': + logger.info(f"EPGImportAPIView: Skipping refresh for dummy EPG source {epg_id}") + return Response( + {"success": False, "message": "Dummy EPG sources do not require refreshing."}, + status=status.HTTP_400_BAD_REQUEST, + ) + except EPGSource.DoesNotExist: + pass # Let the task handle the missing source + + refresh_epg_data.delay(epg_id) # Trigger Celery task 
logger.info("EPGImportAPIView: Task dispatched to refresh EPG data.") return Response( {"success": True, "message": "EPG data import initiated."}, @@ -308,3 +416,4 @@ class EPGDataViewSet(viewsets.ReadOnlyModelViewSet): return [perm() for perm in permission_classes_by_action[self.action]] except KeyError: return [Authenticated()] + diff --git a/apps/epg/migrations/0018_epgsource_custom_properties_and_more.py b/apps/epg/migrations/0018_epgsource_custom_properties_and_more.py new file mode 100644 index 00000000..70ebb214 --- /dev/null +++ b/apps/epg/migrations/0018_epgsource_custom_properties_and_more.py @@ -0,0 +1,23 @@ +# Generated by Django 5.2.4 on 2025-10-17 17:02 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('epg', '0017_alter_epgsource_url'), + ] + + operations = [ + migrations.AddField( + model_name='epgsource', + name='custom_properties', + field=models.JSONField(blank=True, default=dict, help_text='Custom properties for dummy EPG configuration (regex patterns, timezone, duration, etc.)', null=True), + ), + migrations.AlterField( + model_name='epgsource', + name='source_type', + field=models.CharField(choices=[('xmltv', 'XMLTV URL'), ('schedules_direct', 'Schedules Direct API'), ('dummy', 'Custom Dummy EPG')], max_length=20), + ), + ] diff --git a/apps/epg/models.py b/apps/epg/models.py index da6ac8e6..6c70add2 100644 --- a/apps/epg/models.py +++ b/apps/epg/models.py @@ -8,6 +8,7 @@ class EPGSource(models.Model): SOURCE_TYPE_CHOICES = [ ('xmltv', 'XMLTV URL'), ('schedules_direct', 'Schedules Direct API'), + ('dummy', 'Custom Dummy EPG'), ] STATUS_IDLE = 'idle' @@ -38,6 +39,12 @@ class EPGSource(models.Model): refresh_task = models.ForeignKey( PeriodicTask, on_delete=models.SET_NULL, null=True, blank=True ) + custom_properties = models.JSONField( + default=dict, + blank=True, + null=True, + help_text="Custom properties for dummy EPG configuration (regex patterns, timezone, duration, etc.)" + ) 
status = models.CharField( max_length=20, choices=STATUS_CHOICES, diff --git a/apps/epg/serializers.py b/apps/epg/serializers.py index 85186cae..3404cca9 100644 --- a/apps/epg/serializers.py +++ b/apps/epg/serializers.py @@ -28,6 +28,7 @@ class EPGSourceSerializer(serializers.ModelSerializer): 'last_message', 'created_at', 'updated_at', + 'custom_properties', 'epg_data_ids' ] diff --git a/apps/epg/signals.py b/apps/epg/signals.py index e8a004cb..e41d3aaf 100644 --- a/apps/epg/signals.py +++ b/apps/epg/signals.py @@ -1,9 +1,9 @@ from django.db.models.signals import post_save, post_delete, pre_save from django.dispatch import receiver -from .models import EPGSource +from .models import EPGSource, EPGData from .tasks import refresh_epg_data, delete_epg_refresh_task_by_id from django_celery_beat.models import PeriodicTask, IntervalSchedule -from core.utils import is_protected_path +from core.utils import is_protected_path, send_websocket_update import json import logging import os @@ -12,15 +12,77 @@ logger = logging.getLogger(__name__) @receiver(post_save, sender=EPGSource) def trigger_refresh_on_new_epg_source(sender, instance, created, **kwargs): - # Trigger refresh only if the source is newly created and active - if created and instance.is_active: + # Trigger refresh only if the source is newly created, active, and not a dummy EPG + if created and instance.is_active and instance.source_type != 'dummy': refresh_epg_data.delay(instance.id) +@receiver(post_save, sender=EPGSource) +def create_dummy_epg_data(sender, instance, created, **kwargs): + """ + Automatically create EPGData for dummy EPG sources when they are created. + This allows channels to be assigned to dummy EPGs immediately without + requiring a refresh first. 
+ """ + if instance.source_type == 'dummy': + # Ensure dummy EPGs always have idle status and no status message + if instance.status != EPGSource.STATUS_IDLE or instance.last_message: + instance.status = EPGSource.STATUS_IDLE + instance.last_message = None + instance.save(update_fields=['status', 'last_message']) + + # Create a URL-friendly tvg_id from the dummy EPG name + # Replace spaces and special characters with underscores + friendly_tvg_id = instance.name.replace(' ', '_').replace('-', '_') + # Remove any characters that aren't alphanumeric or underscores + friendly_tvg_id = ''.join(c for c in friendly_tvg_id if c.isalnum() or c == '_') + # Convert to lowercase for consistency + friendly_tvg_id = friendly_tvg_id.lower() + # Prefix with 'dummy_' to make it clear this is a dummy EPG + friendly_tvg_id = f"dummy_{friendly_tvg_id}" + + # Create or update the EPGData record + epg_data, data_created = EPGData.objects.get_or_create( + tvg_id=friendly_tvg_id, + epg_source=instance, + defaults={ + 'name': instance.name, + 'icon_url': None + } + ) + + # Update name if it changed and record already existed + if not data_created and epg_data.name != instance.name: + epg_data.name = instance.name + epg_data.save(update_fields=['name']) + + if data_created: + logger.info(f"Auto-created EPGData for dummy EPG source: {instance.name} (ID: {instance.id})") + + # Send websocket update to notify frontend that EPG data has been created + # This allows the channel form to immediately show the new dummy EPG without refreshing + send_websocket_update('updates', 'update', { + 'type': 'epg_data_created', + 'source_id': instance.id, + 'source_name': instance.name, + 'epg_data_id': epg_data.id + }) + else: + logger.debug(f"EPGData already exists for dummy EPG source: {instance.name} (ID: {instance.id})") + @receiver(post_save, sender=EPGSource) def create_or_update_refresh_task(sender, instance, **kwargs): """ Create or update a Celery Beat periodic task when an EPGSource is 
created/updated. + Skip creating tasks for dummy EPG sources as they don't need refreshing. """ + # Skip task creation for dummy EPGs + if instance.source_type == 'dummy': + # If there's an existing task, disable it + if instance.refresh_task: + instance.refresh_task.enabled = False + instance.refresh_task.save(update_fields=['enabled']) + return + task_name = f"epg_source-refresh-{instance.id}" interval, _ = IntervalSchedule.objects.get_or_create( every=int(instance.refresh_interval), @@ -80,7 +142,14 @@ def delete_refresh_task(sender, instance, **kwargs): def update_status_on_active_change(sender, instance, **kwargs): """ When an EPGSource's is_active field changes, update the status accordingly. + For dummy EPGs, always ensure status is idle and no status message. """ + # Dummy EPGs should always be idle with no status message + if instance.source_type == 'dummy': + instance.status = EPGSource.STATUS_IDLE + instance.last_message = None + return + if instance.pk: # Only for existing records, not new ones try: # Get the current record from the database diff --git a/apps/epg/tasks.py b/apps/epg/tasks.py index d9ae5a5d..2028cd98 100644 --- a/apps/epg/tasks.py +++ b/apps/epg/tasks.py @@ -133,8 +133,9 @@ def delete_epg_refresh_task_by_id(epg_id): @shared_task def refresh_all_epg_data(): logger.info("Starting refresh_epg_data task.") - active_sources = EPGSource.objects.filter(is_active=True) - logger.debug(f"Found {active_sources.count()} active EPGSource(s).") + # Exclude dummy EPG sources from refresh - they don't need refreshing + active_sources = EPGSource.objects.filter(is_active=True).exclude(source_type='dummy') + logger.debug(f"Found {active_sources.count()} active EPGSource(s) (excluding dummy EPGs).") for source in active_sources: refresh_epg_data(source.id) @@ -180,6 +181,13 @@ def refresh_epg_data(source_id): gc.collect() return + # Skip refresh for dummy EPG sources - they don't need refreshing + if source.source_type == 'dummy': + logger.info(f"Skipping 
refresh for dummy EPG source {source.name} (ID: {source_id})") + release_task_lock('refresh_epg_data', source_id) + gc.collect() + return + # Continue with the normal processing... logger.info(f"Processing EPGSource: {source.name} (type: {source.source_type})") if source.source_type == 'xmltv': @@ -1943,3 +1951,20 @@ def detect_file_format(file_path=None, content=None): # If we reach here, we couldn't reliably determine the format return format_type, is_compressed, file_extension + + +def generate_dummy_epg(source): + """ + DEPRECATED: This function is no longer used. + + Dummy EPG programs are now generated on-demand when they are requested + (during XMLTV export or EPG grid display), rather than being pre-generated + and stored in the database. + + See: apps/output/views.py - generate_custom_dummy_programs() + + This function remains for backward compatibility but should not be called. + """ + logger.warning(f"generate_dummy_epg() called for {source.name} but this function is deprecated. 
" + f"Dummy EPG programs are now generated on-demand.") + return True diff --git a/apps/m3u/tasks.py b/apps/m3u/tasks.py index 6b1dbdcc..d29c294b 100644 --- a/apps/m3u/tasks.py +++ b/apps/m3u/tasks.py @@ -1548,7 +1548,7 @@ def sync_auto_channels(account_id, scan_start_time=None): # Get force_dummy_epg, group_override, and regex patterns from group custom_properties group_custom_props = {} - force_dummy_epg = False + force_dummy_epg = False # Backward compatibility: legacy option to disable EPG override_group_id = None name_regex_pattern = None name_replace_pattern = None @@ -1558,6 +1558,7 @@ def sync_auto_channels(account_id, scan_start_time=None): channel_sort_reverse = False stream_profile_id = None custom_logo_id = None + custom_epg_id = None # New option: select specific EPG source (takes priority over force_dummy_epg) if group_relation.custom_properties: group_custom_props = group_relation.custom_properties force_dummy_epg = group_custom_props.get("force_dummy_epg", False) @@ -1568,6 +1569,7 @@ def sync_auto_channels(account_id, scan_start_time=None): ) name_match_regex = group_custom_props.get("name_match_regex") channel_profile_ids = group_custom_props.get("channel_profile_ids") + custom_epg_id = group_custom_props.get("custom_epg_id") channel_sort_order = group_custom_props.get("channel_sort_order") channel_sort_reverse = group_custom_props.get( "channel_sort_reverse", False @@ -1828,7 +1830,25 @@ def sync_auto_channels(account_id, scan_start_time=None): # Handle logo updates current_logo = None - if stream.logo_url: + if custom_logo_id: + # Use the custom logo specified in group settings + from apps.channels.models import Logo + try: + current_logo = Logo.objects.get(id=custom_logo_id) + except Logo.DoesNotExist: + logger.warning( + f"Custom logo with ID {custom_logo_id} not found for existing channel, falling back to stream logo" + ) + # Fall back to stream logo if custom logo not found + if stream.logo_url: + current_logo, _ = 
Logo.objects.get_or_create( + url=stream.logo_url, + defaults={ + "name": stream.name or stream.tvg_id or "Unknown" + }, + ) + elif stream.logo_url: + # No custom logo configured, use stream logo from apps.channels.models import Logo current_logo, _ = Logo.objects.get_or_create( @@ -1844,10 +1864,42 @@ def sync_auto_channels(account_id, scan_start_time=None): # Handle EPG data updates current_epg_data = None - if stream.tvg_id and not force_dummy_epg: + if custom_epg_id: + # Use the custom EPG specified in group settings (e.g., a dummy EPG) + from apps.epg.models import EPGSource + try: + epg_source = EPGSource.objects.get(id=custom_epg_id) + # For dummy EPGs, select the first (and typically only) EPGData entry from this source + if epg_source.source_type == 'dummy': + current_epg_data = EPGData.objects.filter( + epg_source=epg_source + ).first() + if not current_epg_data: + logger.warning( + f"No EPGData found for dummy EPG source {epg_source.name} (ID: {custom_epg_id})" + ) + else: + # For non-dummy sources, try to find existing EPGData by tvg_id + if stream.tvg_id: + current_epg_data = EPGData.objects.filter( + tvg_id=stream.tvg_id, + epg_source=epg_source + ).first() + except EPGSource.DoesNotExist: + logger.warning( + f"Custom EPG source with ID {custom_epg_id} not found for existing channel, falling back to auto-match" + ) + # Fall back to auto-match by tvg_id + if stream.tvg_id and not force_dummy_epg: + current_epg_data = EPGData.objects.filter( + tvg_id=stream.tvg_id + ).first() + elif stream.tvg_id and not force_dummy_epg: + # Auto-match EPG by tvg_id (original behavior) current_epg_data = EPGData.objects.filter( tvg_id=stream.tvg_id ).first() + # If force_dummy_epg is True and no custom_epg_id, current_epg_data stays None if existing_channel.epg_data != current_epg_data: existing_channel.epg_data = current_epg_data @@ -1937,14 +1989,55 @@ def sync_auto_channels(account_id, scan_start_time=None): ChannelProfileMembership.objects.bulk_create(memberships) # 
Try to match EPG data - if stream.tvg_id and not force_dummy_epg: + if custom_epg_id: + # Use the custom EPG specified in group settings (e.g., a dummy EPG) + from apps.epg.models import EPGSource + try: + epg_source = EPGSource.objects.get(id=custom_epg_id) + # For dummy EPGs, select the first (and typically only) EPGData entry from this source + if epg_source.source_type == 'dummy': + epg_data = EPGData.objects.filter( + epg_source=epg_source + ).first() + if epg_data: + channel.epg_data = epg_data + channel.save(update_fields=["epg_data"]) + else: + logger.warning( + f"No EPGData found for dummy EPG source {epg_source.name} (ID: {custom_epg_id})" + ) + else: + # For non-dummy sources, try to find existing EPGData by tvg_id + if stream.tvg_id: + epg_data = EPGData.objects.filter( + tvg_id=stream.tvg_id, + epg_source=epg_source + ).first() + if epg_data: + channel.epg_data = epg_data + channel.save(update_fields=["epg_data"]) + except EPGSource.DoesNotExist: + logger.warning( + f"Custom EPG source with ID {custom_epg_id} not found, falling back to auto-match" + ) + # Fall back to auto-match by tvg_id + if stream.tvg_id and not force_dummy_epg: + epg_data = EPGData.objects.filter( + tvg_id=stream.tvg_id + ).first() + if epg_data: + channel.epg_data = epg_data + channel.save(update_fields=["epg_data"]) + elif stream.tvg_id and not force_dummy_epg: + # Auto-match EPG by tvg_id (original behavior) epg_data = EPGData.objects.filter( tvg_id=stream.tvg_id ).first() if epg_data: channel.epg_data = epg_data channel.save(update_fields=["epg_data"]) - elif stream.tvg_id and force_dummy_epg: + elif force_dummy_epg: + # Force dummy EPG with no custom EPG selected (set to None) channel.epg_data = None channel.save(update_fields=["epg_data"]) diff --git a/apps/output/views.py b/apps/output/views.py index 15036710..a695d05f 100644 --- a/apps/output/views.py +++ b/apps/output/views.py @@ -9,7 +9,7 @@ from apps.epg.models import ProgramData from apps.accounts.models import User 
from core.models import CoreSettings, NETWORK_ACCESS from dispatcharr.utils import network_access_allowed -from django.utils import timezone +from django.utils import timezone as django_timezone from django.shortcuts import get_object_or_404 from datetime import datetime, timedelta import html # Add this import for XML escaping @@ -22,6 +22,7 @@ import logging from django.db.models.functions import Lower import os from apps.m3u.utils import calculate_tuner_count +import regex logger = logging.getLogger(__name__) @@ -186,12 +187,42 @@ def generate_m3u(request, profile_name=None, user=None): return response -def generate_dummy_programs(channel_id, channel_name, num_days=1, program_length_hours=4): +def generate_dummy_programs(channel_id, channel_name, num_days=1, program_length_hours=4, epg_source=None): + """ + Generate dummy EPG programs for channels. + + If epg_source is provided and it's a custom dummy EPG with patterns, + use those patterns to generate programs from the channel title. + Otherwise, generate default dummy programs. 
+ + Args: + channel_id: Channel ID for the programs + channel_name: Channel title/name + num_days: Number of days to generate programs for + program_length_hours: Length of each program in hours + epg_source: Optional EPGSource for custom dummy EPG with patterns + + Returns: + List of program dictionaries + """ # Get current time rounded to hour - now = timezone.now() + now = django_timezone.now() now = now.replace(minute=0, second=0, microsecond=0) - # Humorous program descriptions based on time of day + # Check if this is a custom dummy EPG with regex patterns + if epg_source and epg_source.source_type == 'dummy' and epg_source.custom_properties: + custom_programs = generate_custom_dummy_programs( + channel_id, channel_name, now, num_days, + epg_source.custom_properties + ) + # If custom generation succeeded, return those programs + # If it returned empty (pattern didn't match), fall through to default + if custom_programs: + return custom_programs + else: + logger.info(f"Custom pattern didn't match for '{channel_name}', using default dummy EPG") + + # Default humorous program descriptions based on time of day time_descriptions = { (0, 4): [ f"Late Night with {channel_name} - Where insomniacs unite!", @@ -263,6 +294,579 @@ def generate_dummy_programs(channel_id, channel_name, num_days=1, program_length return programs +def generate_custom_dummy_programs(channel_id, channel_name, now, num_days, custom_properties): + """ + Generate programs using custom dummy EPG regex patterns. + + Extracts information from channel title using regex patterns and generates + programs based on the extracted data. + + TIMEZONE HANDLING: + ------------------ + The timezone parameter specifies the timezone of the event times in your channel + titles using standard timezone names (e.g., 'US/Eastern', 'US/Pacific', 'Europe/London'). + DST (Daylight Saving Time) is handled automatically by pytz. 
+ + Examples: + - Channel: "NHL 01: Bruins VS Maple Leafs @ 8:00PM ET" + - Set timezone = "US/Eastern" + - In October (DST): 8:00PM EDT → 12:00AM UTC (automatically uses UTC-4) + - In January (no DST): 8:00PM EST → 1:00AM UTC (automatically uses UTC-5) + + Args: + channel_id: Channel ID for the programs + channel_name: Channel title to parse + now: Current datetime (in UTC) + num_days: Number of days to generate programs for + custom_properties: Dict with title_pattern, time_pattern, templates, etc. + - timezone: Timezone name (e.g., 'US/Eastern') + + Returns: + List of program dictionaries with start_time/end_time in UTC + """ + import pytz + + logger.info(f"Generating custom dummy programs for channel: {channel_name}") + + # Extract patterns from custom properties + title_pattern = custom_properties.get('title_pattern', '') + time_pattern = custom_properties.get('time_pattern', '') + date_pattern = custom_properties.get('date_pattern', '') + + # Get timezone name (e.g., 'US/Eastern', 'US/Pacific', 'Europe/London') + timezone_value = custom_properties.get('timezone', 'UTC') + output_timezone_value = custom_properties.get('output_timezone', '') # Optional: display times in different timezone + program_duration = custom_properties.get('program_duration', 180) # Minutes + title_template = custom_properties.get('title_template', '') + description_template = custom_properties.get('description_template', '') + + # Templates for upcoming/ended programs + upcoming_title_template = custom_properties.get('upcoming_title_template', '') + upcoming_description_template = custom_properties.get('upcoming_description_template', '') + ended_title_template = custom_properties.get('ended_title_template', '') + ended_description_template = custom_properties.get('ended_description_template', '') + + # EPG metadata options + category_string = custom_properties.get('category', '') + # Split comma-separated categories and strip whitespace, filter out empty strings + categories = 
[cat.strip() for cat in category_string.split(',') if cat.strip()] if category_string else [] +    include_date = custom_properties.get('include_date', True) +    include_live = custom_properties.get('include_live', False) + +    # Parse timezone name +    try: +        source_tz = pytz.timezone(timezone_value) +        logger.debug(f"Using timezone: {timezone_value} (DST will be handled automatically)") +    except pytz.exceptions.UnknownTimeZoneError: +        logger.warning(f"Unknown timezone: {timezone_value}, defaulting to UTC") +        source_tz = pytz.utc + +    # Parse output timezone if provided (for display purposes) +    output_tz = None +    if output_timezone_value: +        try: +            output_tz = pytz.timezone(output_timezone_value) +            logger.debug(f"Using output timezone for display: {output_timezone_value}") +        except pytz.exceptions.UnknownTimeZoneError: +            logger.warning(f"Unknown output timezone: {output_timezone_value}, will use source timezone") +            output_tz = None + +    if not title_pattern: +        logger.warning(f"No title_pattern in custom_properties, falling back to default") +        return []  # Return empty, will use default + +    logger.debug(f"Title pattern from DB: {repr(title_pattern)}") + +    # Convert PCRE/JavaScript named groups (?&lt;name&gt;)
to Python format (?P&lt;name&gt;) +    # This handles patterns created with JavaScript regex syntax +    # Use negative lookahead to avoid matching lookbehind (?&lt;=) and negative lookbehind (?&lt;!) +    title_pattern = re.sub(r'\(\?&lt;(?![=!])([^&gt;]+)&gt;', r'(?P&lt;\1&gt;', title_pattern) +    logger.debug(f"Converted title pattern: {repr(title_pattern)}") + +    # Compile regex patterns using the enhanced regex module +    # (supports variable-width lookbehinds like JavaScript) +    try: +        title_regex = regex.compile(title_pattern) +    except Exception as e: +        logger.error(f"Invalid title regex pattern after conversion: {e}") +        logger.error(f"Pattern was: {repr(title_pattern)}") +        return [] + +    time_regex = None +    if time_pattern: +        # Convert PCRE/JavaScript named groups to Python format +        # Use negative lookahead to avoid matching lookbehind (?&lt;=) and negative lookbehind (?&lt;!) +        time_pattern = re.sub(r'\(\?&lt;(?![=!])([^&gt;]+)&gt;', r'(?P&lt;\1&gt;', time_pattern) +        logger.debug(f"Converted time pattern: {repr(time_pattern)}") +        try: +            time_regex = regex.compile(time_pattern) +        except Exception as e: +            logger.warning(f"Invalid time regex pattern after conversion: {e}") +            logger.warning(f"Pattern was: {repr(time_pattern)}") + +    # Compile date regex if provided +    date_regex = None +    if date_pattern: +        # Convert PCRE/JavaScript named groups to Python format +        # Use negative lookahead to avoid matching lookbehind (?&lt;=) and negative lookbehind (?&lt;!) +        date_pattern = re.sub(r'\(\?&lt;(?![=!])([^&gt;]+)&gt;', r'(?P&lt;\1&gt;', date_pattern) +        logger.debug(f"Converted date pattern: {repr(date_pattern)}") +        try: +            date_regex = regex.compile(date_pattern) +        except Exception as e: +            logger.warning(f"Invalid date regex pattern after conversion: {e}") +            logger.warning(f"Pattern was: {repr(date_pattern)}") + +    # Try to match the channel name with the title pattern +    # Use search() instead of match() to match JavaScript behavior where .match() searches anywhere in the string +    title_match = title_regex.search(channel_name) +    if not title_match: +        logger.debug(f"Channel name '{channel_name}' doesn't match title pattern") +        return []  # Return empty, will use default + +    groups = 
title_match.groupdict() + logger.debug(f"Title pattern matched. Groups: {groups}") + + # Helper function to format template with matched groups + def format_template(template, groups): + """Replace {groupname} placeholders with matched group values""" + if not template: + return '' + result = template + for key, value in groups.items(): + result = result.replace(f'{{{key}}}', str(value) if value else '') + return result + + # Extract time from title if time pattern exists + time_info = None + time_groups = {} + if time_regex: + time_match = time_regex.search(channel_name) + if time_match: + time_groups = time_match.groupdict() + try: + hour = int(time_groups.get('hour')) + # Handle optional minute group - could be None if not captured + minute_value = time_groups.get('minute') + minute = int(minute_value) if minute_value is not None else 0 + ampm = time_groups.get('ampm') + ampm = ampm.lower() if ampm else None + + # Determine if this is 12-hour or 24-hour format + if ampm in ('am', 'pm'): + # 12-hour format: convert to 24-hour + if ampm == 'pm' and hour != 12: + hour += 12 + elif ampm == 'am' and hour == 12: + hour = 0 + logger.debug(f"Extracted time (12-hour): {hour}:{minute:02d} {ampm}") + else: + # 24-hour format: hour is already in 24-hour format + # Validate that it's actually a 24-hour time (0-23) + if hour > 23: + logger.warning(f"Invalid 24-hour time: {hour}. 
Must be 0-23.") + hour = hour % 24 # Wrap around just in case + logger.debug(f"Extracted time (24-hour): {hour}:{minute:02d}") + + time_info = {'hour': hour, 'minute': minute} + except (ValueError, TypeError) as e: + logger.warning(f"Error parsing time: {e}") + + # Extract date from title if date pattern exists + date_info = None + date_groups = {} + if date_regex: + date_match = date_regex.search(channel_name) + if date_match: + date_groups = date_match.groupdict() + try: + # Support various date group names: month, day, year + month_str = date_groups.get('month', '') + day = int(date_groups.get('day', 1)) + year = int(date_groups.get('year', now.year)) # Default to current year if not provided + + # Parse month - can be numeric (1-12) or text (Jan, January, etc.) + month = None + if month_str.isdigit(): + month = int(month_str) + else: + # Try to parse text month names + import calendar + month_str_lower = month_str.lower() + # Check full month names + for i, month_name in enumerate(calendar.month_name): + if month_name.lower() == month_str_lower: + month = i + break + # Check abbreviated month names if not found + if month is None: + for i, month_abbr in enumerate(calendar.month_abbr): + if month_abbr.lower() == month_str_lower: + month = i + break + + if month and 1 <= month <= 12 and 1 <= day <= 31: + date_info = {'year': year, 'month': month, 'day': day} + logger.debug(f"Extracted date: {year}-{month:02d}-{day:02d}") + else: + logger.warning(f"Invalid date values: month={month}, day={day}, year={year}") + except (ValueError, TypeError) as e: + logger.warning(f"Error parsing date: {e}") + + # Merge title groups, time groups, and date groups for template formatting + all_groups = {**groups, **time_groups, **date_groups} + + # Add formatted time strings for better display (handles minutes intelligently) + if time_info: + hour_24 = time_info['hour'] + minute = time_info['minute'] + + # If output_timezone is specified, convert the display time to that timezone + 
if output_tz: + # Create a datetime in the source timezone + temp_date = datetime.now(source_tz).replace(hour=hour_24, minute=minute, second=0, microsecond=0) + # Convert to output timezone + temp_date_output = temp_date.astimezone(output_tz) + # Extract converted hour and minute for display + hour_24 = temp_date_output.hour + minute = temp_date_output.minute + logger.debug(f"Converted display time from {source_tz} to {output_tz}: {hour_24}:{minute:02d}") + + # Format 24-hour time string - only include minutes if non-zero + if minute > 0: + all_groups['time24'] = f"{hour_24}:{minute:02d}" + else: + all_groups['time24'] = f"{hour_24:02d}:00" + + # Convert 24-hour to 12-hour format for {time} placeholder + # Note: hour_24 is ALWAYS in 24-hour format at this point (converted earlier if needed) + ampm = 'AM' if hour_24 < 12 else 'PM' + hour_12 = hour_24 + if hour_24 == 0: + hour_12 = 12 + elif hour_24 > 12: + hour_12 = hour_24 - 12 + + # Format 12-hour time string - only include minutes if non-zero + if minute > 0: + all_groups['time'] = f"{hour_12}:{minute:02d} {ampm}" + else: + all_groups['time'] = f"{hour_12} {ampm}" + + # Generate programs + programs = [] + + # If we have extracted time AND date, the event happens on a SPECIFIC date + # If we have time but NO date, generate for multiple days (existing behavior) + # All other days and times show "Upcoming" before or "Ended" after + event_happened = False + + # Determine how many iterations we need + if date_info and time_info: + # Specific date extracted - only generate for that one date + iterations = 1 + logger.debug(f"Date extracted, generating single event for specific date") + else: + # No specific date - use num_days (existing behavior) + iterations = num_days + + for day in range(iterations): + # Start from current time (like standard dummy) instead of midnight + # This ensures programs appear in the guide's current viewing window + day_start = now + timedelta(days=day) + day_end = day_start + 
timedelta(days=1) + + if time_info: + # We have an extracted event time - this is when the MAIN event starts + # The extracted time is in the SOURCE timezone (e.g., 8PM ET) + # We need to convert it to UTC for storage + + # Determine which date to use + if date_info: + # Use the extracted date from the channel title + current_date = datetime( + date_info['year'], + date_info['month'], + date_info['day'] + ).date() + logger.debug(f"Using extracted date: {current_date}") + else: + # No date extracted, use day offset from current time in SOURCE timezone + # This ensures we calculate "today" in the event's timezone, not UTC + # For example: 8:30 PM Central (1:30 AM UTC next day) for a 10 PM ET event + # should use today's date in ET, not tomorrow's date in UTC + now_in_source_tz = now.astimezone(source_tz) + current_date = (now_in_source_tz + timedelta(days=day)).date() + logger.debug(f"No date extracted, using day offset in {source_tz}: {current_date}") + + # Create a naive datetime (no timezone info) representing the event in source timezone + event_start_naive = datetime.combine( + current_date, + datetime.min.time().replace( + hour=time_info['hour'], + minute=time_info['minute'] + ) + ) + + # Use pytz to localize the naive datetime to the source timezone + # This automatically handles DST! 
+ try: + event_start_local = source_tz.localize(event_start_naive) + # Convert to UTC + event_start_utc = event_start_local.astimezone(pytz.utc) + logger.debug(f"Converted {event_start_local} to UTC: {event_start_utc}") + except Exception as e: + logger.error(f"Error localizing time to {source_tz}: {e}") + # Fallback: treat as UTC + event_start_utc = django_timezone.make_aware(event_start_naive, pytz.utc) + + event_end_utc = event_start_utc + timedelta(minutes=program_duration) + + # Pre-generate the main event title and description for reuse + if title_template: + main_event_title = format_template(title_template, all_groups) + else: + title_parts = [] + if 'league' in all_groups and all_groups['league']: + title_parts.append(all_groups['league']) + if 'team1' in all_groups and 'team2' in all_groups: + title_parts.append(f"{all_groups['team1']} vs {all_groups['team2']}") + elif 'title' in all_groups and all_groups['title']: + title_parts.append(all_groups['title']) + main_event_title = ' - '.join(title_parts) if title_parts else channel_name + + if description_template: + main_event_description = format_template(description_template, all_groups) + else: + main_event_description = main_event_title + + + + # Determine if this day is before, during, or after the event + # Event only happens on day 0 (first day) + is_event_day = (day == 0) + + if is_event_day and not event_happened: + # This is THE day the event happens + # Fill programs BEFORE the event + current_time = day_start + + while current_time < event_start_utc: + program_start_utc = current_time + program_end_utc = min(current_time + timedelta(minutes=program_duration), event_start_utc) + + # Use custom upcoming templates if provided, otherwise use defaults + if upcoming_title_template: + upcoming_title = format_template(upcoming_title_template, all_groups) + else: + upcoming_title = main_event_title + + if upcoming_description_template: + upcoming_description = 
format_template(upcoming_description_template, all_groups) + else: + upcoming_description = f"Upcoming: {main_event_description}" + + # Build custom_properties for upcoming programs (only date, no category/live) + program_custom_properties = {} + + # Add date if requested (YYYY-MM-DD format from start time in event timezone) + if include_date: + # Convert UTC time to event timezone for date calculation + local_time = program_start_utc.astimezone(source_tz) + date_str = local_time.strftime('%Y-%m-%d') + program_custom_properties['date'] = date_str + + programs.append({ + "channel_id": channel_id, + "start_time": program_start_utc, + "end_time": program_end_utc, + "title": upcoming_title, + "description": upcoming_description, + "custom_properties": program_custom_properties, + }) + + current_time += timedelta(minutes=program_duration) + + # Add the MAIN EVENT at the extracted time + # Build custom_properties for main event (includes category and live) + main_event_custom_properties = {} + + # Add categories if provided + if categories: + main_event_custom_properties['categories'] = categories + + # Add date if requested (YYYY-MM-DD format from start time in event timezone) + if include_date: + # Convert UTC time to event timezone for date calculation + local_time = event_start_utc.astimezone(source_tz) + date_str = local_time.strftime('%Y-%m-%d') + main_event_custom_properties['date'] = date_str + + # Add live flag if requested + if include_live: + main_event_custom_properties['live'] = True + + programs.append({ + "channel_id": channel_id, + "start_time": event_start_utc, + "end_time": event_end_utc, + "title": main_event_title, + "description": main_event_description, + "custom_properties": main_event_custom_properties, + }) + + event_happened = True + + # Fill programs AFTER the event until end of day + current_time = event_end_utc + + while current_time < day_end: + program_start_utc = current_time + program_end_utc = min(current_time + 
timedelta(minutes=program_duration), day_end) + + # Use custom ended templates if provided, otherwise use defaults + if ended_title_template: + ended_title = format_template(ended_title_template, all_groups) + else: + ended_title = main_event_title + + if ended_description_template: + ended_description = format_template(ended_description_template, all_groups) + else: + ended_description = f"Ended: {main_event_description}" + + # Build custom_properties for ended programs (only date, no category/live) + program_custom_properties = {} + + # Add date if requested (YYYY-MM-DD format from start time in event timezone) + if include_date: + # Convert UTC time to event timezone for date calculation + local_time = program_start_utc.astimezone(source_tz) + date_str = local_time.strftime('%Y-%m-%d') + program_custom_properties['date'] = date_str + + programs.append({ + "channel_id": channel_id, + "start_time": program_start_utc, + "end_time": program_end_utc, + "title": ended_title, + "description": ended_description, + "custom_properties": program_custom_properties, + }) + + current_time += timedelta(minutes=program_duration) + else: + # This day is either before the event (future days) or after the event happened + # Fill entire day with appropriate message + current_time = day_start + + # If event already happened, all programs show "Ended" + # If event hasn't happened yet (shouldn't occur with day 0 logic), show "Upcoming" + is_ended = event_happened + + while current_time < day_end: + program_start_utc = current_time + program_end_utc = min(current_time + timedelta(minutes=program_duration), day_end) + + # Use custom templates based on whether event has ended or is upcoming + if is_ended: + if ended_title_template: + program_title = format_template(ended_title_template, all_groups) + else: + program_title = main_event_title + + if ended_description_template: + program_description = format_template(ended_description_template, all_groups) + else: + program_description = 
f"Ended: {main_event_description}" + else: + if upcoming_title_template: + program_title = format_template(upcoming_title_template, all_groups) + else: + program_title = main_event_title + + if upcoming_description_template: + program_description = format_template(upcoming_description_template, all_groups) + else: + program_description = f"Upcoming: {main_event_description}" + + # Build custom_properties (only date for upcoming/ended filler programs) + program_custom_properties = {} + + # Add date if requested (YYYY-MM-DD format from start time in event timezone) + if include_date: + # Convert UTC time to event timezone for date calculation + local_time = program_start_utc.astimezone(source_tz) + date_str = local_time.strftime('%Y-%m-%d') + program_custom_properties['date'] = date_str + + programs.append({ + "channel_id": channel_id, + "start_time": program_start_utc, + "end_time": program_end_utc, + "title": program_title, + "description": program_description, + "custom_properties": program_custom_properties, + }) + + current_time += timedelta(minutes=program_duration) + else: + # No extracted time - fill entire day with regular intervals + # day_start and day_end are already in UTC, so no conversion needed + programs_per_day = max(1, int(24 / (program_duration / 60))) + + for program_num in range(programs_per_day): + program_start_utc = day_start + timedelta(minutes=program_num * program_duration) + program_end_utc = program_start_utc + timedelta(minutes=program_duration) + + if title_template: + title = format_template(title_template, all_groups) + else: + title_parts = [] + if 'league' in all_groups and all_groups['league']: + title_parts.append(all_groups['league']) + if 'team1' in all_groups and 'team2' in all_groups: + title_parts.append(f"{all_groups['team1']} vs {all_groups['team2']}") + elif 'title' in all_groups and all_groups['title']: + title_parts.append(all_groups['title']) + title = ' - '.join(title_parts) if title_parts else channel_name + + if 
description_template: + description = format_template(description_template, all_groups) + else: + description = title + + # Build custom_properties for this program + program_custom_properties = {} + + # Add categories if provided + if categories: + program_custom_properties['categories'] = categories + + # Add date if requested (YYYY-MM-DD format from start time in event timezone) + if include_date: + # Convert UTC time to event timezone for date calculation + local_time = program_start_utc.astimezone(source_tz) + date_str = local_time.strftime('%Y-%m-%d') + program_custom_properties['date'] = date_str + + # Add live flag if requested + if include_live: + program_custom_properties['live'] = True + + programs.append({ + "channel_id": channel_id, + "start_time": program_start_utc, + "end_time": program_end_utc, + "title": title, + "description": description, + "custom_properties": program_custom_properties, + }) + + logger.info(f"Generated {len(programs)} custom dummy programs for {channel_name}") + return programs + + def generate_dummy_epg( channel_id, channel_name, xml_lines=None, num_days=1, program_length_hours=4 ): @@ -294,6 +898,23 @@ def generate_dummy_epg( ) xml_lines.append(f" {html.escape(program['title'])}") xml_lines.append(f" {html.escape(program['description'])}") + + # Add custom_properties if present + custom_data = program.get('custom_properties', {}) + + # Categories + if 'categories' in custom_data: + for cat in custom_data['categories']: + xml_lines.append(f" {html.escape(cat)}") + + # Date tag + if 'date' in custom_data: + xml_lines.append(f" {html.escape(custom_data['date'])}") + + # Live tag + if custom_data.get('live', False): + xml_lines.append(f" ") + xml_lines.append(f" ") return xml_lines @@ -367,7 +988,7 @@ def generate_epg(request, profile_name=None, user=None): dummy_days = num_days if num_days > 0 else 3 # Calculate cutoff date for EPG data filtering (only if days > 0) - now = timezone.now() + now = django_timezone.now() cutoff_date 
= now + timedelta(days=num_days) if num_days > 0 else None # Process channels for the section @@ -434,12 +1055,38 @@ def generate_epg(request, profile_name=None, user=None): # Default to channel number channel_id = str(formatted_channel_number) if formatted_channel_number != "" else str(channel.id) + # Use EPG data name for display, but channel name for pattern matching display_name = channel.epg_data.name if channel.epg_data else channel.name + # For dummy EPG pattern matching, determine which name to use + pattern_match_name = channel.name + + # Check if we should use stream name instead of channel name + if channel.epg_data and channel.epg_data.epg_source: + epg_source = channel.epg_data.epg_source + if epg_source.custom_properties: + custom_props = epg_source.custom_properties + name_source = custom_props.get('name_source') + + if name_source == 'stream': + stream_index = custom_props.get('stream_index', 1) - 1 + channel_streams = channel.streams.all().order_by('channelstream__order') + + if channel_streams.exists() and 0 <= stream_index < channel_streams.count(): + stream = list(channel_streams)[stream_index] + pattern_match_name = stream.name + logger.debug(f"Using stream name for parsing: {pattern_match_name} (stream index: {stream_index})") + else: + logger.warning(f"Stream index {stream_index} not found for channel {channel.name}, falling back to channel name") if not channel.epg_data: # Use the enhanced dummy EPG generation function with defaults program_length_hours = 4 # Default to 4-hour program blocks - dummy_programs = generate_dummy_programs(channel_id, display_name, num_days=dummy_days, program_length_hours=program_length_hours) + dummy_programs = generate_dummy_programs( + channel_id, pattern_match_name, + num_days=dummy_days, + program_length_hours=program_length_hours, + epg_source=None + ) for program in dummy_programs: # Format times in XMLTV format @@ -450,9 +1097,68 @@ def generate_epg(request, profile_name=None, user=None): yield f' \n' 
yield f" {html.escape(program['title'])}\n" yield f" {html.escape(program['description'])}\n" + + # Add custom_properties if present + custom_data = program.get('custom_properties', {}) + + # Categories + if 'categories' in custom_data: + for cat in custom_data['categories']: + yield f" {html.escape(cat)}\n" + + # Date tag + if 'date' in custom_data: + yield f" {html.escape(custom_data['date'])}\n" + + # Live tag + if custom_data.get('live', False): + yield f" \n" + yield f" \n" else: + # Check if this is a dummy EPG with no programs (generate on-demand) + if channel.epg_data.epg_source and channel.epg_data.epg_source.source_type == 'dummy': + # This is a custom dummy EPG - check if it has programs + if not channel.epg_data.programs.exists(): + # No programs stored, generate on-demand using custom patterns + # Use actual channel name for pattern matching + program_length_hours = 4 + dummy_programs = generate_dummy_programs( + channel_id, pattern_match_name, + num_days=dummy_days, + program_length_hours=program_length_hours, + epg_source=channel.epg_data.epg_source + ) + + for program in dummy_programs: + start_str = program['start_time'].strftime("%Y%m%d%H%M%S %z") + stop_str = program['end_time'].strftime("%Y%m%d%H%M%S %z") + + yield f' \n' + yield f" {html.escape(program['title'])}\n" + yield f" {html.escape(program['description'])}\n" + + # Add custom_properties if present + custom_data = program.get('custom_properties', {}) + + # Categories + if 'categories' in custom_data: + for cat in custom_data['categories']: + yield f" {html.escape(cat)}\n" + + # Date tag + if 'date' in custom_data: + yield f" {html.escape(custom_data['date'])}\n" + + # Live tag + if custom_data.get('live', False): + yield f" \n" + + yield f" \n" + + continue # Skip to next channel + # For real EPG data - filter only if days parameter was specified if num_days > 0: programs_qs = channel.epg_data.programs.filter( @@ -1013,14 +1719,34 @@ def xc_get_epg(request, user, short=False): limit = 
request.GET.get('limit', 4) if channel.epg_data: - if short == False: - programs = channel.epg_data.programs.filter( - start_time__gte=timezone.now() - ).order_by('start_time') + # Check if this is a dummy EPG that generates on-demand + if channel.epg_data.epg_source and channel.epg_data.epg_source.source_type == 'dummy': + if not channel.epg_data.programs.exists(): + # Generate on-demand using custom patterns + programs = generate_dummy_programs( + channel_id=channel_id, + channel_name=channel.name, + epg_source=channel.epg_data.epg_source + ) + else: + # Has stored programs, use them + if short == False: + programs = channel.epg_data.programs.filter( + start_time__gte=django_timezone.now() + ).order_by('start_time') + else: + programs = channel.epg_data.programs.all().order_by('start_time')[:limit] else: - programs = channel.epg_data.programs.all().order_by('start_time')[:limit] + # Regular EPG with stored programs + if short == False: + programs = channel.epg_data.programs.filter( + start_time__gte=django_timezone.now() + ).order_by('start_time') + else: + programs = channel.epg_data.programs.all().order_by('start_time')[:limit] else: - programs = generate_dummy_programs(channel_id=channel_id, channel_name=channel.name) + # No EPG data assigned, generate default dummy + programs = generate_dummy_programs(channel_id=channel_id, channel_name=channel.name, epg_source=None) output = {"epg_listings": []} for program in programs: @@ -1047,7 +1773,7 @@ def xc_get_epg(request, user, short=False): } if short == False: - program_output["now_playing"] = 1 if start <= timezone.now() <= end else 0 + program_output["now_playing"] = 1 if start <= django_timezone.now() <= end else 0 program_output["has_archive"] = "0" output['epg_listings'].append(program_output) @@ -1232,7 +1958,7 @@ def xc_get_series_info(request, user, series_id): try: should_refresh = ( not series_relation.last_episode_refresh or - series_relation.last_episode_refresh < timezone.now() - timedelta(hours=24) 
+ series_relation.last_episode_refresh < django_timezone.now() - timedelta(hours=24) ) # Check if detailed data has been fetched diff --git a/apps/proxy/ts_proxy/url_utils.py b/apps/proxy/ts_proxy/url_utils.py index 75e7653e..db53cc74 100644 --- a/apps/proxy/ts_proxy/url_utils.py +++ b/apps/proxy/ts_proxy/url_utils.py @@ -8,7 +8,7 @@ from typing import Optional, Tuple, List from django.shortcuts import get_object_or_404 from apps.channels.models import Channel, Stream from apps.m3u.models import M3UAccount, M3UAccountProfile -from core.models import UserAgent, CoreSettings +from core.models import UserAgent, CoreSettings, StreamProfile from .utils import get_logger from uuid import UUID import requests @@ -26,16 +26,67 @@ def get_stream_object(id: str): def generate_stream_url(channel_id: str) -> Tuple[str, str, bool, Optional[int]]: """ - Generate the appropriate stream URL for a channel based on its profile settings. + Generate the appropriate stream URL for a channel or stream based on its profile settings. 
Args: - channel_id: The UUID of the channel + channel_id: The UUID of the channel or stream hash Returns: Tuple[str, str, bool, Optional[int]]: (stream_url, user_agent, transcode_flag, profile_id) """ try: - channel = get_stream_object(channel_id) + channel_or_stream = get_stream_object(channel_id) + + # Handle direct stream preview (custom streams) + if isinstance(channel_or_stream, Stream): + stream = channel_or_stream + logger.info(f"Previewing stream directly: {stream.id} ({stream.name})") + + # For custom streams, we need to get the M3U account and profile + m3u_account = stream.m3u_account + if not m3u_account: + logger.error(f"Stream {stream.id} has no M3U account") + return None, None, False, None + + # Get the default profile for this M3U account (custom streams use default) + m3u_profiles = m3u_account.profiles.all() + profile = next((obj for obj in m3u_profiles if obj.is_default), None) + + if not profile: + logger.error(f"No default profile found for M3U account {m3u_account.id}") + return None, None, False, None + + # Get the appropriate user agent + stream_user_agent = m3u_account.get_user_agent().user_agent + if stream_user_agent is None: + stream_user_agent = UserAgent.objects.get(id=CoreSettings.get_default_user_agent_id()) + logger.debug(f"No user agent found for account, using default: {stream_user_agent}") + + # Get stream URL (no transformation for custom streams) + stream_url = stream.url + + # Check if the stream has its own stream_profile set, otherwise use default + if stream.stream_profile: + stream_profile = stream.stream_profile + logger.debug(f"Using stream's own stream profile: {stream_profile.name}") + else: + stream_profile = StreamProfile.objects.get( + id=CoreSettings.get_default_stream_profile_id() + ) + logger.debug(f"Using default stream profile: {stream_profile.name}") + + # Check if transcoding is needed + if stream_profile.is_proxy() or stream_profile is None: + transcode = False + else: + transcode = True + + 
stream_profile_id = stream_profile.id + + return stream_url, stream_user_agent, transcode, stream_profile_id + + # Handle channel preview (existing logic) + channel = channel_or_stream # Get stream and profile for this channel # Note: get_stream now returns 3 values (stream_id, profile_id, error_reason) diff --git a/core/api_urls.py b/core/api_urls.py index 00e20a6e..baa4bbe5 100644 --- a/core/api_urls.py +++ b/core/api_urls.py @@ -2,7 +2,7 @@ from django.urls import path, include from rest_framework.routers import DefaultRouter -from .api_views import UserAgentViewSet, StreamProfileViewSet, CoreSettingsViewSet, environment, version, rehash_streams_endpoint +from .api_views import UserAgentViewSet, StreamProfileViewSet, CoreSettingsViewSet, environment, version, rehash_streams_endpoint, TimezoneListView router = DefaultRouter() router.register(r'useragents', UserAgentViewSet, basename='useragent') @@ -12,5 +12,6 @@ urlpatterns = [ path('settings/env/', environment, name='token_refresh'), path('version/', version, name='version'), path('rehash-streams/', rehash_streams_endpoint, name='rehash_streams'), + path('timezones/', TimezoneListView.as_view(), name='timezones'), path('', include(router.urls)), ] diff --git a/core/api_views.py b/core/api_views.py index 9de5aa5a..f475909a 100644 --- a/core/api_views.py +++ b/core/api_views.py @@ -5,10 +5,12 @@ import ipaddress import logging from rest_framework import viewsets, status from rest_framework.response import Response +from rest_framework.views import APIView from django.shortcuts import get_object_or_404 from rest_framework.permissions import IsAuthenticated from rest_framework.decorators import api_view, permission_classes, action from drf_yasg.utils import swagger_auto_schema +from drf_yasg import openapi from .models import ( UserAgent, StreamProfile, @@ -328,25 +330,69 @@ def rehash_streams_endpoint(request): # Get the current hash keys from settings hash_key_setting = 
CoreSettings.objects.get(key=STREAM_HASH_KEY) hash_keys = hash_key_setting.value.split(",") - + # Queue the rehash task task = rehash_streams.delay(hash_keys) - + return Response({ "success": True, "message": "Stream rehashing task has been queued", "task_id": task.id }, status=status.HTTP_200_OK) - + except CoreSettings.DoesNotExist: return Response({ "success": False, "message": "Hash key settings not found" }, status=status.HTTP_400_BAD_REQUEST) - + except Exception as e: logger.error(f"Error triggering rehash streams: {e}") return Response({ "success": False, "message": "Failed to trigger rehash task" }, status=status.HTTP_500_INTERNAL_SERVER_ERROR) + + +# ───────────────────────────── +# Timezone List API +# ───────────────────────────── +class TimezoneListView(APIView): + """ + API endpoint that returns all available timezones supported by pytz. + Returns a list of timezone names grouped by region for easy selection. + This is a general utility endpoint that can be used throughout the application. + """ + + def get_permissions(self): + return [Authenticated()] + + @swagger_auto_schema( + operation_description="Get list of all supported timezones", + responses={200: openapi.Response('List of timezones with grouping by region')} + ) + def get(self, request): + import pytz + + # Get all common timezones (excludes deprecated ones) + all_timezones = sorted(pytz.common_timezones) + + # Group by region for better UX + grouped = {} + for tz in all_timezones: + if '/' in tz: + region = tz.split('/')[0] + if region not in grouped: + grouped[region] = [] + grouped[region].append(tz) + else: + # Handle special zones like UTC, GMT, etc. 
+ if 'Other' not in grouped: + grouped['Other'] = [] + grouped['Other'].append(tz) + + return Response({ + 'timezones': all_timezones, + 'grouped': grouped, + 'count': len(all_timezones) + }) diff --git a/docker/entrypoint.sh b/docker/entrypoint.sh index fd0a883d..ab048ff7 100755 --- a/docker/entrypoint.sh +++ b/docker/entrypoint.sh @@ -96,7 +96,16 @@ fi chmod +x /etc/profile.d/dispatcharr.sh -pip install django-filter +# Ensure root's .bashrc sources the profile.d scripts for interactive non-login shells +if ! grep -q "profile.d/dispatcharr.sh" /root/.bashrc 2>/dev/null; then + cat >> /root/.bashrc << 'EOF' + +# Source Dispatcharr environment variables +if [ -f /etc/profile.d/dispatcharr.sh ]; then + . /etc/profile.d/dispatcharr.sh +fi +EOF +fi # Run init scripts echo "Starting user setup..." diff --git a/docker/init/03-init-dispatcharr.sh b/docker/init/03-init-dispatcharr.sh index 629c5a51..2c769241 100644 --- a/docker/init/03-init-dispatcharr.sh +++ b/docker/init/03-init-dispatcharr.sh @@ -1,25 +1,60 @@ #!/bin/bash -mkdir -p /data/logos -mkdir -p /data/recordings -mkdir -p /data/uploads/m3us -mkdir -p /data/uploads/epgs -mkdir -p /data/m3us -mkdir -p /data/epgs -mkdir -p /data/plugins -mkdir -p /app/logo_cache -mkdir -p /app/media +# Define directories that need to exist and be owned by PUID:PGID +DATA_DIRS=( + "/data/logos" + "/data/recordings" + "/data/uploads/m3us" + "/data/uploads/epgs" + "/data/m3us" + "/data/epgs" + "/data/plugins" +) + +APP_DIRS=( + "/app/logo_cache" + "/app/media" +) + +# Create all directories +for dir in "${DATA_DIRS[@]}" "${APP_DIRS[@]}"; do + mkdir -p "$dir" +done + +# Ensure /app itself is owned by PUID:PGID (needed for uwsgi socket creation) +if [ "$(id -u)" = "0" ] && [ -d "/app" ]; then + if [ "$(stat -c '%u:%g' /app)" != "$PUID:$PGID" ]; then + echo "Fixing ownership for /app (non-recursive)" + chown $PUID:$PGID /app + fi +fi sed -i "s/NGINX_PORT/${DISPATCHARR_PORT}/g" /etc/nginx/sites-enabled/default # NOTE: mac doesn't run as 
root, so only manage permissions # if this script is running as root if [ "$(id -u)" = "0" ]; then - # Needs to own ALL of /data except db, we handle that below - chown -R $PUID:$PGID /data - chown -R $PUID:$PGID /app + # Fix data directories (non-recursive to avoid touching user files) + for dir in "${DATA_DIRS[@]}"; do + if [ -d "$dir" ] && [ "$(stat -c '%u:%g' "$dir")" != "$PUID:$PGID" ]; then + echo "Fixing ownership for $dir" + chown $PUID:$PGID "$dir" + fi + done + + # Fix app directories (recursive since they're managed by the app) + for dir in "${APP_DIRS[@]}"; do + if [ -d "$dir" ] && [ "$(stat -c '%u:%g' "$dir")" != "$PUID:$PGID" ]; then + echo "Fixing ownership for $dir (recursive)" + chown -R $PUID:$PGID "$dir" + fi + done + + # Database permissions + if [ -d /data/db ] && [ "$(stat -c '%u' /data/db)" != "$(id -u postgres)" ]; then + echo "Fixing ownership for /data/db" + chown -R postgres:postgres /data/db + fi - # Permissions - chown -R postgres:postgres /data/db chmod +x /data -fi +fi \ No newline at end of file diff --git a/frontend/src/WebSocket.jsx b/frontend/src/WebSocket.jsx index 23a9a656..0f46b012 100644 --- a/frontend/src/WebSocket.jsx +++ b/frontend/src/WebSocket.jsx @@ -642,6 +642,16 @@ export const WebsocketProvider = ({ children }) => { } break; + case 'epg_data_created': + // A new EPG data entry was created (e.g., for a dummy EPG) + // Fetch EPG data so the channel form can immediately assign it + try { + await fetchEPGData(); + } catch (e) { + console.warn('Failed to refresh EPG data after creation:', e); + } + break; + case 'stream_rehash': // Handle stream rehash progress updates if (parsedEvent.data.action === 'starting') { diff --git a/frontend/src/api.js b/frontend/src/api.js index 4ef5f97e..5b80a3f7 100644 --- a/frontend/src/api.js +++ b/frontend/src/api.js @@ -1118,6 +1118,21 @@ export default class API { } } + static async getTimezones() { + try { + const response = await request(`${host}/api/core/timezones/`); + return 
response; + } catch (e) { + errorNotification('Failed to retrieve timezones', e); + // Return fallback data instead of throwing + return { + timezones: ['UTC', 'US/Eastern', 'US/Central', 'US/Mountain', 'US/Pacific'], + grouped: {}, + count: 5 + }; + } + } + static async getStreamProfiles() { try { const response = await request(`${host}/api/core/streamprofiles/`); diff --git a/frontend/src/components/forms/DummyEPG.jsx b/frontend/src/components/forms/DummyEPG.jsx new file mode 100644 index 00000000..a449d0c6 --- /dev/null +++ b/frontend/src/components/forms/DummyEPG.jsx @@ -0,0 +1,1005 @@ +import React, { useEffect, useMemo, useState } from 'react'; +import { + Box, + Button, + Checkbox, + Divider, + Group, + Modal, + NumberInput, + Select, + Stack, + Text, + TextInput, + Textarea, +} from '@mantine/core'; +import { useForm } from '@mantine/form'; +import { notifications } from '@mantine/notifications'; +import API from '../../api'; +import dayjs from 'dayjs'; +import utc from 'dayjs/plugin/utc'; +import timezone from 'dayjs/plugin/timezone'; + +// Extend dayjs with timezone support +dayjs.extend(utc); +dayjs.extend(timezone); + +const DummyEPGForm = ({ epg, isOpen, onClose }) => { + // Separate state for each field to prevent focus loss + const [titlePattern, setTitlePattern] = useState(''); + const [timePattern, setTimePattern] = useState(''); + const [datePattern, setDatePattern] = useState(''); + const [sampleTitle, setSampleTitle] = useState(''); + const [titleTemplate, setTitleTemplate] = useState(''); + const [descriptionTemplate, setDescriptionTemplate] = useState(''); + const [upcomingTitleTemplate, setUpcomingTitleTemplate] = useState(''); + const [upcomingDescriptionTemplate, setUpcomingDescriptionTemplate] = + useState(''); + const [endedTitleTemplate, setEndedTitleTemplate] = useState(''); + const [endedDescriptionTemplate, setEndedDescriptionTemplate] = useState(''); + const [timezoneOptions, setTimezoneOptions] = useState([]); + const 
[loadingTimezones, setLoadingTimezones] = useState(true); + + const form = useForm({ + initialValues: { + name: '', + is_active: true, + source_type: 'dummy', + custom_properties: { + title_pattern: '', + time_pattern: '', + date_pattern: '', + timezone: 'US/Eastern', + output_timezone: '', + program_duration: 180, + sample_title: '', + title_template: '', + description_template: '', + upcoming_title_template: '', + upcoming_description_template: '', + ended_title_template: '', + ended_description_template: '', + name_source: 'channel', + stream_index: 1, + category: '', + include_date: true, + include_live: false, + }, + }, + validate: { + name: (value) => (value?.trim() ? null : 'Name is required'), + 'custom_properties.title_pattern': (value) => { + if (!value?.trim()) return 'Title pattern is required'; + try { + new RegExp(value); + return null; + } catch (e) { + return `Invalid regex: ${e.message}`; + } + }, + 'custom_properties.name_source': (value) => { + if (!value) return 'Name source is required'; + return null; + }, + 'custom_properties.stream_index': (value, values) => { + if (values.custom_properties?.name_source === 'stream') { + if (!value || value < 1) { + return 'Stream index must be at least 1'; + } + } + return null; + }, + }, + }); + + // Real-time pattern validation with useMemo to prevent re-renders + const patternValidation = useMemo(() => { + const result = { + titleMatch: false, + timeMatch: false, + dateMatch: false, + titleGroups: {}, + timeGroups: {}, + dateGroups: {}, + formattedTitle: '', + formattedDescription: '', + formattedUpcomingTitle: '', + formattedUpcomingDescription: '', + formattedEndedTitle: '', + formattedEndedDescription: '', + error: null, + }; + + // Validate title pattern + if (titlePattern && sampleTitle) { + try { + const titleRegex = new RegExp(titlePattern); + const titleMatch = sampleTitle.match(titleRegex); + + if (titleMatch) { + result.titleMatch = true; + result.titleGroups = titleMatch.groups || {}; + } + } 
catch (e) { + result.error = `Title pattern error: ${e.message}`; + } + } + + // Validate time pattern + if (timePattern && sampleTitle) { + try { + const timeRegex = new RegExp(timePattern); + const timeMatch = sampleTitle.match(timeRegex); + + if (timeMatch) { + result.timeMatch = true; + result.timeGroups = timeMatch.groups || {}; + } + } catch (e) { + result.error = result.error + ? `${result.error}; Time pattern error: ${e.message}` + : `Time pattern error: ${e.message}`; + } + } + + // Validate date pattern + if (datePattern && sampleTitle) { + try { + const dateRegex = new RegExp(datePattern); + const dateMatch = sampleTitle.match(dateRegex); + + if (dateMatch) { + result.dateMatch = true; + result.dateGroups = dateMatch.groups || {}; + } + } catch (e) { + result.error = result.error + ? `${result.error}; Date pattern error: ${e.message}` + : `Date pattern error: ${e.message}`; + } + } + + // Merge all groups for template formatting + const allGroups = { + ...result.titleGroups, + ...result.timeGroups, + ...result.dateGroups, + }; + + // Calculate formatted time strings if time was extracted + if (result.timeGroups.hour) { + try { + let hour24 = parseInt(result.timeGroups.hour); + const minute = result.timeGroups.minute + ? 
parseInt(result.timeGroups.minute) + : 0; + const ampm = result.timeGroups.ampm?.toLowerCase(); + + // Convert to 24-hour if AM/PM present + if (ampm === 'pm' && hour24 !== 12) { + hour24 += 12; + } else if (ampm === 'am' && hour24 === 12) { + hour24 = 0; + } + + // Apply timezone conversion if output_timezone is set + const sourceTimezone = form.values.custom_properties?.timezone || 'UTC'; + const outputTimezone = form.values.custom_properties?.output_timezone; + + if (outputTimezone && outputTimezone !== sourceTimezone) { + // Create a date in the source timezone + const sourceDate = dayjs() + .tz(sourceTimezone) + .set('hour', hour24) + .set('minute', minute) + .set('second', 0); + + // Convert to output timezone + const outputDate = sourceDate.tz(outputTimezone); + + // Update hour and minute to the converted values + hour24 = outputDate.hour(); + const convertedMinute = outputDate.minute(); + + // Format 24-hour time string with converted time + if (convertedMinute > 0) { + allGroups.time24 = `${hour24.toString().padStart(2, '0')}:${convertedMinute.toString().padStart(2, '0')}`; + } else { + allGroups.time24 = `${hour24.toString().padStart(2, '0')}:00`; + } + + // Convert to 12-hour format with converted time + const ampmDisplay = hour24 < 12 ? 'AM' : 'PM'; + let hour12 = hour24; + if (hour24 === 0) { + hour12 = 12; + } else if (hour24 > 12) { + hour12 = hour24 - 12; + } + + // Format 12-hour time string with converted time + if (convertedMinute > 0) { + allGroups.time = `${hour12}:${convertedMinute.toString().padStart(2, '0')} ${ampmDisplay}`; + } else { + allGroups.time = `${hour12} ${ampmDisplay}`; + } + } else { + // No timezone conversion - use original logic + // Format 24-hour time string + if (minute > 0) { + allGroups.time24 = `${hour24.toString().padStart(2, '0')}:${minute.toString().padStart(2, '0')}`; + } else { + allGroups.time24 = `${hour24.toString().padStart(2, '0')}:00`; + } + + // Convert to 12-hour format + const ampmDisplay = hour24 < 12 ? 
'AM' : 'PM'; + let hour12 = hour24; + if (hour24 === 0) { + hour12 = 12; + } else if (hour24 > 12) { + hour12 = hour24 - 12; + } + + // Format 12-hour time string + if (minute > 0) { + allGroups.time = `${hour12}:${minute.toString().padStart(2, '0')} ${ampmDisplay}`; + } else { + allGroups.time = `${hour12} ${ampmDisplay}`; + } + } + } catch (e) { + // If parsing fails, leave time/time24 as placeholders + console.error('Error formatting time:', e); + } + } + + // Format title template + if (titleTemplate && (result.titleMatch || result.timeMatch)) { + result.formattedTitle = titleTemplate.replace( + /\{(\w+)\}/g, + (match, key) => allGroups[key] || match + ); + } + + // Format description template + if (descriptionTemplate && (result.titleMatch || result.timeMatch)) { + result.formattedDescription = descriptionTemplate.replace( + /\{(\w+)\}/g, + (match, key) => allGroups[key] || match + ); + } + + // Format upcoming title template + if (upcomingTitleTemplate && (result.titleMatch || result.timeMatch)) { + result.formattedUpcomingTitle = upcomingTitleTemplate.replace( + /\{(\w+)\}/g, + (match, key) => allGroups[key] || match + ); + } + + // Format upcoming description template + if ( + upcomingDescriptionTemplate && + (result.titleMatch || result.timeMatch) + ) { + result.formattedUpcomingDescription = upcomingDescriptionTemplate.replace( + /\{(\w+)\}/g, + (match, key) => allGroups[key] || match + ); + } + + // Format ended title template + if (endedTitleTemplate && (result.titleMatch || result.timeMatch)) { + result.formattedEndedTitle = endedTitleTemplate.replace( + /\{(\w+)\}/g, + (match, key) => allGroups[key] || match + ); + } + + // Format ended description template + if (endedDescriptionTemplate && (result.titleMatch || result.timeMatch)) { + result.formattedEndedDescription = endedDescriptionTemplate.replace( + /\{(\w+)\}/g, + (match, key) => allGroups[key] || match + ); + } + + return result; + }, [ + titlePattern, + timePattern, + datePattern, + 
sampleTitle, + titleTemplate, + descriptionTemplate, + upcomingTitleTemplate, + upcomingDescriptionTemplate, + endedTitleTemplate, + endedDescriptionTemplate, + form.values.custom_properties?.timezone, + form.values.custom_properties?.output_timezone, + ]); + + useEffect(() => { + if (epg) { + const custom = epg.custom_properties || {}; + + form.setValues({ + name: epg.name || '', + is_active: epg.is_active ?? true, + source_type: 'dummy', + custom_properties: { + title_pattern: custom.title_pattern || '', + time_pattern: custom.time_pattern || '', + date_pattern: custom.date_pattern || '', + timezone: + custom.timezone || + custom.timezone_offset?.toString() || + 'US/Eastern', + output_timezone: custom.output_timezone || '', + program_duration: custom.program_duration || 180, + sample_title: custom.sample_title || '', + title_template: custom.title_template || '', + description_template: custom.description_template || '', + upcoming_title_template: custom.upcoming_title_template || '', + upcoming_description_template: + custom.upcoming_description_template || '', + ended_title_template: custom.ended_title_template || '', + ended_description_template: custom.ended_description_template || '', + name_source: custom.name_source || 'channel', + stream_index: custom.stream_index || 1, + category: custom.category || '', + include_date: custom.include_date ?? true, + include_live: custom.include_live ?? 
false, + }, + }); + + // Set controlled state + setTitlePattern(custom.title_pattern || ''); + setTimePattern(custom.time_pattern || ''); + setDatePattern(custom.date_pattern || ''); + setSampleTitle(custom.sample_title || ''); + setTitleTemplate(custom.title_template || ''); + setDescriptionTemplate(custom.description_template || ''); + setUpcomingTitleTemplate(custom.upcoming_title_template || ''); + setUpcomingDescriptionTemplate( + custom.upcoming_description_template || '' + ); + setEndedTitleTemplate(custom.ended_title_template || ''); + setEndedDescriptionTemplate(custom.ended_description_template || ''); + } else { + form.reset(); + setTitlePattern(''); + setTimePattern(''); + setDatePattern(''); + setSampleTitle(''); + setTitleTemplate(''); + setDescriptionTemplate(''); + setUpcomingTitleTemplate(''); + setUpcomingDescriptionTemplate(''); + setEndedTitleTemplate(''); + setEndedDescriptionTemplate(''); + } + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [epg]); + + // Fetch available timezones from the API + useEffect(() => { + const fetchTimezones = async () => { + try { + setLoadingTimezones(true); + const response = await API.getTimezones(); + + // Convert timezone list to Select options format + const options = response.timezones.map((tz) => ({ + value: tz, + label: tz, + })); + + setTimezoneOptions(options); + } catch (error) { + console.error('Failed to load timezones:', error); + notifications.show({ + title: 'Warning', + message: 'Failed to load timezone list. 
Using default options.', + color: 'yellow', + }); + // Fallback to a minimal list + setTimezoneOptions([ + { value: 'UTC', label: 'UTC' }, + { value: 'US/Eastern', label: 'US/Eastern' }, + { value: 'US/Central', label: 'US/Central' }, + { value: 'US/Pacific', label: 'US/Pacific' }, + ]); + } finally { + setLoadingTimezones(false); + } + }; + + fetchTimezones(); + }, []); + + const handleSubmit = async (values) => { + try { + if (epg?.id) { + await API.updateEPG({ ...values, id: epg.id }); + notifications.show({ + title: 'Success', + message: 'Dummy EPG source updated successfully', + color: 'green', + }); + } else { + await API.addEPG(values); + notifications.show({ + title: 'Success', + message: 'Dummy EPG source created successfully', + color: 'green', + }); + } + onClose(); + } catch (error) { + notifications.show({ + title: 'Error', + message: error.message || 'Failed to save dummy EPG source', + color: 'red', + }); + } + }; + + return ( + +
+ + {/* Basic Settings */} + + + {/* Pattern Configuration */} + + + + Define regex patterns to extract information from channel titles or + stream names. Use named capture groups like + (?<groupname>pattern). + + +