From 22fb0b3bdd78a6d649a3275a8871640afb97bf5f Mon Sep 17 00:00:00 2001
From: SergeantPanda
Date: Sat, 18 Oct 2025 12:08:56 -0500
Subject: [PATCH 01/23] Enhancement: Add Custom Dummy EPG with Dynamic Pattern
 Matching and Name Source Selection

This enhancement introduces a custom dummy EPG system that generates EPG
programs on demand by parsing channel or stream names with configurable
regex patterns.

Key Features:
- Custom Pattern Matching: Define regex patterns to extract information from
  channel/stream names (teams, leagues, times, dates, etc.)
- Flexible Name Source: Parse either the channel name or a specific stream
  name (by index)
- Timezone-Aware Scheduling: Automatic DST handling using pytz timezone names
  (e.g., 'US/Eastern', 'Europe/London')
- Time Format Support: Parse both 12-hour (AM/PM) and 24-hour time formats
- Date Parsing: Extract dates from names with flexible month/day/year patterns
- Custom Templates: Format EPG titles and descriptions from captured groups
  using {placeholder} syntax
- Upcoming/Ended Customization: Define custom titles and descriptions for
  programs before and after scheduled events
- Live Preview: Test patterns and templates in real time with sample input
- Smart Program Generation: Automatically creates "Upcoming" and "Ended"
  programs around scheduled events

Use Cases:
- Sports channels with event details in stream names (e.g., "NHL 01: Bruins
  VS Leafs @ 8:00PM ET")
- Movie channels with genre/title/year information
- Racing events with driver/track/series details
- Any scenario where EPG data is embedded in channel/stream naming conventions

Technical Implementation:
- Backend: Pattern matching engine with timezone conversion and program
  scheduling logic
- Frontend: Interactive form with validation, pattern testing, and visual
  group preview
- Name Source Options: Parse from the channel name or a selectable stream
  index (1-based)
- Fallback Behavior: Uses the standard dummy EPG if patterns don't match
- Custom Properties: Stores all configuration in the
  EPGSource.custom_properties JSON field

Configuration Options:
- Title Pattern: Extract primary information (required)
- Time Pattern: Extract hour/minute/AM-PM (optional)
- Date Pattern: Extract month/day/year (optional)
- Timezone: Event timezone with automatic DST support
- Program Duration: Length of generated programs in minutes
- Title Template: Format EPG title using captured groups
- Description Template: Format EPG description using captured groups
- Upcoming Title Template: Custom title for programs before the event starts
  (optional)
- Upcoming Description Template: Custom description for programs before the
  event starts (optional)
- Ended Title Template: Custom title for programs after the event ends
  (optional)
- Ended Description Template: Custom description for programs after the event
  ends (optional)
- Name Source: Channel name or stream name
- Stream Index: Which stream to use when parsing stream names (1, 2, 3, etc.)
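As a rough sketch of the parsing flow described above (illustrative only --
the pattern, sample title, and timezone below are assumptions for this
example, not code taken from the patch), extracting named groups and
converting the event time to UTC might look like this in Python:

    import re
    from datetime import datetime, timedelta
    import pytz

    title = "NHL 01: Bruins VS Leafs @ 8:00PM ET"
    pattern = (r"(?P<league>\w+) \d+: (?P<team1>.+) VS (?P<team2>.+) "
               r"@ (?P<hour>\d{1,2}):(?P<minute>\d{2})(?P<ampm>AM|PM)")
    groups = re.match(pattern, title).groupdict()

    # Convert 12-hour time to 24-hour
    hour = int(groups["hour"]) % 12 + (12 if groups["ampm"] == "PM" else 0)

    # Localize to the event timezone (pytz handles DST), then convert to UTC
    eastern = pytz.timezone("US/Eastern")
    start_local = eastern.localize(
        datetime.now().replace(hour=hour, minute=int(groups["minute"]),
                               second=0, microsecond=0)
    )
    start_utc = start_local.astimezone(pytz.utc)
    end_utc = start_utc + timedelta(minutes=180)  # program_duration

    # Fill {placeholder} templates from the captured groups
    epg_title = "{league}: {team1} vs {team2}".format(**groups)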
Closes #293 --- apps/epg/api_views.py | 137 +++- ...18_epgsource_custom_properties_and_more.py | 23 + apps/epg/models.py | 7 + apps/epg/serializers.py | 1 + apps/epg/signals.py | 77 +- apps/epg/tasks.py | 29 +- apps/output/views.py | 550 ++++++++++++- core/api_urls.py | 3 +- core/api_views.py | 54 +- frontend/src/WebSocket.jsx | 10 + frontend/src/api.js | 15 + frontend/src/components/forms/DummyEPG.jsx | 761 ++++++++++++++++++ frontend/src/components/forms/EPG.jsx | 32 +- frontend/src/components/tables/EPGsTable.jsx | 109 ++- frontend/src/pages/Guide.jsx | 5 +- frontend/src/pages/guideUtils.js | 21 +- 16 files changed, 1741 insertions(+), 93 deletions(-) create mode 100644 apps/epg/migrations/0018_epgsource_custom_properties_and_more.py create mode 100644 frontend/src/components/forms/DummyEPG.jsx diff --git a/apps/epg/api_views.py b/apps/epg/api_views.py index f3248677..2fc5a743 100644 --- a/apps/epg/api_views.py +++ b/apps/epg/api_views.py @@ -147,23 +147,37 @@ class EPGGridAPIView(APIView): f"EPGGridAPIView: Found {count} program(s), including recently ended, currently running, and upcoming shows." ) - # Generate dummy programs for channels that have no EPG data + # Generate dummy programs for channels that have no EPG data OR dummy EPG sources from apps.channels.models import Channel + from apps.epg.models import EPGSource from django.db.models import Q - # Get channels with no EPG data + # Get channels with no EPG data at all (standard dummy) channels_without_epg = Channel.objects.filter(Q(epg_data__isnull=True)) - channels_count = channels_without_epg.count() - # Log more detailed information about channels missing EPG data - if channels_count > 0: + # Get channels with custom dummy EPG sources (generate on-demand with patterns) + channels_with_custom_dummy = Channel.objects.filter( + epg_data__epg_source__source_type='dummy' + ).distinct() + + # Log what we found + without_count = channels_without_epg.count() + custom_count = channels_with_custom_dummy.count() + + if without_count > 0: channel_names = [f"{ch.name} (ID: {ch.id})" for ch in channels_without_epg] - logger.warning( - f"EPGGridAPIView: Missing EPG data for these channels: {', '.join(channel_names)}" + logger.debug( + f"EPGGridAPIView: Channels needing standard dummy EPG: {', '.join(channel_names)}" + ) + + if custom_count > 0: + channel_names = [f"{ch.name} (ID: {ch.id})" for ch in channels_with_custom_dummy] + logger.debug( + f"EPGGridAPIView: Channels needing custom dummy EPG: {', '.join(channel_names)}" ) logger.debug( - f"EPGGridAPIView: Found {channels_count} channels with no EPG data." + f"EPGGridAPIView: Found {without_count} channels needing standard dummy, {custom_count} needing custom dummy EPG." 
) # Serialize the regular programs @@ -205,12 +219,91 @@ class EPGGridAPIView(APIView): # Generate and append dummy programs dummy_programs = [] - for channel in channels_without_epg: - # Use the channel UUID as tvg_id for dummy programs to match in the guide + + # Import the function from output.views + from apps.output.views import generate_dummy_programs as gen_dummy_progs + + # Handle channels with CUSTOM dummy EPG sources (with patterns) + for channel in channels_with_custom_dummy: + # For dummy EPGs, ALWAYS use channel UUID to ensure unique programs per channel + # This prevents multiple channels assigned to the same dummy EPG from showing identical data + # Each channel gets its own unique program data even if they share the same EPG source dummy_tvg_id = str(channel.uuid) try: - # Create programs every 4 hours for the next 24 hours + # Get the custom dummy EPG source + epg_source = channel.epg_data.epg_source if channel.epg_data else None + + logger.debug(f"Generating custom dummy programs for channel: {channel.name} (ID: {channel.id})") + + # Determine which name to parse based on custom properties + name_to_parse = channel.name + if epg_source and epg_source.custom_properties: + custom_props = epg_source.custom_properties + name_source = custom_props.get('name_source') + + if name_source == 'stream': + # Get the stream index (1-based from user, convert to 0-based) + stream_index = custom_props.get('stream_index', 1) - 1 + + # Get streams ordered by channelstream order + channel_streams = channel.streams.all().order_by('channelstream__order') + + if channel_streams.exists() and 0 <= stream_index < channel_streams.count(): + stream = list(channel_streams)[stream_index] + name_to_parse = stream.name + logger.debug(f"Using stream name for parsing: {name_to_parse} (stream index: {stream_index})") + else: + logger.warning(f"Stream index {stream_index} not found for channel {channel.name}, falling back to channel name") + elif name_source == 'channel': + logger.debug(f"Using channel name for parsing: {name_to_parse}") + + # Generate programs using custom patterns from the dummy EPG source + # Use the same tvg_id that will be set in the program data + generated = gen_dummy_progs( + channel_id=dummy_tvg_id, + channel_name=name_to_parse, + num_days=1, + program_length_hours=4, + epg_source=epg_source + ) + + # Custom dummy should always return data (either from patterns or fallback) + if generated: + logger.debug(f"Generated {len(generated)} custom dummy programs for {channel.name}") + # Convert generated programs to API format + for program in generated: + dummy_program = { + "id": f"dummy-custom-{channel.id}-{program['start_time'].hour}", + "epg": {"tvg_id": dummy_tvg_id, "name": channel.name}, + "start_time": program['start_time'].isoformat(), + "end_time": program['end_time'].isoformat(), + "title": program['title'], + "description": program['description'], + "tvg_id": dummy_tvg_id, + "sub_title": None, + "custom_properties": None, + } + dummy_programs.append(dummy_program) + else: + logger.warning(f"No programs generated for custom dummy EPG channel: {channel.name}") + + except Exception as e: + logger.error( + f"Error creating custom dummy programs for channel {channel.name} (ID: {channel.id}): {str(e)}" + ) + + # Handle channels with NO EPG data (standard dummy with humorous descriptions) + for channel in channels_without_epg: + # For channels with no EPG, use UUID to ensure uniqueness (matches frontend logic) + # The frontend uses: tvgRecord?.tvg_id ?? 
channel.uuid + # Since there's no EPG data, it will fall back to UUID + dummy_tvg_id = str(channel.uuid) + + try: + logger.debug(f"Generating standard dummy programs for channel: {channel.name} (ID: {channel.id})") + + # Create programs every 4 hours for the next 24 hours with humorous descriptions for hour_offset in range(0, 24, 4): # Use timedelta for time arithmetic instead of replace() to avoid hour overflow start_time = now + timedelta(hours=hour_offset) @@ -238,7 +331,7 @@ class EPGGridAPIView(APIView): # Create a dummy program in the same format as regular programs dummy_program = { - "id": f"dummy-{channel.id}-{hour_offset}", # Create a unique ID + "id": f"dummy-standard-{channel.id}-{hour_offset}", "epg": {"tvg_id": dummy_tvg_id, "name": channel.name}, "start_time": start_time.isoformat(), "end_time": end_time.isoformat(), @@ -252,7 +345,7 @@ class EPGGridAPIView(APIView): except Exception as e: logger.error( - f"Error creating dummy programs for channel {channel.name} (ID: {channel.id}): {str(e)}" + f"Error creating standard dummy programs for channel {channel.name} (ID: {channel.id}): {str(e)}" ) # Combine regular and dummy programs @@ -284,7 +377,22 @@ class EPGImportAPIView(APIView): ) def post(self, request, format=None): logger.info("EPGImportAPIView: Received request to import EPG data.") - refresh_epg_data.delay(request.data.get("id", None)) # Trigger Celery task + epg_id = request.data.get("id", None) + + # Check if this is a dummy EPG source + try: + from .models import EPGSource + epg_source = EPGSource.objects.get(id=epg_id) + if epg_source.source_type == 'dummy': + logger.info(f"EPGImportAPIView: Skipping refresh for dummy EPG source {epg_id}") + return Response( + {"success": False, "message": "Dummy EPG sources do not require refreshing."}, + status=status.HTTP_400_BAD_REQUEST, + ) + except EPGSource.DoesNotExist: + pass # Let the task handle the missing source + + refresh_epg_data.delay(epg_id) # Trigger Celery task logger.info("EPGImportAPIView: Task dispatched to refresh EPG data.") return Response( {"success": True, "message": "EPG data import initiated."}, @@ -308,3 +416,4 @@ class EPGDataViewSet(viewsets.ReadOnlyModelViewSet): return [perm() for perm in permission_classes_by_action[self.action]] except KeyError: return [Authenticated()] + diff --git a/apps/epg/migrations/0018_epgsource_custom_properties_and_more.py b/apps/epg/migrations/0018_epgsource_custom_properties_and_more.py new file mode 100644 index 00000000..70ebb214 --- /dev/null +++ b/apps/epg/migrations/0018_epgsource_custom_properties_and_more.py @@ -0,0 +1,23 @@ +# Generated by Django 5.2.4 on 2025-10-17 17:02 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('epg', '0017_alter_epgsource_url'), + ] + + operations = [ + migrations.AddField( + model_name='epgsource', + name='custom_properties', + field=models.JSONField(blank=True, default=dict, help_text='Custom properties for dummy EPG configuration (regex patterns, timezone, duration, etc.)', null=True), + ), + migrations.AlterField( + model_name='epgsource', + name='source_type', + field=models.CharField(choices=[('xmltv', 'XMLTV URL'), ('schedules_direct', 'Schedules Direct API'), ('dummy', 'Custom Dummy EPG')], max_length=20), + ), + ] diff --git a/apps/epg/models.py b/apps/epg/models.py index da6ac8e6..6c70add2 100644 --- a/apps/epg/models.py +++ b/apps/epg/models.py @@ -8,6 +8,7 @@ class EPGSource(models.Model): SOURCE_TYPE_CHOICES = [ ('xmltv', 'XMLTV URL'), ('schedules_direct', 
'Schedules Direct API'), + ('dummy', 'Custom Dummy EPG'), ] STATUS_IDLE = 'idle' @@ -38,6 +39,12 @@ class EPGSource(models.Model): refresh_task = models.ForeignKey( PeriodicTask, on_delete=models.SET_NULL, null=True, blank=True ) + custom_properties = models.JSONField( + default=dict, + blank=True, + null=True, + help_text="Custom properties for dummy EPG configuration (regex patterns, timezone, duration, etc.)" + ) status = models.CharField( max_length=20, choices=STATUS_CHOICES, diff --git a/apps/epg/serializers.py b/apps/epg/serializers.py index 85186cae..3404cca9 100644 --- a/apps/epg/serializers.py +++ b/apps/epg/serializers.py @@ -28,6 +28,7 @@ class EPGSourceSerializer(serializers.ModelSerializer): 'last_message', 'created_at', 'updated_at', + 'custom_properties', 'epg_data_ids' ] diff --git a/apps/epg/signals.py b/apps/epg/signals.py index e8a004cb..e41d3aaf 100644 --- a/apps/epg/signals.py +++ b/apps/epg/signals.py @@ -1,9 +1,9 @@ from django.db.models.signals import post_save, post_delete, pre_save from django.dispatch import receiver -from .models import EPGSource +from .models import EPGSource, EPGData from .tasks import refresh_epg_data, delete_epg_refresh_task_by_id from django_celery_beat.models import PeriodicTask, IntervalSchedule -from core.utils import is_protected_path +from core.utils import is_protected_path, send_websocket_update import json import logging import os @@ -12,15 +12,77 @@ logger = logging.getLogger(__name__) @receiver(post_save, sender=EPGSource) def trigger_refresh_on_new_epg_source(sender, instance, created, **kwargs): - # Trigger refresh only if the source is newly created and active - if created and instance.is_active: + # Trigger refresh only if the source is newly created, active, and not a dummy EPG + if created and instance.is_active and instance.source_type != 'dummy': refresh_epg_data.delay(instance.id) +@receiver(post_save, sender=EPGSource) +def create_dummy_epg_data(sender, instance, created, **kwargs): + """ + Automatically create EPGData for dummy EPG sources when they are created. + This allows channels to be assigned to dummy EPGs immediately without + requiring a refresh first. 
+ """ + if instance.source_type == 'dummy': + # Ensure dummy EPGs always have idle status and no status message + if instance.status != EPGSource.STATUS_IDLE or instance.last_message: + instance.status = EPGSource.STATUS_IDLE + instance.last_message = None + instance.save(update_fields=['status', 'last_message']) + + # Create a URL-friendly tvg_id from the dummy EPG name + # Replace spaces and special characters with underscores + friendly_tvg_id = instance.name.replace(' ', '_').replace('-', '_') + # Remove any characters that aren't alphanumeric or underscores + friendly_tvg_id = ''.join(c for c in friendly_tvg_id if c.isalnum() or c == '_') + # Convert to lowercase for consistency + friendly_tvg_id = friendly_tvg_id.lower() + # Prefix with 'dummy_' to make it clear this is a dummy EPG + friendly_tvg_id = f"dummy_{friendly_tvg_id}" + + # Create or update the EPGData record + epg_data, data_created = EPGData.objects.get_or_create( + tvg_id=friendly_tvg_id, + epg_source=instance, + defaults={ + 'name': instance.name, + 'icon_url': None + } + ) + + # Update name if it changed and record already existed + if not data_created and epg_data.name != instance.name: + epg_data.name = instance.name + epg_data.save(update_fields=['name']) + + if data_created: + logger.info(f"Auto-created EPGData for dummy EPG source: {instance.name} (ID: {instance.id})") + + # Send websocket update to notify frontend that EPG data has been created + # This allows the channel form to immediately show the new dummy EPG without refreshing + send_websocket_update('updates', 'update', { + 'type': 'epg_data_created', + 'source_id': instance.id, + 'source_name': instance.name, + 'epg_data_id': epg_data.id + }) + else: + logger.debug(f"EPGData already exists for dummy EPG source: {instance.name} (ID: {instance.id})") + @receiver(post_save, sender=EPGSource) def create_or_update_refresh_task(sender, instance, **kwargs): """ Create or update a Celery Beat periodic task when an EPGSource is created/updated. + Skip creating tasks for dummy EPG sources as they don't need refreshing. """ + # Skip task creation for dummy EPGs + if instance.source_type == 'dummy': + # If there's an existing task, disable it + if instance.refresh_task: + instance.refresh_task.enabled = False + instance.refresh_task.save(update_fields=['enabled']) + return + task_name = f"epg_source-refresh-{instance.id}" interval, _ = IntervalSchedule.objects.get_or_create( every=int(instance.refresh_interval), @@ -80,7 +142,14 @@ def delete_refresh_task(sender, instance, **kwargs): def update_status_on_active_change(sender, instance, **kwargs): """ When an EPGSource's is_active field changes, update the status accordingly. + For dummy EPGs, always ensure status is idle and no status message. 
""" + # Dummy EPGs should always be idle with no status message + if instance.source_type == 'dummy': + instance.status = EPGSource.STATUS_IDLE + instance.last_message = None + return + if instance.pk: # Only for existing records, not new ones try: # Get the current record from the database diff --git a/apps/epg/tasks.py b/apps/epg/tasks.py index d9ae5a5d..2028cd98 100644 --- a/apps/epg/tasks.py +++ b/apps/epg/tasks.py @@ -133,8 +133,9 @@ def delete_epg_refresh_task_by_id(epg_id): @shared_task def refresh_all_epg_data(): logger.info("Starting refresh_epg_data task.") - active_sources = EPGSource.objects.filter(is_active=True) - logger.debug(f"Found {active_sources.count()} active EPGSource(s).") + # Exclude dummy EPG sources from refresh - they don't need refreshing + active_sources = EPGSource.objects.filter(is_active=True).exclude(source_type='dummy') + logger.debug(f"Found {active_sources.count()} active EPGSource(s) (excluding dummy EPGs).") for source in active_sources: refresh_epg_data(source.id) @@ -180,6 +181,13 @@ def refresh_epg_data(source_id): gc.collect() return + # Skip refresh for dummy EPG sources - they don't need refreshing + if source.source_type == 'dummy': + logger.info(f"Skipping refresh for dummy EPG source {source.name} (ID: {source_id})") + release_task_lock('refresh_epg_data', source_id) + gc.collect() + return + # Continue with the normal processing... logger.info(f"Processing EPGSource: {source.name} (type: {source.source_type})") if source.source_type == 'xmltv': @@ -1943,3 +1951,20 @@ def detect_file_format(file_path=None, content=None): # If we reach here, we couldn't reliably determine the format return format_type, is_compressed, file_extension + + +def generate_dummy_epg(source): + """ + DEPRECATED: This function is no longer used. + + Dummy EPG programs are now generated on-demand when they are requested + (during XMLTV export or EPG grid display), rather than being pre-generated + and stored in the database. + + See: apps/output/views.py - generate_custom_dummy_programs() + + This function remains for backward compatibility but should not be called. + """ + logger.warning(f"generate_dummy_epg() called for {source.name} but this function is deprecated. " + f"Dummy EPG programs are now generated on-demand.") + return True diff --git a/apps/output/views.py b/apps/output/views.py index 15036710..c7a827fc 100644 --- a/apps/output/views.py +++ b/apps/output/views.py @@ -9,7 +9,7 @@ from apps.epg.models import ProgramData from apps.accounts.models import User from core.models import CoreSettings, NETWORK_ACCESS from dispatcharr.utils import network_access_allowed -from django.utils import timezone +from django.utils import timezone as django_timezone from django.shortcuts import get_object_or_404 from datetime import datetime, timedelta import html # Add this import for XML escaping @@ -186,12 +186,44 @@ def generate_m3u(request, profile_name=None, user=None): return response -def generate_dummy_programs(channel_id, channel_name, num_days=1, program_length_hours=4): +def generate_dummy_programs(channel_id, channel_name, num_days=1, program_length_hours=4, epg_source=None): + """ + Generate dummy EPG programs for channels. + + If epg_source is provided and it's a custom dummy EPG with patterns, + use those patterns to generate programs from the channel title. + Otherwise, generate default dummy programs. 
+ + Args: + channel_id: Channel ID for the programs + channel_name: Channel title/name + num_days: Number of days to generate programs for + program_length_hours: Length of each program in hours + epg_source: Optional EPGSource for custom dummy EPG with patterns + + Returns: + List of program dictionaries + """ + import re + # Get current time rounded to hour - now = timezone.now() + now = django_timezone.now() now = now.replace(minute=0, second=0, microsecond=0) - # Humorous program descriptions based on time of day + # Check if this is a custom dummy EPG with regex patterns + if epg_source and epg_source.source_type == 'dummy' and epg_source.custom_properties: + custom_programs = generate_custom_dummy_programs( + channel_id, channel_name, now, num_days, + epg_source.custom_properties + ) + # If custom generation succeeded, return those programs + # If it returned empty (pattern didn't match), fall through to default + if custom_programs: + return custom_programs + else: + logger.info(f"Custom pattern didn't match for '{channel_name}', using default dummy EPG") + + # Default humorous program descriptions based on time of day time_descriptions = { (0, 4): [ f"Late Night with {channel_name} - Where insomniacs unite!", @@ -263,6 +295,443 @@ def generate_dummy_programs(channel_id, channel_name, num_days=1, program_length return programs +def generate_custom_dummy_programs(channel_id, channel_name, now, num_days, custom_properties): + """ + Generate programs using custom dummy EPG regex patterns. + + Extracts information from channel title using regex patterns and generates + programs based on the extracted data. + + TIMEZONE HANDLING: + ------------------ + The timezone parameter specifies the timezone of the event times in your channel + titles using standard timezone names (e.g., 'US/Eastern', 'US/Pacific', 'Europe/London'). + DST (Daylight Saving Time) is handled automatically by pytz. + + Examples: + - Channel: "NHL 01: Bruins VS Maple Leafs @ 8:00PM ET" + - Set timezone = "US/Eastern" + - In October (DST): 8:00PM EDT → 12:00AM UTC (automatically uses UTC-4) + - In January (no DST): 8:00PM EST → 1:00AM UTC (automatically uses UTC-5) + + Args: + channel_id: Channel ID for the programs + channel_name: Channel title to parse + now: Current datetime (in UTC) + num_days: Number of days to generate programs for + custom_properties: Dict with title_pattern, time_pattern, templates, etc. 
+ - timezone: Timezone name (e.g., 'US/Eastern') + + Returns: + List of program dictionaries with start_time/end_time in UTC + """ + import re + import pytz + + logger.info(f"Generating custom dummy programs for channel: {channel_name}") + + # Extract patterns from custom properties + title_pattern = custom_properties.get('title_pattern', '') + time_pattern = custom_properties.get('time_pattern', '') + date_pattern = custom_properties.get('date_pattern', '') + + # Get timezone name (e.g., 'US/Eastern', 'US/Pacific', 'Europe/London') + timezone_value = custom_properties.get('timezone', 'UTC') + program_duration = custom_properties.get('program_duration', 180) # Minutes + title_template = custom_properties.get('title_template', '') + description_template = custom_properties.get('description_template', '') + + # Templates for upcoming/ended programs + upcoming_title_template = custom_properties.get('upcoming_title_template', '') + upcoming_description_template = custom_properties.get('upcoming_description_template', '') + ended_title_template = custom_properties.get('ended_title_template', '') + ended_description_template = custom_properties.get('ended_description_template', '') + + # Parse timezone name + try: + source_tz = pytz.timezone(timezone_value) + logger.debug(f"Using timezone: {timezone_value} (DST will be handled automatically)") + except pytz.exceptions.UnknownTimeZoneError: + logger.warning(f"Unknown timezone: {timezone_value}, defaulting to UTC") + source_tz = pytz.utc + + if not title_pattern: + logger.warning(f"No title_pattern in custom_properties, falling back to default") + return [] # Return empty, will use default + + logger.debug(f"Title pattern from DB: {repr(title_pattern)}") + + # Convert PCRE/JavaScript named groups (?) to Python format (?P) + # This handles patterns created with JavaScript regex syntax + title_pattern = re.sub(r'\(\?<([^>]+)>', r'(?P<\1>', title_pattern) + logger.debug(f"Converted title pattern: {repr(title_pattern)}") + + # Compile regex patterns + try: + title_regex = re.compile(title_pattern) + except re.error as e: + logger.error(f"Invalid title regex pattern after conversion: {e}") + logger.error(f"Pattern was: {repr(title_pattern)}") + return [] + + time_regex = None + if time_pattern: + # Convert PCRE/JavaScript named groups to Python format + time_pattern = re.sub(r'\(\?<([^>]+)>', r'(?P<\1>', time_pattern) + logger.debug(f"Converted time pattern: {repr(time_pattern)}") + try: + time_regex = re.compile(time_pattern) + except re.error as e: + logger.warning(f"Invalid time regex pattern after conversion: {e}") + logger.warning(f"Pattern was: {repr(time_pattern)}") + + # Compile date regex if provided + date_regex = None + if date_pattern: + # Convert PCRE/JavaScript named groups to Python format + date_pattern = re.sub(r'\(\?<([^>]+)>', r'(?P<\1>', date_pattern) + logger.debug(f"Converted date pattern: {repr(date_pattern)}") + try: + date_regex = re.compile(date_pattern) + except re.error as e: + logger.warning(f"Invalid date regex pattern after conversion: {e}") + logger.warning(f"Pattern was: {repr(date_pattern)}") + + # Try to match the channel name with the title pattern + title_match = title_regex.match(channel_name) + if not title_match: + logger.debug(f"Channel name '{channel_name}' doesn't match title pattern") + return [] # Return empty, will use default + + groups = title_match.groupdict() + logger.debug(f"Title pattern matched. 
Groups: {groups}") + + # Helper function to format template with matched groups + def format_template(template, groups): + """Replace {groupname} placeholders with matched group values""" + if not template: + return '' + result = template + for key, value in groups.items(): + result = result.replace(f'{{{key}}}', str(value) if value else '') + return result + + # Extract time from title if time pattern exists + time_info = None + time_groups = {} + if time_regex: + time_match = time_regex.search(channel_name) + if time_match: + time_groups = time_match.groupdict() + try: + hour = int(time_groups.get('hour')) + minute = int(time_groups.get('minute', 0)) + ampm = time_groups.get('ampm') + ampm = ampm.lower() if ampm else None + + # Determine if this is 12-hour or 24-hour format + if ampm in ('am', 'pm'): + # 12-hour format: convert to 24-hour + if ampm == 'pm' and hour != 12: + hour += 12 + elif ampm == 'am' and hour == 12: + hour = 0 + logger.debug(f"Extracted time (12-hour): {hour}:{minute:02d} {ampm}") + else: + # 24-hour format: hour is already in 24-hour format + # Validate that it's actually a 24-hour time (0-23) + if hour > 23: + logger.warning(f"Invalid 24-hour time: {hour}. Must be 0-23.") + hour = hour % 24 # Wrap around just in case + logger.debug(f"Extracted time (24-hour): {hour}:{minute:02d}") + + time_info = {'hour': hour, 'minute': minute} + except (ValueError, TypeError) as e: + logger.warning(f"Error parsing time: {e}") + + # Extract date from title if date pattern exists + date_info = None + date_groups = {} + if date_regex: + date_match = date_regex.search(channel_name) + if date_match: + date_groups = date_match.groupdict() + try: + # Support various date group names: month, day, year + month_str = date_groups.get('month', '') + day = int(date_groups.get('day', 1)) + year = int(date_groups.get('year', now.year)) # Default to current year if not provided + + # Parse month - can be numeric (1-12) or text (Jan, January, etc.) 
+ month = None + if month_str.isdigit(): + month = int(month_str) + else: + # Try to parse text month names + import calendar + month_str_lower = month_str.lower() + # Check full month names + for i, month_name in enumerate(calendar.month_name): + if month_name.lower() == month_str_lower: + month = i + break + # Check abbreviated month names if not found + if month is None: + for i, month_abbr in enumerate(calendar.month_abbr): + if month_abbr.lower() == month_str_lower: + month = i + break + + if month and 1 <= month <= 12 and 1 <= day <= 31: + date_info = {'year': year, 'month': month, 'day': day} + logger.debug(f"Extracted date: {year}-{month:02d}-{day:02d}") + else: + logger.warning(f"Invalid date values: month={month}, day={day}, year={year}") + except (ValueError, TypeError) as e: + logger.warning(f"Error parsing date: {e}") + + # Merge title groups, time groups, and date groups for template formatting + all_groups = {**groups, **time_groups, **date_groups} + + # Generate programs + programs = [] + + # If we have extracted time AND date, the event happens on a SPECIFIC date + # If we have time but NO date, generate for multiple days (existing behavior) + # All other days and times show "Upcoming" before or "Ended" after + event_happened = False + + # Determine how many iterations we need + if date_info and time_info: + # Specific date extracted - only generate for that one date + iterations = 1 + logger.debug(f"Date extracted, generating single event for specific date") + else: + # No specific date - use num_days (existing behavior) + iterations = num_days + + for day in range(iterations): + # Start from current time (like standard dummy) instead of midnight + # This ensures programs appear in the guide's current viewing window + day_start = now + timedelta(days=day) + day_end = day_start + timedelta(days=1) + + if time_info: + # We have an extracted event time - this is when the MAIN event starts + # The extracted time is in the SOURCE timezone (e.g., 8PM ET) + # We need to convert it to UTC for storage + + # Determine which date to use + if date_info: + # Use the extracted date from the channel title + current_date = datetime( + date_info['year'], + date_info['month'], + date_info['day'] + ).date() + logger.debug(f"Using extracted date: {current_date}") + else: + # No date extracted, use day offset from current time (existing behavior) + current_date = (now + timedelta(days=day)).date() + logger.debug(f"No date extracted, using day offset: {current_date}") + + # Create a naive datetime (no timezone info) representing the event in source timezone + event_start_naive = datetime.combine( + current_date, + datetime.min.time().replace( + hour=time_info['hour'], + minute=time_info['minute'] + ) + ) + + # Use pytz to localize the naive datetime to the source timezone + # This automatically handles DST! 
+ try: + event_start_local = source_tz.localize(event_start_naive) + # Convert to UTC + event_start_utc = event_start_local.astimezone(pytz.utc) + logger.debug(f"Converted {event_start_local} to UTC: {event_start_utc}") + except Exception as e: + logger.error(f"Error localizing time to {source_tz}: {e}") + # Fallback: treat as UTC + event_start_utc = django_timezone.make_aware(event_start_naive, pytz.utc) + + event_end_utc = event_start_utc + timedelta(minutes=program_duration) + + # Pre-generate the main event title and description for reuse + if title_template: + main_event_title = format_template(title_template, all_groups) + else: + title_parts = [] + if 'league' in all_groups and all_groups['league']: + title_parts.append(all_groups['league']) + if 'team1' in all_groups and 'team2' in all_groups: + title_parts.append(f"{all_groups['team1']} vs {all_groups['team2']}") + elif 'title' in all_groups and all_groups['title']: + title_parts.append(all_groups['title']) + main_event_title = ' - '.join(title_parts) if title_parts else channel_name + + if description_template: + main_event_description = format_template(description_template, all_groups) + else: + main_event_description = main_event_title + + + + # Determine if this day is before, during, or after the event + # Event only happens on day 0 (first day) + is_event_day = (day == 0) + + if is_event_day and not event_happened: + # This is THE day the event happens + # Fill programs BEFORE the event + current_time = day_start + + while current_time < event_start_utc: + program_start_utc = current_time + program_end_utc = min(current_time + timedelta(minutes=program_duration), event_start_utc) + + # Use custom upcoming templates if provided, otherwise use defaults + if upcoming_title_template: + upcoming_title = format_template(upcoming_title_template, all_groups) + else: + upcoming_title = main_event_title + + if upcoming_description_template: + upcoming_description = format_template(upcoming_description_template, all_groups) + else: + upcoming_description = f"Upcoming: {main_event_description}" + + programs.append({ + "channel_id": channel_id, + "start_time": program_start_utc, + "end_time": program_end_utc, + "title": upcoming_title, + "description": upcoming_description, + }) + + current_time += timedelta(minutes=program_duration) + + # Add the MAIN EVENT at the extracted time + programs.append({ + "channel_id": channel_id, + "start_time": event_start_utc, + "end_time": event_end_utc, + "title": main_event_title, + "description": main_event_description, + }) + + event_happened = True + + # Fill programs AFTER the event until end of day + current_time = event_end_utc + + while current_time < day_end: + program_start_utc = current_time + program_end_utc = min(current_time + timedelta(minutes=program_duration), day_end) + + # Use custom ended templates if provided, otherwise use defaults + if ended_title_template: + ended_title = format_template(ended_title_template, all_groups) + else: + ended_title = main_event_title + + if ended_description_template: + ended_description = format_template(ended_description_template, all_groups) + else: + ended_description = f"Ended: {main_event_description}" + + programs.append({ + "channel_id": channel_id, + "start_time": program_start_utc, + "end_time": program_end_utc, + "title": ended_title, + "description": ended_description, + }) + + current_time += timedelta(minutes=program_duration) + else: + # This day is either before the event (future days) or after the event happened + # Fill entire day 
with appropriate message + current_time = day_start + + # If event already happened, all programs show "Ended" + # If event hasn't happened yet (shouldn't occur with day 0 logic), show "Upcoming" + is_ended = event_happened + + while current_time < day_end: + program_start_utc = current_time + program_end_utc = min(current_time + timedelta(minutes=program_duration), day_end) + + # Use custom templates based on whether event has ended or is upcoming + if is_ended: + if ended_title_template: + program_title = format_template(ended_title_template, all_groups) + else: + program_title = main_event_title + + if ended_description_template: + program_description = format_template(ended_description_template, all_groups) + else: + program_description = f"Ended: {main_event_description}" + else: + if upcoming_title_template: + program_title = format_template(upcoming_title_template, all_groups) + else: + program_title = main_event_title + + if upcoming_description_template: + program_description = format_template(upcoming_description_template, all_groups) + else: + program_description = f"Upcoming: {main_event_description}" + + programs.append({ + "channel_id": channel_id, + "start_time": program_start_utc, + "end_time": program_end_utc, + "title": program_title, + "description": program_description, + }) + + current_time += timedelta(minutes=program_duration) + else: + # No extracted time - fill entire day with regular intervals + # day_start and day_end are already in UTC, so no conversion needed + programs_per_day = max(1, int(24 / (program_duration / 60))) + + for program_num in range(programs_per_day): + program_start_utc = day_start + timedelta(minutes=program_num * program_duration) + program_end_utc = program_start_utc + timedelta(minutes=program_duration) + + if title_template: + title = format_template(title_template, all_groups) + else: + title_parts = [] + if 'league' in all_groups and all_groups['league']: + title_parts.append(all_groups['league']) + if 'team1' in all_groups and 'team2' in all_groups: + title_parts.append(f"{all_groups['team1']} vs {all_groups['team2']}") + elif 'title' in all_groups and all_groups['title']: + title_parts.append(all_groups['title']) + title = ' - '.join(title_parts) if title_parts else channel_name + + if description_template: + description = format_template(description_template, all_groups) + else: + description = title + + programs.append({ + "channel_id": channel_id, + "start_time": program_start_utc, + "end_time": program_end_utc, + "title": title, + "description": description, + }) + + logger.info(f"Generated {len(programs)} custom dummy programs for {channel_name}") + return programs + + def generate_dummy_epg( channel_id, channel_name, xml_lines=None, num_days=1, program_length_hours=4 ): @@ -367,7 +836,7 @@ def generate_epg(request, profile_name=None, user=None): dummy_days = num_days if num_days > 0 else 3 # Calculate cutoff date for EPG data filtering (only if days > 0) - now = timezone.now() + now = django_timezone.now() cutoff_date = now + timedelta(days=num_days) if num_days > 0 else None # Process channels for the section @@ -434,12 +903,20 @@ def generate_epg(request, profile_name=None, user=None): # Default to channel number channel_id = str(formatted_channel_number) if formatted_channel_number != "" else str(channel.id) + # Use EPG data name for display, but channel name for pattern matching display_name = channel.epg_data.name if channel.epg_data else channel.name + # For dummy EPG pattern matching, always use the actual channel name + 
pattern_match_name = channel.name if not channel.epg_data: # Use the enhanced dummy EPG generation function with defaults program_length_hours = 4 # Default to 4-hour program blocks - dummy_programs = generate_dummy_programs(channel_id, display_name, num_days=dummy_days, program_length_hours=program_length_hours) + dummy_programs = generate_dummy_programs( + channel_id, pattern_match_name, + num_days=dummy_days, + program_length_hours=program_length_hours, + epg_source=None + ) for program in dummy_programs: # Format times in XMLTV format @@ -453,6 +930,31 @@ def generate_epg(request, profile_name=None, user=None): yield f" \n" else: + # Check if this is a dummy EPG with no programs (generate on-demand) + if channel.epg_data.epg_source and channel.epg_data.epg_source.source_type == 'dummy': + # This is a custom dummy EPG - check if it has programs + if not channel.epg_data.programs.exists(): + # No programs stored, generate on-demand using custom patterns + # Use actual channel name for pattern matching + program_length_hours = 4 + dummy_programs = generate_dummy_programs( + channel_id, pattern_match_name, + num_days=dummy_days, + program_length_hours=program_length_hours, + epg_source=channel.epg_data.epg_source + ) + + for program in dummy_programs: + start_str = program['start_time'].strftime("%Y%m%d%H%M%S %z") + stop_str = program['end_time'].strftime("%Y%m%d%H%M%S %z") + + yield f' \n' + yield f" {html.escape(program['title'])}\n" + yield f" {html.escape(program['description'])}\n" + yield f" \n" + + continue # Skip to next channel + # For real EPG data - filter only if days parameter was specified if num_days > 0: programs_qs = channel.epg_data.programs.filter( @@ -1013,14 +1515,34 @@ def xc_get_epg(request, user, short=False): limit = request.GET.get('limit', 4) if channel.epg_data: - if short == False: - programs = channel.epg_data.programs.filter( - start_time__gte=timezone.now() - ).order_by('start_time') + # Check if this is a dummy EPG that generates on-demand + if channel.epg_data.epg_source and channel.epg_data.epg_source.source_type == 'dummy': + if not channel.epg_data.programs.exists(): + # Generate on-demand using custom patterns + programs = generate_dummy_programs( + channel_id=channel_id, + channel_name=channel.name, + epg_source=channel.epg_data.epg_source + ) + else: + # Has stored programs, use them + if short == False: + programs = channel.epg_data.programs.filter( + start_time__gte=django_timezone.now() + ).order_by('start_time') + else: + programs = channel.epg_data.programs.all().order_by('start_time')[:limit] else: - programs = channel.epg_data.programs.all().order_by('start_time')[:limit] + # Regular EPG with stored programs + if short == False: + programs = channel.epg_data.programs.filter( + start_time__gte=django_timezone.now() + ).order_by('start_time') + else: + programs = channel.epg_data.programs.all().order_by('start_time')[:limit] else: - programs = generate_dummy_programs(channel_id=channel_id, channel_name=channel.name) + # No EPG data assigned, generate default dummy + programs = generate_dummy_programs(channel_id=channel_id, channel_name=channel.name, epg_source=None) output = {"epg_listings": []} for program in programs: @@ -1047,7 +1569,7 @@ def xc_get_epg(request, user, short=False): } if short == False: - program_output["now_playing"] = 1 if start <= timezone.now() <= end else 0 + program_output["now_playing"] = 1 if start <= django_timezone.now() <= end else 0 program_output["has_archive"] = "0" output['epg_listings'].append(program_output) 
@@ -1232,7 +1754,7 @@ def xc_get_series_info(request, user, series_id): try: should_refresh = ( not series_relation.last_episode_refresh or - series_relation.last_episode_refresh < timezone.now() - timedelta(hours=24) + series_relation.last_episode_refresh < django_timezone.now() - timedelta(hours=24) ) # Check if detailed data has been fetched diff --git a/core/api_urls.py b/core/api_urls.py index 00e20a6e..baa4bbe5 100644 --- a/core/api_urls.py +++ b/core/api_urls.py @@ -2,7 +2,7 @@ from django.urls import path, include from rest_framework.routers import DefaultRouter -from .api_views import UserAgentViewSet, StreamProfileViewSet, CoreSettingsViewSet, environment, version, rehash_streams_endpoint +from .api_views import UserAgentViewSet, StreamProfileViewSet, CoreSettingsViewSet, environment, version, rehash_streams_endpoint, TimezoneListView router = DefaultRouter() router.register(r'useragents', UserAgentViewSet, basename='useragent') @@ -12,5 +12,6 @@ urlpatterns = [ path('settings/env/', environment, name='token_refresh'), path('version/', version, name='version'), path('rehash-streams/', rehash_streams_endpoint, name='rehash_streams'), + path('timezones/', TimezoneListView.as_view(), name='timezones'), path('', include(router.urls)), ] diff --git a/core/api_views.py b/core/api_views.py index 9de5aa5a..f475909a 100644 --- a/core/api_views.py +++ b/core/api_views.py @@ -5,10 +5,12 @@ import ipaddress import logging from rest_framework import viewsets, status from rest_framework.response import Response +from rest_framework.views import APIView from django.shortcuts import get_object_or_404 from rest_framework.permissions import IsAuthenticated from rest_framework.decorators import api_view, permission_classes, action from drf_yasg.utils import swagger_auto_schema +from drf_yasg import openapi from .models import ( UserAgent, StreamProfile, @@ -328,25 +330,69 @@ def rehash_streams_endpoint(request): # Get the current hash keys from settings hash_key_setting = CoreSettings.objects.get(key=STREAM_HASH_KEY) hash_keys = hash_key_setting.value.split(",") - + # Queue the rehash task task = rehash_streams.delay(hash_keys) - + return Response({ "success": True, "message": "Stream rehashing task has been queued", "task_id": task.id }, status=status.HTTP_200_OK) - + except CoreSettings.DoesNotExist: return Response({ "success": False, "message": "Hash key settings not found" }, status=status.HTTP_400_BAD_REQUEST) - + except Exception as e: logger.error(f"Error triggering rehash streams: {e}") return Response({ "success": False, "message": "Failed to trigger rehash task" }, status=status.HTTP_500_INTERNAL_SERVER_ERROR) + + +# ───────────────────────────── +# Timezone List API +# ───────────────────────────── +class TimezoneListView(APIView): + """ + API endpoint that returns all available timezones supported by pytz. + Returns a list of timezone names grouped by region for easy selection. + This is a general utility endpoint that can be used throughout the application. 
+ """ + + def get_permissions(self): + return [Authenticated()] + + @swagger_auto_schema( + operation_description="Get list of all supported timezones", + responses={200: openapi.Response('List of timezones with grouping by region')} + ) + def get(self, request): + import pytz + + # Get all common timezones (excludes deprecated ones) + all_timezones = sorted(pytz.common_timezones) + + # Group by region for better UX + grouped = {} + for tz in all_timezones: + if '/' in tz: + region = tz.split('/')[0] + if region not in grouped: + grouped[region] = [] + grouped[region].append(tz) + else: + # Handle special zones like UTC, GMT, etc. + if 'Other' not in grouped: + grouped['Other'] = [] + grouped['Other'].append(tz) + + return Response({ + 'timezones': all_timezones, + 'grouped': grouped, + 'count': len(all_timezones) + }) diff --git a/frontend/src/WebSocket.jsx b/frontend/src/WebSocket.jsx index 23a9a656..0f46b012 100644 --- a/frontend/src/WebSocket.jsx +++ b/frontend/src/WebSocket.jsx @@ -642,6 +642,16 @@ export const WebsocketProvider = ({ children }) => { } break; + case 'epg_data_created': + // A new EPG data entry was created (e.g., for a dummy EPG) + // Fetch EPG data so the channel form can immediately assign it + try { + await fetchEPGData(); + } catch (e) { + console.warn('Failed to refresh EPG data after creation:', e); + } + break; + case 'stream_rehash': // Handle stream rehash progress updates if (parsedEvent.data.action === 'starting') { diff --git a/frontend/src/api.js b/frontend/src/api.js index 4ef5f97e..5b80a3f7 100644 --- a/frontend/src/api.js +++ b/frontend/src/api.js @@ -1118,6 +1118,21 @@ export default class API { } } + static async getTimezones() { + try { + const response = await request(`${host}/api/core/timezones/`); + return response; + } catch (e) { + errorNotification('Failed to retrieve timezones', e); + // Return fallback data instead of throwing + return { + timezones: ['UTC', 'US/Eastern', 'US/Central', 'US/Mountain', 'US/Pacific'], + grouped: {}, + count: 5 + }; + } + } + static async getStreamProfiles() { try { const response = await request(`${host}/api/core/streamprofiles/`); diff --git a/frontend/src/components/forms/DummyEPG.jsx b/frontend/src/components/forms/DummyEPG.jsx new file mode 100644 index 00000000..8f273118 --- /dev/null +++ b/frontend/src/components/forms/DummyEPG.jsx @@ -0,0 +1,761 @@ +import React, { useEffect, useMemo, useState } from 'react'; +import { + Box, + Button, + Divider, + Group, + Modal, + NumberInput, + Select, + Stack, + Text, + TextInput, + Textarea, +} from '@mantine/core'; +import { useForm } from '@mantine/form'; +import { notifications } from '@mantine/notifications'; +import API from '../../api'; + +const DummyEPGForm = ({ epg, isOpen, onClose }) => { + // Separate state for each field to prevent focus loss + const [titlePattern, setTitlePattern] = useState(''); + const [timePattern, setTimePattern] = useState(''); + const [datePattern, setDatePattern] = useState(''); + const [sampleTitle, setSampleTitle] = useState(''); + const [titleTemplate, setTitleTemplate] = useState(''); + const [descriptionTemplate, setDescriptionTemplate] = useState(''); + const [upcomingTitleTemplate, setUpcomingTitleTemplate] = useState(''); + const [upcomingDescriptionTemplate, setUpcomingDescriptionTemplate] = + useState(''); + const [endedTitleTemplate, setEndedTitleTemplate] = useState(''); + const [endedDescriptionTemplate, setEndedDescriptionTemplate] = useState(''); + const [timezoneOptions, setTimezoneOptions] = useState([]); + const 
[loadingTimezones, setLoadingTimezones] = useState(true); + + const form = useForm({ + initialValues: { + name: '', + is_active: true, + source_type: 'dummy', + custom_properties: { + title_pattern: '', + time_pattern: '', + date_pattern: '', + timezone: 'US/Eastern', + program_duration: 180, + sample_title: '', + title_template: '', + description_template: '', + upcoming_title_template: '', + upcoming_description_template: '', + ended_title_template: '', + ended_description_template: '', + name_source: 'channel', + stream_index: 1, + }, + }, + validate: { + name: (value) => (value?.trim() ? null : 'Name is required'), + 'custom_properties.title_pattern': (value) => { + if (!value?.trim()) return 'Title pattern is required'; + try { + new RegExp(value); + return null; + } catch (e) { + return `Invalid regex: ${e.message}`; + } + }, + 'custom_properties.name_source': (value) => { + if (!value) return 'Name source is required'; + return null; + }, + 'custom_properties.stream_index': (value, values) => { + if (values.custom_properties?.name_source === 'stream') { + if (!value || value < 1) { + return 'Stream index must be at least 1'; + } + } + return null; + }, + }, + }); + + // Real-time pattern validation with useMemo to prevent re-renders + const patternValidation = useMemo(() => { + const result = { + titleMatch: false, + timeMatch: false, + dateMatch: false, + titleGroups: {}, + timeGroups: {}, + dateGroups: {}, + formattedTitle: '', + formattedDescription: '', + error: null, + }; + + // Validate title pattern + if (titlePattern && sampleTitle) { + try { + const titleRegex = new RegExp(titlePattern); + const titleMatch = sampleTitle.match(titleRegex); + + if (titleMatch) { + result.titleMatch = true; + result.titleGroups = titleMatch.groups || {}; + } + } catch (e) { + result.error = `Title pattern error: ${e.message}`; + } + } + + // Validate time pattern + if (timePattern && sampleTitle) { + try { + const timeRegex = new RegExp(timePattern); + const timeMatch = sampleTitle.match(timeRegex); + + if (timeMatch) { + result.timeMatch = true; + result.timeGroups = timeMatch.groups || {}; + } + } catch (e) { + result.error = result.error + ? `${result.error}; Time pattern error: ${e.message}` + : `Time pattern error: ${e.message}`; + } + } + + // Validate date pattern + if (datePattern && sampleTitle) { + try { + const dateRegex = new RegExp(datePattern); + const dateMatch = sampleTitle.match(dateRegex); + + if (dateMatch) { + result.dateMatch = true; + result.dateGroups = dateMatch.groups || {}; + } + } catch (e) { + result.error = result.error + ? 
`${result.error}; Date pattern error: ${e.message}` + : `Date pattern error: ${e.message}`; + } + } + + // Merge all groups for template formatting + const allGroups = { + ...result.titleGroups, + ...result.timeGroups, + ...result.dateGroups, + }; + + // Format title template + if (titleTemplate && (result.titleMatch || result.timeMatch)) { + result.formattedTitle = titleTemplate.replace( + /\{(\w+)\}/g, + (match, key) => allGroups[key] || match + ); + } + + // Format description template + if (descriptionTemplate && (result.titleMatch || result.timeMatch)) { + result.formattedDescription = descriptionTemplate.replace( + /\{(\w+)\}/g, + (match, key) => allGroups[key] || match + ); + } + + return result; + }, [ + titlePattern, + timePattern, + datePattern, + sampleTitle, + titleTemplate, + descriptionTemplate, + ]); + + useEffect(() => { + if (epg) { + const custom = epg.custom_properties || {}; + + form.setValues({ + name: epg.name || '', + is_active: epg.is_active ?? true, + source_type: 'dummy', + custom_properties: { + title_pattern: custom.title_pattern || '', + time_pattern: custom.time_pattern || '', + date_pattern: custom.date_pattern || '', + timezone: + custom.timezone || + custom.timezone_offset?.toString() || + 'US/Eastern', + program_duration: custom.program_duration || 180, + sample_title: custom.sample_title || '', + title_template: custom.title_template || '', + description_template: custom.description_template || '', + upcoming_title_template: custom.upcoming_title_template || '', + upcoming_description_template: + custom.upcoming_description_template || '', + ended_title_template: custom.ended_title_template || '', + ended_description_template: custom.ended_description_template || '', + name_source: custom.name_source || 'channel', + stream_index: custom.stream_index || 1, + }, + }); + + // Set controlled state + setTitlePattern(custom.title_pattern || ''); + setTimePattern(custom.time_pattern || ''); + setDatePattern(custom.date_pattern || ''); + setSampleTitle(custom.sample_title || ''); + setTitleTemplate(custom.title_template || ''); + setDescriptionTemplate(custom.description_template || ''); + setUpcomingTitleTemplate(custom.upcoming_title_template || ''); + setUpcomingDescriptionTemplate( + custom.upcoming_description_template || '' + ); + setEndedTitleTemplate(custom.ended_title_template || ''); + setEndedDescriptionTemplate(custom.ended_description_template || ''); + } else { + form.reset(); + setTitlePattern(''); + setTimePattern(''); + setDatePattern(''); + setSampleTitle(''); + setTitleTemplate(''); + setDescriptionTemplate(''); + setUpcomingTitleTemplate(''); + setUpcomingDescriptionTemplate(''); + setEndedTitleTemplate(''); + setEndedDescriptionTemplate(''); + } + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [epg]); + + // Fetch available timezones from the API + useEffect(() => { + const fetchTimezones = async () => { + try { + setLoadingTimezones(true); + const response = await API.getTimezones(); + + // Convert timezone list to Select options format + const options = response.timezones.map((tz) => ({ + value: tz, + label: tz, + })); + + setTimezoneOptions(options); + } catch (error) { + console.error('Failed to load timezones:', error); + notifications.show({ + title: 'Warning', + message: 'Failed to load timezone list. 
Using default options.', + color: 'yellow', + }); + // Fallback to a minimal list + setTimezoneOptions([ + { value: 'UTC', label: 'UTC' }, + { value: 'US/Eastern', label: 'US/Eastern' }, + { value: 'US/Central', label: 'US/Central' }, + { value: 'US/Pacific', label: 'US/Pacific' }, + ]); + } finally { + setLoadingTimezones(false); + } + }; + + fetchTimezones(); + }, []); + + const handleSubmit = async (values) => { + try { + if (epg?.id) { + await API.updateEPG({ ...values, id: epg.id }); + notifications.show({ + title: 'Success', + message: 'Dummy EPG source updated successfully', + color: 'green', + }); + } else { + await API.addEPG(values); + notifications.show({ + title: 'Success', + message: 'Dummy EPG source created successfully', + color: 'green', + }); + } + onClose(); + } catch (error) { + notifications.show({ + title: 'Error', + message: error.message || 'Failed to save dummy EPG source', + color: 'red', + }); + } + }; + + return ( + +
+ + {/* Basic Settings */} + + + {/* Pattern Configuration */} + + + + Define regex patterns to extract information from channel titles or + stream names. Use named capture groups like + (?<groupname>pattern). + + +