diff --git a/apps/epg/migrations/0013_alter_epgsource_refresh_interval.py b/apps/epg/migrations/0013_alter_epgsource_refresh_interval.py
new file mode 100644
index 00000000..64be2c3c
--- /dev/null
+++ b/apps/epg/migrations/0013_alter_epgsource_refresh_interval.py
@@ -0,0 +1,18 @@
+# Generated by Django 5.1.6 on 2025-05-21 19:58
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ('epg', '0012_alter_epgsource_status'),
+    ]
+
+    operations = [
+        migrations.AlterField(
+            model_name='epgsource',
+            name='refresh_interval',
+            field=models.IntegerField(default=0),
+        ),
+    ]
diff --git a/apps/epg/models.py b/apps/epg/models.py
index ed8f2708..dce4e21b 100644
--- a/apps/epg/models.py
+++ b/apps/epg/models.py
@@ -32,7 +32,7 @@ class EPGSource(models.Model):
     api_key = models.CharField(max_length=255, blank=True, null=True)  # For Schedules Direct
     is_active = models.BooleanField(default=True)
     file_path = models.CharField(max_length=1024, blank=True, null=True)
-    refresh_interval = models.IntegerField(default=24)
+    refresh_interval = models.IntegerField(default=0)
     refresh_task = models.ForeignKey(
         PeriodicTask, on_delete=models.SET_NULL, null=True, blank=True
     )
diff --git a/apps/epg/tasks.py b/apps/epg/tasks.py
index 71e468bd..f3f281e7 100644
--- a/apps/epg/tasks.py
+++ b/apps/epg/tasks.py
@@ -1445,17 +1445,42 @@ def fetch_schedules_direct(source):
 # -------------------------------
 def parse_xmltv_time(time_str):
     try:
+        # Basic format validation
+        if len(time_str) < 14:
+            logger.warning(f"XMLTV timestamp too short: '{time_str}', using as-is")
+            dt_obj = datetime.strptime(time_str, '%Y%m%d%H%M%S')
+            return timezone.make_aware(dt_obj, timezone=dt_timezone.utc)
+
+        # Parse base datetime
         dt_obj = datetime.strptime(time_str[:14], '%Y%m%d%H%M%S')
-        tz_sign = time_str[15]
-        tz_hours = int(time_str[16:18])
-        tz_minutes = int(time_str[18:20])
-        if tz_sign == '+':
-            dt_obj = dt_obj - timedelta(hours=tz_hours, minutes=tz_minutes)
-        elif tz_sign == '-':
-            dt_obj = dt_obj + timedelta(hours=tz_hours, minutes=tz_minutes)
-        aware_dt = timezone.make_aware(dt_obj, timezone=dt_timezone.utc)
-        logger.trace(f"Parsed XMLTV time '{time_str}' to {aware_dt}")
-        return aware_dt
+
+        # Handle timezone if present
+        if len(time_str) >= 20:  # Has timezone info
+            tz_sign = time_str[15]
+            tz_hours = int(time_str[16:18])
+            tz_minutes = int(time_str[18:20])
+
+            # Create a timezone object
+            if tz_sign == '+':
+                tz_offset = dt_timezone(timedelta(hours=tz_hours, minutes=tz_minutes))
+            elif tz_sign == '-':
+                tz_offset = dt_timezone(timedelta(hours=-tz_hours, minutes=-tz_minutes))
+            else:
+                tz_offset = dt_timezone.utc
+
+            # Make datetime aware with correct timezone
+            aware_dt = datetime.replace(dt_obj, tzinfo=tz_offset)
+            # Convert to UTC
+            aware_dt = aware_dt.astimezone(dt_timezone.utc)
+
+            logger.trace(f"Parsed XMLTV time '{time_str}' to {aware_dt}")
+            return aware_dt
+        else:
+            # No timezone info, assume UTC
+            aware_dt = timezone.make_aware(dt_obj, timezone=dt_timezone.utc)
+            logger.trace(f"Parsed XMLTV time without timezone '{time_str}' as UTC: {aware_dt}")
+            return aware_dt
+
     except Exception as e:
         logger.error(f"Error parsing XMLTV time '{time_str}': {e}", exc_info=True)
         raise
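For reference, the offset handling introduced above amounts to the following standalone sketch; it is not part of the patch, uses only the stdlib, and the timestamp value is illustrative:

# Normalize an XMLTV timestamp such as "20250521195800 +0200" to UTC.
from datetime import datetime, timedelta, timezone

raw = "20250521195800 +0200"                      # illustrative value
dt = datetime.strptime(raw[:14], "%Y%m%d%H%M%S")  # naive wall-clock time
sign = 1 if raw[15] == "+" else -1
offset = timezone(sign * timedelta(hours=int(raw[16:18]), minutes=int(raw[18:20])))
print(dt.replace(tzinfo=offset).astimezone(timezone.utc))  # 2025-05-21 17:58:00+00:00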
diff --git a/apps/m3u/migrations/0012_alter_m3uaccount_refresh_interval.py b/apps/m3u/migrations/0012_alter_m3uaccount_refresh_interval.py
new file mode 100644
index 00000000..7045810e
--- /dev/null
+++ b/apps/m3u/migrations/0012_alter_m3uaccount_refresh_interval.py
@@ -0,0 +1,18 @@
+# Generated by Django 5.1.6 on 2025-05-21 19:58
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ('m3u', '0011_alter_m3uaccount_status'),
+    ]
+
+    operations = [
+        migrations.AlterField(
+            model_name='m3uaccount',
+            name='refresh_interval',
+            field=models.IntegerField(default=0),
+        ),
+    ]
diff --git a/apps/m3u/models.py b/apps/m3u/models.py
index 4ea661c7..94ec88fc 100644
--- a/apps/m3u/models.py
+++ b/apps/m3u/models.py
@@ -86,7 +86,7 @@ class M3UAccount(models.Model):
     username = models.CharField(max_length=255, null=True, blank=True)
     password = models.CharField(max_length=255, null=True, blank=True)
     custom_properties = models.TextField(null=True, blank=True)
-    refresh_interval = models.IntegerField(default=24)
+    refresh_interval = models.IntegerField(default=0)
     refresh_task = models.ForeignKey(
         PeriodicTask, on_delete=models.SET_NULL, null=True, blank=True
     )
diff --git a/apps/m3u/tasks.py b/apps/m3u/tasks.py
index 4a1f2645..b1b1170d 100644
--- a/apps/m3u/tasks.py
+++ b/apps/m3u/tasks.py
@@ -22,11 +22,11 @@ from core.utils import RedisClient, acquire_task_lock, release_task_lock
 from core.models import CoreSettings, UserAgent
 from asgiref.sync import async_to_sync
 from core.xtream_codes import Client as XCClient
+from core.utils import send_websocket_update
 
 logger = logging.getLogger(__name__)
 
 BATCH_SIZE = 1000
-SKIP_EXTS = {}
 m3u_dir = os.path.join(settings.MEDIA_ROOT, "cached_m3u")
 
 def fetch_m3u_lines(account, use_cache=False):
@@ -200,11 +200,6 @@ def parse_extinf_line(line: str) -> dict:
         'name': name
     }
 
-import re
-import logging
-
-logger = logging.getLogger(__name__)
-
 def _matches_filters(stream_name: str, group_name: str, filters):
     """Check if a stream or group name matches a precompiled regex filter."""
     compiled_filters = [(re.compile(f.regex_pattern, re.IGNORECASE), f.exclude) for f in filters]
@@ -245,7 +240,7 @@ def process_groups(account, groups):
     groups_to_create = []
     for group_name, custom_props in groups.items():
         logger.debug(f"Handling group: {group_name}")
-        if (group_name not in existing_groups) and (group_name not in SKIP_EXTS):
+        if (group_name not in existing_groups):
             groups_to_create.append(ChannelGroup(
                 name=group_name,
             ))
@@ -495,9 +490,9 @@ def process_m3u_batch(account_id, batch, groups, hash_keys):
     retval = f"Batch processed: {len(streams_to_create)} created, {len(streams_to_update)} updated."
 
     # Aggressive garbage collection
-    del streams_to_create, streams_to_update, stream_hashes, existing_streams
-    from core.utils import cleanup_memory
-    cleanup_memory(log_usage=True, force_collection=True)
+    #del streams_to_create, streams_to_update, stream_hashes, existing_streams
+    #from core.utils import cleanup_memory
+    #cleanup_memory(log_usage=True, force_collection=True)
 
     return retval
 
@@ -696,25 +691,68 @@ def refresh_m3u_groups(account_id, use_cache=False, full_refresh=False):
         release_task_lock('refresh_m3u_account_groups', account_id)
         return f"Failed to fetch M3U data for account_id={account_id}.", None
 
-    for line in lines:
+    # Log basic file structure for debugging
+    logger.debug(f"Processing {len(lines)} lines from M3U file")
+
+    line_count = 0
+    extinf_count = 0
+    url_count = 0
+    valid_stream_count = 0
+    problematic_lines = []
+
+    for line_index, line in enumerate(lines):
+        line_count += 1
         line = line.strip()
+
         if line.startswith("#EXTINF"):
+            extinf_count += 1
             parsed = parse_extinf_line(line)
             if parsed:
                 if "group-title" in parsed["attributes"]:
-                    groups[parsed["attributes"]["group-title"]] = {}
+                    group_name = parsed["attributes"]["group-title"]
+                    # Log new groups as they're discovered
+                    if group_name not in groups:
+                        logger.debug(f"Found new group: '{group_name}'")
+                    groups[group_name] = {}
 
                 extinf_data.append(parsed)
+            else:
+                # Log problematic EXTINF lines
+                logger.warning(f"Failed to parse EXTINF at line {line_index+1}: {line[:200]}")
+                problematic_lines.append((line_index+1, line[:200]))
+
         elif extinf_data and line.startswith("http"):
+            url_count += 1
             # Associate URL with the last EXTINF line
             extinf_data[-1]["url"] = line
+            valid_stream_count += 1
+            # Periodically log progress for large files
+            if valid_stream_count % 1000 == 0:
+                logger.debug(f"Processed {valid_stream_count} valid streams so far...")
+
+    # Log summary statistics
+    logger.info(f"M3U parsing complete - Lines: {line_count}, EXTINF: {extinf_count}, URLs: {url_count}, Valid streams: {valid_stream_count}")
+
+    if problematic_lines:
+        logger.warning(f"Found {len(problematic_lines)} problematic lines during parsing")
+        for i, (line_num, content) in enumerate(problematic_lines[:10]):  # Log max 10 examples
+            logger.warning(f"Problematic line #{i+1} at line {line_num}: {content}")
+        if len(problematic_lines) > 10:
+            logger.warning(f"... and {len(problematic_lines) - 10} more problematic lines")
+
+    # Log group statistics
+    logger.info(f"Found {len(groups)} groups in M3U file: {', '.join(list(groups.keys())[:20])}" +
+                ("..." if len(groups) > 20 else ""))
+
+    # Cache processed data
     cache_path = os.path.join(m3u_dir, f"{account_id}.json")
     with open(cache_path, 'w', encoding='utf-8') as f:
         json.dump({
             "extinf_data": extinf_data,
             "groups": groups,
         }, f)
+    logger.debug(f"Cached parsed M3U data to {cache_path}")
 
     send_m3u_update(account_id, "processing_groups", 0)
@@ -924,6 +962,7 @@ def refresh_single_m3u_account(account_id):
         account.save(update_fields=['status'])
 
     if account.account_type == M3UAccount.Types.STADNARD:
+        logger.debug(f"Processing Standard account with groups: {existing_groups}")
         # Break into batches and process in parallel
         batches = [extinf_data[i:i + BATCH_SIZE] for i in range(0, len(extinf_data), BATCH_SIZE)]
         task_group = group(process_m3u_batch.s(account_id, batch, existing_groups, hash_keys) for batch in batches)
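The batch dispatch in the hunk above is plain list slicing; a minimal standalone sketch with illustrative values, not project code:

# Chunk parsed EXTINF entries into BATCH_SIZE slices; each slice becomes one
# process_m3u_batch signature inside the Celery group() call shown above.
BATCH_SIZE = 1000
extinf_data = list(range(2500))  # stand-in for parsed entries
batches = [extinf_data[i:i + BATCH_SIZE] for i in range(0, len(extinf_data), BATCH_SIZE)]
print([len(b) for b in batches])  # [1000, 1000, 500]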
@@ -1090,8 +1129,6 @@ refresh_single_m3u_account(account_id):
     return f"Dispatched jobs complete."
 
 
-from core.utils import send_websocket_update
-
 def send_m3u_update(account_id, action, progress, **kwargs):
     # Start with the base data dictionary
     data = {
diff --git a/core/utils.py b/core/utils.py
index fcff03e5..039b0695 100644
--- a/core/utils.py
+++ b/core/utils.py
@@ -59,9 +59,16 @@ class RedisClient:
             client.config_set('save', '')  # Disable RDB snapshots
             client.config_set('appendonly', 'no')  # Disable AOF logging
 
-            # Set optimal memory settings
-            client.config_set('maxmemory-policy', 'allkeys-lru')  # Use LRU eviction
-            client.config_set('maxmemory', '256mb')  # Set reasonable memory limit
+            # Set optimal memory settings with environment variable support
+            # Get max memory from environment or use a larger default (512MB instead of 256MB)
+            #max_memory = os.environ.get('REDIS_MAX_MEMORY', '512mb')
+            #eviction_policy = os.environ.get('REDIS_EVICTION_POLICY', 'allkeys-lru')
+
+            # Apply memory settings
+            #client.config_set('maxmemory-policy', eviction_policy)
+            #client.config_set('maxmemory', max_memory)
+
+            #logger.info(f"Redis configured with maxmemory={max_memory}, policy={eviction_policy}")
 
             # Disable protected mode when in debug mode
             if os.environ.get('DISPATCHARR_DEBUG', '').lower() == 'true':
@@ -69,10 +76,18 @@ class RedisClient:
                 logger.warning("Redis protected mode disabled for debug environment")
 
             logger.trace("Redis persistence disabled for better performance")
-        except redis.exceptions.ResponseError:
-            # This might fail if Redis is configured to prohibit CONFIG command
-            # or if running in protected mode - that's okay
-            logger.error("Could not modify Redis persistence settings (may be restricted)")
+        except redis.exceptions.ResponseError as e:
+            # Improve error handling for Redis configuration errors
+            if "OOM" in str(e):
+                logger.error(f"Redis OOM during configuration: {e}")
+                # Try to increase maxmemory as an emergency measure
+                try:
+                    client.config_set('maxmemory', '768mb')
+                    logger.warning("Applied emergency Redis memory increase to 768MB")
+                except:
+                    pass
+            else:
+                logger.error(f"Redis configuration error: {e}")
 
         logger.info(f"Connected to Redis at {redis_host}:{redis_port}/{redis_db}")
 
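The reworked exception handler above retries CONFIG SET with more headroom when Redis reports an out-of-memory error; a minimal standalone sketch of that pattern follows (assumes a local Redis on the default port; illustrative only, not project code):

# Retry a Redis CONFIG SET once with a higher maxmemory if the server
# answers with an OOM ResponseError.
import redis

client = redis.Redis(host="localhost", port=6379, db=0)
try:
    client.config_set("appendonly", "no")
except redis.exceptions.ResponseError as e:
    if "OOM" in str(e):
        client.config_set("maxmemory", "768mb")  # emergency increase
    else:
        raise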
diff --git a/dispatcharr/celery.py b/dispatcharr/celery.py
index 855acacd..8856d330 100644
--- a/dispatcharr/celery.py
+++ b/dispatcharr/celery.py
@@ -50,7 +50,7 @@ app.conf.update(
 )
 
 # Add memory cleanup after task completion
-@task_postrun.connect  # Use the imported signal
+#@task_postrun.connect  # Use the imported signal
 def cleanup_task_memory(**kwargs):
     """Clean up memory after each task completes"""
     # Get task name from kwargs
diff --git a/frontend/src/components/forms/EPG.jsx b/frontend/src/components/forms/EPG.jsx
index b886dccf..0c7f78c0 100644
--- a/frontend/src/components/forms/EPG.jsx
+++ b/frontend/src/components/forms/EPG.jsx
@@ -12,19 +12,22 @@ import {
   NativeSelect,
   NumberInput,
   Space,
+  Grid,
+  Group,
+  FileInput,
+  Title,
+  Text,
+  Divider,
+  Stack,
+  Box,
 } from '@mantine/core';
 import { isNotEmpty, useForm } from '@mantine/form';
+import { IconUpload } from '@tabler/icons-react';
 
 const EPG = ({ epg = null, isOpen, onClose }) => {
   const epgs = useEPGsStore((state) => state.epgs);
-  const [file, setFile] = useState(null);
-
-  const handleFileChange = (e) => {
-    const file = e.target.files[0];
-    if (file) {
-      setFile(file);
-    }
-  };
+  // Remove the file state and handler since we're not supporting file uploads
+  const [sourceType, setSourceType] = useState('xmltv');
 
   const form = useForm({
     mode: 'uncontrolled',
@@ -47,114 +50,151 @@ const EPG = ({ epg = null, isOpen, onClose }) => {
     const values = form.getValues();
 
     if (epg?.id) {
-      await API.updateEPG({ id: epg.id, ...values, file });
+      // Remove file from API call
+      await API.updateEPG({ id: epg.id, ...values });
     } else {
+      // Remove file from API call
       await API.addEPG({
         ...values,
-        file,
       });
     }
 
     form.reset();
-    setFile(null);
     onClose();
   };
 
   useEffect(() => {
     if (epg) {
-      form.setValues({
+      const values = {
         name: epg.name,
         source_type: epg.source_type,
         url: epg.url,
         api_key: epg.api_key,
         is_active: epg.is_active,
         refresh_interval: epg.refresh_interval,
-      });
+      };
+      form.setValues(values);
+      setSourceType(epg.source_type); // Update source type state
     } else {
       form.reset();
+      setSourceType('xmltv'); // Reset to xmltv
     }
   }, [epg]);
 
+  // Function to handle source type changes
+  const handleSourceTypeChange = (value) => {
+    form.setFieldValue('source_type', value);
+    setSourceType(value);
+  };
+
   if (!isOpen) {
     return <>;
   }
 
[The rest of this hunk, the component's returned JSX, is garbled in this extract and the original markup cannot be recovered. The surviving fragments show the modal body rebuilt as a Grid with a left column, a right column, and a full-width section at the bottom; the source-type select wired to handleSourceTypeChange; the old refresh-interval description "How often to automatically refresh EPG data (0 to disable automatic refreshes)" on removed lines; and a "Status" checkbox placed at the same level as Refresh Interval with the hint "When enabled, this EPG source will auto update."]
diff --git a/frontend/src/components/tables/M3UsTable.jsx b/frontend/src/components/tables/M3UsTable.jsx
index 8dcdfde1..5977eed1 100644
--- a/frontend/src/components/tables/M3UsTable.jsx
+++ b/frontend/src/components/tables/M3UsTable.jsx
@@ -306,14 +306,14 @@ const M3UTable = () => {
[The element tags in this hunk are garbled in this extract; the surviving text shows the parsing-progress labels "Parsing: {parseInt(data.progress)}%", "Elapsed: {elapsedTime}", "Remaining: {timeRemaining}", and "Streams: {data.streams_processed}", each with its wrapping component changed between the removed and added lines.]
@@ -561,7 +561,7 @@ const M3UTable = () => {
         c="dimmed"
         size="xs"
         lineClamp={2}
-        style={{ lineHeight: 1.3 }}
+        style={{ lineHeight: 1.1 }}
       >
         {value}
diff --git a/requirements.txt b/requirements.txt
index 732ce9ad..d029bd1a 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -23,6 +23,7 @@ tzlocal
 # PyTorch dependencies (CPU only)
 --extra-index-url https://download.pytorch.org/whl/cpu/
 torch==2.6.0+cpu
+tzlocal
 
 # ML/NLP dependencies
 sentence-transformers==3.4.1
@@ -30,4 +31,4 @@ channels
 channels-redis
 django-filter
 django-celery-beat
-lxml==5.4.0
\ No newline at end of file
+lxml==5.4.0