From f4e91013f2cf2689492cb6bee5ed67a36d4bdf70 Mon Sep 17 00:00:00 2001 From: SergeantPanda <61642231+SergeantPanda@users.noreply.github.com> Date: Mon, 15 Sep 2025 20:14:02 -0500 Subject: [PATCH 001/119] Remove local data volume binding Removed local data volume binding from docker-compose. --- docker/docker-compose.aio.yml | 1 - 1 file changed, 1 deletion(-) diff --git a/docker/docker-compose.aio.yml b/docker/docker-compose.aio.yml index 0cf387d5..90cd8654 100644 --- a/docker/docker-compose.aio.yml +++ b/docker/docker-compose.aio.yml @@ -9,7 +9,6 @@ services: - 9191:9191 volumes: - dispatcharr_data:/data - - ./data:/data environment: - DISPATCHARR_ENV=aio - REDIS_HOST=localhost From f1739f23944eade0ecc46541b13cf5efe6b289c4 Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Tue, 16 Sep 2025 08:55:10 -0500 Subject: [PATCH 002/119] Add EPG auto-match functionality for specific channels and update UI --- apps/channels/api_views.py | 29 ++++- apps/channels/tasks.py | 122 ++++++++++++++++++++++ frontend/src/api.js | 20 ++++ frontend/src/components/forms/Channel.jsx | 58 +++++++++- 4 files changed, 227 insertions(+), 2 deletions(-) diff --git a/apps/channels/api_views.py b/apps/channels/api_views.py index 92755252..6537e6b8 100644 --- a/apps/channels/api_views.py +++ b/apps/channels/api_views.py @@ -39,7 +39,7 @@ from .serializers import ( ChannelProfileSerializer, RecordingSerializer, ) -from .tasks import match_epg_channels, evaluate_series_rules, evaluate_series_rules_impl +from .tasks import match_epg_channels, evaluate_series_rules, evaluate_series_rules_impl, match_single_channel_epg import django_filters from django_filters.rest_framework import DjangoFilterBackend from rest_framework.filters import SearchFilter, OrderingFilter @@ -789,6 +789,33 @@ class ChannelViewSet(viewsets.ModelViewSet): {"message": "EPG matching task initiated."}, status=status.HTTP_202_ACCEPTED ) + @swagger_auto_schema( + method="post", + operation_description="Try to auto-match this 
specific channel with EPG data.", + responses={200: "EPG matching completed", 202: "EPG matching task initiated"}, + ) + @action(detail=True, methods=["post"], url_path="match-epg") + def match_channel_epg(self, request, pk=None): + channel = self.get_object() + + # Import the matching logic + from apps.channels.tasks import match_single_channel_epg + + try: + # Try to match this specific channel - call synchronously for immediate response + result = match_single_channel_epg.apply_async(args=[channel.id]).get(timeout=30) + + # Refresh the channel from DB to get any updates + channel.refresh_from_db() + + return Response({ + "message": result.get("message", "Channel matching completed"), + "matched": result.get("matched", False), + "channel": self.get_serializer(channel).data + }) + except Exception as e: + return Response({"error": str(e)}, status=400) + # ───────────────────────────────────────────────────────── # 7) Set EPG and Refresh # ───────────────────────────────────────────────────────── diff --git a/apps/channels/tasks.py b/apps/channels/tasks.py index e0954210..f4d58f46 100755 --- a/apps/channels/tasks.py +++ b/apps/channels/tasks.py @@ -241,6 +241,128 @@ def match_epg_channels(): cleanup_memory(log_usage=True, force_collection=True) +@shared_task +def match_single_channel_epg(channel_id): + """ + Try to match a single channel with EPG data using the same logic as match_epg_channels + but for just one channel. Returns a dict with match status and message. 
+ """ + try: + from apps.channels.models import Channel + from apps.epg.models import EPGData + import tempfile + import subprocess + import json + + logger.info(f"Starting single channel EPG matching for channel ID {channel_id}") + + # Get the channel + try: + channel = Channel.objects.get(id=channel_id) + except Channel.DoesNotExist: + return {"matched": False, "message": "Channel not found"} + + # If channel already has EPG data, skip + if channel.epg_data: + return {"matched": False, "message": f"Channel '{channel.name}' already has EPG data assigned"} + + # Get region preference + try: + region_obj = CoreSettings.objects.get(key="preferred-region") + region_code = region_obj.value.strip().lower() + except CoreSettings.DoesNotExist: + region_code = None + + # Prepare channel data for matching script + normalized_tvg_id = channel.tvg_id.strip().lower() if channel.tvg_id else "" + channel_json = { + "id": channel.id, + "name": channel.name, + "tvg_id": normalized_tvg_id, + "original_tvg_id": channel.tvg_id, + "fallback_name": normalized_tvg_id if normalized_tvg_id else channel.name, + "norm_chan": normalize_name(normalized_tvg_id if normalized_tvg_id else channel.name) + } + + # Prepare EPG data + epg_json = [] + for epg in EPGData.objects.all(): + normalized_epg_tvg_id = epg.tvg_id.strip().lower() if epg.tvg_id else "" + epg_json.append({ + 'id': epg.id, + 'tvg_id': normalized_epg_tvg_id, + 'original_tvg_id': epg.tvg_id, + 'name': epg.name, + 'norm_name': normalize_name(epg.name), + 'epg_source_id': epg.epg_source.id if epg.epg_source else None, + }) + + # Create payload for matching script + payload = { + "channels": [channel_json], # Only one channel + "epg_data": epg_json, + } + + if region_code: + payload["region_code"] = region_code + + # Write to temporary file and run the matching script + with tempfile.NamedTemporaryFile(mode='w', suffix='.json', delete=False) as temp_file: + json.dump(payload, temp_file) + temp_file_path = temp_file.name + + try: + # 
Run the matching script + from django.conf import settings + import os + + project_root = settings.BASE_DIR + script_path = os.path.join(project_root, 'scripts', 'epg_match.py') + + process = subprocess.Popen( + ['python', script_path, temp_file_path], + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + text=True, + cwd=project_root + ) + + stdout, stderr = process.communicate(timeout=60) # 1 minute timeout for single channel + + if process.returncode != 0: + logger.error(f"EPG matching script failed: {stderr}") + return {"matched": False, "message": "EPG matching failed"} + + result = json.loads(stdout) + channels_to_update = result.get("channels_to_update", []) + + if channels_to_update: + # Update the channel with the matched EPG data + epg_data_id = channels_to_update[0].get("epg_data_id") + if epg_data_id: + try: + epg_data = EPGData.objects.get(id=epg_data_id) + channel.epg_data = epg_data + channel.save(update_fields=["epg_data"]) + + return { + "matched": True, + "message": f"Channel '{channel.name}' matched with EPG '{epg_data.name}' (TVG ID: {epg_data.tvg_id})" + } + except EPGData.DoesNotExist: + return {"matched": False, "message": "Matched EPG data not found"} + + return {"matched": False, "message": f"No suitable EPG match found for channel '{channel.name}'"} + + finally: + # Clean up temp file + os.remove(temp_file_path) + + except Exception as e: + logger.error(f"Error in single channel EPG matching: {e}", exc_info=True) + return {"matched": False, "message": f"Error during matching: {str(e)}"} + + def evaluate_series_rules_impl(tvg_id: str | None = None): """Synchronous implementation of series rule evaluation; returns details for debugging.""" from django.utils import timezone diff --git a/frontend/src/api.js b/frontend/src/api.js index 956f3ece..d3e222d2 100644 --- a/frontend/src/api.js +++ b/frontend/src/api.js @@ -1452,6 +1452,26 @@ export default class API { } } + static async matchChannelEpg(channelId) { + try { + const response = await 
request( + `${host}/api/channels/channels/${channelId}/match-epg/`, + { + method: 'POST', + } + ); + + // Update the channel in the store with the refreshed data if provided + if (response.channel) { + useChannelsStore.getState().updateChannel(response.channel); + } + + return response; + } catch (e) { + errorNotification('Failed to run EPG auto-match for channel', e); + } + } + static async fetchActiveChannelStats() { try { const response = await request(`${host}/proxy/ts/status`); diff --git a/frontend/src/components/forms/Channel.jsx b/frontend/src/components/forms/Channel.jsx index d07fa44c..62da50c1 100644 --- a/frontend/src/components/forms/Channel.jsx +++ b/frontend/src/components/forms/Channel.jsx @@ -34,7 +34,7 @@ import { UnstyledButton, } from '@mantine/core'; import { notifications } from '@mantine/notifications'; -import { ListOrdered, SquarePlus, SquareX, X } from 'lucide-react'; +import { ListOrdered, SquarePlus, SquareX, X, Zap } from 'lucide-react'; import useEPGsStore from '../../store/epgs'; import { Dropzone } from '@mantine/dropzone'; import { FixedSizeList as List } from 'react-window'; @@ -121,6 +121,48 @@ const ChannelForm = ({ channel = null, isOpen, onClose }) => { } }; + const handleAutoMatchEpg = async () => { + // Only attempt auto-match for existing channels (editing mode) + if (!channel || !channel.id) { + notifications.show({ + title: 'Info', + message: 'Auto-match is only available when editing existing channels.', + color: 'blue', + }); + return; + } + + try { + const response = await API.matchChannelEpg(channel.id); + + if (response.matched) { + // Update the form with the new EPG data + if (response.channel && response.channel.epg_data_id) { + formik.setFieldValue('epg_data_id', response.channel.epg_data_id); + } + + notifications.show({ + title: 'Success', + message: response.message, + color: 'green', + }); + } else { + notifications.show({ + title: 'No Match Found', + message: response.message, + color: 'orange', + }); + } + } 
catch (error) { + notifications.show({ + title: 'Error', + message: 'Failed to auto-match EPG data', + color: 'red', + }); + console.error('Auto-match error:', error); + } + }; + const formik = useFormik({ initialValues: { name: '', @@ -707,6 +749,20 @@ const ChannelForm = ({ channel = null, isOpen, onClose }) => { > Use Dummy + } readOnly From f6be6bc3a9e07d6ffbfb9bc210734f64a4a81a5b Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Tue, 16 Sep 2025 09:18:41 -0500 Subject: [PATCH 003/119] Don't use matching script --- apps/channels/api_views.py | 8 +- apps/channels/tasks.py | 202 ++++++++++++---------- frontend/src/components/forms/Channel.jsx | 10 +- 3 files changed, 118 insertions(+), 102 deletions(-) diff --git a/apps/channels/api_views.py b/apps/channels/api_views.py index 6537e6b8..e522b618 100644 --- a/apps/channels/api_views.py +++ b/apps/channels/api_views.py @@ -797,17 +797,17 @@ class ChannelViewSet(viewsets.ModelViewSet): @action(detail=True, methods=["post"], url_path="match-epg") def match_channel_epg(self, request, pk=None): channel = self.get_object() - + # Import the matching logic from apps.channels.tasks import match_single_channel_epg - + try: # Try to match this specific channel - call synchronously for immediate response result = match_single_channel_epg.apply_async(args=[channel.id]).get(timeout=30) - + # Refresh the channel from DB to get any updates channel.refresh_from_db() - + return Response({ "message": result.get("message", "Channel matching completed"), "matched": result.get("matched", False), diff --git a/apps/channels/tasks.py b/apps/channels/tasks.py index f4d58f46..f67fe563 100755 --- a/apps/channels/tasks.py +++ b/apps/channels/tasks.py @@ -12,6 +12,7 @@ import gc from celery import shared_task from django.utils.text import slugify +from rapidfuzz import fuzz from apps.channels.models import Channel from apps.epg.models import EPGData @@ -244,122 +245,133 @@ def match_epg_channels(): @shared_task def 
match_single_channel_epg(channel_id): """ - Try to match a single channel with EPG data using the same logic as match_epg_channels - but for just one channel. Returns a dict with match status and message. + Try to match a single channel with EPG data using optimized logic + that doesn't require loading all EPG data or running the external script. + Returns a dict with match status and message. """ try: from apps.channels.models import Channel from apps.epg.models import EPGData - import tempfile - import subprocess - import json - - logger.info(f"Starting single channel EPG matching for channel ID {channel_id}") - + import re + + logger.info(f"Starting optimized single channel EPG matching for channel ID {channel_id}") + # Get the channel try: channel = Channel.objects.get(id=channel_id) except Channel.DoesNotExist: return {"matched": False, "message": "Channel not found"} - + # If channel already has EPG data, skip if channel.epg_data: return {"matched": False, "message": f"Channel '{channel.name}' already has EPG data assigned"} - + # Get region preference try: region_obj = CoreSettings.objects.get(key="preferred-region") region_code = region_obj.value.strip().lower() except CoreSettings.DoesNotExist: region_code = None - - # Prepare channel data for matching script + + # Prepare channel data normalized_tvg_id = channel.tvg_id.strip().lower() if channel.tvg_id else "" - channel_json = { - "id": channel.id, - "name": channel.name, - "tvg_id": normalized_tvg_id, - "original_tvg_id": channel.tvg_id, - "fallback_name": normalized_tvg_id if normalized_tvg_id else channel.name, - "norm_chan": normalize_name(normalized_tvg_id if normalized_tvg_id else channel.name) + normalized_channel_name = normalize_name(channel.name) + + logger.info(f"Matching channel '{channel.name}' (TVG ID: '{channel.tvg_id}') against EPG data") + + # Step 1: Try exact TVG ID match first (most efficient) + if normalized_tvg_id: + epg_exact_match = 
EPGData.objects.filter(tvg_id__iexact=channel.tvg_id).first() + if epg_exact_match: + logger.info(f"Channel '{channel.name}' matched with EPG '{epg_exact_match.name}' by exact TVG ID match") + channel.epg_data = epg_exact_match + channel.save(update_fields=["epg_data"]) + return { + "matched": True, + "message": f"Channel '{channel.name}' matched with EPG '{epg_exact_match.name}' by exact TVG ID match" + } + + # Step 2: Try case-insensitive TVG ID match + if normalized_tvg_id: + epg_case_match = EPGData.objects.filter(tvg_id__icontains=normalized_tvg_id).first() + if epg_case_match: + logger.info(f"Channel '{channel.name}' matched with EPG '{epg_case_match.name}' by case-insensitive TVG ID match") + channel.epg_data = epg_case_match + channel.save(update_fields=["epg_data"]) + return { + "matched": True, + "message": f"Channel '{channel.name}' matched with EPG '{epg_case_match.name}' by case-insensitive TVG ID match" + } + + # Step 3: Fuzzy name matching (only if name-based matching is needed) + if not normalized_channel_name: + return {"matched": False, "message": f"Channel '{channel.name}' has no usable name for matching"} + + # Query EPG data with name filtering to reduce dataset + epg_candidates = EPGData.objects.filter(name__isnull=False).exclude(name='').values('id', 'name', 'tvg_id') + epg_count = epg_candidates.count() + logger.info(f"Fuzzy matching against {epg_count} EPG entries (optimized - not loading all EPG data)") + + best_score = 0 + best_epg = None + + for epg in epg_candidates: + if not epg['name']: + continue + + epg_normalized_name = normalize_name(epg['name']) + if not epg_normalized_name: + continue + + # Calculate base fuzzy score + base_score = fuzz.ratio(normalized_channel_name, epg_normalized_name) + bonus = 0 + + # Apply region-based bonus/penalty if applicable + if region_code and epg['tvg_id']: + combined_text = epg['tvg_id'].lower() + " " + epg['name'].lower() + dot_regions = re.findall(r'\.([a-z]{2})', combined_text) + + if 
dot_regions: + if region_code in dot_regions: + bonus = 30 # Bigger bonus for matching region + else: + bonus = -15 # Penalty for different region + elif region_code in combined_text: + bonus = 15 + + final_score = base_score + bonus + + if final_score > best_score: + best_score = final_score + best_epg = epg + + # Apply matching thresholds (same as the ML script) + BEST_FUZZY_THRESHOLD = 85 + + if best_epg and best_score >= BEST_FUZZY_THRESHOLD: + try: + logger.info(f"Channel '{channel.name}' matched with EPG '{best_epg['name']}' (score: {best_score})") + epg_data = EPGData.objects.get(id=best_epg['id']) + channel.epg_data = epg_data + channel.save(update_fields=["epg_data"]) + + return { + "matched": True, + "message": f"Channel '{channel.name}' matched with EPG '{epg_data.name}' (score: {best_score})" + } + except EPGData.DoesNotExist: + return {"matched": False, "message": "Matched EPG data not found"} + + # No good match found + logger.info(f"No suitable EPG match found for channel '{channel.name}' (best score: {best_score})") + return { + "matched": False, + "message": f"No suitable EPG match found for channel '{channel.name}' (best score: {best_score})" } - - # Prepare EPG data - epg_json = [] - for epg in EPGData.objects.all(): - normalized_epg_tvg_id = epg.tvg_id.strip().lower() if epg.tvg_id else "" - epg_json.append({ - 'id': epg.id, - 'tvg_id': normalized_epg_tvg_id, - 'original_tvg_id': epg.tvg_id, - 'name': epg.name, - 'norm_name': normalize_name(epg.name), - 'epg_source_id': epg.epg_source.id if epg.epg_source else None, - }) - - # Create payload for matching script - payload = { - "channels": [channel_json], # Only one channel - "epg_data": epg_json, - } - - if region_code: - payload["region_code"] = region_code - - # Write to temporary file and run the matching script - with tempfile.NamedTemporaryFile(mode='w', suffix='.json', delete=False) as temp_file: - json.dump(payload, temp_file) - temp_file_path = temp_file.name - - try: - # Run the 
matching script - from django.conf import settings - import os - - project_root = settings.BASE_DIR - script_path = os.path.join(project_root, 'scripts', 'epg_match.py') - - process = subprocess.Popen( - ['python', script_path, temp_file_path], - stdout=subprocess.PIPE, - stderr=subprocess.PIPE, - text=True, - cwd=project_root - ) - - stdout, stderr = process.communicate(timeout=60) # 1 minute timeout for single channel - - if process.returncode != 0: - logger.error(f"EPG matching script failed: {stderr}") - return {"matched": False, "message": "EPG matching failed"} - - result = json.loads(stdout) - channels_to_update = result.get("channels_to_update", []) - - if channels_to_update: - # Update the channel with the matched EPG data - epg_data_id = channels_to_update[0].get("epg_data_id") - if epg_data_id: - try: - epg_data = EPGData.objects.get(id=epg_data_id) - channel.epg_data = epg_data - channel.save(update_fields=["epg_data"]) - - return { - "matched": True, - "message": f"Channel '{channel.name}' matched with EPG '{epg_data.name}' (TVG ID: {epg_data.tvg_id})" - } - except EPGData.DoesNotExist: - return {"matched": False, "message": "Matched EPG data not found"} - - return {"matched": False, "message": f"No suitable EPG match found for channel '{channel.name}'"} - - finally: - # Clean up temp file - os.remove(temp_file_path) - + except Exception as e: - logger.error(f"Error in single channel EPG matching: {e}", exc_info=True) + logger.error(f"Error in optimized single channel EPG matching: {e}", exc_info=True) return {"matched": False, "message": f"Error during matching: {str(e)}"} diff --git a/frontend/src/components/forms/Channel.jsx b/frontend/src/components/forms/Channel.jsx index 62da50c1..d3d6a94b 100644 --- a/frontend/src/components/forms/Channel.jsx +++ b/frontend/src/components/forms/Channel.jsx @@ -134,13 +134,13 @@ const ChannelForm = ({ channel = null, isOpen, onClose }) => { try { const response = await API.matchChannelEpg(channel.id); - + if 
(response.matched) { // Update the form with the new EPG data if (response.channel && response.channel.epg_data_id) { formik.setFieldValue('epg_data_id', response.channel.epg_data_id); } - + notifications.show({ title: 'Success', message: response.message, @@ -758,7 +758,11 @@ const ChannelForm = ({ channel = null, isOpen, onClose }) => { handleAutoMatchEpg(); }} disabled={!channel || !channel.id} - title={!channel || !channel.id ? "Auto-match is only available for existing channels" : "Automatically match EPG data"} + title={ + !channel || !channel.id + ? 'Auto-match is only available for existing channels' + : 'Automatically match EPG data' + } leftSection={} > Auto Match From d2085d57f84966bd0d1ffd42f7ed9965a3fa1749 Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Tue, 16 Sep 2025 12:43:21 -0500 Subject: [PATCH 004/119] Add sentence transformers to new matching function. --- apps/channels/tasks.py | 697 +++++++++++++++++++++++++++++------------ 1 file changed, 500 insertions(+), 197 deletions(-) diff --git a/apps/channels/tasks.py b/apps/channels/tasks.py index f67fe563..1619843f 100755 --- a/apps/channels/tasks.py +++ b/apps/channels/tasks.py @@ -28,6 +28,113 @@ from urllib.parse import quote logger = logging.getLogger(__name__) +# Lazy loading for ML models - only imported/loaded when needed +_ml_model_cache = { + 'sentence_transformer': None, + 'model_path': os.path.join("/data", "models", "all-MiniLM-L6-v2"), # Use /data for persistence + 'model_name': "sentence-transformers/all-MiniLM-L6-v2" +} + +def get_sentence_transformer(): + """Lazy load the sentence transformer model only when needed""" + if _ml_model_cache['sentence_transformer'] is None: + try: + from sentence_transformers import SentenceTransformer + from sentence_transformers import util + + model_path = _ml_model_cache['model_path'] + model_name = _ml_model_cache['model_name'] + cache_dir = os.path.dirname(model_path) # /data/models + + # Check environment variable to disable downloads + 
disable_downloads = os.environ.get('DISABLE_ML_DOWNLOADS', 'false').lower() == 'true' + + # Ensure directory exists and is writable + os.makedirs(cache_dir, exist_ok=True) + + # Debug: List what's actually in the cache directory + try: + if os.path.exists(cache_dir): + logger.info(f"Cache directory contents: {os.listdir(cache_dir)}") + for item in os.listdir(cache_dir): + item_path = os.path.join(cache_dir, item) + if os.path.isdir(item_path): + logger.info(f" Subdirectory '{item}' contains: {os.listdir(item_path)}") + except Exception as e: + logger.info(f"Could not list cache directory: {e}") + + # Check if model files exist in our expected location + config_path = os.path.join(model_path, "config.json") + + logger.info(f"Checking for cached model at {model_path}") + logger.info(f"Config exists: {os.path.exists(config_path)}") + + # Also check if the model exists in the sentence-transformers default naming convention + alt_model_name = model_name.replace("/", "_") + alt_model_path = os.path.join(cache_dir, alt_model_name) + alt_config_path = os.path.join(alt_model_path, "config.json") + logger.info(f"Alternative path check - {alt_model_path}, config exists: {os.path.exists(alt_config_path)}") + + # Check for Hugging Face Hub cache format (newer format) + hf_model_name = f"models--{model_name.replace('/', '--')}" + hf_model_path = os.path.join(cache_dir, hf_model_name) + hf_snapshots_path = os.path.join(hf_model_path, "snapshots") + + logger.info(f"Hugging Face cache path check - {hf_model_path}, snapshots exists: {os.path.exists(hf_snapshots_path)}") + + # If HF cache exists, find the latest snapshot + hf_config_exists = False + hf_snapshot_path = None + if os.path.exists(hf_snapshots_path): + try: + snapshots = os.listdir(hf_snapshots_path) + if snapshots: + # Use the first (and likely only) snapshot + hf_snapshot_path = os.path.join(hf_snapshots_path, snapshots[0]) + hf_config_path = os.path.join(hf_snapshot_path, "config.json") + hf_config_exists = 
os.path.exists(hf_config_path) + logger.info(f"HF snapshot path: {hf_snapshot_path}, config exists: {hf_config_exists}") + except Exception as e: + logger.info(f"Error checking HF cache: {e}") + + # First try to load from our specific path + if os.path.exists(config_path): + logger.info(f"Loading cached sentence transformer from {model_path}") + _ml_model_cache['sentence_transformer'] = SentenceTransformer(model_path) + elif os.path.exists(alt_config_path): + logger.info(f"Loading cached sentence transformer from alternative path {alt_model_path}") + _ml_model_cache['sentence_transformer'] = SentenceTransformer(alt_model_path) + elif hf_config_exists and hf_snapshot_path: + logger.info(f"Loading cached sentence transformer from HF cache {hf_snapshot_path}") + _ml_model_cache['sentence_transformer'] = SentenceTransformer(hf_snapshot_path) + elif disable_downloads: + logger.warning(f"ML model not found and downloads disabled (DISABLE_ML_DOWNLOADS=true). Skipping ML matching.") + return None, None + else: + logger.info(f"Model cache not found, downloading {model_name}") + # Let sentence-transformers handle the download with its cache folder + _ml_model_cache['sentence_transformer'] = SentenceTransformer( + model_name, + cache_folder=cache_dir + ) + logger.info(f"Model downloaded and loaded successfully") + + return _ml_model_cache['sentence_transformer'], util + except ImportError: + logger.warning("sentence-transformers not available - ML-enhanced matching disabled") + return None, None + except Exception as e: + logger.error(f"Failed to load sentence transformer: {e}") + return None, None + else: + from sentence_transformers import util + return _ml_model_cache['sentence_transformer'], util + +# ML matching thresholds (same as original script) +BEST_FUZZY_THRESHOLD = 85 +LOWER_FUZZY_THRESHOLD = 40 +EMBED_SIM_THRESHOLD = 0.65 + # Words we remove to help with fuzzy + embedding matching COMMON_EXTRANEOUS_WORDS = [ "tv", "channel", "network", "television", @@ -50,155 
+157,373 @@ def normalize_name(name: str) -> str: norm = name.lower() norm = re.sub(r"\[.*?\]", "", norm) + + # Extract and preserve important call signs from parentheses before removing them + # This captures call signs like (KVLY), (KING), (KARE), etc. + call_sign_match = re.search(r"\(([A-Z]{3,5})\)", name) + preserved_call_sign = "" + if call_sign_match: + preserved_call_sign = " " + call_sign_match.group(1).lower() + + # Now remove all parentheses content norm = re.sub(r"\(.*?\)", "", norm) + + # Add back the preserved call sign + norm = norm + preserved_call_sign + norm = re.sub(r"[^\w\s]", "", norm) tokens = norm.split() tokens = [t for t in tokens if t not in COMMON_EXTRANEOUS_WORDS] norm = " ".join(tokens).strip() return norm +def match_channels_to_epg(channels_data, epg_data, region_code=None, use_ml=True): + """ + EPG matching logic that finds the best EPG matches for channels using + multiple matching strategies including fuzzy matching and ML models. + """ + channels_to_update = [] + matched_channels = [] + + # Try to get ML models if requested (but don't load yet - lazy loading) + st_model, util = None, None + epg_embeddings = None + ml_available = use_ml + + # Process each channel + for chan in channels_data: + normalized_tvg_id = chan.get("tvg_id", "") + fallback_name = chan["tvg_id"].strip() if chan["tvg_id"] else chan["name"] + + # Step 1: Exact TVG ID match + epg_by_tvg_id = next((epg for epg in epg_data if epg["tvg_id"] == normalized_tvg_id), None) + if normalized_tvg_id and epg_by_tvg_id: + chan["epg_data_id"] = epg_by_tvg_id["id"] + channels_to_update.append(chan) + matched_channels.append((chan['id'], fallback_name, epg_by_tvg_id["tvg_id"])) + logger.info(f"Channel {chan['id']} '{fallback_name}' => EPG found by exact tvg_id={epg_by_tvg_id['tvg_id']}") + continue + + # Step 2: Secondary TVG ID check (legacy compatibility) + if chan["tvg_id"]: + epg_match = [epg["id"] for epg in epg_data if epg["tvg_id"] == chan["tvg_id"]] + if epg_match: + 
chan["epg_data_id"] = epg_match[0] + channels_to_update.append(chan) + matched_channels.append((chan['id'], fallback_name, chan["tvg_id"])) + logger.info(f"Channel {chan['id']} '{chan['name']}' => EPG found by secondary tvg_id={chan['tvg_id']}") + continue + + # Step 3: Name-based fuzzy matching + if not chan["norm_chan"]: + logger.debug(f"Channel {chan['id']} '{chan['name']}' => empty after normalization, skipping") + continue + + best_score = 0 + best_epg = None + + # Debug: show what we're matching against + logger.debug(f"Fuzzy matching '{chan['norm_chan']}' against EPG entries...") + + # Find best fuzzy match + for row in epg_data: + if not row.get("norm_name"): + continue + + base_score = fuzz.ratio(chan["norm_chan"], row["norm_name"]) + bonus = 0 + + # Apply region-based bonus/penalty + if region_code and row.get("tvg_id"): + combined_text = row["tvg_id"].lower() + " " + row["name"].lower() + dot_regions = re.findall(r'\.([a-z]{2})', combined_text) + + if dot_regions: + if region_code in dot_regions: + bonus = 30 # Bigger bonus for matching region + else: + bonus = -15 # Penalty for different region + elif region_code in combined_text: + bonus = 15 + + score = base_score + bonus + + # Debug the best few matches + if score > 50: # Only show decent matches + logger.debug(f" EPG '{row['name']}' (norm: '{row['norm_name']}') => score: {score} (base: {base_score}, bonus: {bonus})") + + if score > best_score: + best_score = score + best_epg = row + + # Log the best score we found + if best_epg: + logger.info(f"Channel {chan['id']} '{chan['name']}' => best match: '{best_epg['name']}' (score: {best_score})") + + # Debug: Show some other potential matches for analysis + def score_epg_entry(epg_row): + base_score = fuzz.ratio(chan["norm_chan"], epg_row.get("norm_name", "")) + bonus = 0 + if region_code and epg_row.get("tvg_id"): + combined_text = epg_row["tvg_id"].lower() + " " + epg_row["name"].lower() + dot_regions = re.findall(r'\.([a-z]{2})', combined_text) + if 
dot_regions: + if region_code in dot_regions: + bonus = 30 + else: + bonus = -15 + elif region_code in combined_text: + bonus = 15 + return base_score + bonus + + # Check specifically for entries matching the channel's call sign or name parts + channel_keywords = chan["norm_chan"].split() + potential_matches = [] + for keyword in channel_keywords: + if len(keyword) >= 3: # Only check meaningful keywords + matching_entries = [row for row in epg_data if keyword.lower() in row['name'].lower() or keyword.lower() in row['tvg_id'].lower()] + potential_matches.extend(matching_entries) + + # Remove duplicates + unique_matches = [] + seen_ids = set() + for match in potential_matches: + if match['tvg_id'] not in seen_ids: + seen_ids.add(match['tvg_id']) + unique_matches.append(match) + + if unique_matches: + logger.info(f"Found {len(unique_matches)} entries containing channel keywords {channel_keywords}:") + for match_row in unique_matches: + match_score = score_epg_entry(match_row) + # Show original name vs normalized name to debug normalization + logger.info(f" Match: '{match_row['name']}' → normalized: '{match_row.get('norm_name', 'MISSING')}' (tvg_id: {match_row['tvg_id']}) => score: {match_score}") + else: + logger.warning(f"No entries found containing any of the channel keywords: {channel_keywords}") + + sorted_scores = sorted([(row, score_epg_entry(row)) for row in epg_data if row.get("norm_name") and score_epg_entry(row) > 20], key=lambda x: x[1], reverse=True) + + # Remove duplicates based on tvg_id + seen_tvg_ids = set() + unique_sorted_scores = [] + for row, score in sorted_scores: + if row['tvg_id'] not in seen_tvg_ids: + seen_tvg_ids.add(row['tvg_id']) + unique_sorted_scores.append((row, score)) + + logger.debug(f"Channel {chan['id']} '{chan['name']}' => top 10 unique fuzzy matches:") + for i, (epg_row, score) in enumerate(unique_sorted_scores[:10]): + # Highlight entries that contain any of the channel's keywords + channel_keywords = chan["norm_chan"].split() + 
is_keyword_match = any(keyword in epg_row['name'].lower() or keyword in epg_row['tvg_id'].lower() for keyword in channel_keywords if len(keyword) >= 3) + + if is_keyword_match: + logger.info(f" {i+1}. 🎯 KEYWORD MATCH: '{epg_row['name']}' (tvg_id: {epg_row['tvg_id']}) => score: {score} (norm_name: '{epg_row.get('norm_name', 'MISSING')}')") + else: + logger.debug(f" {i+1}. '{epg_row['name']}' (tvg_id: {epg_row['tvg_id']}) => score: {score}") + else: + logger.debug(f"Channel {chan['id']} '{chan['name']}' => no EPG entries with valid norm_name found") + continue + + # High confidence match - accept immediately + if best_score >= BEST_FUZZY_THRESHOLD: + chan["epg_data_id"] = best_epg["id"] + channels_to_update.append(chan) + matched_channels.append((chan['id'], chan['name'], best_epg["tvg_id"])) + logger.info(f"Channel {chan['id']} '{chan['name']}' => matched tvg_id={best_epg['tvg_id']} (score={best_score})") + + # Medium confidence - use ML if available (lazy load models here) + elif best_score >= LOWER_FUZZY_THRESHOLD and ml_available: + logger.debug(f"Channel {chan['id']} entering ML matching path at {time.time()}") + + # Note: If experiencing 5+ second delays here, check if Celery Beat + # task 'scan_and_process_files' is running too frequently and blocking execution + + # Lazy load ML models only when we actually need them + if st_model is None: + logger.debug(f"Channel {chan['id']} about to load ML model at {time.time()}") + st_model, util = get_sentence_transformer() + logger.debug(f"Channel {chan['id']} finished loading ML model at {time.time()}") + + # Lazy generate embeddings only when we actually need them + if epg_embeddings is None and st_model and any(row.get("norm_name") for row in epg_data): + try: + logger.info("Generating embeddings for EPG data using ML model (lazy loading)") + epg_embeddings = st_model.encode( + [row["norm_name"] for row in epg_data if row.get("norm_name")], + convert_to_tensor=True + ) + except Exception as e: + 
logger.warning(f"Failed to generate embeddings: {e}") + epg_embeddings = None + + if epg_embeddings is not None and st_model: + try: + # Generate embedding for this channel + chan_embedding = st_model.encode(chan["norm_chan"], convert_to_tensor=True) + + # Calculate similarity with all EPG embeddings + sim_scores = util.cos_sim(chan_embedding, epg_embeddings)[0] + top_index = int(sim_scores.argmax()) + top_value = float(sim_scores[top_index]) + + if top_value >= EMBED_SIM_THRESHOLD: + # Find the EPG entry that corresponds to this embedding index + epg_with_names = [epg for epg in epg_data if epg.get("norm_name")] + matched_epg = epg_with_names[top_index] + + chan["epg_data_id"] = matched_epg["id"] + channels_to_update.append(chan) + matched_channels.append((chan['id'], chan['name'], matched_epg["tvg_id"])) + logger.info(f"Channel {chan['id']} '{chan['name']}' => matched EPG tvg_id={matched_epg['tvg_id']} (fuzzy={best_score}, ML-sim={top_value:.2f})") + else: + logger.info(f"Channel {chan['id']} '{chan['name']}' => fuzzy={best_score}, ML-sim={top_value:.2f} < {EMBED_SIM_THRESHOLD}, trying last resort...") + + # Last resort: try ML with very low fuzzy threshold + if top_value >= 0.45: # Lower ML threshold as last resort + epg_with_names = [epg for epg in epg_data if epg.get("norm_name")] + matched_epg = epg_with_names[top_index] + + chan["epg_data_id"] = matched_epg["id"] + channels_to_update.append(chan) + matched_channels.append((chan['id'], chan['name'], matched_epg["tvg_id"])) + logger.info(f"Channel {chan['id']} '{chan['name']}' => LAST RESORT match EPG tvg_id={matched_epg['tvg_id']} (fuzzy={best_score}, ML-sim={top_value:.2f})") + else: + logger.info(f"Channel {chan['id']} '{chan['name']}' => even last resort ML-sim {top_value:.2f} < 0.45, skipping") + + except Exception as e: + logger.warning(f"ML matching failed for channel {chan['id']}: {e}") + # Fall back to non-ML decision + logger.info(f"Channel {chan['id']} '{chan['name']}' => fuzzy score {best_score} 
below threshold, skipping") + + # Last resort: Try ML matching even with very low fuzzy scores + elif best_score >= 20 and ml_available: + logger.debug(f"Channel {chan['id']} entering last resort ML matching at {time.time()}") + + # Lazy load ML models for last resort attempts + if st_model is None: + logger.debug(f"Channel {chan['id']} loading ML model for last resort at {time.time()}") + st_model, util = get_sentence_transformer() + logger.debug(f"Channel {chan['id']} finished loading ML model for last resort at {time.time()}") + + # Lazy generate embeddings for last resort attempts + if epg_embeddings is None and st_model and any(row.get("norm_name") for row in epg_data): + try: + logger.info("Generating embeddings for EPG data using ML model (last resort lazy loading)") + epg_embeddings = st_model.encode( + [row["norm_name"] for row in epg_data if row.get("norm_name")], + convert_to_tensor=True + ) + except Exception as e: + logger.warning(f"Failed to generate embeddings for last resort: {e}") + epg_embeddings = None + + if epg_embeddings is not None and st_model: + try: + logger.info(f"Channel {chan['id']} '{chan['name']}' => trying ML as last resort (fuzzy={best_score})") + # Generate embedding for this channel + chan_embedding = st_model.encode(chan["norm_chan"], convert_to_tensor=True) + + # Calculate similarity with all EPG embeddings + sim_scores = util.cos_sim(chan_embedding, epg_embeddings)[0] + top_index = int(sim_scores.argmax()) + top_value = float(sim_scores[top_index]) + + if top_value >= 0.50: # Even lower threshold for last resort + # Find the EPG entry that corresponds to this embedding index + epg_with_names = [epg for epg in epg_data if epg.get("norm_name")] + matched_epg = epg_with_names[top_index] + + chan["epg_data_id"] = matched_epg["id"] + channels_to_update.append(chan) + matched_channels.append((chan['id'], chan['name'], matched_epg["tvg_id"])) + logger.info(f"Channel {chan['id']} '{chan['name']}' => DESPERATE LAST RESORT match EPG 
tvg_id={matched_epg['tvg_id']} (fuzzy={best_score}, ML-sim={top_value:.2f})") + else: + logger.info(f"Channel {chan['id']} '{chan['name']}' => desperate last resort ML-sim {top_value:.2f} < 0.50, giving up") + except Exception as e: + logger.warning(f"Last resort ML matching failed for channel {chan['id']}: {e}") + logger.info(f"Channel {chan['id']} '{chan['name']}' => best fuzzy score={best_score} < {LOWER_FUZZY_THRESHOLD}, giving up") + else: + # No ML available or very low fuzzy score + logger.info(f"Channel {chan['id']} '{chan['name']}' => best fuzzy score={best_score} < {LOWER_FUZZY_THRESHOLD}, no ML fallback available") + + return { + "channels_to_update": channels_to_update, + "matched_channels": matched_channels + } + @shared_task def match_epg_channels(): """ - Goes through all Channels and tries to find a matching EPGData row by: - 1) If channel.tvg_id is valid in EPGData, skip. - 2) If channel has a tvg_id but not found in EPGData, attempt direct EPGData lookup. - 3) Otherwise, perform name-based fuzzy matching with optional region-based bonus. - 4) If a match is found, we set channel.tvg_id - 5) Summarize and log results. + Uses integrated EPG matching instead of external script. + Provides the same functionality with better performance and maintainability. 
""" try: - logger.info("Starting EPG matching logic...") + logger.info("Starting integrated EPG matching...") - # Attempt to retrieve a "preferred-region" if configured + # Get region preference try: region_obj = CoreSettings.objects.get(key="preferred-region") region_code = region_obj.value.strip().lower() except CoreSettings.DoesNotExist: region_code = None - matched_channels = [] - channels_to_update = [] - # Get channels that don't have EPG data assigned channels_without_epg = Channel.objects.filter(epg_data__isnull=True) logger.info(f"Found {channels_without_epg.count()} channels without EPG data") - channels_json = [] + channels_data = [] for channel in channels_without_epg: - # Normalize TVG ID - strip whitespace and convert to lowercase normalized_tvg_id = channel.tvg_id.strip().lower() if channel.tvg_id else "" - if normalized_tvg_id: - logger.info(f"Processing channel {channel.id} '{channel.name}' with TVG ID='{normalized_tvg_id}'") - - channels_json.append({ + channels_data.append({ "id": channel.id, "name": channel.name, - "tvg_id": normalized_tvg_id, # Use normalized TVG ID - "original_tvg_id": channel.tvg_id, # Keep original for reference + "tvg_id": normalized_tvg_id, + "original_tvg_id": channel.tvg_id, "fallback_name": normalized_tvg_id if normalized_tvg_id else channel.name, - "norm_chan": normalize_name(normalized_tvg_id if normalized_tvg_id else channel.name) + "norm_chan": normalize_name(channel.name) # Always use channel name for fuzzy matching! 
}) - # Similarly normalize EPG data TVG IDs - epg_json = [] + # Get all EPG data + epg_data = [] for epg in EPGData.objects.all(): normalized_tvg_id = epg.tvg_id.strip().lower() if epg.tvg_id else "" - epg_json.append({ + epg_data.append({ 'id': epg.id, - 'tvg_id': normalized_tvg_id, # Use normalized TVG ID - 'original_tvg_id': epg.tvg_id, # Keep original for reference + 'tvg_id': normalized_tvg_id, + 'original_tvg_id': epg.tvg_id, 'name': epg.name, 'norm_name': normalize_name(epg.name), 'epg_source_id': epg.epg_source.id if epg.epg_source else None, }) - # Log available EPG data TVG IDs for debugging - unique_epg_tvg_ids = set(e['tvg_id'] for e in epg_json if e['tvg_id']) - logger.info(f"Available EPG TVG IDs: {', '.join(sorted(unique_epg_tvg_ids))}") + logger.info(f"Processing {len(channels_data)} channels against {len(epg_data)} EPG entries") - payload = { - "channels": channels_json, - "epg_data": epg_json, - "region_code": region_code, - } - - with tempfile.NamedTemporaryFile(delete=False) as temp_file: - temp_file.write(json.dumps(payload).encode('utf-8')) - temp_file_path = temp_file.name - - # After writing to the file but before subprocess - # Explicitly delete the large data structures - del payload - gc.collect() - - process = subprocess.Popen( - ['python', '/app/scripts/epg_match.py', temp_file_path], - stdout=subprocess.PIPE, - stderr=subprocess.PIPE, - text=True - ) - - stdout = '' - block_size = 1024 - - while True: - # Monitor stdout and stderr for readability - readable, _, _ = select.select([process.stdout, process.stderr], [], [], 1) # timeout of 1 second - - if not readable: # timeout expired - if process.poll() is not None: # check if process finished - break - else: # process still running, continue - continue - - for stream in readable: - if stream == process.stdout: - stdout += stream.read(block_size) - elif stream == process.stderr: - error = stream.readline() - if error: - logger.info(error.strip()) - - if process.poll() is not None: - 
break - - process.wait() - os.remove(temp_file_path) - - if process.returncode != 0: - return f"Failed to process EPG matching" - - result = json.loads(stdout) - # This returns lists of dicts, not model objects + # Run EPG matching + result = match_channels_to_epg(channels_data, epg_data, region_code, use_ml=True) channels_to_update_dicts = result["channels_to_update"] matched_channels = result["matched_channels"] - # Explicitly clean up large objects - del stdout, result - gc.collect() - - # Convert your dict-based 'channels_to_update' into real Channel objects + # Update channels in database if channels_to_update_dicts: - # Extract IDs of the channels that need updates channel_ids = [d["id"] for d in channels_to_update_dicts] - - # Fetch them from DB channels_qs = Channel.objects.filter(id__in=channel_ids) channels_list = list(channels_qs) - # Build a map from channel_id -> epg_data_id (or whatever fields you need) - epg_mapping = { - d["id"]: d["epg_data_id"] for d in channels_to_update_dicts - } + # Create mapping from channel_id to epg_data_id + epg_mapping = {d["id"]: d["epg_data_id"] for d in channels_to_update_dicts} - # Populate each Channel object with the updated epg_data_id + # Update each channel with matched EPG data for channel_obj in channels_list: - # The script sets 'epg_data_id' in the returned dict - # We either assign directly, or fetch the EPGData instance if needed. 
- channel_obj.epg_data_id = epg_mapping.get(channel_obj.id) + epg_data_id = epg_mapping.get(channel_obj.id) + if epg_data_id: + try: + epg_data_obj = EPGData.objects.get(id=epg_data_id) + channel_obj.epg_data = epg_data_obj + except EPGData.DoesNotExist: + logger.error(f"EPG data {epg_data_id} not found for channel {channel_obj.id}") - # Now we have real model objects, so bulk_update will work + # Bulk update all channels Channel.objects.bulk_update(channels_list, ["epg_data"]) total_matched = len(matched_channels) @@ -209,9 +534,9 @@ def match_epg_channels(): else: logger.info("No new channels were matched.") - logger.info("Finished EPG matching logic.") + logger.info("Finished integrated EPG matching.") - # Send update with additional information for refreshing UI + # Send WebSocket update channel_layer = get_channel_layer() associations = [ {"channel_id": chan["id"], "epg_data_id": chan["epg_data_id"]} @@ -225,19 +550,19 @@ def match_epg_channels(): "data": { "success": True, "type": "epg_match", - "refresh_channels": True, # Flag to tell frontend to refresh channels + "refresh_channels": True, "matches_count": total_matched, "message": f"EPG matching complete: {total_matched} channel(s) matched", - "associations": associations # Add the associations data + "associations": associations } } ) return f"Done. Matched {total_matched} channel(s)." + finally: - # Final cleanup + # Memory cleanup gc.collect() - # Use our standardized cleanup function for more thorough memory management from core.utils import cleanup_memory cleanup_memory(log_usage=True, force_collection=True) @@ -245,16 +570,14 @@ def match_epg_channels(): @shared_task def match_single_channel_epg(channel_id): """ - Try to match a single channel with EPG data using optimized logic - that doesn't require loading all EPG data or running the external script. - Returns a dict with match status and message. 
+ Try to match a single channel with EPG data using the integrated matching logic + that includes both fuzzy and ML-enhanced matching. Returns a dict with match status and message. """ try: from apps.channels.models import Channel from apps.epg.models import EPGData - import re - logger.info(f"Starting optimized single channel EPG matching for channel ID {channel_id}") + logger.info(f"Starting integrated single channel EPG matching for channel ID {channel_id}") # Get the channel try: @@ -266,112 +589,92 @@ def match_single_channel_epg(channel_id): if channel.epg_data: return {"matched": False, "message": f"Channel '{channel.name}' already has EPG data assigned"} - # Get region preference - try: - region_obj = CoreSettings.objects.get(key="preferred-region") - region_code = region_obj.value.strip().lower() - except CoreSettings.DoesNotExist: - region_code = None - - # Prepare channel data + # Prepare single channel data for matching (same format as bulk matching) normalized_tvg_id = channel.tvg_id.strip().lower() if channel.tvg_id else "" - normalized_channel_name = normalize_name(channel.name) + channel_data = { + "id": channel.id, + "name": channel.name, + "tvg_id": normalized_tvg_id, + "original_tvg_id": channel.tvg_id, + "fallback_name": normalized_tvg_id if normalized_tvg_id else channel.name, + "norm_chan": normalize_name(channel.name) # Always use channel name for fuzzy matching! 
+ } - logger.info(f"Matching channel '{channel.name}' (TVG ID: '{channel.tvg_id}') against EPG data") + logger.info(f"Channel data prepared: name='{channel.name}', tvg_id='{normalized_tvg_id}', norm_chan='{channel_data['norm_chan']}'") - # Step 1: Try exact TVG ID match first (most efficient) - if normalized_tvg_id: - epg_exact_match = EPGData.objects.filter(tvg_id__iexact=channel.tvg_id).first() - if epg_exact_match: - logger.info(f"Channel '{channel.name}' matched with EPG '{epg_exact_match.name}' by exact TVG ID match") - channel.epg_data = epg_exact_match - channel.save(update_fields=["epg_data"]) - return { - "matched": True, - "message": f"Channel '{channel.name}' matched with EPG '{epg_exact_match.name}' by exact TVG ID match" - } + # Debug: Test what the normalization does to preserve call signs + test_name = "NBC 11 (KVLY) - Fargo" # Example for testing + test_normalized = normalize_name(test_name) + logger.debug(f"DEBUG normalization example: '{test_name}' → '{test_normalized}' (call sign preserved)") - # Step 2: Try case-insensitive TVG ID match - if normalized_tvg_id: - epg_case_match = EPGData.objects.filter(tvg_id__icontains=normalized_tvg_id).first() - if epg_case_match: - logger.info(f"Channel '{channel.name}' matched with EPG '{epg_case_match.name}' by case-insensitive TVG ID match") - channel.epg_data = epg_case_match - channel.save(update_fields=["epg_data"]) - return { - "matched": True, - "message": f"Channel '{channel.name}' matched with EPG '{epg_case_match.name}' by case-insensitive TVG ID match" - } + # Get all EPG data for matching - must include norm_name field + epg_data_list = [] + for epg in EPGData.objects.filter(name__isnull=False).exclude(name=''): + normalized_epg_tvg_id = epg.tvg_id.strip().lower() if epg.tvg_id else "" + epg_data_list.append({ + 'id': epg.id, + 'tvg_id': normalized_epg_tvg_id, + 'original_tvg_id': epg.tvg_id, + 'name': epg.name, + 'norm_name': normalize_name(epg.name), + 'epg_source_id': epg.epg_source.id if 
epg.epg_source else None, + }) - # Step 3: Fuzzy name matching (only if name-based matching is needed) - if not normalized_channel_name: - return {"matched": False, "message": f"Channel '{channel.name}' has no usable name for matching"} + if not epg_data_list: + return {"matched": False, "message": "No EPG data available for matching"} - # Query EPG data with name filtering to reduce dataset - epg_candidates = EPGData.objects.filter(name__isnull=False).exclude(name='').values('id', 'name', 'tvg_id') - epg_count = epg_candidates.count() - logger.info(f"Fuzzy matching against {epg_count} EPG entries (optimized - not loading all EPG data)") + logger.info(f"Matching single channel '{channel.name}' against {len(epg_data_list)} EPG entries") - best_score = 0 - best_epg = None + # Use the EPG matching function + result = match_channels_to_epg([channel_data], epg_data_list) + channels_to_update = result.get("channels_to_update", []) + matched_channels = result.get("matched_channels", []) - for epg in epg_candidates: - if not epg['name']: - continue + if channels_to_update: + # Find our channel in the results + channel_match = None + for update in channels_to_update: + if update["id"] == channel.id: + channel_match = update + break - epg_normalized_name = normalize_name(epg['name']) - if not epg_normalized_name: - continue + if channel_match: + # Apply the match to the channel + try: + epg_data = EPGData.objects.get(id=channel_match['epg_data_id']) + channel.epg_data = epg_data + channel.save(update_fields=["epg_data"]) - # Calculate base fuzzy score - base_score = fuzz.ratio(normalized_channel_name, epg_normalized_name) - bonus = 0 + # Find match details from matched_channels for better reporting + match_details = None + for match_info in matched_channels: + if match_info[0] == channel.id: # matched_channels format: (channel_id, channel_name, epg_info) + match_details = match_info + break - # Apply region-based bonus/penalty if applicable - if region_code and 
epg['tvg_id']: - combined_text = epg['tvg_id'].lower() + " " + epg['name'].lower() - dot_regions = re.findall(r'\.([a-z]{2})', combined_text) + success_msg = f"Channel '{channel.name}' matched with EPG '{epg_data.name}'" + if match_details: + success_msg += f" (matched via: {match_details[2]})" - if dot_regions: - if region_code in dot_regions: - bonus = 30 # Bigger bonus for matching region - else: - bonus = -15 # Penalty for different region - elif region_code in combined_text: - bonus = 15 + logger.info(success_msg) - final_score = base_score + bonus + return { + "matched": True, + "message": success_msg, + "epg_name": epg_data.name, + "epg_id": epg_data.id + } + except EPGData.DoesNotExist: + return {"matched": False, "message": "Matched EPG data not found"} - if final_score > best_score: - best_score = final_score - best_epg = epg - - # Apply matching thresholds (same as the ML script) - BEST_FUZZY_THRESHOLD = 85 - - if best_epg and best_score >= BEST_FUZZY_THRESHOLD: - try: - logger.info(f"Channel '{channel.name}' matched with EPG '{best_epg['name']}' (score: {best_score})") - epg_data = EPGData.objects.get(id=best_epg['id']) - channel.epg_data = epg_data - channel.save(update_fields=["epg_data"]) - - return { - "matched": True, - "message": f"Channel '{channel.name}' matched with EPG '{epg_data.name}' (score: {best_score})" - } - except EPGData.DoesNotExist: - return {"matched": False, "message": "Matched EPG data not found"} - - # No good match found - logger.info(f"No suitable EPG match found for channel '{channel.name}' (best score: {best_score})") + # No match found return { "matched": False, - "message": f"No suitable EPG match found for channel '{channel.name}' (best score: {best_score})" + "message": f"No suitable EPG match found for channel '{channel.name}'" } except Exception as e: - logger.error(f"Error in optimized single channel EPG matching: {e}", exc_info=True) + logger.error(f"Error in integrated single channel EPG matching: {e}", 
exc_info=True) return {"matched": False, "message": f"Error during matching: {str(e)}"} From fedc98f848a03ae860b85c260bd0457519860778 Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Tue, 16 Sep 2025 12:54:19 -0500 Subject: [PATCH 005/119] Removed unneeded debug logging. --- apps/channels/tasks.py | 73 ------------------------------------------ 1 file changed, 73 deletions(-) diff --git a/apps/channels/tasks.py b/apps/channels/tasks.py index 1619843f..12584bc3 100755 --- a/apps/channels/tasks.py +++ b/apps/channels/tasks.py @@ -259,68 +259,6 @@ def match_channels_to_epg(channels_data, epg_data, region_code=None, use_ml=True # Log the best score we found if best_epg: logger.info(f"Channel {chan['id']} '{chan['name']}' => best match: '{best_epg['name']}' (score: {best_score})") - - # Debug: Show some other potential matches for analysis - def score_epg_entry(epg_row): - base_score = fuzz.ratio(chan["norm_chan"], epg_row.get("norm_name", "")) - bonus = 0 - if region_code and epg_row.get("tvg_id"): - combined_text = epg_row["tvg_id"].lower() + " " + epg_row["name"].lower() - dot_regions = re.findall(r'\.([a-z]{2})', combined_text) - if dot_regions: - if region_code in dot_regions: - bonus = 30 - else: - bonus = -15 - elif region_code in combined_text: - bonus = 15 - return base_score + bonus - - # Check specifically for entries matching the channel's call sign or name parts - channel_keywords = chan["norm_chan"].split() - potential_matches = [] - for keyword in channel_keywords: - if len(keyword) >= 3: # Only check meaningful keywords - matching_entries = [row for row in epg_data if keyword.lower() in row['name'].lower() or keyword.lower() in row['tvg_id'].lower()] - potential_matches.extend(matching_entries) - - # Remove duplicates - unique_matches = [] - seen_ids = set() - for match in potential_matches: - if match['tvg_id'] not in seen_ids: - seen_ids.add(match['tvg_id']) - unique_matches.append(match) - - if unique_matches: - logger.info(f"Found 
{len(unique_matches)} entries containing channel keywords {channel_keywords}:") - for match_row in unique_matches: - match_score = score_epg_entry(match_row) - # Show original name vs normalized name to debug normalization - logger.info(f" Match: '{match_row['name']}' → normalized: '{match_row.get('norm_name', 'MISSING')}' (tvg_id: {match_row['tvg_id']}) => score: {match_score}") - else: - logger.warning(f"No entries found containing any of the channel keywords: {channel_keywords}") - - sorted_scores = sorted([(row, score_epg_entry(row)) for row in epg_data if row.get("norm_name") and score_epg_entry(row) > 20], key=lambda x: x[1], reverse=True) - - # Remove duplicates based on tvg_id - seen_tvg_ids = set() - unique_sorted_scores = [] - for row, score in sorted_scores: - if row['tvg_id'] not in seen_tvg_ids: - seen_tvg_ids.add(row['tvg_id']) - unique_sorted_scores.append((row, score)) - - logger.debug(f"Channel {chan['id']} '{chan['name']}' => top 10 unique fuzzy matches:") - for i, (epg_row, score) in enumerate(unique_sorted_scores[:10]): - # Highlight entries that contain any of the channel's keywords - channel_keywords = chan["norm_chan"].split() - is_keyword_match = any(keyword in epg_row['name'].lower() or keyword in epg_row['tvg_id'].lower() for keyword in channel_keywords if len(keyword) >= 3) - - if is_keyword_match: - logger.info(f" {i+1}. 🎯 KEYWORD MATCH: '{epg_row['name']}' (tvg_id: {epg_row['tvg_id']}) => score: {score} (norm_name: '{epg_row.get('norm_name', 'MISSING')}')") - else: - logger.debug(f" {i+1}. 
'{epg_row['name']}' (tvg_id: {epg_row['tvg_id']}) => score: {score}") else: logger.debug(f"Channel {chan['id']} '{chan['name']}' => no EPG entries with valid norm_name found") continue @@ -334,16 +272,9 @@ def match_channels_to_epg(channels_data, epg_data, region_code=None, use_ml=True # Medium confidence - use ML if available (lazy load models here) elif best_score >= LOWER_FUZZY_THRESHOLD and ml_available: - logger.debug(f"Channel {chan['id']} entering ML matching path at {time.time()}") - - # Note: If experiencing 5+ second delays here, check if Celery Beat - # task 'scan_and_process_files' is running too frequently and blocking execution - # Lazy load ML models only when we actually need them if st_model is None: - logger.debug(f"Channel {chan['id']} about to load ML model at {time.time()}") st_model, util = get_sentence_transformer() - logger.debug(f"Channel {chan['id']} finished loading ML model at {time.time()}") # Lazy generate embeddings only when we actually need them if epg_embeddings is None and st_model and any(row.get("norm_name") for row in epg_data): @@ -398,13 +329,9 @@ def match_channels_to_epg(channels_data, epg_data, region_code=None, use_ml=True # Last resort: Try ML matching even with very low fuzzy scores elif best_score >= 20 and ml_available: - logger.debug(f"Channel {chan['id']} entering last resort ML matching at {time.time()}") - # Lazy load ML models for last resort attempts if st_model is None: - logger.debug(f"Channel {chan['id']} loading ML model for last resort at {time.time()}") st_model, util = get_sentence_transformer() - logger.debug(f"Channel {chan['id']} finished loading ML model for last resort at {time.time()}") # Lazy generate embeddings for last resort attempts if epg_embeddings is None and st_model and any(row.get("norm_name") for row in epg_data): From c55dcfd26a29bb3e853a331d5032a8fac7dc3200 Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Tue, 16 Sep 2025 13:01:43 -0500 Subject: [PATCH 006/119] Remove unnecessary 
checking of cache directories. Lets sentence transformers handle it. --- apps/channels/tasks.py | 90 ++++++++---------------------------------- 1 file changed, 17 insertions(+), 73 deletions(-) diff --git a/apps/channels/tasks.py b/apps/channels/tasks.py index 12584bc3..ac3c4f0d 100755 --- a/apps/channels/tasks.py +++ b/apps/channels/tasks.py @@ -30,9 +30,7 @@ logger = logging.getLogger(__name__) # Lazy loading for ML models - only imported/loaded when needed _ml_model_cache = { - 'sentence_transformer': None, - 'model_path': os.path.join("/data", "models", "all-MiniLM-L6-v2"), # Use /data for persistence - 'model_name': "sentence-transformers/all-MiniLM-L6-v2" + 'sentence_transformer': None } def get_sentence_transformer(): @@ -42,82 +40,28 @@ def get_sentence_transformer(): from sentence_transformers import SentenceTransformer from sentence_transformers import util - model_path = _ml_model_cache['model_path'] - model_name = _ml_model_cache['model_name'] - cache_dir = os.path.dirname(model_path) # /data/models + model_name = "sentence-transformers/all-MiniLM-L6-v2" + cache_dir = "/data/models" # Check environment variable to disable downloads disable_downloads = os.environ.get('DISABLE_ML_DOWNLOADS', 'false').lower() == 'true' - # Ensure directory exists and is writable + if disable_downloads: + # Check if model exists before attempting to load + hf_model_path = os.path.join(cache_dir, f"models--{model_name.replace('/', '--')}") + if not os.path.exists(hf_model_path): + logger.warning("ML model not found and downloads disabled (DISABLE_ML_DOWNLOADS=true). 
Skipping ML matching.") + return None, None + + # Ensure cache directory exists os.makedirs(cache_dir, exist_ok=True) - # Debug: List what's actually in the cache directory - try: - if os.path.exists(cache_dir): - logger.info(f"Cache directory contents: {os.listdir(cache_dir)}") - for item in os.listdir(cache_dir): - item_path = os.path.join(cache_dir, item) - if os.path.isdir(item_path): - logger.info(f" Subdirectory '{item}' contains: {os.listdir(item_path)}") - except Exception as e: - logger.info(f"Could not list cache directory: {e}") - - # Check if model files exist in our expected location - config_path = os.path.join(model_path, "config.json") - - logger.info(f"Checking for cached model at {model_path}") - logger.info(f"Config exists: {os.path.exists(config_path)}") - - # Also check if the model exists in the sentence-transformers default naming convention - alt_model_name = model_name.replace("/", "_") - alt_model_path = os.path.join(cache_dir, alt_model_name) - alt_config_path = os.path.join(alt_model_path, "config.json") - logger.info(f"Alternative path check - {alt_model_path}, config exists: {os.path.exists(alt_config_path)}") - - # Check for Hugging Face Hub cache format (newer format) - hf_model_name = f"models--{model_name.replace('/', '--')}" - hf_model_path = os.path.join(cache_dir, hf_model_name) - hf_snapshots_path = os.path.join(hf_model_path, "snapshots") - - logger.info(f"Hugging Face cache path check - {hf_model_path}, snapshots exists: {os.path.exists(hf_snapshots_path)}") - - # If HF cache exists, find the latest snapshot - hf_config_exists = False - hf_snapshot_path = None - if os.path.exists(hf_snapshots_path): - try: - snapshots = os.listdir(hf_snapshots_path) - if snapshots: - # Use the first (and likely only) snapshot - hf_snapshot_path = os.path.join(hf_snapshots_path, snapshots[0]) - hf_config_path = os.path.join(hf_snapshot_path, "config.json") - hf_config_exists = os.path.exists(hf_config_path) - logger.info(f"HF snapshot path: 
{hf_snapshot_path}, config exists: {hf_config_exists}") - except Exception as e: - logger.info(f"Error checking HF cache: {e}") - - # First try to load from our specific path - if os.path.exists(config_path): - logger.info(f"Loading cached sentence transformer from {model_path}") - _ml_model_cache['sentence_transformer'] = SentenceTransformer(model_path) - elif os.path.exists(alt_config_path): - logger.info(f"Loading cached sentence transformer from alternative path {alt_model_path}") - _ml_model_cache['sentence_transformer'] = SentenceTransformer(alt_model_path) - elif hf_config_exists and hf_snapshot_path: - logger.info(f"Loading cached sentence transformer from HF cache {hf_snapshot_path}") - _ml_model_cache['sentence_transformer'] = SentenceTransformer(hf_snapshot_path) - elif disable_downloads: - logger.warning(f"ML model not found and downloads disabled (DISABLE_ML_DOWNLOADS=true). Skipping ML matching.") - return None, None - else: - logger.info(f"Model cache not found, downloading {model_name}") - # Let sentence-transformers handle the download with its cache folder - _ml_model_cache['sentence_transformer'] = SentenceTransformer( - model_name, - cache_folder=cache_dir - ) - logger.info(f"Model downloaded and loaded successfully") + # Let sentence-transformers handle all cache detection and management + logger.info(f"Loading sentence transformer model (cache: {cache_dir})") + _ml_model_cache['sentence_transformer'] = SentenceTransformer( + model_name, + cache_folder=cache_dir + ) return _ml_model_cache['sentence_transformer'], util except ImportError: From d6bb9e40b2f6547df490ae2b32c089a34199df06 Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Tue, 16 Sep 2025 13:15:32 -0500 Subject: [PATCH 007/119] Implement memory cleanup for ML models after channel matching operations --- apps/channels/tasks.py | 30 ++++++++++++++++++++++++++++++ 1 file changed, 30 insertions(+) diff --git a/apps/channels/tasks.py b/apps/channels/tasks.py index ac3c4f0d..7e645a71 
100755 --- a/apps/channels/tasks.py +++ b/apps/channels/tasks.py @@ -318,6 +318,12 @@ def match_channels_to_epg(channels_data, epg_data, region_code=None, use_ml=True # No ML available or very low fuzzy score logger.info(f"Channel {chan['id']} '{chan['name']}' => best fuzzy score={best_score} < {LOWER_FUZZY_THRESHOLD}, no ML fallback available") + # Clean up ML models from memory after matching (infrequent operation) + if _ml_model_cache['sentence_transformer'] is not None: + logger.info("Cleaning up ML models from memory") + _ml_model_cache['sentence_transformer'] = None + gc.collect() + return { "channels_to_update": channels_to_update, "matched_channels": matched_channels @@ -432,6 +438,11 @@ def match_epg_channels(): return f"Done. Matched {total_matched} channel(s)." finally: + # Clean up ML models from memory after bulk matching + if _ml_model_cache['sentence_transformer'] is not None: + logger.info("Cleaning up ML models from memory") + _ml_model_cache['sentence_transformer'] = None + # Memory cleanup gc.collect() from core.utils import cleanup_memory @@ -529,6 +540,12 @@ def match_single_channel_epg(channel_id): logger.info(success_msg) + # Clean up ML models from memory after single channel matching + if _ml_model_cache['sentence_transformer'] is not None: + logger.info("Cleaning up ML models from memory") + _ml_model_cache['sentence_transformer'] = None + gc.collect() + return { "matched": True, "message": success_msg, @@ -539,6 +556,12 @@ def match_single_channel_epg(channel_id): return {"matched": False, "message": "Matched EPG data not found"} # No match found + # Clean up ML models from memory after single channel matching + if _ml_model_cache['sentence_transformer'] is not None: + logger.info("Cleaning up ML models from memory") + _ml_model_cache['sentence_transformer'] = None + gc.collect() + return { "matched": False, "message": f"No suitable EPG match found for channel '{channel.name}'" @@ -546,6 +569,13 @@ def 
match_single_channel_epg(channel_id): except Exception as e: logger.error(f"Error in integrated single channel EPG matching: {e}", exc_info=True) + + # Clean up ML models from memory even on error + if _ml_model_cache['sentence_transformer'] is not None: + logger.info("Cleaning up ML models from memory after error") + _ml_model_cache['sentence_transformer'] = None + gc.collect() + return {"matched": False, "message": f"Error during matching: {str(e)}"} From 6384f4f56ff217ae0005560d835c1f5b444646dc Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Tue, 16 Sep 2025 13:47:59 -0500 Subject: [PATCH 008/119] Add progress notifications for EPG matching process --- apps/channels/tasks.py | 75 +++++++++++++++++++++-- frontend/src/WebSocket.jsx | 45 ++++++++++++++ frontend/src/components/forms/Channel.jsx | 5 ++ 3 files changed, 119 insertions(+), 6 deletions(-) diff --git a/apps/channels/tasks.py b/apps/channels/tasks.py index 7e645a71..5ad291a1 100755 --- a/apps/channels/tasks.py +++ b/apps/channels/tasks.py @@ -28,6 +28,36 @@ from urllib.parse import quote logger = logging.getLogger(__name__) +def send_epg_matching_progress(total_channels, matched_channels, current_channel_name="", stage="matching"): + """ + Send EPG matching progress via WebSocket + """ + try: + channel_layer = get_channel_layer() + if channel_layer: + progress_data = { + 'type': 'epg_matching_progress', + 'total': total_channels, + 'matched': len(matched_channels) if isinstance(matched_channels, list) else matched_channels, + 'remaining': total_channels - (len(matched_channels) if isinstance(matched_channels, list) else matched_channels), + 'current_channel': current_channel_name, + 'stage': stage, + 'progress_percent': round((len(matched_channels) if isinstance(matched_channels, list) else matched_channels) / total_channels * 100, 1) if total_channels > 0 else 0 + } + + async_to_sync(channel_layer.group_send)( + "updates", + { + "type": "update", + "data": { + "type": "epg_matching_progress", + 
**progress_data + } + } + ) + except Exception as e: + logger.warning(f"Failed to send EPG matching progress: {e}") + # Lazy loading for ML models - only imported/loaded when needed _ml_model_cache = { 'sentence_transformer': None @@ -121,13 +151,18 @@ def normalize_name(name: str) -> str: norm = " ".join(tokens).strip() return norm -def match_channels_to_epg(channels_data, epg_data, region_code=None, use_ml=True): +def match_channels_to_epg(channels_data, epg_data, region_code=None, use_ml=True, send_progress=True): """ EPG matching logic that finds the best EPG matches for channels using multiple matching strategies including fuzzy matching and ML models. """ channels_to_update = [] matched_channels = [] + total_channels = len(channels_data) + + # Send initial progress + if send_progress: + send_epg_matching_progress(total_channels, 0, stage="starting") # Try to get ML models if requested (but don't load yet - lazy loading) st_model, util = None, None @@ -135,7 +170,18 @@ def match_channels_to_epg(channels_data, epg_data, region_code=None, use_ml=True ml_available = use_ml # Process each channel - for chan in channels_data: + for index, chan in enumerate(channels_data): + normalized_tvg_id = chan.get("tvg_id", "") + fallback_name = chan["tvg_id"].strip() if chan["tvg_id"] else chan["name"] + + # Send progress update every 5 channels or for the first few + if send_progress and (index < 5 or index % 5 == 0 or index == total_channels - 1): + send_epg_matching_progress( + total_channels, + len(matched_channels), + current_channel_name=chan["name"][:50], # Truncate long names + stage="matching" + ) normalized_tvg_id = chan.get("tvg_id", "") fallback_name = chan["tvg_id"].strip() if chan["tvg_id"] else chan["name"] @@ -324,6 +370,14 @@ def match_channels_to_epg(channels_data, epg_data, region_code=None, use_ml=True _ml_model_cache['sentence_transformer'] = None gc.collect() + # Send final progress update + if send_progress: + send_epg_matching_progress( + 
total_channels, + len(matched_channels), + stage="completed" + ) + return { "channels_to_update": channels_to_update, "matched_channels": matched_channels @@ -376,8 +430,8 @@ def match_epg_channels(): logger.info(f"Processing {len(channels_data)} channels against {len(epg_data)} EPG entries") - # Run EPG matching - result = match_channels_to_epg(channels_data, epg_data, region_code, use_ml=True) + # Run EPG matching with progress updates + result = match_channels_to_epg(channels_data, epg_data, region_code, use_ml=True, send_progress=True) channels_to_update_dicts = result["channels_to_update"] matched_channels = result["matched_channels"] @@ -507,8 +561,11 @@ def match_single_channel_epg(channel_id): logger.info(f"Matching single channel '{channel.name}' against {len(epg_data_list)} EPG entries") - # Use the EPG matching function - result = match_channels_to_epg([channel_data], epg_data_list) + # Send progress for single channel matching + send_epg_matching_progress(1, 0, current_channel_name=channel.name, stage="matching") + + # Use the EPG matching function (no progress updates for single channel to avoid spam) + result = match_channels_to_epg([channel_data], epg_data_list, send_progress=False) channels_to_update = result.get("channels_to_update", []) matched_channels = result.get("matched_channels", []) @@ -540,6 +597,9 @@ def match_single_channel_epg(channel_id): logger.info(success_msg) + # Send completion progress for single channel + send_epg_matching_progress(1, 1, current_channel_name=channel.name, stage="completed") + # Clean up ML models from memory after single channel matching if _ml_model_cache['sentence_transformer'] is not None: logger.info("Cleaning up ML models from memory") @@ -556,6 +616,9 @@ def match_single_channel_epg(channel_id): return {"matched": False, "message": "Matched EPG data not found"} # No match found + # Send completion progress for single channel (failed) + send_epg_matching_progress(1, 0, current_channel_name=channel.name, 
stage="completed") + # Clean up ML models from memory after single channel matching if _ml_model_cache['sentence_transformer'] is not None: logger.info("Cleaning up ML models from memory") diff --git a/frontend/src/WebSocket.jsx b/frontend/src/WebSocket.jsx index d917d115..2c57a37e 100644 --- a/frontend/src/WebSocket.jsx +++ b/frontend/src/WebSocket.jsx @@ -330,6 +330,51 @@ export const WebsocketProvider = ({ children }) => { } break; + case 'epg_matching_progress': { + const progress = parsedEvent.data; + const id = 'epg-matching-progress'; + + if (progress.stage === 'starting') { + notifications.show({ + id, + title: 'EPG Matching in Progress', + message: `Starting to match ${progress.total} channels...`, + color: 'blue.5', + autoClose: false, + withCloseButton: false, + loading: true, + }); + } else if (progress.stage === 'matching') { + let message = `Matched ${progress.matched} of ${progress.total} channels`; + if (progress.remaining > 0) { + message += ` (${progress.remaining} remaining)`; + } + if (progress.current_channel) { + message += `\nCurrently processing: ${progress.current_channel}`; + } + + notifications.update({ + id, + title: 'EPG Matching in Progress', + message, + color: 'blue.5', + autoClose: false, + withCloseButton: false, + loading: true, + }); + } else if (progress.stage === 'completed') { + notifications.update({ + id, + title: 'EPG Matching Complete', + message: `Successfully matched ${progress.matched} of ${progress.total} channels (${progress.progress_percent}%)`, + color: progress.matched > 0 ? 
'green.5' : 'orange', + loading: false, + autoClose: 6000, + }); + } + break; + } + case 'm3u_profile_test': setProfilePreview( parsedEvent.data.search_preview, diff --git a/frontend/src/components/forms/Channel.jsx b/frontend/src/components/forms/Channel.jsx index d3d6a94b..6e5eabec 100644 --- a/frontend/src/components/forms/Channel.jsx +++ b/frontend/src/components/forms/Channel.jsx @@ -78,6 +78,7 @@ const ChannelForm = ({ channel = null, isOpen, onClose }) => { const [groupPopoverOpened, setGroupPopoverOpened] = useState(false); const [groupFilter, setGroupFilter] = useState(''); + const [autoMatchLoading, setAutoMatchLoading] = useState(false); const groupOptions = Object.values(channelGroups); const addStream = (stream) => { @@ -132,6 +133,7 @@ const ChannelForm = ({ channel = null, isOpen, onClose }) => { return; } + setAutoMatchLoading(true); try { const response = await API.matchChannelEpg(channel.id); @@ -160,6 +162,8 @@ const ChannelForm = ({ channel = null, isOpen, onClose }) => { color: 'red', }); console.error('Auto-match error:', error); + } finally { + setAutoMatchLoading(false); } }; @@ -758,6 +762,7 @@ const ChannelForm = ({ channel = null, isOpen, onClose }) => { handleAutoMatchEpg(); }} disabled={!channel || !channel.id} + loading={autoMatchLoading} title={ !channel || !channel.id ? 'Auto-match is only available for existing channels' From c7235f66bac226181e51e1bb3e4a90ffb5e5a59c Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Tue, 16 Sep 2025 14:12:45 -0500 Subject: [PATCH 009/119] Use stricter matching for bulk matching. 
--- apps/channels/tasks.py | 51 ++++++++++++++++++++++++++++++------------ 1 file changed, 37 insertions(+), 14 deletions(-) diff --git a/apps/channels/tasks.py b/apps/channels/tasks.py index 5ad291a1..f2ff1a2b 100755 --- a/apps/channels/tasks.py +++ b/apps/channels/tasks.py @@ -155,6 +155,10 @@ def match_channels_to_epg(channels_data, epg_data, region_code=None, use_ml=True """ EPG matching logic that finds the best EPG matches for channels using multiple matching strategies including fuzzy matching and ML models. + + Automatically uses conservative thresholds for bulk matching (multiple channels) + to avoid bad matches that create user cleanup work, and aggressive thresholds + for single channel matching where users specifically requested a match attempt. """ channels_to_update = [] matched_channels = [] @@ -169,7 +173,26 @@ def match_channels_to_epg(channels_data, epg_data, region_code=None, use_ml=True epg_embeddings = None ml_available = use_ml - # Process each channel + # Automatically determine matching strategy based on number of channels + is_bulk_matching = len(channels_data) > 1 + + # Adjust matching thresholds based on operation type + if is_bulk_matching: + # Conservative thresholds for bulk matching to avoid creating cleanup work + FUZZY_HIGH_CONFIDENCE = 90 # Only very high fuzzy scores + FUZZY_MEDIUM_CONFIDENCE = 70 # Higher threshold for ML enhancement + ML_HIGH_CONFIDENCE = 0.75 # Higher ML confidence required + ML_LAST_RESORT = 0.65 # More conservative last resort + FUZZY_LAST_RESORT_MIN = 50 # Higher fuzzy minimum for last resort + logger.info(f"Using conservative thresholds for bulk matching ({total_channels} channels)") + else: + # More aggressive thresholds for single channel matching (user requested specific match) + FUZZY_HIGH_CONFIDENCE = 85 # Original threshold + FUZZY_MEDIUM_CONFIDENCE = 40 # Original threshold + ML_HIGH_CONFIDENCE = 0.65 # Original threshold + ML_LAST_RESORT = 0.50 # Original desperate threshold + FUZZY_LAST_RESORT_MIN 
= 20 # Original minimum + logger.info("Using aggressive thresholds for single channel matching") # Process each channel for index, chan in enumerate(channels_data): normalized_tvg_id = chan.get("tvg_id", "") fallback_name = chan["tvg_id"].strip() if chan["tvg_id"] else chan["name"] @@ -254,14 +277,14 @@ def match_channels_to_epg(channels_data, epg_data, region_code=None, use_ml=True continue # High confidence match - accept immediately - if best_score >= BEST_FUZZY_THRESHOLD: + if best_score >= FUZZY_HIGH_CONFIDENCE: chan["epg_data_id"] = best_epg["id"] channels_to_update.append(chan) matched_channels.append((chan['id'], chan['name'], best_epg["tvg_id"])) logger.info(f"Channel {chan['id']} '{chan['name']}' => matched tvg_id={best_epg['tvg_id']} (score={best_score})") # Medium confidence - use ML if available (lazy load models here) - elif best_score >= LOWER_FUZZY_THRESHOLD and ml_available: + elif best_score >= FUZZY_MEDIUM_CONFIDENCE and ml_available: # Lazy load ML models only when we actually need them if st_model is None: st_model, util = get_sentence_transformer() @@ -288,7 +311,7 @@ def match_channels_to_epg(channels_data, epg_data, region_code=None, use_ml=True top_index = int(sim_scores.argmax()) top_value = float(sim_scores[top_index]) - if top_value >= EMBED_SIM_THRESHOLD: + if top_value >= ML_HIGH_CONFIDENCE: # Find the EPG entry that corresponds to this embedding index epg_with_names = [epg for epg in epg_data if epg.get("norm_name")] matched_epg = epg_with_names[top_index] @@ -298,10 +321,10 @@ def match_channels_to_epg(channels_data, epg_data, region_code=None, use_ml=True matched_channels.append((chan['id'], chan['name'], matched_epg["tvg_id"])) logger.info(f"Channel {chan['id']} '{chan['name']}' => matched EPG tvg_id={matched_epg['tvg_id']} (fuzzy={best_score}, ML-sim={top_value:.2f})") else: - logger.info(f"Channel {chan['id']} '{chan['name']}' => fuzzy={best_score}, ML-sim={top_value:.2f} < {EMBED_SIM_THRESHOLD}, trying last resort...") + 
logger.info(f"Channel {chan['id']} '{chan['name']}' => fuzzy={best_score}, ML-sim={top_value:.2f} < {ML_HIGH_CONFIDENCE}, trying last resort...") # Last resort: try ML with very low fuzzy threshold - if top_value >= 0.45: # Lower ML threshold as last resort + if top_value >= ML_LAST_RESORT: # Dynamic last resort threshold epg_with_names = [epg for epg in epg_data if epg.get("norm_name")] matched_epg = epg_with_names[top_index] @@ -310,7 +333,7 @@ def match_channels_to_epg(channels_data, epg_data, region_code=None, use_ml=True matched_channels.append((chan['id'], chan['name'], matched_epg["tvg_id"])) logger.info(f"Channel {chan['id']} '{chan['name']}' => LAST RESORT match EPG tvg_id={matched_epg['tvg_id']} (fuzzy={best_score}, ML-sim={top_value:.2f})") else: - logger.info(f"Channel {chan['id']} '{chan['name']}' => even last resort ML-sim {top_value:.2f} < 0.45, skipping") + logger.info(f"Channel {chan['id']} '{chan['name']}' => even last resort ML-sim {top_value:.2f} < {ML_LAST_RESORT}, skipping") except Exception as e: logger.warning(f"ML matching failed for channel {chan['id']}: {e}") @@ -318,7 +341,7 @@ def match_channels_to_epg(channels_data, epg_data, region_code=None, use_ml=True logger.info(f"Channel {chan['id']} '{chan['name']}' => fuzzy score {best_score} below threshold, skipping") # Last resort: Try ML matching even with very low fuzzy scores - elif best_score >= 20 and ml_available: + elif best_score >= FUZZY_LAST_RESORT_MIN and ml_available: # Lazy load ML models for last resort attempts if st_model is None: st_model, util = get_sentence_transformer() @@ -346,7 +369,7 @@ def match_channels_to_epg(channels_data, epg_data, region_code=None, use_ml=True top_index = int(sim_scores.argmax()) top_value = float(sim_scores[top_index]) - if top_value >= 0.50: # Even lower threshold for last resort + if top_value >= ML_LAST_RESORT: # Dynamic threshold for desperate attempts # Find the EPG entry that corresponds to this embedding index epg_with_names = [epg for 
epg in epg_data if epg.get("norm_name")] matched_epg = epg_with_names[top_index] @@ -356,13 +379,13 @@ def match_channels_to_epg(channels_data, epg_data, region_code=None, use_ml=True matched_channels.append((chan['id'], chan['name'], matched_epg["tvg_id"])) logger.info(f"Channel {chan['id']} '{chan['name']}' => DESPERATE LAST RESORT match EPG tvg_id={matched_epg['tvg_id']} (fuzzy={best_score}, ML-sim={top_value:.2f})") else: - logger.info(f"Channel {chan['id']} '{chan['name']}' => desperate last resort ML-sim {top_value:.2f} < 0.50, giving up") + logger.info(f"Channel {chan['id']} '{chan['name']}' => desperate last resort ML-sim {top_value:.2f} < {ML_LAST_RESORT}, giving up") except Exception as e: logger.warning(f"Last resort ML matching failed for channel {chan['id']}: {e}") - logger.info(f"Channel {chan['id']} '{chan['name']}' => best fuzzy score={best_score} < {LOWER_FUZZY_THRESHOLD}, giving up") + logger.info(f"Channel {chan['id']} '{chan['name']}' => best fuzzy score={best_score} < {FUZZY_MEDIUM_CONFIDENCE}, giving up") else: # No ML available or very low fuzzy score - logger.info(f"Channel {chan['id']} '{chan['name']}' => best fuzzy score={best_score} < {LOWER_FUZZY_THRESHOLD}, no ML fallback available") + logger.info(f"Channel {chan['id']} '{chan['name']}' => best fuzzy score={best_score} < {FUZZY_MEDIUM_CONFIDENCE}, no ML fallback available") # Clean up ML models from memory after matching (infrequent operation) if _ml_model_cache['sentence_transformer'] is not None: @@ -430,7 +453,7 @@ def match_epg_channels(): logger.info(f"Processing {len(channels_data)} channels against {len(epg_data)} EPG entries") - # Run EPG matching with progress updates + # Run EPG matching with progress updates - automatically uses conservative thresholds for bulk operations result = match_channels_to_epg(channels_data, epg_data, region_code, use_ml=True, send_progress=True) channels_to_update_dicts = result["channels_to_update"] matched_channels = result["matched_channels"] @@ 
-564,7 +587,7 @@ def match_single_channel_epg(channel_id): # Send progress for single channel matching send_epg_matching_progress(1, 0, current_channel_name=channel.name, stage="matching") - # Use the EPG matching function (no progress updates for single channel to avoid spam) + # Use the EPG matching function - automatically uses aggressive thresholds for single channel result = match_channels_to_epg([channel_data], epg_data_list, send_progress=False) channels_to_update = result.get("channels_to_update", []) matched_channels = result.get("matched_channels", []) From 20685b8344f3474bde351e5cba29562317852b2a Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Tue, 16 Sep 2025 14:27:07 -0500 Subject: [PATCH 010/119] Lower regional bonus. Remove epg_match script. --- apps/channels/tasks.py | 4 +- scripts/epg_match.py | 182 ----------------------------------------- 2 files changed, 2 insertions(+), 184 deletions(-) delete mode 100644 scripts/epg_match.py diff --git a/apps/channels/tasks.py b/apps/channels/tasks.py index f2ff1a2b..17d40ca2 100755 --- a/apps/channels/tasks.py +++ b/apps/channels/tasks.py @@ -253,11 +253,11 @@ def match_channels_to_epg(channels_data, epg_data, region_code=None, use_ml=True if dot_regions: if region_code in dot_regions: - bonus = 30 # Bigger bonus for matching region + bonus = 15 # Bigger bonus for matching region else: bonus = -15 # Penalty for different region elif region_code in combined_text: - bonus = 15 + bonus = 10 score = base_score + bonus diff --git a/scripts/epg_match.py b/scripts/epg_match.py deleted file mode 100644 index 890ffe3a..00000000 --- a/scripts/epg_match.py +++ /dev/null @@ -1,182 +0,0 @@ -# ml_model.py - -import sys -import json -import re -import os -import logging - -from rapidfuzz import fuzz -from sentence_transformers import util -from sentence_transformers import SentenceTransformer as st - -# Set up logger -logger = logging.getLogger(__name__) - -# Load the sentence-transformers model once at the module level 
-SENTENCE_MODEL_NAME = "sentence-transformers/all-MiniLM-L6-v2" -MODEL_PATH = os.path.join("/app", "models", "all-MiniLM-L6-v2") - -# Thresholds -BEST_FUZZY_THRESHOLD = 85 -LOWER_FUZZY_THRESHOLD = 40 -EMBED_SIM_THRESHOLD = 0.65 - -def process_data(input_data): - os.makedirs(MODEL_PATH, exist_ok=True) - - # If not present locally, download: - if not os.path.exists(os.path.join(MODEL_PATH, "config.json")): - logger.info(f"Local model not found in {MODEL_PATH}; downloading from {SENTENCE_MODEL_NAME}...") - st_model = st(SENTENCE_MODEL_NAME, cache_folder=MODEL_PATH) - else: - logger.info(f"Loading local model from {MODEL_PATH}") - st_model = st(MODEL_PATH) - - channels = input_data["channels"] - epg_data = input_data["epg_data"] - region_code = input_data.get("region_code", None) - - epg_embeddings = None - if any(row["norm_name"] for row in epg_data): - epg_embeddings = st_model.encode( - [row["norm_name"] for row in epg_data], - convert_to_tensor=True - ) - - channels_to_update = [] - matched_channels = [] - - for chan in channels: - normalized_tvg_id = chan.get("tvg_id", "") - fallback_name = chan["tvg_id"].strip() if chan["tvg_id"] else chan["name"] - - # Exact TVG ID match (direct match) - epg_by_tvg_id = next((epg for epg in epg_data if epg["tvg_id"] == normalized_tvg_id), None) - if normalized_tvg_id and epg_by_tvg_id: - chan["epg_data_id"] = epg_by_tvg_id["id"] - channels_to_update.append(chan) - - # Add to matched_channels list so it's counted in the total - matched_channels.append((chan['id'], fallback_name, epg_by_tvg_id["tvg_id"])) - - logger.info(f"Channel {chan['id']} '{fallback_name}' => EPG found by tvg_id={epg_by_tvg_id['tvg_id']}") - continue - - # If channel has a tvg_id that doesn't exist in EPGData, do direct check. - # I don't THINK this should happen now that we assign EPG on channel creation. 
- if chan["tvg_id"]: - epg_match = [epg["id"] for epg in epg_data if epg["tvg_id"] == chan["tvg_id"]] - if epg_match: - chan["epg_data_id"] = epg_match[0] - logger.info(f"Channel {chan['id']} '{chan['name']}' => EPG found by tvg_id={chan['tvg_id']}") - channels_to_update.append(chan) - continue - - # C) Perform name-based fuzzy matching - if not chan["norm_chan"]: - logger.debug(f"Channel {chan['id']} '{chan['name']}' => empty after normalization, skipping") - continue - - best_score = 0 - best_epg = None - for row in epg_data: - if not row["norm_name"]: - continue - - base_score = fuzz.ratio(chan["norm_chan"], row["norm_name"]) - bonus = 0 - # Region-based bonus/penalty - combined_text = row["tvg_id"].lower() + " " + row["name"].lower() - dot_regions = re.findall(r'\.([a-z]{2})', combined_text) - if region_code: - if dot_regions: - if region_code in dot_regions: - bonus = 30 # bigger bonus if .us or .ca matches - else: - bonus = -15 - elif region_code in combined_text: - bonus = 15 - score = base_score + bonus - - logger.debug( - f"Channel {chan['id']} '{fallback_name}' => EPG row {row['id']}: " - f"name='{row['name']}', norm_name='{row['norm_name']}', " - f"combined_text='{combined_text}', dot_regions={dot_regions}, " - f"base_score={base_score}, bonus={bonus}, total_score={score}" - ) - - if score > best_score: - best_score = score - best_epg = row - - # If no best match was found, skip - if not best_epg: - logger.debug(f"Channel {chan['id']} '{fallback_name}' => no EPG match at all.") - continue - - # If best_score is above BEST_FUZZY_THRESHOLD => direct accept - if best_score >= BEST_FUZZY_THRESHOLD: - chan["epg_data_id"] = best_epg["id"] - channels_to_update.append(chan) - - matched_channels.append((chan['id'], fallback_name, best_epg["tvg_id"])) - logger.info( - f"Channel {chan['id']} '{fallback_name}' => matched tvg_id={best_epg['tvg_id']} " - f"(score={best_score})" - ) - - # If best_score is in the “middle range,” do embedding check - elif best_score >= 
LOWER_FUZZY_THRESHOLD and epg_embeddings is not None: - chan_embedding = st_model.encode(chan["norm_chan"], convert_to_tensor=True) - sim_scores = util.cos_sim(chan_embedding, epg_embeddings)[0] - top_index = int(sim_scores.argmax()) - top_value = float(sim_scores[top_index]) - if top_value >= EMBED_SIM_THRESHOLD: - matched_epg = epg_data[top_index] - chan["epg_data_id"] = matched_epg["id"] - channels_to_update.append(chan) - - matched_channels.append((chan['id'], fallback_name, matched_epg["tvg_id"])) - logger.info( - f"Channel {chan['id']} '{fallback_name}' => matched EPG tvg_id={matched_epg['tvg_id']} " - f"(fuzzy={best_score}, cos-sim={top_value:.2f})" - ) - else: - logger.info( - f"Channel {chan['id']} '{fallback_name}' => fuzzy={best_score}, " - f"cos-sim={top_value:.2f} < {EMBED_SIM_THRESHOLD}, skipping" - ) - else: - # No good match found - fuzzy score is too low - logger.info( - f"Channel {chan['id']} '{fallback_name}' => best fuzzy match score={best_score} < {LOWER_FUZZY_THRESHOLD}, skipping" - ) - - return { - "channels_to_update": channels_to_update, - "matched_channels": matched_channels - } - -def main(): - # Configure logging - logging_level = os.environ.get('DISPATCHARR_LOG_LEVEL', 'INFO') - logging.basicConfig( - level=getattr(logging, logging_level), - format='%(asctime)s - %(name)s - %(levelname)s - %(message)s', - stream=sys.stderr - ) - - # Read input data from a file - input_file_path = sys.argv[1] - with open(input_file_path, 'r') as f: - input_data = json.load(f) - - # Process data with the ML model (or your logic) - result = process_data(input_data) - - # Output result to stdout - print(json.dumps(result)) - -if __name__ == "__main__": - main() From 60e378b1ced71931d02e76e956c509e048c74d8b Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Tue, 16 Sep 2025 14:38:16 -0500 Subject: [PATCH 011/119] Add support for matching selected channels with EPG data - Updated API to accept optional channel IDs for EPG matching. 
- Enhanced match_epg method to process only specified channels if provided. - Implemented new task for matching selected channels in the backend. - Updated frontend to trigger EPG matching for selected channels with notifications. --- apps/channels/api_views.py | 30 +++- apps/channels/tasks.py | 144 ++++++++++++++++++ frontend/src/api.js | 8 +- .../ChannelsTable/ChannelTableHeader.jsx | 24 ++- 4 files changed, 195 insertions(+), 11 deletions(-) diff --git a/apps/channels/api_views.py b/apps/channels/api_views.py index e522b618..4e3851b7 100644 --- a/apps/channels/api_views.py +++ b/apps/channels/api_views.py @@ -39,7 +39,7 @@ from .serializers import ( ChannelProfileSerializer, RecordingSerializer, ) -from .tasks import match_epg_channels, evaluate_series_rules, evaluate_series_rules_impl, match_single_channel_epg +from .tasks import match_epg_channels, evaluate_series_rules, evaluate_series_rules_impl, match_single_channel_epg, match_selected_channels_epg import django_filters from django_filters.rest_framework import DjangoFilterBackend from rest_framework.filters import SearchFilter, OrderingFilter @@ -779,14 +779,36 @@ class ChannelViewSet(viewsets.ModelViewSet): # ───────────────────────────────────────────────────────── @swagger_auto_schema( method="post", - operation_description="Kick off a Celery task that tries to fuzzy-match channels with EPG data.", + operation_description="Kick off a Celery task that tries to fuzzy-match channels with EPG data. If channel_ids are provided, only those channels will be processed.", + request_body=openapi.Schema( + type=openapi.TYPE_OBJECT, + properties={ + 'channel_ids': openapi.Schema( + type=openapi.TYPE_ARRAY, + items=openapi.Schema(type=openapi.TYPE_INTEGER), + description='List of channel IDs to process. If empty or not provided, all channels without EPG will be processed.' 
+ ) + } + ), responses={202: "EPG matching task initiated"}, ) @action(detail=False, methods=["post"], url_path="match-epg") def match_epg(self, request): - match_epg_channels.delay() + # Get channel IDs from request body if provided + channel_ids = request.data.get('channel_ids', []) + + if channel_ids: + # Process only selected channels + from .tasks import match_selected_channels_epg + match_selected_channels_epg.delay(channel_ids) + message = f"EPG matching task initiated for {len(channel_ids)} selected channel(s)." + else: + # Process all channels without EPG (original behavior) + match_epg_channels.delay() + message = "EPG matching task initiated for all channels without EPG." + return Response( - {"message": "EPG matching task initiated."}, status=status.HTTP_202_ACCEPTED + {"message": message}, status=status.HTTP_202_ACCEPTED ) @swagger_auto_schema( diff --git a/apps/channels/tasks.py b/apps/channels/tasks.py index 17d40ca2..6352ee84 100755 --- a/apps/channels/tasks.py +++ b/apps/channels/tasks.py @@ -526,6 +526,150 @@ def match_epg_channels(): cleanup_memory(log_usage=True, force_collection=True) +@shared_task +def match_selected_channels_epg(channel_ids): + """ + Match EPG data for only the specified selected channels. + Uses the same integrated EPG matching logic but processes only selected channels. 
+ """ + try: + logger.info(f"Starting integrated EPG matching for {len(channel_ids)} selected channels...") + + # Get region preference + try: + region_obj = CoreSettings.objects.get(key="preferred-region") + region_code = region_obj.value.strip().lower() + except CoreSettings.DoesNotExist: + region_code = None + + # Get only the specified channels that don't have EPG data assigned + channels_without_epg = Channel.objects.filter( + id__in=channel_ids, + epg_data__isnull=True + ) + logger.info(f"Found {channels_without_epg.count()} selected channels without EPG data") + + if not channels_without_epg.exists(): + logger.info("No selected channels need EPG matching.") + + # Send WebSocket update + channel_layer = get_channel_layer() + async_to_sync(channel_layer.group_send)( + 'updates', + { + 'type': 'update', + "data": { + "success": True, + "type": "epg_match", + "refresh_channels": True, + "matches_count": 0, + "message": "No selected channels need EPG matching", + "associations": [] + } + } + ) + return "No selected channels needed EPG matching." 
+ + channels_data = [] + for channel in channels_without_epg: + normalized_tvg_id = channel.tvg_id.strip().lower() if channel.tvg_id else "" + channels_data.append({ + "id": channel.id, + "name": channel.name, + "tvg_id": normalized_tvg_id, + "original_tvg_id": channel.tvg_id, + "fallback_name": normalized_tvg_id if normalized_tvg_id else channel.name, + "norm_chan": normalize_name(channel.name) + }) + + # Get all EPG data + epg_data = [] + for epg in EPGData.objects.all(): + normalized_tvg_id = epg.tvg_id.strip().lower() if epg.tvg_id else "" + epg_data.append({ + 'id': epg.id, + 'tvg_id': normalized_tvg_id, + 'original_tvg_id': epg.tvg_id, + 'name': epg.name, + 'norm_name': normalize_name(epg.name), + 'epg_source_id': epg.epg_source.id if epg.epg_source else None, + }) + + logger.info(f"Processing {len(channels_data)} selected channels against {len(epg_data)} EPG entries") + + # Run EPG matching with progress updates - automatically uses appropriate thresholds + result = match_channels_to_epg(channels_data, epg_data, region_code, use_ml=True, send_progress=True) + channels_to_update_dicts = result["channels_to_update"] + matched_channels = result["matched_channels"] + + # Update channels in database + if channels_to_update_dicts: + channel_ids_to_update = [d["id"] for d in channels_to_update_dicts] + channels_qs = Channel.objects.filter(id__in=channel_ids_to_update) + channels_list = list(channels_qs) + + # Create mapping from channel_id to epg_data_id + epg_mapping = {d["id"]: d["epg_data_id"] for d in channels_to_update_dicts} + + # Update each channel with matched EPG data + for channel_obj in channels_list: + epg_data_id = epg_mapping.get(channel_obj.id) + if epg_data_id: + try: + epg_data_obj = EPGData.objects.get(id=epg_data_id) + channel_obj.epg_data = epg_data_obj + except EPGData.DoesNotExist: + logger.error(f"EPG data {epg_data_id} not found for channel {channel_obj.id}") + + # Bulk update all channels + Channel.objects.bulk_update(channels_list, 
["epg_data"]) + + total_matched = len(matched_channels) + if total_matched: + logger.info(f"Selected Channel Match Summary: {total_matched} channel(s) matched.") + for (cid, cname, tvg) in matched_channels: + logger.info(f" - Channel ID={cid}, Name='{cname}' => tvg_id='{tvg}'") + else: + logger.info("No selected channels were matched.") + + logger.info("Finished integrated EPG matching for selected channels.") + + # Send WebSocket update + channel_layer = get_channel_layer() + associations = [ + {"channel_id": chan["id"], "epg_data_id": chan["epg_data_id"]} + for chan in channels_to_update_dicts + ] + + async_to_sync(channel_layer.group_send)( + 'updates', + { + 'type': 'update', + "data": { + "success": True, + "type": "epg_match", + "refresh_channels": True, + "matches_count": total_matched, + "message": f"EPG matching complete: {total_matched} selected channel(s) matched", + "associations": associations + } + } + ) + + return f"Done. Matched {total_matched} selected channel(s)." + + finally: + # Clean up ML models from memory after bulk matching + if _ml_model_cache['sentence_transformer'] is not None: + logger.info("Cleaning up ML models from memory") + _ml_model_cache['sentence_transformer'] = None + + # Memory cleanup + gc.collect() + from core.utils import cleanup_memory + cleanup_memory(log_usage=True, force_collection=True) + + @shared_task def match_single_channel_epg(channel_id): """ diff --git a/frontend/src/api.js b/frontend/src/api.js index d3e222d2..09f8c3c1 100644 --- a/frontend/src/api.js +++ b/frontend/src/api.js @@ -1437,12 +1437,18 @@ export default class API { } } - static async matchEpg() { + static async matchEpg(channelIds = null) { try { + const requestBody = channelIds ? 
{ channel_ids: channelIds } : {}; + const response = await request( `${host}/api/channels/channels/match-epg/`, { method: 'POST', + headers: { + 'Content-Type': 'application/json', + }, + body: JSON.stringify(requestBody), } ); diff --git a/frontend/src/components/tables/ChannelsTable/ChannelTableHeader.jsx b/frontend/src/components/tables/ChannelsTable/ChannelTableHeader.jsx index 54fa2f8d..e9f5172d 100644 --- a/frontend/src/components/tables/ChannelsTable/ChannelTableHeader.jsx +++ b/frontend/src/components/tables/ChannelsTable/ChannelTableHeader.jsx @@ -143,11 +143,18 @@ const ChannelTableHeader = ({ const matchEpg = async () => { try { // Hit our new endpoint that triggers the fuzzy matching Celery task - await API.matchEpg(); - - notifications.show({ - title: 'EPG matching task started!', - }); + // If channels are selected, only match those; otherwise match all + if (selectedTableIds.length > 0) { + await API.matchEpg(selectedTableIds); + notifications.show({ + title: `EPG matching task started for ${selectedTableIds.length} selected channel(s)!`, + }); + } else { + await API.matchEpg(); + notifications.show({ + title: 'EPG matching task started for all channels without EPG!', + }); + } } catch (err) { notifications.show(`Error: ${err.message}`); } @@ -298,7 +305,12 @@ const ChannelTableHeader = ({ disabled={authUser.user_level != USER_LEVELS.ADMIN} onClick={matchEpg} > - Auto-Match + + {selectedTableIds.length > 0 + ? `Auto-Match (${selectedTableIds.length} selected)` + : 'Auto-Match EPG' + } + Date: Tue, 16 Sep 2025 14:39:04 -0500 Subject: [PATCH 012/119] Minor formatting adjustment. 
--- apps/channels/api_views.py | 4 ++-- apps/channels/tasks.py | 2 +- frontend/src/api.js | 2 +- .../components/tables/ChannelsTable/ChannelTableHeader.jsx | 7 +++---- 4 files changed, 7 insertions(+), 8 deletions(-) diff --git a/apps/channels/api_views.py b/apps/channels/api_views.py index 4e3851b7..c1f7034e 100644 --- a/apps/channels/api_views.py +++ b/apps/channels/api_views.py @@ -796,7 +796,7 @@ class ChannelViewSet(viewsets.ModelViewSet): def match_epg(self, request): # Get channel IDs from request body if provided channel_ids = request.data.get('channel_ids', []) - + if channel_ids: # Process only selected channels from .tasks import match_selected_channels_epg @@ -806,7 +806,7 @@ class ChannelViewSet(viewsets.ModelViewSet): # Process all channels without EPG (original behavior) match_epg_channels.delay() message = "EPG matching task initiated for all channels without EPG." - + return Response( {"message": message}, status=status.HTTP_202_ACCEPTED ) diff --git a/apps/channels/tasks.py b/apps/channels/tasks.py index 6352ee84..c1e63658 100755 --- a/apps/channels/tasks.py +++ b/apps/channels/tasks.py @@ -551,7 +551,7 @@ def match_selected_channels_epg(channel_ids): if not channels_without_epg.exists(): logger.info("No selected channels need EPG matching.") - + # Send WebSocket update channel_layer = get_channel_layer() async_to_sync(channel_layer.group_send)( diff --git a/frontend/src/api.js b/frontend/src/api.js index 09f8c3c1..a1c761c6 100644 --- a/frontend/src/api.js +++ b/frontend/src/api.js @@ -1440,7 +1440,7 @@ export default class API { static async matchEpg(channelIds = null) { try { const requestBody = channelIds ? 
{ channel_ids: channelIds } : {}; - + const response = await request( `${host}/api/channels/channels/match-epg/`, { diff --git a/frontend/src/components/tables/ChannelsTable/ChannelTableHeader.jsx b/frontend/src/components/tables/ChannelsTable/ChannelTableHeader.jsx index e9f5172d..b7e04d7d 100644 --- a/frontend/src/components/tables/ChannelsTable/ChannelTableHeader.jsx +++ b/frontend/src/components/tables/ChannelsTable/ChannelTableHeader.jsx @@ -306,10 +306,9 @@ const ChannelTableHeader = ({ onClick={matchEpg} > - {selectedTableIds.length > 0 - ? `Auto-Match (${selectedTableIds.length} selected)` - : 'Auto-Match EPG' - } + {selectedTableIds.length > 0 + ? `Auto-Match (${selectedTableIds.length} selected)` + : 'Auto-Match EPG'} From 64a019597d9799ea667f0201e3bdb11c1e709ba0 Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Tue, 16 Sep 2025 16:20:16 -0500 Subject: [PATCH 013/119] Add ability to channel edit form and bulk channel editor to set logos and names from assigned epg. Closes #157 [Feature]: Logo from EPG --- frontend/src/components/forms/Channel.jsx | 132 +++++++++-- .../src/components/forms/ChannelBatch.jsx | 208 +++++++++++++++++- 2 files changed, 317 insertions(+), 23 deletions(-) diff --git a/frontend/src/components/forms/Channel.jsx b/frontend/src/components/forms/Channel.jsx index 6e5eabec..610f112e 100644 --- a/frontend/src/components/forms/Channel.jsx +++ b/frontend/src/components/forms/Channel.jsx @@ -167,6 +167,87 @@ const ChannelForm = ({ channel = null, isOpen, onClose }) => { } }; + const handleSetNameFromEpg = () => { + const epgDataId = formik.values.epg_data_id; + if (!epgDataId) { + notifications.show({ + title: 'No EPG Selected', + message: 'Please select an EPG source first.', + color: 'orange', + }); + return; + } + + const tvg = tvgsById[epgDataId]; + if (tvg && tvg.name) { + formik.setFieldValue('name', tvg.name); + notifications.show({ + title: 'Success', + message: `Channel name set to "${tvg.name}"`, + color: 'green', + }); + } else 
{ + notifications.show({ + title: 'No Name Available', + message: 'No name found in the selected EPG data.', + color: 'orange', + }); + } + }; + + const handleSetLogoFromEpg = async () => { + const epgDataId = formik.values.epg_data_id; + if (!epgDataId) { + notifications.show({ + title: 'No EPG Selected', + message: 'Please select an EPG source first.', + color: 'orange', + }); + return; + } + + const tvg = tvgsById[epgDataId]; + if (!tvg || !tvg.name) { + notifications.show({ + title: 'No EPG Name', + message: 'EPG data does not have a name to match against logos.', + color: 'orange', + }); + return; + } + + try { + // Try to find a logo that matches the EPG name + const matchingLogo = Object.values(logos).find( + (logo) => + logo.name.toLowerCase().includes(tvg.name.toLowerCase()) || + tvg.name.toLowerCase().includes(logo.name.toLowerCase()) + ); + + if (matchingLogo) { + formik.setFieldValue('logo_id', matchingLogo.id); + notifications.show({ + title: 'Success', + message: `Logo set to "${matchingLogo.name}"`, + color: 'green', + }); + } else { + notifications.show({ + title: 'No Matching Logo', + message: `No existing logo found that matches "${tvg.name}". Consider uploading a logo or using the smart logo selection.`, + color: 'orange', + }); + } + } catch (error) { + notifications.show({ + title: 'Error', + message: 'Failed to set logo from EPG data', + color: 'red', + }); + console.error('Set logo from EPG error:', error); + } + }; + const formik = useFormik({ initialValues: { name: '', @@ -349,15 +430,27 @@ const ChannelForm = ({ channel = null, isOpen, onClose }) => {
- + + + + { - + + + + diff --git a/frontend/src/components/forms/ChannelBatch.jsx b/frontend/src/components/forms/ChannelBatch.jsx index 9973ea57..226565a9 100644 --- a/frontend/src/components/forms/ChannelBatch.jsx +++ b/frontend/src/components/forms/ChannelBatch.jsx @@ -2,6 +2,7 @@ import React, { useState, useEffect, useMemo, useRef } from 'react'; import useChannelsStore from '../../store/channels'; import API from '../../api'; import useStreamProfilesStore from '../../store/streamProfiles'; +import useEPGsStore from '../../store/epgs'; import ChannelGroupForm from './ChannelGroup'; import { Box, @@ -27,6 +28,7 @@ import { import { ListOrdered, SquarePlus, SquareX, X } from 'lucide-react'; import { FixedSizeList as List } from 'react-window'; import { useForm } from '@mantine/form'; +import { notifications } from '@mantine/notifications'; import { USER_LEVELS, USER_LEVEL_LABELS } from '../../constants'; const ChannelBatchForm = ({ channelIds, isOpen, onClose }) => { @@ -38,6 +40,9 @@ const ChannelBatchForm = ({ channelIds, isOpen, onClose }) => { const canEditChannelGroup = useChannelsStore((s) => s.canEditChannelGroup); const streamProfiles = useStreamProfilesStore((s) => s.profiles); + const epgs = useEPGsStore((s) => s.epgs); + const tvgs = useEPGsStore((s) => s.tvgs); + const tvgsById = useEPGsStore((s) => s.tvgsById); const [channelGroupModelOpen, setChannelGroupModalOpen] = useState(false); const [selectedChannelGroup, setSelectedChannelGroup] = useState('-1'); @@ -134,6 +139,137 @@ const ChannelBatchForm = ({ channelIds, isOpen, onClose }) => { } }; + const handleSetNamesFromEpg = async () => { + if (!channelIds || channelIds.length === 0) { + notifications.show({ + title: 'No Channels Selected', + message: 'No channels to update.', + color: 'orange', + }); + return; + } + + try { + const channelsMap = useChannelsStore.getState().channels; + const updates = []; + + for (const id of channelIds) { + const channel = channelsMap[id]; + if (channel && 
channel.epg_data_id) { + const tvg = tvgsById[channel.epg_data_id]; + if (tvg && tvg.name) { + updates.push({ + id, + name: tvg.name, + }); + } + } + } + + if (updates.length === 0) { + notifications.show({ + title: 'No Updates Available', + message: 'No selected channels have EPG data with names.', + color: 'orange', + }); + return; + } + + await API.bulkUpdateChannels(updates); + await Promise.all([ + API.requeryChannels(), + useChannelsStore.getState().fetchChannels(), + ]); + + notifications.show({ + title: 'Success', + message: `Updated names for ${updates.length} channels from EPG data.`, + color: 'green', + }); + } catch (error) { + console.error('Failed to set names from EPG:', error); + notifications.show({ + title: 'Error', + message: 'Failed to set names from EPG data.', + color: 'red', + }); + } + }; + + const handleSetLogosFromEpg = async () => { + if (!channelIds || channelIds.length === 0) { + notifications.show({ + title: 'No Channels Selected', + message: 'No channels to update.', + color: 'orange', + }); + return; + } + + try { + // First, get all available logos + const logosResponse = await API.getLogos(); + const logos = logosResponse.reduce((acc, logo) => { + acc[logo.id] = logo; + return acc; + }, {}); + + const channelsMap = useChannelsStore.getState().channels; + const updates = []; + + for (const id of channelIds) { + const channel = channelsMap[id]; + if (channel && channel.epg_data_id) { + const tvg = tvgsById[channel.epg_data_id]; + if (tvg && tvg.name) { + // Try to find a matching logo + const matchingLogo = Object.values(logos).find( + (logo) => + logo.name.toLowerCase().includes(tvg.name.toLowerCase()) || + tvg.name.toLowerCase().includes(logo.name.toLowerCase()) + ); + + if (matchingLogo) { + updates.push({ + id, + logo_id: matchingLogo.id, + }); + } + } + } + } + + if (updates.length === 0) { + notifications.show({ + title: 'No Matching Logos', + message: + 'No matching logos found for the selected channels based on their EPG 
names.', + color: 'orange', + }); + return; + } + + await API.bulkUpdateChannels(updates); + await Promise.all([ + API.requeryChannels(), + useChannelsStore.getState().fetchChannels(), + ]); + + notifications.show({ + title: 'Success', + message: `Updated logos for ${updates.length} channels based on EPG names.`, + color: 'green', + }); + } catch (error) { + console.error('Failed to set logos from EPG:', error); + notifications.show({ + title: 'Error', + message: 'Failed to set logos from EPG data.', + color: 'red', + }); + } + }; + // useEffect(() => { // // const sameStreamProfile = channels.every( // // (channel) => channel.stream_profile_id == channels[0].stream_profile_id @@ -183,7 +319,7 @@ const ChannelBatchForm = ({ channelIds, isOpen, onClose }) => { @@ -197,7 +333,9 @@ const ChannelBatchForm = ({ channelIds, isOpen, onClose }) => { - Channel Name + + Channel Name + { /> + + + + EPG Operations + + + + + + + + Updates channel names and logos based on their assigned EPG + data + + + { export default ChannelBatchForm; // Lightweight inline preview component to visualize rename results for a subset -const RegexPreview = ({ channelIds, find, replace}) => { +const RegexPreview = ({ channelIds, find, replace }) => { const channelsMap = useChannelsStore((s) => s.channels); const previewItems = useMemo(() => { const items = []; @@ -412,7 +580,8 @@ const RegexPreview = ({ channelIds, find, replace}) => { let re; try { re = new RegExp(find, flags); - } catch (e) { + } catch (error) { + console.error('Invalid regex:', error); return [{ before: 'Invalid regex', after: '' }]; } for (let i = 0; i < Math.min(channelIds.length, 25); i++) { @@ -431,20 +600,41 @@ const RegexPreview = ({ channelIds, find, replace}) => { return ( - Preview (first {Math.min(channelIds.length, 25)} of {channelIds.length} selected) + Preview (first {Math.min(channelIds.length, 25)} of {channelIds.length}{' '} + selected) {previewItems.length === 0 ? ( - No changes with current pattern. 
+ + No changes with current pattern. + ) : ( previewItems.map((row, idx) => ( - + {row.before} - - + + → + + {row.after} From 388d9e7171da1e56ef77f2113807eccc80b51943 Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Tue, 16 Sep 2025 16:25:50 -0500 Subject: [PATCH 014/119] Fix logos not being set. --- frontend/src/components/forms/ChannelBatch.jsx | 15 +++++++++------ 1 file changed, 9 insertions(+), 6 deletions(-) diff --git a/frontend/src/components/forms/ChannelBatch.jsx b/frontend/src/components/forms/ChannelBatch.jsx index 226565a9..1025194c 100644 --- a/frontend/src/components/forms/ChannelBatch.jsx +++ b/frontend/src/components/forms/ChannelBatch.jsx @@ -4,6 +4,7 @@ import API from '../../api'; import useStreamProfilesStore from '../../store/streamProfiles'; import useEPGsStore from '../../store/epgs'; import ChannelGroupForm from './ChannelGroup'; +import { useLogoSelection } from '../../hooks/useSmartLogos'; import { Box, Button, @@ -44,6 +45,12 @@ const ChannelBatchForm = ({ channelIds, isOpen, onClose }) => { const tvgs = useEPGsStore((s) => s.tvgs); const tvgsById = useEPGsStore((s) => s.tvgsById); + const { + logos, + ensureLogosLoaded, + isLoading: logosLoading, + } = useLogoSelection(); + const [channelGroupModelOpen, setChannelGroupModalOpen] = useState(false); const [selectedChannelGroup, setSelectedChannelGroup] = useState('-1'); const [isSubmitting, setIsSubmitting] = useState(false); @@ -207,12 +214,8 @@ const ChannelBatchForm = ({ channelIds, isOpen, onClose }) => { } try { - // First, get all available logos - const logosResponse = await API.getLogos(); - const logos = logosResponse.reduce((acc, logo) => { - acc[logo.id] = logo; - return acc; - }, {}); + // Ensure logos are loaded first + await ensureLogosLoaded(); const channelsMap = useChannelsStore.getState().channels; const updates = []; From 8607d626fa34e7a91ac0e8b439097b00ba004ee2 Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Tue, 16 Sep 2025 16:32:32 -0500 Subject: [PATCH 015/119] 
Update logo store when bulk changing logos. --- frontend/src/components/forms/ChannelBatch.jsx | 3 +++ 1 file changed, 3 insertions(+) diff --git a/frontend/src/components/forms/ChannelBatch.jsx b/frontend/src/components/forms/ChannelBatch.jsx index 1025194c..3492d6ac 100644 --- a/frontend/src/components/forms/ChannelBatch.jsx +++ b/frontend/src/components/forms/ChannelBatch.jsx @@ -253,9 +253,12 @@ const ChannelBatchForm = ({ channelIds, isOpen, onClose }) => { } await API.bulkUpdateChannels(updates); + + // Refresh both channels and logos data await Promise.all([ API.requeryChannels(), useChannelsStore.getState().fetchChannels(), + ensureLogosLoaded(), // Ensure logos are refreshed ]); notifications.show({ From d2d1984797285df95cb3fb7bbabf6daa7b912a2b Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Tue, 16 Sep 2025 17:17:07 -0500 Subject: [PATCH 016/119] Switch bulk epg name and logo to backend celery tasks for efficiency and scrape epg channel logo during epg scanning. --- apps/channels/api_views.py | 62 +++++ apps/channels/tasks.py | 224 ++++++++++++++++++ apps/epg/migrations/0016_epgdata_icon_url.py | 18 ++ apps/epg/models.py | 1 + apps/epg/serializers.py | 1 + apps/epg/tasks.py | 20 +- frontend/src/api.js | 46 ++++ frontend/src/components/forms/Channel.jsx | 58 ++++- .../src/components/forms/ChannelBatch.jsx | 122 ++-------- 9 files changed, 437 insertions(+), 115 deletions(-) create mode 100644 apps/epg/migrations/0016_epgdata_icon_url.py diff --git a/apps/channels/api_views.py b/apps/channels/api_views.py index c1f7034e..7a3d5135 100644 --- a/apps/channels/api_views.py +++ b/apps/channels/api_views.py @@ -493,6 +493,68 @@ class ChannelViewSet(viewsets.ModelViewSet): "channels": serialized_channels }) + @action(detail=False, methods=["post"], url_path="set-names-from-epg") + def set_names_from_epg(self, request): + """ + Trigger a Celery task to set channel names from EPG data + """ + from .tasks import set_channels_names_from_epg + + data = request.data 
+ channel_ids = data.get("channel_ids", []) + + if not channel_ids: + return Response( + {"error": "channel_ids is required"}, + status=status.HTTP_400_BAD_REQUEST, + ) + + if not isinstance(channel_ids, list): + return Response( + {"error": "channel_ids must be a list"}, + status=status.HTTP_400_BAD_REQUEST, + ) + + # Start the Celery task + task = set_channels_names_from_epg.delay(channel_ids) + + return Response({ + "message": f"Started EPG name setting task for {len(channel_ids)} channels", + "task_id": task.id, + "channel_count": len(channel_ids) + }) + + @action(detail=False, methods=["post"], url_path="set-logos-from-epg") + def set_logos_from_epg(self, request): + """ + Trigger a Celery task to set channel logos from EPG data + """ + from .tasks import set_channels_logos_from_epg + + data = request.data + channel_ids = data.get("channel_ids", []) + + if not channel_ids: + return Response( + {"error": "channel_ids is required"}, + status=status.HTTP_400_BAD_REQUEST, + ) + + if not isinstance(channel_ids, list): + return Response( + {"error": "channel_ids must be a list"}, + status=status.HTTP_400_BAD_REQUEST, + ) + + # Start the Celery task + task = set_channels_logos_from_epg.delay(channel_ids) + + return Response({ + "message": f"Started EPG logo setting task for {len(channel_ids)} channels", + "task_id": task.id, + "channel_count": len(channel_ids) + }) + @action(detail=False, methods=["get"], url_path="ids") def get_ids(self, request, *args, **kwargs): # Get the filtered queryset diff --git a/apps/channels/tasks.py b/apps/channels/tasks.py index c1e63658..2760d1a7 100755 --- a/apps/channels/tasks.py +++ b/apps/channels/tasks.py @@ -2465,3 +2465,227 @@ def bulk_create_channels_from_streams(self, stream_ids, channel_profile_ids=None 'error': str(e) }) raise + + +@shared_task(bind=True) +def set_channels_names_from_epg(self, channel_ids): + """ + Celery task to set channel names from EPG data for multiple channels + """ + from core.utils import 
send_websocket_update + + task_id = self.request.id + total_channels = len(channel_ids) + updated_count = 0 + errors = [] + + try: + logger.info(f"Starting EPG name setting task for {total_channels} channels") + + # Send initial progress + send_websocket_update('updates', 'update', { + 'type': 'epg_name_setting_progress', + 'task_id': task_id, + 'progress': 0, + 'total': total_channels, + 'status': 'running', + 'message': 'Starting EPG name setting...' + }) + + batch_size = 100 + for i in range(0, total_channels, batch_size): + batch_ids = channel_ids[i:i + batch_size] + batch_updates = [] + + # Get channels and their EPG data + channels = Channel.objects.filter(id__in=batch_ids).select_related('epg_data') + + for channel in channels: + try: + if channel.epg_data and channel.epg_data.name: + if channel.name != channel.epg_data.name: + channel.name = channel.epg_data.name + batch_updates.append(channel) + updated_count += 1 + except Exception as e: + errors.append(f"Channel {channel.id}: {str(e)}") + logger.error(f"Error processing channel {channel.id}: {e}") + + # Bulk update the batch + if batch_updates: + Channel.objects.bulk_update(batch_updates, ['name']) + + # Send progress update + progress = min(i + batch_size, total_channels) + send_websocket_update('updates', 'update', { + 'type': 'epg_name_setting_progress', + 'task_id': task_id, + 'progress': progress, + 'total': total_channels, + 'status': 'running', + 'message': f'Updated {updated_count} channel names...', + 'updated_count': updated_count + }) + + # Send completion notification + send_websocket_update('updates', 'update', { + 'type': 'epg_name_setting_progress', + 'task_id': task_id, + 'progress': total_channels, + 'total': total_channels, + 'status': 'completed', + 'message': f'Successfully updated {updated_count} channel names from EPG data', + 'updated_count': updated_count, + 'error_count': len(errors), + 'errors': errors + }) + + logger.info(f"EPG name setting task completed. 
Updated {updated_count} channels") + return { + 'status': 'completed', + 'updated_count': updated_count, + 'error_count': len(errors), + 'errors': errors + } + + except Exception as e: + logger.error(f"EPG name setting task failed: {e}") + send_websocket_update('updates', 'update', { + 'type': 'epg_name_setting_progress', + 'task_id': task_id, + 'progress': 0, + 'total': total_channels, + 'status': 'failed', + 'message': f'Task failed: {str(e)}', + 'error': str(e) + }) + raise + + +@shared_task(bind=True) +def set_channels_logos_from_epg(self, channel_ids): + """ + Celery task to set channel logos from EPG data for multiple channels + Creates logos from EPG icon URLs if they don't exist + """ + from .models import Logo + from core.utils import send_websocket_update + import requests + from urllib.parse import urlparse + + task_id = self.request.id + total_channels = len(channel_ids) + updated_count = 0 + created_logos_count = 0 + errors = [] + + try: + logger.info(f"Starting EPG logo setting task for {total_channels} channels") + + # Send initial progress + send_websocket_update('updates', 'update', { + 'type': 'epg_logo_setting_progress', + 'task_id': task_id, + 'progress': 0, + 'total': total_channels, + 'status': 'running', + 'message': 'Starting EPG logo setting...' 
+ }) + + batch_size = 50 # Smaller batch for logo processing + for i in range(0, total_channels, batch_size): + batch_ids = channel_ids[i:i + batch_size] + batch_updates = [] + + # Get channels and their EPG data + channels = Channel.objects.filter(id__in=batch_ids).select_related('epg_data', 'logo') + + for channel in channels: + try: + if channel.epg_data and channel.epg_data.icon_url: + icon_url = channel.epg_data.icon_url.strip() + + # Try to find existing logo with this URL + try: + logo = Logo.objects.get(url=icon_url) + except Logo.DoesNotExist: + # Create new logo from EPG icon URL + try: + # Generate a name for the logo + logo_name = channel.epg_data.name or f"Logo for {channel.epg_data.tvg_id}" + + # Create the logo record + logo = Logo.objects.create( + name=logo_name, + url=icon_url + ) + created_logos_count += 1 + logger.info(f"Created new logo from EPG: {logo_name} - {icon_url}") + + except Exception as create_error: + errors.append(f"Channel {channel.id}: Failed to create logo from {icon_url}: {str(create_error)}") + logger.error(f"Failed to create logo for channel {channel.id}: {create_error}") + continue + + # Update channel logo if different + if channel.logo != logo: + channel.logo = logo + batch_updates.append(channel) + updated_count += 1 + + except Exception as e: + errors.append(f"Channel {channel.id}: {str(e)}") + logger.error(f"Error processing channel {channel.id}: {e}") + + # Bulk update the batch + if batch_updates: + Channel.objects.bulk_update(batch_updates, ['logo']) + + # Send progress update + progress = min(i + batch_size, total_channels) + send_websocket_update('updates', 'update', { + 'type': 'epg_logo_setting_progress', + 'task_id': task_id, + 'progress': progress, + 'total': total_channels, + 'status': 'running', + 'message': f'Updated {updated_count} channel logos, created {created_logos_count} new logos...', + 'updated_count': updated_count, + 'created_logos_count': created_logos_count + }) + + # Send completion notification 
+ send_websocket_update('updates', 'update', { + 'type': 'epg_logo_setting_progress', + 'task_id': task_id, + 'progress': total_channels, + 'total': total_channels, + 'status': 'completed', + 'message': f'Successfully updated {updated_count} channel logos and created {created_logos_count} new logos from EPG data', + 'updated_count': updated_count, + 'created_logos_count': created_logos_count, + 'error_count': len(errors), + 'errors': errors + }) + + logger.info(f"EPG logo setting task completed. Updated {updated_count} channels, created {created_logos_count} logos") + return { + 'status': 'completed', + 'updated_count': updated_count, + 'created_logos_count': created_logos_count, + 'error_count': len(errors), + 'errors': errors + } + + except Exception as e: + logger.error(f"EPG logo setting task failed: {e}") + send_websocket_update('updates', 'update', { + 'type': 'epg_logo_setting_progress', + 'task_id': task_id, + 'progress': 0, + 'total': total_channels, + 'status': 'failed', + 'message': f'Task failed: {str(e)}', + 'error': str(e) + }) + raise diff --git a/apps/epg/migrations/0016_epgdata_icon_url.py b/apps/epg/migrations/0016_epgdata_icon_url.py new file mode 100644 index 00000000..b934b024 --- /dev/null +++ b/apps/epg/migrations/0016_epgdata_icon_url.py @@ -0,0 +1,18 @@ +# Generated by Django 5.2.4 on 2025-09-16 22:01 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('epg', '0015_alter_programdata_custom_properties'), + ] + + operations = [ + migrations.AddField( + model_name='epgdata', + name='icon_url', + field=models.URLField(blank=True, max_length=500, null=True), + ), + ] diff --git a/apps/epg/models.py b/apps/epg/models.py index 22f2bd28..3ed33ab2 100644 --- a/apps/epg/models.py +++ b/apps/epg/models.py @@ -127,6 +127,7 @@ class EPGData(models.Model): # and a name (which might simply be the tvg_id if no real channel exists). 
tvg_id = models.CharField(max_length=255, null=True, blank=True, db_index=True) name = models.CharField(max_length=255) + icon_url = models.URLField(max_length=500, null=True, blank=True) epg_source = models.ForeignKey( EPGSource, on_delete=models.CASCADE, diff --git a/apps/epg/serializers.py b/apps/epg/serializers.py index 2f97cebf..85186cae 100644 --- a/apps/epg/serializers.py +++ b/apps/epg/serializers.py @@ -52,5 +52,6 @@ class EPGDataSerializer(serializers.ModelSerializer): 'id', 'tvg_id', 'name', + 'icon_url', 'epg_source', ] diff --git a/apps/epg/tasks.py b/apps/epg/tasks.py index 0d0ebbb3..dc0fdbe6 100644 --- a/apps/epg/tasks.py +++ b/apps/epg/tasks.py @@ -873,10 +873,12 @@ def parse_channels_only(source): tvg_id = elem.get('id', '').strip() if tvg_id: display_name = None + icon_url = None for child in elem: if child.tag == 'display-name' and child.text: display_name = child.text.strip() - break + elif child.tag == 'icon': + icon_url = child.get('src', '').strip() if not display_name: display_name = tvg_id @@ -894,17 +896,24 @@ def parse_channels_only(source): epgs_to_create.append(EPGData( tvg_id=tvg_id, name=display_name, + icon_url=icon_url, epg_source=source, )) logger.debug(f"[parse_channels_only] Added new channel to epgs_to_create 1: {tvg_id} - {display_name}") processed_channels += 1 continue - # We use the cached object to check if the name has changed + # We use the cached object to check if the name or icon_url has changed epg_obj = existing_epgs[tvg_id] + needs_update = False if epg_obj.name != display_name: - # Only update if the name actually changed epg_obj.name = display_name + needs_update = True + if epg_obj.icon_url != icon_url: + epg_obj.icon_url = icon_url + needs_update = True + + if needs_update: epgs_to_update.append(epg_obj) logger.debug(f"[parse_channels_only] Added channel to update to epgs_to_update: {tvg_id} - {display_name}") else: @@ -915,6 +924,7 @@ def parse_channels_only(source): epgs_to_create.append(EPGData( 
tvg_id=tvg_id, name=display_name, + icon_url=icon_url, epg_source=source, )) logger.debug(f"[parse_channels_only] Added new channel to epgs_to_create 2: {tvg_id} - {display_name}") @@ -937,7 +947,7 @@ def parse_channels_only(source): logger.info(f"[parse_channels_only] Bulk updating {len(epgs_to_update)} EPG entries") if process: logger.info(f"[parse_channels_only] Memory before bulk_update: {process.memory_info().rss / 1024 / 1024:.2f} MB") - EPGData.objects.bulk_update(epgs_to_update, ["name"]) + EPGData.objects.bulk_update(epgs_to_update, ["name", "icon_url"]) if process: logger.info(f"[parse_channels_only] Memory after bulk_update: {process.memory_info().rss / 1024 / 1024:.2f} MB") epgs_to_update = [] @@ -1004,7 +1014,7 @@ def parse_channels_only(source): logger.debug(f"[parse_channels_only] Created final batch of {len(epgs_to_create)} EPG entries") if epgs_to_update: - EPGData.objects.bulk_update(epgs_to_update, ["name"]) + EPGData.objects.bulk_update(epgs_to_update, ["name", "icon_url"]) logger.debug(f"[parse_channels_only] Updated final batch of {len(epgs_to_update)} EPG entries") if process: logger.debug(f"[parse_channels_only] Memory after final batch creation: {process.memory_info().rss / 1024 / 1024:.2f} MB") diff --git a/frontend/src/api.js b/frontend/src/api.js index a1c761c6..01186bf6 100644 --- a/frontend/src/api.js +++ b/frontend/src/api.js @@ -516,6 +516,52 @@ export default class API { } } + static async setChannelNamesFromEpg(channelIds) { + try { + const response = await request( + `${host}/api/channels/channels/set-names-from-epg/`, + { + method: 'POST', + body: { channel_ids: channelIds }, + } + ); + + notifications.show({ + title: 'Task Started', + message: response.message, + color: 'blue', + }); + + return response; + } catch (e) { + errorNotification('Failed to start EPG name setting task', e); + throw e; + } + } + + static async setChannelLogosFromEpg(channelIds) { + try { + const response = await request( + 
`${host}/api/channels/channels/set-logos-from-epg/`, + { + method: 'POST', + body: { channel_ids: channelIds }, + } + ); + + notifications.show({ + title: 'Task Started', + message: response.message, + color: 'blue', + }); + + return response; + } catch (e) { + errorNotification('Failed to start EPG logo setting task', e); + throw e; + } + } + static async assignChannelNumbers(channelIds, startingNum = 1) { try { const response = await request(`${host}/api/channels/channels/assign/`, { diff --git a/frontend/src/components/forms/Channel.jsx b/frontend/src/components/forms/Channel.jsx index 610f112e..f8c0f15b 100644 --- a/frontend/src/components/forms/Channel.jsx +++ b/frontend/src/components/forms/Channel.jsx @@ -207,21 +207,19 @@ const ChannelForm = ({ channel = null, isOpen, onClose }) => { } const tvg = tvgsById[epgDataId]; - if (!tvg || !tvg.name) { + if (!tvg || !tvg.icon_url) { notifications.show({ - title: 'No EPG Name', - message: 'EPG data does not have a name to match against logos.', + title: 'No EPG Icon', + message: 'EPG data does not have an icon URL.', color: 'orange', }); return; } try { - // Try to find a logo that matches the EPG name - const matchingLogo = Object.values(logos).find( - (logo) => - logo.name.toLowerCase().includes(tvg.name.toLowerCase()) || - tvg.name.toLowerCase().includes(logo.name.toLowerCase()) + // Try to find a logo that matches the EPG icon URL + let matchingLogo = Object.values(logos).find( + (logo) => logo.url === tvg.icon_url ); if (matchingLogo) { @@ -232,11 +230,47 @@ const ChannelForm = ({ channel = null, isOpen, onClose }) => { color: 'green', }); } else { + // Logo doesn't exist - create it notifications.show({ - title: 'No Matching Logo', - message: `No existing logo found that matches "${tvg.name}". 
Consider uploading a logo or using the smart logo selection.`, - color: 'orange', + id: 'creating-logo', + title: 'Creating Logo', + message: `Creating new logo from EPG icon URL...`, + loading: true, }); + + try { + const newLogoData = { + name: tvg.name || `Logo for ${tvg.icon_url}`, + url: tvg.icon_url, + }; + + // Create logo by calling the Logo API directly + const newLogo = await API.createLogo(newLogoData); + + formik.setFieldValue('logo_id', newLogo.id); + + // Refresh logos to update the cache + await ensureLogosLoaded(); + + notifications.update({ + id: 'creating-logo', + title: 'Success', + message: `Created and assigned new logo "${newLogo.name}"`, + loading: false, + color: 'green', + autoClose: 5000, + }); + } catch (createError) { + notifications.update({ + id: 'creating-logo', + title: 'Error', + message: 'Failed to create logo from EPG icon URL', + loading: false, + color: 'red', + autoClose: 5000, + }); + throw createError; + } } } catch (error) { notifications.show({ @@ -751,7 +785,7 @@ const ChannelForm = ({ channel = null, isOpen, onClose }) => { variant="light" onClick={handleSetLogoFromEpg} disabled={!formik.values.epg_data_id} - title="Find matching logo based on EPG name" + title="Find matching logo based on EPG icon URL" > Use EPG Logo diff --git a/frontend/src/components/forms/ChannelBatch.jsx b/frontend/src/components/forms/ChannelBatch.jsx index 3492d6ac..ad61fb26 100644 --- a/frontend/src/components/forms/ChannelBatch.jsx +++ b/frontend/src/components/forms/ChannelBatch.jsx @@ -2,9 +2,7 @@ import React, { useState, useEffect, useMemo, useRef } from 'react'; import useChannelsStore from '../../store/channels'; import API from '../../api'; import useStreamProfilesStore from '../../store/streamProfiles'; -import useEPGsStore from '../../store/epgs'; import ChannelGroupForm from './ChannelGroup'; -import { useLogoSelection } from '../../hooks/useSmartLogos'; import { Box, Button, @@ -38,18 +36,8 @@ const ChannelBatchForm = ({ channelIds, 
isOpen, onClose }) => { const groupListRef = useRef(null); const channelGroups = useChannelsStore((s) => s.channelGroups); - const canEditChannelGroup = useChannelsStore((s) => s.canEditChannelGroup); const streamProfiles = useStreamProfilesStore((s) => s.profiles); - const epgs = useEPGsStore((s) => s.epgs); - const tvgs = useEPGsStore((s) => s.tvgs); - const tvgsById = useEPGsStore((s) => s.tvgsById); - - const { - logos, - ensureLogosLoaded, - isLoading: logosLoading, - } = useLogoSelection(); const [channelGroupModelOpen, setChannelGroupModalOpen] = useState(false); const [selectedChannelGroup, setSelectedChannelGroup] = useState('-1'); @@ -157,47 +145,24 @@ const ChannelBatchForm = ({ channelIds, isOpen, onClose }) => { } try { - const channelsMap = useChannelsStore.getState().channels; - const updates = []; - - for (const id of channelIds) { - const channel = channelsMap[id]; - if (channel && channel.epg_data_id) { - const tvg = tvgsById[channel.epg_data_id]; - if (tvg && tvg.name) { - updates.push({ - id, - name: tvg.name, - }); - } - } - } - - if (updates.length === 0) { - notifications.show({ - title: 'No Updates Available', - message: 'No selected channels have EPG data with names.', - color: 'orange', - }); - return; - } - - await API.bulkUpdateChannels(updates); - await Promise.all([ - API.requeryChannels(), - useChannelsStore.getState().fetchChannels(), - ]); + // Start the backend task + await API.setChannelNamesFromEpg(channelIds); + // The task will send WebSocket updates for progress + // Just show that it started successfully notifications.show({ - title: 'Success', - message: `Updated names for ${updates.length} channels from EPG data.`, - color: 'green', + title: 'Task Started', + message: `Started setting names from EPG for ${channelIds.length} channels. 
Progress will be shown in notifications.`, + color: 'blue', }); + + // Close the modal since the task is now running in background + onClose(); } catch (error) { - console.error('Failed to set names from EPG:', error); + console.error('Failed to start EPG name setting task:', error); notifications.show({ title: 'Error', - message: 'Failed to set names from EPG data.', + message: 'Failed to start EPG name setting task.', color: 'red', }); } @@ -214,63 +179,24 @@ const ChannelBatchForm = ({ channelIds, isOpen, onClose }) => { } try { - // Ensure logos are loaded first - await ensureLogosLoaded(); - - const channelsMap = useChannelsStore.getState().channels; - const updates = []; - - for (const id of channelIds) { - const channel = channelsMap[id]; - if (channel && channel.epg_data_id) { - const tvg = tvgsById[channel.epg_data_id]; - if (tvg && tvg.name) { - // Try to find a matching logo - const matchingLogo = Object.values(logos).find( - (logo) => - logo.name.toLowerCase().includes(tvg.name.toLowerCase()) || - tvg.name.toLowerCase().includes(logo.name.toLowerCase()) - ); - - if (matchingLogo) { - updates.push({ - id, - logo_id: matchingLogo.id, - }); - } - } - } - } - - if (updates.length === 0) { - notifications.show({ - title: 'No Matching Logos', - message: - 'No matching logos found for the selected channels based on their EPG names.', - color: 'orange', - }); - return; - } - - await API.bulkUpdateChannels(updates); - - // Refresh both channels and logos data - await Promise.all([ - API.requeryChannels(), - useChannelsStore.getState().fetchChannels(), - ensureLogosLoaded(), // Ensure logos are refreshed - ]); + // Start the backend task + await API.setChannelLogosFromEpg(channelIds); + // The task will send WebSocket updates for progress + // Just show that it started successfully notifications.show({ - title: 'Success', - message: `Updated logos for ${updates.length} channels based on EPG names.`, - color: 'green', + title: 'Task Started', + message: `Started 
setting logos from EPG for ${channelIds.length} channels. Progress will be shown in notifications.`, + color: 'blue', }); + + // Close the modal since the task is now running in background + onClose(); } catch (error) { - console.error('Failed to set logos from EPG:', error); + console.error('Failed to start EPG logo setting task:', error); notifications.show({ title: 'Error', - message: 'Failed to set logos from EPG data.', + message: 'Failed to start EPG logo setting task.', color: 'red', }); } From 3cb5a061c9d2d61ab824dd7f6275bfb9b39aab59 Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Tue, 16 Sep 2025 17:35:38 -0500 Subject: [PATCH 017/119] Show progress as notifications. --- frontend/src/WebSocket.jsx | 109 +++++++++++++++++++++++++++++++++++++ 1 file changed, 109 insertions(+) diff --git a/frontend/src/WebSocket.jsx b/frontend/src/WebSocket.jsx index 2c57a37e..b2b733c2 100644 --- a/frontend/src/WebSocket.jsx +++ b/frontend/src/WebSocket.jsx @@ -375,6 +375,115 @@ export const WebsocketProvider = ({ children }) => { break; } + case 'epg_logo_setting_progress': { + const progress = parsedEvent.data; + const id = 'epg-logo-setting-progress'; + + if (progress.status === 'running' && progress.progress === 0) { + // Initial message + notifications.show({ + id, + title: 'Setting Logos from EPG', + message: `Processing ${progress.total} channels...`, + color: 'blue.5', + autoClose: false, + withCloseButton: false, + loading: true, + }); + } else if (progress.status === 'running') { + // Progress update + let message = `Processed ${progress.progress} of ${progress.total} channels`; + if (progress.updated_count !== undefined) { + message += ` (${progress.updated_count} updated)`; + } + if (progress.created_logos_count !== undefined) { + message += `, created ${progress.created_logos_count} logos`; + } + + notifications.update({ + id, + title: 'Setting Logos from EPG', + message, + color: 'blue.5', + autoClose: false, + withCloseButton: false, + loading: true, + }); + 
} else if (progress.status === 'completed') { + notifications.update({ + id, + title: 'Logo Setting Complete', + message: `Successfully updated ${progress.updated_count || 0} channel logos${progress.created_logos_count ? `, created ${progress.created_logos_count} new logos` : ''}`, + color: progress.updated_count > 0 ? 'green.5' : 'orange', + loading: false, + autoClose: 6000, + }); + // Refresh channels data + try { + await useChannelsStore.getState().fetchChannels(); + } catch (e) { + console.warn( + 'Failed to refresh channels after logo setting:', + e + ); + } + } + break; + } + + case 'epg_name_setting_progress': { + const progress = parsedEvent.data; + const id = 'epg-name-setting-progress'; + + if (progress.status === 'running' && progress.progress === 0) { + // Initial message + notifications.show({ + id, + title: 'Setting Names from EPG', + message: `Processing ${progress.total} channels...`, + color: 'blue.5', + autoClose: false, + withCloseButton: false, + loading: true, + }); + } else if (progress.status === 'running') { + // Progress update + let message = `Processed ${progress.progress} of ${progress.total} channels`; + if (progress.updated_count !== undefined) { + message += ` (${progress.updated_count} updated)`; + } + + notifications.update({ + id, + title: 'Setting Names from EPG', + message, + color: 'blue.5', + autoClose: false, + withCloseButton: false, + loading: true, + }); + } else if (progress.status === 'completed') { + notifications.update({ + id, + title: 'Name Setting Complete', + message: `Successfully updated ${progress.updated_count || 0} channel names from EPG data`, + color: progress.updated_count > 0 ? 
'green.5' : 'orange', + loading: false, + autoClose: 6000, + }); + // Refresh channels data + try { + await useChannelsStore.getState().fetchChannels(); + } catch (e) { + console.warn( + 'Failed to refresh channels after name setting:', + e + ); + } + } + break; + } + case 'm3u_profile_test': setProfilePreview( parsedEvent.data.search_preview, From 7e13e511986b5ba5f550bc776951b3e0e3bed6eb Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Tue, 16 Sep 2025 17:55:55 -0500 Subject: [PATCH 018/119] Update the frontend on logo change. --- frontend/src/WebSocket.jsx | 14 +++++++++++++- 1 file changed, 13 insertions(+), 1 deletion(-) diff --git a/frontend/src/WebSocket.jsx b/frontend/src/WebSocket.jsx index b2b733c2..249ee437 100644 --- a/frontend/src/WebSocket.jsx +++ b/frontend/src/WebSocket.jsx @@ -418,9 +418,21 @@ export const WebsocketProvider = ({ children }) => { loading: false, autoClose: 6000, }); - // Refresh channels data + // Refresh channels data and logos try { + await API.requeryChannels(); await useChannelsStore.getState().fetchChannels(); + + // Get updated channel data and extract logo IDs to load + const channels = useChannelsStore.getState().channels; + const logoIds = Object.values(channels) + .filter((channel) => channel.logo_id) + .map((channel) => channel.logo_id); + + // Fetch the specific logos that were just assigned + if (logoIds.length > 0) { + await useLogosStore.getState().fetchLogosByIds(logoIds); + } } catch (e) { console.warn( 'Failed to refresh channels after logo setting:', From 8b740fc3acf3c0ab3f840096ca0e794209f3901f Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Tue, 16 Sep 2025 18:49:02 -0500 Subject: [PATCH 019/119] Move buttons for use epg name and use epg logo . 
--- frontend/src/components/forms/Channel.jsx | 74 +++++++++++++---------- 1 file changed, 43 insertions(+), 31 deletions(-) diff --git a/frontend/src/components/forms/Channel.jsx b/frontend/src/components/forms/Channel.jsx index f8c0f15b..e9e6e686 100644 --- a/frontend/src/components/forms/Channel.jsx +++ b/frontend/src/components/forms/Channel.jsx @@ -464,27 +464,32 @@ const ChannelForm = ({ channel = null, isOpen, onClose }) => { - - - - + + Channel Name + {formik.values.epg_data_id && ( + + )} + + } + value={formik.values.name} + onChange={formik.handleChange} + error={formik.errors.name ? formik.touched.name : ''} + size="xs" + style={{ flex: 1 }} + /> { + Logo + {formik.values.epg_data_id && ( + + )} + + } readOnly value={logos[formik.values.logo_id]?.name || 'Default'} onClick={() => { @@ -780,15 +801,6 @@ const ChannelForm = ({ channel = null, isOpen, onClose }) => { alt="channel logo" style={{ height: 40 }} /> - From 2e5280c46a9a53c3c01a27d256e47a82fb3f6a7b Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Tue, 16 Sep 2025 19:17:31 -0500 Subject: [PATCH 020/119] Remove unneeded logo call. --- frontend/src/components/forms/Channel.jsx | 3 --- 1 file changed, 3 deletions(-) diff --git a/frontend/src/components/forms/Channel.jsx b/frontend/src/components/forms/Channel.jsx index e9e6e686..9e329614 100644 --- a/frontend/src/components/forms/Channel.jsx +++ b/frontend/src/components/forms/Channel.jsx @@ -249,9 +249,6 @@ const ChannelForm = ({ channel = null, isOpen, onClose }) => { formik.setFieldValue('logo_id', newLogo.id); - // Refresh logos to update the cache - await ensureLogosLoaded(); - notifications.update({ id: 'creating-logo', title: 'Success', From ab3350d08d3ed84c358ad0885aa35da2f58bb909 Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Tue, 16 Sep 2025 19:41:11 -0500 Subject: [PATCH 021/119] Search all logos instead of just channel assignable. 
--- frontend/src/components/forms/Channel.jsx | 8 +++- frontend/src/store/logos.jsx | 50 +++++++++++++++++++---- 2 files changed, 49 insertions(+), 9 deletions(-) diff --git a/frontend/src/components/forms/Channel.jsx b/frontend/src/components/forms/Channel.jsx index 9e329614..48edb3ac 100644 --- a/frontend/src/components/forms/Channel.jsx +++ b/frontend/src/components/forms/Channel.jsx @@ -9,6 +9,7 @@ import ChannelGroupForm from './ChannelGroup'; import usePlaylistsStore from '../../store/playlists'; import logo from '../../images/logo.png'; import { useChannelLogoSelection } from '../../hooks/useSmartLogos'; +import useLogosStore from '../../store/logos'; import LazyLogo from '../LazyLogo'; import { Box, @@ -56,6 +57,9 @@ const ChannelForm = ({ channel = null, isOpen, onClose }) => { isLoading: logosLoading, } = useChannelLogoSelection(); + // Import the full logos store for duplicate checking + const allLogos = useLogosStore((s) => s.logos); + // Ensure logos are loaded when component mounts useEffect(() => { ensureLogosLoaded(); @@ -217,8 +221,8 @@ const ChannelForm = ({ channel = null, isOpen, onClose }) => { } try { - // Try to find a logo that matches the EPG icon URL - let matchingLogo = Object.values(logos).find( + // Try to find a logo that matches the EPG icon URL - check ALL logos to avoid duplicates + let matchingLogo = Object.values(allLogos).find( (logo) => logo.url === tvg.icon_url ); diff --git a/frontend/src/store/logos.jsx b/frontend/src/store/logos.jsx index 4a0b945c..eb2a7597 100644 --- a/frontend/src/store/logos.jsx +++ b/frontend/src/store/logos.jsx @@ -3,7 +3,7 @@ import api from '../api'; const useLogosStore = create((set, get) => ({ logos: {}, - channelLogos: {}, // Separate state for channel-assignable logos + channelLogos: {}, // Keep this for simplicity, but we'll be more careful about when we populate it isLoading: false, backgroundLoading: false, hasLoadedAll: false, // Track if we've loaded all logos @@ -21,12 +21,29 @@ const 
useLogosStore = create((set, get) => ({ }, addLogo: (newLogo) => - set((state) => ({ - logos: { + set((state) => { + // Add to main logos store always + const newLogos = { ...state.logos, [newLogo.id]: { ...newLogo }, - }, - })), + }; + + // Add to channelLogos if the user has loaded channel-assignable logos + // This means they're using channel forms and the new logo should be available there + // Newly created logos are channel-assignable (they start unused) + let newChannelLogos = state.channelLogos; + if (state.hasLoadedChannelLogos) { + newChannelLogos = { + ...state.channelLogos, + [newLogo.id]: { ...newLogo }, + }; + } + + return { + logos: newLogos, + channelLogos: newChannelLogos, + }; + }), updateLogo: (logo) => set((state) => ({ @@ -34,13 +51,25 @@ const useLogosStore = create((set, get) => ({ ...state.logos, [logo.id]: { ...logo }, }, + // Update in channelLogos if it exists there + channelLogos: state.channelLogos[logo.id] + ? { + ...state.channelLogos, + [logo.id]: { ...logo }, + } + : state.channelLogos, })), removeLogo: (logoId) => set((state) => { const newLogos = { ...state.logos }; + const newChannelLogos = { ...state.channelLogos }; delete newLogos[logoId]; - return { logos: newLogos }; + delete newChannelLogos[logoId]; + return { + logos: newLogos, + channelLogos: newChannelLogos, + }; }), // Smart loading methods @@ -155,8 +184,15 @@ const useLogosStore = create((set, get) => ({ console.log(`Fetched ${logos.length} channel-assignable logos`); - // Store in separate channelLogos state + // Store in both places, but this is intentional and only when specifically requested set({ + logos: { + ...get().logos, // Keep existing logos + ...logos.reduce((acc, logo) => { + acc[logo.id] = { ...logo }; + return acc; + }, {}), + }, channelLogos: logos.reduce((acc, logo) => { acc[logo.id] = { ...logo }; return acc; From 9ef2aa966d2c3344d00a1304bbdebc0894a6173f Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Tue, 16 Sep 2025 19:44:41 -0500 Subject: [PATCH 
022/119] Requery channels when setting channel names from epg. --- frontend/src/WebSocket.jsx | 1 + 1 file changed, 1 insertion(+) diff --git a/frontend/src/WebSocket.jsx b/frontend/src/WebSocket.jsx index 249ee437..1101c9f8 100644 --- a/frontend/src/WebSocket.jsx +++ b/frontend/src/WebSocket.jsx @@ -485,6 +485,7 @@ export const WebsocketProvider = ({ children }) => { }); // Refresh channels data try { + await API.requeryChannels(); await useChannelsStore.getState().fetchChannels(); } catch (e) { console.warn( From 00da23332274a8ba9b24213dbe89b08631d3bc00 Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Tue, 16 Sep 2025 19:49:41 -0500 Subject: [PATCH 023/119] Rename logos variable to channelLogos to avoid future confusion. --- frontend/src/components/forms/Channel.jsx | 12 ++++++++---- 1 file changed, 8 insertions(+), 4 deletions(-) diff --git a/frontend/src/components/forms/Channel.jsx b/frontend/src/components/forms/Channel.jsx index 48edb3ac..5941f9d4 100644 --- a/frontend/src/components/forms/Channel.jsx +++ b/frontend/src/components/forms/Channel.jsx @@ -52,7 +52,7 @@ const ChannelForm = ({ channel = null, isOpen, onClose }) => { const canEditChannelGroup = useChannelsStore((s) => s.canEditChannelGroup); const { - logos, + logos: channelLogos, ensureLogosLoaded, isLoading: logosLoading, } = useChannelLogoSelection(); @@ -410,9 +410,11 @@ const ChannelForm = ({ channel = null, isOpen, onClose }) => { // Memoize logo options to prevent infinite re-renders during background loading const logoOptions = useMemo(() => { - const options = [{ id: '0', name: 'Default' }].concat(Object.values(logos)); + const options = [{ id: '0', name: 'Default' }].concat( + Object.values(channelLogos) + ); return options; - }, [logos]); // Only depend on logos object + }, [channelLogos]); // Only depend on channelLogos object // Update the handler for when channel group modal is closed const handleChannelGroupModalClose = (newGroup) => { @@ -689,7 +691,9 @@ const ChannelForm = ({ 
channel = null, isOpen, onClose }) => { } readOnly - value={logos[formik.values.logo_id]?.name || 'Default'} + value={ + channelLogos[formik.values.logo_id]?.name || 'Default' + } onClick={() => { console.log( 'Logo input clicked, setting popover opened to true' From edc18e07fe09bfbf955077c3a2f1231507f37a03 Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Tue, 16 Sep 2025 20:10:49 -0500 Subject: [PATCH 024/119] Auto-focus filter for epg. --- frontend/src/components/forms/Channel.jsx | 1 + 1 file changed, 1 insertion(+) diff --git a/frontend/src/components/forms/Channel.jsx b/frontend/src/components/forms/Channel.jsx index 5941f9d4..53ecddd2 100644 --- a/frontend/src/components/forms/Channel.jsx +++ b/frontend/src/components/forms/Channel.jsx @@ -987,6 +987,7 @@ const ChannelForm = ({ channel = null, isOpen, onClose }) => { } mb="xs" size="xs" + autoFocus /> From 424a4506541ae78d4465c16f67b7b9ef0a268dbc Mon Sep 17 00:00:00 2001 From: Dispatcharr Date: Thu, 18 Sep 2025 10:23:16 -0500 Subject: [PATCH 025/119] DVR Features and bug fixes Added ability to use custom comskip.ini Added series recording without reliance on EPG Fixed comskip bug Fixed timezone mismatch when scheduling DVR recordings No migrations completed yet --- apps/channels/api_urls.py | 4 + apps/channels/api_views.py | 92 ++++- .../migrations/0026_recurringrecordingrule.py | 31 ++ apps/channels/models.py | 30 ++ apps/channels/serializers.py | 36 ++ apps/channels/tasks.py | 230 ++++++++++-- apps/channels/tests/__init__.py | 0 apps/channels/tests/test_recurring_rules.py | 40 +++ core/models.py | 22 ++ dispatcharr/settings.py | 4 + frontend/src/api.js | 64 ++++ frontend/src/components/forms/Recording.jsx | 327 ++++++++++++++---- frontend/src/pages/DVR.jsx | 168 +++++++-- frontend/src/pages/Settings.jsx | 104 ++++++ frontend/src/store/channels.jsx | 18 + 15 files changed, 1056 insertions(+), 114 deletions(-) create mode 100644 apps/channels/migrations/0026_recurringrecordingrule.py create mode 100644 
apps/channels/tests/__init__.py create mode 100644 apps/channels/tests/test_recurring_rules.py diff --git a/apps/channels/api_urls.py b/apps/channels/api_urls.py index 7cfdc1b1..7999abd9 100644 --- a/apps/channels/api_urls.py +++ b/apps/channels/api_urls.py @@ -13,12 +13,14 @@ from .api_views import ( UpdateChannelMembershipAPIView, BulkUpdateChannelMembershipAPIView, RecordingViewSet, + RecurringRecordingRuleViewSet, GetChannelStreamsAPIView, SeriesRulesAPIView, DeleteSeriesRuleAPIView, EvaluateSeriesRulesAPIView, BulkRemoveSeriesRecordingsAPIView, BulkDeleteUpcomingRecordingsAPIView, + ComskipConfigAPIView, ) app_name = 'channels' # for DRF routing @@ -30,6 +32,7 @@ router.register(r'channels', ChannelViewSet, basename='channel') router.register(r'logos', LogoViewSet, basename='logo') router.register(r'profiles', ChannelProfileViewSet, basename='profile') router.register(r'recordings', RecordingViewSet, basename='recording') +router.register(r'recurring-rules', RecurringRecordingRuleViewSet, basename='recurring-rule') urlpatterns = [ # Bulk delete is a single APIView, not a ViewSet @@ -46,6 +49,7 @@ urlpatterns = [ path('series-rules/bulk-remove/', BulkRemoveSeriesRecordingsAPIView.as_view(), name='bulk_remove_series_recordings'), path('series-rules//', DeleteSeriesRuleAPIView.as_view(), name='delete_series_rule'), path('recordings/bulk-delete-upcoming/', BulkDeleteUpcomingRecordingsAPIView.as_view(), name='bulk_delete_upcoming_recordings'), + path('dvr/comskip-config/', ComskipConfigAPIView.as_view(), name='comskip_config'), ] urlpatterns += router.urls diff --git a/apps/channels/api_views.py b/apps/channels/api_views.py index 7a3d5135..e7991220 100644 --- a/apps/channels/api_views.py +++ b/apps/channels/api_views.py @@ -28,6 +28,7 @@ from .models import ( ChannelProfile, ChannelProfileMembership, Recording, + RecurringRecordingRule, ) from .serializers import ( StreamSerializer, @@ -38,8 +39,17 @@ from .serializers import ( 
BulkChannelProfileMembershipSerializer, ChannelProfileSerializer, RecordingSerializer, + RecurringRecordingRuleSerializer, +) +from .tasks import ( + match_epg_channels, + evaluate_series_rules, + evaluate_series_rules_impl, + match_single_channel_epg, + match_selected_channels_epg, + sync_recurring_rule_impl, + purge_recurring_rule_impl, ) -from .tasks import match_epg_channels, evaluate_series_rules, evaluate_series_rules_impl, match_single_channel_epg, match_selected_channels_epg import django_filters from django_filters.rest_framework import DjangoFilterBackend from rest_framework.filters import SearchFilter, OrderingFilter @@ -49,10 +59,12 @@ from django.db.models import Q from django.http import StreamingHttpResponse, FileResponse, Http404 from django.utils import timezone import mimetypes +from django.conf import settings from rest_framework.pagination import PageNumberPagination + logger = logging.getLogger(__name__) @@ -1653,6 +1665,41 @@ class BulkUpdateChannelMembershipAPIView(APIView): return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) +class RecurringRecordingRuleViewSet(viewsets.ModelViewSet): + queryset = RecurringRecordingRule.objects.all().select_related("channel") + serializer_class = RecurringRecordingRuleSerializer + + def get_permissions(self): + return [IsAdmin()] + + def perform_create(self, serializer): + rule = serializer.save() + try: + sync_recurring_rule_impl(rule.id, drop_existing=True) + except Exception as err: + logger.warning(f"Failed to initialize recurring rule {rule.id}: {err}") + return rule + + def perform_update(self, serializer): + rule = serializer.save() + try: + if rule.enabled: + sync_recurring_rule_impl(rule.id, drop_existing=True) + else: + purge_recurring_rule_impl(rule.id) + except Exception as err: + logger.warning(f"Failed to resync recurring rule {rule.id}: {err}") + return rule + + def perform_destroy(self, instance): + rule_id = instance.id + super().perform_destroy(instance) + try: + 
purge_recurring_rule_impl(rule_id) + except Exception as err: + logger.warning(f"Failed to purge recordings for rule {rule_id}: {err}") + + class RecordingViewSet(viewsets.ModelViewSet): queryset = Recording.objects.all() serializer_class = RecordingSerializer @@ -1832,6 +1879,49 @@ class RecordingViewSet(viewsets.ModelViewSet): return response +class ComskipConfigAPIView(APIView): + """Upload or inspect the custom comskip.ini used by DVR processing.""" + + parser_classes = [MultiPartParser, FormParser] + + def get_permissions(self): + return [IsAdmin()] + + def get(self, request): + path = CoreSettings.get_dvr_comskip_custom_path() + exists = bool(path and os.path.exists(path)) + return Response({"path": path, "exists": exists}) + + def post(self, request): + uploaded = request.FILES.get("file") or request.FILES.get("comskip_ini") + if not uploaded: + return Response({"error": "No file provided"}, status=status.HTTP_400_BAD_REQUEST) + + name = (uploaded.name or "").lower() + if not name.endswith(".ini"): + return Response({"error": "Only .ini files are allowed"}, status=status.HTTP_400_BAD_REQUEST) + + if uploaded.size and uploaded.size > 1024 * 1024: + return Response({"error": "File too large (limit 1MB)"}, status=status.HTTP_400_BAD_REQUEST) + + dest_dir = os.path.join(settings.MEDIA_ROOT, "comskip") + os.makedirs(dest_dir, exist_ok=True) + dest_path = os.path.join(dest_dir, "comskip.ini") + + try: + with open(dest_path, "wb") as dest: + for chunk in uploaded.chunks(): + dest.write(chunk) + except Exception as e: + logger.error(f"Failed to save uploaded comskip.ini: {e}") + return Response({"error": "Unable to save file"}, status=status.HTTP_500_INTERNAL_SERVER_ERROR) + + # Persist path setting so DVR processing picks it up immediately + CoreSettings.set_dvr_comskip_custom_path(dest_path) + + return Response({"success": True, "path": dest_path, "exists": os.path.exists(dest_path)}) + + class BulkDeleteUpcomingRecordingsAPIView(APIView): """Delete all upcoming 
(future) recordings.""" def get_permissions(self): diff --git a/apps/channels/migrations/0026_recurringrecordingrule.py b/apps/channels/migrations/0026_recurringrecordingrule.py new file mode 100644 index 00000000..1b8cfdb8 --- /dev/null +++ b/apps/channels/migrations/0026_recurringrecordingrule.py @@ -0,0 +1,31 @@ +# Generated by Django 5.0.14 on 2025-09-18 14:56 + +import django.db.models.deletion +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('dispatcharr_channels', '0025_alter_channelgroupm3uaccount_custom_properties_and_more'), + ] + + operations = [ + migrations.CreateModel( + name='RecurringRecordingRule', + fields=[ + ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('days_of_week', models.JSONField(default=list)), + ('start_time', models.TimeField()), + ('end_time', models.TimeField()), + ('enabled', models.BooleanField(default=True)), + ('name', models.CharField(blank=True, max_length=255)), + ('created_at', models.DateTimeField(auto_now_add=True)), + ('updated_at', models.DateTimeField(auto_now=True)), + ('channel', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='recurring_rules', to='dispatcharr_channels.channel')), + ], + options={ + 'ordering': ['channel', 'start_time'], + }, + ), + ] diff --git a/apps/channels/models.py b/apps/channels/models.py index af66178d..e6e3bd7a 100644 --- a/apps/channels/models.py +++ b/apps/channels/models.py @@ -601,3 +601,33 @@ class Recording(models.Model): def __str__(self): return f"{self.channel.name} - {self.start_time} to {self.end_time}" + + +class RecurringRecordingRule(models.Model): + """Rule describing a recurring manual DVR schedule.""" + + channel = models.ForeignKey( + "Channel", + on_delete=models.CASCADE, + related_name="recurring_rules", + ) + days_of_week = models.JSONField(default=list) + start_time = models.TimeField() + end_time = models.TimeField() + 
enabled = models.BooleanField(default=True) + name = models.CharField(max_length=255, blank=True) + created_at = models.DateTimeField(auto_now_add=True) + updated_at = models.DateTimeField(auto_now=True) + + class Meta: + ordering = ["channel", "start_time"] + + def __str__(self): + channel_name = getattr(self.channel, "name", str(self.channel_id)) + return f"Recurring rule for {channel_name}" + + def cleaned_days(self): + try: + return sorted({int(d) for d in (self.days_of_week or []) if 0 <= int(d) <= 6}) + except Exception: + return [] diff --git a/apps/channels/serializers.py b/apps/channels/serializers.py index 51bfe0a0..d9b34549 100644 --- a/apps/channels/serializers.py +++ b/apps/channels/serializers.py @@ -10,6 +10,7 @@ from .models import ( ChannelProfile, ChannelProfileMembership, Recording, + RecurringRecordingRule, ) from apps.epg.serializers import EPGDataSerializer from core.models import StreamProfile @@ -454,6 +455,13 @@ class RecordingSerializer(serializers.ModelSerializer): start_time = data.get("start_time") end_time = data.get("end_time") + if start_time and timezone.is_naive(start_time): + start_time = timezone.make_aware(start_time, timezone.get_current_timezone()) + data["start_time"] = start_time + if end_time and timezone.is_naive(end_time): + end_time = timezone.make_aware(end_time, timezone.get_current_timezone()) + data["end_time"] = end_time + # If this is an EPG-based recording (program provided), apply global pre/post offsets try: cp = data.get("custom_properties") or {} @@ -497,3 +505,31 @@ class RecordingSerializer(serializers.ModelSerializer): raise serializers.ValidationError("End time must be after start time.") return data + + +class RecurringRecordingRuleSerializer(serializers.ModelSerializer): + class Meta: + model = RecurringRecordingRule + fields = "__all__" + read_only_fields = ["created_at", "updated_at"] + + def validate_days_of_week(self, value): + if not value: + raise serializers.ValidationError("Select at least one 
day of the week") + cleaned = [] + for entry in value: + try: + iv = int(entry) + except (TypeError, ValueError): + raise serializers.ValidationError("Days of week must be integers 0-6") + if iv < 0 or iv > 6: + raise serializers.ValidationError("Days of week must be between 0 (Monday) and 6 (Sunday)") + cleaned.append(iv) + return sorted(set(cleaned)) + + def validate(self, attrs): + start = attrs.get("start_time") or getattr(self.instance, "start_time", None) + end = attrs.get("end_time") or getattr(self.instance, "end_time", None) + if start and end and end <= start: + raise serializers.ValidationError("End time must be after start time") + return super().validate(attrs) diff --git a/apps/channels/tasks.py b/apps/channels/tasks.py index 2760d1a7..e92c4794 100755 --- a/apps/channels/tasks.py +++ b/apps/channels/tasks.py @@ -7,6 +7,7 @@ import requests import time import json import subprocess +import signal from datetime import datetime, timedelta import gc @@ -1095,6 +1096,130 @@ def reschedule_upcoming_recordings_for_offset_change(): return reschedule_upcoming_recordings_for_offset_change_impl() +def _notify_recordings_refresh(): + try: + from core.utils import send_websocket_update + send_websocket_update('updates', 'update', {"success": True, "type": "recordings_refreshed"}) + except Exception: + pass + + +def purge_recurring_rule_impl(rule_id: int) -> int: + """Remove all future recordings created by a recurring rule.""" + from django.utils import timezone + from .models import Recording + + now = timezone.now() + try: + removed, _ = Recording.objects.filter( + start_time__gte=now, + custom_properties__rule__id=rule_id, + ).delete() + except Exception: + removed = 0 + if removed: + _notify_recordings_refresh() + return removed + + +def sync_recurring_rule_impl(rule_id: int, drop_existing: bool = True, horizon_days: int = 14) -> int: + """Ensure recordings exist for a recurring rule within the scheduling horizon.""" + from django.utils import timezone + from 
.models import RecurringRecordingRule, Recording + + rule = RecurringRecordingRule.objects.filter(pk=rule_id).select_related("channel").first() + now = timezone.now() + removed = 0 + if drop_existing: + removed = purge_recurring_rule_impl(rule_id) + + if not rule or not rule.enabled: + return 0 + + days = rule.cleaned_days() + if not days: + return 0 + + tz = timezone.get_current_timezone() + horizon = now + timedelta(days=horizon_days) + start_date = now.date() + end_date = horizon.date() + total_created = 0 + + for offset in range((end_date - start_date).days + 1): + target_date = start_date + timedelta(days=offset) + if target_date.weekday() not in days: + continue + try: + start_dt = timezone.make_aware(datetime.combine(target_date, rule.start_time), tz) + end_dt = timezone.make_aware(datetime.combine(target_date, rule.end_time), tz) + except Exception: + continue + if end_dt <= start_dt or start_dt <= now: + continue + exists = Recording.objects.filter( + channel=rule.channel, + start_time=start_dt, + custom_properties__rule__id=rule.id, + ).exists() + if exists: + continue + description = rule.name or f"Recurring recording for {rule.channel.name}" + cp = { + "rule": { + "type": "recurring", + "id": rule.id, + "days_of_week": days, + "name": rule.name or "", + }, + "status": "scheduled", + "description": description, + "program": { + "title": rule.name or rule.channel.name, + "description": description, + "start_time": start_dt.isoformat(), + "end_time": end_dt.isoformat(), + }, + } + try: + Recording.objects.create( + channel=rule.channel, + start_time=start_dt, + end_time=end_dt, + custom_properties=cp, + ) + total_created += 1 + except Exception as err: + logger.warning(f"Failed to create recurring recording for rule {rule.id}: {err}") + + if removed or total_created: + _notify_recordings_refresh() + + return total_created + + +@shared_task +def rebuild_recurring_rule(rule_id: int, horizon_days: int = 14): + return sync_recurring_rule_impl(rule_id, 
drop_existing=True, horizon_days=horizon_days) + + +@shared_task +def maintain_recurring_recordings(): + from .models import RecurringRecordingRule + + total = 0 + for rule_id in RecurringRecordingRule.objects.filter(enabled=True).values_list("id", flat=True): + try: + total += sync_recurring_rule_impl(rule_id, drop_existing=False) + except Exception as err: + logger.warning(f"Recurring rule maintenance failed for {rule_id}: {err}") + return total + + +@shared_task +def purge_recurring_rule(rule_id: int): + return purge_recurring_rule_impl(rule_id) + @shared_task def _safe_name(s): try: @@ -1817,6 +1942,7 @@ def comskip_process_recording(recording_id: int): Safe to call even if comskip is not installed; stores status in custom_properties.comskip. """ import shutil + from django.db import DatabaseError from .models import Recording # Helper to broadcast status over websocket def _ws(status: str, extra: dict | None = None): @@ -1834,7 +1960,33 @@ def comskip_process_recording(recording_id: int): except Recording.DoesNotExist: return "not_found" - cp = rec.custom_properties or {} + cp = rec.custom_properties.copy() if isinstance(rec.custom_properties, dict) else {} + + def _persist_custom_properties(): + """Persist updated custom_properties without raising if the row disappeared.""" + try: + updated = Recording.objects.filter(pk=recording_id).update(custom_properties=cp) + if not updated: + logger.warning( + "Recording %s vanished before comskip status could be saved", + recording_id, + ) + return False + except DatabaseError as db_err: + logger.warning( + "Failed to persist comskip status for recording %s: %s", + recording_id, + db_err, + ) + return False + except Exception as unexpected: + logger.warning( + "Unexpected error while saving comskip status for recording %s: %s", + recording_id, + unexpected, + ) + return False + return True file_path = (cp or {}).get("file_path") if not file_path or not os.path.exists(file_path): return "no_file" @@ -1845,8 +1997,7 @@ 
def comskip_process_recording(recording_id: int): comskip_bin = shutil.which("comskip") if not comskip_bin: cp["comskip"] = {"status": "skipped", "reason": "comskip_not_installed"} - rec.custom_properties = cp - rec.save(update_fields=["custom_properties"]) + _persist_custom_properties() _ws('skipped', {"reason": "comskip_not_installed"}) return "comskip_missing" @@ -1858,24 +2009,59 @@ def comskip_process_recording(recording_id: int): try: cmd = [comskip_bin, "--output", os.path.dirname(file_path)] - # Prefer system ini if present to squelch warning and get sane defaults - for ini_path in ("/etc/comskip/comskip.ini", "/app/docker/comskip.ini"): - if os.path.exists(ini_path): + # Prefer user-specified INI, fall back to known defaults + ini_candidates = [] + try: + custom_ini = CoreSettings.get_dvr_comskip_custom_path() + if custom_ini: + ini_candidates.append(custom_ini) + except Exception as ini_err: + logger.debug(f"Unable to load custom comskip.ini path: {ini_err}") + ini_candidates.extend(["/etc/comskip/comskip.ini", "/app/docker/comskip.ini"]) + selected_ini = None + for ini_path in ini_candidates: + if ini_path and os.path.exists(ini_path): + selected_ini = ini_path cmd.extend([f"--ini={ini_path}"]) break cmd.append(file_path) - subprocess.run(cmd, check=True) + subprocess.run( + cmd, + check=True, + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + text=True, + ) + except subprocess.CalledProcessError as e: + stderr_tail = (e.stderr or "").strip().splitlines() + stderr_tail = stderr_tail[-5:] if stderr_tail else [] + detail = { + "status": "error", + "reason": "comskip_failed", + "returncode": e.returncode, + } + if e.returncode and e.returncode < 0: + try: + detail["signal"] = signal.Signals(-e.returncode).name + except Exception: + detail["signal"] = f"signal_{-e.returncode}" + if stderr_tail: + detail["stderr"] = "\n".join(stderr_tail) + if selected_ini: + detail["ini_path"] = selected_ini + cp["comskip"] = detail + _persist_custom_properties() + 
_ws('error', {"reason": "comskip_failed", "returncode": e.returncode}) + return "comskip_failed" except Exception as e: cp["comskip"] = {"status": "error", "reason": f"comskip_failed: {e}"} - rec.custom_properties = cp - rec.save(update_fields=["custom_properties"]) + _persist_custom_properties() _ws('error', {"reason": str(e)}) return "comskip_failed" if not os.path.exists(edl_path): cp["comskip"] = {"status": "error", "reason": "edl_not_found"} - rec.custom_properties = cp - rec.save(update_fields=["custom_properties"]) + _persist_custom_properties() _ws('error', {"reason": "edl_not_found"}) return "no_edl" @@ -1893,8 +2079,7 @@ def comskip_process_recording(recording_id: int): duration = _ffprobe_duration(file_path) if duration is None: cp["comskip"] = {"status": "error", "reason": "duration_unknown"} - rec.custom_properties = cp - rec.save(update_fields=["custom_properties"]) + _persist_custom_properties() _ws('error', {"reason": "duration_unknown"}) return "no_duration" @@ -1923,9 +2108,14 @@ def comskip_process_recording(recording_id: int): keep.append((cur, duration)) if not commercials or sum((e - s) for s, e in commercials) <= 0.5: - cp["comskip"] = {"status": "completed", "skipped": True, "edl": os.path.basename(edl_path)} - rec.custom_properties = cp - rec.save(update_fields=["custom_properties"]) + cp["comskip"] = { + "status": "completed", + "skipped": True, + "edl": os.path.basename(edl_path), + } + if selected_ini: + cp["comskip"]["ini_path"] = selected_ini + _persist_custom_properties() _ws('skipped', {"reason": "no_commercials", "commercials": 0}) return "no_commercials" @@ -1975,14 +2165,14 @@ def comskip_process_recording(recording_id: int): "segments_kept": len(parts), "commercials": len(commercials), } - rec.custom_properties = cp - rec.save(update_fields=["custom_properties"]) + if selected_ini: + cp["comskip"]["ini_path"] = selected_ini + _persist_custom_properties() _ws('completed', {"commercials": len(commercials), "segments_kept": 
len(parts)}) return "ok" except Exception as e: cp["comskip"] = {"status": "error", "reason": str(e)} - rec.custom_properties = cp - rec.save(update_fields=["custom_properties"]) + _persist_custom_properties() _ws('error', {"reason": str(e)}) return f"error:{e}" def _resolve_poster_for_program(channel_name, program): diff --git a/apps/channels/tests/__init__.py b/apps/channels/tests/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/apps/channels/tests/test_recurring_rules.py b/apps/channels/tests/test_recurring_rules.py new file mode 100644 index 00000000..982ecb93 --- /dev/null +++ b/apps/channels/tests/test_recurring_rules.py @@ -0,0 +1,40 @@ +from datetime import datetime, timedelta +from django.test import TestCase +from django.utils import timezone + +from apps.channels.models import Channel, RecurringRecordingRule, Recording +from apps.channels.tasks import sync_recurring_rule_impl, purge_recurring_rule_impl + + +class RecurringRecordingRuleTasksTests(TestCase): + def test_sync_recurring_rule_creates_and_purges_recordings(self): + now = timezone.now() + channel = Channel.objects.create(channel_number=1, name='Test Channel') + + start_time = (now + timedelta(minutes=15)).time().replace(second=0, microsecond=0) + end_time = (now + timedelta(minutes=75)).time().replace(second=0, microsecond=0) + + rule = RecurringRecordingRule.objects.create( + channel=channel, + days_of_week=[now.weekday()], + start_time=start_time, + end_time=end_time, + ) + + created = sync_recurring_rule_impl(rule.id, drop_existing=True, horizon_days=1) + self.assertEqual(created, 1) + + recording = Recording.objects.filter(custom_properties__rule__id=rule.id).first() + self.assertIsNotNone(recording) + self.assertEqual(recording.channel, channel) + self.assertEqual(recording.custom_properties.get('rule', {}).get('id'), rule.id) + + expected_start = timezone.make_aware( + datetime.combine(recording.start_time.date(), start_time), + timezone.get_current_timezone(), + ) + 
self.assertLess(abs((recording.start_time - expected_start).total_seconds()), 60) + + removed = purge_recurring_rule_impl(rule.id) + self.assertEqual(removed, 1) + self.assertFalse(Recording.objects.filter(custom_properties__rule__id=rule.id).exists()) diff --git a/core/models.py b/core/models.py index ba040666..5584d7ca 100644 --- a/core/models.py +++ b/core/models.py @@ -158,6 +158,7 @@ DVR_TV_FALLBACK_DIR_KEY = slugify("DVR TV Fallback Dir") DVR_TV_FALLBACK_TEMPLATE_KEY = slugify("DVR TV Fallback Template") DVR_MOVIE_FALLBACK_TEMPLATE_KEY = slugify("DVR Movie Fallback Template") DVR_COMSKIP_ENABLED_KEY = slugify("DVR Comskip Enabled") +DVR_COMSKIP_CUSTOM_PATH_KEY = slugify("DVR Comskip Custom Path") DVR_PRE_OFFSET_MINUTES_KEY = slugify("DVR Pre-Offset Minutes") DVR_POST_OFFSET_MINUTES_KEY = slugify("DVR Post-Offset Minutes") @@ -274,6 +275,27 @@ class CoreSettings(models.Model): except cls.DoesNotExist: return False + @classmethod + def get_dvr_comskip_custom_path(cls): + """Return configured comskip.ini path or empty string if unset.""" + try: + return cls.objects.get(key=DVR_COMSKIP_CUSTOM_PATH_KEY).value + except cls.DoesNotExist: + return "" + + @classmethod + def set_dvr_comskip_custom_path(cls, path: str | None): + """Persist the comskip.ini path setting, normalizing nulls to empty string.""" + value = (path or "").strip() + obj, _ = cls.objects.get_or_create( + key=DVR_COMSKIP_CUSTOM_PATH_KEY, + defaults={"name": "DVR Comskip Custom Path", "value": value}, + ) + if obj.value != value: + obj.value = value + obj.save(update_fields=["value"]) + return value + @classmethod def get_dvr_pre_offset_minutes(cls): """Minutes to start recording before scheduled start (default 0).""" diff --git a/dispatcharr/settings.py b/dispatcharr/settings.py index 289c6794..057780de 100644 --- a/dispatcharr/settings.py +++ b/dispatcharr/settings.py @@ -211,6 +211,10 @@ CELERY_BEAT_SCHEDULE = { "task": "core.tasks.scan_and_process_files", # Direct task call "schedule": 20.0, # 
Every 20 seconds }, + "maintain-recurring-recordings": { + "task": "apps.channels.tasks.maintain_recurring_recordings", + "schedule": 3600.0, # Once an hour ensure recurring schedules stay ahead + }, } MEDIA_ROOT = BASE_DIR / "media" diff --git a/frontend/src/api.js b/frontend/src/api.js index 01186bf6..19de8cd0 100644 --- a/frontend/src/api.js +++ b/frontend/src/api.js @@ -1873,6 +1873,70 @@ export default class API { } } + static async getComskipConfig() { + try { + return await request(`${host}/api/channels/dvr/comskip-config/`); + } catch (e) { + errorNotification('Failed to retrieve comskip configuration', e); + } + } + + static async uploadComskipIni(file) { + try { + const formData = new FormData(); + formData.append('file', file); + return await request(`${host}/api/channels/dvr/comskip-config/`, { + method: 'POST', + body: formData, + }); + } catch (e) { + errorNotification('Failed to upload comskip.ini', e); + } + } + + static async listRecurringRules() { + try { + const response = await request(`${host}/api/channels/recurring-rules/`); + return response; + } catch (e) { + errorNotification('Failed to retrieve recurring DVR rules', e); + } + } + + static async createRecurringRule(payload) { + try { + const response = await request(`${host}/api/channels/recurring-rules/`, { + method: 'POST', + body: payload, + }); + return response; + } catch (e) { + errorNotification('Failed to create recurring DVR rule', e); + } + } + + static async updateRecurringRule(ruleId, payload) { + try { + const response = await request(`${host}/api/channels/recurring-rules/${ruleId}/`, { + method: 'PATCH', + body: payload, + }); + return response; + } catch (e) { + errorNotification(`Failed to update recurring rule ${ruleId}`, e); + } + } + + static async deleteRecurringRule(ruleId) { + try { + await request(`${host}/api/channels/recurring-rules/${ruleId}/`, { + method: 'DELETE', + }); + } catch (e) { + errorNotification(`Failed to delete recurring rule ${ruleId}`, e); + } + } + 
static async deleteRecording(id) { try { await request(`${host}/api/channels/recordings/${id}/`, { method: 'DELETE' }); diff --git a/frontend/src/components/forms/Recording.jsx b/frontend/src/components/forms/Recording.jsx index 7ac36a0f..342276ed 100644 --- a/frontend/src/components/forms/Recording.jsx +++ b/frontend/src/components/forms/Recording.jsx @@ -1,117 +1,300 @@ -// Modal.js -import React from 'react'; +import React, { useMemo, useState } from 'react'; import API from '../../api'; -import { Button, Modal, Flex, Select, Alert } from '@mantine/core'; -import useChannelsStore from '../../store/channels'; -import { DateTimePicker } from '@mantine/dates'; +import { + Alert, + Button, + Modal, + Select, + Stack, + SegmentedControl, + MultiSelect, + Group, + Text, +} from '@mantine/core'; +import { DateTimePicker, TimeInput } from '@mantine/dates'; import { CircleAlert } from 'lucide-react'; import { isNotEmpty, useForm } from '@mantine/form'; +import useChannelsStore from '../../store/channels'; +import { notifications } from '@mantine/notifications'; -const DVR = ({ recording = null, channel = null, isOpen, onClose }) => { +const DAY_OPTIONS = [ + { value: '6', label: 'Sun' }, + { value: '0', label: 'Mon' }, + { value: '1', label: 'Tue' }, + { value: '2', label: 'Wed' }, + { value: '3', label: 'Thu' }, + { value: '4', label: 'Fri' }, + { value: '5', label: 'Sat' }, +]; + +const asDate = (value) => { + if (!value) return null; + if (value instanceof Date) return value; + const parsed = new Date(value); + return Number.isNaN(parsed.getTime()) ? null : parsed; +}; + +const toIsoIfDate = (value) => { + const dt = asDate(value); + return dt ? 
dt.toISOString() : value; +}; + +const toTimeString = (value) => { + const dt = asDate(value); + if (!dt) return ''; + const hours = String(dt.getHours()).padStart(2, '0'); + const minutes = String(dt.getMinutes()).padStart(2, '0'); + return `${hours}:${minutes}`; +}; + +const createRoundedDate = (minutesAhead = 0) => { + const dt = new Date(); + dt.setSeconds(0); + dt.setMilliseconds(0); + dt.setMinutes(Math.ceil(dt.getMinutes() / 30) * 30); + if (minutesAhead) { + dt.setMinutes(dt.getMinutes() + minutesAhead); + } + return dt; +}; + +const RecordingModal = ({ recording = null, channel = null, isOpen, onClose }) => { const channels = useChannelsStore((s) => s.channels); + const fetchRecordings = useChannelsStore((s) => s.fetchRecordings); + const fetchRecurringRules = useChannelsStore((s) => s.fetchRecurringRules); - let startTime = new Date(); - startTime.setMinutes(Math.ceil(startTime.getMinutes() / 30) * 30); - startTime.setSeconds(0); - startTime.setMilliseconds(0); + const [mode, setMode] = useState('single'); + const [submitting, setSubmitting] = useState(false); - let endTime = new Date(); - endTime.setMinutes(Math.ceil(endTime.getMinutes() / 30) * 30); - endTime.setSeconds(0); - endTime.setMilliseconds(0); - endTime.setHours(endTime.getHours() + 1); + const defaultStart = createRoundedDate(); + const defaultEnd = createRoundedDate(60); - const form = useForm({ - mode: 'uncontrolled', + const singleForm = useForm({ + mode: 'controlled', initialValues: { channel_id: recording - ? recording.channel_id + ? `${recording.channel}` : channel ? `${channel.id}` : '', - start_time: recording ? recording.start_time : startTime, - end_time: recording ? recording.end_time : endTime, + start_time: recording ? asDate(recording.start_time) || defaultStart : defaultStart, + end_time: recording ? 
asDate(recording.end_time) || defaultEnd : defaultEnd, }, - validate: { channel_id: isNotEmpty('Select a channel'), start_time: isNotEmpty('Select a start time'), - end_time: isNotEmpty('Select an end time'), + end_time: (value, values) => { + const start = asDate(values.start_time); + const end = asDate(value); + if (!end) return 'Select an end time'; + if (start && end <= start) return 'End time must be after start time'; + return null; + }, }, }); - const onSubmit = async () => { - const { channel_id, ...values } = form.getValues(); + const recurringForm = useForm({ + mode: 'controlled', + initialValues: { + channel_id: channel ? `${channel.id}` : '', + days_of_week: [], + start_time: defaultStart, + end_time: defaultEnd, + }, + validate: { + channel_id: isNotEmpty('Select a channel'), + days_of_week: (value) => (value && value.length ? null : 'Pick at least one day'), + start_time: isNotEmpty('Select a start time'), + end_time: (value, values) => { + const start = asDate(values.start_time); + const end = asDate(value); + if (!end) return 'Select an end time'; + if (start && end <= start) return 'End time must be after start time'; + return null; + }, + }, + }); - console.log(values); - - await API.createRecording({ - ...values, - channel: channel_id, + const channelOptions = useMemo(() => { + const list = Object.values(channels || {}); + list.sort((a, b) => { + const aNum = Number(a.channel_number) || 0; + const bNum = Number(b.channel_number) || 0; + if (aNum === bNum) { + return (a.name || '').localeCompare(b.name || ''); + } + return aNum - bNum; }); + return list.map((item) => ({ value: `${item.id}`, label: item.name || `Channel ${item.id}` })); + }, [channels]); - form.reset(); - onClose(); + const resetForms = () => { + singleForm.reset(); + recurringForm.reset(); + setMode('single'); }; + const handleClose = () => { + resetForms(); + onClose?.(); + }; + + const handleSingleSubmit = async (values) => { + try { + setSubmitting(true); + await 
API.createRecording({ + channel: values.channel_id, + start_time: toIsoIfDate(values.start_time), + end_time: toIsoIfDate(values.end_time), + }); + await fetchRecordings(); + notifications.show({ + title: 'Recording scheduled', + message: 'One-time recording added to DVR queue', + color: 'green', + autoClose: 2500, + }); + handleClose(); + } catch (error) { + console.error('Failed to create recording', error); + } finally { + setSubmitting(false); + } + }; + + const handleRecurringSubmit = async (values) => { + try { + setSubmitting(true); + await API.createRecurringRule({ + channel: values.channel_id, + days_of_week: (values.days_of_week || []).map((d) => Number(d)), + start_time: toTimeString(values.start_time), + end_time: toTimeString(values.end_time), + }); + await Promise.all([fetchRecurringRules(), fetchRecordings()]); + notifications.show({ + title: 'Recurring rule saved', + message: 'Future slots will be scheduled automatically', + color: 'green', + autoClose: 2500, + }); + handleClose(); + } catch (error) { + console.error('Failed to create recurring rule', error); + } finally { + setSubmitting(false); + } + }; + + const onSubmit = mode === 'single' + ? singleForm.onSubmit(handleSingleSubmit) + : recurringForm.onSubmit(handleRecurringSubmit); + if (!isOpen) { - return <>; + return null; } return ( - + } - style={{ paddingBottom: 5 }} + style={{ paddingBottom: 5, marginBottom: 12 }} > - Recordings may fail if active streams or overlapping recordings use up - all available streams + Recordings may fail if active streams or overlapping recordings use up all available tuners. 
- - + ) : ( + + + ({ value: String(opt.value), label: opt.label }))} + searchable + clearable + /> + + form.setFieldValue('start_date', value || dayjs().toDate())} + valueFormat="MMM D, YYYY" + /> + form.setFieldValue('end_date', value)} + valueFormat="MMM D, YYYY" + minDate={form.values.start_date || undefined} + /> + + + form.setFieldValue('start_time', value)} + withSeconds={false} + format="12" + amLabel="AM" + pmLabel="PM" + /> + form.setFieldValue('end_time', value)} + withSeconds={false} + format="12" + amLabel="AM" + pmLabel="PM" + /> + + + + + + + + + + Upcoming occurrences + {upcomingOccurrences.length} + + {upcomingOccurrences.length === 0 ? ( + No future airings currently scheduled. + ) : ( + + {upcomingOccurrences.map((occ) => { + const occStart = dayjs(occ.start_time); + const occEnd = dayjs(occ.end_time); + return ( + + + + {occStart.format('MMM D, YYYY')} + {occStart.format('h:mma')} – {occEnd.format('h:mma')} + + + + + + + + ); + })} + + )} + + + + ); +}; + +const RecordingCard = ({ recording, onOpenDetails, onOpenRecurring }) => { const channels = useChannelsStore((s) => s.channels); const env_mode = useSettingsStore((s) => s.environment.env_mode); const showVideo = useVideoStore((s) => s.showVideo); @@ -332,11 +664,11 @@ const RecordingCard = ({ recording, onOpenDetails }) => { const deleteRecording = (id) => { // Optimistically remove immediately from UI - try { useChannelsStore.getState().removeRecording(id); } catch {} + try { useChannelsStore.getState().removeRecording(id); } catch (error) { console.error('Failed to optimistically remove recording', error); } // Fire-and-forget server delete; websocket will keep others in sync API.deleteRecording(id).catch(() => { // On failure, fallback to refetch to restore state - try { useChannelsStore.getState().fetchRecordings(); } catch {} + try { useChannelsStore.getState().fetchRecordings(); } catch (error) { console.error('Failed to refresh recordings after delete', error); } }); }; @@ -345,6 +677,7 
@@ const RecordingCard = ({ recording, onOpenDetails }) => { const recordingName = program.title || 'Custom Recording'; const subTitle = program.sub_title || ''; const description = program.description || customProps.description || ''; + const isRecurringRule = customProps?.rule?.type === 'recurring'; // Poster or channel logo const posterLogoId = customProps.poster_logo_id; @@ -395,7 +728,9 @@ const RecordingCard = ({ recording, onOpenDetails }) => { try { await API.runComskip(recording.id); notifications.show({ title: 'Removing commercials', message: 'Queued comskip for this recording', color: 'blue.5', autoClose: 2000 }); - } catch {} + } catch (error) { + console.error('Failed to queue comskip for recording', error); + } }; // Cancel handling for series groups @@ -403,6 +738,10 @@ const RecordingCard = ({ recording, onOpenDetails }) => { const [busy, setBusy] = React.useState(false); const handleCancelClick = (e) => { e.stopPropagation(); + if (isRecurringRule) { + onOpenRecurring?.(recording, true); + return; + } if (isSeriesGroup) { setCancelOpen(true); } else { @@ -410,11 +749,11 @@ const RecordingCard = ({ recording, onOpenDetails }) => { } }; - const seriesInfo = React.useMemo(() => { + const seriesInfo = (() => { const cp = customProps || {}; const pr = cp.program || {}; return { tvg_id: pr.tvg_id, title: pr.title }; - }, [customProps]); + })(); const removeUpcomingOnly = async () => { try { @@ -423,7 +762,7 @@ const RecordingCard = ({ recording, onOpenDetails }) => { } finally { setBusy(false); setCancelOpen(false); - try { await fetchRecordings(); } catch {} + try { await fetchRecordings(); } catch (error) { console.error('Failed to refresh recordings', error); } } }; @@ -432,13 +771,13 @@ const RecordingCard = ({ recording, onOpenDetails }) => { setBusy(true); const { tvg_id, title } = seriesInfo; if (tvg_id) { - try { await API.bulkRemoveSeriesRecordings({ tvg_id, title, scope: 'title' }); } catch {} - try { await API.deleteSeriesRule(tvg_id); } catch 
{} + try { await API.bulkRemoveSeriesRecordings({ tvg_id, title, scope: 'title' }); } catch (error) { console.error('Failed to remove series recordings', error); } + try { await API.deleteSeriesRule(tvg_id); } catch (error) { console.error('Failed to delete series rule', error); } } } finally { setBusy(false); setCancelOpen(false); - try { await fetchRecordings(); } catch {} + try { await fetchRecordings(); } catch (error) { console.error('Failed to refresh recordings after series removal', error); } } }; @@ -455,7 +794,13 @@ const RecordingCard = ({ recording, onOpenDetails }) => { height: '100%', cursor: 'pointer', }} - onClick={() => onOpenDetails?.(recording)} + onClick={() => { + if (isRecurringRule) { + onOpenRecurring?.(recording, false); + } else { + onOpenDetails?.(recording); + } + }} > @@ -471,7 +816,7 @@ const RecordingCard = ({ recording, onOpenDetails }) => { {isSeriesGroup && ( Series )} - {customProps?.rule?.type === 'recurring' && ( + {isRecurringRule && ( Recurring )} {seLabel && !isSeriesGroup && ( @@ -622,13 +967,13 @@ const DVRPage = () => { const fetchRecordings = useChannelsStore((s) => s.fetchRecordings); const channels = useChannelsStore((s) => s.channels); const fetchChannels = useChannelsStore((s) => s.fetchChannels); - const recurringRules = useChannelsStore((s) => s.recurringRules) || []; const fetchRecurringRules = useChannelsStore((s) => s.fetchRecurringRules); const [recordingModalOpen, setRecordingModalOpen] = useState(false); const [detailsOpen, setDetailsOpen] = useState(false); const [detailsRecording, setDetailsRecording] = useState(null); - const [busyRuleId, setBusyRuleId] = useState(null); + const [ruleModal, setRuleModal] = useState({ open: false, ruleId: null }); + const [editRecording, setEditRecording] = useState(null); const openRecordingModal = () => { setRecordingModalOpen(true); @@ -644,50 +989,27 @@ const DVRPage = () => { }; const closeDetails = () => setDetailsOpen(false); + const openRuleModal = (recording) => { + 
const ruleId = recording?.custom_properties?.rule?.id; + if (!ruleId) { + openDetails(recording); + return; + } + setDetailsOpen(false); + setDetailsRecording(null); + setEditRecording(null); + setRuleModal({ open: true, ruleId }); + }; + + const closeRuleModal = () => setRuleModal({ open: false, ruleId: null }); + useEffect(() => { - // Ensure channels and recordings are loaded for this view if (!channels || Object.keys(channels).length === 0) { fetchChannels(); } fetchRecordings(); fetchRecurringRules(); - }, []); - - const handleDeleteRule = async (ruleId) => { - setBusyRuleId(ruleId); - try { - await API.deleteRecurringRule(ruleId); - await Promise.all([fetchRecurringRules(), fetchRecordings()]); - notifications.show({ - title: 'Recurring rule removed', - message: 'Future recordings for this rule were cancelled', - color: 'red', - autoClose: 2500, - }); - } catch (error) { - console.error('Failed to delete recurring rule', error); - } finally { - setBusyRuleId(null); - } - }; - - const handleToggleRule = async (rule, enabled) => { - setBusyRuleId(rule.id); - try { - await API.updateRecurringRule(rule.id, { enabled }); - await Promise.all([fetchRecurringRules(), fetchRecordings()]); - notifications.show({ - title: enabled ? 'Recurring rule enabled' : 'Recurring rule paused', - message: enabled ? 'Future occurrences will be scheduled automatically' : 'Upcoming recordings removed', - color: enabled ? 
'green' : 'yellow', - autoClose: 2500, - }); - } catch (error) { - console.error('Failed to update recurring rule', error); - } finally { - setBusyRuleId(null); - } - }; + }, [channels, fetchChannels, fetchRecordings, fetchRecurringRules]); // Re-render every second so time-based bucketing updates without a refresh const [now, setNow] = useState(dayjs()); @@ -761,7 +1083,7 @@ const DVRPage = () => { }); completed.sort((a, b) => dayjs(b.end_time) - dayjs(a.end_time)); return { inProgress: inProgressDedup, upcoming: upcomingGrouped, completed }; - }, [recordings]); + }, [recordings, now]); return ( @@ -781,56 +1103,6 @@ const DVRPage = () => { New Recording -
- - Recurring Rules - {recurringRules.length} - - {recurringRules.length === 0 ? ( - - No recurring rules yet. Create one from the New Recording dialog. - - ) : ( - - {recurringRules.map((rule) => { - const ch = channels?.[rule.channel]; - const channelName = ch?.name || `Channel ${rule.channel}`; - const range = `${formatRuleTime(rule.start_time)} – ${formatRuleTime(rule.end_time)}`; - const days = formatRuleDays(rule.days_of_week); - return ( - - - - - {channelName} - {!rule.enabled && Paused} - - {days} • {range} - - - handleToggleRule(rule, event.currentTarget.checked)} - disabled={busyRuleId === rule.id} - /> - handleDeleteRule(rule.id)} - disabled={busyRuleId === rule.id} - > - - - - - - ); - })} - - )} -
-
Currently Recording @@ -838,7 +1110,7 @@ const DVRPage = () => { {inProgress.map((rec) => ( - + ))} {inProgress.length === 0 && ( @@ -855,7 +1127,7 @@ {upcoming.map((rec) => ( - + ))} {upcoming.length === 0 && ( @@ -872,7 +1144,7 @@ {completed.map((rec) => ( - + ))} {completed.length === 0 && ( @@ -888,6 +1160,22 @@ onClose={closeRecordingModal} /> + setEditRecording(null)} + /> + + { + setRuleModal({ open: false, ruleId: null }); + setEditRecording(occ); + }} + /> + {/* Details Modal */} {detailsRecording && ( { } useVideoStore.getState().showVideo(fileUrl, 'vod', { name: detailsRecording.custom_properties?.program?.title || 'Recording', logo: { url: (detailsRecording.custom_properties?.poster_logo_id ? `/api/channels/logos/${detailsRecording.custom_properties.poster_logo_id}/cache/` : channels[detailsRecording.channel]?.logo?.cache_url) || '/logo.png' } }); }} + onEdit={(rec) => { + setEditRecording(rec); + closeDetails(); + }} /> )} From 6536f35dc0e7d01b060d646f4dcb2ddb4b3f5ca8 Mon Sep 17 00:00:00 2001 From: Dispatcharr Date: Fri, 19 Sep 2025 19:47:59 -0500 Subject: [PATCH 031/119] Fixed bug Fixed bug that stopped stream from ending --- apps/channels/serializers.py | 14 ++- apps/channels/tasks.py | 11 +- frontend/src/components/forms/Recording.jsx | 120 +++++++++++--------- frontend/src/pages/DVR.jsx | 19 ++-- 4 files changed, 93 insertions(+), 71 deletions(-) diff --git a/apps/channels/serializers.py b/apps/channels/serializers.py index d41bebed..1fa2b68a 100644 --- a/apps/channels/serializers.py +++ b/apps/channels/serializers.py @@ -528,26 +528,28 @@ class RecurringRecordingRuleSerializer(serializers.ModelSerializer): return sorted(set(cleaned)) def validate(self, attrs): - from django.utils import timezone start = attrs.get("start_time") or getattr(self.instance, "start_time", None) end = attrs.get("end_time") or getattr(self.instance, "end_time", None) if start and end and end <= start:
raise serializers.ValidationError("End time must be after start time") start_date = attrs.get("start_date") if "start_date" in attrs else getattr(self.instance, "start_date", None) end_date = attrs.get("end_date") if "end_date" in attrs else getattr(self.instance, "end_date", None) + if start_date is None: + existing_start = getattr(self.instance, "start_date", None) + if existing_start is None: + raise serializers.ValidationError("Start date is required") if start_date and end_date and end_date < start_date: raise serializers.ValidationError("End date must be on or after start date") + if end_date is None: + existing_end = getattr(self.instance, "end_date", None) + if existing_end is None: + raise serializers.ValidationError("End date is required") # Normalize empty strings to None for dates if attrs.get("end_date") == "": attrs["end_date"] = None if attrs.get("start_date") == "": attrs["start_date"] = None - if attrs.get("start_date") is None and not getattr(self.instance, "start_date", None): - attrs["start_date"] = timezone.localdate() return super().validate(attrs) def create(self, validated_data): - from django.utils import timezone - if not validated_data.get("start_date"): - validated_data["start_date"] = timezone.localdate() return super().create(validated_data) diff --git a/apps/channels/tasks.py b/apps/channels/tasks.py index 540934fc..688dc79d 100755 --- a/apps/channels/tasks.py +++ b/apps/channels/tasks.py @@ -1145,9 +1145,12 @@ def sync_recurring_rule_impl(rule_id: int, drop_existing: bool = True, horizon_d end_limit = rule.end_date horizon = now + timedelta(days=horizon_days) start_window = max(start_limit, now.date()) - end_window = horizon.date() - if end_limit and end_limit < end_window: + if drop_existing and end_limit: end_window = end_limit + else: + end_window = horizon.date() + if end_limit and end_limit < end_window: + end_window = end_limit if end_window < start_window: return 0 total_created = 0 @@ -1163,7 +1166,9 @@ def 
sync_recurring_rule_impl(rule_id: int, drop_existing: bool = True, horizon_d end_dt = timezone.make_aware(datetime.combine(target_date, rule.end_time), tz) except Exception: continue - if end_dt <= start_dt or start_dt <= now: + if end_dt <= start_dt: + end_dt = end_dt + timedelta(days=1) + if start_dt <= now: continue exists = Recording.objects.filter( channel=rule.channel, diff --git a/frontend/src/components/forms/Recording.jsx b/frontend/src/components/forms/Recording.jsx index 8203cc20..90080676 100644 --- a/frontend/src/components/forms/Recording.jsx +++ b/frontend/src/components/forms/Recording.jsx @@ -10,7 +10,6 @@ import { SegmentedControl, MultiSelect, Group, - Text, TextInput, } from '@mantine/core'; import { DateTimePicker, TimeInput, DatePickerInput } from '@mantine/dates'; @@ -41,13 +40,12 @@ const toIsoIfDate = (value) => { return dt ? dt.toISOString() : value; }; +// Accepts "h:mm A"/"hh:mm A"/"HH:mm"/Date, returns "HH:mm" const toTimeString = (value) => { if (!value) return '00:00'; if (typeof value === 'string') { - const parsed = dayjs(value, ['HH:mm', 'HH:mm:ss', 'h:mm A'], true); - if (parsed.isValid()) { - return parsed.format('HH:mm'); - } + const parsed = dayjs(value, ['HH:mm', 'hh:mm A', 'h:mm A', 'HH:mm:ss'], true); + if (parsed.isValid()) return parsed.format('HH:mm'); return value; } const dt = asDate(value); @@ -69,12 +67,16 @@ const createRoundedDate = (minutesAhead = 0) => { dt.setSeconds(0); dt.setMilliseconds(0); dt.setMinutes(Math.ceil(dt.getMinutes() / 30) * 30); - if (minutesAhead) { - dt.setMinutes(dt.getMinutes() + minutesAhead); - } + if (minutesAhead) dt.setMinutes(dt.getMinutes() + minutesAhead); return dt; }; +// robust onChange for TimeInput (string or event) +const timeChange = (setter) => (valOrEvent) => { + if (typeof valOrEvent === 'string') setter(valOrEvent); + else if (valOrEvent?.currentTarget) setter(valOrEvent.currentTarget.value); +}; + const RecordingModal = ({ recording = null, channel = null, isOpen, onClose 
}) => { const channels = useChannelsStore((s) => s.channels); const fetchRecordings = useChannelsStore((s) => s.fetchRecordings); @@ -87,14 +89,11 @@ const RecordingModal = ({ recording = null, channel = null, isOpen, onClose }) = const defaultEnd = createRoundedDate(60); const defaultDate = new Date(); + // One-time form const singleForm = useForm({ mode: 'controlled', initialValues: { - channel_id: recording - ? `${recording.channel}` - : channel - ? `${channel.id}` - : '', + channel_id: recording ? `${recording.channel}` : channel ? `${channel.id}` : '', start_time: recording ? asDate(recording.start_time) || defaultStart : defaultStart, end_time: recording ? asDate(recording.end_time) || defaultEnd : defaultEnd, }, @@ -111,8 +110,11 @@ const RecordingModal = ({ recording = null, channel = null, isOpen, onClose }) = }, }); + // Recurring form stores times as "HH:mm" strings for stable editing const recurringForm = useForm({ mode: 'controlled', + validateInputOnChange: false, + validateInputOnBlur: true, initialValues: { channel_id: channel ? `${channel.id}` : '', days_of_week: [], @@ -120,34 +122,38 @@ const RecordingModal = ({ recording = null, channel = null, isOpen, onClose }) = end_time: dayjs(defaultEnd).format('HH:mm'), rule_name: '', start_date: defaultDate, - end_date: null, + end_date: defaultDate, }, validate: { channel_id: isNotEmpty('Select a channel'), days_of_week: (value) => (value && value.length ? null : 'Pick at least one day'), - start_time: isNotEmpty('Select a start time'), + start_time: (value) => (value ? 
null : 'Select a start time'), end_time: (value, values) => { - const start = asDate(values.start_time); - const end = asDate(value); - if (!end) return 'Select an end time'; - if (start && end <= start) return 'End time must be after start time'; + if (!value) return 'Select an end time'; + const start = dayjs(values.start_time, ['HH:mm', 'hh:mm A', 'h:mm A'], true); + const end = dayjs(value, ['HH:mm', 'hh:mm A', 'h:mm A'], true); + if (start.isValid() && end.isValid() && end.diff(start, 'minute') === 0) { + return 'End time must differ from start time'; + } return null; }, end_date: (value, values) => { const end = asDate(value); const start = asDate(values.start_date); - if (end && start && end < start) { - return 'End date cannot be before start date'; - } + if (!end) return 'Select an end date'; + if (start && end < start) return 'End date cannot be before start date'; return null; }, }, }); useEffect(() => { - if (!isOpen) { - return; - } + if (!isOpen) return; + + const freshStart = createRoundedDate(); + const freshEnd = createRoundedDate(60); + const freshDate = new Date(); + if (recording && recording.id) { setMode('single'); singleForm.setValues({ @@ -156,23 +162,22 @@ const RecordingModal = ({ recording = null, channel = null, isOpen, onClose }) = end_time: asDate(recording.end_time) || defaultEnd, }); } else { - // Reset forms to defaults when opening fresh - const freshStart = createRoundedDate(); - const freshEnd = createRoundedDate(60); - const freshDate = new Date(); + // Reset forms for fresh open singleForm.setValues({ channel_id: channel ? `${channel.id}` : '', start_time: freshStart, end_time: freshEnd, }); + + const startStr = dayjs(freshStart).format('HH:mm'); recurringForm.setValues({ channel_id: channel ? 
`${channel.id}` : '', days_of_week: [], - start_time: dayjs(freshStart).format('HH:mm'), + start_time: startStr, end_time: dayjs(freshEnd).format('HH:mm'), rule_name: channel?.name || '', start_date: freshDate, - end_date: null, + end_date: freshDate, }); setMode('single'); } @@ -184,9 +189,7 @@ const RecordingModal = ({ recording = null, channel = null, isOpen, onClose }) = list.sort((a, b) => { const aNum = Number(a.channel_number) || 0; const bNum = Number(b.channel_number) || 0; - if (aNum === bNum) { - return (a.name || '').localeCompare(b.name || ''); - } + if (aNum === bNum) return (a.name || '').localeCompare(b.name || ''); return aNum - bNum; }); return list.map((item) => ({ value: `${item.id}`, label: item.name || `Channel ${item.id}` })); @@ -252,6 +255,7 @@ const RecordingModal = ({ recording = null, channel = null, isOpen, onClose }) = end_date: toDateString(values.end_date), name: values.rule_name?.trim() || '', }); + await Promise.all([fetchRecurringRules(), fetchRecordings()]); notifications.show({ title: 'Recurring rule saved', @@ -267,13 +271,12 @@ const RecordingModal = ({ recording = null, channel = null, isOpen, onClose }) = } }; - const onSubmit = mode === 'single' - ? singleForm.onSubmit(handleSingleSubmit) - : recurringForm.onSubmit(handleRecurringSubmit); + const onSubmit = + mode === 'single' + ? 
singleForm.onSubmit(handleSingleSubmit) + : recurringForm.onSubmit(handleRecurringSubmit); - if (!isOpen) { - return null; - } + if (!isOpen) return null; return ( @@ -326,15 +329,15 @@ const RecordingModal = ({ recording = null, channel = null, isOpen, onClose }) = {...singleForm.getInputProps('start_time')} key={singleForm.key('start_time')} label="Start" - valueFormat="MMM D, YYYY hh:mm A" - timeInputProps={{ format: '12', amLabel: 'AM', pmLabel: 'PM', withSeconds: false }} + valueFormat="MMM D, YYYY h:mm A" + timeInputProps={{ format: '12', withSeconds: false, amLabel: 'AM', pmLabel: 'PM' }} /> ) : ( @@ -353,41 +356,52 @@ const RecordingModal = ({ recording = null, channel = null, isOpen, onClose }) = data={DAY_OPTIONS} searchable clearable - nothingFound="No match" + nothingFoundMessage="No match" /> + recurringForm.setFieldValue('start_date', value || new Date())} + onChange={(value) => + recurringForm.setFieldValue('start_date', value || new Date()) + } valueFormat="MMM D, YYYY" /> recurringForm.setFieldValue('end_date', value)} valueFormat="MMM D, YYYY" minDate={recurringForm.values.start_date || undefined} /> + recurringForm.setFieldValue('start_time', value)} label="Start time" + value={recurringForm.values.start_time} + onChange={timeChange((val) => + recurringForm.setFieldValue('start_time', toTimeString(val)) + )} + onBlur={() => recurringForm.validateField('start_time')} withSeconds={false} - format="12" + format="12" // shows 12-hour (so "00:00" renders "12:00 AM") + inputMode="numeric" amLabel="AM" pmLabel="PM" /> + recurringForm.setFieldValue('end_time', value)} label="End time" + value={recurringForm.values.end_time} + onChange={timeChange((val) => + recurringForm.setFieldValue('end_time', toTimeString(val)) + )} + onBlur={() => recurringForm.validateField('end_time')} withSeconds={false} format="12" + inputMode="numeric" amLabel="AM" pmLabel="PM" /> diff --git a/frontend/src/pages/DVR.jsx b/frontend/src/pages/DVR.jsx index ec6ebbff..83faae06 
100644 --- a/frontend/src/pages/DVR.jsx +++ b/frontend/src/pages/DVR.jsx @@ -375,23 +375,26 @@ const RecurringRuleModal = ({ opened, onClose, ruleId, onEditOccurrence }) => { start_time: dayjs().startOf('hour').format('HH:mm'), end_time: dayjs().startOf('hour').add(1, 'hour').format('HH:mm'), start_date: dayjs().toDate(), - end_date: null, + end_date: dayjs().toDate(), enabled: true, }, validate: { channel_id: (value) => (value ? null : 'Select a channel'), days_of_week: (value) => (value && value.length ? null : 'Pick at least one day'), end_time: (value, values) => { - const startValue = dayjs(values.start_time); - const endValue = dayjs(value); if (!value) return 'Select an end time'; - if (endValue.isSameOrBefore(startValue)) return 'End time must be after start time'; + const startValue = dayjs(values.start_time, ['HH:mm', 'hh:mm A', 'h:mm A'], true); + const endValue = dayjs(value, ['HH:mm', 'hh:mm A', 'h:mm A'], true); + if (startValue.isValid() && endValue.isValid() && endValue.diff(startValue, 'minute') === 0) { + return 'End time must differ from start time'; + } return null; }, end_date: (value, values) => { const endDate = dayjs(value); const startDate = dayjs(values.start_date); - if (value && startDate.isValid() && endDate.isBefore(startDate, 'day')) { + if (!value) return 'Select an end date'; + if (startDate.isValid() && endDate.isBefore(startDate, 'day')) { return 'End date cannot be before start date'; } return null; @@ -564,8 +567,6 @@ const RecurringRuleModal = ({ opened, onClose, ruleId, onEditOccurrence }) => { /> form.setFieldValue('end_date', value)} valueFormat="MMM D, YYYY" @@ -576,7 +577,7 @@ const RecurringRuleModal = ({ opened, onClose, ruleId, onEditOccurrence }) => { form.setFieldValue('start_time', value)} + onChange={(value) => form.setFieldValue('start_time', toTimeString(value))} withSeconds={false} format="12" amLabel="AM" @@ -585,7 +586,7 @@ const RecurringRuleModal = ({ opened, onClose, ruleId, onEditOccurrence }) => { 
form.setFieldValue('end_time', value)} + onChange={(value) => form.setFieldValue('end_time', toTimeString(value))} withSeconds={false} format="12" amLabel="AM" From db024130be7eb7d07bc5f74cf8127128f2ce8438 Mon Sep 17 00:00:00 2001 From: Jim McBride Date: Sun, 21 Sep 2025 01:02:32 -0500 Subject: [PATCH 032/119] Virtualize TV guide rendering --- frontend/src/pages/Guide.jsx | 1331 ++++++++++++++++++++-------------- 1 file changed, 770 insertions(+), 561 deletions(-) diff --git a/frontend/src/pages/Guide.jsx b/frontend/src/pages/Guide.jsx index 7671fb57..644bc6ea 100644 --- a/frontend/src/pages/Guide.jsx +++ b/frontend/src/pages/Guide.jsx @@ -1,5 +1,12 @@ // frontend/src/pages/Guide.js -import React, { useMemo, useState, useEffect, useRef } from 'react'; +import React, { + useMemo, + useState, + useEffect, + useRef, + useCallback, + useContext, +} from 'react'; import dayjs from 'dayjs'; import API from '../api'; import useChannelsStore from '../store/channels'; @@ -23,12 +30,13 @@ import { Transition, Modal, Stack, - useMantineTheme, } from '@mantine/core'; import { Search, X, Clock, Video, Calendar, Play } from 'lucide-react'; import './guide.css'; import useEPGsStore from '../store/epgs'; import useLocalStorage from '../hooks/useLocalStorage'; +import { useElementSize } from '@mantine/hooks'; +import { VariableSizeList } from 'react-window'; /** Layout constants */ const CHANNEL_WIDTH = 120; // Width of the channel/logo column @@ -38,8 +46,243 @@ const HOUR_WIDTH = 450; // Increased from 300 to 450 to make each program wider const MINUTE_INCREMENT = 15; // For positioning programs every 15 min const MINUTE_BLOCK_WIDTH = HOUR_WIDTH / (60 / MINUTE_INCREMENT); +const GuideVirtualizedContext = React.createContext({ + contentWidth: CHANNEL_WIDTH, + nowPosition: -1, +}); + +const GuideInnerElement = React.forwardRef(function GuideInnerElement( + { style, children, ...rest }, + ref +) { + const { contentWidth, nowPosition } = useContext(GuideVirtualizedContext); + + 
return ( +
+ {nowPosition >= 0 && ( + + )} + {children} +
+ ); +}); +GuideInnerElement.displayName = 'GuideInnerElement'; + +const GuideRow = React.memo(function GuideRow({ index, style, data }) { + const { + filteredChannels, + programsByChannelId, + expandedProgramId, + rowHeights, + logos, + hoveredChannelId, + setHoveredChannelId, + renderProgram, + hourTimeline, + handleLogoClick, + contentWidth, + start, + } = data; + + const channel = filteredChannels[index]; + if (!channel) { + return null; + } + + const channelPrograms = programsByChannelId.get(channel.id) || []; + const hasExpandedProgram = channelPrograms.some( + (program) => program.id === expandedProgramId + ); + const rowHeight = + rowHeights[index] ?? + (hasExpandedProgram ? EXPANDED_PROGRAM_HEIGHT : PROGRAM_HEIGHT); + + return ( +
+ + handleLogoClick(channel, event)} + onMouseEnter={() => setHoveredChannelId(channel.id)} + onMouseLeave={() => setHoveredChannelId(null)} + > + {hoveredChannelId === channel.id && ( + + + + )} + + + + {channel.name} + + + + {channel.channel_number || '-'} + + + + + + {channelPrograms.length > 0 ? ( + channelPrograms.map((program) => ( +
+ {renderProgram(program, start)} +
+ )) + ) : ( + <> + {Array.from({ length: Math.ceil(hourTimeline.length / 2) }).map( + (_, placeholderIndex) => ( + + No program data + + ) + )} + + )} +
+
+
+ ); +}); +GuideRow.displayName = 'GuideRow'; + export default function TVChannelGuide({ startDate, endDate }) { - const theme = useMantineTheme(); const channels = useChannelsStore((s) => s.channels); const recordings = useChannelsStore((s) => s.recordings); const channelGroups = useChannelsStore((s) => s.channelGroups); @@ -59,7 +302,6 @@ export default function TVChannelGuide({ startDate, endDate }) { const [existingRuleMode, setExistingRuleMode] = useState(null); const [rulesOpen, setRulesOpen] = useState(false); const [rules, setRules] = useState([]); - const [loading, setLoading] = useState(true); const [initialScrollComplete, setInitialScrollComplete] = useState(false); // New filter states @@ -71,6 +313,13 @@ export default function TVChannelGuide({ startDate, endDate }) { const guideRef = useRef(null); const timelineRef = useRef(null); // New ref for timeline scrolling + const listRef = useRef(null); + const isSyncingScroll = useRef(false); + const { + ref: guideContainerRef, + width: guideWidth, + height: guideHeight, + } = useElementSize(); // Add new state to track hovered logo const [hoveredChannelId, setHoveredChannelId] = useState(null); @@ -80,14 +329,14 @@ export default function TVChannelGuide({ startDate, endDate }) { if (!Object.keys(channels).length === 0) { console.warn('No channels provided or empty channels array'); notifications.show({ title: 'No channels available', color: 'red.5' }); - setLoading(false); return; } const fetchPrograms = async () => { console.log('Fetching program grid...'); const fetched = await API.getGrid(); // GETs your EPG grid - console.log(`Received ${fetched.length} programs`); + const receivedCount = Array.isArray(fetched) ? 
fetched.length : 0; + console.log(`Received ${receivedCount} programs`); // Include ALL channels, sorted by channel number - don't filter by EPG data const sortedChannels = Object.values(channels).sort( @@ -97,10 +346,21 @@ export default function TVChannelGuide({ startDate, endDate }) { console.log(`Using all ${sortedChannels.length} available channels`); + const processedPrograms = (Array.isArray(fetched) ? fetched : []).map( + (program) => { + const start = dayjs(program.start_time); + const end = dayjs(program.end_time); + return { + ...program, + startMs: start.valueOf(), + endMs: end.valueOf(), + }; + } + ); + setGuideChannels(sortedChannels); setFilteredChannels(sortedChannels); // Initialize filtered channels - setPrograms(fetched); - setLoading(false); + setPrograms(processedPrograms); }; fetchPrograms(); @@ -152,6 +412,84 @@ export default function TVChannelGuide({ startDate, endDate }) { profiles, ]); + const channelById = useMemo(() => { + return guideChannels.reduce((acc, channel) => { + acc[channel.id] = channel; + return acc; + }, {}); + }, [guideChannels]); + + const channelIdByTvgId = useMemo(() => { + const map = new Map(); + guideChannels.forEach((channel) => { + const tvgRecord = channel.epg_data_id + ? tvgsById[channel.epg_data_id] + : null; + const tvgId = tvgRecord?.tvg_id ?? 
channel.uuid; + if (tvgId) { + map.set(String(tvgId), channel.id); + } + }); + return map; + }, [guideChannels, tvgsById]); + + const programsByChannelId = useMemo(() => { + if (!programs.length) return new Map(); + + const map = new Map(); + programs.forEach((program) => { + const channelId = channelIdByTvgId.get(String(program.tvg_id)); + if (!channelId) return; + if (!map.has(channelId)) { + map.set(channelId, []); + } + map.get(channelId).push(program); + }); + + map.forEach((list) => + list.sort((a, b) => (a.startMs || 0) - (b.startMs || 0)) + ); + + return map; + }, [programs, channelIdByTvgId]); + + const recordingsByProgramId = useMemo(() => { + const map = new Map(); + (recordings || []).forEach((recording) => { + const programId = recording?.custom_properties?.program?.id; + if (programId != null) { + map.set(programId, recording); + } + }); + return map; + }, [recordings]); + + const rowHeights = useMemo(() => { + if (!filteredChannels.length) return []; + return filteredChannels.map((channel) => { + const channelPrograms = programsByChannelId.get(channel.id) || []; + const hasExpandedProgram = channelPrograms.some( + (program) => program.id === expandedProgramId + ); + return hasExpandedProgram ? EXPANDED_PROGRAM_HEIGHT : PROGRAM_HEIGHT; + }); + }, [filteredChannels, programsByChannelId, expandedProgramId]); + + const getItemSize = useCallback( + (index) => rowHeights[index] ?? PROGRAM_HEIGHT, + [rowHeights] + ); + + useEffect(() => { + if (!listRef.current) return; + listRef.current.resetAfterIndex(0, true); + }, [rowHeights]); + + useEffect(() => { + if (!listRef.current) return; + listRef.current.scrollToItem(0); + }, [searchQuery, selectedGroupId, selectedProfileId]); + // Use start/end from props or default to "today at midnight" +24h const defaultStart = dayjs(startDate || dayjs().startOf('day')); const defaultEnd = endDate ? 
dayjs(endDate) : defaultStart.add(24, 'hour'); @@ -180,22 +518,10 @@ export default function TVChannelGuide({ startDate, endDate }) { ? latestProgramEnd : defaultEnd; - // Time increments in 15-min steps (for placing programs) - const programTimeline = useMemo(() => { - const times = []; - let current = start; - while (current.isBefore(end)) { - times.push(current); - current = current.add(MINUTE_INCREMENT, 'minute'); - } - return times; - }, [start, end]); - // Format day label using relative terms when possible (Today, Tomorrow, etc) - const formatDayLabel = (time) => { + const formatDayLabel = useCallback((time) => { const today = dayjs().startOf('day'); const tomorrow = today.add(1, 'day'); - const dayAfterTomorrow = today.add(2, 'day'); const weekLater = today.add(7, 'day'); const day = time.startOf('day'); @@ -211,7 +537,7 @@ export default function TVChannelGuide({ startDate, endDate }) { // Beyond a week, show month and day return time.format(dateFormat); } - }; + }, [dateFormat]); // Hourly marks with day labels const hourTimeline = useMemo(() => { @@ -238,7 +564,7 @@ export default function TVChannelGuide({ startDate, endDate }) { current = current.add(1, 'hour'); } return hours; - }, [start, end]); + }, [start, end, formatDayLabel]); // Scroll to the nearest half-hour mark ONLY on initial load useEffect(() => { @@ -282,179 +608,229 @@ export default function TVChannelGuide({ startDate, endDate }) { return (minutesSinceStart / MINUTE_INCREMENT) * MINUTE_BLOCK_WIDTH; }, [now, start, end]); + const contentWidth = useMemo( + () => hourTimeline.length * HOUR_WIDTH + CHANNEL_WIDTH, + [hourTimeline.length] + ); + + const virtualizedHeight = useMemo( + () => guideHeight || 600, + [guideHeight] + ); + + const virtualizedWidth = useMemo(() => { + if (guideWidth) return guideWidth; + if (typeof window !== 'undefined') { + return window.innerWidth; + } + return Math.max(contentWidth, 1200); + }, [guideWidth, contentWidth]); + + const itemKey = useCallback( + (index) 
=> filteredChannels[index]?.id ?? index, + [filteredChannels] + ); + + useEffect(() => { + const guideEl = guideRef.current; + if (!guideEl) return; + + let frame; + + const syncScroll = () => { + if (!timelineRef.current) return; + if (isSyncingScroll.current) return; + isSyncingScroll.current = true; + const scrollLeft = guideEl.scrollLeft; + frame = requestAnimationFrame(() => { + if (timelineRef.current) { + timelineRef.current.scrollLeft = scrollLeft; + } + isSyncingScroll.current = false; + }); + }; + + guideEl.addEventListener('scroll', syncScroll); + + return () => { + guideEl.removeEventListener('scroll', syncScroll); + if (frame) cancelAnimationFrame(frame); + isSyncingScroll.current = false; + }; + }, [guideWidth, guideRef, timelineRef]); + // Helper: find channel by tvg_id - function findChannelByTvgId(tvgId) { - return guideChannels.find( - (ch) => - tvgsById[ch.epg_data_id]?.tvg_id === tvgId || - (!ch.epg_data_id && ch.uuid === tvgId) - ); - } + const findChannelByTvgId = useCallback( + (tvgId) => { + const channelId = channelIdByTvgId.get(String(tvgId)); + return channelId ? channelById[channelId] : undefined; + }, + [channelById, channelIdByTvgId] + ); - const openRecordChoice = async (program) => { - setRecordChoiceProgram(program); - setRecordChoiceOpen(true); - try { - const rules = await API.listSeriesRules(); - // Only treat as existing if the rule matches this specific show's title (or has no title constraint) - const rule = (rules || []).find( - (r) => String(r.tvg_id) === String(program.tvg_id) && (!r.title || r.title === program.title) - ); - setExistingRuleMode(rule ? 
rule.mode : null); - } catch {} - // Also detect if this program already has a scheduled recording - try { - const rec = (recordings || []).find((r) => r?.custom_properties?.program?.id == program.id); - setRecordingForProgram(rec || null); - } catch {} - }; + const openRecordChoice = useCallback( + async (program) => { + setRecordChoiceProgram(program); + setRecordChoiceOpen(true); + try { + const rules = await API.listSeriesRules(); + const rule = (rules || []).find( + (r) => + String(r.tvg_id) === String(program.tvg_id) && + (!r.title || r.title === program.title) + ); + setExistingRuleMode(rule ? rule.mode : null); + } catch (error) { + console.warn('Failed to load series rules', error); + } + try { + const rec = recordingsByProgramId.get(program.id); + setRecordingForProgram(rec || null); + } catch (error) { + console.warn('Failed to resolve program recording', error); + } + }, + [recordingsByProgramId] + ); - const recordOne = async (program) => { - const channel = findChannelByTvgId(program.tvg_id); - await API.createRecording({ - channel: `${channel.id}`, - start_time: program.start_time, - end_time: program.end_time, - custom_properties: { program }, + const recordOne = useCallback( + async (program) => { + const channel = findChannelByTvgId(program.tvg_id); + if (!channel) { + notifications.show({ + title: 'Channel not found', + message: 'Unable to schedule recording for this program.', + color: 'red', + }); + return; + } + await API.createRecording({ + channel: `${channel.id}`, + start_time: program.start_time, + end_time: program.end_time, + custom_properties: { program }, + }); + notifications.show({ title: 'Recording scheduled' }); + }, + [findChannelByTvgId] + ); + + const saveSeriesRule = useCallback(async (program, mode) => { + await API.createSeriesRule({ + tvg_id: program.tvg_id, + mode, + title: program.title, }); - notifications.show({ title: 'Recording scheduled' }); - }; - - const saveSeriesRule = async (program, mode) => { - await 
API.createSeriesRule({ tvg_id: program.tvg_id, mode, title: program.title }); await API.evaluateSeriesRules(program.tvg_id); - // Refresh recordings so icons and DVR reflect new schedules try { await useChannelsStore.getState().fetchRecordings(); - } catch (e) { - console.warn('Failed to refresh recordings after saving series rule', e); + } catch (error) { + console.warn('Failed to refresh recordings after saving series rule', error); } - notifications.show({ title: mode === 'new' ? 'Record new episodes' : 'Record all episodes' }); - }; + notifications.show({ + title: mode === 'new' ? 'Record new episodes' : 'Record all episodes', + }); + }, []); - const openRules = async () => { + const openRules = useCallback(async () => { setRulesOpen(true); try { const r = await API.listSeriesRules(); setRules(r); - } catch (e) { - // handled by API + } catch (error) { + console.warn('Failed to fetch series rules', error); } - }; - - const deleteAllUpcoming = async () => { - const ok = window.confirm('Delete ALL upcoming recordings?'); - if (!ok) return; - await API.deleteAllUpcomingRecordings(); - try { await useChannelsStore.getState().fetchRecordings(); } catch {} - }; + }, []); // The “Watch Now” click => show floating video const showVideo = useVideoStore((s) => s.showVideo); - function handleWatchStream(program) { - const matched = findChannelByTvgId(program.tvg_id); - if (!matched) { - console.warn(`No channel found for tvg_id=${program.tvg_id}`); - return; - } - // Build a playable stream URL for that channel - let vidUrl = `/proxy/ts/stream/${matched.uuid}`; - if (env_mode == 'dev') { - vidUrl = `${window.location.protocol}//${window.location.hostname}:5656${vidUrl}`; - } + const handleWatchStream = useCallback( + (program) => { + const matched = findChannelByTvgId(program.tvg_id); + if (!matched) { + console.warn(`No channel found for tvg_id=${program.tvg_id}`); + return; + } + let vidUrl = `/proxy/ts/stream/${matched.uuid}`; + if (env_mode === 'dev') { + vidUrl = 
`${window.location.protocol}//${window.location.hostname}:5656${vidUrl}`; + } - showVideo(vidUrl); - } + showVideo(vidUrl); + }, + [env_mode, findChannelByTvgId, showVideo] + ); // Function to handle logo click to play channel - function handleLogoClick(channel, event) { - // Prevent event from bubbling up - event.stopPropagation(); + const handleLogoClick = useCallback( + (channel, event) => { + event.stopPropagation(); - // Build a playable stream URL for the channel - let vidUrl = `/proxy/ts/stream/${channel.uuid}`; - if (env_mode === 'dev') { - vidUrl = `${window.location.protocol}//${window.location.hostname}:5656${vidUrl}`; - } + let vidUrl = `/proxy/ts/stream/${channel.uuid}`; + if (env_mode === 'dev') { + vidUrl = `${window.location.protocol}//${window.location.hostname}:5656${vidUrl}`; + } - // Use the existing showVideo function - showVideo(vidUrl); - } + showVideo(vidUrl); + }, + [env_mode, showVideo] + ); // On program click, toggle the expanded state - function handleProgramClick(program, event) { - // Prevent event from bubbling up to parent elements - event.stopPropagation(); + const handleProgramClick = useCallback( + (program, event) => { + event.stopPropagation(); - // Get the program's start time and calculate its position - const programStart = dayjs(program.start_time); - const startOffsetMinutes = programStart.diff(start, 'minute'); - const leftPx = (startOffsetMinutes / MINUTE_INCREMENT) * MINUTE_BLOCK_WIDTH; + const programStart = dayjs(program.start_time); + const startOffsetMinutes = programStart.diff(start, 'minute'); + const leftPx = + (startOffsetMinutes / MINUTE_INCREMENT) * MINUTE_BLOCK_WIDTH; - // Calculate desired scroll position (account for channel column width) - const desiredScrollPosition = Math.max(0, leftPx - 20); // 20px buffer + const desiredScrollPosition = Math.max(0, leftPx - 20); - // If already expanded, collapse it - if (expandedProgramId === program.id) { - setExpandedProgramId(null); - setRecordingForProgram(null); 
- return; - } + if (expandedProgramId === program.id) { + setExpandedProgramId(null); + setRecordingForProgram(null); + return; + } - // Otherwise expand this program - setExpandedProgramId(program.id); + setExpandedProgramId(program.id); - // Check if this program has a recording - const programRecording = recordings.find((recording) => { - if (recording.custom_properties) { - const customProps = recording.custom_properties || {}; - if (customProps.program && customProps.program.id == program.id) { - return true; + const programRecording = recordingsByProgramId.get(program.id) || null; + setRecordingForProgram(programRecording); + + if (guideRef.current && timelineRef.current) { + const currentScrollPosition = guideRef.current.scrollLeft; + if ( + desiredScrollPosition < currentScrollPosition || + leftPx - currentScrollPosition < 100 + ) { + guideRef.current.scrollTo({ + left: desiredScrollPosition, + behavior: 'smooth', + }); + + timelineRef.current.scrollTo({ + left: desiredScrollPosition, + behavior: 'smooth', + }); } } - return false; - }); - - setRecordingForProgram(programRecording); - - // Scroll to show the start of the program if it's not already fully visible - if (guideRef.current && timelineRef.current) { - const currentScrollPosition = guideRef.current.scrollLeft; - - // Check if we need to scroll (if program start is before current view or too close to edge) - if ( - desiredScrollPosition < currentScrollPosition || - leftPx - currentScrollPosition < 100 - ) { - // 100px from left edge - - // Smooth scroll to the program's start - guideRef.current.scrollTo({ - left: desiredScrollPosition, - behavior: 'smooth', - }); - - // Also sync the timeline scroll - timelineRef.current.scrollTo({ - left: desiredScrollPosition, - behavior: 'smooth', - }); - } - } - } + }, + [expandedProgramId, guideRef, recordingsByProgramId, start, timelineRef] + ); // Close the expanded program when clicking elsewhere - const handleClickOutside = () => { + const 
handleClickOutside = useCallback(() => { if (expandedProgramId) { setExpandedProgramId(null); setRecordingForProgram(null); } - }; + }, [expandedProgramId]); // Function to scroll to current time - matches initial loading position - const scrollToNow = () => { + const scrollToNow = useCallback(() => { if (guideRef.current && timelineRef.current && nowPosition >= 0) { - // Round the current time to the nearest half-hour mark const roundedNow = now.minute() < 30 ? now.startOf('hour') @@ -466,60 +842,47 @@ export default function TVChannelGuide({ startDate, endDate }) { const scrollPos = Math.max(scrollPosition, 0); guideRef.current.scrollLeft = scrollPos; - timelineRef.current.scrollLeft = scrollPos; // Sync timeline scroll + timelineRef.current.scrollLeft = scrollPos; } - }; + }, [guideRef, now, nowPosition, start, timelineRef]); // Sync scrolling between timeline and main content - const handleTimelineScroll = () => { - if (timelineRef.current && guideRef.current) { - guideRef.current.scrollLeft = timelineRef.current.scrollLeft; - } - }; - - // Sync scrolling between main content and timeline - const handleGuideScroll = () => { - if (guideRef.current && timelineRef.current) { - timelineRef.current.scrollLeft = guideRef.current.scrollLeft; - } - }; + const handleTimelineScroll = useCallback(() => { + if (!timelineRef.current || !guideRef.current) return; + if (isSyncingScroll.current) return; + isSyncingScroll.current = true; + const target = timelineRef.current.scrollLeft; + guideRef.current.scrollLeft = target; + requestAnimationFrame(() => { + isSyncingScroll.current = false; + }); + }, [guideRef, timelineRef]); // Handle wheel events on the timeline for horizontal scrolling - const handleTimelineWheel = (e) => { - if (timelineRef.current) { - // Prevent the default vertical scroll - e.preventDefault(); - - // Determine scroll amount (with shift key for faster scrolling) - const scrollAmount = e.shiftKey ? 
250 : 125; - - // Scroll horizontally based on wheel direction + const handleTimelineWheel = useCallback( + (event) => { + if (!timelineRef.current) return; + event.preventDefault(); + const scrollAmount = event.shiftKey ? 250 : 125; timelineRef.current.scrollLeft += - e.deltaY > 0 ? scrollAmount : -scrollAmount; - - // Sync the main content scroll position - if (guideRef.current) { - guideRef.current.scrollLeft = timelineRef.current.scrollLeft; - } - } - }; + event.deltaY > 0 ? scrollAmount : -scrollAmount; + handleTimelineScroll(); + }, + [handleTimelineScroll] + ); // Function to handle timeline time clicks with 15-minute snapping - const handleTimeClick = (clickedTime, event) => { - if (timelineRef.current && guideRef.current) { - // Calculate where in the hour block the click happened + const handleTimeClick = useCallback( + (clickedTime, event) => { + if (!timelineRef.current || !guideRef.current) return; + const hourBlockElement = event.currentTarget; const rect = hourBlockElement.getBoundingClientRect(); - const clickPositionX = event.clientX - rect.left; // Position within the hour block - const percentageAcross = clickPositionX / rect.width; // 0 to 1 value + const clickPositionX = event.clientX - rect.left; + const percentageAcross = clickPositionX / rect.width; - // Calculate the minute within the hour based on click position const minuteWithinHour = Math.floor(percentageAcross * 60); - // Create a new time object with the calculated minute - const exactTime = clickedTime.minute(minuteWithinHour); - - // Determine the nearest 15-minute interval (0, 15, 30, 45) let snappedMinute; if (minuteWithinHour < 7.5) { snappedMinute = 0; @@ -530,109 +893,86 @@ export default function TVChannelGuide({ startDate, endDate }) { } else if (minuteWithinHour < 52.5) { snappedMinute = 45; } else { - // If we're past 52.5 minutes, snap to the next hour snappedMinute = 0; clickedTime = clickedTime.add(1, 'hour'); } - // Create the snapped time const snappedTime = 
clickedTime.minute(snappedMinute); - - // Calculate the offset from the start of the timeline to the snapped time const snappedOffset = snappedTime.diff(start, 'minute'); - - // Convert to pixels const scrollPosition = (snappedOffset / MINUTE_INCREMENT) * MINUTE_BLOCK_WIDTH; - // Scroll both containers to the snapped position timelineRef.current.scrollLeft = scrollPosition; guideRef.current.scrollLeft = scrollPosition; - } - }; + }, + [start] + ); + // Renders each program block - function renderProgram(program, channelStart) { - const programKey = `${program.tvg_id}-${program.start_time}`; - const programStart = dayjs(program.start_time); - const programEnd = dayjs(program.end_time); - const startOffsetMinutes = programStart.diff(channelStart, 'minute'); - const durationMinutes = programEnd.diff(programStart, 'minute'); - const leftPx = (startOffsetMinutes / MINUTE_INCREMENT) * MINUTE_BLOCK_WIDTH; + const renderProgram = useCallback( + (program, channelStart) => { + const programKey = `${program.tvg_id}-${program.start_time}`; + const programStart = dayjs(program.start_time); + const programEnd = dayjs(program.end_time); + const startOffsetMinutes = programStart.diff(channelStart, 'minute'); + const durationMinutes = programEnd.diff(programStart, 'minute'); + const leftPx = + (startOffsetMinutes / MINUTE_INCREMENT) * MINUTE_BLOCK_WIDTH; - // Calculate width with a small gap (2px on each side) - const gapSize = 2; - const widthPx = - (durationMinutes / MINUTE_INCREMENT) * MINUTE_BLOCK_WIDTH - gapSize * 2; + const gapSize = 2; + const widthPx = + (durationMinutes / MINUTE_INCREMENT) * MINUTE_BLOCK_WIDTH - + gapSize * 2; - // Check if we have a recording for this program - const recording = recordings.find((recording) => { - if (recording.custom_properties) { - const customProps = recording.custom_properties || {}; - if (customProps.program && customProps.program.id == program.id) { - return recording; - } + const recording = recordingsByProgramId.get(program.id); + 
+ const isLive = now.isAfter(programStart) && now.isBefore(programEnd); + const isPast = now.isAfter(programEnd); + const isExpanded = expandedProgramId === program.id; + + const rowHeight = isExpanded ? EXPANDED_PROGRAM_HEIGHT : PROGRAM_HEIGHT; + const MIN_EXPANDED_WIDTH = 450; + const expandedWidthPx = Math.max(widthPx, MIN_EXPANDED_WIDTH); + + const currentScrollLeft = guideRef.current?.scrollLeft || 0; + const programStartInView = leftPx + gapSize; + const programEndInView = leftPx + gapSize + widthPx; + const viewportLeft = currentScrollLeft; + + const startsBeforeView = programStartInView < viewportLeft; + const extendsIntoView = programEndInView > viewportLeft; + + let textOffsetLeft = 0; + if (startsBeforeView && extendsIntoView) { + const visibleStart = Math.max(viewportLeft - programStartInView, 0); + const maxOffset = widthPx - 200; + textOffsetLeft = Math.min(visibleStart, maxOffset); } - return null; - }); - // Highlight if currently live - const isLive = now.isAfter(programStart) && now.isBefore(programEnd); - - // Determine if the program has ended - const isPast = now.isAfter(programEnd); // Check if this program is expanded - const isExpanded = expandedProgramId === program.id; - - // Set the height based on expanded state - const rowHeight = isExpanded ? 
EXPANDED_PROGRAM_HEIGHT : PROGRAM_HEIGHT; - - // Determine expanded width - if program is short, ensure it has a minimum expanded width - // This will allow it to overlap programs to the right - const MIN_EXPANDED_WIDTH = 450; // Minimum width in pixels when expanded - const expandedWidthPx = Math.max(widthPx, MIN_EXPANDED_WIDTH); - - // Calculate text positioning for long programs that start before the visible area - const currentScrollLeft = guideRef.current?.scrollLeft || 0; - const programStartInView = leftPx + gapSize; - const programEndInView = leftPx + gapSize + widthPx; - const viewportLeft = currentScrollLeft; - - // Check if program starts before viewport but extends into it - const startsBeforeView = programStartInView < viewportLeft; - const extendsIntoView = programEndInView > viewportLeft; - - // Calculate text offset to position it at the visible portion - let textOffsetLeft = 0; - if (startsBeforeView && extendsIntoView) { - // Position text at the start of the visible area, but not beyond the program end - const visibleStart = Math.max(viewportLeft - programStartInView, 0); - const maxOffset = widthPx - 200; // Leave some space for text, don't push to very end - textOffsetLeft = Math.min(visibleStart, maxOffset); - } - - return ( - handleProgramClick(program, e)} - > - handleProgramClick(program, event)} + > + - {programStart.format(timeFormat)} -{' '} - {programEnd.format(timeFormat)} + {programStart.format(timeFormat)} - {programEnd.format(timeFormat)}
- {' '} - {/* Description is always shown but expands when row is expanded */} + {program.description && ( )} - {/* Expanded content */} {isExpanded && ( - {/* Always show Record for not-past; it opens options (schedule/remove) */} {!isPast && ( - - )} - + {GuideRow} + + + ) : ( + + No channels match your filters + + + )} @@ -1388,22 +1553,54 @@ export default function TVChannelGuide({ startDate, endDate }) { {recordingForProgram && ( <> )} {existingRuleMode && ( - + )} @@ -1435,13 +1632,25 @@ export default function TVChannelGuide({ startDate, endDate }) { From 00b8119b811a39fa448445c71af3ec2fbc8105db Mon Sep 17 00:00:00 2001 From: Jim McBride Date: Sun, 21 Sep 2025 01:25:29 -0500 Subject: [PATCH 033/119] Revert "Virtualize TV guide rendering" This reverts commit db024130be7eb7d07bc5f74cf8127128f2ce8438. --- frontend/src/pages/Guide.jsx | 1331 ++++++++++++++-------------------- 1 file changed, 561 insertions(+), 770 deletions(-) diff --git a/frontend/src/pages/Guide.jsx b/frontend/src/pages/Guide.jsx index 644bc6ea..7671fb57 100644 --- a/frontend/src/pages/Guide.jsx +++ b/frontend/src/pages/Guide.jsx @@ -1,12 +1,5 @@ // frontend/src/pages/Guide.js -import React, { - useMemo, - useState, - useEffect, - useRef, - useCallback, - useContext, -} from 'react'; +import React, { useMemo, useState, useEffect, useRef } from 'react'; import dayjs from 'dayjs'; import API from '../api'; import useChannelsStore from '../store/channels'; @@ -30,13 +23,12 @@ import { Transition, Modal, Stack, + useMantineTheme, } from '@mantine/core'; import { Search, X, Clock, Video, Calendar, Play } from 'lucide-react'; import './guide.css'; import useEPGsStore from '../store/epgs'; import useLocalStorage from '../hooks/useLocalStorage'; -import { useElementSize } from '@mantine/hooks'; -import { VariableSizeList } from 'react-window'; /** Layout constants */ const CHANNEL_WIDTH = 120; // Width of the channel/logo column @@ -46,243 +38,8 @@ const HOUR_WIDTH = 450; // Increased from 300 to 450 
to make each program wider const MINUTE_INCREMENT = 15; // For positioning programs every 15 min const MINUTE_BLOCK_WIDTH = HOUR_WIDTH / (60 / MINUTE_INCREMENT); -const GuideVirtualizedContext = React.createContext({ - contentWidth: CHANNEL_WIDTH, - nowPosition: -1, -}); - -const GuideInnerElement = React.forwardRef(function GuideInnerElement( - { style, children, ...rest }, - ref -) { - const { contentWidth, nowPosition } = useContext(GuideVirtualizedContext); - - return ( -
- {nowPosition >= 0 && ( - - )} - {children} -
- ); -}); -GuideInnerElement.displayName = 'GuideInnerElement'; - -const GuideRow = React.memo(function GuideRow({ index, style, data }) { - const { - filteredChannels, - programsByChannelId, - expandedProgramId, - rowHeights, - logos, - hoveredChannelId, - setHoveredChannelId, - renderProgram, - hourTimeline, - handleLogoClick, - contentWidth, - start, - } = data; - - const channel = filteredChannels[index]; - if (!channel) { - return null; - } - - const channelPrograms = programsByChannelId.get(channel.id) || []; - const hasExpandedProgram = channelPrograms.some( - (program) => program.id === expandedProgramId - ); - const rowHeight = - rowHeights[index] ?? - (hasExpandedProgram ? EXPANDED_PROGRAM_HEIGHT : PROGRAM_HEIGHT); - - return ( -
- - handleLogoClick(channel, event)} - onMouseEnter={() => setHoveredChannelId(channel.id)} - onMouseLeave={() => setHoveredChannelId(null)} - > - {hoveredChannelId === channel.id && ( - - - - )} - - - - {channel.name} - - - - {channel.channel_number || '-'} - - - - - - {channelPrograms.length > 0 ? ( - channelPrograms.map((program) => ( -
- {renderProgram(program, start)} -
- )) - ) : ( - <> - {Array.from({ length: Math.ceil(hourTimeline.length / 2) }).map( - (_, placeholderIndex) => ( - - No program data - - ) - )} - - )} -
-
-
- ); -}); -GuideRow.displayName = 'GuideRow'; - export default function TVChannelGuide({ startDate, endDate }) { + const theme = useMantineTheme(); const channels = useChannelsStore((s) => s.channels); const recordings = useChannelsStore((s) => s.recordings); const channelGroups = useChannelsStore((s) => s.channelGroups); @@ -302,6 +59,7 @@ export default function TVChannelGuide({ startDate, endDate }) { const [existingRuleMode, setExistingRuleMode] = useState(null); const [rulesOpen, setRulesOpen] = useState(false); const [rules, setRules] = useState([]); + const [loading, setLoading] = useState(true); const [initialScrollComplete, setInitialScrollComplete] = useState(false); // New filter states @@ -313,13 +71,6 @@ export default function TVChannelGuide({ startDate, endDate }) { const guideRef = useRef(null); const timelineRef = useRef(null); // New ref for timeline scrolling - const listRef = useRef(null); - const isSyncingScroll = useRef(false); - const { - ref: guideContainerRef, - width: guideWidth, - height: guideHeight, - } = useElementSize(); // Add new state to track hovered logo const [hoveredChannelId, setHoveredChannelId] = useState(null); @@ -329,14 +80,14 @@ export default function TVChannelGuide({ startDate, endDate }) { if (!Object.keys(channels).length === 0) { console.warn('No channels provided or empty channels array'); notifications.show({ title: 'No channels available', color: 'red.5' }); + setLoading(false); return; } const fetchPrograms = async () => { console.log('Fetching program grid...'); const fetched = await API.getGrid(); // GETs your EPG grid - const receivedCount = Array.isArray(fetched) ? 
fetched.length : 0; - console.log(`Received ${receivedCount} programs`); + console.log(`Received ${fetched.length} programs`); // Include ALL channels, sorted by channel number - don't filter by EPG data const sortedChannels = Object.values(channels).sort( @@ -346,21 +97,10 @@ export default function TVChannelGuide({ startDate, endDate }) { console.log(`Using all ${sortedChannels.length} available channels`); - const processedPrograms = (Array.isArray(fetched) ? fetched : []).map( - (program) => { - const start = dayjs(program.start_time); - const end = dayjs(program.end_time); - return { - ...program, - startMs: start.valueOf(), - endMs: end.valueOf(), - }; - } - ); - setGuideChannels(sortedChannels); setFilteredChannels(sortedChannels); // Initialize filtered channels - setPrograms(processedPrograms); + setPrograms(fetched); + setLoading(false); }; fetchPrograms(); @@ -412,84 +152,6 @@ export default function TVChannelGuide({ startDate, endDate }) { profiles, ]); - const channelById = useMemo(() => { - return guideChannels.reduce((acc, channel) => { - acc[channel.id] = channel; - return acc; - }, {}); - }, [guideChannels]); - - const channelIdByTvgId = useMemo(() => { - const map = new Map(); - guideChannels.forEach((channel) => { - const tvgRecord = channel.epg_data_id - ? tvgsById[channel.epg_data_id] - : null; - const tvgId = tvgRecord?.tvg_id ?? 
channel.uuid; - if (tvgId) { - map.set(String(tvgId), channel.id); - } - }); - return map; - }, [guideChannels, tvgsById]); - - const programsByChannelId = useMemo(() => { - if (!programs.length) return new Map(); - - const map = new Map(); - programs.forEach((program) => { - const channelId = channelIdByTvgId.get(String(program.tvg_id)); - if (!channelId) return; - if (!map.has(channelId)) { - map.set(channelId, []); - } - map.get(channelId).push(program); - }); - - map.forEach((list) => - list.sort((a, b) => (a.startMs || 0) - (b.startMs || 0)) - ); - - return map; - }, [programs, channelIdByTvgId]); - - const recordingsByProgramId = useMemo(() => { - const map = new Map(); - (recordings || []).forEach((recording) => { - const programId = recording?.custom_properties?.program?.id; - if (programId != null) { - map.set(programId, recording); - } - }); - return map; - }, [recordings]); - - const rowHeights = useMemo(() => { - if (!filteredChannels.length) return []; - return filteredChannels.map((channel) => { - const channelPrograms = programsByChannelId.get(channel.id) || []; - const hasExpandedProgram = channelPrograms.some( - (program) => program.id === expandedProgramId - ); - return hasExpandedProgram ? EXPANDED_PROGRAM_HEIGHT : PROGRAM_HEIGHT; - }); - }, [filteredChannels, programsByChannelId, expandedProgramId]); - - const getItemSize = useCallback( - (index) => rowHeights[index] ?? PROGRAM_HEIGHT, - [rowHeights] - ); - - useEffect(() => { - if (!listRef.current) return; - listRef.current.resetAfterIndex(0, true); - }, [rowHeights]); - - useEffect(() => { - if (!listRef.current) return; - listRef.current.scrollToItem(0); - }, [searchQuery, selectedGroupId, selectedProfileId]); - // Use start/end from props or default to "today at midnight" +24h const defaultStart = dayjs(startDate || dayjs().startOf('day')); const defaultEnd = endDate ? 
dayjs(endDate) : defaultStart.add(24, 'hour'); @@ -518,10 +180,22 @@ export default function TVChannelGuide({ startDate, endDate }) { ? latestProgramEnd : defaultEnd; + // Time increments in 15-min steps (for placing programs) + const programTimeline = useMemo(() => { + const times = []; + let current = start; + while (current.isBefore(end)) { + times.push(current); + current = current.add(MINUTE_INCREMENT, 'minute'); + } + return times; + }, [start, end]); + // Format day label using relative terms when possible (Today, Tomorrow, etc) - const formatDayLabel = useCallback((time) => { + const formatDayLabel = (time) => { const today = dayjs().startOf('day'); const tomorrow = today.add(1, 'day'); + const dayAfterTomorrow = today.add(2, 'day'); const weekLater = today.add(7, 'day'); const day = time.startOf('day'); @@ -537,7 +211,7 @@ export default function TVChannelGuide({ startDate, endDate }) { // Beyond a week, show month and day return time.format(dateFormat); } - }, [dateFormat]); + }; // Hourly marks with day labels const hourTimeline = useMemo(() => { @@ -564,7 +238,7 @@ export default function TVChannelGuide({ startDate, endDate }) { current = current.add(1, 'hour'); } return hours; - }, [start, end, formatDayLabel]); + }, [start, end]); // Scroll to the nearest half-hour mark ONLY on initial load useEffect(() => { @@ -608,229 +282,179 @@ export default function TVChannelGuide({ startDate, endDate }) { return (minutesSinceStart / MINUTE_INCREMENT) * MINUTE_BLOCK_WIDTH; }, [now, start, end]); - const contentWidth = useMemo( - () => hourTimeline.length * HOUR_WIDTH + CHANNEL_WIDTH, - [hourTimeline.length] - ); - - const virtualizedHeight = useMemo( - () => guideHeight || 600, - [guideHeight] - ); - - const virtualizedWidth = useMemo(() => { - if (guideWidth) return guideWidth; - if (typeof window !== 'undefined') { - return window.innerWidth; - } - return Math.max(contentWidth, 1200); - }, [guideWidth, contentWidth]); - - const itemKey = useCallback( - (index) 
=> filteredChannels[index]?.id ?? index, - [filteredChannels] - ); - - useEffect(() => { - const guideEl = guideRef.current; - if (!guideEl) return; - - let frame; - - const syncScroll = () => { - if (!timelineRef.current) return; - if (isSyncingScroll.current) return; - isSyncingScroll.current = true; - const scrollLeft = guideEl.scrollLeft; - frame = requestAnimationFrame(() => { - if (timelineRef.current) { - timelineRef.current.scrollLeft = scrollLeft; - } - isSyncingScroll.current = false; - }); - }; - - guideEl.addEventListener('scroll', syncScroll); - - return () => { - guideEl.removeEventListener('scroll', syncScroll); - if (frame) cancelAnimationFrame(frame); - isSyncingScroll.current = false; - }; - }, [guideWidth, guideRef, timelineRef]); - // Helper: find channel by tvg_id - const findChannelByTvgId = useCallback( - (tvgId) => { - const channelId = channelIdByTvgId.get(String(tvgId)); - return channelId ? channelById[channelId] : undefined; - }, - [channelById, channelIdByTvgId] - ); + function findChannelByTvgId(tvgId) { + return guideChannels.find( + (ch) => + tvgsById[ch.epg_data_id]?.tvg_id === tvgId || + (!ch.epg_data_id && ch.uuid === tvgId) + ); + } - const openRecordChoice = useCallback( - async (program) => { - setRecordChoiceProgram(program); - setRecordChoiceOpen(true); - try { - const rules = await API.listSeriesRules(); - const rule = (rules || []).find( - (r) => - String(r.tvg_id) === String(program.tvg_id) && - (!r.title || r.title === program.title) - ); - setExistingRuleMode(rule ? 
rule.mode : null); - } catch (error) { - console.warn('Failed to load series rules', error); - } - try { - const rec = recordingsByProgramId.get(program.id); - setRecordingForProgram(rec || null); - } catch (error) { - console.warn('Failed to resolve program recording', error); - } - }, - [recordingsByProgramId] - ); + const openRecordChoice = async (program) => { + setRecordChoiceProgram(program); + setRecordChoiceOpen(true); + try { + const rules = await API.listSeriesRules(); + // Only treat as existing if the rule matches this specific show's title (or has no title constraint) + const rule = (rules || []).find( + (r) => String(r.tvg_id) === String(program.tvg_id) && (!r.title || r.title === program.title) + ); + setExistingRuleMode(rule ? rule.mode : null); + } catch {} + // Also detect if this program already has a scheduled recording + try { + const rec = (recordings || []).find((r) => r?.custom_properties?.program?.id == program.id); + setRecordingForProgram(rec || null); + } catch {} + }; - const recordOne = useCallback( - async (program) => { - const channel = findChannelByTvgId(program.tvg_id); - if (!channel) { - notifications.show({ - title: 'Channel not found', - message: 'Unable to schedule recording for this program.', - color: 'red', - }); - return; - } - await API.createRecording({ - channel: `${channel.id}`, - start_time: program.start_time, - end_time: program.end_time, - custom_properties: { program }, - }); - notifications.show({ title: 'Recording scheduled' }); - }, - [findChannelByTvgId] - ); - - const saveSeriesRule = useCallback(async (program, mode) => { - await API.createSeriesRule({ - tvg_id: program.tvg_id, - mode, - title: program.title, + const recordOne = async (program) => { + const channel = findChannelByTvgId(program.tvg_id); + await API.createRecording({ + channel: `${channel.id}`, + start_time: program.start_time, + end_time: program.end_time, + custom_properties: { program }, }); + notifications.show({ title: 'Recording 
scheduled' }); + }; + + const saveSeriesRule = async (program, mode) => { + await API.createSeriesRule({ tvg_id: program.tvg_id, mode, title: program.title }); await API.evaluateSeriesRules(program.tvg_id); + // Refresh recordings so icons and DVR reflect new schedules try { await useChannelsStore.getState().fetchRecordings(); - } catch (error) { - console.warn('Failed to refresh recordings after saving series rule', error); + } catch (e) { + console.warn('Failed to refresh recordings after saving series rule', e); } - notifications.show({ - title: mode === 'new' ? 'Record new episodes' : 'Record all episodes', - }); - }, []); + notifications.show({ title: mode === 'new' ? 'Record new episodes' : 'Record all episodes' }); + }; - const openRules = useCallback(async () => { + const openRules = async () => { setRulesOpen(true); try { const r = await API.listSeriesRules(); setRules(r); - } catch (error) { - console.warn('Failed to fetch series rules', error); + } catch (e) { + // handled by API } - }, []); + }; + + const deleteAllUpcoming = async () => { + const ok = window.confirm('Delete ALL upcoming recordings?'); + if (!ok) return; + await API.deleteAllUpcomingRecordings(); + try { await useChannelsStore.getState().fetchRecordings(); } catch {} + }; // The “Watch Now” click => show floating video const showVideo = useVideoStore((s) => s.showVideo); - const handleWatchStream = useCallback( - (program) => { - const matched = findChannelByTvgId(program.tvg_id); - if (!matched) { - console.warn(`No channel found for tvg_id=${program.tvg_id}`); - return; - } - let vidUrl = `/proxy/ts/stream/${matched.uuid}`; - if (env_mode === 'dev') { - vidUrl = `${window.location.protocol}//${window.location.hostname}:5656${vidUrl}`; - } + function handleWatchStream(program) { + const matched = findChannelByTvgId(program.tvg_id); + if (!matched) { + console.warn(`No channel found for tvg_id=${program.tvg_id}`); + return; + } + // Build a playable stream URL for that channel + let 
vidUrl = `/proxy/ts/stream/${matched.uuid}`; + if (env_mode == 'dev') { + vidUrl = `${window.location.protocol}//${window.location.hostname}:5656${vidUrl}`; + } - showVideo(vidUrl); - }, - [env_mode, findChannelByTvgId, showVideo] - ); + showVideo(vidUrl); + } // Function to handle logo click to play channel - const handleLogoClick = useCallback( - (channel, event) => { - event.stopPropagation(); + function handleLogoClick(channel, event) { + // Prevent event from bubbling up + event.stopPropagation(); - let vidUrl = `/proxy/ts/stream/${channel.uuid}`; - if (env_mode === 'dev') { - vidUrl = `${window.location.protocol}//${window.location.hostname}:5656${vidUrl}`; - } + // Build a playable stream URL for the channel + let vidUrl = `/proxy/ts/stream/${channel.uuid}`; + if (env_mode === 'dev') { + vidUrl = `${window.location.protocol}//${window.location.hostname}:5656${vidUrl}`; + } - showVideo(vidUrl); - }, - [env_mode, showVideo] - ); + // Use the existing showVideo function + showVideo(vidUrl); + } // On program click, toggle the expanded state - const handleProgramClick = useCallback( - (program, event) => { - event.stopPropagation(); + function handleProgramClick(program, event) { + // Prevent event from bubbling up to parent elements + event.stopPropagation(); - const programStart = dayjs(program.start_time); - const startOffsetMinutes = programStart.diff(start, 'minute'); - const leftPx = - (startOffsetMinutes / MINUTE_INCREMENT) * MINUTE_BLOCK_WIDTH; + // Get the program's start time and calculate its position + const programStart = dayjs(program.start_time); + const startOffsetMinutes = programStart.diff(start, 'minute'); + const leftPx = (startOffsetMinutes / MINUTE_INCREMENT) * MINUTE_BLOCK_WIDTH; - const desiredScrollPosition = Math.max(0, leftPx - 20); + // Calculate desired scroll position (account for channel column width) + const desiredScrollPosition = Math.max(0, leftPx - 20); // 20px buffer - if (expandedProgramId === program.id) { - 
setExpandedProgramId(null); - setRecordingForProgram(null); - return; - } + // If already expanded, collapse it + if (expandedProgramId === program.id) { + setExpandedProgramId(null); + setRecordingForProgram(null); + return; + } - setExpandedProgramId(program.id); + // Otherwise expand this program + setExpandedProgramId(program.id); - const programRecording = recordingsByProgramId.get(program.id) || null; - setRecordingForProgram(programRecording); - - if (guideRef.current && timelineRef.current) { - const currentScrollPosition = guideRef.current.scrollLeft; - if ( - desiredScrollPosition < currentScrollPosition || - leftPx - currentScrollPosition < 100 - ) { - guideRef.current.scrollTo({ - left: desiredScrollPosition, - behavior: 'smooth', - }); - - timelineRef.current.scrollTo({ - left: desiredScrollPosition, - behavior: 'smooth', - }); + // Check if this program has a recording + const programRecording = recordings.find((recording) => { + if (recording.custom_properties) { + const customProps = recording.custom_properties || {}; + if (customProps.program && customProps.program.id == program.id) { + return true; } } - }, - [expandedProgramId, guideRef, recordingsByProgramId, start, timelineRef] - ); + return false; + }); + + setRecordingForProgram(programRecording); + + // Scroll to show the start of the program if it's not already fully visible + if (guideRef.current && timelineRef.current) { + const currentScrollPosition = guideRef.current.scrollLeft; + + // Check if we need to scroll (if program start is before current view or too close to edge) + if ( + desiredScrollPosition < currentScrollPosition || + leftPx - currentScrollPosition < 100 + ) { + // 100px from left edge + + // Smooth scroll to the program's start + guideRef.current.scrollTo({ + left: desiredScrollPosition, + behavior: 'smooth', + }); + + // Also sync the timeline scroll + timelineRef.current.scrollTo({ + left: desiredScrollPosition, + behavior: 'smooth', + }); + } + } + } // Close the 
expanded program when clicking elsewhere - const handleClickOutside = useCallback(() => { + const handleClickOutside = () => { if (expandedProgramId) { setExpandedProgramId(null); setRecordingForProgram(null); } - }, [expandedProgramId]); + }; // Function to scroll to current time - matches initial loading position - const scrollToNow = useCallback(() => { + const scrollToNow = () => { if (guideRef.current && timelineRef.current && nowPosition >= 0) { + // Round the current time to the nearest half-hour mark const roundedNow = now.minute() < 30 ? now.startOf('hour') @@ -842,47 +466,60 @@ export default function TVChannelGuide({ startDate, endDate }) { const scrollPos = Math.max(scrollPosition, 0); guideRef.current.scrollLeft = scrollPos; - timelineRef.current.scrollLeft = scrollPos; + timelineRef.current.scrollLeft = scrollPos; // Sync timeline scroll } - }, [guideRef, now, nowPosition, start, timelineRef]); + }; // Sync scrolling between timeline and main content - const handleTimelineScroll = useCallback(() => { - if (!timelineRef.current || !guideRef.current) return; - if (isSyncingScroll.current) return; - isSyncingScroll.current = true; - const target = timelineRef.current.scrollLeft; - guideRef.current.scrollLeft = target; - requestAnimationFrame(() => { - isSyncingScroll.current = false; - }); - }, [guideRef, timelineRef]); + const handleTimelineScroll = () => { + if (timelineRef.current && guideRef.current) { + guideRef.current.scrollLeft = timelineRef.current.scrollLeft; + } + }; + + // Sync scrolling between main content and timeline + const handleGuideScroll = () => { + if (guideRef.current && timelineRef.current) { + timelineRef.current.scrollLeft = guideRef.current.scrollLeft; + } + }; // Handle wheel events on the timeline for horizontal scrolling - const handleTimelineWheel = useCallback( - (event) => { - if (!timelineRef.current) return; - event.preventDefault(); - const scrollAmount = event.shiftKey ? 
250 : 125; + const handleTimelineWheel = (e) => { + if (timelineRef.current) { + // Prevent the default vertical scroll + e.preventDefault(); + + // Determine scroll amount (with shift key for faster scrolling) + const scrollAmount = e.shiftKey ? 250 : 125; + + // Scroll horizontally based on wheel direction timelineRef.current.scrollLeft += - event.deltaY > 0 ? scrollAmount : -scrollAmount; - handleTimelineScroll(); - }, - [handleTimelineScroll] - ); + e.deltaY > 0 ? scrollAmount : -scrollAmount; + + // Sync the main content scroll position + if (guideRef.current) { + guideRef.current.scrollLeft = timelineRef.current.scrollLeft; + } + } + }; // Function to handle timeline time clicks with 15-minute snapping - const handleTimeClick = useCallback( - (clickedTime, event) => { - if (!timelineRef.current || !guideRef.current) return; - + const handleTimeClick = (clickedTime, event) => { + if (timelineRef.current && guideRef.current) { + // Calculate where in the hour block the click happened const hourBlockElement = event.currentTarget; const rect = hourBlockElement.getBoundingClientRect(); - const clickPositionX = event.clientX - rect.left; - const percentageAcross = clickPositionX / rect.width; + const clickPositionX = event.clientX - rect.left; // Position within the hour block + const percentageAcross = clickPositionX / rect.width; // 0 to 1 value + // Calculate the minute within the hour based on click position const minuteWithinHour = Math.floor(percentageAcross * 60); + // Create a new time object with the calculated minute + const exactTime = clickedTime.minute(minuteWithinHour); + + // Determine the nearest 15-minute interval (0, 15, 30, 45) let snappedMinute; if (minuteWithinHour < 7.5) { snappedMinute = 0; @@ -893,86 +530,109 @@ export default function TVChannelGuide({ startDate, endDate }) { } else if (minuteWithinHour < 52.5) { snappedMinute = 45; } else { + // If we're past 52.5 minutes, snap to the next hour snappedMinute = 0; clickedTime = 
clickedTime.add(1, 'hour'); } + // Create the snapped time const snappedTime = clickedTime.minute(snappedMinute); + + // Calculate the offset from the start of the timeline to the snapped time const snappedOffset = snappedTime.diff(start, 'minute'); + + // Convert to pixels const scrollPosition = (snappedOffset / MINUTE_INCREMENT) * MINUTE_BLOCK_WIDTH; + // Scroll both containers to the snapped position timelineRef.current.scrollLeft = scrollPosition; guideRef.current.scrollLeft = scrollPosition; - }, - [start] - ); - + } + }; // Renders each program block - const renderProgram = useCallback( - (program, channelStart) => { - const programKey = `${program.tvg_id}-${program.start_time}`; - const programStart = dayjs(program.start_time); - const programEnd = dayjs(program.end_time); - const startOffsetMinutes = programStart.diff(channelStart, 'minute'); - const durationMinutes = programEnd.diff(programStart, 'minute'); - const leftPx = - (startOffsetMinutes / MINUTE_INCREMENT) * MINUTE_BLOCK_WIDTH; + function renderProgram(program, channelStart) { + const programKey = `${program.tvg_id}-${program.start_time}`; + const programStart = dayjs(program.start_time); + const programEnd = dayjs(program.end_time); + const startOffsetMinutes = programStart.diff(channelStart, 'minute'); + const durationMinutes = programEnd.diff(programStart, 'minute'); + const leftPx = (startOffsetMinutes / MINUTE_INCREMENT) * MINUTE_BLOCK_WIDTH; - const gapSize = 2; - const widthPx = - (durationMinutes / MINUTE_INCREMENT) * MINUTE_BLOCK_WIDTH - - gapSize * 2; + // Calculate width with a small gap (2px on each side) + const gapSize = 2; + const widthPx = + (durationMinutes / MINUTE_INCREMENT) * MINUTE_BLOCK_WIDTH - gapSize * 2; - const recording = recordingsByProgramId.get(program.id); - - const isLive = now.isAfter(programStart) && now.isBefore(programEnd); - const isPast = now.isAfter(programEnd); - const isExpanded = expandedProgramId === program.id; - - const rowHeight = isExpanded ? 
EXPANDED_PROGRAM_HEIGHT : PROGRAM_HEIGHT; - const MIN_EXPANDED_WIDTH = 450; - const expandedWidthPx = Math.max(widthPx, MIN_EXPANDED_WIDTH); - - const currentScrollLeft = guideRef.current?.scrollLeft || 0; - const programStartInView = leftPx + gapSize; - const programEndInView = leftPx + gapSize + widthPx; - const viewportLeft = currentScrollLeft; - - const startsBeforeView = programStartInView < viewportLeft; - const extendsIntoView = programEndInView > viewportLeft; - - let textOffsetLeft = 0; - if (startsBeforeView && extendsIntoView) { - const visibleStart = Math.max(viewportLeft - programStartInView, 0); - const maxOffset = widthPx - 200; - textOffsetLeft = Math.min(visibleStart, maxOffset); + // Check if we have a recording for this program + const recording = recordings.find((recording) => { + if (recording.custom_properties) { + const customProps = recording.custom_properties || {}; + if (customProps.program && customProps.program.id == program.id) { + return recording; + } } + return null; + }); - return ( - viewportLeft; + + // Calculate text offset to position it at the visible portion + let textOffsetLeft = 0; + if (startsBeforeView && extendsIntoView) { + // Position text at the start of the visible area, but not beyond the program end + const visibleStart = Math.max(viewportLeft - programStartInView, 0); + const maxOffset = widthPx - 200; // Leave some space for text, don't push to very end + textOffsetLeft = Math.min(visibleStart, maxOffset); + } + + return ( + handleProgramClick(program, e)} + > + handleProgramClick(program, event)} - > - - {programStart.format(timeFormat)} - {programEnd.format(timeFormat)} + {programStart.format(timeFormat)} -{' '} + {programEnd.format(timeFormat)}
- + {' '} + {/* Description is always shown but expands when row is expanded */} {program.description && ( )} + {/* Expanded content */} {isExpanded && ( + {/* Always show Record for not-past; it opens options (schedule/remove) */} {!isPast && ( - - )} + No channels match your filters + + + )} + @@ -1553,54 +1388,22 @@ export default function TVChannelGuide({ startDate, endDate }) { {recordingForProgram && ( <> )} {existingRuleMode && ( - + )} @@ -1632,25 +1435,13 @@ export default function TVChannelGuide({ startDate, endDate }) { From 323f1d5c051972451768d9251f04fdde94e83092 Mon Sep 17 00:00:00 2001 From: Jim McBride Date: Sun, 21 Sep 2025 09:38:55 -0500 Subject: [PATCH 034/119] Add TV guide utility tests and vitest setup --- frontend/package-lock.json | 1314 ++++++++++++- frontend/package.json | 11 +- frontend/src/pages/Guide.jsx | 1650 +++++++++-------- .../src/pages/__tests__/guideUtils.test.js | 100 + frontend/src/pages/guideUtils.js | 71 + frontend/src/test/setupTests.js | 42 + frontend/vite.config.js | 6 + 7 files changed, 2443 insertions(+), 751 deletions(-) create mode 100644 frontend/src/pages/__tests__/guideUtils.test.js create mode 100644 frontend/src/pages/guideUtils.js create mode 100644 frontend/src/test/setupTests.js diff --git a/frontend/package-lock.json b/frontend/package-lock.json index 5325ff6c..20f95839 100644 --- a/frontend/package-lock.json +++ b/frontend/package-lock.json @@ -41,6 +41,9 @@ }, "devDependencies": { "@eslint/js": "^9.21.0", + "@testing-library/jest-dom": "^6.8.0", + "@testing-library/react": "^16.3.0", + "@testing-library/user-event": "^14.6.1", "@types/react": "^19.0.10", "@types/react-dom": "^19.0.4", "@vitejs/plugin-react-swc": "^3.8.0", @@ -48,10 +51,53 @@ "eslint-plugin-react-hooks": "^5.1.0", "eslint-plugin-react-refresh": "^0.4.19", "globals": "^15.15.0", + "jsdom": "^27.0.0", "prettier": "^3.5.3", - "vite": "^6.2.0" + "vite": "^6.2.0", + "vitest": "^3.2.4" } }, + "node_modules/@adobe/css-tools": { + "version": "4.4.4", 
+ "resolved": "https://registry.npmjs.org/@adobe/css-tools/-/css-tools-4.4.4.tgz", + "integrity": "sha512-Elp+iwUx5rN5+Y8xLt5/GRoG20WGoDCQ/1Fb+1LiGtvwbDavuSk0jhD/eZdckHAuzcDzccnkv+rEjyWfRx18gg==", + "dev": true, + "license": "MIT" + }, + "node_modules/@asamuzakjp/css-color": { + "version": "4.0.4", + "resolved": "https://registry.npmjs.org/@asamuzakjp/css-color/-/css-color-4.0.4.tgz", + "integrity": "sha512-cKjSKvWGmAziQWbCouOsFwb14mp1betm8Y7Fn+yglDMUUu3r9DCbJ9iJbeFDenLMqFbIMC0pQP8K+B8LAxX3OQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@csstools/css-calc": "^2.1.4", + "@csstools/css-color-parser": "^3.0.10", + "@csstools/css-parser-algorithms": "^3.0.5", + "@csstools/css-tokenizer": "^3.0.4", + "lru-cache": "^11.1.0" + } + }, + "node_modules/@asamuzakjp/dom-selector": { + "version": "6.5.5", + "resolved": "https://registry.npmjs.org/@asamuzakjp/dom-selector/-/dom-selector-6.5.5.tgz", + "integrity": "sha512-kI2MX9pmImjxWT8nxDZY+MuN6r1jJGe7WxizEbsAEPB/zxfW5wYLIiPG1v3UKgEOOP8EsDkp0ZL99oRFAdPM8g==", + "dev": true, + "license": "MIT", + "dependencies": { + "@asamuzakjp/nwsapi": "^2.3.9", + "bidi-js": "^1.0.3", + "css-tree": "^3.1.0", + "is-potential-custom-element-name": "^1.0.1" + } + }, + "node_modules/@asamuzakjp/nwsapi": { + "version": "2.3.9", + "resolved": "https://registry.npmjs.org/@asamuzakjp/nwsapi/-/nwsapi-2.3.9.tgz", + "integrity": "sha512-n8GuYSrI9bF7FFZ/SjhwevlHc8xaVlb/7HmHelnc/PZXBD2ZR49NnN9sMMuDdEGPeeRQ5d0hqlSlEpgCX3Wl0Q==", + "dev": true, + "license": "MIT" + }, "node_modules/@babel/code-frame": { "version": "7.26.2", "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.26.2.tgz", @@ -194,6 +240,144 @@ "node": ">=6.9.0" } }, + "node_modules/@csstools/color-helpers": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/@csstools/color-helpers/-/color-helpers-5.1.0.tgz", + "integrity": "sha512-S11EXWJyy0Mz5SYvRmY8nJYTFFd1LCNV+7cXyAgQtOOuzb4EsgfqDufL+9esx72/eLhsRdGZwaldu/h+E4t4BA==", + "dev": true, + 
"funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/csstools" + }, + { + "type": "opencollective", + "url": "https://opencollective.com/csstools" + } + ], + "license": "MIT-0", + "engines": { + "node": ">=18" + } + }, + "node_modules/@csstools/css-calc": { + "version": "2.1.4", + "resolved": "https://registry.npmjs.org/@csstools/css-calc/-/css-calc-2.1.4.tgz", + "integrity": "sha512-3N8oaj+0juUw/1H3YwmDDJXCgTB1gKU6Hc/bB502u9zR0q2vd786XJH9QfrKIEgFlZmhZiq6epXl4rHqhzsIgQ==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/csstools" + }, + { + "type": "opencollective", + "url": "https://opencollective.com/csstools" + } + ], + "license": "MIT", + "engines": { + "node": ">=18" + }, + "peerDependencies": { + "@csstools/css-parser-algorithms": "^3.0.5", + "@csstools/css-tokenizer": "^3.0.4" + } + }, + "node_modules/@csstools/css-color-parser": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/@csstools/css-color-parser/-/css-color-parser-3.1.0.tgz", + "integrity": "sha512-nbtKwh3a6xNVIp/VRuXV64yTKnb1IjTAEEh3irzS+HkKjAOYLTGNb9pmVNntZ8iVBHcWDA2Dof0QtPgFI1BaTA==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/csstools" + }, + { + "type": "opencollective", + "url": "https://opencollective.com/csstools" + } + ], + "license": "MIT", + "dependencies": { + "@csstools/color-helpers": "^5.1.0", + "@csstools/css-calc": "^2.1.4" + }, + "engines": { + "node": ">=18" + }, + "peerDependencies": { + "@csstools/css-parser-algorithms": "^3.0.5", + "@csstools/css-tokenizer": "^3.0.4" + } + }, + "node_modules/@csstools/css-parser-algorithms": { + "version": "3.0.5", + "resolved": "https://registry.npmjs.org/@csstools/css-parser-algorithms/-/css-parser-algorithms-3.0.5.tgz", + "integrity": "sha512-DaDeUkXZKjdGhgYaHNJTV9pV7Y9B3b644jCLs9Upc3VeNGg6LWARAT6O+Q+/COo+2gg/bM5rhpMAtf70WqfBdQ==", + "dev": true, + "funding": [ + { + "type": "github", + "url": 
"https://github.com/sponsors/csstools" + }, + { + "type": "opencollective", + "url": "https://opencollective.com/csstools" + } + ], + "license": "MIT", + "engines": { + "node": ">=18" + }, + "peerDependencies": { + "@csstools/css-tokenizer": "^3.0.4" + } + }, + "node_modules/@csstools/css-syntax-patches-for-csstree": { + "version": "1.0.14", + "resolved": "https://registry.npmjs.org/@csstools/css-syntax-patches-for-csstree/-/css-syntax-patches-for-csstree-1.0.14.tgz", + "integrity": "sha512-zSlIxa20WvMojjpCSy8WrNpcZ61RqfTfX3XTaOeVlGJrt/8HF3YbzgFZa01yTbT4GWQLwfTcC3EB8i3XnB647Q==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/csstools" + }, + { + "type": "opencollective", + "url": "https://opencollective.com/csstools" + } + ], + "license": "MIT-0", + "engines": { + "node": ">=18" + }, + "peerDependencies": { + "postcss": "^8.4" + } + }, + "node_modules/@csstools/css-tokenizer": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/@csstools/css-tokenizer/-/css-tokenizer-3.0.4.tgz", + "integrity": "sha512-Vd/9EVDiu6PPJt9yAh6roZP6El1xHrdvIVGjyBsHR0RYwNHgL7FJPyIIW4fANJNG6FtyZfvlRPpFI4ZM/lubvw==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/csstools" + }, + { + "type": "opencollective", + "url": "https://opencollective.com/csstools" + } + ], + "license": "MIT", + "engines": { + "node": ">=18" + } + }, "node_modules/@dnd-kit/accessibility": { "version": "3.1.1", "resolved": "https://registry.npmjs.org/@dnd-kit/accessibility/-/accessibility-3.1.1.tgz", @@ -727,9 +911,9 @@ } }, "node_modules/@jridgewell/sourcemap-codec": { - "version": "1.5.0", - "resolved": "https://registry.npmjs.org/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.5.0.tgz", - "integrity": "sha512-gv3ZRaISU3fjPAgNsriBRqGWQL6quFx04YMPW/zD8XMLsU32mhCCbfbO6KZFLjvYpCZ8zyDEgqsgf+PwPaM7GQ==", + "version": "1.5.5", + "resolved": 
"https://registry.npmjs.org/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.5.5.tgz", + "integrity": "sha512-cYQ9310grqxueWbl+WuIUIaiUaDcj7WOq5fVhEljNVgRfOUhY9fy2zTvfoqWsnebh8Sl70VScFbICvJnLKB0Og==", "license": "MIT" }, "node_modules/@jridgewell/trace-mapping": { @@ -1019,6 +1203,114 @@ "url": "https://github.com/sponsors/tannerlinsley" } }, + "node_modules/@testing-library/dom": { + "version": "10.4.1", + "resolved": "https://registry.npmjs.org/@testing-library/dom/-/dom-10.4.1.tgz", + "integrity": "sha512-o4PXJQidqJl82ckFaXUeoAW+XysPLauYI43Abki5hABd853iMhitooc6znOnczgbTYmEP6U6/y1ZyKAIsvMKGg==", + "dev": true, + "license": "MIT", + "peer": true, + "dependencies": { + "@babel/code-frame": "^7.10.4", + "@babel/runtime": "^7.12.5", + "@types/aria-query": "^5.0.1", + "aria-query": "5.3.0", + "dom-accessibility-api": "^0.5.9", + "lz-string": "^1.5.0", + "picocolors": "1.1.1", + "pretty-format": "^27.0.2" + }, + "engines": { + "node": ">=18" + } + }, + "node_modules/@testing-library/jest-dom": { + "version": "6.8.0", + "resolved": "https://registry.npmjs.org/@testing-library/jest-dom/-/jest-dom-6.8.0.tgz", + "integrity": "sha512-WgXcWzVM6idy5JaftTVC8Vs83NKRmGJz4Hqs4oyOuO2J4r/y79vvKZsb+CaGyCSEbUPI6OsewfPd0G1A0/TUZQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@adobe/css-tools": "^4.4.0", + "aria-query": "^5.0.0", + "css.escape": "^1.5.1", + "dom-accessibility-api": "^0.6.3", + "picocolors": "^1.1.1", + "redent": "^3.0.0" + }, + "engines": { + "node": ">=14", + "npm": ">=6", + "yarn": ">=1" + } + }, + "node_modules/@testing-library/jest-dom/node_modules/dom-accessibility-api": { + "version": "0.6.3", + "resolved": "https://registry.npmjs.org/dom-accessibility-api/-/dom-accessibility-api-0.6.3.tgz", + "integrity": "sha512-7ZgogeTnjuHbo+ct10G9Ffp0mif17idi0IyWNVA/wcwcm7NPOD/WEHVP3n7n3MhXqxoIYm8d6MuZohYWIZ4T3w==", + "dev": true, + "license": "MIT" + }, + "node_modules/@testing-library/react": { + "version": "16.3.0", + "resolved": 
"https://registry.npmjs.org/@testing-library/react/-/react-16.3.0.tgz", + "integrity": "sha512-kFSyxiEDwv1WLl2fgsq6pPBbw5aWKrsY2/noi1Id0TK0UParSF62oFQFGHXIyaG4pp2tEub/Zlel+fjjZILDsw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/runtime": "^7.12.5" + }, + "engines": { + "node": ">=18" + }, + "peerDependencies": { + "@testing-library/dom": "^10.0.0", + "@types/react": "^18.0.0 || ^19.0.0", + "@types/react-dom": "^18.0.0 || ^19.0.0", + "react": "^18.0.0 || ^19.0.0", + "react-dom": "^18.0.0 || ^19.0.0" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + }, + "@types/react-dom": { + "optional": true + } + } + }, + "node_modules/@testing-library/user-event": { + "version": "14.6.1", + "resolved": "https://registry.npmjs.org/@testing-library/user-event/-/user-event-14.6.1.tgz", + "integrity": "sha512-vq7fv0rnt+QTXgPxr5Hjc210p6YKq2kmdziLgnsZGgLJ9e6VAShx1pACLuRjd/AS/sr7phAR58OIIpf0LlmQNw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=12", + "npm": ">=6" + }, + "peerDependencies": { + "@testing-library/dom": ">=7.21.4" + } + }, + "node_modules/@types/aria-query": { + "version": "5.0.4", + "resolved": "https://registry.npmjs.org/@types/aria-query/-/aria-query-5.0.4.tgz", + "integrity": "sha512-rfT93uj5s0PRL7EzccGMs3brplhcrghnDoV26NqKhCAS1hVo+WdNsPvE/yb6ilfr5hi2MEk6d5EWJTKdxg8jVw==", + "dev": true, + "license": "MIT", + "peer": true + }, + "node_modules/@types/chai": { + "version": "5.2.2", + "resolved": "https://registry.npmjs.org/@types/chai/-/chai-5.2.2.tgz", + "integrity": "sha512-8kB30R7Hwqf40JPiKhVzodJs2Qc1ZJ5zuT3uzw5Hq/dhNCl3G3l83jfpdI1e20BP348+fV7VIL/+FxaXkqBmWg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/deep-eql": "*" + } + }, "node_modules/@types/d3-array": { "version": "3.2.1", "resolved": "https://registry.npmjs.org/@types/d3-array/-/d3-array-3.2.1.tgz", @@ -1082,6 +1374,13 @@ "integrity": 
"sha512-Ps3T8E8dZDam6fUyNiMkekK3XUsaUEik+idO9/YjPtfj2qruF8tFBXS7XhtE4iIXBLxhmLjP3SXpLhVf21I9Lw==", "license": "MIT" }, + "node_modules/@types/deep-eql": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/@types/deep-eql/-/deep-eql-4.0.2.tgz", + "integrity": "sha512-c9h9dVVMigMPc4bwTvC5dxqtqJZwQPePsWjPlpSOnojbor6pGqdk541lfA7AqFQr5pB1BRdq0juY9db81BwyFw==", + "dev": true, + "license": "MIT" + }, "node_modules/@types/estree": { "version": "1.0.6", "resolved": "https://registry.npmjs.org/@types/estree/-/estree-1.0.6.tgz", @@ -1192,6 +1491,121 @@ "vite": "^4 || ^5 || ^6" } }, + "node_modules/@vitest/expect": { + "version": "3.2.4", + "resolved": "https://registry.npmjs.org/@vitest/expect/-/expect-3.2.4.tgz", + "integrity": "sha512-Io0yyORnB6sikFlt8QW5K7slY4OjqNX9jmJQ02QDda8lyM6B5oNgVWoSoKPac8/kgnCUzuHQKrSLtu/uOqqrig==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/chai": "^5.2.2", + "@vitest/spy": "3.2.4", + "@vitest/utils": "3.2.4", + "chai": "^5.2.0", + "tinyrainbow": "^2.0.0" + }, + "funding": { + "url": "https://opencollective.com/vitest" + } + }, + "node_modules/@vitest/mocker": { + "version": "3.2.4", + "resolved": "https://registry.npmjs.org/@vitest/mocker/-/mocker-3.2.4.tgz", + "integrity": "sha512-46ryTE9RZO/rfDd7pEqFl7etuyzekzEhUbTW3BvmeO/BcCMEgq59BKhek3dXDWgAj4oMK6OZi+vRr1wPW6qjEQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@vitest/spy": "3.2.4", + "estree-walker": "^3.0.3", + "magic-string": "^0.30.17" + }, + "funding": { + "url": "https://opencollective.com/vitest" + }, + "peerDependencies": { + "msw": "^2.4.9", + "vite": "^5.0.0 || ^6.0.0 || ^7.0.0-0" + }, + "peerDependenciesMeta": { + "msw": { + "optional": true + }, + "vite": { + "optional": true + } + } + }, + "node_modules/@vitest/pretty-format": { + "version": "3.2.4", + "resolved": "https://registry.npmjs.org/@vitest/pretty-format/-/pretty-format-3.2.4.tgz", + "integrity": 
"sha512-IVNZik8IVRJRTr9fxlitMKeJeXFFFN0JaB9PHPGQ8NKQbGpfjlTx9zO4RefN8gp7eqjNy8nyK3NZmBzOPeIxtA==", + "dev": true, + "license": "MIT", + "dependencies": { + "tinyrainbow": "^2.0.0" + }, + "funding": { + "url": "https://opencollective.com/vitest" + } + }, + "node_modules/@vitest/runner": { + "version": "3.2.4", + "resolved": "https://registry.npmjs.org/@vitest/runner/-/runner-3.2.4.tgz", + "integrity": "sha512-oukfKT9Mk41LreEW09vt45f8wx7DordoWUZMYdY/cyAk7w5TWkTRCNZYF7sX7n2wB7jyGAl74OxgwhPgKaqDMQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@vitest/utils": "3.2.4", + "pathe": "^2.0.3", + "strip-literal": "^3.0.0" + }, + "funding": { + "url": "https://opencollective.com/vitest" + } + }, + "node_modules/@vitest/snapshot": { + "version": "3.2.4", + "resolved": "https://registry.npmjs.org/@vitest/snapshot/-/snapshot-3.2.4.tgz", + "integrity": "sha512-dEYtS7qQP2CjU27QBC5oUOxLE/v5eLkGqPE0ZKEIDGMs4vKWe7IjgLOeauHsR0D5YuuycGRO5oSRXnwnmA78fQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@vitest/pretty-format": "3.2.4", + "magic-string": "^0.30.17", + "pathe": "^2.0.3" + }, + "funding": { + "url": "https://opencollective.com/vitest" + } + }, + "node_modules/@vitest/spy": { + "version": "3.2.4", + "resolved": "https://registry.npmjs.org/@vitest/spy/-/spy-3.2.4.tgz", + "integrity": "sha512-vAfasCOe6AIK70iP5UD11Ac4siNUNJ9i/9PZ3NKx07sG6sUxeag1LWdNrMWeKKYBLlzuK+Gn65Yd5nyL6ds+nw==", + "dev": true, + "license": "MIT", + "dependencies": { + "tinyspy": "^4.0.3" + }, + "funding": { + "url": "https://opencollective.com/vitest" + } + }, + "node_modules/@vitest/utils": { + "version": "3.2.4", + "resolved": "https://registry.npmjs.org/@vitest/utils/-/utils-3.2.4.tgz", + "integrity": "sha512-fB2V0JFrQSMsCo9HiSq3Ezpdv4iYaXRG1Sx8edX3MwxfyNn83mKiGzOcH+Fkxt4MHxr3y42fQi1oeAInqgX2QA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@vitest/pretty-format": "3.2.4", + "loupe": "^3.1.4", + "tinyrainbow": "^2.0.0" + }, + "funding": { + "url": 
"https://opencollective.com/vitest" + } + }, "node_modules/@xmldom/xmldom": { "version": "0.8.10", "resolved": "https://registry.npmjs.org/@xmldom/xmldom/-/xmldom-0.8.10.tgz", @@ -1236,6 +1650,16 @@ "pkcs7": "^1.0.4" } }, + "node_modules/agent-base": { + "version": "7.1.4", + "resolved": "https://registry.npmjs.org/agent-base/-/agent-base-7.1.4.tgz", + "integrity": "sha512-MnA+YT8fwfJPgBx3m60MNqakm30XOkyIoH1y6huTQvC0PwZG7ki8NacLBcrPbNoo8vEZy7Jpuk7+jMO+CUovTQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 14" + } + }, "node_modules/ajv": { "version": "6.12.6", "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz", @@ -1284,6 +1708,17 @@ "react-dom": "16.8.0 - 18" } }, + "node_modules/ansi-regex": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", + "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", + "dev": true, + "license": "MIT", + "peer": true, + "engines": { + "node": ">=8" + } + }, "node_modules/ansi-styles": { "version": "4.3.0", "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", @@ -1307,6 +1742,26 @@ "dev": true, "license": "Python-2.0" }, + "node_modules/aria-query": { + "version": "5.3.0", + "resolved": "https://registry.npmjs.org/aria-query/-/aria-query-5.3.0.tgz", + "integrity": "sha512-b0P0sZPKtyu8HkeRAfCq0IfURZK+SuwMjY1UXGBU27wpAiTwQAIlq56IbIO+ytk/JjS1fMR14ee5WBBfKi5J6A==", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "dequal": "^2.0.3" + } + }, + "node_modules/assertion-error": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/assertion-error/-/assertion-error-2.0.1.tgz", + "integrity": "sha512-Izi8RQcffqCeNVgFigKli1ssklIbpHnCYc6AknXGYoB6grJqyeby7jv12JUQgmTAnIDnbck1uxksT4dzN3PWBA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=12" + } + }, "node_modules/attr-accept": { "version": "2.2.5", "resolved": 
"https://registry.npmjs.org/attr-accept/-/attr-accept-2.2.5.tgz", @@ -1338,6 +1793,16 @@ "dev": true, "license": "MIT" }, + "node_modules/bidi-js": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/bidi-js/-/bidi-js-1.0.3.tgz", + "integrity": "sha512-RKshQI1R3YQ+n9YJz2QQ147P66ELpa1FQEg20Dk8oW9t2KgLbpDLLp9aGZ7y8WHSshDknG0bknqGw5/tyCs5tw==", + "dev": true, + "license": "MIT", + "dependencies": { + "require-from-string": "^2.0.2" + } + }, "node_modules/brace-expansion": { "version": "1.1.11", "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", @@ -1349,6 +1814,16 @@ "concat-map": "0.0.1" } }, + "node_modules/cac": { + "version": "6.7.14", + "resolved": "https://registry.npmjs.org/cac/-/cac-6.7.14.tgz", + "integrity": "sha512-b6Ilus+c3RrdDk+JhLKUAQfzzgLEPy6wcXqS7f/xe1EETvsDP6GORG7SFuOs6cID5YkqchW/LXZbX5bc8j7ZcQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, "node_modules/callsites": { "version": "3.1.0", "resolved": "https://registry.npmjs.org/callsites/-/callsites-3.1.0.tgz", @@ -1358,6 +1833,23 @@ "node": ">=6" } }, + "node_modules/chai": { + "version": "5.3.3", + "resolved": "https://registry.npmjs.org/chai/-/chai-5.3.3.tgz", + "integrity": "sha512-4zNhdJD/iOjSH0A05ea+Ke6MU5mmpQcbQsSOkgdaUMJ9zTlDTD/GYlwohmIE2u0gaxHYiVHEn1Fw9mZ/ktJWgw==", + "dev": true, + "license": "MIT", + "dependencies": { + "assertion-error": "^2.0.1", + "check-error": "^2.1.1", + "deep-eql": "^5.0.1", + "loupe": "^3.1.0", + "pathval": "^2.0.0" + }, + "engines": { + "node": ">=18" + } + }, "node_modules/chalk": { "version": "4.1.2", "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", @@ -1375,6 +1867,16 @@ "url": "https://github.com/chalk/chalk?sponsor=1" } }, + "node_modules/check-error": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/check-error/-/check-error-2.1.1.tgz", + "integrity": 
"sha512-OAlb+T7V4Op9OwdkjmguYRqncdlx5JiofwOAUkmTF+jNdHwzTaTs4sRAGpzLF3oOz5xAyDGrPgeIDFQmDOTiJw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 16" + } + }, "node_modules/classnames": { "version": "2.5.1", "resolved": "https://registry.npmjs.org/classnames/-/classnames-2.5.1.tgz", @@ -1472,6 +1974,42 @@ "node": ">= 8" } }, + "node_modules/css-tree": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/css-tree/-/css-tree-3.1.0.tgz", + "integrity": "sha512-0eW44TGN5SQXU1mWSkKwFstI/22X2bG1nYzZTYMAWjylYURhse752YgbE4Cx46AC+bAvI+/dYTPRk1LqSUnu6w==", + "dev": true, + "license": "MIT", + "dependencies": { + "mdn-data": "2.12.2", + "source-map-js": "^1.0.1" + }, + "engines": { + "node": "^10 || ^12.20.0 || ^14.13.0 || >=15.0.0" + } + }, + "node_modules/css.escape": { + "version": "1.5.1", + "resolved": "https://registry.npmjs.org/css.escape/-/css.escape-1.5.1.tgz", + "integrity": "sha512-YUifsXXuknHlUsmlgyY0PKzgPOr7/FjCePfHNt0jxm83wHZi44VDMQ7/fGNkjY3/jV1MC+1CmZbaHzugyeRtpg==", + "dev": true, + "license": "MIT" + }, + "node_modules/cssstyle": { + "version": "5.3.1", + "resolved": "https://registry.npmjs.org/cssstyle/-/cssstyle-5.3.1.tgz", + "integrity": "sha512-g5PC9Aiph9eiczFpcgUhd9S4UUO3F+LHGRIi5NUMZ+4xtoIYbHNZwZnWA2JsFGe8OU8nl4WyaEFiZuGuxlutJQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@asamuzakjp/css-color": "^4.0.3", + "@csstools/css-syntax-patches-for-csstree": "^1.0.14", + "css-tree": "^3.1.0" + }, + "engines": { + "node": ">=20" + } + }, "node_modules/csstype": { "version": "3.1.3", "resolved": "https://registry.npmjs.org/csstype/-/csstype-3.1.3.tgz", @@ -1599,6 +2137,20 @@ "node": ">=12" } }, + "node_modules/data-urls": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/data-urls/-/data-urls-6.0.0.tgz", + "integrity": "sha512-BnBS08aLUM+DKamupXs3w2tJJoqU+AkaE/+6vQxi/G/DPmIZFJJp9Dkb1kM03AZx8ADehDUZgsNxju3mPXZYIA==", + "dev": true, + "license": "MIT", + "dependencies": { + "whatwg-mimetype": "^4.0.0", 
+ "whatwg-url": "^15.0.0" + }, + "engines": { + "node": ">=20" + } + }, "node_modules/dayjs": { "version": "1.11.13", "resolved": "https://registry.npmjs.org/dayjs/-/dayjs-1.11.13.tgz", @@ -1606,9 +2158,9 @@ "license": "MIT" }, "node_modules/debug": { - "version": "4.4.0", - "resolved": "https://registry.npmjs.org/debug/-/debug-4.4.0.tgz", - "integrity": "sha512-6WTZ/IxCY/T6BALoZHaE4ctp9xm+Z5kY/pzYaCHRFeyVhojxlrm+46y68HA6hr0TcwEssoxNiDEUJQjfPZ/RYA==", + "version": "4.4.3", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.4.3.tgz", + "integrity": "sha512-RGwwWnwQvkVfavKVt22FGLw+xYSdzARwm0ru6DhTVA3umU5hZc28V3kO4stgYryrTlLpuvgI9GiijltAjNbcqA==", "license": "MIT", "dependencies": { "ms": "^2.1.3" @@ -1622,12 +2174,29 @@ } } }, + "node_modules/decimal.js": { + "version": "10.6.0", + "resolved": "https://registry.npmjs.org/decimal.js/-/decimal.js-10.6.0.tgz", + "integrity": "sha512-YpgQiITW3JXGntzdUmyUR1V812Hn8T1YVXhCu+wO3OpS4eU9l4YdD3qjyiKdV6mvV29zapkMeD390UVEf2lkUg==", + "dev": true, + "license": "MIT" + }, "node_modules/decimal.js-light": { "version": "2.5.1", "resolved": "https://registry.npmjs.org/decimal.js-light/-/decimal.js-light-2.5.1.tgz", "integrity": "sha512-qIMFpTMZmny+MMIitAB6D7iVPEorVw6YQRWkvarTkT4tBeSLLiHzcwj6q0MmYSFCiVpiqPJTJEYIrpcPzVEIvg==", "license": "MIT" }, + "node_modules/deep-eql": { + "version": "5.0.2", + "resolved": "https://registry.npmjs.org/deep-eql/-/deep-eql-5.0.2.tgz", + "integrity": "sha512-h5k/5U50IJJFpzfL6nO9jaaumfjO/f2NjK/oYB2Djzm4p9L+3T9qWpZqZ2hAbLPuuYq9wrU08WQyBTL5GbPk5Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + } + }, "node_modules/deep-is": { "version": "0.1.4", "resolved": "https://registry.npmjs.org/deep-is/-/deep-is-0.1.4.tgz", @@ -1644,12 +2213,30 @@ "node": ">=0.10.0" } }, + "node_modules/dequal": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/dequal/-/dequal-2.0.3.tgz", + "integrity": 
"sha512-0je+qPKHEMohvfRTCEo3CrPG6cAzAYgmzKyxRiYSSDkS6eGJdyVJm7WaYA5ECaAD9wLB2T4EEeymA5aFVcYXCA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + } + }, "node_modules/detect-node-es": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/detect-node-es/-/detect-node-es-1.1.0.tgz", "integrity": "sha512-ypdmJU/TbBby2Dxibuv7ZLW3Bs1QEmM7nHjEANfohJLvE0XVujisn1qPJcZxg+qDucsr+bP6fLD1rPS3AhJ7EQ==", "license": "MIT" }, + "node_modules/dom-accessibility-api": { + "version": "0.5.16", + "resolved": "https://registry.npmjs.org/dom-accessibility-api/-/dom-accessibility-api-0.5.16.tgz", + "integrity": "sha512-X7BJ2yElsnOJ30pZF4uIIDfBEVgF4XEBxL9Bxhy6dnrm5hkzqmsWHGTiHqRiITNhMyFLyAiWndIJP7Z1NTteDg==", + "dev": true, + "license": "MIT", + "peer": true + }, "node_modules/dom-helpers": { "version": "5.2.1", "resolved": "https://registry.npmjs.org/dom-helpers/-/dom-helpers-5.2.1.tgz", @@ -1665,6 +2252,19 @@ "resolved": "https://registry.npmjs.org/dom-walk/-/dom-walk-0.1.2.tgz", "integrity": "sha512-6QvTW9mrGeIegrFXdtQi9pk7O/nSK6lSdXW2eqUspN5LWD7UTji2Fqw5V2YLjBpHEoU9Xl/eUWNpDeZvoyOv2w==" }, + "node_modules/entities": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/entities/-/entities-6.0.1.tgz", + "integrity": "sha512-aN97NXWF6AWBTahfVOIrB/NShkzi5H7F9r1s9mD3cDj4Ko5f2qhhVoYMibXF7GlLveb/D2ioWay8lxI97Ven3g==", + "dev": true, + "license": "BSD-2-Clause", + "engines": { + "node": ">=0.12" + }, + "funding": { + "url": "https://github.com/fb55/entities?sponsor=1" + } + }, "node_modules/error-ex": { "version": "1.3.2", "resolved": "https://registry.npmjs.org/error-ex/-/error-ex-1.3.2.tgz", @@ -1674,6 +2274,13 @@ "is-arrayish": "^0.2.1" } }, + "node_modules/es-module-lexer": { + "version": "1.7.0", + "resolved": "https://registry.npmjs.org/es-module-lexer/-/es-module-lexer-1.7.0.tgz", + "integrity": "sha512-jEQoCwk8hyb2AZziIOLhDqpm5+2ww5uIE6lkO/6jcOCusfk6LhMHpXXfBLXTZ7Ydyt0j4VoUQv6uGNYbdW+kBA==", + "dev": true, + "license": "MIT" + }, 
"node_modules/es6-promise": { "version": "4.2.8", "resolved": "https://registry.npmjs.org/es6-promise/-/es6-promise-4.2.8.tgz", @@ -1901,6 +2508,16 @@ "node": ">=4.0" } }, + "node_modules/estree-walker": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/estree-walker/-/estree-walker-3.0.3.tgz", + "integrity": "sha512-7RUKfXgSMMkzt6ZuXmqapOurLGPPfgj6l9uRZ7lRGolvk0y2yocc35LdcxKC5PQZdn2DMqioAQ2NoWcrTKmm6g==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/estree": "^1.0.0" + } + }, "node_modules/esutils": { "version": "2.0.3", "resolved": "https://registry.npmjs.org/esutils/-/esutils-2.0.3.tgz", @@ -1917,6 +2534,16 @@ "integrity": "sha512-GWkBvjiSZK87ELrYOSESUYeVIc9mvLLf/nXalMOS5dYrgZq9o5OVkbZAVM06CVxYsCwH9BDZFPlQTlPA1j4ahA==", "license": "MIT" }, + "node_modules/expect-type": { + "version": "1.2.2", + "resolved": "https://registry.npmjs.org/expect-type/-/expect-type-1.2.2.tgz", + "integrity": "sha512-JhFGDVJ7tmDJItKhYgJCGLOWjuK9vPxiXoUFLwLDc99NlmklilbiQJwoctZtt13+xMw91MCk/REan6MWHqDjyA==", + "dev": true, + "license": "Apache-2.0", + "engines": { + "node": ">=12.0.0" + } + }, "node_modules/fast-deep-equal": { "version": "3.1.3", "resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz", @@ -1947,11 +2574,14 @@ "license": "MIT" }, "node_modules/fdir": { - "version": "6.4.4", - "resolved": "https://registry.npmjs.org/fdir/-/fdir-6.4.4.tgz", - "integrity": "sha512-1NZP+GK4GfuAv3PqKvxQRDMjdSRZjnkq7KfhlNrCNNlZ0ygQFpebfrnfnq/W7fpUnAv9aGWmY1zKx7FYL3gwhg==", + "version": "6.5.0", + "resolved": "https://registry.npmjs.org/fdir/-/fdir-6.5.0.tgz", + "integrity": "sha512-tIbYtZbucOs0BRGqPJkshJUYdL+SDH7dVM8gjy+ERp3WAUjLEFJE+02kanyHtwjWOnwrKYBiwAmM0p4kLJAnXg==", "dev": true, "license": "MIT", + "engines": { + "node": ">=12.0.0" + }, "peerDependencies": { "picomatch": "^3 || ^4" }, @@ -2167,6 +2797,60 @@ "integrity": "sha512-24e6ynE2H+OKt4kqsOvNd8kBpV65zoxbA4BVsEOB3ARVWQki/DHzaUoC5KuON/BiccDaCCTZBuOcfZs70kR8bQ==", 
"license": "MIT" }, + "node_modules/html-encoding-sniffer": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/html-encoding-sniffer/-/html-encoding-sniffer-4.0.0.tgz", + "integrity": "sha512-Y22oTqIU4uuPgEemfz7NDJz6OeKf12Lsu+QC+s3BVpda64lTiMYCyGwg5ki4vFxkMwQdeZDl2adZoqUgdFuTgQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "whatwg-encoding": "^3.1.1" + }, + "engines": { + "node": ">=18" + } + }, + "node_modules/http-proxy-agent": { + "version": "7.0.2", + "resolved": "https://registry.npmjs.org/http-proxy-agent/-/http-proxy-agent-7.0.2.tgz", + "integrity": "sha512-T1gkAiYYDWYx3V5Bmyu7HcfcvL7mUrTWiM6yOfa3PIphViJ/gFPbvidQ+veqSOHci/PxBcDabeUNCzpOODJZig==", + "dev": true, + "license": "MIT", + "dependencies": { + "agent-base": "^7.1.0", + "debug": "^4.3.4" + }, + "engines": { + "node": ">= 14" + } + }, + "node_modules/https-proxy-agent": { + "version": "7.0.6", + "resolved": "https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-7.0.6.tgz", + "integrity": "sha512-vK9P5/iUfdl95AI+JVyUuIcVtd4ofvtrOr3HNtM2yxC9bnMbEdp3x01OhQNnjb8IJYi38VlTE3mBXwcfvywuSw==", + "dev": true, + "license": "MIT", + "dependencies": { + "agent-base": "^7.1.2", + "debug": "4" + }, + "engines": { + "node": ">= 14" + } + }, + "node_modules/iconv-lite": { + "version": "0.6.3", + "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.6.3.tgz", + "integrity": "sha512-4fCk79wshMdzMp2rH06qWrJE4iolqLhCUH+OiuIgU++RB0+94NlDL81atO7GX55uUKueo0txHNtvEyI6D7WdMw==", + "dev": true, + "license": "MIT", + "dependencies": { + "safer-buffer": ">= 2.1.2 < 3.0.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, "node_modules/ignore": { "version": "5.3.2", "resolved": "https://registry.npmjs.org/ignore/-/ignore-5.3.2.tgz", @@ -2203,6 +2887,16 @@ "node": ">=0.8.19" } }, + "node_modules/indent-string": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/indent-string/-/indent-string-4.0.0.tgz", + "integrity": 
"sha512-EdDDZu4A2OyIK7Lr/2zG+w5jmbuk1DVBnEwREQvBzspBJkCEbRa8GxU1lghYcaGJCnRWibjDXlq779X1/y5xwg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, "node_modules/internmap": { "version": "2.0.3", "resolved": "https://registry.npmjs.org/internmap/-/internmap-2.0.3.tgz", @@ -2262,6 +2956,13 @@ "node": ">=0.10.0" } }, + "node_modules/is-potential-custom-element-name": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/is-potential-custom-element-name/-/is-potential-custom-element-name-1.0.1.tgz", + "integrity": "sha512-bCYeRA2rVibKZd+s2625gGnGF/t7DSqDs4dP7CrLA1m7jKWz6pps0LpYLJN8Q64HtmPKJ1hrN3nzPNKFEKOUiQ==", + "dev": true, + "license": "MIT" + }, "node_modules/isexe": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz", @@ -2288,6 +2989,46 @@ "js-yaml": "bin/js-yaml.js" } }, + "node_modules/jsdom": { + "version": "27.0.0", + "resolved": "https://registry.npmjs.org/jsdom/-/jsdom-27.0.0.tgz", + "integrity": "sha512-lIHeR1qlIRrIN5VMccd8tI2Sgw6ieYXSVktcSHaNe3Z5nE/tcPQYQWOq00wxMvYOsz+73eAkNenVvmPC6bba9A==", + "dev": true, + "license": "MIT", + "dependencies": { + "@asamuzakjp/dom-selector": "^6.5.4", + "cssstyle": "^5.3.0", + "data-urls": "^6.0.0", + "decimal.js": "^10.5.0", + "html-encoding-sniffer": "^4.0.0", + "http-proxy-agent": "^7.0.2", + "https-proxy-agent": "^7.0.6", + "is-potential-custom-element-name": "^1.0.1", + "parse5": "^7.3.0", + "rrweb-cssom": "^0.8.0", + "saxes": "^6.0.0", + "symbol-tree": "^3.2.4", + "tough-cookie": "^6.0.0", + "w3c-xmlserializer": "^5.0.0", + "webidl-conversions": "^8.0.0", + "whatwg-encoding": "^3.1.1", + "whatwg-mimetype": "^4.0.0", + "whatwg-url": "^15.0.0", + "ws": "^8.18.2", + "xml-name-validator": "^5.0.0" + }, + "engines": { + "node": ">=20" + }, + "peerDependencies": { + "canvas": "^3.0.0" + }, + "peerDependenciesMeta": { + "canvas": { + "optional": true + } + } + }, "node_modules/jsesc": { "version": "3.1.0", "resolved": 
"https://registry.npmjs.org/jsesc/-/jsesc-3.1.0.tgz", @@ -2432,6 +3173,23 @@ "loose-envify": "cli.js" } }, + "node_modules/loupe": { + "version": "3.2.1", + "resolved": "https://registry.npmjs.org/loupe/-/loupe-3.2.1.tgz", + "integrity": "sha512-CdzqowRJCeLU72bHvWqwRBBlLcMEtIvGrlvef74kMnV2AolS9Y8xUv1I0U/MNAWMhBlKIoyuEgoJ0t/bbwHbLQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/lru-cache": { + "version": "11.2.1", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-11.2.1.tgz", + "integrity": "sha512-r8LA6i4LP4EeWOhqBaZZjDWwehd1xUJPCJd9Sv300H0ZmcUER4+JPh7bqqZeqs1o5pgtgvXm+d9UGrB5zZGDiQ==", + "dev": true, + "license": "ISC", + "engines": { + "node": "20 || >=22" + } + }, "node_modules/lucide-react": { "version": "0.511.0", "resolved": "https://registry.npmjs.org/lucide-react/-/lucide-react-0.511.0.tgz", @@ -2441,6 +3199,17 @@ "react": "^16.5.1 || ^17.0.0 || ^18.0.0 || ^19.0.0" } }, + "node_modules/lz-string": { + "version": "1.5.0", + "resolved": "https://registry.npmjs.org/lz-string/-/lz-string-1.5.0.tgz", + "integrity": "sha512-h5bgJWpxJNswbU7qCrV0tIKQCaS3blPDrqKWx+QxzuzL1zGUzij9XCWLrSLsJPu5t+eWA/ycetzYAO5IOMcWAQ==", + "dev": true, + "license": "MIT", + "peer": true, + "bin": { + "lz-string": "bin/bin.js" + } + }, "node_modules/m3u8-parser": { "version": "7.2.0", "resolved": "https://registry.npmjs.org/m3u8-parser/-/m3u8-parser-7.2.0.tgz", @@ -2452,6 +3221,23 @@ "global": "^4.4.0" } }, + "node_modules/magic-string": { + "version": "0.30.19", + "resolved": "https://registry.npmjs.org/magic-string/-/magic-string-0.30.19.tgz", + "integrity": "sha512-2N21sPY9Ws53PZvsEpVtNuSW+ScYbQdp4b9qUaL+9QkHUrGFKo56Lg9Emg5s9V/qrtNBmiR01sYhUOwu3H+VOw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jridgewell/sourcemap-codec": "^1.5.5" + } + }, + "node_modules/mdn-data": { + "version": "2.12.2", + "resolved": "https://registry.npmjs.org/mdn-data/-/mdn-data-2.12.2.tgz", + "integrity": 
"sha512-IEn+pegP1aManZuckezWCO+XZQDplx1366JoVhTpMpBB1sPey/SbveZQUosKiKiGYjg1wH4pMlNgXbCiYgihQA==", + "dev": true, + "license": "CC0-1.0" + }, "node_modules/memoize-one": { "version": "5.2.1", "resolved": "https://registry.npmjs.org/memoize-one/-/memoize-one-5.2.1.tgz", @@ -2466,6 +3252,16 @@ "dom-walk": "^0.1.0" } }, + "node_modules/min-indent": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/min-indent/-/min-indent-1.0.1.tgz", + "integrity": "sha512-I9jwMn07Sy/IwOj3zVkVik2JTvgpaykDZEigL6Rx6N9LbMywwUSMtxET+7lVoDLLd3O3IXwJwvuuns8UB/HeAg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=4" + } + }, "node_modules/minimatch": { "version": "3.1.2", "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", @@ -2642,6 +3438,19 @@ "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/parse5": { + "version": "7.3.0", + "resolved": "https://registry.npmjs.org/parse5/-/parse5-7.3.0.tgz", + "integrity": "sha512-IInvU7fabl34qmi9gY8XOVxhYyMyuH2xUNpb2q8/Y+7552KlejkRvqvD19nMoUW/uQGGbqNpA6Tufu5FL5BZgw==", + "dev": true, + "license": "MIT", + "dependencies": { + "entities": "^6.0.0" + }, + "funding": { + "url": "https://github.com/inikulin/parse5?sponsor=1" + } + }, "node_modules/path-exists": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-4.0.0.tgz", @@ -2677,6 +3486,23 @@ "node": ">=8" } }, + "node_modules/pathe": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/pathe/-/pathe-2.0.3.tgz", + "integrity": "sha512-WUjGcAqP1gQacoQe+OBJsFA7Ld4DyXuUIjZ5cc75cLHvJ7dtNsTugphxIADwspS+AraAUePCKrSVtPLFj/F88w==", + "dev": true, + "license": "MIT" + }, + "node_modules/pathval": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/pathval/-/pathval-2.0.1.tgz", + "integrity": "sha512-//nshmD55c46FuFw26xV/xFAaB5HF9Xdap7HJBBnrKdAd6/GxDBaNA1870O79+9ueg61cZLSVc+OaFlfmObYVQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 14.16" + } + }, 
"node_modules/picocolors": { "version": "1.1.1", "resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.1.1.tgz", @@ -2684,9 +3510,9 @@ "license": "ISC" }, "node_modules/picomatch": { - "version": "4.0.2", - "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-4.0.2.tgz", - "integrity": "sha512-M7BAV6Rlcy5u+m6oPhAPFgJTzAioX/6B0DxyvDlo9l8+T3nLKbrczg2WLUyzd45L8RqfUMyGPzekbMvX2Ldkwg==", + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-4.0.3.tgz", + "integrity": "sha512-5gTmgEY/sqK6gFXLIsQNH19lWb4ebPDLA4SdLP7dsWkIXHWlG66oPuVvXSGFPppYZz8ZDZq0dYYrbHfBCVUb1Q==", "dev": true, "license": "MIT", "engines": { @@ -2763,6 +3589,44 @@ "url": "https://github.com/prettier/prettier?sponsor=1" } }, + "node_modules/pretty-format": { + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/pretty-format/-/pretty-format-27.5.1.tgz", + "integrity": "sha512-Qb1gy5OrP5+zDf2Bvnzdl3jsTf1qXVMazbvCoKhtKqVs4/YK4ozX4gKQJJVyNe+cajNPn0KoC0MC3FUmaHWEmQ==", + "dev": true, + "license": "MIT", + "peer": true, + "dependencies": { + "ansi-regex": "^5.0.1", + "ansi-styles": "^5.0.0", + "react-is": "^17.0.1" + }, + "engines": { + "node": "^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0" + } + }, + "node_modules/pretty-format/node_modules/ansi-styles": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-5.2.0.tgz", + "integrity": "sha512-Cxwpt2SfTzTtXcfOlzGEee8O+c+MmUgGrNiBcXnuWxuFJHe6a5Hz7qwhwe5OgaSYI0IJvkLqWX1ASG+cJOkEiA==", + "dev": true, + "license": "MIT", + "peer": true, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/pretty-format/node_modules/react-is": { + "version": "17.0.2", + "resolved": "https://registry.npmjs.org/react-is/-/react-is-17.0.2.tgz", + "integrity": "sha512-w2GsyukL62IJnlaff/nRegPQR94C/XXamvMWmSHRJ4y7Ts/4ocGRmTHvOs8PSE6pB3dWOrD/nueuU5sduBsQ4w==", + "dev": true, + "license": "MIT", + "peer": true 
+ }, "node_modules/process": { "version": "0.11.10", "resolved": "https://registry.npmjs.org/process/-/process-0.11.10.tgz", @@ -3157,12 +4021,36 @@ "integrity": "sha512-8guHBZCwKnFhYdHr2ysuRWErTwhoN2X8XELRlrRwpmfeY2jjuUN4taQMsULKUVo1K4DvZl+0pgfyoysHxvmvEw==", "license": "MIT" }, + "node_modules/redent": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/redent/-/redent-3.0.0.tgz", + "integrity": "sha512-6tDA8g98We0zd0GvVeMT9arEOnTw9qM03L9cJXaCjrip1OO764RDBLBfrB4cwzNGDj5OA5ioymC9GkizgWJDUg==", + "dev": true, + "license": "MIT", + "dependencies": { + "indent-string": "^4.0.0", + "strip-indent": "^3.0.0" + }, + "engines": { + "node": ">=8" + } + }, "node_modules/regenerator-runtime": { "version": "0.14.1", "resolved": "https://registry.npmjs.org/regenerator-runtime/-/regenerator-runtime-0.14.1.tgz", "integrity": "sha512-dYnhHh0nJoMfnkZs6GmmhFknAGRrLznOu5nc9ML+EJxGvrx6H7teuevqVqCuPcPK//3eDrrjQhehXVx9cnkGdw==", "license": "MIT" }, + "node_modules/require-from-string": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/require-from-string/-/require-from-string-2.0.2.tgz", + "integrity": "sha512-Xf0nWe6RseziFMu+Ap9biiUbmplq6S9/p+7w7YXP/JBHhrUDDUhwa+vANyubuqfZWTveU//DYVGsDG7RKL/vEw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, "node_modules/resolve": { "version": "1.22.10", "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.22.10.tgz", @@ -3231,6 +4119,33 @@ "fsevents": "~2.3.2" } }, + "node_modules/rrweb-cssom": { + "version": "0.8.0", + "resolved": "https://registry.npmjs.org/rrweb-cssom/-/rrweb-cssom-0.8.0.tgz", + "integrity": "sha512-guoltQEx+9aMf2gDZ0s62EcV8lsXR+0w8915TC3ITdn2YueuNjdAYh/levpU9nFaoChh9RUS5ZdQMrKfVEN9tw==", + "dev": true, + "license": "MIT" + }, + "node_modules/safer-buffer": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz", + "integrity": 
"sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==", + "dev": true, + "license": "MIT" + }, + "node_modules/saxes": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/saxes/-/saxes-6.0.0.tgz", + "integrity": "sha512-xAg7SOnEhrm5zI3puOOKyy1OMcMlIJZYNJY7xLBwSze0UjhPLnWfj2GF2EpT0jmzaJKIWKHLsaSSajf35bcYnA==", + "dev": true, + "license": "ISC", + "dependencies": { + "xmlchars": "^2.2.0" + }, + "engines": { + "node": ">=v12.22.7" + } + }, "node_modules/scheduler": { "version": "0.25.0", "resolved": "https://registry.npmjs.org/scheduler/-/scheduler-0.25.0.tgz", @@ -3266,6 +4181,13 @@ "node": ">=8" } }, + "node_modules/siginfo": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/siginfo/-/siginfo-2.0.0.tgz", + "integrity": "sha512-ybx0WO1/8bSBLEWXZvEd7gMW3Sn3JFlW3TvX1nREbDLRNQNaeNN8WK0meBwPdAaOI7TtRRRJn/Es1zhrrCHu7g==", + "dev": true, + "license": "ISC" + }, "node_modules/source-map": { "version": "0.5.7", "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.5.7.tgz", @@ -3285,6 +4207,33 @@ "node": ">=0.10.0" } }, + "node_modules/stackback": { + "version": "0.0.2", + "resolved": "https://registry.npmjs.org/stackback/-/stackback-0.0.2.tgz", + "integrity": "sha512-1XMJE5fQo1jGH6Y/7ebnwPOBEkIEnT4QF32d5R1+VXdXveM0IBMJt8zfaxX1P3QhVwrYe+576+jkANtSS2mBbw==", + "dev": true, + "license": "MIT" + }, + "node_modules/std-env": { + "version": "3.9.0", + "resolved": "https://registry.npmjs.org/std-env/-/std-env-3.9.0.tgz", + "integrity": "sha512-UGvjygr6F6tpH7o2qyqR6QYpwraIjKSdtzyBdyytFOHmPZY917kwdwLG0RbOjWOnKmnm3PeHjaoLLMie7kPLQw==", + "dev": true, + "license": "MIT" + }, + "node_modules/strip-indent": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/strip-indent/-/strip-indent-3.0.0.tgz", + "integrity": "sha512-laJTa3Jb+VQpaC6DseHhF7dXVqHTfJPCRDaEbid/drOhgitgYku/letMUqOXFoWV0zIIUbjpdH2t+tYj4bQMRQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "min-indent": "^1.0.0" + 
}, + "engines": { + "node": ">=8" + } + }, "node_modules/strip-json-comments": { "version": "3.1.1", "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-3.1.1.tgz", @@ -3298,6 +4247,26 @@ "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/strip-literal": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/strip-literal/-/strip-literal-3.0.0.tgz", + "integrity": "sha512-TcccoMhJOM3OebGhSBEmp3UZ2SfDMZUEBdRA/9ynfLi8yYajyWX3JiXArcJt4Umh4vISpspkQIY8ZZoCqjbviA==", + "dev": true, + "license": "MIT", + "dependencies": { + "js-tokens": "^9.0.1" + }, + "funding": { + "url": "https://github.com/sponsors/antfu" + } + }, + "node_modules/strip-literal/node_modules/js-tokens": { + "version": "9.0.1", + "resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-9.0.1.tgz", + "integrity": "sha512-mxa9E9ITFOt0ban3j6L5MpjwegGz6lBQmM1IJkWeBZGcMxto50+eWdjC/52xDbS2vy0k7vIMK0Fe2wfL9OQSpQ==", + "dev": true, + "license": "MIT" + }, "node_modules/stylis": { "version": "4.2.0", "resolved": "https://registry.npmjs.org/stylis/-/stylis-4.2.0.tgz", @@ -3329,6 +4298,13 @@ "url": "https://github.com/sponsors/ljharb" } }, + "node_modules/symbol-tree": { + "version": "3.2.4", + "resolved": "https://registry.npmjs.org/symbol-tree/-/symbol-tree-3.2.4.tgz", + "integrity": "sha512-9QNk5KwDF+Bvz+PyObkmSYjI5ksVUYtjW7AU22r2NKcfLJcXp96hkDWU3+XndOsUb+AQ9QhfzfCT2O+CNWT5Tw==", + "dev": true, + "license": "MIT" + }, "node_modules/tabbable": { "version": "6.2.0", "resolved": "https://registry.npmjs.org/tabbable/-/tabbable-6.2.0.tgz", @@ -3353,15 +4329,29 @@ "integrity": "sha512-lBN9zLN/oAf68o3zNXYrdCt1kP8WsiGW8Oo2ka41b2IM5JL/S1CTyX1rW0mb/zSuJun0ZUrDxx4sqvYS2FWzPA==", "license": "MIT" }, + "node_modules/tinybench": { + "version": "2.9.0", + "resolved": "https://registry.npmjs.org/tinybench/-/tinybench-2.9.0.tgz", + "integrity": "sha512-0+DUvqWMValLmha6lr4kD8iAMK1HzV0/aKnCtWb9v9641TnP/MFb7Pc2bxoxQjTXAErryXVgUOfv2YqNllqGeg==", + "dev": true, + 
"license": "MIT" + }, + "node_modules/tinyexec": { + "version": "0.3.2", + "resolved": "https://registry.npmjs.org/tinyexec/-/tinyexec-0.3.2.tgz", + "integrity": "sha512-KQQR9yN7R5+OSwaK0XQoj22pwHoTlgYqmUscPYoknOoWCWfj/5/ABTMRi69FrKU5ffPVh5QcFikpWJI/P1ocHA==", + "dev": true, + "license": "MIT" + }, "node_modules/tinyglobby": { - "version": "0.2.13", - "resolved": "https://registry.npmjs.org/tinyglobby/-/tinyglobby-0.2.13.tgz", - "integrity": "sha512-mEwzpUgrLySlveBwEVDMKk5B57bhLPYovRfPAXD5gA/98Opn0rCDj3GtLwFvCvH5RK9uPCExUROW5NjDwvqkxw==", + "version": "0.2.15", + "resolved": "https://registry.npmjs.org/tinyglobby/-/tinyglobby-0.2.15.tgz", + "integrity": "sha512-j2Zq4NyQYG5XMST4cbs02Ak8iJUdxRM0XI5QyxXuZOzKOINmWurp3smXu3y5wDcJrptwpSjgXHzIQxR0omXljQ==", "dev": true, "license": "MIT", "dependencies": { - "fdir": "^6.4.4", - "picomatch": "^4.0.2" + "fdir": "^6.5.0", + "picomatch": "^4.0.3" }, "engines": { "node": ">=12.0.0" @@ -3370,12 +4360,88 @@ "url": "https://github.com/sponsors/SuperchupuDev" } }, + "node_modules/tinypool": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/tinypool/-/tinypool-1.1.1.tgz", + "integrity": "sha512-Zba82s87IFq9A9XmjiX5uZA/ARWDrB03OHlq+Vw1fSdt0I+4/Kutwy8BP4Y/y/aORMo61FQ0vIb5j44vSo5Pkg==", + "dev": true, + "license": "MIT", + "engines": { + "node": "^18.0.0 || >=20.0.0" + } + }, + "node_modules/tinyrainbow": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/tinyrainbow/-/tinyrainbow-2.0.0.tgz", + "integrity": "sha512-op4nsTR47R6p0vMUUoYl/a+ljLFVtlfaXkLQmqfLR1qHma1h/ysYk4hEXZ880bf2CYgTskvTa/e196Vd5dDQXw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/tinyspy": { + "version": "4.0.4", + "resolved": "https://registry.npmjs.org/tinyspy/-/tinyspy-4.0.4.tgz", + "integrity": "sha512-azl+t0z7pw/z958Gy9svOTuzqIk6xq+NSheJzn5MMWtWTFywIacg2wUlzKFGtt3cthx0r2SxMK0yzJOR0IES7Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=14.0.0" + } + }, + 
"node_modules/tldts": { + "version": "7.0.15", + "resolved": "https://registry.npmjs.org/tldts/-/tldts-7.0.15.tgz", + "integrity": "sha512-heYRCiGLhtI+U/D0V8YM3QRwPfsLJiP+HX+YwiHZTnWzjIKC+ZCxQRYlzvOoTEc6KIP62B1VeAN63diGCng2hg==", + "dev": true, + "license": "MIT", + "dependencies": { + "tldts-core": "^7.0.15" + }, + "bin": { + "tldts": "bin/cli.js" + } + }, + "node_modules/tldts-core": { + "version": "7.0.15", + "resolved": "https://registry.npmjs.org/tldts-core/-/tldts-core-7.0.15.tgz", + "integrity": "sha512-YBkp2VfS9VTRMPNL2PA6PMESmxV1JEVoAr5iBlZnB5JG3KUrWzNCB3yNNkRa2FZkqClaBgfNYCp8PgpYmpjkZw==", + "dev": true, + "license": "MIT" + }, "node_modules/toposort": { "version": "2.0.2", "resolved": "https://registry.npmjs.org/toposort/-/toposort-2.0.2.tgz", "integrity": "sha512-0a5EOkAUp8D4moMi2W8ZF8jcga7BgZd91O/yabJCFY8az+XSzeGyTKs0Aoo897iV1Nj6guFq8orWDS96z91oGg==", "license": "MIT" }, + "node_modules/tough-cookie": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-6.0.0.tgz", + "integrity": "sha512-kXuRi1mtaKMrsLUxz3sQYvVl37B0Ns6MzfrtV5DvJceE9bPyspOqk9xxv7XbZWcfLWbFmm997vl83qUWVJA64w==", + "dev": true, + "license": "BSD-3-Clause", + "dependencies": { + "tldts": "^7.0.5" + }, + "engines": { + "node": ">=16" + } + }, + "node_modules/tr46": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/tr46/-/tr46-6.0.0.tgz", + "integrity": "sha512-bLVMLPtstlZ4iMQHpFHTR7GAGj2jxi8Dg0s2h2MafAE4uSWF98FC/3MomU51iQAMf8/qDUbKWf5GxuvvVcXEhw==", + "dev": true, + "license": "MIT", + "dependencies": { + "punycode": "^2.3.1" + }, + "engines": { + "node": ">=20" + } + }, "node_modules/tslib": { "version": "2.8.1", "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.8.1.tgz", @@ -3653,11 +4719,167 @@ } } }, + "node_modules/vite-node": { + "version": "3.2.4", + "resolved": "https://registry.npmjs.org/vite-node/-/vite-node-3.2.4.tgz", + "integrity": 
"sha512-EbKSKh+bh1E1IFxeO0pg1n4dvoOTt0UDiXMd/qn++r98+jPO1xtJilvXldeuQ8giIB5IkpjCgMleHMNEsGH6pg==", + "dev": true, + "license": "MIT", + "dependencies": { + "cac": "^6.7.14", + "debug": "^4.4.1", + "es-module-lexer": "^1.7.0", + "pathe": "^2.0.3", + "vite": "^5.0.0 || ^6.0.0 || ^7.0.0-0" + }, + "bin": { + "vite-node": "vite-node.mjs" + }, + "engines": { + "node": "^18.0.0 || ^20.0.0 || >=22.0.0" + }, + "funding": { + "url": "https://opencollective.com/vitest" + } + }, + "node_modules/vitest": { + "version": "3.2.4", + "resolved": "https://registry.npmjs.org/vitest/-/vitest-3.2.4.tgz", + "integrity": "sha512-LUCP5ev3GURDysTWiP47wRRUpLKMOfPh+yKTx3kVIEiu5KOMeqzpnYNsKyOoVrULivR8tLcks4+lga33Whn90A==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/chai": "^5.2.2", + "@vitest/expect": "3.2.4", + "@vitest/mocker": "3.2.4", + "@vitest/pretty-format": "^3.2.4", + "@vitest/runner": "3.2.4", + "@vitest/snapshot": "3.2.4", + "@vitest/spy": "3.2.4", + "@vitest/utils": "3.2.4", + "chai": "^5.2.0", + "debug": "^4.4.1", + "expect-type": "^1.2.1", + "magic-string": "^0.30.17", + "pathe": "^2.0.3", + "picomatch": "^4.0.2", + "std-env": "^3.9.0", + "tinybench": "^2.9.0", + "tinyexec": "^0.3.2", + "tinyglobby": "^0.2.14", + "tinypool": "^1.1.1", + "tinyrainbow": "^2.0.0", + "vite": "^5.0.0 || ^6.0.0 || ^7.0.0-0", + "vite-node": "3.2.4", + "why-is-node-running": "^2.3.0" + }, + "bin": { + "vitest": "vitest.mjs" + }, + "engines": { + "node": "^18.0.0 || ^20.0.0 || >=22.0.0" + }, + "funding": { + "url": "https://opencollective.com/vitest" + }, + "peerDependencies": { + "@edge-runtime/vm": "*", + "@types/debug": "^4.1.12", + "@types/node": "^18.0.0 || ^20.0.0 || >=22.0.0", + "@vitest/browser": "3.2.4", + "@vitest/ui": "3.2.4", + "happy-dom": "*", + "jsdom": "*" + }, + "peerDependenciesMeta": { + "@edge-runtime/vm": { + "optional": true + }, + "@types/debug": { + "optional": true + }, + "@types/node": { + "optional": true + }, + "@vitest/browser": { + "optional": true + }, 
+ "@vitest/ui": { + "optional": true + }, + "happy-dom": { + "optional": true + }, + "jsdom": { + "optional": true + } + } + }, + "node_modules/w3c-xmlserializer": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/w3c-xmlserializer/-/w3c-xmlserializer-5.0.0.tgz", + "integrity": "sha512-o8qghlI8NZHU1lLPrpi2+Uq7abh4GGPpYANlalzWxyWteJOCsr/P+oPBA49TOLu5FTZO4d3F9MnWJfiMo4BkmA==", + "dev": true, + "license": "MIT", + "dependencies": { + "xml-name-validator": "^5.0.0" + }, + "engines": { + "node": ">=18" + } + }, + "node_modules/webidl-conversions": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-8.0.0.tgz", + "integrity": "sha512-n4W4YFyz5JzOfQeA8oN7dUYpR+MBP3PIUsn2jLjWXwK5ASUzt0Jc/A5sAUZoCYFJRGF0FBKJ+1JjN43rNdsQzA==", + "dev": true, + "license": "BSD-2-Clause", + "engines": { + "node": ">=20" + } + }, "node_modules/webworkify-webpack": { "version": "2.1.5", "resolved": "git+ssh://git@github.com/xqq/webworkify-webpack.git#24d1e719b4a6cac37a518b2bb10fe124527ef4ef", "license": "MIT" }, + "node_modules/whatwg-encoding": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/whatwg-encoding/-/whatwg-encoding-3.1.1.tgz", + "integrity": "sha512-6qN4hJdMwfYBtE3YBTTHhoeuUrDBPZmbQaxWAqSALV/MeEnR5z1xd8UKud2RAkFoPkmB+hli1TZSnyi84xz1vQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "iconv-lite": "0.6.3" + }, + "engines": { + "node": ">=18" + } + }, + "node_modules/whatwg-mimetype": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/whatwg-mimetype/-/whatwg-mimetype-4.0.0.tgz", + "integrity": "sha512-QaKxh0eNIi2mE9p2vEdzfagOKHCcj1pJ56EEHGQOVxp8r9/iszLUUV7v89x9O1p/T+NlTM5W7jW6+cz4Fq1YVg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=18" + } + }, + "node_modules/whatwg-url": { + "version": "15.1.0", + "resolved": "https://registry.npmjs.org/whatwg-url/-/whatwg-url-15.1.0.tgz", + "integrity": 
"sha512-2ytDk0kiEj/yu90JOAp44PVPUkO9+jVhyf+SybKlRHSDlvOOZhdPIrr7xTH64l4WixO2cP+wQIcgujkGBPPz6g==", + "dev": true, + "license": "MIT", + "dependencies": { + "tr46": "^6.0.0", + "webidl-conversions": "^8.0.0" + }, + "engines": { + "node": ">=20" + } + }, "node_modules/which": { "version": "2.0.2", "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", @@ -3674,6 +4896,23 @@ "node": ">= 8" } }, + "node_modules/why-is-node-running": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/why-is-node-running/-/why-is-node-running-2.3.0.tgz", + "integrity": "sha512-hUrmaWBdVDcxvYqnyh09zunKzROWjbZTiNy8dBEjkS7ehEDQibXJ7XvlmtbwuTclUiIyN+CyXQD4Vmko8fNm8w==", + "dev": true, + "license": "MIT", + "dependencies": { + "siginfo": "^2.0.0", + "stackback": "0.0.2" + }, + "bin": { + "why-is-node-running": "cli.js" + }, + "engines": { + "node": ">=8" + } + }, "node_modules/word-wrap": { "version": "1.2.5", "resolved": "https://registry.npmjs.org/word-wrap/-/word-wrap-1.2.5.tgz", @@ -3684,6 +4923,45 @@ "node": ">=0.10.0" } }, + "node_modules/ws": { + "version": "8.18.3", + "resolved": "https://registry.npmjs.org/ws/-/ws-8.18.3.tgz", + "integrity": "sha512-PEIGCY5tSlUt50cqyMXfCzX+oOPqN0vuGqWzbcJ2xvnkzkq46oOpz7dQaTDBdfICb4N14+GARUDw2XV2N4tvzg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=10.0.0" + }, + "peerDependencies": { + "bufferutil": "^4.0.1", + "utf-8-validate": ">=5.0.2" + }, + "peerDependenciesMeta": { + "bufferutil": { + "optional": true + }, + "utf-8-validate": { + "optional": true + } + } + }, + "node_modules/xml-name-validator": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/xml-name-validator/-/xml-name-validator-5.0.0.tgz", + "integrity": "sha512-EvGK8EJ3DhaHfbRlETOWAS5pO9MZITeauHKJyb8wyajUfQUenkIg2MvLDTZ4T/TgIcm3HU0TFBgWWboAZ30UHg==", + "dev": true, + "license": "Apache-2.0", + "engines": { + "node": ">=18" + } + }, + "node_modules/xmlchars": { + "version": "2.2.0", + "resolved": 
"https://registry.npmjs.org/xmlchars/-/xmlchars-2.2.0.tgz", + "integrity": "sha512-JZnDKK8B0RCDw84FNdDAIpZK+JuJw+s7Lz8nksI7SIuU3UXJJslUthsi+uWBUYOwPFwW7W7PRLRfUKpxjtjFCw==", + "dev": true, + "license": "MIT" + }, "node_modules/yocto-queue": { "version": "0.1.0", "resolved": "https://registry.npmjs.org/yocto-queue/-/yocto-queue-0.1.0.tgz", diff --git a/frontend/package.json b/frontend/package.json index 1f6c769d..960eaa64 100644 --- a/frontend/package.json +++ b/frontend/package.json @@ -7,7 +7,9 @@ "dev": "vite --host", "build": "vite build", "lint": "eslint .", - "preview": "vite preview" + "preview": "vite preview", + "test": "vitest --run", + "test:watch": "vitest" }, "dependencies": { "@dnd-kit/core": "^6.3.1", @@ -43,6 +45,9 @@ }, "devDependencies": { "@eslint/js": "^9.21.0", + "@testing-library/jest-dom": "^6.8.0", + "@testing-library/react": "^16.3.0", + "@testing-library/user-event": "^14.6.1", "@types/react": "^19.0.10", "@types/react-dom": "^19.0.4", "@vitejs/plugin-react-swc": "^3.8.0", @@ -50,7 +55,9 @@ "eslint-plugin-react-hooks": "^5.1.0", "eslint-plugin-react-refresh": "^0.4.19", "globals": "^15.15.0", + "jsdom": "^27.0.0", "prettier": "^3.5.3", - "vite": "^6.2.0" + "vite": "^6.2.0", + "vitest": "^3.2.4" } } diff --git a/frontend/src/pages/Guide.jsx b/frontend/src/pages/Guide.jsx index 7671fb57..11b17e5b 100644 --- a/frontend/src/pages/Guide.jsx +++ b/frontend/src/pages/Guide.jsx @@ -1,5 +1,5 @@ // frontend/src/pages/Guide.js -import React, { useMemo, useState, useEffect, useRef } from 'react'; +import React, { useMemo, useState, useEffect, useRef, useCallback } from 'react'; import dayjs from 'dayjs'; import API from '../api'; import useChannelsStore from '../store/channels'; @@ -23,23 +23,216 @@ import { Transition, Modal, Stack, - useMantineTheme, } from '@mantine/core'; import { Search, X, Clock, Video, Calendar, Play } from 'lucide-react'; import './guide.css'; import useEPGsStore from '../store/epgs'; import useLocalStorage from 
'../hooks/useLocalStorage'; +import { useElementSize } from '@mantine/hooks'; +import { VariableSizeList } from 'react-window'; +import { + PROGRAM_HEIGHT, + EXPANDED_PROGRAM_HEIGHT, + buildChannelIdMap, + mapProgramsByChannel, + computeRowHeights, +} from './guideUtils'; /** Layout constants */ const CHANNEL_WIDTH = 120; // Width of the channel/logo column -const PROGRAM_HEIGHT = 90; // Height of each channel row -const EXPANDED_PROGRAM_HEIGHT = 180; // Height for expanded program rows const HOUR_WIDTH = 450; // Increased from 300 to 450 to make each program wider const MINUTE_INCREMENT = 15; // For positioning programs every 15 min const MINUTE_BLOCK_WIDTH = HOUR_WIDTH / (60 / MINUTE_INCREMENT); +const GuideRow = React.memo(({ index, style, data }) => { + const { + filteredChannels, + programsByChannelId, + expandedProgramId, + rowHeights, + logos, + hoveredChannelId, + setHoveredChannelId, + renderProgram, + handleLogoClick, + contentWidth, + } = data; + + const channel = filteredChannels[index]; + if (!channel) { + return null; + } + + const channelPrograms = programsByChannelId.get(channel.id) || []; + const rowHeight = + rowHeights[index] ?? + (channelPrograms.some((program) => program.id === expandedProgramId) + ? EXPANDED_PROGRAM_HEIGHT + : PROGRAM_HEIGHT); + + return ( +
+ + handleLogoClick(channel, event)} + onMouseEnter={() => setHoveredChannelId(channel.id)} + onMouseLeave={() => setHoveredChannelId(null)} + > + {hoveredChannelId === channel.id && ( + + + + )} + + + + {channel.name} + + + + {channel.channel_number || '-'} + + + + + + {channelPrograms.length > 0 ? ( + channelPrograms.map((program) => renderProgram(program)) + ) : ( + <> + {Array.from({ length: Math.ceil(24 / 2) }).map((_, placeholderIndex) => ( + + No program data + + ))} + + )} + + +
+ ); +}); + export default function TVChannelGuide({ startDate, endDate }) { - const theme = useMantineTheme(); const channels = useChannelsStore((s) => s.channels); const recordings = useChannelsStore((s) => s.recordings); const channelGroups = useChannelsStore((s) => s.channelGroups); @@ -59,7 +252,6 @@ export default function TVChannelGuide({ startDate, endDate }) { const [existingRuleMode, setExistingRuleMode] = useState(null); const [rulesOpen, setRulesOpen] = useState(false); const [rules, setRules] = useState([]); - const [loading, setLoading] = useState(true); const [initialScrollComplete, setInitialScrollComplete] = useState(false); // New filter states @@ -71,6 +263,15 @@ export default function TVChannelGuide({ startDate, endDate }) { const guideRef = useRef(null); const timelineRef = useRef(null); // New ref for timeline scrolling + const listRef = useRef(null); + const isSyncingScroll = useRef(false); + const guideScrollLeftRef = useRef(0); + const { + ref: guideContainerRef, + width: guideWidth, + height: guideHeight, + } = useElementSize(); + const [guideScrollLeft, setGuideScrollLeft] = useState(0); // Add new state to track hovered logo const [hoveredChannelId, setHoveredChannelId] = useState(null); @@ -80,7 +281,6 @@ export default function TVChannelGuide({ startDate, endDate }) { if (!Object.keys(channels).length === 0) { console.warn('No channels provided or empty channels array'); notifications.show({ title: 'No channels available', color: 'red.5' }); - setLoading(false); return; } @@ -97,10 +297,19 @@ export default function TVChannelGuide({ startDate, endDate }) { console.log(`Using all ${sortedChannels.length} available channels`); + const processedPrograms = fetched.map((program) => { + const start = dayjs(program.start_time); + const end = dayjs(program.end_time); + return { + ...program, + startMs: start.valueOf(), + endMs: end.valueOf(), + }; + }); + setGuideChannels(sortedChannels); setFilteredChannels(sortedChannels); // Initialize 
filtered channels - setPrograms(fetched); - setLoading(false); + setPrograms(processedPrograms); }; fetchPrograms(); @@ -180,22 +389,59 @@ export default function TVChannelGuide({ startDate, endDate }) { ? latestProgramEnd : defaultEnd; - // Time increments in 15-min steps (for placing programs) - const programTimeline = useMemo(() => { - const times = []; - let current = start; - while (current.isBefore(end)) { - times.push(current); - current = current.add(MINUTE_INCREMENT, 'minute'); - } - return times; - }, [start, end]); + const channelIdByTvgId = useMemo( + () => buildChannelIdMap(guideChannels, tvgsById), + [guideChannels, tvgsById] + ); + + const channelById = useMemo(() => { + const map = new Map(); + guideChannels.forEach((channel) => { + map.set(channel.id, channel); + }); + return map; + }, [guideChannels]); + + const programsByChannelId = useMemo( + () => mapProgramsByChannel(programs, channelIdByTvgId), + [programs, channelIdByTvgId] + ); + + const recordingsByProgramId = useMemo(() => { + const map = new Map(); + (recordings || []).forEach((recording) => { + const programId = recording?.custom_properties?.program?.id; + if (programId != null) { + map.set(programId, recording); + } + }); + return map; + }, [recordings]); + + const rowHeights = useMemo( + () => + computeRowHeights( + filteredChannels, + programsByChannelId, + expandedProgramId + ), + [filteredChannels, programsByChannelId, expandedProgramId] + ); + + const getItemSize = useCallback( + (index) => rowHeights[index] ?? PROGRAM_HEIGHT, + [rowHeights] + ); + + const [timeFormatSetting] = useLocalStorage('time-format', '12h'); + const [dateFormatSetting] = useLocalStorage('date-format', 'mdy'); + const timeFormat = timeFormatSetting === '12h' ? 'h:mm A' : 'HH:mm'; + const dateFormat = dateFormatSetting === 'mdy' ? 
'MMMM D' : 'D MMMM'; // Format day label using relative terms when possible (Today, Tomorrow, etc) - const formatDayLabel = (time) => { + const formatDayLabel = useCallback((time) => { const today = dayjs().startOf('day'); const tomorrow = today.add(1, 'day'); - const dayAfterTomorrow = today.add(2, 'day'); const weekLater = today.add(7, 'day'); const day = time.startOf('day'); @@ -211,7 +457,7 @@ export default function TVChannelGuide({ startDate, endDate }) { // Beyond a week, show month and day return time.format(dateFormat); } - }; + }, [dateFormat]); // Hourly marks with day labels const hourTimeline = useMemo(() => { @@ -238,34 +484,39 @@ export default function TVChannelGuide({ startDate, endDate }) { current = current.add(1, 'hour'); } return hours; - }, [start, end]); + }, [start, end, formatDayLabel]); - // Scroll to the nearest half-hour mark ONLY on initial load useEffect(() => { - if ( - guideRef.current && - timelineRef.current && - programs.length > 0 && - !initialScrollComplete - ) { - // Round the current time to the nearest half-hour mark - const roundedNow = - now.minute() < 30 - ? 
now.startOf('hour') - : now.startOf('hour').add(30, 'minute'); - const nowOffset = roundedNow.diff(start, 'minute'); - const scrollPosition = - (nowOffset / MINUTE_INCREMENT) * MINUTE_BLOCK_WIDTH - - MINUTE_BLOCK_WIDTH; + const node = guideRef.current; + if (!node) return undefined; - const scrollPos = Math.max(scrollPosition, 0); - guideRef.current.scrollLeft = scrollPos; - timelineRef.current.scrollLeft = scrollPos; // Sync timeline scroll + const handleScroll = () => { + const { scrollLeft } = node; + if (scrollLeft === guideScrollLeftRef.current) { + return; + } - // Mark initial scroll as complete - setInitialScrollComplete(true); - } - }, [programs, start, now, initialScrollComplete]); + guideScrollLeftRef.current = scrollLeft; + setGuideScrollLeft(scrollLeft); + + if (isSyncingScroll.current) { + return; + } + + if (timelineRef.current && timelineRef.current.scrollLeft !== scrollLeft) { + isSyncingScroll.current = true; + timelineRef.current.scrollLeft = scrollLeft; + requestAnimationFrame(() => { + isSyncingScroll.current = false; + }); + } + }; + + node.addEventListener('scroll', handleScroll, { passive: true }); + return () => { + node.removeEventListener('scroll', handleScroll); + }; + }, []); // Update “now” every second useEffect(() => { @@ -282,179 +533,42 @@ export default function TVChannelGuide({ startDate, endDate }) { return (minutesSinceStart / MINUTE_INCREMENT) * MINUTE_BLOCK_WIDTH; }, [now, start, end]); - // Helper: find channel by tvg_id - function findChannelByTvgId(tvgId) { - return guideChannels.find( - (ch) => - tvgsById[ch.epg_data_id]?.tvg_id === tvgId || - (!ch.epg_data_id && ch.uuid === tvgId) - ); - } + const syncScrollLeft = useCallback( + (nextLeft, behavior = 'auto') => { + const guideNode = guideRef.current; + const timelineNode = timelineRef.current; - const openRecordChoice = async (program) => { - setRecordChoiceProgram(program); - setRecordChoiceOpen(true); - try { - const rules = await API.listSeriesRules(); - // Only treat 
as existing if the rule matches this specific show's title (or has no title constraint) - const rule = (rules || []).find( - (r) => String(r.tvg_id) === String(program.tvg_id) && (!r.title || r.title === program.title) - ); - setExistingRuleMode(rule ? rule.mode : null); - } catch {} - // Also detect if this program already has a scheduled recording - try { - const rec = (recordings || []).find((r) => r?.custom_properties?.program?.id == program.id); - setRecordingForProgram(rec || null); - } catch {} - }; + isSyncingScroll.current = true; - const recordOne = async (program) => { - const channel = findChannelByTvgId(program.tvg_id); - await API.createRecording({ - channel: `${channel.id}`, - start_time: program.start_time, - end_time: program.end_time, - custom_properties: { program }, - }); - notifications.show({ title: 'Recording scheduled' }); - }; - - const saveSeriesRule = async (program, mode) => { - await API.createSeriesRule({ tvg_id: program.tvg_id, mode, title: program.title }); - await API.evaluateSeriesRules(program.tvg_id); - // Refresh recordings so icons and DVR reflect new schedules - try { - await useChannelsStore.getState().fetchRecordings(); - } catch (e) { - console.warn('Failed to refresh recordings after saving series rule', e); - } - notifications.show({ title: mode === 'new' ? 
'Record new episodes' : 'Record all episodes' }); - }; - - const openRules = async () => { - setRulesOpen(true); - try { - const r = await API.listSeriesRules(); - setRules(r); - } catch (e) { - // handled by API - } - }; - - const deleteAllUpcoming = async () => { - const ok = window.confirm('Delete ALL upcoming recordings?'); - if (!ok) return; - await API.deleteAllUpcomingRecordings(); - try { await useChannelsStore.getState().fetchRecordings(); } catch {} - }; - - // The “Watch Now” click => show floating video - const showVideo = useVideoStore((s) => s.showVideo); - function handleWatchStream(program) { - const matched = findChannelByTvgId(program.tvg_id); - if (!matched) { - console.warn(`No channel found for tvg_id=${program.tvg_id}`); - return; - } - // Build a playable stream URL for that channel - let vidUrl = `/proxy/ts/stream/${matched.uuid}`; - if (env_mode == 'dev') { - vidUrl = `${window.location.protocol}//${window.location.hostname}:5656${vidUrl}`; - } - - showVideo(vidUrl); - } - - // Function to handle logo click to play channel - function handleLogoClick(channel, event) { - // Prevent event from bubbling up - event.stopPropagation(); - - // Build a playable stream URL for the channel - let vidUrl = `/proxy/ts/stream/${channel.uuid}`; - if (env_mode === 'dev') { - vidUrl = `${window.location.protocol}//${window.location.hostname}:5656${vidUrl}`; - } - - // Use the existing showVideo function - showVideo(vidUrl); - } - - // On program click, toggle the expanded state - function handleProgramClick(program, event) { - // Prevent event from bubbling up to parent elements - event.stopPropagation(); - - // Get the program's start time and calculate its position - const programStart = dayjs(program.start_time); - const startOffsetMinutes = programStart.diff(start, 'minute'); - const leftPx = (startOffsetMinutes / MINUTE_INCREMENT) * MINUTE_BLOCK_WIDTH; - - // Calculate desired scroll position (account for channel column width) - const 
desiredScrollPosition = Math.max(0, leftPx - 20); // 20px buffer - - // If already expanded, collapse it - if (expandedProgramId === program.id) { - setExpandedProgramId(null); - setRecordingForProgram(null); - return; - } - - // Otherwise expand this program - setExpandedProgramId(program.id); - - // Check if this program has a recording - const programRecording = recordings.find((recording) => { - if (recording.custom_properties) { - const customProps = recording.custom_properties || {}; - if (customProps.program && customProps.program.id == program.id) { - return true; + if (guideNode) { + if (typeof guideNode.scrollTo === 'function') { + guideNode.scrollTo({ left: nextLeft, behavior }); + } else { + guideNode.scrollLeft = nextLeft; } } - return false; - }); - setRecordingForProgram(programRecording); - - // Scroll to show the start of the program if it's not already fully visible - if (guideRef.current && timelineRef.current) { - const currentScrollPosition = guideRef.current.scrollLeft; - - // Check if we need to scroll (if program start is before current view or too close to edge) - if ( - desiredScrollPosition < currentScrollPosition || - leftPx - currentScrollPosition < 100 - ) { - // 100px from left edge - - // Smooth scroll to the program's start - guideRef.current.scrollTo({ - left: desiredScrollPosition, - behavior: 'smooth', - }); - - // Also sync the timeline scroll - timelineRef.current.scrollTo({ - left: desiredScrollPosition, - behavior: 'smooth', - }); + if (timelineNode) { + if (typeof timelineNode.scrollTo === 'function') { + timelineNode.scrollTo({ left: nextLeft, behavior }); + } else { + timelineNode.scrollLeft = nextLeft; + } } - } - } - // Close the expanded program when clicking elsewhere - const handleClickOutside = () => { - if (expandedProgramId) { - setExpandedProgramId(null); - setRecordingForProgram(null); - } - }; + guideScrollLeftRef.current = nextLeft; + setGuideScrollLeft(nextLeft); - // Function to scroll to current time - 
matches initial loading position - const scrollToNow = () => { - if (guideRef.current && timelineRef.current && nowPosition >= 0) { - // Round the current time to the nearest half-hour mark + requestAnimationFrame(() => { + isSyncingScroll.current = false; + }); + }, + [] + ); + + // Scroll to the nearest half-hour mark ONLY on initial load + useEffect(() => { + if (programs.length > 0 && !initialScrollComplete) { const roundedNow = now.minute() < 30 ? now.startOf('hour') @@ -465,61 +579,232 @@ export default function TVChannelGuide({ startDate, endDate }) { MINUTE_BLOCK_WIDTH; const scrollPos = Math.max(scrollPosition, 0); - guideRef.current.scrollLeft = scrollPos; - timelineRef.current.scrollLeft = scrollPos; // Sync timeline scroll + syncScrollLeft(scrollPos); + + setInitialScrollComplete(true); + } + }, [programs, start, now, initialScrollComplete, syncScrollLeft]); + + const findChannelByTvgId = useCallback( + (tvgId) => { + const channelId = channelIdByTvgId.get(String(tvgId)); + if (!channelId) { + return null; + } + return channelById.get(channelId) || null; + }, + [channelById, channelIdByTvgId] + ); + + const openRecordChoice = useCallback( + async (program) => { + setRecordChoiceProgram(program); + setRecordChoiceOpen(true); + try { + const rules = await API.listSeriesRules(); + const rule = (rules || []).find( + (r) => + String(r.tvg_id) === String(program.tvg_id) && + (!r.title || r.title === program.title) + ); + setExistingRuleMode(rule ? 
rule.mode : null); + } catch (error) { + console.warn('Failed to fetch series rules metadata', error); + } + + const existingRecording = recordingsByProgramId.get(program.id) || null; + setRecordingForProgram(existingRecording); + }, + [recordingsByProgramId] + ); + + const recordOne = useCallback( + async (program) => { + const channel = findChannelByTvgId(program.tvg_id); + if (!channel) { + notifications.show({ + title: 'Unable to schedule recording', + message: 'No channel found for this program.', + color: 'red.6', + }); + return; + } + + await API.createRecording({ + channel: `${channel.id}`, + start_time: program.start_time, + end_time: program.end_time, + custom_properties: { program }, + }); + notifications.show({ title: 'Recording scheduled' }); + }, + [findChannelByTvgId] + ); + + const saveSeriesRule = useCallback(async (program, mode) => { + await API.createSeriesRule({ + tvg_id: program.tvg_id, + mode, + title: program.title, + }); + await API.evaluateSeriesRules(program.tvg_id); + try { + await useChannelsStore.getState().fetchRecordings(); + } catch (error) { + console.warn('Failed to refresh recordings after saving series rule', error); + } + notifications.show({ + title: mode === 'new' ? 
'Record new episodes' : 'Record all episodes', + }); + }, []); + + const openRules = useCallback(async () => { + setRulesOpen(true); + try { + const r = await API.listSeriesRules(); + setRules(r); + } catch (error) { + console.warn('Failed to load series rules', error); + } + }, []); + + // The “Watch Now” click => show floating video + const showVideo = useVideoStore((s) => s.showVideo); + const handleWatchStream = useCallback( + (program) => { + const matched = findChannelByTvgId(program.tvg_id); + if (!matched) { + console.warn(`No channel found for tvg_id=${program.tvg_id}`); + return; + } + + let vidUrl = `/proxy/ts/stream/${matched.uuid}`; + if (env_mode === 'dev') { + vidUrl = `${window.location.protocol}//${window.location.hostname}:5656${vidUrl}`; + } + + showVideo(vidUrl); + }, + [env_mode, findChannelByTvgId, showVideo] + ); + + const handleLogoClick = useCallback( + (channel, event) => { + event.stopPropagation(); + + let vidUrl = `/proxy/ts/stream/${channel.uuid}`; + if (env_mode === 'dev') { + vidUrl = `${window.location.protocol}//${window.location.hostname}:5656${vidUrl}`; + } + + showVideo(vidUrl); + }, + [env_mode, showVideo] + ); + + const handleProgramClick = useCallback( + (program, event) => { + event.stopPropagation(); + + const programStartMs = program.startMs ?? 
dayjs(program.start_time).valueOf(); + const startOffsetMinutes = (programStartMs - start.valueOf()) / 60000; + const leftPx = + (startOffsetMinutes / MINUTE_INCREMENT) * MINUTE_BLOCK_WIDTH; + const desiredScrollPosition = Math.max(0, leftPx - 20); + + if (expandedProgramId === program.id) { + setExpandedProgramId(null); + setRecordingForProgram(null); + } else { + setExpandedProgramId(program.id); + setRecordingForProgram(recordingsByProgramId.get(program.id) || null); + } + + const guideNode = guideRef.current; + if (guideNode) { + const currentScrollPosition = guideNode.scrollLeft; + if ( + desiredScrollPosition < currentScrollPosition || + leftPx - currentScrollPosition < 100 + ) { + syncScrollLeft(desiredScrollPosition, 'smooth'); + } + } + }, + [expandedProgramId, recordingsByProgramId, start, syncScrollLeft] + ); + + // Close the expanded program when clicking elsewhere + const handleClickOutside = () => { + if (expandedProgramId) { + setExpandedProgramId(null); + setRecordingForProgram(null); } }; - // Sync scrolling between timeline and main content - const handleTimelineScroll = () => { - if (timelineRef.current && guideRef.current) { - guideRef.current.scrollLeft = timelineRef.current.scrollLeft; + const scrollToNow = useCallback(() => { + if (nowPosition < 0) { + return; } - }; - // Sync scrolling between main content and timeline - const handleGuideScroll = () => { - if (guideRef.current && timelineRef.current) { - timelineRef.current.scrollLeft = guideRef.current.scrollLeft; + const roundedNow = + now.minute() < 30 + ? 
now.startOf('hour') + : now.startOf('hour').add(30, 'minute'); + const nowOffset = roundedNow.diff(start, 'minute'); + const scrollPosition = + (nowOffset / MINUTE_INCREMENT) * MINUTE_BLOCK_WIDTH - + MINUTE_BLOCK_WIDTH; + + const scrollPos = Math.max(scrollPosition, 0); + syncScrollLeft(scrollPos, 'smooth'); + }, [now, nowPosition, start, syncScrollLeft]); + + const handleTimelineScroll = useCallback(() => { + if (!timelineRef.current) { + return; } - }; - // Handle wheel events on the timeline for horizontal scrolling - const handleTimelineWheel = (e) => { - if (timelineRef.current) { - // Prevent the default vertical scroll - e.preventDefault(); + const nextLeft = timelineRef.current.scrollLeft; + guideScrollLeftRef.current = nextLeft; + setGuideScrollLeft(nextLeft); - // Determine scroll amount (with shift key for faster scrolling) - const scrollAmount = e.shiftKey ? 250 : 125; + if (isSyncingScroll.current) { + return; + } - // Scroll horizontally based on wheel direction - timelineRef.current.scrollLeft += - e.deltaY > 0 ? 
scrollAmount : -scrollAmount; - - // Sync the main content scroll position - if (guideRef.current) { - guideRef.current.scrollLeft = timelineRef.current.scrollLeft; + isSyncingScroll.current = true; + if (guideRef.current) { + if (typeof guideRef.current.scrollTo === 'function') { + guideRef.current.scrollTo({ left: nextLeft }); + } else { + guideRef.current.scrollLeft = nextLeft; } } - }; - // Function to handle timeline time clicks with 15-minute snapping - const handleTimeClick = (clickedTime, event) => { - if (timelineRef.current && guideRef.current) { - // Calculate where in the hour block the click happened - const hourBlockElement = event.currentTarget; - const rect = hourBlockElement.getBoundingClientRect(); - const clickPositionX = event.clientX - rect.left; // Position within the hour block - const percentageAcross = clickPositionX / rect.width; // 0 to 1 value + requestAnimationFrame(() => { + isSyncingScroll.current = false; + }); + }, []); - // Calculate the minute within the hour based on click position + const handleTimelineWheel = useCallback((event) => { + if (!timelineRef.current) { + return; + } + + event.preventDefault(); + const scrollAmount = event.shiftKey ? 250 : 125; + const delta = event.deltaY > 0 ? 
scrollAmount : -scrollAmount; + timelineRef.current.scrollBy({ left: delta, behavior: 'smooth' }); + }, []); + + const handleTimeClick = useCallback( + (clickedTime, event) => { + const rect = event.currentTarget.getBoundingClientRect(); + const clickPositionX = event.clientX - rect.left; + const percentageAcross = clickPositionX / rect.width; const minuteWithinHour = Math.floor(percentageAcross * 60); - // Create a new time object with the calculated minute - const exactTime = clickedTime.minute(minuteWithinHour); - - // Determine the nearest 15-minute interval (0, 15, 30, 45) let snappedMinute; if (minuteWithinHour < 7.5) { snappedMinute = 0; @@ -530,176 +815,107 @@ export default function TVChannelGuide({ startDate, endDate }) { } else if (minuteWithinHour < 52.5) { snappedMinute = 45; } else { - // If we're past 52.5 minutes, snap to the next hour snappedMinute = 0; clickedTime = clickedTime.add(1, 'hour'); } - // Create the snapped time const snappedTime = clickedTime.minute(snappedMinute); - - // Calculate the offset from the start of the timeline to the snapped time const snappedOffset = snappedTime.diff(start, 'minute'); - - // Convert to pixels const scrollPosition = (snappedOffset / MINUTE_INCREMENT) * MINUTE_BLOCK_WIDTH; - // Scroll both containers to the snapped position - timelineRef.current.scrollLeft = scrollPosition; - guideRef.current.scrollLeft = scrollPosition; - } - }; - // Renders each program block - function renderProgram(program, channelStart) { - const programKey = `${program.tvg_id}-${program.start_time}`; - const programStart = dayjs(program.start_time); - const programEnd = dayjs(program.end_time); - const startOffsetMinutes = programStart.diff(channelStart, 'minute'); - const durationMinutes = programEnd.diff(programStart, 'minute'); - const leftPx = (startOffsetMinutes / MINUTE_INCREMENT) * MINUTE_BLOCK_WIDTH; + syncScrollLeft(scrollPosition, 'smooth'); + }, + [start, syncScrollLeft] + ); + const renderProgram = useCallback( + 
(program, channelStart = start) => { + const programStartMs = + program.startMs ?? dayjs(program.start_time).valueOf(); + const programEndMs = program.endMs ?? dayjs(program.end_time).valueOf(); + const programStart = dayjs(programStartMs); + const programEnd = dayjs(programEndMs); - // Calculate width with a small gap (2px on each side) - const gapSize = 2; - const widthPx = - (durationMinutes / MINUTE_INCREMENT) * MINUTE_BLOCK_WIDTH - gapSize * 2; + const startOffsetMinutes = + (programStartMs - channelStart.valueOf()) / 60000; + const durationMinutes = (programEndMs - programStartMs) / 60000; + const leftPx = + (startOffsetMinutes / MINUTE_INCREMENT) * MINUTE_BLOCK_WIDTH; - // Check if we have a recording for this program - const recording = recordings.find((recording) => { - if (recording.custom_properties) { - const customProps = recording.custom_properties || {}; - if (customProps.program && customProps.program.id == program.id) { - return recording; - } + const gapSize = 2; + const widthPx = + (durationMinutes / MINUTE_INCREMENT) * MINUTE_BLOCK_WIDTH - + gapSize * 2; + + const recording = recordingsByProgramId.get(program.id); + + const isLive = now.isAfter(programStart) && now.isBefore(programEnd); + const isPast = now.isAfter(programEnd); + const isExpanded = expandedProgramId === program.id; + + const rowHeight = isExpanded ? 
EXPANDED_PROGRAM_HEIGHT : PROGRAM_HEIGHT; + const MIN_EXPANDED_WIDTH = 450; + const expandedWidthPx = Math.max(widthPx, MIN_EXPANDED_WIDTH); + + const programStartInView = leftPx + gapSize; + const programEndInView = leftPx + gapSize + widthPx; + const viewportLeft = guideScrollLeft; + const startsBeforeView = programStartInView < viewportLeft; + const extendsIntoView = programEndInView > viewportLeft; + + let textOffsetLeft = 0; + if (startsBeforeView && extendsIntoView) { + const visibleStart = Math.max(viewportLeft - programStartInView, 0); + const maxOffset = widthPx - 200; + textOffsetLeft = Math.min(visibleStart, maxOffset); } - return null; - }); - // Highlight if currently live - const isLive = now.isAfter(programStart) && now.isBefore(programEnd); - - // Determine if the program has ended - const isPast = now.isAfter(programEnd); // Check if this program is expanded - const isExpanded = expandedProgramId === program.id; - - // Set the height based on expanded state - const rowHeight = isExpanded ? 
EXPANDED_PROGRAM_HEIGHT : PROGRAM_HEIGHT; - - // Determine expanded width - if program is short, ensure it has a minimum expanded width - // This will allow it to overlap programs to the right - const MIN_EXPANDED_WIDTH = 450; // Minimum width in pixels when expanded - const expandedWidthPx = Math.max(widthPx, MIN_EXPANDED_WIDTH); - - // Calculate text positioning for long programs that start before the visible area - const currentScrollLeft = guideRef.current?.scrollLeft || 0; - const programStartInView = leftPx + gapSize; - const programEndInView = leftPx + gapSize + widthPx; - const viewportLeft = currentScrollLeft; - - // Check if program starts before viewport but extends into it - const startsBeforeView = programStartInView < viewportLeft; - const extendsIntoView = programEndInView > viewportLeft; - - // Calculate text offset to position it at the visible portion - let textOffsetLeft = 0; - if (startsBeforeView && extendsIntoView) { - // Position text at the start of the visible area, but not beyond the program end - const visibleStart = Math.max(viewportLeft - programStartInView, 0); - const maxOffset = widthPx - 200; // Leave some space for text, don't push to very end - textOffsetLeft = Math.min(visibleStart, maxOffset); - } - - return ( - handleProgramClick(program, e)} - > - handleProgramClick(program, event)} > - - - - {recording && ( -
- )} - {program.title} -
-
- - {programStart.format(timeFormat)} -{' '} - {programEnd.format(timeFormat)} - -
{' '} - {/* Description is always shown but expands when row is expanded */} - {program.description && ( - {program.description} + + {recording && ( +
+ )} + {program.title} +
+
+ + {programStart.format(timeFormat)} - {programEnd.format(timeFormat)}
- )} - {/* Expanded content */} - {isExpanded && ( - - - {/* Always show Record for not-past; it opens options (schedule/remove) */} - {!isPast && ( - - )} - {isLive && ( - - )} - - - )} -
-
- ); - } + {program.description && ( + + + {program.description} + + + )} + + {isExpanded && ( + + + {!isPast && ( + + )} + + {isLive && ( + + )} + + + )} + + + ); + }, + [ + expandedProgramId, + guideScrollLeft, + handleProgramClick, + handleWatchStream, + now, + openRecordChoice, + recordingsByProgramId, + start, + timeFormat, + ] + ); + + const contentWidth = useMemo( + () => hourTimeline.length * HOUR_WIDTH + CHANNEL_WIDTH, + [hourTimeline] + ); + + const virtualizedHeight = useMemo( + () => guideHeight || 600, + [guideHeight] + ); + + const virtualizedWidth = useMemo(() => { + if (guideWidth) { + return guideWidth; + } + if (typeof window !== 'undefined') { + return Math.min(window.innerWidth, contentWidth); + } + return contentWidth; + }, [guideWidth, contentWidth]); + + const itemKey = useCallback( + (index) => filteredChannels[index]?.id ?? index, + [filteredChannels] + ); + + const listData = useMemo( + () => ({ + filteredChannels, + programsByChannelId, + expandedProgramId, + rowHeights, + logos, + hoveredChannelId, + setHoveredChannelId, + renderProgram, + handleLogoClick, + contentWidth, + }), + [ + filteredChannels, + programsByChannelId, + expandedProgramId, + rowHeights, + logos, + hoveredChannelId, + renderProgram, + handleLogoClick, + contentWidth, + setHoveredChannelId, + ] + ); + + useEffect(() => { + if (listRef.current) { + listRef.current.resetAfterIndex(0, true); + } + }, [rowHeights]); + + useEffect(() => { + if (listRef.current) { + listRef.current.scrollToItem(0); + } + }, [searchQuery, selectedGroupId, selectedProfileId]); // Create group options for dropdown - but only include groups used by guide channels const groupOptions = useMemo(() => { @@ -825,12 +1160,6 @@ export default function TVChannelGuide({ startDate, endDate }) { setSelectedProfileId(value || 'all'); }; - // Handle date-time formats - const [timeFormatSetting] = useLocalStorage('time-format', '12h'); - const [dateFormatSetting] = useLocalStorage('date-format', 'mdy'); - 
const timeFormat = timeFormatSetting === '12h' ? 'h:mm A' : 'HH:mm'; - const dateFormat = dateFormatSetting === 'mdy' ? 'MMMM D' : 'D MMMM'; - return ( - {/* Content wrapper with min-width to ensure scroll range */} - - {/* Now line - positioned absolutely within content */} - {nowPosition >= 0 && ( - - )} + {nowPosition >= 0 && ( + + )} - {/* Channel rows with logos and programs */} - {filteredChannels.length > 0 ? ( - filteredChannels.map((channel) => { - const channelPrograms = programs.filter( - (p) => - (channel.epg_data_id && - p.tvg_id === tvgsById[channel.epg_data_id].tvg_id) || - (!channel.epg_data_id && p.tvg_id === channel.uuid) - ); - // Check if any program in this channel is expanded - const hasExpandedProgram = channelPrograms.some( - (prog) => prog.id === expandedProgramId - ); - const rowHeight = hasExpandedProgram - ? EXPANDED_PROGRAM_HEIGHT - : PROGRAM_HEIGHT; - - return ( - - {/* Channel logo - sticky horizontally */} - handleLogoClick(channel, e)} - onMouseEnter={() => setHoveredChannelId(channel.id)} - onMouseLeave={() => setHoveredChannelId(null)} - > - {/* Play icon overlay - visible on hover (moved outside to cover entire box) */} - {hoveredChannelId === channel.id && ( - - {' '} - {/* Changed from Video to Play and increased size */} - - )} - - {/* Logo content - restructured for better positioning */} - - {/* Logo container with padding */} - - {channel.name} - - - {/* Channel number - fixed position at bottom with consistent height */} - - {channel.channel_number || '-'} - - - - - {/* Programs for this channel */} - - {channelPrograms.length > 0 ? ( - channelPrograms.map((program) => ( -
- {renderProgram(program, start)} -
- )) - ) : ( - // Simple placeholder for channels with no program data - 2 hour blocks - <> - {/* Generate repeating placeholder blocks every 2 hours across the timeline */} - {Array.from({ - length: Math.ceil(hourTimeline.length / 2), - }).map((_, index) => ( - - - - No Program Information Available - - - - ))} - - )} -
-
- ); - }) - ) : ( - - No channels match your filters - - - )} -
+ {filteredChannels.length > 0 ? ( + + {GuideRow} + + ) : ( + + No channels match your filters + + + )}
- {/* Record choice modal */} {recordChoiceOpen && recordChoiceProgram && ( { saveSeriesRule(recordChoiceProgram, 'new'); setRecordChoiceOpen(false); }}>New episodes only {recordingForProgram && ( <> - - + + )} {existingRuleMode && ( @@ -1433,18 +1592,47 @@ export default function TVChannelGuide({ startDate, endDate }) { {r.title || r.tvg_id} — {r.mode === 'new' ? 'New episodes' : 'Every episode'} - - + + ))} diff --git a/frontend/src/pages/__tests__/guideUtils.test.js b/frontend/src/pages/__tests__/guideUtils.test.js new file mode 100644 index 00000000..58a6d292 --- /dev/null +++ b/frontend/src/pages/__tests__/guideUtils.test.js @@ -0,0 +1,100 @@ +import { describe, it, expect } from 'vitest'; +import dayjs from 'dayjs'; +import { + PROGRAM_HEIGHT, + EXPANDED_PROGRAM_HEIGHT, + buildChannelIdMap, + mapProgramsByChannel, + computeRowHeights, +} from '../guideUtils.js'; + +describe('guideUtils', () => { + describe('buildChannelIdMap', () => { + it('maps tvg ids from epg records and falls back to channel uuid', () => { + const channels = [ + { id: 1, epg_data_id: 'epg-1', uuid: 'uuid-1' }, + { id: 2, epg_data_id: null, uuid: 'uuid-2' }, + ]; + const tvgsById = { + 'epg-1': { tvg_id: 'alpha' }, + }; + + const map = buildChannelIdMap(channels, tvgsById); + + expect(map.get('alpha')).toBe(1); + expect(map.get('uuid-2')).toBe(2); + }); + }); + + describe('mapProgramsByChannel', () => { + it('groups programs by channel and sorts them by start time', () => { + const programs = [ + { + id: 10, + tvg_id: 'alpha', + start_time: dayjs('2025-01-01T02:00:00Z').toISOString(), + end_time: dayjs('2025-01-01T03:00:00Z').toISOString(), + title: 'Late Show', + }, + { + id: 11, + tvg_id: 'alpha', + start_time: dayjs('2025-01-01T01:00:00Z').toISOString(), + end_time: dayjs('2025-01-01T02:00:00Z').toISOString(), + title: 'Evening News', + }, + { + id: 20, + tvg_id: 'beta', + start_time: dayjs('2025-01-01T00:00:00Z').toISOString(), + end_time: dayjs('2025-01-01T01:00:00Z').toISOString(), + 
title: 'Morning Show', + }, + ]; + + const channelIdByTvgId = new Map([ + ['alpha', 1], + ['beta', 2], + ]); + + const map = mapProgramsByChannel(programs, channelIdByTvgId); + + expect(map.get(1)).toHaveLength(2); + expect(map.get(1)?.map((item) => item.id)).toEqual([11, 10]); + expect(map.get(2)).toHaveLength(1); + expect(map.get(2)?.[0].startMs).toBeTypeOf('number'); + expect(map.get(2)?.[0].endMs).toBeTypeOf('number'); + }); + }); + + describe('computeRowHeights', () => { + it('returns program heights with expanded rows when needed', () => { + const filteredChannels = [ + { id: 1 }, + { id: 2 }, + ]; + + const programsByChannel = new Map([ + [1, [{ id: 10 }, { id: 11 }]], + [2, [{ id: 20 }]], + ]); + + const collapsed = computeRowHeights( + filteredChannels, + programsByChannel, + null + ); + expect(collapsed).toEqual([PROGRAM_HEIGHT, PROGRAM_HEIGHT]); + + const expanded = computeRowHeights( + filteredChannels, + programsByChannel, + 10 + ); + expect(expanded).toEqual([ + EXPANDED_PROGRAM_HEIGHT, + PROGRAM_HEIGHT, + ]); + }); + }); +}); diff --git a/frontend/src/pages/guideUtils.js b/frontend/src/pages/guideUtils.js new file mode 100644 index 00000000..9e5bcbc2 --- /dev/null +++ b/frontend/src/pages/guideUtils.js @@ -0,0 +1,71 @@ +import dayjs from 'dayjs'; + +export const PROGRAM_HEIGHT = 90; +export const EXPANDED_PROGRAM_HEIGHT = 180; + +export function buildChannelIdMap(channels, tvgsById) { + const map = new Map(); + channels.forEach((channel) => { + const tvgRecord = channel.epg_data_id + ? tvgsById[channel.epg_data_id] + : null; + const tvgId = tvgRecord?.tvg_id ?? 
channel.uuid; + if (tvgId) { + map.set(String(tvgId), channel.id); + } + }); + return map; +} + +export function mapProgramsByChannel(programs, channelIdByTvgId) { + if (!programs?.length || !channelIdByTvgId?.size) { + return new Map(); + } + + const map = new Map(); + programs.forEach((program) => { + const channelId = channelIdByTvgId.get(String(program.tvg_id)); + if (!channelId) { + return; + } + + if (!map.has(channelId)) { + map.set(channelId, []); + } + + const startMs = program.startMs ?? dayjs(program.start_time).valueOf(); + const endMs = program.endMs ?? dayjs(program.end_time).valueOf(); + + map.get(channelId).push({ + ...program, + startMs, + endMs, + }); + }); + + map.forEach((list) => { + list.sort((a, b) => a.startMs - b.startMs); + }); + + return map; +} + +export function computeRowHeights( + filteredChannels, + programsByChannelId, + expandedProgramId, + defaultHeight = PROGRAM_HEIGHT, + expandedHeight = EXPANDED_PROGRAM_HEIGHT +) { + if (!filteredChannels?.length) { + return []; + } + + return filteredChannels.map((channel) => { + const channelPrograms = programsByChannelId.get(channel.id) || []; + const expanded = channelPrograms.some( + (program) => program.id === expandedProgramId + ); + return expanded ? 
expandedHeight : defaultHeight; + }); +} diff --git a/frontend/src/test/setupTests.js b/frontend/src/test/setupTests.js new file mode 100644 index 00000000..b5f53af0 --- /dev/null +++ b/frontend/src/test/setupTests.js @@ -0,0 +1,42 @@ +import '@testing-library/jest-dom/vitest'; +import { afterEach, vi } from 'vitest'; +import { cleanup } from '@testing-library/react'; + +afterEach(() => { + cleanup(); +}); + +if (typeof window !== 'undefined' && !window.matchMedia) { + window.matchMedia = vi.fn().mockImplementation((query) => ({ + matches: false, + media: query, + onchange: null, + addListener: vi.fn(), + removeListener: vi.fn(), + addEventListener: vi.fn(), + removeEventListener: vi.fn(), + dispatchEvent: vi.fn(), + })); +} + +if (typeof window !== 'undefined' && !window.ResizeObserver) { + class ResizeObserver { + constructor(callback) { + this.callback = callback; + } + observe() {} + unobserve() {} + disconnect() {} + } + + window.ResizeObserver = ResizeObserver; +} + +if (typeof window !== 'undefined') { + if (!window.requestAnimationFrame) { + window.requestAnimationFrame = (cb) => setTimeout(cb, 16); + } + if (!window.cancelAnimationFrame) { + window.cancelAnimationFrame = (id) => clearTimeout(id); + } +} diff --git a/frontend/vite.config.js b/frontend/vite.config.js index 9ce8189b..1026e519 100644 --- a/frontend/vite.config.js +++ b/frontend/vite.config.js @@ -26,4 +26,10 @@ export default defineConfig({ // }, // }, }, + + test: { + environment: 'jsdom', + setupFiles: ['./src/test/setupTests.js'], + globals: true, + }, }); From 207613c00be51f7869a0b16713b286cf69e14af4 Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Sun, 21 Sep 2025 10:06:53 -0500 Subject: [PATCH 035/119] Fix frontend saving case sensitive setting as json string. 
--- frontend/src/components/forms/M3UFilter.jsx | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/frontend/src/components/forms/M3UFilter.jsx b/frontend/src/components/forms/M3UFilter.jsx index dcb3323f..aaf5a1d7 100644 --- a/frontend/src/components/forms/M3UFilter.jsx +++ b/frontend/src/components/forms/M3UFilter.jsx @@ -51,8 +51,7 @@ const M3UFilter = ({ filter = null, m3u, isOpen, onClose }) => { values.custom_properties = setCustomProperty( filter ? filter.custom_properties : {}, 'case_sensitive', - values.case_sensitive, - true + values.case_sensitive ); delete values.case_sensitive; From 63729fb0ea30265f1603ad49c70b5c117ff86e2a Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Sun, 21 Sep 2025 10:42:23 -0500 Subject: [PATCH 036/119] Improved logging for stream filters. --- apps/m3u/tasks.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/apps/m3u/tasks.py b/apps/m3u/tasks.py index 2caeb519..d8482d3b 100644 --- a/apps/m3u/tasks.py +++ b/apps/m3u/tasks.py @@ -903,6 +903,8 @@ def process_m3u_batch_direct(account_id, batch, groups, hash_keys): stream_hashes = {} logger.debug(f"Processing batch of {len(batch)} for M3U account {account_id}") + if compiled_filters: + logger.debug(f"Using compiled filters: {[f[1].regex_pattern for f in compiled_filters]}") for stream_info in batch: try: name, url = stream_info["name"], stream_info["url"] @@ -912,10 +914,10 @@ def process_m3u_batch_direct(account_id, batch, groups, hash_keys): group_title = get_case_insensitive_attr( stream_info["attributes"], "group-title", "Default Group" ) - + logger.debug(f"Processing stream: {name} - {url} in group {group_title}") include = True for pattern, filter in compiled_filters: - logger.debug(f"Checking filter patterh {pattern}") + logger.trace(f"Checking filter pattern {pattern}") target = name if filter.filter_type == "url": target = url From 3f7edd840e495f9aa1690329d5a29e2aad56859f Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Sun, 21 Sep 
2025 11:17:14 -0500 Subject: [PATCH 037/119] Change whiteSpace style from 'nowrap' to 'pre' in StreamsTable for better text formatting. Users can you reliably copy a name or group from the stream table and use as a filter for m3u accounts --- frontend/src/components/tables/StreamsTable.jsx | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/frontend/src/components/tables/StreamsTable.jsx b/frontend/src/components/tables/StreamsTable.jsx index ef944bbd..c4ab3652 100644 --- a/frontend/src/components/tables/StreamsTable.jsx +++ b/frontend/src/components/tables/StreamsTable.jsx @@ -282,7 +282,7 @@ const StreamsTable = () => { cell: ({ getValue }) => ( { cell: ({ getValue }) => ( Date: Sun, 21 Sep 2025 12:23:48 -0500 Subject: [PATCH 038/119] Bug fix: If URL for Channel element in EPG is longer than 500 characters parsing would fail. Added validation during scanning. --- apps/epg/tasks.py | 21 ++++++++++++++++++++- 1 file changed, 20 insertions(+), 1 deletion(-) diff --git a/apps/epg/tasks.py b/apps/epg/tasks.py index 45b17bb7..5ab8b809 100644 --- a/apps/epg/tasks.py +++ b/apps/epg/tasks.py @@ -28,6 +28,23 @@ from core.utils import acquire_task_lock, release_task_lock, send_websocket_upda logger = logging.getLogger(__name__) + +def validate_icon_url_fast(icon_url, max_length=None): + """ + Fast validation for icon URLs during parsing. + Returns None if URL is too long, original URL otherwise. + If max_length is None, gets it dynamically from the EPGData model field. 
+ """ + if max_length is None: + # Get max_length dynamically from the model field + max_length = EPGData._meta.get_field('icon_url').max_length + + if icon_url and len(icon_url) > max_length: + logger.warning(f"Icon URL too long ({len(icon_url)} > {max_length}), skipping: {icon_url[:100]}...") + return None + return icon_url + + MAX_EXTRACT_CHUNK_SIZE = 65536 # 64kb (base2) @@ -831,6 +848,7 @@ def parse_channels_only(source): processed_channels = 0 batch_size = 500 # Process in batches to limit memory usage progress = 0 # Initialize progress variable here + icon_url_max_length = EPGData._meta.get_field('icon_url').max_length # Get max length for icon_url field # Track memory at key points if process: @@ -878,7 +896,8 @@ def parse_channels_only(source): if child.tag == 'display-name' and child.text: display_name = child.text.strip() elif child.tag == 'icon': - icon_url = child.get('src', '').strip() + raw_icon_url = child.get('src', '').strip() + icon_url = validate_icon_url_fast(raw_icon_url, icon_url_max_length) if display_name and icon_url: break # No need to continue if we have both From 6f79845b215d9e0fd1088cd0e94fc1a002d988aa Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Sun, 21 Sep 2025 12:40:20 -0500 Subject: [PATCH 039/119] Enhancement: Only grab first display name for a channel during epg scanning. 
--- apps/epg/tasks.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/apps/epg/tasks.py b/apps/epg/tasks.py index 5ab8b809..27735df8 100644 --- a/apps/epg/tasks.py +++ b/apps/epg/tasks.py @@ -893,7 +893,7 @@ def parse_channels_only(source): display_name = None icon_url = None for child in elem: - if child.tag == 'display-name' and child.text: + if display_name is None and child.tag == 'display-name' and child.text: display_name = child.text.strip() elif child.tag == 'icon': raw_icon_url = child.get('src', '').strip() From 08e5b6f36fbfc6709176ac6bc91ccc37e20f1edf Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Sun, 21 Sep 2025 15:17:15 -0500 Subject: [PATCH 040/119] Apply prettier formatting. --- frontend/src/pages/Guide.jsx | 338 +++++++++++++++++++++-------------- 1 file changed, 207 insertions(+), 131 deletions(-) diff --git a/frontend/src/pages/Guide.jsx b/frontend/src/pages/Guide.jsx index 11b17e5b..9c90bb1d 100644 --- a/frontend/src/pages/Guide.jsx +++ b/frontend/src/pages/Guide.jsx @@ -1,5 +1,11 @@ // frontend/src/pages/Guide.js -import React, { useMemo, useState, useEffect, useRef, useCallback } from 'react'; +import React, { + useMemo, + useState, + useEffect, + useRef, + useCallback, +} from 'react'; import dayjs from 'dayjs'; import API from '../api'; import useChannelsStore from '../store/channels'; @@ -204,26 +210,28 @@ const GuideRow = React.memo(({ index, style, data }) => { channelPrograms.map((program) => renderProgram(program)) ) : ( <> - {Array.from({ length: Math.ceil(24 / 2) }).map((_, placeholderIndex) => ( - - No program data - - ))} + {Array.from({ length: Math.ceil(24 / 2) }).map( + (_, placeholderIndex) => ( + + No program data + + ) + )} )} @@ -435,29 +443,33 @@ export default function TVChannelGuide({ startDate, endDate }) { const [timeFormatSetting] = useLocalStorage('time-format', '12h'); const [dateFormatSetting] = useLocalStorage('date-format', 'mdy'); + // Use user preference for time format const timeFormat = 
timeFormatSetting === '12h' ? 'h:mm A' : 'HH:mm'; const dateFormat = dateFormatSetting === 'mdy' ? 'MMMM D' : 'D MMMM'; // Format day label using relative terms when possible (Today, Tomorrow, etc) - const formatDayLabel = useCallback((time) => { - const today = dayjs().startOf('day'); - const tomorrow = today.add(1, 'day'); - const weekLater = today.add(7, 'day'); + const formatDayLabel = useCallback( + (time) => { + const today = dayjs().startOf('day'); + const tomorrow = today.add(1, 'day'); + const weekLater = today.add(7, 'day'); - const day = time.startOf('day'); + const day = time.startOf('day'); - if (day.isSame(today, 'day')) { - return 'Today'; - } else if (day.isSame(tomorrow, 'day')) { - return 'Tomorrow'; - } else if (day.isBefore(weekLater)) { - // Within a week, show day name - return time.format('dddd'); - } else { - // Beyond a week, show month and day - return time.format(dateFormat); - } - }, [dateFormat]); + if (day.isSame(today, 'day')) { + return 'Today'; + } else if (day.isSame(tomorrow, 'day')) { + return 'Tomorrow'; + } else if (day.isBefore(weekLater)) { + // Within a week, show day name + return time.format('dddd'); + } else { + // Beyond a week, show month and day + return time.format(dateFormat); + } + }, + [dateFormat] + ); // Hourly marks with day labels const hourTimeline = useMemo(() => { @@ -503,7 +515,10 @@ export default function TVChannelGuide({ startDate, endDate }) { return; } - if (timelineRef.current && timelineRef.current.scrollLeft !== scrollLeft) { + if ( + timelineRef.current && + timelineRef.current.scrollLeft !== scrollLeft + ) { isSyncingScroll.current = true; timelineRef.current.scrollLeft = scrollLeft; requestAnimationFrame(() => { @@ -533,38 +548,35 @@ export default function TVChannelGuide({ startDate, endDate }) { return (minutesSinceStart / MINUTE_INCREMENT) * MINUTE_BLOCK_WIDTH; }, [now, start, end]); - const syncScrollLeft = useCallback( - (nextLeft, behavior = 'auto') => { - const guideNode = 
guideRef.current; - const timelineNode = timelineRef.current; + const syncScrollLeft = useCallback((nextLeft, behavior = 'auto') => { + const guideNode = guideRef.current; + const timelineNode = timelineRef.current; - isSyncingScroll.current = true; + isSyncingScroll.current = true; - if (guideNode) { - if (typeof guideNode.scrollTo === 'function') { - guideNode.scrollTo({ left: nextLeft, behavior }); - } else { - guideNode.scrollLeft = nextLeft; - } + if (guideNode) { + if (typeof guideNode.scrollTo === 'function') { + guideNode.scrollTo({ left: nextLeft, behavior }); + } else { + guideNode.scrollLeft = nextLeft; } + } - if (timelineNode) { - if (typeof timelineNode.scrollTo === 'function') { - timelineNode.scrollTo({ left: nextLeft, behavior }); - } else { - timelineNode.scrollLeft = nextLeft; - } + if (timelineNode) { + if (typeof timelineNode.scrollTo === 'function') { + timelineNode.scrollTo({ left: nextLeft, behavior }); + } else { + timelineNode.scrollLeft = nextLeft; } + } - guideScrollLeftRef.current = nextLeft; - setGuideScrollLeft(nextLeft); + guideScrollLeftRef.current = nextLeft; + setGuideScrollLeft(nextLeft); - requestAnimationFrame(() => { - isSyncingScroll.current = false; - }); - }, - [] - ); + requestAnimationFrame(() => { + isSyncingScroll.current = false; + }); + }, []); // Scroll to the nearest half-hour mark ONLY on initial load useEffect(() => { @@ -651,7 +663,10 @@ export default function TVChannelGuide({ startDate, endDate }) { try { await useChannelsStore.getState().fetchRecordings(); } catch (error) { - console.warn('Failed to refresh recordings after saving series rule', error); + console.warn( + 'Failed to refresh recordings after saving series rule', + error + ); } notifications.show({ title: mode === 'new' ? 'Record new episodes' : 'Record all episodes', @@ -706,7 +721,8 @@ export default function TVChannelGuide({ startDate, endDate }) { (program, event) => { event.stopPropagation(); - const programStartMs = program.startMs ?? 
dayjs(program.start_time).valueOf(); + const programStartMs = + program.startMs ?? dayjs(program.start_time).valueOf(); const startOffsetMinutes = (programStartMs - start.valueOf()) / 60000; const leftPx = (startOffsetMinutes / MINUTE_INCREMENT) * MINUTE_BLOCK_WIDTH; @@ -753,8 +769,7 @@ export default function TVChannelGuide({ startDate, endDate }) { : now.startOf('hour').add(30, 'minute'); const nowOffset = roundedNow.diff(start, 'minute'); const scrollPosition = - (nowOffset / MINUTE_INCREMENT) * MINUTE_BLOCK_WIDTH - - MINUTE_BLOCK_WIDTH; + (nowOffset / MINUTE_INCREMENT) * MINUTE_BLOCK_WIDTH - MINUTE_BLOCK_WIDTH; const scrollPos = Math.max(scrollPosition, 0); syncScrollLeft(scrollPos, 'smooth'); @@ -844,8 +859,7 @@ export default function TVChannelGuide({ startDate, endDate }) { const gapSize = 2; const widthPx = - (durationMinutes / MINUTE_INCREMENT) * MINUTE_BLOCK_WIDTH - - gapSize * 2; + (durationMinutes / MINUTE_INCREMENT) * MINUTE_BLOCK_WIDTH - gapSize * 2; const recording = recordingsByProgramId.get(program.id); @@ -955,7 +969,8 @@ export default function TVChannelGuide({ startDate, endDate }) { overflow: 'hidden', }} > - {programStart.format(timeFormat)} - {programEnd.format(timeFormat)} + {programStart.format(timeFormat)} -{' '} + {programEnd.format(timeFormat)}
@@ -1039,10 +1054,7 @@ export default function TVChannelGuide({ startDate, endDate }) { [hourTimeline] ); - const virtualizedHeight = useMemo( - () => guideHeight || 600, - [guideHeight] - ); + const virtualizedHeight = useMemo(() => guideHeight || 600, [guideHeight]); const virtualizedWidth = useMemo(() => { if (guideWidth) { @@ -1507,9 +1519,32 @@ export default function TVChannelGuide({ startDate, endDate }) { }} > - - - + + + {recordingForProgram && ( <> + )} @@ -1586,56 +1640,78 @@ export default function TVChannelGuide({ startDate, endDate }) { > {(!rules || rules.length === 0) && ( - No series rules configured + + No series rules configured + )} - {rules && rules.map((r) => ( - - {r.title || r.tvg_id} — {r.mode === 'new' ? 'New episodes' : 'Every episode'} - - - - - - ))} + {rules && + rules.map((r) => ( + + + {r.title || r.tvg_id} —{' '} + {r.mode === 'new' ? 'New episodes' : 'Every episode'} + + + + + + + ))} )} From 75fbf9639a284860f123fa2cdf0530ca5781ca8a Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Sun, 21 Sep 2025 16:23:07 -0500 Subject: [PATCH 041/119] Enhancement: Update channel and program mapping to support multiple channels per TVG ID --- frontend/src/pages/Guide.jsx | 15 +++++++++------ frontend/src/pages/guideUtils.js | 24 ++++++++++++++++-------- 2 files changed, 25 insertions(+), 14 deletions(-) diff --git a/frontend/src/pages/Guide.jsx b/frontend/src/pages/Guide.jsx index 9c90bb1d..50bd1222 100644 --- a/frontend/src/pages/Guide.jsx +++ b/frontend/src/pages/Guide.jsx @@ -207,7 +207,9 @@ const GuideRow = React.memo(({ index, style, data }) => { }} > {channelPrograms.length > 0 ? 
( - channelPrograms.map((program) => renderProgram(program)) + channelPrograms.map((program) => + renderProgram(program, undefined, channel) + ) ) : ( <> {Array.from({ length: Math.ceil(24 / 2) }).map( @@ -599,11 +601,12 @@ export default function TVChannelGuide({ startDate, endDate }) { const findChannelByTvgId = useCallback( (tvgId) => { - const channelId = channelIdByTvgId.get(String(tvgId)); - if (!channelId) { + const channelIds = channelIdByTvgId.get(String(tvgId)); + if (!channelIds || channelIds.length === 0) { return null; } - return channelById.get(channelId) || null; + // Return the first channel that matches this TVG ID + return channelById.get(channelIds[0]) || null; }, [channelById, channelIdByTvgId] ); @@ -844,7 +847,7 @@ export default function TVChannelGuide({ startDate, endDate }) { [start, syncScrollLeft] ); const renderProgram = useCallback( - (program, channelStart = start) => { + (program, channelStart = start, channel = null) => { const programStartMs = program.startMs ?? dayjs(program.start_time).valueOf(); const programEndMs = program.endMs ?? dayjs(program.end_time).valueOf(); @@ -887,7 +890,7 @@ export default function TVChannelGuide({ startDate, endDate }) { return ( { - const channelId = channelIdByTvgId.get(String(program.tvg_id)); - if (!channelId) { + const channelIds = channelIdByTvgId.get(String(program.tvg_id)); + if (!channelIds || channelIds.length === 0) { return; } - if (!map.has(channelId)) { - map.set(channelId, []); - } - const startMs = program.startMs ?? dayjs(program.start_time).valueOf(); const endMs = program.endMs ?? 
dayjs(program.end_time).valueOf(); - map.get(channelId).push({ + const programData = { ...program, startMs, endMs, + }; + + // Add this program to all channels that share the same TVG ID + channelIds.forEach((channelId) => { + if (!map.has(channelId)) { + map.set(channelId, []); + } + map.get(channelId).push(programData); }); }); From fd9038463b706fc253f197a3c8028dbce9cb2cfa Mon Sep 17 00:00:00 2001 From: Connor Smith Date: Sun, 21 Sep 2025 01:23:57 -0400 Subject: [PATCH 042/119] Allow for batch editing channel logos --- .../src/components/forms/ChannelBatch.jsx | 186 ++++++++++++++++++ 1 file changed, 186 insertions(+) diff --git a/frontend/src/components/forms/ChannelBatch.jsx b/frontend/src/components/forms/ChannelBatch.jsx index ad61fb26..134807d4 100644 --- a/frontend/src/components/forms/ChannelBatch.jsx +++ b/frontend/src/components/forms/ChannelBatch.jsx @@ -29,18 +29,32 @@ import { FixedSizeList as List } from 'react-window'; import { useForm } from '@mantine/form'; import { notifications } from '@mantine/notifications'; import { USER_LEVELS, USER_LEVEL_LABELS } from '../../constants'; +import { useChannelLogoSelection } from '../../hooks/useSmartLogos'; +import LazyLogo from '../LazyLogo'; +import logo from '../../images/logo.png'; const ChannelBatchForm = ({ channelIds, isOpen, onClose }) => { const theme = useMantineTheme(); const groupListRef = useRef(null); + const logoListRef = useRef(null); const channelGroups = useChannelsStore((s) => s.channelGroups); + const { + logos: channelLogos, + ensureLogosLoaded, + isLoading: logosLoading, + } = useChannelLogoSelection(); + + useEffect(() => { + ensureLogosLoaded(); + }, [ensureLogosLoaded]); const streamProfiles = useStreamProfilesStore((s) => s.profiles); const [channelGroupModelOpen, setChannelGroupModalOpen] = useState(false); const [selectedChannelGroup, setSelectedChannelGroup] = useState('-1'); + const [selectedLogoId, setSelectedLogoId] = useState('-1'); const [isSubmitting, setIsSubmitting] = 
useState(false); const [regexFind, setRegexFind] = useState(''); const [regexReplace, setRegexReplace] = useState(''); @@ -49,10 +63,14 @@ const ChannelBatchForm = ({ channelIds, isOpen, onClose }) => { const [groupFilter, setGroupFilter] = useState(''); const groupOptions = Object.values(channelGroups); + const [logoPopoverOpened, setLogoPopoverOpened] = useState(false); + const [logoFilter, setLogoFilter] = useState(''); + const form = useForm({ mode: 'uncontrolled', initialValues: { channel_group: '(no change)', + logo: '(no change)', stream_profile_id: '-1', user_level: '-1', }, @@ -70,6 +88,15 @@ const ChannelBatchForm = ({ channelIds, isOpen, onClose }) => { delete values.channel_group_id; } + if (selectedLogoId && selectedLogoId !== '-1') { + if (selectedLogoId === '0') { + values.logo_id = null; + } else { + values.logo_id = parseInt(selectedLogoId); + } + } + delete values.logo; + // Handle stream profile ID - convert special values if (!values.stream_profile_id || values.stream_profile_id === '-1') { delete values.stream_profile_id; @@ -242,6 +269,18 @@ const ChannelBatchForm = ({ channelIds, isOpen, onClose }) => { ), ]; + const logoOptions = useMemo(() => { + return [ + { id: '-1', name: '(no change)' }, + { id: '0', name: '(remove logo)' }, + ...Object.values(channelLogos), + ]; + }, [channelLogos]); + + const filteredLogos = logoOptions.filter((logo) => + logo.name.toLowerCase().includes(logoFilter.toLowerCase()) + ); + if (!isOpen) { return <>; } @@ -445,6 +484,153 @@ const ChannelBatchForm = ({ channelIds, isOpen, onClose }) => { + + + + setLogoPopoverOpened(true)} + size="xs" + style={{ flex: 1 }} + rightSection={ + selectedLogoId !== '-1' && ( + { + e.stopPropagation(); + setSelectedLogoId('-1'); + form.setValues({ logo: '(no change)' }); + }} + > + + + ) + } + /> + + e.stopPropagation()}> + + + setLogoFilter(event.currentTarget.value) + } + mb="xs" + size="xs" + /> + {logosLoading && ( + + Loading... + + )} + + + {filteredLogos.length === 0 ? ( +
+ + {logoFilter + ? 'No logos match your filter' + : 'No logos available'} + +
+ ) : ( + + {({ index, style }) => ( +
{ + setSelectedLogoId(filteredLogos[index].id); + form.setValues({ + logo: filteredLogos[index].name, + }); + setLogoPopoverOpened(false); + }} + onMouseEnter={(e) => { + e.currentTarget.style.backgroundColor = + 'rgb(68, 68, 68)'; + }} + onMouseLeave={(e) => { + e.currentTarget.style.backgroundColor = + 'transparent'; + }} + > +
+ {filteredLogos[index].id > 0 ? ( + {filteredLogos[index].name { + if (e.target.src !== logo) { + e.target.src = logo; + } + }} + /> + ) : ( + + )} + + {filteredLogos[index].name} + +
+
+ )} +
+ )} +
+
+
+ {selectedLogoId > 0 && ( + + )} +
+ + Date: Sun, 5 Oct 2025 19:32:40 -0500 Subject: [PATCH 107/119] Bug fix: Ensure distinct channel results in generate_m3u, generate_epg, and xc_get_live_streams functions. Fixes duplicate channels output for streamer profiles that were set to "All" --- apps/output/views.py | 11 +++++++---- 1 file changed, 7 insertions(+), 4 deletions(-) diff --git a/apps/output/views.py b/apps/output/views.py index 1666013d..e4d05d59 100644 --- a/apps/output/views.py +++ b/apps/output/views.py @@ -62,7 +62,7 @@ def generate_m3u(request, profile_name=None, user=None): channel_profiles ) - channels = Channel.objects.filter(**filters).order_by("channel_number") + channels = Channel.objects.filter(**filters).distinct().order_by("channel_number") else: channels = Channel.objects.filter(user_level__lte=user.user_level).order_by( "channel_number" @@ -326,7 +326,7 @@ def generate_epg(request, profile_name=None, user=None): channel_profiles ) - channels = Channel.objects.filter(**filters).order_by("channel_number") + channels = Channel.objects.filter(**filters).distinct().order_by("channel_number") else: channels = Channel.objects.filter(user_level__lte=user.user_level).order_by( "channel_number" @@ -910,7 +910,7 @@ def xc_get_live_streams(request, user, category_id=None): if category_id is not None: filters["channel_group__id"] = category_id - channels = Channel.objects.filter(**filters).order_by("channel_number") + channels = Channel.objects.filter(**filters).distinct().order_by("channel_number") else: if not category_id: channels = Channel.objects.filter(user_level__lte=user.user_level).order_by("channel_number") @@ -966,7 +966,10 @@ def xc_get_epg(request, user, short=False): channel_profiles = user.channel_profiles.all() filters["channelprofilemembership__channel_profile__in"] = channel_profiles - channel = get_object_or_404(Channel, **filters) + # Use filter().first() with distinct instead of get_object_or_404 to handle multiple profile memberships + channel = 
Channel.objects.filter(**filters).distinct().first() + if not channel: + raise Http404() else: channel = get_object_or_404(Channel, id=channel_id) From dea6411e1ca7200d95b6f6ffceb1c55a5a61aa5b Mon Sep 17 00:00:00 2001 From: Dispatcharr Date: Mon, 6 Oct 2025 07:46:23 -0500 Subject: [PATCH 108/119] Time Zones - Added time zone settings --- ...ecurringrecordingrule_end_date_and_more.py | 23 + apps/channels/serializers.py | 11 +- apps/channels/tasks.py | 11 +- core/models.py | 26 + frontend/src/pages/DVR.jsx | 866 ++++++++++++++---- frontend/src/pages/Settings.jsx | 218 ++++- 6 files changed, 948 insertions(+), 207 deletions(-) create mode 100644 apps/channels/migrations/0027_recurringrecordingrule_end_date_and_more.py diff --git a/apps/channels/migrations/0027_recurringrecordingrule_end_date_and_more.py b/apps/channels/migrations/0027_recurringrecordingrule_end_date_and_more.py new file mode 100644 index 00000000..8cdb9868 --- /dev/null +++ b/apps/channels/migrations/0027_recurringrecordingrule_end_date_and_more.py @@ -0,0 +1,23 @@ +# Generated by Django 5.2.4 on 2025-10-05 20:50 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('dispatcharr_channels', '0026_recurringrecordingrule'), + ] + + operations = [ + migrations.AddField( + model_name='recurringrecordingrule', + name='end_date', + field=models.DateField(blank=True, null=True), + ), + migrations.AddField( + model_name='recurringrecordingrule', + name='start_date', + field=models.DateField(blank=True, null=True), + ), + ] diff --git a/apps/channels/serializers.py b/apps/channels/serializers.py index 1fa2b68a..7058ced2 100644 --- a/apps/channels/serializers.py +++ b/apps/channels/serializers.py @@ -1,4 +1,6 @@ import json +from datetime import datetime + from rest_framework import serializers from .models import ( Stream, @@ -530,8 +532,6 @@ class RecurringRecordingRuleSerializer(serializers.ModelSerializer): def validate(self, attrs): start = 
attrs.get("start_time") or getattr(self.instance, "start_time", None) end = attrs.get("end_time") or getattr(self.instance, "end_time", None) - if start and end and end <= start: - raise serializers.ValidationError("End time must be after start time") start_date = attrs.get("start_date") if "start_date" in attrs else getattr(self.instance, "start_date", None) end_date = attrs.get("end_date") if "end_date" in attrs else getattr(self.instance, "end_date", None) if start_date is None: @@ -544,6 +544,13 @@ class RecurringRecordingRuleSerializer(serializers.ModelSerializer): existing_end = getattr(self.instance, "end_date", None) if existing_end is None: raise serializers.ValidationError("End date is required") + if start and end and start_date and end_date: + start_dt = datetime.combine(start_date, start) + end_dt = datetime.combine(end_date, end) + if end_dt <= start_dt: + raise serializers.ValidationError("End datetime must be after start datetime") + elif start and end and end == start: + raise serializers.ValidationError("End time must be different from start time") # Normalize empty strings to None for dates if attrs.get("end_date") == "": attrs["end_date"] = None diff --git a/apps/channels/tasks.py b/apps/channels/tasks.py index 688dc79d..23ae82b2 100755 --- a/apps/channels/tasks.py +++ b/apps/channels/tasks.py @@ -8,6 +8,7 @@ import time import json import subprocess import signal +from zoneinfo import ZoneInfo from datetime import datetime, timedelta import gc @@ -1140,7 +1141,12 @@ def sync_recurring_rule_impl(rule_id: int, drop_existing: bool = True, horizon_d if not days: return 0 - tz = timezone.get_current_timezone() + tz_name = CoreSettings.get_system_time_zone() + try: + tz = ZoneInfo(tz_name) + except Exception: + logger.warning("Invalid or unsupported time zone '%s'; falling back to Server default", tz_name) + tz = timezone.get_current_timezone() start_limit = rule.start_date or now.date() end_limit = rule.end_date horizon = now + 
timedelta(days=horizon_days) @@ -2152,7 +2158,8 @@ def comskip_process_recording(recording_id: int): list_path = os.path.join(workdir, "concat_list.txt") with open(list_path, "w") as lf: for pth in parts: - lf.write(f"file '{pth}'\n") + escaped = pth.replace("'", "'\\''") + lf.write(f"file '{escaped}'\n") output_path = os.path.join(workdir, f"{os.path.splitext(os.path.basename(file_path))[0]}.cut.mkv") subprocess.run([ diff --git a/core/models.py b/core/models.py index 5584d7ca..3a5895ba 100644 --- a/core/models.py +++ b/core/models.py @@ -1,4 +1,5 @@ # core/models.py +from django.conf import settings from django.db import models from django.utils.text import slugify from django.core.exceptions import ValidationError @@ -161,6 +162,7 @@ DVR_COMSKIP_ENABLED_KEY = slugify("DVR Comskip Enabled") DVR_COMSKIP_CUSTOM_PATH_KEY = slugify("DVR Comskip Custom Path") DVR_PRE_OFFSET_MINUTES_KEY = slugify("DVR Pre-Offset Minutes") DVR_POST_OFFSET_MINUTES_KEY = slugify("DVR Post-Offset Minutes") +SYSTEM_TIME_ZONE_KEY = slugify("System Time Zone") class CoreSettings(models.Model): @@ -324,6 +326,30 @@ class CoreSettings(models.Model): except Exception: return 0 + @classmethod + def get_system_time_zone(cls): + """Return configured system time zone or fall back to Django settings.""" + try: + value = cls.objects.get(key=SYSTEM_TIME_ZONE_KEY).value + if value: + return value + except cls.DoesNotExist: + pass + return getattr(settings, "TIME_ZONE", "UTC") or "UTC" + + @classmethod + def set_system_time_zone(cls, tz_name: str | None): + """Persist the desired system time zone identifier.""" + value = (tz_name or "").strip() or getattr(settings, "TIME_ZONE", "UTC") or "UTC" + obj, _ = cls.objects.get_or_create( + key=SYSTEM_TIME_ZONE_KEY, + defaults={"name": "System Time Zone", "value": value}, + ) + if obj.value != value: + obj.value = value + obj.save(update_fields=["value"]) + return value + @classmethod def get_dvr_series_rules(cls): """Return list of series recording rules. 
Each: {tvg_id, title, mode: 'all'|'new'}""" diff --git a/frontend/src/pages/DVR.jsx b/frontend/src/pages/DVR.jsx index 83faae06..ae2fd4ca 100644 --- a/frontend/src/pages/DVR.jsx +++ b/frontend/src/pages/DVR.jsx @@ -1,4 +1,4 @@ -import React, { useMemo, useState, useEffect } from 'react'; +import React, { useMemo, useState, useEffect, useCallback } from 'react'; import { ActionIcon, Box, @@ -36,8 +36,11 @@ import { import dayjs from 'dayjs'; import duration from 'dayjs/plugin/duration'; import relativeTime from 'dayjs/plugin/relativeTime'; +import utc from 'dayjs/plugin/utc'; +import timezone from 'dayjs/plugin/timezone'; import useChannelsStore from '../store/channels'; import useSettingsStore from '../store/settings'; +import useLocalStorage from '../hooks/useLocalStorage'; import useVideoStore from '../store/useVideoStore'; import RecordingForm from '../components/forms/Recording'; import { notifications } from '@mantine/notifications'; @@ -47,6 +50,47 @@ import { useForm } from '@mantine/form'; dayjs.extend(duration); dayjs.extend(relativeTime); +dayjs.extend(utc); +dayjs.extend(timezone); + +const useUserTimeZone = () => { + const settings = useSettingsStore((s) => s.settings); + const [timeZone, setTimeZone] = useLocalStorage( + 'time-zone', + dayjs.tz?.guess + ? 
dayjs.tz.guess() + : Intl.DateTimeFormat().resolvedOptions().timeZone + ); + + useEffect(() => { + const tz = settings?.['system-time-zone']?.value; + if (tz && tz !== timeZone) { + setTimeZone(tz); + } + }, [settings, timeZone, setTimeZone]); + + return timeZone; +}; + +const useTimeHelpers = () => { + const timeZone = useUserTimeZone(); + + const toUserTime = useCallback( + (value) => { + if (!value) return dayjs.invalid(); + try { + return dayjs(value).tz(timeZone); + } catch (error) { + return dayjs(value); + } + }, + [timeZone] + ); + + const userNow = useCallback(() => dayjs().tz(timeZone), [timeZone]); + + return { timeZone, toUserTime, userNow }; +}; const RECURRING_DAY_OPTIONS = [ { value: 6, label: 'Sun' }, @@ -61,7 +105,9 @@ const RECURRING_DAY_OPTIONS = [ // Short preview that triggers the details modal when clicked const RecordingSynopsis = ({ description, onOpen }) => { const truncated = description?.length > 140; - const preview = truncated ? `${description.slice(0, 140).trim()}...` : description; + const preview = truncated + ? 
`${description.slice(0, 140).trim()}...` + : description; if (!description) return null; return ( { ); }; -const RecordingDetailsModal = ({ opened, onClose, recording, channel, posterUrl, onWatchLive, onWatchRecording, env_mode, onEdit }) => { +const RecordingDetailsModal = ({ + opened, + onClose, + recording, + channel, + posterUrl, + onWatchLive, + onWatchRecording, + env_mode, + onEdit, +}) => { const allRecordings = useChannelsStore((s) => s.recordings); const channelMap = useChannelsStore((s) => s.channels); + const { toUserTime, userNow } = useTimeHelpers(); const [childOpen, setChildOpen] = React.useState(false); const [childRec, setChildRec] = React.useState(null); @@ -88,13 +145,17 @@ const RecordingDetailsModal = ({ opened, onClose, recording, channel, posterUrl, const program = customProps.program || {}; const recordingName = program.title || 'Custom Recording'; const description = program.description || customProps.description || ''; - const start = dayjs(safeRecording.start_time); - const end = dayjs(safeRecording.end_time); + const start = toUserTime(safeRecording.start_time); + const end = toUserTime(safeRecording.end_time); const stats = customProps.stream_info || {}; const statRows = [ ['Video Codec', stats.video_codec], - ['Resolution', stats.resolution || (stats.width && stats.height ? `${stats.width}x${stats.height}` : null)], + [ + 'Resolution', + stats.resolution || + (stats.width && stats.height ? 
`${stats.width}x${stats.height}` : null), + ], ['FPS', stats.source_fps], ['Video Bitrate', stats.video_bitrate && `${stats.video_bitrate} kb/s`], ['Audio Codec', stats.audio_codec], @@ -104,34 +165,48 @@ const RecordingDetailsModal = ({ opened, onClose, recording, channel, posterUrl, ].filter(([, v]) => v !== null && v !== undefined && v !== ''); // Rating (if available) - const rating = customProps.rating || customProps.rating_value || (program && program.custom_properties && program.custom_properties.rating); + const rating = + customProps.rating || + customProps.rating_value || + (program && program.custom_properties && program.custom_properties.rating); const ratingSystem = customProps.rating_system || 'MPAA'; const fileUrl = customProps.file_url || customProps.output_file_url; - const canWatchRecording = (customProps.status === 'completed' || customProps.status === 'interrupted') && Boolean(fileUrl); + const canWatchRecording = + (customProps.status === 'completed' || + customProps.status === 'interrupted') && + Boolean(fileUrl); // Prefix in dev (Vite) if needed let resolvedPosterUrl = posterUrl; - if (typeof import.meta !== 'undefined' && import.meta.env && import.meta.env.DEV) { + if ( + typeof import.meta !== 'undefined' && + import.meta.env && + import.meta.env.DEV + ) { if (resolvedPosterUrl && resolvedPosterUrl.startsWith('/')) { resolvedPosterUrl = `${window.location.protocol}//${window.location.hostname}:5656${resolvedPosterUrl}`; } } - const isSeriesGroup = Boolean(safeRecording._group_count && safeRecording._group_count > 1); + const isSeriesGroup = Boolean( + safeRecording._group_count && safeRecording._group_count > 1 + ); const upcomingEpisodes = React.useMemo(() => { if (!isSeriesGroup) return []; - const arr = Array.isArray(allRecordings) ? allRecordings : Object.values(allRecordings || {}); + const arr = Array.isArray(allRecordings) + ? 
allRecordings + : Object.values(allRecordings || {}); const tvid = program.tvg_id || ''; const titleKey = (program.title || '').toLowerCase(); const filtered = arr.filter((r) => { - const cp = r.custom_properties || {}; - const pr = cp.program || {}; - if ((pr.tvg_id || '') !== tvid) return false; - if ((pr.title || '').toLowerCase() !== titleKey) return false; - const st = dayjs(r.start_time); - return st.isAfter(dayjs()); - }); + const cp = r.custom_properties || {}; + const pr = cp.program || {}; + if ((pr.tvg_id || '') !== tvid) return false; + if ((pr.title || '').toLowerCase() !== titleKey) return false; + const st = toUserTime(r.start_time); + return st.isAfter(userNow()); + }); // Deduplicate by program.id if present, else by time+title const seen = new Set(); const deduped = []; @@ -141,54 +216,117 @@ const RecordingDetailsModal = ({ opened, onClose, recording, channel, posterUrl, // Prefer season/episode or onscreen code; else fall back to sub_title; else program id/slot const season = cp.season ?? pr?.custom_properties?.season; const episode = cp.episode ?? pr?.custom_properties?.episode; - const onscreen = cp.onscreen_episode ?? pr?.custom_properties?.onscreen_episode; + const onscreen = + cp.onscreen_episode ?? 
pr?.custom_properties?.onscreen_episode; let key = null; if (season != null && episode != null) key = `se:${season}:${episode}`; else if (onscreen) key = `onscreen:${String(onscreen).toLowerCase()}`; else if (pr.sub_title) key = `sub:${(pr.sub_title || '').toLowerCase()}`; else if (pr.id != null) key = `id:${pr.id}`; - else key = `slot:${r.channel}|${r.start_time}|${r.end_time}|${(pr.title||'')}`; + else + key = `slot:${r.channel}|${r.start_time}|${r.end_time}|${pr.title || ''}`; if (seen.has(key)) continue; seen.add(key); deduped.push(r); } - return deduped.sort((a, b) => dayjs(a.start_time) - dayjs(b.start_time)); - }, [allRecordings, isSeriesGroup, program.tvg_id, program.title]); + return deduped.sort( + (a, b) => toUserTime(a.start_time) - toUserTime(b.start_time) + ); + }, [ + allRecordings, + isSeriesGroup, + program.tvg_id, + program.title, + toUserTime, + userNow, + ]); if (!recording) return null; const EpisodeRow = ({ rec }) => { const cp = rec.custom_properties || {}; const pr = cp.program || {}; - const start = dayjs(rec.start_time); - const end = dayjs(rec.end_time); + const start = toUserTime(rec.start_time); + const end = toUserTime(rec.end_time); const season = cp.season ?? pr?.custom_properties?.season; const episode = cp.episode ?? pr?.custom_properties?.episode; - const onscreen = cp.onscreen_episode ?? pr?.custom_properties?.onscreen_episode; - const se = season && episode ? `S${String(season).padStart(2,'0')}E${String(episode).padStart(2,'0')}` : (onscreen || null); + const onscreen = + cp.onscreen_episode ?? pr?.custom_properties?.onscreen_episode; + const se = + season && episode + ? `S${String(season).padStart(2, '0')}E${String(episode).padStart(2, '0')}` + : onscreen || null; const posterLogoId = cp.poster_logo_id; - let purl = posterLogoId ? 
`/api/channels/logos/${posterLogoId}/cache/` : cp.poster_url || posterUrl || '/logo.png'; - if (typeof import.meta !== 'undefined' && import.meta.env && import.meta.env.DEV && purl && purl.startsWith('/')) { + let purl = posterLogoId + ? `/api/channels/logos/${posterLogoId}/cache/` + : cp.poster_url || posterUrl || '/logo.png'; + if ( + typeof import.meta !== 'undefined' && + import.meta.env && + import.meta.env.DEV && + purl && + purl.startsWith('/') + ) { purl = `${window.location.protocol}//${window.location.hostname}:5656${purl}`; } const onRemove = async (e) => { e?.stopPropagation?.(); - try { await API.deleteRecording(rec.id); } catch (error) { console.error('Failed to delete upcoming recording', error); } - try { await useChannelsStore.getState().fetchRecordings(); } catch (error) { console.error('Failed to refresh recordings after delete', error); } + try { + await API.deleteRecording(rec.id); + } catch (error) { + console.error('Failed to delete upcoming recording', error); + } + try { + await useChannelsStore.getState().fetchRecordings(); + } catch (error) { + console.error('Failed to refresh recordings after delete', error); + } }; return ( - { setChildRec(rec); setChildOpen(true); }}> + { + setChildRec(rec); + setChildOpen(true); + }} + > - {pr.title + {pr.title - {pr.sub_title || pr.title} - {se && {se}} + + {pr.sub_title || pr.title} + + {se && ( + + {se} + + )} - {start.format('MMM D, YYYY h:mma')} – {end.format('h:mma')} + + {start.format('MMM D, YYYY h:mma')} – {end.format('h:mma')} + - + @@ -199,7 +337,11 @@ const RecordingDetailsModal = ({ opened, onClose, recording, channel, posterUrl, {upcomingEpisodes.length === 0 && ( - No upcoming episodes found + + No upcoming episodes found + )} {upcomingEpisodes.map((ep) => ( @@ -225,17 +369,19 @@ const RecordingDetailsModal = ({ opened, onClose, recording, channel, posterUrl, onClose={() => setChildOpen(false)} recording={childRec} channel={channelMap[childRec.channel]} - posterUrl={( - 
childRec.custom_properties?.poster_logo_id + posterUrl={ + (childRec.custom_properties?.poster_logo_id ? `/api/channels/logos/${childRec.custom_properties.poster_logo_id}/cache/` - : childRec.custom_properties?.poster_url || channelMap[childRec.channel]?.logo?.cache_url - ) || '/logo.png'} + : childRec.custom_properties?.poster_url || + channelMap[childRec.channel]?.logo?.cache_url) || + '/logo.png' + } env_mode={env_mode} onWatchLive={() => { const rec = childRec; - const now = dayjs(); - const s = dayjs(rec.start_time); - const e = dayjs(rec.end_time); + const now = userNow(); + const s = toUserTime(rec.start_time); + const e = toUserTime(rec.end_time); if (now.isAfter(s) && now.isBefore(e)) { const ch = channelMap[rec.channel]; if (!ch) return; @@ -247,77 +393,142 @@ const RecordingDetailsModal = ({ opened, onClose, recording, channel, posterUrl, } }} onWatchRecording={() => { - let fileUrl = childRec.custom_properties?.file_url || childRec.custom_properties?.output_file_url; + let fileUrl = + childRec.custom_properties?.file_url || + childRec.custom_properties?.output_file_url; if (!fileUrl) return; if (env_mode === 'dev' && fileUrl.startsWith('/')) { fileUrl = `${window.location.protocol}//${window.location.hostname}:5656${fileUrl}`; } - useVideoStore.getState().showVideo(fileUrl, 'vod', { name: childRec.custom_properties?.program?.title || 'Recording', logo: { url: (childRec.custom_properties?.poster_logo_id ? `/api/channels/logos/${childRec.custom_properties.poster_logo_id}/cache/` : channelMap[childRec.channel]?.logo?.cache_url) || '/logo.png' } }); + useVideoStore.getState().showVideo(fileUrl, 'vod', { + name: + childRec.custom_properties?.program?.title || 'Recording', + logo: { + url: + (childRec.custom_properties?.poster_logo_id + ? `/api/channels/logos/${childRec.custom_properties.poster_logo_id}/cache/` + : channelMap[childRec.channel]?.logo?.cache_url) || + '/logo.png', + }, + }); }} /> )} ) : ( - - {recordingName} - - - {channel ? 
`${channel.channel_number} • ${channel.name}` : '—'} - - {onWatchLive && ( - - )} - {onWatchRecording && ( - - )} - {onEdit && start.isAfter(dayjs()) && ( - - )} - {customProps.status === 'completed' && (!customProps?.comskip || customProps?.comskip?.status !== 'completed') && ( - - )} + + {recordingName} + + + + {channel ? `${channel.channel_number} • ${channel.name}` : '—'} + + + {onWatchLive && ( + + )} + {onWatchRecording && ( + + )} + {onEdit && start.isAfter(userNow()) && ( + + )} + {customProps.status === 'completed' && + (!customProps?.comskip || + customProps?.comskip?.status !== 'completed') && ( + + )} + - - {start.format('MMM D, YYYY h:mma')} – {end.format('h:mma')} - {rating && ( - - {rating} - - )} - {description && ( - {description} - )} - {statRows.length > 0 && ( - - Stream Stats - {statRows.map(([k, v]) => ( - - {k} - {v} - - ))} - - )} - - + + {start.format('MMM D, YYYY h:mma')} – {end.format('h:mma')} + + {rating && ( + + + {rating} + + + )} + {description && ( + + {description} + + )} + {statRows.length > 0 && ( + + + Stream Stats + + {statRows.map(([k, v]) => ( + + + {k} + + {v} + + ))} + + )} + + )} ); @@ -346,6 +557,7 @@ const RecurringRuleModal = ({ opened, onClose, ruleId, onEditOccurrence }) => { const fetchRecurringRules = useChannelsStore((s) => s.fetchRecurringRules); const fetchRecordings = useChannelsStore((s) => s.fetchRecordings); const recordings = useChannelsStore((s) => s.recordings); + const { toUserTime, userNow } = useTimeHelpers(); const [saving, setSaving] = useState(false); const [deleting, setDeleting] = useState(false); @@ -363,7 +575,10 @@ const RecurringRuleModal = ({ opened, onClose, ruleId, onEditOccurrence }) => { } return aNum - bNum; }); - return list.map((item) => ({ value: `${item.id}`, label: item.name || `Channel ${item.id}` })); + return list.map((item) => ({ + value: `${item.id}`, + label: item.name || `Channel ${item.id}`, + })); }, [channels]); const form = useForm({ @@ -380,12 +595,21 @@ const 
RecurringRuleModal = ({ opened, onClose, ruleId, onEditOccurrence }) => { }, validate: { channel_id: (value) => (value ? null : 'Select a channel'), - days_of_week: (value) => (value && value.length ? null : 'Pick at least one day'), + days_of_week: (value) => + value && value.length ? null : 'Pick at least one day', end_time: (value, values) => { if (!value) return 'Select an end time'; - const startValue = dayjs(values.start_time, ['HH:mm', 'hh:mm A', 'h:mm A'], true); + const startValue = dayjs( + values.start_time, + ['HH:mm', 'hh:mm A', 'h:mm A'], + true + ); const endValue = dayjs(value, ['HH:mm', 'hh:mm A', 'h:mm A'], true); - if (startValue.isValid() && endValue.isValid() && endValue.diff(startValue, 'minute') === 0) { + if ( + startValue.isValid() && + endValue.isValid() && + endValue.diff(startValue, 'minute') === 0 + ) { return 'End time must differ from start time'; } return null; @@ -421,11 +645,22 @@ const RecurringRuleModal = ({ opened, onClose, ruleId, onEditOccurrence }) => { }, [opened, ruleId, rule]); const upcomingOccurrences = useMemo(() => { - const list = Array.isArray(recordings) ? recordings : Object.values(recordings || {}); + const list = Array.isArray(recordings) + ? 
recordings + : Object.values(recordings || {}); + const now = userNow(); return list - .filter((rec) => rec?.custom_properties?.rule?.id === ruleId && dayjs(rec.start_time).isAfter(dayjs())) - .sort((a, b) => dayjs(a.start_time).valueOf() - dayjs(b.start_time).valueOf()); - }, [recordings, ruleId]); + .filter( + (rec) => + rec?.custom_properties?.rule?.id === ruleId && + toUserTime(rec.start_time).isAfter(now) + ) + .sort( + (a, b) => + toUserTime(a.start_time).valueOf() - + toUserTime(b.start_time).valueOf() + ); + }, [recordings, ruleId, toUserTime, userNow]); const handleSave = async (values) => { if (!rule) return; @@ -436,8 +671,12 @@ const RecurringRuleModal = ({ opened, onClose, ruleId, onEditOccurrence }) => { days_of_week: (values.days_of_week || []).map((d) => Number(d)), start_time: toTimeString(values.start_time), end_time: toTimeString(values.end_time), - start_date: values.start_date ? dayjs(values.start_date).format('YYYY-MM-DD') : null, - end_date: values.end_date ? dayjs(values.end_date).format('YYYY-MM-DD') : null, + start_date: values.start_date + ? dayjs(values.start_date).format('YYYY-MM-DD') + : null, + end_date: values.end_date + ? dayjs(values.end_date).format('YYYY-MM-DD') + : null, name: values.rule_name?.trim() || '', enabled: Boolean(values.enabled), }); @@ -484,7 +723,9 @@ const RecurringRuleModal = ({ opened, onClose, ruleId, onEditOccurrence }) => { await Promise.all([fetchRecurringRules(), fetchRecordings()]); notifications.show({ title: checked ? 'Recurring rule enabled' : 'Recurring rule paused', - message: checked ? 'Future occurrences will resume' : 'Upcoming occurrences were removed', + message: checked + ? 'Future occurrences will resume' + : 'Upcoming occurrences were removed', color: checked ? 
'green' : 'yellow', autoClose: 2500, }); @@ -523,10 +764,18 @@ const RecurringRuleModal = ({ opened, onClose, ruleId, onEditOccurrence }) => { } return ( - + - {channels?.[rule.channel]?.name || `Channel ${rule.channel}`} + + {channels?.[rule.channel]?.name || `Channel ${rule.channel}`} + { ({ value: String(opt.value), label: opt.label }))} + data={RECURRING_DAY_OPTIONS.map((opt) => ({ + value: String(opt.value), + label: opt.label, + }))} searchable clearable /> @@ -562,7 +814,9 @@ const RecurringRuleModal = ({ opened, onClose, ruleId, onEditOccurrence }) => { form.setFieldValue('start_date', value || dayjs().toDate())} + onChange={(value) => + form.setFieldValue('start_date', value || dayjs().toDate()) + } valueFormat="MMM D, YYYY" /> { form.setFieldValue('start_time', toTimeString(value))} + onChange={(value) => + form.setFieldValue('start_time', toTimeString(value)) + } withSeconds={false} format="12" amLabel="AM" @@ -586,7 +842,9 @@ const RecurringRuleModal = ({ opened, onClose, ruleId, onEditOccurrence }) => { form.setFieldValue('end_time', toTimeString(value))} + onChange={(value) => + form.setFieldValue('end_time', toTimeString(value)) + } withSeconds={false} format="12" amLabel="AM" @@ -597,7 +855,12 @@ const RecurringRuleModal = ({ opened, onClose, ruleId, onEditOccurrence }) => { - @@ -605,22 +868,35 @@ const RecurringRuleModal = ({ opened, onClose, ruleId, onEditOccurrence }) => { - Upcoming occurrences + + Upcoming occurrences + {upcomingOccurrences.length} {upcomingOccurrences.length === 0 ? ( - No future airings currently scheduled. + + No future airings currently scheduled. 
+ ) : ( {upcomingOccurrences.map((occ) => { - const occStart = dayjs(occ.start_time); - const occEnd = dayjs(occ.end_time); + const occStart = toUserTime(occ.start_time); + const occEnd = toUserTime(occ.end_time); return ( - + - {occStart.format('MMM D, YYYY')} - {occStart.format('h:mma')} – {occEnd.format('h:mma')} + + {occStart.format('MMM D, YYYY')} + + + {occStart.format('h:mma')} – {occEnd.format('h:mma')} + )} {!isUpcoming && ( - + )} - {!isUpcoming && customProps?.status === 'completed' && (!customProps?.comskip || customProps?.comskip?.status !== 'completed') && ( - - )} + {!isUpcoming && + customProps?.status === 'completed' && + (!customProps?.comskip || + customProps?.comskip?.status !== 'completed') && ( + + )} {/* If this card is a grouped upcoming series, show count */} {recording._group_count > 1 && ( - + Next of {recording._group_count} )} @@ -922,12 +1309,27 @@ const RecordingCard = ({ recording, onOpenDetails, onOpenRecurring }) => { // Stacked look for series groups: render two shadow layers behind the main card return ( - setCancelOpen(false)} title="Cancel Series" centered size="md" zIndex={9999}> + setCancelOpen(false)} + title="Cancel Series" + centered + size="md" + zIndex={9999} + > This is a series rule. What would you like to cancel? 
- - + + @@ -969,6 +1371,7 @@ const DVRPage = () => { const channels = useChannelsStore((s) => s.channels); const fetchChannels = useChannelsStore((s) => s.fetchChannels); const fetchRecurringRules = useChannelsStore((s) => s.fetchRecurringRules); + const { toUserTime, userNow } = useTimeHelpers(); const [recordingModalOpen, setRecordingModalOpen] = useState(false); const [detailsOpen, setDetailsOpen] = useState(false); @@ -1013,18 +1416,24 @@ const DVRPage = () => { }, [channels, fetchChannels, fetchRecordings, fetchRecurringRules]); // Re-render every second so time-based bucketing updates without a refresh - const [now, setNow] = useState(dayjs()); + const [now, setNow] = useState(userNow()); useEffect(() => { - const interval = setInterval(() => setNow(dayjs()), 1000); + const interval = setInterval(() => setNow(userNow()), 1000); return () => clearInterval(interval); - }, []); + }, [userNow]); + + useEffect(() => { + setNow(userNow()); + }, [userNow]); // Categorize recordings const { inProgress, upcoming, completed } = useMemo(() => { const inProgress = []; const upcoming = []; const completed = []; - const list = Array.isArray(recordings) ? recordings : Object.values(recordings || {}); + const list = Array.isArray(recordings) + ? recordings + : Object.values(recordings || {}); // ID-based dedupe guard in case store returns duplicates const seenIds = new Set(); @@ -1034,8 +1443,8 @@ const DVRPage = () => { if (seenIds.has(k)) continue; seenIds.add(k); } - const s = dayjs(rec.start_time); - const e = dayjs(rec.end_time); + const s = toUserTime(rec.start_time); + const e = toUserTime(rec.end_time); const status = rec.custom_properties?.status; if (status === 'interrupted' || status === 'completed') { completed.push(rec); @@ -1053,7 +1462,10 @@ const DVRPage = () => { for (const r of arr) { const cp = r.custom_properties || {}; const pr = cp.program || {}; - const sig = pr?.id != null ? 
`id:${pr.id}` : `slot:${r.channel}|${r.start_time}|${r.end_time}|${(pr.title||'')}`; + const sig = + pr?.id != null + ? `id:${pr.id}` + : `slot:${r.channel}|${r.start_time}|${r.end_time}|${pr.title || ''}`; if (sigs.has(sig)) continue; sigs.add(sig); out.push(r); @@ -1061,11 +1473,15 @@ const DVRPage = () => { return out; }; - const inProgressDedup = dedupeByProgramOrSlot(inProgress).sort((a, b) => dayjs(b.start_time) - dayjs(a.start_time)); + const inProgressDedup = dedupeByProgramOrSlot(inProgress).sort( + (a, b) => toUserTime(b.start_time) - toUserTime(a.start_time) + ); // Group upcoming by series title+tvg_id (keep only next episode) const grouped = new Map(); - const upcomingDedup = dedupeByProgramOrSlot(upcoming).sort((a, b) => dayjs(a.start_time) - dayjs(b.start_time)); + const upcomingDedup = dedupeByProgramOrSlot(upcoming).sort( + (a, b) => toUserTime(a.start_time) - toUserTime(b.start_time) + ); for (const rec of upcomingDedup) { const cp = rec.custom_properties || {}; const prog = cp.program || {}; @@ -1082,9 +1498,13 @@ const DVRPage = () => { item._group_count = e.count; return item; }); - completed.sort((a, b) => dayjs(b.end_time) - dayjs(a.end_time)); - return { inProgress: inProgressDedup, upcoming: upcomingGrouped, completed }; - }, [recordings, now]); + completed.sort((a, b) => toUserTime(b.end_time) - toUserTime(a.end_time)); + return { + inProgress: inProgressDedup, + upcoming: upcomingGrouped, + completed, + }; + }, [recordings, now, toUserTime]); return ( @@ -1109,9 +1529,21 @@ const DVRPage = () => { Currently Recording {inProgress.length} - + {inProgress.map((rec) => ( - + ))} {inProgress.length === 0 && ( @@ -1126,9 +1558,21 @@ const DVRPage = () => { Upcoming Recordings {upcoming.length} - + {upcoming.map((rec) => ( - + ))} {upcoming.length === 0 && ( @@ -1143,9 +1587,21 @@ const DVRPage = () => { Previously Recorded {completed.length} - + {completed.map((rec) => ( - + ))} {completed.length === 0 && ( @@ -1184,17 +1640,19 @@ const DVRPage 
= () => { onClose={closeDetails} recording={detailsRecording} channel={channels[detailsRecording.channel]} - posterUrl={( - detailsRecording.custom_properties?.poster_logo_id + posterUrl={ + (detailsRecording.custom_properties?.poster_logo_id ? `/api/channels/logos/${detailsRecording.custom_properties.poster_logo_id}/cache/` - : detailsRecording.custom_properties?.poster_url || channels[detailsRecording.channel]?.logo?.cache_url - ) || '/logo.png'} + : detailsRecording.custom_properties?.poster_url || + channels[detailsRecording.channel]?.logo?.cache_url) || + '/logo.png' + } env_mode={useSettingsStore.getState().environment.env_mode} onWatchLive={() => { const rec = detailsRecording; - const now = dayjs(); - const s = dayjs(rec.start_time); - const e = dayjs(rec.end_time); + const now = userNow(); + const s = toUserTime(rec.start_time); + const e = toUserTime(rec.end_time); if (now.isAfter(s) && now.isBefore(e)) { // call into child RecordingCard behavior by constructing a URL like there const channel = channels[rec.channel]; @@ -1207,12 +1665,28 @@ const DVRPage = () => { } }} onWatchRecording={() => { - let fileUrl = detailsRecording.custom_properties?.file_url || detailsRecording.custom_properties?.output_file_url; + let fileUrl = + detailsRecording.custom_properties?.file_url || + detailsRecording.custom_properties?.output_file_url; if (!fileUrl) return; - if (useSettingsStore.getState().environment.env_mode === 'dev' && fileUrl.startsWith('/')) { + if ( + useSettingsStore.getState().environment.env_mode === 'dev' && + fileUrl.startsWith('/') + ) { fileUrl = `${window.location.protocol}//${window.location.hostname}:5656${fileUrl}`; } - useVideoStore.getState().showVideo(fileUrl, 'vod', { name: detailsRecording.custom_properties?.program?.title || 'Recording', logo: { url: (detailsRecording.custom_properties?.poster_logo_id ? 
`/api/channels/logos/${detailsRecording.custom_properties.poster_logo_id}/cache/` : channels[detailsRecording.channel]?.logo?.cache_url) || '/logo.png' } }); + useVideoStore.getState().showVideo(fileUrl, 'vod', { + name: + detailsRecording.custom_properties?.program?.title || + 'Recording', + logo: { + url: + (detailsRecording.custom_properties?.poster_logo_id + ? `/api/channels/logos/${detailsRecording.custom_properties.poster_logo_id}/cache/` + : channels[detailsRecording.channel]?.logo?.cache_url) || + '/logo.png', + }, + }); }} onEdit={(rec) => { setEditRecording(rec); diff --git a/frontend/src/pages/Settings.jsx b/frontend/src/pages/Settings.jsx index fa30cd74..865358df 100644 --- a/frontend/src/pages/Settings.jsx +++ b/frontend/src/pages/Settings.jsx @@ -1,4 +1,10 @@ -import React, { useEffect, useState } from 'react'; +import React, { + useCallback, + useEffect, + useMemo, + useRef, + useState, +} from 'react'; import API from '../api'; import useSettingsStore from '../store/settings'; import useUserAgentsStore from '../store/userAgents'; @@ -35,6 +41,140 @@ import { import ConfirmationDialog from '../components/ConfirmationDialog'; import useWarningsStore from '../store/warnings'; +const TIMEZONE_FALLBACKS = [ + 'UTC', + 'America/New_York', + 'America/Chicago', + 'America/Denver', + 'America/Los_Angeles', + 'America/Phoenix', + 'America/Anchorage', + 'Pacific/Honolulu', + 'Europe/London', + 'Europe/Paris', + 'Europe/Berlin', + 'Europe/Madrid', + 'Europe/Warsaw', + 'Europe/Moscow', + 'Asia/Dubai', + 'Asia/Kolkata', + 'Asia/Shanghai', + 'Asia/Tokyo', + 'Asia/Seoul', + 'Australia/Sydney', +]; + +const getSupportedTimeZones = () => { + try { + if (typeof Intl.supportedValuesOf === 'function') { + return Intl.supportedValuesOf('timeZone'); + } + } catch (error) { + console.warn('Unable to enumerate supported time zones:', error); + } + return TIMEZONE_FALLBACKS; +}; + +const getTimeZoneOffsetMinutes = (date, timeZone) => { + try { + const dtf = new 
Intl.DateTimeFormat('en-US', { + timeZone, + year: 'numeric', + month: '2-digit', + day: '2-digit', + hour: '2-digit', + minute: '2-digit', + second: '2-digit', + hourCycle: 'h23', + }); + const parts = dtf.formatToParts(date).reduce((acc, part) => { + if (part.type !== 'literal') acc[part.type] = part.value; + return acc; + }, {}); + const asUTC = Date.UTC( + Number(parts.year), + Number(parts.month) - 1, + Number(parts.day), + Number(parts.hour), + Number(parts.minute), + Number(parts.second) + ); + return (asUTC - date.getTime()) / 60000; + } catch (error) { + console.warn(`Failed to compute offset for ${timeZone}:`, error); + return 0; + } +}; + +const formatOffset = (minutes) => { + const rounded = Math.round(minutes); + const sign = rounded < 0 ? '-' : '+'; + const absolute = Math.abs(rounded); + const hours = String(Math.floor(absolute / 60)).padStart(2, '0'); + const mins = String(absolute % 60).padStart(2, '0'); + return `UTC${sign}${hours}:${mins}`; +}; + +const buildTimeZoneOptions = (preferredZone) => { + const zones = getSupportedTimeZones(); + const referenceYear = new Date().getUTCFullYear(); + const janDate = new Date(Date.UTC(referenceYear, 0, 1, 12, 0, 0)); + const julDate = new Date(Date.UTC(referenceYear, 6, 1, 12, 0, 0)); + + const options = zones + .map((zone) => { + const janOffset = getTimeZoneOffsetMinutes(janDate, zone); + const julOffset = getTimeZoneOffsetMinutes(julDate, zone); + const currentOffset = getTimeZoneOffsetMinutes(new Date(), zone); + const minOffset = Math.min(janOffset, julOffset); + const maxOffset = Math.max(janOffset, julOffset); + const usesDst = minOffset !== maxOffset; + const labelParts = [`now ${formatOffset(currentOffset)}`]; + if (usesDst) { + labelParts.push( + `DST range ${formatOffset(minOffset)} to ${formatOffset(maxOffset)}` + ); + } + return { + value: zone, + label: `${zone} (${labelParts.join(' | ')})`, + numericOffset: minOffset, + }; + }) + .sort((a, b) => { + if (a.numericOffset !== b.numericOffset) { 
+ return a.numericOffset - b.numericOffset; + } + return a.value.localeCompare(b.value); + }); + if ( + preferredZone && + !options.some((option) => option.value === preferredZone) + ) { + const currentOffset = getTimeZoneOffsetMinutes(new Date(), preferredZone); + options.push({ + value: preferredZone, + label: `${preferredZone} (now ${formatOffset(currentOffset)})`, + numericOffset: currentOffset, + }); + options.sort((a, b) => { + if (a.numericOffset !== b.numericOffset) { + return a.numericOffset - b.numericOffset; + } + return a.value.localeCompare(b.value); + }); + } + return options; +}; + +const getDefaultTimeZone = () => { + try { + return Intl.DateTimeFormat().resolvedOptions().timeZone || 'UTC'; + } catch (error) { + return 'UTC'; + } +}; + const SettingsPage = () => { const settings = useSettingsStore((s) => s.settings); const userAgents = useUserAgentsStore((s) => s.userAgents); @@ -61,12 +201,49 @@ const SettingsPage = () => { const [comskipFile, setComskipFile] = useState(null); const [comskipUploadLoading, setComskipUploadLoading] = useState(false); - const [comskipConfig, setComskipConfig] = useState({ path: '', exists: false }); + const [comskipConfig, setComskipConfig] = useState({ + path: '', + exists: false, + }); // UI / local storage settings const [tableSize, setTableSize] = useLocalStorage('table-size', 'default'); const [timeFormat, setTimeFormat] = useLocalStorage('time-format', '12h'); const [dateFormat, setDateFormat] = useLocalStorage('date-format', 'mdy'); + const [timeZone, setTimeZone] = useLocalStorage( + 'time-zone', + getDefaultTimeZone() + ); + const timeZoneOptions = useMemo( + () => buildTimeZoneOptions(timeZone), + [timeZone] + ); + const timeZoneSyncedRef = useRef(false); + + const persistTimeZoneSetting = useCallback( + async (tzValue) => { + try { + const existing = settings['system-time-zone']; + if (existing && existing.id) { + await API.updateSetting({ ...existing, value: tzValue }); + } else { + await 
API.createSetting({ + key: 'system-time-zone', + name: 'System Time Zone', + value: tzValue, + }); + } + } catch (error) { + console.error('Failed to persist time zone setting', error); + notifications.show({ + title: 'Failed to update time zone', + message: 'Could not save the selected time zone. Please try again.', + color: 'red', + }); + } + }, + [settings] + ); const regionChoices = REGION_CHOICES; @@ -187,8 +364,19 @@ const SettingsPage = () => { console.error('Error parsing proxy settings:', error); } } + + const tzSetting = settings['system-time-zone']; + if (tzSetting?.value) { + timeZoneSyncedRef.current = true; + setTimeZone((prev) => + prev === tzSetting.value ? prev : tzSetting.value + ); + } else if (!timeZoneSyncedRef.current && timeZone) { + timeZoneSyncedRef.current = true; + persistTimeZoneSetting(timeZone); + } } - }, [settings]); + }, [settings, timeZone, setTimeZone, persistTimeZoneSetting]); useEffect(() => { const loadComskipConfig = async () => { @@ -357,13 +545,19 @@ const SettingsPage = () => { const onUISettingsChange = (name, value) => { switch (name) { case 'table-size': - setTableSize(value); + if (value) setTableSize(value); break; case 'time-format': - setTimeFormat(value); + if (value) setTimeFormat(value); break; case 'date-format': - setDateFormat(value); + if (value) setDateFormat(value); + break; + case 'time-zone': + if (value) { + setTimeZone(value); + persistTimeZoneSetting(value); + } break; } }; @@ -490,6 +684,14 @@ const SettingsPage = () => { }, ]} /> +