From 0dbc5221b2d602323de9fa938f07f1b1a4363126 Mon Sep 17 00:00:00 2001 From: BigPanda Date: Thu, 18 Sep 2025 21:20:47 +0100 Subject: [PATCH 001/220] Add 'UK' region I'm not sure if this was intentional, but the UK seems to be missing from the region list. --- frontend/src/constants.js | 1 + 1 file changed, 1 insertion(+) diff --git a/frontend/src/constants.js b/frontend/src/constants.js index 78f374d4..528c5f04 100644 --- a/frontend/src/constants.js +++ b/frontend/src/constants.js @@ -303,6 +303,7 @@ export const REGION_CHOICES = [ { value: 'tz', label: 'TZ' }, { value: 'ua', label: 'UA' }, { value: 'ug', label: 'UG' }, + { value: 'uk', label: 'UK' }, { value: 'um', label: 'UM' }, { value: 'us', label: 'US' }, { value: 'uy', label: 'UY' }, From 3eaa76174e507a3eab3933cd398193715ef26dfc Mon Sep 17 00:00:00 2001 From: Jim McBride Date: Wed, 26 Nov 2025 21:11:13 -0600 Subject: [PATCH 002/220] Feature: Automated configuration backups with scheduling - Create/Download/Upload/Restore database backups (PostgreSQL and SQLite) - Configurable data directory backups (via settings.py) - Scheduled backups (daily/weekly) via Celery Beat - Retention policy (keep last N backups) - Token-based auth for async task polling - X-Accel-Redirect support for nginx file serving - Comprehensive tests --- .dockerignore | 1 + apps/api/urls.py | 1 + apps/backups/__init__.py | 0 apps/backups/api_urls.py | 18 + apps/backups/api_views.py | 364 ++++++ apps/backups/apps.py | 7 + apps/backups/migrations/__init__.py | 0 apps/backups/models.py | 0 apps/backups/scheduler.py | 144 +++ apps/backups/services.py | 347 ++++++ apps/backups/tasks.py | 106 ++ apps/backups/tests.py | 1010 +++++++++++++++++ dispatcharr/settings.py | 7 + docker/nginx.conf | 7 + docker/uwsgi.ini | 1 + frontend/src/api.js | 177 +++ .../src/components/backups/BackupManager.jsx | 496 ++++++++ frontend/src/pages/Settings.jsx | 8 + 18 files changed, 2694 insertions(+) create mode 100644 apps/backups/__init__.py create mode 100644 apps/backups/api_urls.py create mode 100644 apps/backups/api_views.py create mode 100644 apps/backups/apps.py create mode 100644 apps/backups/migrations/__init__.py create mode 100644 apps/backups/models.py create mode 100644 apps/backups/scheduler.py create mode 100644 apps/backups/services.py create mode 100644 apps/backups/tasks.py create mode 100644 apps/backups/tests.py create mode 100644 frontend/src/components/backups/BackupManager.jsx diff --git a/.dockerignore b/.dockerignore index c79ca7b4..296537de 100755 --- a/.dockerignore +++ b/.dockerignore @@ -31,3 +31,4 @@ LICENSE README.md data/ +docker/data/ diff --git a/apps/api/urls.py b/apps/api/urls.py index 7d9edb52..4c92c70a 100644 --- a/apps/api/urls.py +++ b/apps/api/urls.py @@ -27,6 +27,7 @@ urlpatterns = [ path('core/', include(('core.api_urls', 'core'), namespace='core')), path('plugins/', include(('apps.plugins.api_urls', 'plugins'), namespace='plugins')), path('vod/', include(('apps.vod.api_urls', 'vod'), namespace='vod')), + path('backups/', include(('apps.backups.api_urls', 'backups'), namespace='backups')), # path('output/', include(('apps.output.api_urls', 'output'), namespace='output')), #path('player/', include(('apps.player.api_urls', 'player'), namespace='player')), #path('settings/', include(('apps.settings.api_urls', 'settings'), namespace='settings')), diff --git a/apps/backups/__init__.py b/apps/backups/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/apps/backups/api_urls.py b/apps/backups/api_urls.py new file mode 100644 index 
00000000..226758cc --- /dev/null +++ b/apps/backups/api_urls.py @@ -0,0 +1,18 @@ +from django.urls import path + +from . import api_views + +app_name = "backups" + +urlpatterns = [ + path("", api_views.list_backups, name="backup-list"), + path("create/", api_views.create_backup, name="backup-create"), + path("upload/", api_views.upload_backup, name="backup-upload"), + path("schedule/", api_views.get_schedule, name="backup-schedule-get"), + path("schedule/update/", api_views.update_schedule, name="backup-schedule-update"), + path("status/<str:task_id>/", api_views.backup_status, name="backup-status"), + path("<str:filename>/download-token/", api_views.get_download_token, name="backup-download-token"), + path("<str:filename>/download/", api_views.download_backup, name="backup-download"), + path("<str:filename>/delete/", api_views.delete_backup, name="backup-delete"), + path("<str:filename>/restore/", api_views.restore_backup, name="backup-restore"), +] diff --git a/apps/backups/api_views.py b/apps/backups/api_views.py new file mode 100644 index 00000000..c6ff7d26 --- /dev/null +++ b/apps/backups/api_views.py @@ -0,0 +1,364 @@ +import hashlib +import hmac +import logging +import os +from pathlib import Path + +from celery.result import AsyncResult +from django.conf import settings +from django.http import HttpResponse, StreamingHttpResponse, Http404 +from rest_framework import status +from rest_framework.decorators import api_view, permission_classes, parser_classes +from rest_framework.permissions import IsAdminUser, AllowAny +from rest_framework.parsers import MultiPartParser, FormParser +from rest_framework.response import Response + +from . import services +from .tasks import create_backup_task, restore_backup_task +from .scheduler import get_schedule_settings, update_schedule_settings + +logger = logging.getLogger(__name__) + + +def _generate_task_token(task_id: str) -> str: + """Generate a signed token for task status access without auth.""" + secret = settings.SECRET_KEY.encode() + return hmac.new(secret, task_id.encode(), hashlib.sha256).hexdigest()[:32] + + +def _verify_task_token(task_id: str, token: str) -> bool: + """Verify a task token is valid.""" + expected = _generate_task_token(task_id) + return hmac.compare_digest(expected, token) + + +@api_view(["GET"]) +@permission_classes([IsAdminUser]) +def list_backups(request): + """List all available backup files.""" + try: + backups = services.list_backups() + return Response(backups, status=status.HTTP_200_OK) + except Exception as e: + return Response( + {"detail": f"Failed to list backups: {str(e)}"}, + status=status.HTTP_500_INTERNAL_SERVER_ERROR, + ) + + +@api_view(["POST"]) +@permission_classes([IsAdminUser]) +def create_backup(request): + """Create a new backup (async via Celery).""" + try: + task = create_backup_task.delay() + return Response( + { + "detail": "Backup started", + "task_id": task.id, + "task_token": _generate_task_token(task.id), + }, + status=status.HTTP_202_ACCEPTED, + ) + except Exception as e: + return Response( + {"detail": f"Failed to start backup: {str(e)}"}, + status=status.HTTP_500_INTERNAL_SERVER_ERROR, + ) + + +@api_view(["GET"]) +@permission_classes([AllowAny]) +def backup_status(request, task_id): + """Check the status of a backup/restore task.
+ + Requires either: + - Valid admin authentication, OR + - Valid task_token query parameter + """ + # Check for token-based auth (for restore when session is invalidated) + token = request.query_params.get("token") + if token: + if not _verify_task_token(task_id, token): + return Response( + {"detail": "Invalid task token"}, + status=status.HTTP_403_FORBIDDEN, + ) + else: + # Fall back to admin auth check + if not request.user.is_authenticated or not request.user.is_staff: + return Response( + {"detail": "Authentication required"}, + status=status.HTTP_401_UNAUTHORIZED, + ) + + try: + result = AsyncResult(task_id) + + if result.ready(): + task_result = result.get() + if task_result.get("status") == "completed": + return Response({ + "state": "completed", + "result": task_result, + }) + else: + return Response({ + "state": "failed", + "error": task_result.get("error", "Unknown error"), + }) + elif result.failed(): + return Response({ + "state": "failed", + "error": str(result.result), + }) + else: + return Response({ + "state": result.state.lower(), + }) + except Exception as e: + return Response( + {"detail": f"Failed to get task status: {str(e)}"}, + status=status.HTTP_500_INTERNAL_SERVER_ERROR, + ) + + +@api_view(["GET"]) +@permission_classes([IsAdminUser]) +def get_download_token(request, filename): + """Get a signed token for downloading a backup file.""" + try: + # Security: prevent path traversal + if ".." in filename or "/" in filename or "\\" in filename: + raise Http404("Invalid filename") + + backup_dir = services.get_backup_dir() + backup_file = backup_dir / filename + + if not backup_file.exists(): + raise Http404("Backup file not found") + + token = _generate_task_token(filename) + return Response({"token": token}) + except Http404: + raise + except Exception as e: + return Response( + {"detail": f"Failed to generate token: {str(e)}"}, + status=status.HTTP_500_INTERNAL_SERVER_ERROR, + ) + + +@api_view(["GET"]) +@permission_classes([AllowAny]) +def download_backup(request, filename): + """Download a backup file. + + Requires either: + - Valid admin authentication, OR + - Valid download_token query parameter + """ + # Check for token-based auth (avoids CORS preflight issues) + token = request.query_params.get("token") + if token: + if not _verify_task_token(filename, token): + return Response( + {"detail": "Invalid download token"}, + status=status.HTTP_403_FORBIDDEN, + ) + else: + # Fall back to admin auth check + if not request.user.is_authenticated or not request.user.is_staff: + return Response( + {"detail": "Authentication required"}, + status=status.HTTP_401_UNAUTHORIZED, + ) + + try: + # Security: prevent path traversal by checking for suspicious characters + if ".." 
in filename or "/" in filename or "\\" in filename: + raise Http404("Invalid filename") + + backup_dir = services.get_backup_dir() + backup_file = (backup_dir / filename).resolve() + + # Security: ensure the resolved path is still within backup_dir + if not str(backup_file).startswith(str(backup_dir.resolve())): + raise Http404("Invalid filename") + + if not backup_file.exists() or not backup_file.is_file(): + raise Http404("Backup file not found") + + file_size = backup_file.stat().st_size + + # Use X-Accel-Redirect for nginx (AIO container) - nginx serves file directly + # Fall back to streaming for non-nginx deployments + use_nginx_accel = os.environ.get("USE_NGINX_ACCEL", "").lower() == "true" + logger.info(f"[DOWNLOAD] File: {filename}, Size: {file_size}, USE_NGINX_ACCEL: {use_nginx_accel}") + + if use_nginx_accel: + # X-Accel-Redirect: Django returns immediately, nginx serves file + logger.info(f"[DOWNLOAD] Using X-Accel-Redirect: /protected-backups/{filename}") + response = HttpResponse() + response["X-Accel-Redirect"] = f"/protected-backups/{filename}" + response["Content-Type"] = "application/zip" + response["Content-Length"] = file_size + response["Content-Disposition"] = f'attachment; filename="{filename}"' + return response + else: + # Streaming fallback for non-nginx deployments + logger.info(f"[DOWNLOAD] Using streaming fallback (no nginx)") + def file_iterator(file_path, chunk_size=2 * 1024 * 1024): + with open(file_path, "rb") as f: + while chunk := f.read(chunk_size): + yield chunk + + response = StreamingHttpResponse( + file_iterator(backup_file), + content_type="application/zip", + ) + response["Content-Length"] = file_size + response["Content-Disposition"] = f'attachment; filename="{filename}"' + return response + except Http404: + raise + except Exception as e: + return Response( + {"detail": f"Download failed: {str(e)}"}, + status=status.HTTP_500_INTERNAL_SERVER_ERROR, + ) + + +@api_view(["DELETE"]) +@permission_classes([IsAdminUser]) +def delete_backup(request, filename): + """Delete a backup file.""" + try: + # Security: prevent path traversal + if ".." 
in filename or "/" in filename or "\\" in filename: + raise Http404("Invalid filename") + + services.delete_backup(filename) + return Response( + {"detail": "Backup deleted successfully"}, + status=status.HTTP_204_NO_CONTENT, + ) + except FileNotFoundError: + raise Http404("Backup file not found") + except Exception as e: + return Response( + {"detail": f"Delete failed: {str(e)}"}, + status=status.HTTP_500_INTERNAL_SERVER_ERROR, + ) + + +@api_view(["POST"]) +@permission_classes([IsAdminUser]) +@parser_classes([MultiPartParser, FormParser]) +def upload_backup(request): + """Upload a backup file for restoration.""" + uploaded = request.FILES.get("file") + if not uploaded: + return Response( + {"detail": "No file uploaded"}, + status=status.HTTP_400_BAD_REQUEST, + ) + + try: + backup_dir = services.get_backup_dir() + filename = uploaded.name or "uploaded-backup.zip" + + # Ensure unique filename + backup_file = backup_dir / filename + counter = 1 + while backup_file.exists(): + name_parts = filename.rsplit(".", 1) + if len(name_parts) == 2: + backup_file = backup_dir / f"{name_parts[0]}-{counter}.{name_parts[1]}" + else: + backup_file = backup_dir / f"{filename}-{counter}" + counter += 1 + + # Save uploaded file + with backup_file.open("wb") as f: + for chunk in uploaded.chunks(): + f.write(chunk) + + return Response( + { + "detail": "Backup uploaded successfully", + "filename": backup_file.name, + }, + status=status.HTTP_201_CREATED, + ) + except Exception as e: + return Response( + {"detail": f"Upload failed: {str(e)}"}, + status=status.HTTP_500_INTERNAL_SERVER_ERROR, + ) + + +@api_view(["POST"]) +@permission_classes([IsAdminUser]) +def restore_backup(request, filename): + """Restore from a backup file (async via Celery). WARNING: This will flush the database!""" + try: + # Security: prevent path traversal + if ".." 
in filename or "/" in filename or "\\" in filename: + raise Http404("Invalid filename") + + backup_dir = services.get_backup_dir() + backup_file = backup_dir / filename + + if not backup_file.exists(): + raise Http404("Backup file not found") + + task = restore_backup_task.delay(filename) + return Response( + { + "detail": "Restore started", + "task_id": task.id, + "task_token": _generate_task_token(task.id), + }, + status=status.HTTP_202_ACCEPTED, + ) + except Http404: + raise + except Exception as e: + return Response( + {"detail": f"Failed to start restore: {str(e)}"}, + status=status.HTTP_500_INTERNAL_SERVER_ERROR, + ) + + +@api_view(["GET"]) +@permission_classes([IsAdminUser]) +def get_schedule(request): + """Get backup schedule settings.""" + try: + settings = get_schedule_settings() + return Response(settings) + except Exception as e: + return Response( + {"detail": f"Failed to get schedule: {str(e)}"}, + status=status.HTTP_500_INTERNAL_SERVER_ERROR, + ) + + +@api_view(["PUT"]) +@permission_classes([IsAdminUser]) +def update_schedule(request): + """Update backup schedule settings.""" + try: + settings = update_schedule_settings(request.data) + return Response(settings) + except ValueError as e: + return Response( + {"detail": str(e)}, + status=status.HTTP_400_BAD_REQUEST, + ) + except Exception as e: + return Response( + {"detail": f"Failed to update schedule: {str(e)}"}, + status=status.HTTP_500_INTERNAL_SERVER_ERROR, + ) diff --git a/apps/backups/apps.py b/apps/backups/apps.py new file mode 100644 index 00000000..ee644149 --- /dev/null +++ b/apps/backups/apps.py @@ -0,0 +1,7 @@ +from django.apps import AppConfig + + +class BackupsConfig(AppConfig): + default_auto_field = "django.db.models.BigAutoField" + name = "apps.backups" + verbose_name = "Backups" diff --git a/apps/backups/migrations/__init__.py b/apps/backups/migrations/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/apps/backups/models.py b/apps/backups/models.py new file mode 100644 index 00000000..e69de29b diff --git a/apps/backups/scheduler.py b/apps/backups/scheduler.py new file mode 100644 index 00000000..52186e90 --- /dev/null +++ b/apps/backups/scheduler.py @@ -0,0 +1,144 @@ +import json +import logging + +from django_celery_beat.models import PeriodicTask, CrontabSchedule + +from core.models import CoreSettings + +logger = logging.getLogger(__name__) + +BACKUP_SCHEDULE_TASK_NAME = "backup-scheduled-task" + +SETTING_KEYS = { + "enabled": "backup_schedule_enabled", + "frequency": "backup_schedule_frequency", + "time": "backup_schedule_time", + "day_of_week": "backup_schedule_day_of_week", + "retention_count": "backup_retention_count", +} + +DEFAULTS = { + "enabled": False, + "frequency": "daily", + "time": "03:00", + "day_of_week": 0, # Sunday + "retention_count": 0, +} + + +def _get_setting(key: str, default=None): + """Get a backup setting from CoreSettings.""" + try: + setting = CoreSettings.objects.get(key=SETTING_KEYS[key]) + value = setting.value + if key == "enabled": + return value.lower() == "true" + elif key in ("day_of_week", "retention_count"): + return int(value) + return value + except CoreSettings.DoesNotExist: + return default if default is not None else DEFAULTS.get(key) + + +def _set_setting(key: str, value) -> None: + """Set a backup setting in CoreSettings.""" + str_value = str(value).lower() if isinstance(value, bool) else str(value) + CoreSettings.objects.update_or_create( + key=SETTING_KEYS[key], + defaults={ + "name": f"Backup {key.replace('_', ' ').title()}", + "value": 
str_value, + }, + ) + + +def get_schedule_settings() -> dict: + """Get all backup schedule settings.""" + return { + "enabled": _get_setting("enabled"), + "frequency": _get_setting("frequency"), + "time": _get_setting("time"), + "day_of_week": _get_setting("day_of_week"), + "retention_count": _get_setting("retention_count"), + } + + +def update_schedule_settings(data: dict) -> dict: + """Update backup schedule settings and sync the PeriodicTask.""" + # Validate + if "frequency" in data and data["frequency"] not in ("daily", "weekly"): + raise ValueError("frequency must be 'daily' or 'weekly'") + + if "time" in data: + try: + hour, minute = data["time"].split(":") + int(hour) + int(minute) + except (ValueError, AttributeError): + raise ValueError("time must be in HH:MM format") + + if "day_of_week" in data: + day = int(data["day_of_week"]) + if day < 0 or day > 6: + raise ValueError("day_of_week must be 0-6 (Sunday-Saturday)") + + if "retention_count" in data: + count = int(data["retention_count"]) + if count < 0: + raise ValueError("retention_count must be >= 0") + + # Update settings + for key in ("enabled", "frequency", "time", "day_of_week", "retention_count"): + if key in data: + _set_setting(key, data[key]) + + # Sync the periodic task + _sync_periodic_task() + + return get_schedule_settings() + + +def _sync_periodic_task() -> None: + """Create, update, or delete the scheduled backup task based on settings.""" + settings = get_schedule_settings() + + if not settings["enabled"]: + # Delete the task if it exists + PeriodicTask.objects.filter(name=BACKUP_SCHEDULE_TASK_NAME).delete() + logger.info("Backup schedule disabled, removed periodic task") + return + + # Parse time + hour, minute = settings["time"].split(":") + + # Build crontab based on frequency + if settings["frequency"] == "daily": + crontab, _ = CrontabSchedule.objects.get_or_create( + minute=minute, + hour=hour, + day_of_week="*", + day_of_month="*", + month_of_year="*", + ) + else: # weekly + crontab, _ = CrontabSchedule.objects.get_or_create( + minute=minute, + hour=hour, + day_of_week=str(settings["day_of_week"]), + day_of_month="*", + month_of_year="*", + ) + + # Create or update the periodic task + task, created = PeriodicTask.objects.update_or_create( + name=BACKUP_SCHEDULE_TASK_NAME, + defaults={ + "task": "apps.backups.tasks.scheduled_backup_task", + "crontab": crontab, + "enabled": True, + "kwargs": json.dumps({"retention_count": settings["retention_count"]}), + }, + ) + + action = "Created" if created else "Updated" + logger.info(f"{action} backup schedule: {settings['frequency']} at {settings['time']}") diff --git a/apps/backups/services.py b/apps/backups/services.py new file mode 100644 index 00000000..96838417 --- /dev/null +++ b/apps/backups/services.py @@ -0,0 +1,347 @@ +import datetime +import json +import os +import shutil +import subprocess +import tempfile +from pathlib import Path +from zipfile import ZipFile, ZIP_DEFLATED +import logging + +from django.conf import settings + +logger = logging.getLogger(__name__) + + +def get_backup_dir() -> Path: + """Get the backup directory, creating it if necessary.""" + backup_dir = Path(settings.BACKUP_ROOT) + backup_dir.mkdir(parents=True, exist_ok=True) + return backup_dir + + +def get_data_dirs() -> list[Path]: + """Get list of data directories to include in backups.""" + dirs = getattr(settings, "BACKUP_DATA_DIRS", []) + return [Path(d) for d in dirs if d and Path(d).exists()] + + +def _is_postgresql() -> bool: + """Check if we're using PostgreSQL.""" + return 
settings.DATABASES["default"]["ENGINE"] == "django.db.backends.postgresql" + + +def _get_pg_env() -> dict: + """Get environment variables for PostgreSQL commands.""" + db_config = settings.DATABASES["default"] + env = os.environ.copy() + env["PGPASSWORD"] = db_config.get("PASSWORD", "") + return env + + +def _get_pg_args() -> list[str]: + """Get common PostgreSQL command arguments.""" + db_config = settings.DATABASES["default"] + return [ + "-h", db_config.get("HOST", "localhost"), + "-p", str(db_config.get("PORT", 5432)), + "-U", db_config.get("USER", "postgres"), + "-d", db_config.get("NAME", "dispatcharr"), + ] + + +def _dump_postgresql(output_file: Path) -> None: + """Dump PostgreSQL database using pg_dump.""" + logger.info("Dumping PostgreSQL database with pg_dump...") + + cmd = [ + "pg_dump", + *_get_pg_args(), + "-Fc", # Custom format for pg_restore + "-v", # Verbose + "-f", str(output_file), + ] + + result = subprocess.run( + cmd, + env=_get_pg_env(), + capture_output=True, + text=True, + ) + + if result.returncode != 0: + logger.error(f"pg_dump failed: {result.stderr}") + raise RuntimeError(f"pg_dump failed: {result.stderr}") + + logger.debug(f"pg_dump output: {result.stderr}") + + +def _restore_postgresql(dump_file: Path) -> None: + """Restore PostgreSQL database using pg_restore.""" + logger.info("[PG_RESTORE] Starting pg_restore...") + logger.info(f"[PG_RESTORE] Dump file: {dump_file}") + + pg_args = _get_pg_args() + logger.info(f"[PG_RESTORE] Connection args: {pg_args}") + + cmd = [ + "pg_restore", + "--clean", # Clean (drop) database objects before recreating + *pg_args, + "-v", # Verbose + str(dump_file), + ] + + logger.info(f"[PG_RESTORE] Running command: {' '.join(cmd)}") + + result = subprocess.run( + cmd, + env=_get_pg_env(), + capture_output=True, + text=True, + ) + + logger.info(f"[PG_RESTORE] Return code: {result.returncode}") + + # pg_restore may return non-zero even on partial success + # Check for actual errors vs warnings + if result.returncode != 0: + # Some errors during restore are expected (e.g., "does not exist" when cleaning) + # Only fail on critical errors + stderr = result.stderr.lower() + if "fatal" in stderr or "could not connect" in stderr: + logger.error(f"[PG_RESTORE] Failed critically: {result.stderr}") + raise RuntimeError(f"pg_restore failed: {result.stderr}") + else: + logger.warning(f"[PG_RESTORE] Completed with warnings: {result.stderr[:500]}...") + + logger.info("[PG_RESTORE] Completed successfully") + + +def _dump_sqlite(output_file: Path) -> None: + """Dump SQLite database using sqlite3 .backup command.""" + logger.info("Dumping SQLite database with sqlite3 .backup...") + db_path = Path(settings.DATABASES["default"]["NAME"]) + + if not db_path.exists(): + raise FileNotFoundError(f"SQLite database not found: {db_path}") + + # Use sqlite3 .backup command via stdin for reliable execution + result = subprocess.run( + ["sqlite3", str(db_path)], + input=f".backup '{output_file}'\n", + capture_output=True, + text=True, + ) + + if result.returncode != 0: + logger.error(f"sqlite3 backup failed: {result.stderr}") + raise RuntimeError(f"sqlite3 backup failed: {result.stderr}") + + # Verify the backup file was created + if not output_file.exists(): + raise RuntimeError("sqlite3 backup failed: output file not created") + + logger.info(f"sqlite3 backup completed successfully: {output_file}") + + +def _restore_sqlite(dump_file: Path) -> None: + """Restore SQLite database by replacing the database file.""" + logger.info("Restoring SQLite database...") + 
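+    # Overview of the copy-based strategy used below: the dump produced by
+    # _dump_sqlite is itself a complete SQLite database file, so restoring is
+    # (1) set the live file aside as a .bak safety copy, (2) copy the dump
+    # over it, (3) sanity-check it with a trivial ".tables" query, rolling
+    # the .bak copy back if the check fails.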
db_path = Path(settings.DATABASES["default"]["NAME"]) + backup_current = None + + # Backup current database before overwriting + if db_path.exists(): + backup_current = db_path.with_suffix(".db.bak") + shutil.copy2(db_path, backup_current) + logger.info(f"Backed up current database to {backup_current}") + + # Ensure parent directory exists + db_path.parent.mkdir(parents=True, exist_ok=True) + + # The backup file from _dump_sqlite is a complete SQLite database file + # We can simply copy it over the existing database + shutil.copy2(dump_file, db_path) + + # Verify the restore worked by checking if sqlite3 can read it + result = subprocess.run( + ["sqlite3", str(db_path)], + input=".tables\n", + capture_output=True, + text=True, + ) + + if result.returncode != 0: + logger.error(f"sqlite3 verification failed: {result.stderr}") + # Try to restore from backup + if backup_current and backup_current.exists(): + shutil.copy2(backup_current, db_path) + logger.info("Restored original database from backup") + raise RuntimeError(f"sqlite3 restore verification failed: {result.stderr}") + + logger.info("sqlite3 restore completed successfully") + + +def create_backup() -> Path: + """ + Create a backup archive containing database dump and data directories. + Returns the path to the created backup file. + """ + backup_dir = get_backup_dir() + timestamp = datetime.datetime.now(datetime.UTC).strftime("%Y.%m.%d.%H.%M.%S") + backup_name = f"dispatcharr-backup-{timestamp}.zip" + backup_file = backup_dir / backup_name + + logger.info(f"Creating backup: {backup_name}") + + with tempfile.TemporaryDirectory(prefix="dispatcharr-backup-") as temp_dir: + temp_path = Path(temp_dir) + + # Determine database type and dump accordingly + if _is_postgresql(): + db_dump_file = temp_path / "database.dump" + _dump_postgresql(db_dump_file) + db_type = "postgresql" + else: + db_dump_file = temp_path / "database.sqlite3" + _dump_sqlite(db_dump_file) + db_type = "sqlite" + + # Create ZIP archive with compression and ZIP64 support for large files + with ZipFile(backup_file, "w", compression=ZIP_DEFLATED, allowZip64=True) as zip_file: + # Add database dump + zip_file.write(db_dump_file, db_dump_file.name) + + # Add metadata + metadata = { + "format": "dispatcharr-backup", + "version": 2, + "database_type": db_type, + "database_file": db_dump_file.name, + "created_at": datetime.datetime.now(datetime.UTC).isoformat(), + } + zip_file.writestr("metadata.json", json.dumps(metadata, indent=2)) + + # Add data directories + for data_dir in get_data_dirs(): + logger.debug(f"Adding directory: {data_dir}") + for file_path in data_dir.rglob("*"): + if file_path.is_file(): + arcname = f"data/{data_dir.name}/{file_path.relative_to(data_dir)}" + zip_file.write(file_path, arcname) + + logger.info(f"Backup created successfully: {backup_file}") + return backup_file + + +def restore_backup(backup_file: Path) -> None: + """ + Restore from a backup archive. + WARNING: This will overwrite the database! 
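+
+    Order of operations: the archive is unpacked to a temp directory, the
+    database dump is restored first, and each configured data directory is
+    then replaced wholesale with the copy extracted from the archive.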
+ """ + if not backup_file.exists(): + raise FileNotFoundError(f"Backup file not found: {backup_file}") + + logger.info(f"Restoring from backup: {backup_file}") + + with tempfile.TemporaryDirectory(prefix="dispatcharr-restore-") as temp_dir: + temp_path = Path(temp_dir) + + # Extract backup + logger.debug("Extracting backup archive...") + with ZipFile(backup_file, "r") as zip_file: + zip_file.extractall(temp_path) + + # Read metadata + metadata_file = temp_path / "metadata.json" + if not metadata_file.exists(): + raise ValueError("Invalid backup: missing metadata.json") + + with open(metadata_file) as f: + metadata = json.load(f) + + # Restore database + _restore_database(temp_path, metadata) + + # Restore data directories + data_root = temp_path / "data" + if data_root.exists(): + logger.info("Restoring data directories...") + for extracted_dir in data_root.iterdir(): + if not extracted_dir.is_dir(): + continue + + target_name = extracted_dir.name + data_dirs = get_data_dirs() + matching = [d for d in data_dirs if d.name == target_name] + + if not matching: + logger.warning(f"No configured directory for {target_name}, skipping") + continue + + target = matching[0] + logger.debug(f"Restoring {target_name} to {target}") + + # Create parent directory if needed + target.parent.mkdir(parents=True, exist_ok=True) + + # Remove existing and copy from backup + if target.exists(): + shutil.rmtree(target) + shutil.copytree(extracted_dir, target) + + logger.info("Restore completed successfully") + + +def _restore_database(temp_path: Path, metadata: dict) -> None: + """Restore database from backup.""" + db_type = metadata.get("database_type", "postgresql") + db_file = metadata.get("database_file", "database.dump") + dump_file = temp_path / db_file + + if not dump_file.exists(): + raise ValueError(f"Invalid backup: missing {db_file}") + + current_db_type = "postgresql" if _is_postgresql() else "sqlite" + + if db_type != current_db_type: + raise ValueError( + f"Database type mismatch: backup is {db_type}, " + f"but current database is {current_db_type}" + ) + + if db_type == "postgresql": + _restore_postgresql(dump_file) + else: + _restore_sqlite(dump_file) + + +def list_backups() -> list[dict]: + """List all available backup files with metadata.""" + backup_dir = get_backup_dir() + backups = [] + + for backup_file in sorted(backup_dir.glob("dispatcharr-backup-*.zip"), reverse=True): + backups.append({ + "name": backup_file.name, + "size": backup_file.stat().st_size, + "created": datetime.datetime.fromtimestamp(backup_file.stat().st_mtime).isoformat(), + }) + + return backups + + +def delete_backup(filename: str) -> None: + """Delete a backup file.""" + backup_dir = get_backup_dir() + backup_file = backup_dir / filename + + if not backup_file.exists(): + raise FileNotFoundError(f"Backup file not found: {filename}") + + if not backup_file.is_file(): + raise ValueError(f"Invalid backup file: {filename}") + + backup_file.unlink() + logger.info(f"Deleted backup: {filename}") diff --git a/apps/backups/tasks.py b/apps/backups/tasks.py new file mode 100644 index 00000000..f531fef8 --- /dev/null +++ b/apps/backups/tasks.py @@ -0,0 +1,106 @@ +import logging +import traceback +from celery import shared_task + +from . import services + +logger = logging.getLogger(__name__) + + +def _cleanup_old_backups(retention_count: int) -> int: + """Delete old backups, keeping only the most recent N. 
Returns count deleted.""" + if retention_count <= 0: + return 0 + + backups = services.list_backups() + if len(backups) <= retention_count: + return 0 + + # Backups are sorted newest first, so delete from the end + to_delete = backups[retention_count:] + deleted = 0 + + for backup in to_delete: + try: + services.delete_backup(backup["name"]) + deleted += 1 + logger.info(f"[CLEANUP] Deleted old backup: {backup['name']}") + except Exception as e: + logger.error(f"[CLEANUP] Failed to delete {backup['name']}: {e}") + + return deleted + + +@shared_task(bind=True) +def create_backup_task(self): + """Celery task to create a backup asynchronously.""" + try: + logger.info(f"[BACKUP] Starting backup task {self.request.id}") + backup_file = services.create_backup() + logger.info(f"[BACKUP] Task {self.request.id} completed: {backup_file.name}") + return { + "status": "completed", + "filename": backup_file.name, + "size": backup_file.stat().st_size, + } + except Exception as e: + logger.error(f"[BACKUP] Task {self.request.id} failed: {str(e)}") + logger.error(f"[BACKUP] Traceback: {traceback.format_exc()}") + return { + "status": "failed", + "error": str(e), + } + + +@shared_task(bind=True) +def restore_backup_task(self, filename: str): + """Celery task to restore a backup asynchronously.""" + try: + logger.info(f"[RESTORE] Starting restore task {self.request.id} for {filename}") + backup_dir = services.get_backup_dir() + backup_file = backup_dir / filename + logger.info(f"[RESTORE] Backup file path: {backup_file}") + services.restore_backup(backup_file) + logger.info(f"[RESTORE] Task {self.request.id} completed successfully") + return { + "status": "completed", + "filename": filename, + } + except Exception as e: + logger.error(f"[RESTORE] Task {self.request.id} failed: {str(e)}") + logger.error(f"[RESTORE] Traceback: {traceback.format_exc()}") + return { + "status": "failed", + "error": str(e), + } + + +@shared_task(bind=True) +def scheduled_backup_task(self, retention_count: int = 0): + """Celery task for scheduled backups with optional retention cleanup.""" + try: + logger.info(f"[SCHEDULED] Starting scheduled backup task {self.request.id}") + + # Create backup + backup_file = services.create_backup() + logger.info(f"[SCHEDULED] Backup created: {backup_file.name}") + + # Cleanup old backups if retention is set + deleted = 0 + if retention_count > 0: + deleted = _cleanup_old_backups(retention_count) + logger.info(f"[SCHEDULED] Cleanup complete, deleted {deleted} old backup(s)") + + return { + "status": "completed", + "filename": backup_file.name, + "size": backup_file.stat().st_size, + "deleted_count": deleted, + } + except Exception as e: + logger.error(f"[SCHEDULED] Task {self.request.id} failed: {str(e)}") + logger.error(f"[SCHEDULED] Traceback: {traceback.format_exc()}") + return { + "status": "failed", + "error": str(e), + } diff --git a/apps/backups/tests.py b/apps/backups/tests.py new file mode 100644 index 00000000..a06bb7d2 --- /dev/null +++ b/apps/backups/tests.py @@ -0,0 +1,1010 @@ +import json +import tempfile +from io import BytesIO +from pathlib import Path +from zipfile import ZipFile +from unittest.mock import patch, MagicMock + +from django.test import TestCase +from django.contrib.auth import get_user_model +from rest_framework.test import APIClient +from rest_framework_simplejwt.tokens import RefreshToken + +from . 
import services + +User = get_user_model() + + +class BackupServicesTestCase(TestCase): + """Test cases for backup services""" + + def setUp(self): + self.temp_backup_dir = tempfile.mkdtemp() + self.temp_data_dir = tempfile.mkdtemp() + + def tearDown(self): + import shutil + if Path(self.temp_backup_dir).exists(): + shutil.rmtree(self.temp_backup_dir) + if Path(self.temp_data_dir).exists(): + shutil.rmtree(self.temp_data_dir) + + @patch('apps.backups.services.settings') + def test_get_backup_dir_creates_directory(self, mock_settings): + """Test that get_backup_dir creates the directory if it doesn't exist""" + mock_settings.BACKUP_ROOT = self.temp_backup_dir + + with patch('apps.backups.services.Path') as mock_path: + mock_path_instance = MagicMock() + mock_path_instance.mkdir = MagicMock() + mock_path.return_value = mock_path_instance + + services.get_backup_dir() + mock_path_instance.mkdir.assert_called_once_with(parents=True, exist_ok=True) + + @patch('apps.backups.services.settings') + def test_get_data_dirs_with_empty_config(self, mock_settings): + """Test that get_data_dirs returns empty list when no dirs configured""" + mock_settings.BACKUP_DATA_DIRS = [] + result = services.get_data_dirs() + self.assertEqual(result, []) + + @patch('apps.backups.services.settings') + def test_get_data_dirs_filters_nonexistent(self, mock_settings): + """Test that get_data_dirs filters out non-existent directories""" + nonexistent_dir = '/tmp/does-not-exist-12345' + mock_settings.BACKUP_DATA_DIRS = [self.temp_data_dir, nonexistent_dir] + + result = services.get_data_dirs() + self.assertEqual(len(result), 1) + self.assertEqual(str(result[0]), self.temp_data_dir) + + @patch('apps.backups.services.get_backup_dir') + @patch('apps.backups.services.get_data_dirs') + @patch('apps.backups.services._is_postgresql') + @patch('apps.backups.services._dump_sqlite') + def test_create_backup_success_sqlite(self, mock_dump_sqlite, mock_is_pg, mock_get_data_dirs, mock_get_backup_dir): + """Test successful backup creation with SQLite""" + mock_get_backup_dir.return_value = Path(self.temp_backup_dir) + mock_get_data_dirs.return_value = [] + mock_is_pg.return_value = False + + # Mock SQLite dump to create a temp file + def mock_dump(output_file): + output_file.write_text("sqlite dump") + + mock_dump_sqlite.side_effect = mock_dump + + result = services.create_backup() + + self.assertIsInstance(result, Path) + self.assertTrue(result.exists()) + self.assertTrue(result.name.startswith('dispatcharr-backup-')) + self.assertTrue(result.name.endswith('.zip')) + + # Verify the backup contains expected files + with ZipFile(result, 'r') as zf: + names = zf.namelist() + self.assertIn('database.sqlite3', names) + self.assertIn('metadata.json', names) + + # Check metadata + metadata = json.loads(zf.read('metadata.json')) + self.assertEqual(metadata['version'], 2) + self.assertEqual(metadata['database_type'], 'sqlite') + + @patch('apps.backups.services.get_backup_dir') + @patch('apps.backups.services.get_data_dirs') + @patch('apps.backups.services._is_postgresql') + @patch('apps.backups.services._dump_postgresql') + def test_create_backup_success_postgresql(self, mock_dump_pg, mock_is_pg, mock_get_data_dirs, mock_get_backup_dir): + """Test successful backup creation with PostgreSQL""" + mock_get_backup_dir.return_value = Path(self.temp_backup_dir) + mock_get_data_dirs.return_value = [] + mock_is_pg.return_value = True + + # Mock PostgreSQL dump to create a temp file + def mock_dump(output_file): + output_file.write_bytes(b"pg dump 
data") + + mock_dump_pg.side_effect = mock_dump + + result = services.create_backup() + + self.assertIsInstance(result, Path) + self.assertTrue(result.exists()) + + # Verify the backup contains expected files + with ZipFile(result, 'r') as zf: + names = zf.namelist() + self.assertIn('database.dump', names) + self.assertIn('metadata.json', names) + + # Check metadata + metadata = json.loads(zf.read('metadata.json')) + self.assertEqual(metadata['version'], 2) + self.assertEqual(metadata['database_type'], 'postgresql') + + @patch('apps.backups.services.get_backup_dir') + def test_list_backups_empty(self, mock_get_backup_dir): + """Test listing backups when none exist""" + mock_get_backup_dir.return_value = Path(self.temp_backup_dir) + + result = services.list_backups() + + self.assertEqual(result, []) + + @patch('apps.backups.services.get_backup_dir') + def test_list_backups_with_files(self, mock_get_backup_dir): + """Test listing backups with existing backup files""" + backup_dir = Path(self.temp_backup_dir) + mock_get_backup_dir.return_value = backup_dir + + # Create a fake backup file + test_backup = backup_dir / "dispatcharr-backup-2025.01.01.12.00.00.zip" + test_backup.write_text("fake backup content") + + result = services.list_backups() + + self.assertEqual(len(result), 1) + self.assertEqual(result[0]['name'], test_backup.name) + self.assertIn('size', result[0]) + self.assertIn('created', result[0]) + + @patch('apps.backups.services.get_backup_dir') + def test_delete_backup_success(self, mock_get_backup_dir): + """Test successful backup deletion""" + backup_dir = Path(self.temp_backup_dir) + mock_get_backup_dir.return_value = backup_dir + + # Create a fake backup file + test_backup = backup_dir / "dispatcharr-backup-test.zip" + test_backup.write_text("fake backup content") + + self.assertTrue(test_backup.exists()) + + services.delete_backup(test_backup.name) + + self.assertFalse(test_backup.exists()) + + @patch('apps.backups.services.get_backup_dir') + def test_delete_backup_not_found(self, mock_get_backup_dir): + """Test deleting a non-existent backup raises error""" + mock_get_backup_dir.return_value = Path(self.temp_backup_dir) + + with self.assertRaises(FileNotFoundError): + services.delete_backup("nonexistent-backup.zip") + + @patch('apps.backups.services.get_backup_dir') + @patch('apps.backups.services.get_data_dirs') + @patch('apps.backups.services._is_postgresql') + @patch('apps.backups.services._restore_postgresql') + def test_restore_backup_postgresql(self, mock_restore_pg, mock_is_pg, mock_get_data_dirs, mock_get_backup_dir): + """Test successful restoration of PostgreSQL backup""" + backup_dir = Path(self.temp_backup_dir) + mock_get_backup_dir.return_value = backup_dir + mock_get_data_dirs.return_value = [] + mock_is_pg.return_value = True + + # Create PostgreSQL backup file + backup_file = backup_dir / "test-backup.zip" + with ZipFile(backup_file, 'w') as zf: + zf.writestr('database.dump', b'pg dump data') + zf.writestr('metadata.json', json.dumps({ + 'version': 2, + 'database_type': 'postgresql', + 'database_file': 'database.dump' + })) + + services.restore_backup(backup_file) + + mock_restore_pg.assert_called_once() + + @patch('apps.backups.services.get_backup_dir') + @patch('apps.backups.services.get_data_dirs') + @patch('apps.backups.services._is_postgresql') + @patch('apps.backups.services._restore_sqlite') + def test_restore_backup_sqlite(self, mock_restore_sqlite, mock_is_pg, mock_get_data_dirs, mock_get_backup_dir): + """Test successful restoration of SQLite 
backup""" + backup_dir = Path(self.temp_backup_dir) + mock_get_backup_dir.return_value = backup_dir + mock_get_data_dirs.return_value = [] + mock_is_pg.return_value = False + + # Create SQLite backup file + backup_file = backup_dir / "test-backup.zip" + with ZipFile(backup_file, 'w') as zf: + zf.writestr('database.sqlite3', 'sqlite data') + zf.writestr('metadata.json', json.dumps({ + 'version': 2, + 'database_type': 'sqlite', + 'database_file': 'database.sqlite3' + })) + + services.restore_backup(backup_file) + + mock_restore_sqlite.assert_called_once() + + @patch('apps.backups.services.get_backup_dir') + @patch('apps.backups.services.get_data_dirs') + @patch('apps.backups.services._is_postgresql') + def test_restore_backup_database_type_mismatch(self, mock_is_pg, mock_get_data_dirs, mock_get_backup_dir): + """Test restore fails when database type doesn't match""" + backup_dir = Path(self.temp_backup_dir) + mock_get_backup_dir.return_value = backup_dir + mock_get_data_dirs.return_value = [] + mock_is_pg.return_value = True # Current system is PostgreSQL + + # Create SQLite backup file + backup_file = backup_dir / "test-backup.zip" + with ZipFile(backup_file, 'w') as zf: + zf.writestr('database.sqlite3', 'sqlite data') + zf.writestr('metadata.json', json.dumps({ + 'version': 2, + 'database_type': 'sqlite', # Backup is SQLite + 'database_file': 'database.sqlite3' + })) + + with self.assertRaises(ValueError) as context: + services.restore_backup(backup_file) + + self.assertIn('mismatch', str(context.exception).lower()) + + def test_restore_backup_not_found(self): + """Test restoring from non-existent backup file""" + fake_path = Path("/tmp/nonexistent-backup-12345.zip") + + with self.assertRaises(FileNotFoundError): + services.restore_backup(fake_path) + + @patch('apps.backups.services.get_backup_dir') + def test_restore_backup_missing_metadata(self, mock_get_backup_dir): + """Test restoring from backup without metadata.json""" + backup_dir = Path(self.temp_backup_dir) + mock_get_backup_dir.return_value = backup_dir + + # Create a backup file missing metadata.json + backup_file = backup_dir / "invalid-backup.zip" + with ZipFile(backup_file, 'w') as zf: + zf.writestr('database.dump', b'fake dump data') + + with self.assertRaises(ValueError) as context: + services.restore_backup(backup_file) + + self.assertIn('metadata.json', str(context.exception)) + + @patch('apps.backups.services.get_backup_dir') + @patch('apps.backups.services._is_postgresql') + def test_restore_backup_missing_database(self, mock_is_pg, mock_get_backup_dir): + """Test restoring from backup missing database dump""" + backup_dir = Path(self.temp_backup_dir) + mock_get_backup_dir.return_value = backup_dir + mock_is_pg.return_value = True + + # Create backup file missing database dump + backup_file = backup_dir / "invalid-backup.zip" + with ZipFile(backup_file, 'w') as zf: + zf.writestr('metadata.json', json.dumps({ + 'version': 2, + 'database_type': 'postgresql', + 'database_file': 'database.dump' + })) + + with self.assertRaises(ValueError) as context: + services.restore_backup(backup_file) + + self.assertIn('database.dump', str(context.exception)) + + +class BackupAPITestCase(TestCase): + """Test cases for backup API endpoints""" + + def setUp(self): + self.client = APIClient() + self.user = User.objects.create_user( + username='testuser', + email='test@example.com', + password='testpass123' + ) + self.admin_user = User.objects.create_superuser( + username='admin', + email='admin@example.com', + password='adminpass123' + ) + 
self.temp_backup_dir = tempfile.mkdtemp() + + def get_auth_header(self, user): + """Helper method to get JWT auth header for a user""" + refresh = RefreshToken.for_user(user) + return f'Bearer {str(refresh.access_token)}' + + def tearDown(self): + import shutil + if Path(self.temp_backup_dir).exists(): + shutil.rmtree(self.temp_backup_dir) + + def test_list_backups_requires_admin(self): + """Test that listing backups requires admin privileges""" + url = '/api/backups/' + + # Unauthenticated request + response = self.client.get(url) + self.assertIn(response.status_code, [401, 403]) + + # Regular user request + response = self.client.get(url, HTTP_AUTHORIZATION=self.get_auth_header(self.user)) + self.assertIn(response.status_code, [401, 403]) + + @patch('apps.backups.services.list_backups') + def test_list_backups_success(self, mock_list_backups): + """Test successful backup listing""" + mock_list_backups.return_value = [ + { + 'name': 'backup-test.zip', + 'size': 1024, + 'created': '2025-01-01T12:00:00' + } + ] + + auth_header = self.get_auth_header(self.admin_user) + url = '/api/backups/' + response = self.client.get(url, HTTP_AUTHORIZATION=auth_header) + + self.assertEqual(response.status_code, 200) + data = response.json() + self.assertEqual(len(data), 1) + self.assertEqual(data[0]['name'], 'backup-test.zip') + + def test_create_backup_requires_admin(self): + """Test that creating backups requires admin privileges""" + url = '/api/backups/create/' + + # Unauthenticated request + response = self.client.post(url) + self.assertIn(response.status_code, [401, 403]) + + # Regular user request + response = self.client.post(url, HTTP_AUTHORIZATION=self.get_auth_header(self.user)) + self.assertIn(response.status_code, [401, 403]) + + @patch('apps.backups.tasks.create_backup_task.delay') + def test_create_backup_success(self, mock_create_task): + """Test successful backup creation via API (async task)""" + mock_task = MagicMock() + mock_task.id = 'test-task-id-123' + mock_create_task.return_value = mock_task + + auth_header = self.get_auth_header(self.admin_user) + url = '/api/backups/create/' + response = self.client.post(url, HTTP_AUTHORIZATION=auth_header) + + self.assertEqual(response.status_code, 202) + data = response.json() + self.assertIn('task_id', data) + self.assertIn('task_token', data) + self.assertEqual(data['task_id'], 'test-task-id-123') + + @patch('apps.backups.tasks.create_backup_task.delay') + def test_create_backup_failure(self, mock_create_task): + """Test backup creation failure handling""" + mock_create_task.side_effect = Exception("Failed to start task") + + auth_header = self.get_auth_header(self.admin_user) + url = '/api/backups/create/' + response = self.client.post(url, HTTP_AUTHORIZATION=auth_header) + + self.assertEqual(response.status_code, 500) + data = response.json() + self.assertIn('detail', data) + + @patch('apps.backups.services.get_backup_dir') + def test_download_backup_success(self, mock_get_backup_dir): + """Test successful backup download""" + backup_dir = Path(self.temp_backup_dir) + mock_get_backup_dir.return_value = backup_dir + + # Create a test backup file + backup_file = backup_dir / "test-backup.zip" + backup_file.write_text("test backup content") + + auth_header = self.get_auth_header(self.admin_user) + url = '/api/backups/test-backup.zip/download/' + response = self.client.get(url, HTTP_AUTHORIZATION=auth_header) + + self.assertEqual(response.status_code, 200) + self.assertEqual(response['Content-Type'], 'application/zip') + + 
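+    # Sketch of a follow-on assertion (a hypothetical test, not part of the
+    # original suite): both the X-Accel-Redirect and the streaming branches of
+    # the download view set Content-Disposition, so the served file should
+    # keep its name.
+    @patch('apps.backups.services.get_backup_dir')
+    def test_download_backup_content_disposition(self, mock_get_backup_dir):
+        """Download responses should name the file via Content-Disposition."""
+        backup_dir = Path(self.temp_backup_dir)
+        mock_get_backup_dir.return_value = backup_dir
+        (backup_dir / "test-backup.zip").write_text("test backup content")
+
+        auth_header = self.get_auth_header(self.admin_user)
+        url = '/api/backups/test-backup.zip/download/'
+        response = self.client.get(url, HTTP_AUTHORIZATION=auth_header)
+
+        self.assertEqual(response.status_code, 200)
+        self.assertEqual(
+            response['Content-Disposition'],
+            'attachment; filename="test-backup.zip"',
+        )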
@patch('apps.backups.services.get_backup_dir') + def test_download_backup_not_found(self, mock_get_backup_dir): + """Test downloading non-existent backup""" + mock_get_backup_dir.return_value = Path(self.temp_backup_dir) + + auth_header = self.get_auth_header(self.admin_user) + url = '/api/backups/nonexistent.zip/download/' + response = self.client.get(url, HTTP_AUTHORIZATION=auth_header) + + self.assertEqual(response.status_code, 404) + + @patch('apps.backups.services.delete_backup') + def test_delete_backup_success(self, mock_delete_backup): + """Test successful backup deletion via API""" + mock_delete_backup.return_value = None + + auth_header = self.get_auth_header(self.admin_user) + url = '/api/backups/test-backup.zip/delete/' + response = self.client.delete(url, HTTP_AUTHORIZATION=auth_header) + + self.assertEqual(response.status_code, 204) + mock_delete_backup.assert_called_once_with('test-backup.zip') + + @patch('apps.backups.services.delete_backup') + def test_delete_backup_not_found(self, mock_delete_backup): + """Test deleting non-existent backup via API""" + mock_delete_backup.side_effect = FileNotFoundError("Not found") + + auth_header = self.get_auth_header(self.admin_user) + url = '/api/backups/nonexistent.zip/delete/' + response = self.client.delete(url, HTTP_AUTHORIZATION=auth_header) + + self.assertEqual(response.status_code, 404) + + def test_upload_backup_requires_file(self): + """Test that upload requires a file""" + auth_header = self.get_auth_header(self.admin_user) + url = '/api/backups/upload/' + response = self.client.post(url, HTTP_AUTHORIZATION=auth_header) + + self.assertEqual(response.status_code, 400) + data = response.json() + self.assertIn('No file uploaded', data['detail']) + + @patch('apps.backups.services.get_backup_dir') + def test_upload_backup_success(self, mock_get_backup_dir): + """Test successful backup upload""" + mock_get_backup_dir.return_value = Path(self.temp_backup_dir) + + # Create a fake backup file + fake_backup = BytesIO(b"fake backup content") + fake_backup.name = 'uploaded-backup.zip' + + auth_header = self.get_auth_header(self.admin_user) + url = '/api/backups/upload/' + response = self.client.post(url, {'file': fake_backup}, HTTP_AUTHORIZATION=auth_header) + + self.assertEqual(response.status_code, 201) + data = response.json() + self.assertIn('filename', data) + + @patch('apps.backups.services.get_backup_dir') + @patch('apps.backups.tasks.restore_backup_task.delay') + def test_restore_backup_success(self, mock_restore_task, mock_get_backup_dir): + """Test successful backup restoration via API (async task)""" + backup_dir = Path(self.temp_backup_dir) + mock_get_backup_dir.return_value = backup_dir + + mock_task = MagicMock() + mock_task.id = 'test-restore-task-456' + mock_restore_task.return_value = mock_task + + # Create a test backup file + backup_file = backup_dir / "test-backup.zip" + backup_file.write_text("test backup content") + + auth_header = self.get_auth_header(self.admin_user) + url = '/api/backups/test-backup.zip/restore/' + response = self.client.post(url, HTTP_AUTHORIZATION=auth_header) + + self.assertEqual(response.status_code, 202) + data = response.json() + self.assertIn('task_id', data) + self.assertIn('task_token', data) + self.assertEqual(data['task_id'], 'test-restore-task-456') + + @patch('apps.backups.services.get_backup_dir') + def test_restore_backup_not_found(self, mock_get_backup_dir): + """Test restoring from non-existent backup via API""" + mock_get_backup_dir.return_value = Path(self.temp_backup_dir) + + 
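+        # No backup file is created here, so the view should 404 before
+        # ever queuing a Celery restore task.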
auth_header = self.get_auth_header(self.admin_user) + url = '/api/backups/nonexistent.zip/restore/' + response = self.client.post(url, HTTP_AUTHORIZATION=auth_header) + + self.assertEqual(response.status_code, 404) + + # --- Backup Status Endpoint Tests --- + + def test_backup_status_requires_auth_or_token(self): + """Test that backup_status requires auth or valid token""" + url = '/api/backups/status/fake-task-id/' + + # Unauthenticated request without token + response = self.client.get(url) + self.assertEqual(response.status_code, 401) + + def test_backup_status_invalid_token(self): + """Test that backup_status rejects invalid tokens""" + url = '/api/backups/status/fake-task-id/?token=invalid-token' + response = self.client.get(url) + self.assertEqual(response.status_code, 403) + + @patch('apps.backups.api_views.AsyncResult') + def test_backup_status_with_admin_auth(self, mock_async_result): + """Test backup_status with admin authentication""" + mock_result = MagicMock() + mock_result.ready.return_value = False + mock_result.failed.return_value = False + mock_result.state = 'PENDING' + mock_async_result.return_value = mock_result + + auth_header = self.get_auth_header(self.admin_user) + url = '/api/backups/status/test-task-id/' + response = self.client.get(url, HTTP_AUTHORIZATION=auth_header) + + self.assertEqual(response.status_code, 200) + data = response.json() + self.assertEqual(data['state'], 'pending') + + @patch('apps.backups.api_views.AsyncResult') + @patch('apps.backups.api_views._verify_task_token') + def test_backup_status_with_valid_token(self, mock_verify, mock_async_result): + """Test backup_status with valid token""" + mock_verify.return_value = True + mock_result = MagicMock() + mock_result.ready.return_value = True + mock_result.get.return_value = {'status': 'completed', 'filename': 'test.zip'} + mock_async_result.return_value = mock_result + + url = '/api/backups/status/test-task-id/?token=valid-token' + response = self.client.get(url) + + self.assertEqual(response.status_code, 200) + data = response.json() + self.assertEqual(data['state'], 'completed') + + @patch('apps.backups.api_views.AsyncResult') + def test_backup_status_task_failed(self, mock_async_result): + """Test backup_status when task failed""" + mock_result = MagicMock() + mock_result.ready.return_value = True + mock_result.get.return_value = {'status': 'failed', 'error': 'Something went wrong'} + mock_async_result.return_value = mock_result + + auth_header = self.get_auth_header(self.admin_user) + url = '/api/backups/status/test-task-id/' + response = self.client.get(url, HTTP_AUTHORIZATION=auth_header) + + self.assertEqual(response.status_code, 200) + data = response.json() + self.assertEqual(data['state'], 'failed') + self.assertIn('Something went wrong', data['error']) + + # --- Download Token Endpoint Tests --- + + def test_get_download_token_requires_admin(self): + """Test that get_download_token requires admin privileges""" + url = '/api/backups/test.zip/download-token/' + + response = self.client.get(url) + self.assertIn(response.status_code, [401, 403]) + + response = self.client.get(url, HTTP_AUTHORIZATION=self.get_auth_header(self.user)) + self.assertIn(response.status_code, [401, 403]) + + @patch('apps.backups.services.get_backup_dir') + def test_get_download_token_success(self, mock_get_backup_dir): + """Test successful download token generation""" + backup_dir = Path(self.temp_backup_dir) + mock_get_backup_dir.return_value = backup_dir + + # Create a test backup file + backup_file = backup_dir 
/ "test-backup.zip" + backup_file.write_text("test content") + + auth_header = self.get_auth_header(self.admin_user) + url = '/api/backups/test-backup.zip/download-token/' + response = self.client.get(url, HTTP_AUTHORIZATION=auth_header) + + self.assertEqual(response.status_code, 200) + data = response.json() + self.assertIn('token', data) + self.assertEqual(len(data['token']), 32) + + @patch('apps.backups.services.get_backup_dir') + def test_get_download_token_not_found(self, mock_get_backup_dir): + """Test download token for non-existent file""" + mock_get_backup_dir.return_value = Path(self.temp_backup_dir) + + auth_header = self.get_auth_header(self.admin_user) + url = '/api/backups/nonexistent.zip/download-token/' + response = self.client.get(url, HTTP_AUTHORIZATION=auth_header) + + self.assertEqual(response.status_code, 404) + + # --- Download with Token Auth Tests --- + + @patch('apps.backups.services.get_backup_dir') + @patch('apps.backups.api_views._verify_task_token') + def test_download_backup_with_valid_token(self, mock_verify, mock_get_backup_dir): + """Test downloading backup with valid token (no auth header)""" + backup_dir = Path(self.temp_backup_dir) + mock_get_backup_dir.return_value = backup_dir + mock_verify.return_value = True + + # Create a test backup file + backup_file = backup_dir / "test-backup.zip" + backup_file.write_text("test backup content") + + url = '/api/backups/test-backup.zip/download/?token=valid-token' + response = self.client.get(url) + + self.assertEqual(response.status_code, 200) + + @patch('apps.backups.services.get_backup_dir') + def test_download_backup_invalid_token(self, mock_get_backup_dir): + """Test downloading backup with invalid token""" + mock_get_backup_dir.return_value = Path(self.temp_backup_dir) + + url = '/api/backups/test-backup.zip/download/?token=invalid-token' + response = self.client.get(url) + + self.assertEqual(response.status_code, 403) + + @patch('apps.backups.services.get_backup_dir') + @patch('apps.backups.tasks.restore_backup_task.delay') + def test_restore_backup_task_start_failure(self, mock_restore_task, mock_get_backup_dir): + """Test restore task start failure via API""" + backup_dir = Path(self.temp_backup_dir) + mock_get_backup_dir.return_value = backup_dir + mock_restore_task.side_effect = Exception("Failed to start restore task") + + # Create a test backup file + backup_file = backup_dir / "test-backup.zip" + backup_file.write_text("test content") + + auth_header = self.get_auth_header(self.admin_user) + url = '/api/backups/test-backup.zip/restore/' + response = self.client.post(url, HTTP_AUTHORIZATION=auth_header) + + self.assertEqual(response.status_code, 500) + data = response.json() + self.assertIn('detail', data) + + def test_get_schedule_requires_admin(self): + """Test that getting schedule requires admin privileges""" + url = '/api/backups/schedule/' + + # Unauthenticated request + response = self.client.get(url) + self.assertIn(response.status_code, [401, 403]) + + # Regular user request + response = self.client.get(url, HTTP_AUTHORIZATION=self.get_auth_header(self.user)) + self.assertIn(response.status_code, [401, 403]) + + @patch('apps.backups.api_views.get_schedule_settings') + def test_get_schedule_success(self, mock_get_settings): + """Test successful schedule retrieval""" + mock_get_settings.return_value = { + 'enabled': True, + 'frequency': 'daily', + 'time': '03:00', + 'day_of_week': 0, + 'retention_count': 5, + } + + auth_header = self.get_auth_header(self.admin_user) + url = 
'/api/backups/schedule/' + response = self.client.get(url, HTTP_AUTHORIZATION=auth_header) + + self.assertEqual(response.status_code, 200) + data = response.json() + self.assertEqual(data['enabled'], True) + self.assertEqual(data['frequency'], 'daily') + self.assertEqual(data['retention_count'], 5) + + def test_update_schedule_requires_admin(self): + """Test that updating schedule requires admin privileges""" + url = '/api/backups/schedule/update/' + + # Unauthenticated request + response = self.client.put(url, {}, content_type='application/json') + self.assertIn(response.status_code, [401, 403]) + + # Regular user request + response = self.client.put( + url, + {}, + content_type='application/json', + HTTP_AUTHORIZATION=self.get_auth_header(self.user) + ) + self.assertIn(response.status_code, [401, 403]) + + @patch('apps.backups.api_views.update_schedule_settings') + def test_update_schedule_success(self, mock_update_settings): + """Test successful schedule update""" + mock_update_settings.return_value = { + 'enabled': True, + 'frequency': 'weekly', + 'time': '02:00', + 'day_of_week': 1, + 'retention_count': 10, + } + + auth_header = self.get_auth_header(self.admin_user) + url = '/api/backups/schedule/update/' + response = self.client.put( + url, + {'enabled': True, 'frequency': 'weekly', 'time': '02:00', 'day_of_week': 1, 'retention_count': 10}, + content_type='application/json', + HTTP_AUTHORIZATION=auth_header + ) + + self.assertEqual(response.status_code, 200) + data = response.json() + self.assertEqual(data['frequency'], 'weekly') + self.assertEqual(data['day_of_week'], 1) + + @patch('apps.backups.api_views.update_schedule_settings') + def test_update_schedule_validation_error(self, mock_update_settings): + """Test schedule update with invalid data""" + mock_update_settings.side_effect = ValueError("frequency must be 'daily' or 'weekly'") + + auth_header = self.get_auth_header(self.admin_user) + url = '/api/backups/schedule/update/' + response = self.client.put( + url, + {'frequency': 'invalid'}, + content_type='application/json', + HTTP_AUTHORIZATION=auth_header + ) + + self.assertEqual(response.status_code, 400) + data = response.json() + self.assertIn('frequency', data['detail']) + + +class BackupSchedulerTestCase(TestCase): + """Test cases for backup scheduler""" + + def setUp(self): + from core.models import CoreSettings + # Clean up any existing settings + CoreSettings.objects.filter(key__startswith='backup_').delete() + + def tearDown(self): + from core.models import CoreSettings + from django_celery_beat.models import PeriodicTask + CoreSettings.objects.filter(key__startswith='backup_').delete() + PeriodicTask.objects.filter(name='backup-scheduled-task').delete() + + def test_get_schedule_settings_defaults(self): + """Test that get_schedule_settings returns defaults when no settings exist""" + from . import scheduler + + settings = scheduler.get_schedule_settings() + + self.assertEqual(settings['enabled'], False) + self.assertEqual(settings['frequency'], 'daily') + self.assertEqual(settings['time'], '03:00') + self.assertEqual(settings['day_of_week'], 0) + self.assertEqual(settings['retention_count'], 0) + + def test_update_schedule_settings_stores_values(self): + """Test that update_schedule_settings stores values correctly""" + from . 
import scheduler + + result = scheduler.update_schedule_settings({ + 'enabled': True, + 'frequency': 'weekly', + 'time': '04:30', + 'day_of_week': 3, + 'retention_count': 7, + }) + + self.assertEqual(result['enabled'], True) + self.assertEqual(result['frequency'], 'weekly') + self.assertEqual(result['time'], '04:30') + self.assertEqual(result['day_of_week'], 3) + self.assertEqual(result['retention_count'], 7) + + # Verify persistence + settings = scheduler.get_schedule_settings() + self.assertEqual(settings['enabled'], True) + self.assertEqual(settings['frequency'], 'weekly') + + def test_update_schedule_settings_invalid_frequency(self): + """Test that invalid frequency raises ValueError""" + from . import scheduler + + with self.assertRaises(ValueError) as context: + scheduler.update_schedule_settings({'frequency': 'monthly'}) + + self.assertIn('frequency', str(context.exception).lower()) + + def test_update_schedule_settings_invalid_time(self): + """Test that invalid time raises ValueError""" + from . import scheduler + + with self.assertRaises(ValueError) as context: + scheduler.update_schedule_settings({'time': 'invalid'}) + + self.assertIn('HH:MM', str(context.exception)) + + def test_update_schedule_settings_invalid_day_of_week(self): + """Test that invalid day_of_week raises ValueError""" + from . import scheduler + + with self.assertRaises(ValueError) as context: + scheduler.update_schedule_settings({'day_of_week': 7}) + + self.assertIn('day_of_week', str(context.exception).lower()) + + def test_update_schedule_settings_invalid_retention(self): + """Test that negative retention_count raises ValueError""" + from . import scheduler + + with self.assertRaises(ValueError) as context: + scheduler.update_schedule_settings({'retention_count': -1}) + + self.assertIn('retention_count', str(context.exception).lower()) + + def test_sync_creates_periodic_task_when_enabled(self): + """Test that enabling schedule creates a PeriodicTask""" + from . import scheduler + from django_celery_beat.models import PeriodicTask + + scheduler.update_schedule_settings({ + 'enabled': True, + 'frequency': 'daily', + 'time': '05:00', + }) + + task = PeriodicTask.objects.get(name='backup-scheduled-task') + self.assertTrue(task.enabled) + self.assertEqual(task.crontab.hour, '05') + self.assertEqual(task.crontab.minute, '00') + + def test_sync_deletes_periodic_task_when_disabled(self): + """Test that disabling schedule removes PeriodicTask""" + from . import scheduler + from django_celery_beat.models import PeriodicTask + + # First enable + scheduler.update_schedule_settings({ + 'enabled': True, + 'frequency': 'daily', + 'time': '05:00', + }) + + self.assertTrue(PeriodicTask.objects.filter(name='backup-scheduled-task').exists()) + + # Then disable + scheduler.update_schedule_settings({'enabled': False}) + + self.assertFalse(PeriodicTask.objects.filter(name='backup-scheduled-task').exists()) + + def test_weekly_schedule_sets_day_of_week(self): + """Test that weekly schedule sets correct day_of_week in crontab""" + from . 
import scheduler + from django_celery_beat.models import PeriodicTask + + scheduler.update_schedule_settings({ + 'enabled': True, + 'frequency': 'weekly', + 'time': '06:00', + 'day_of_week': 3, # Wednesday + }) + + task = PeriodicTask.objects.get(name='backup-scheduled-task') + self.assertEqual(task.crontab.day_of_week, '3') + + +class BackupTasksTestCase(TestCase): + """Test cases for backup Celery tasks""" + + def setUp(self): + self.temp_backup_dir = tempfile.mkdtemp() + + def tearDown(self): + import shutil + if Path(self.temp_backup_dir).exists(): + shutil.rmtree(self.temp_backup_dir) + + @patch('apps.backups.tasks.services.list_backups') + @patch('apps.backups.tasks.services.delete_backup') + def test_cleanup_old_backups_keeps_recent(self, mock_delete, mock_list): + """Test that cleanup keeps the most recent backups""" + from .tasks import _cleanup_old_backups + + mock_list.return_value = [ + {'name': 'backup-3.zip'}, # newest + {'name': 'backup-2.zip'}, + {'name': 'backup-1.zip'}, # oldest + ] + + deleted = _cleanup_old_backups(retention_count=2) + + self.assertEqual(deleted, 1) + mock_delete.assert_called_once_with('backup-1.zip') + + @patch('apps.backups.tasks.services.list_backups') + @patch('apps.backups.tasks.services.delete_backup') + def test_cleanup_old_backups_does_nothing_when_under_limit(self, mock_delete, mock_list): + """Test that cleanup does nothing when under retention limit""" + from .tasks import _cleanup_old_backups + + mock_list.return_value = [ + {'name': 'backup-2.zip'}, + {'name': 'backup-1.zip'}, + ] + + deleted = _cleanup_old_backups(retention_count=5) + + self.assertEqual(deleted, 0) + mock_delete.assert_not_called() + + @patch('apps.backups.tasks.services.list_backups') + @patch('apps.backups.tasks.services.delete_backup') + def test_cleanup_old_backups_zero_retention_keeps_all(self, mock_delete, mock_list): + """Test that retention_count=0 keeps all backups""" + from .tasks import _cleanup_old_backups + + mock_list.return_value = [ + {'name': 'backup-3.zip'}, + {'name': 'backup-2.zip'}, + {'name': 'backup-1.zip'}, + ] + + deleted = _cleanup_old_backups(retention_count=0) + + self.assertEqual(deleted, 0) + mock_delete.assert_not_called() + + @patch('apps.backups.tasks.services.create_backup') + @patch('apps.backups.tasks._cleanup_old_backups') + def test_scheduled_backup_task_success(self, mock_cleanup, mock_create): + """Test scheduled backup task success""" + from .tasks import scheduled_backup_task + + mock_backup_file = MagicMock() + mock_backup_file.name = 'scheduled-backup.zip' + mock_backup_file.stat.return_value.st_size = 1024 + mock_create.return_value = mock_backup_file + mock_cleanup.return_value = 2 + + result = scheduled_backup_task(retention_count=5) + + self.assertEqual(result['status'], 'completed') + self.assertEqual(result['filename'], 'scheduled-backup.zip') + self.assertEqual(result['size'], 1024) + self.assertEqual(result['deleted_count'], 2) + mock_cleanup.assert_called_once_with(5) + + @patch('apps.backups.tasks.services.create_backup') + @patch('apps.backups.tasks._cleanup_old_backups') + def test_scheduled_backup_task_no_cleanup_when_retention_zero(self, mock_cleanup, mock_create): + """Test scheduled backup skips cleanup when retention is 0""" + from .tasks import scheduled_backup_task + + mock_backup_file = MagicMock() + mock_backup_file.name = 'scheduled-backup.zip' + mock_backup_file.stat.return_value.st_size = 1024 + mock_create.return_value = mock_backup_file + + result = scheduled_backup_task(retention_count=0) + + 
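+        # NOTE: the retention behaviour exercised by the cleanup tests above
+        # could be implemented roughly as the sketch below. Illustrative only,
+        # not the project's actual code -- the real helper is
+        # _cleanup_old_backups() in apps/backups/tasks.py:
+        #
+        #   def _cleanup_old_backups(retention_count):
+        #       """Keep the newest `retention_count` backups, delete the rest."""
+        #       if retention_count <= 0:
+        #           return 0  # 0 means unlimited retention: keep everything
+        #       backups = services.list_backups()  # assumed newest-first
+        #       deleted = 0
+        #       for backup in backups[retention_count:]:
+        #           services.delete_backup(backup['name'])
+        #           deleted += 1
+        #       return deleted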
self.assertEqual(result['status'], 'completed') + self.assertEqual(result['deleted_count'], 0) + mock_cleanup.assert_not_called() + + @patch('apps.backups.tasks.services.create_backup') + def test_scheduled_backup_task_failure(self, mock_create): + """Test scheduled backup task handles failure""" + from .tasks import scheduled_backup_task + + mock_create.side_effect = Exception("Backup failed") + + result = scheduled_backup_task(retention_count=5) + + self.assertEqual(result['status'], 'failed') + self.assertIn('Backup failed', result['error']) diff --git a/dispatcharr/settings.py b/dispatcharr/settings.py index 289c6794..39f814db 100644 --- a/dispatcharr/settings.py +++ b/dispatcharr/settings.py @@ -216,6 +216,13 @@ CELERY_BEAT_SCHEDULE = { MEDIA_ROOT = BASE_DIR / "media" MEDIA_URL = "/media/" +# Backup settings +BACKUP_ROOT = os.environ.get("BACKUP_ROOT", "/data/backups") +BACKUP_DATA_DIRS = [ + os.environ.get("LOGOS_DIR", "/data/logos"), + os.environ.get("UPLOADS_DIR", "/data/uploads"), + os.environ.get("PLUGINS_DIR", "/data/plugins"), +] SERVER_IP = "127.0.0.1" diff --git a/docker/nginx.conf b/docker/nginx.conf index 5e754d20..be221036 100644 --- a/docker/nginx.conf +++ b/docker/nginx.conf @@ -34,6 +34,13 @@ server { root /data; } + # Internal location for X-Accel-Redirect backup downloads + # Django handles auth, nginx serves the file directly + location /protected-backups/ { + internal; + alias /data/backups/; + } + location /api/logos/(?\d+)/cache/ { proxy_pass http://127.0.0.1:5656; proxy_cache logo_cache; diff --git a/docker/uwsgi.ini b/docker/uwsgi.ini index b35ea5bf..3814aaf6 100644 --- a/docker/uwsgi.ini +++ b/docker/uwsgi.ini @@ -20,6 +20,7 @@ module = dispatcharr.wsgi:application virtualenv = /dispatcharrpy master = true env = DJANGO_SETTINGS_MODULE=dispatcharr.settings +env = USE_NGINX_ACCEL=true socket = /app/uwsgi.sock chmod-socket = 777 vacuum = true diff --git a/frontend/src/api.js b/frontend/src/api.js index 01186bf6..b11b59a6 100644 --- a/frontend/src/api.js +++ b/frontend/src/api.js @@ -1290,6 +1290,183 @@ export default class API { } } + // Backup API (async with Celery task polling) + static async listBackups() { + try { + const response = await request(`${host}/api/backups/`); + return response || []; + } catch (e) { + errorNotification('Failed to load backups', e); + throw e; + } + } + + static async getBackupStatus(taskId, token = null) { + try { + let url = `${host}/api/backups/status/${taskId}/`; + if (token) { + url += `?token=${encodeURIComponent(token)}`; + } + const response = await request(url, { auth: !token }); + return response; + } catch (e) { + throw e; + } + } + + static async waitForBackupTask(taskId, onProgress, token = null) { + const pollInterval = 2000; // Poll every 2 seconds + const maxAttempts = 300; // Max 10 minutes (300 * 2s) + + for (let attempt = 0; attempt < maxAttempts; attempt++) { + try { + const status = await API.getBackupStatus(taskId, token); + + if (onProgress) { + onProgress(status); + } + + if (status.state === 'completed') { + return status.result; + } else if (status.state === 'failed') { + throw new Error(status.error || 'Task failed'); + } + } catch (e) { + throw e; + } + + // Wait before next poll + await new Promise((resolve) => setTimeout(resolve, pollInterval)); + } + + throw new Error('Task timed out'); + } + + static async createBackup(onProgress) { + try { + // Start the backup task + const response = await request(`${host}/api/backups/create/`, { + method: 'POST', + }); + + // Wait for the task to complete using 
token for auth + const result = await API.waitForBackupTask(response.task_id, onProgress, response.task_token); + return result; + } catch (e) { + errorNotification('Failed to create backup', e); + throw e; + } + } + + static async uploadBackup(file) { + try { + const formData = new FormData(); + formData.append('file', file); + + const response = await request( + `${host}/api/backups/upload/`, + { + method: 'POST', + body: formData, + } + ); + return response; + } catch (e) { + errorNotification('Failed to upload backup', e); + throw e; + } + } + + static async deleteBackup(filename) { + try { + const encodedFilename = encodeURIComponent(filename); + await request(`${host}/api/backups/${encodedFilename}/delete/`, { + method: 'DELETE', + }); + } catch (e) { + errorNotification('Failed to delete backup', e); + throw e; + } + } + + static async getDownloadToken(filename) { + // Get a download token from the server + try { + const response = await request(`${host}/api/backups/${encodeURIComponent(filename)}/download-token/`); + return response.token; + } catch (e) { + throw e; + } + } + + static async downloadBackup(filename) { + try { + // Get a download token first (requires auth) + const token = await API.getDownloadToken(filename); + const encodedFilename = encodeURIComponent(filename); + + // Build the download URL with token + const downloadUrl = `${host}/api/backups/${encodedFilename}/download/?token=${encodeURIComponent(token)}`; + + // Use direct browser navigation instead of fetch to avoid CORS issues + const link = document.createElement('a'); + link.href = downloadUrl; + link.download = filename; + document.body.appendChild(link); + link.click(); + document.body.removeChild(link); + + return { filename }; + } catch (e) { + errorNotification('Failed to download backup', e); + throw e; + } + } + + static async restoreBackup(filename, onProgress) { + try { + // Start the restore task + const encodedFilename = encodeURIComponent(filename); + const response = await request( + `${host}/api/backups/${encodedFilename}/restore/`, + { + method: 'POST', + } + ); + + // Wait for the task to complete using token for auth + // Token-based auth allows status polling even after DB restore invalidates user sessions + const result = await API.waitForBackupTask(response.task_id, onProgress, response.task_token); + return result; + } catch (e) { + errorNotification('Failed to restore backup', e); + throw e; + } + } + + static async getBackupSchedule() { + try { + const response = await request(`${host}/api/backups/schedule/`); + return response; + } catch (e) { + errorNotification('Failed to get backup schedule', e); + throw e; + } + } + + static async updateBackupSchedule(settings) { + try { + const response = await request(`${host}/api/backups/schedule/update/`, { + method: 'PUT', + body: settings, + }); + return response; + } catch (e) { + errorNotification('Failed to update backup schedule', e); + throw e; + } + } + static async getVersion() { try { const response = await request(`${host}/api/core/version/`, { diff --git a/frontend/src/components/backups/BackupManager.jsx b/frontend/src/components/backups/BackupManager.jsx new file mode 100644 index 00000000..468bcdf9 --- /dev/null +++ b/frontend/src/components/backups/BackupManager.jsx @@ -0,0 +1,496 @@ +import { useEffect, useState } from 'react'; +import { + Alert, + Button, + Card, + Divider, + FileInput, + Group, + Loader, + Modal, + NumberInput, + Select, + Stack, + Switch, + Table, + Text, + Tooltip, +} from '@mantine/core'; +import { 
TimeInput } from '@mantine/dates'; +import { + Download, + PlayCircle, + RefreshCcw, + UploadCloud, + Trash2, + Clock, + Save, +} from 'lucide-react'; +import { notifications } from '@mantine/notifications'; + +import API from '../../api'; +import ConfirmationDialog from '../ConfirmationDialog'; + +const DAYS_OF_WEEK = [ + { value: '0', label: 'Sunday' }, + { value: '1', label: 'Monday' }, + { value: '2', label: 'Tuesday' }, + { value: '3', label: 'Wednesday' }, + { value: '4', label: 'Thursday' }, + { value: '5', label: 'Friday' }, + { value: '6', label: 'Saturday' }, +]; + +function formatBytes(bytes) { + if (bytes === 0) return '0 B'; + const k = 1024; + const sizes = ['B', 'KB', 'MB', 'GB']; + const i = Math.floor(Math.log(bytes) / Math.log(k)); + return `${(bytes / Math.pow(k, i)).toFixed(2)} ${sizes[i]}`; +} + +function formatDate(dateString) { + const date = new Date(dateString); + return date.toLocaleString(); +} + +export default function BackupManager() { + const [backups, setBackups] = useState([]); + const [loading, setLoading] = useState(false); + const [creating, setCreating] = useState(false); + const [downloading, setDownloading] = useState(null); + const [uploadFile, setUploadFile] = useState(null); + const [uploadModalOpen, setUploadModalOpen] = useState(false); + const [restoreConfirmOpen, setRestoreConfirmOpen] = useState(false); + const [deleteConfirmOpen, setDeleteConfirmOpen] = useState(false); + const [selectedBackup, setSelectedBackup] = useState(null); + + // Schedule state + const [schedule, setSchedule] = useState({ + enabled: false, + frequency: 'daily', + time: '03:00', + day_of_week: 0, + retention_count: 0, + }); + const [scheduleLoading, setScheduleLoading] = useState(false); + const [scheduleSaving, setScheduleSaving] = useState(false); + const [scheduleChanged, setScheduleChanged] = useState(false); + + const loadBackups = async () => { + setLoading(true); + try { + const backupList = await API.listBackups(); + setBackups(backupList); + } catch (error) { + notifications.show({ + title: 'Error', + message: error?.message || 'Failed to load backups', + color: 'red', + }); + } finally { + setLoading(false); + } + }; + + const loadSchedule = async () => { + setScheduleLoading(true); + try { + const settings = await API.getBackupSchedule(); + setSchedule(settings); + setScheduleChanged(false); + } catch (error) { + // Ignore errors on initial load - settings may not exist yet + } finally { + setScheduleLoading(false); + } + }; + + useEffect(() => { + loadBackups(); + loadSchedule(); + }, []); + + const handleScheduleChange = (field, value) => { + setSchedule((prev) => ({ ...prev, [field]: value })); + setScheduleChanged(true); + }; + + const handleSaveSchedule = async () => { + setScheduleSaving(true); + try { + const updated = await API.updateBackupSchedule(schedule); + setSchedule(updated); + setScheduleChanged(false); + notifications.show({ + title: 'Success', + message: 'Backup schedule saved', + color: 'green', + }); + } catch (error) { + notifications.show({ + title: 'Error', + message: error?.message || 'Failed to save schedule', + color: 'red', + }); + } finally { + setScheduleSaving(false); + } + }; + + const handleCreateBackup = async () => { + setCreating(true); + try { + await API.createBackup(); + notifications.show({ + title: 'Success', + message: 'Backup created successfully', + color: 'green', + }); + await loadBackups(); + } catch (error) { + notifications.show({ + title: 'Error', + message: error?.message || 'Failed to create backup', + 
color: 'red', + }); + } finally { + setCreating(false); + } + }; + + const handleDownload = async (filename) => { + setDownloading(filename); + try { + await API.downloadBackup(filename); + notifications.show({ + title: 'Download Started', + message: `Downloading ${filename}...`, + color: 'blue', + }); + } catch (error) { + notifications.show({ + title: 'Error', + message: error?.message || 'Failed to download backup', + color: 'red', + }); + } finally { + setDownloading(null); + } + }; + + const handleDeleteClick = (backup) => { + setSelectedBackup(backup); + setDeleteConfirmOpen(true); + }; + + const handleDeleteConfirm = async () => { + try { + await API.deleteBackup(selectedBackup.name); + notifications.show({ + title: 'Success', + message: 'Backup deleted successfully', + color: 'green', + }); + await loadBackups(); + } catch (error) { + notifications.show({ + title: 'Error', + message: error?.message || 'Failed to delete backup', + color: 'red', + }); + } finally { + setDeleteConfirmOpen(false); + setSelectedBackup(null); + } + }; + + const handleRestoreClick = (backup) => { + setSelectedBackup(backup); + setRestoreConfirmOpen(true); + }; + + const handleRestoreConfirm = async () => { + try { + await API.restoreBackup(selectedBackup.name); + notifications.show({ + title: 'Success', + message: 'Backup restored successfully. You may need to refresh the page.', + color: 'green', + }); + setTimeout(() => window.location.reload(), 2000); + } catch (error) { + notifications.show({ + title: 'Error', + message: error?.message || 'Failed to restore backup', + color: 'red', + }); + } finally { + setRestoreConfirmOpen(false); + setSelectedBackup(null); + } + }; + + const handleUploadSubmit = async () => { + if (!uploadFile) return; + + try { + await API.uploadBackup(uploadFile); + notifications.show({ + title: 'Success', + message: 'Backup uploaded successfully', + color: 'green', + }); + setUploadModalOpen(false); + setUploadFile(null); + await loadBackups(); + } catch (error) { + notifications.show({ + title: 'Error', + message: error?.message || 'Failed to upload backup', + color: 'red', + }); + } + }; + + return ( + + + Backups include your database and configured data directories. Use the + create button to generate a new backup, or upload an existing backup to + restore. + + + {/* Schedule Settings */} + + + + + Scheduled Backups + + handleScheduleChange('enabled', e.currentTarget.checked)} + label={schedule.enabled ? 'Enabled' : 'Disabled'} + /> + + + {scheduleLoading ? ( + + ) : ( + <> + + handleScheduleChange('day_of_week', parseInt(value, 10))} + data={DAYS_OF_WEEK} + disabled={!schedule.enabled} + /> + )} + handleScheduleChange('retention_count', value || 0)} + min={0} + disabled={!schedule.enabled} + /> + + + + + + )} + + + + + + + Backups + + + + + + + + + {loading ? ( + + + + ) : backups.length === 0 ? ( + No backups found. Create one to get started! + ) : ( + + + + + + + + + + + {backups.map((backup) => ( + + + + + + + ))} + +
Filename Size Created Actions
+ + {backup.name} + + + {formatBytes(backup.size)} + + {formatDate(backup.created)} + + + + + + + + + + + + +
+ )} + + { + setUploadModalOpen(false); + setUploadFile(null); + }} + title="Upload Backup" + > + + + + + + + + + + { + setRestoreConfirmOpen(false); + setSelectedBackup(null); + }} + onConfirm={handleRestoreConfirm} + title="Restore Backup" + message={`Are you sure you want to restore from "${selectedBackup?.name}"? This will replace all current data with the backup data. This action cannot be undone.`} + confirmLabel="Restore" + cancelLabel="Cancel" + color="orange" + /> + + { + setDeleteConfirmOpen(false); + setSelectedBackup(null); + }} + onConfirm={handleDeleteConfirm} + title="Delete Backup" + message={`Are you sure you want to delete "${selectedBackup?.name}"? This action cannot be undone.`} + confirmLabel="Delete" + cancelLabel="Cancel" + color="red" + /> +
+ ); +} diff --git a/frontend/src/pages/Settings.jsx b/frontend/src/pages/Settings.jsx index df7a755a..62e7f3ce 100644 --- a/frontend/src/pages/Settings.jsx +++ b/frontend/src/pages/Settings.jsx @@ -22,6 +22,7 @@ import { import { isNotEmpty, useForm } from '@mantine/form'; import UserAgentsTable from '../components/tables/UserAgentsTable'; import StreamProfilesTable from '../components/tables/StreamProfilesTable'; +import BackupManager from '../components/backups/BackupManager'; import useLocalStorage from '../hooks/useLocalStorage'; import useAuthStore from '../store/auth'; import { @@ -862,6 +863,13 @@ const SettingsPage = () => { + + + Backup & Restore + + + + )} From 3fb18ecce8ac70fe23983eace7484a1cdb1f5def Mon Sep 17 00:00:00 2001 From: Jim McBride Date: Thu, 27 Nov 2025 08:49:29 -0600 Subject: [PATCH 003/220] Enhancement: Respect user's 12h/24h time format preference in backup scheduler - Read time-format setting from UI Settings via useLocalStorage - Show 12-hour time input with AM/PM selector when user prefers 12h - Show 24-hour time input when user prefers 24h - Backend always stores 24-hour format (no API changes) --- .../src/components/backups/BackupManager.jsx | 96 +++++++++++++++++-- 1 file changed, 89 insertions(+), 7 deletions(-) diff --git a/frontend/src/components/backups/BackupManager.jsx b/frontend/src/components/backups/BackupManager.jsx index 468bcdf9..6ba487b5 100644 --- a/frontend/src/components/backups/BackupManager.jsx +++ b/frontend/src/components/backups/BackupManager.jsx @@ -14,9 +14,9 @@ import { Switch, Table, Text, + TextInput, Tooltip, } from '@mantine/core'; -import { TimeInput } from '@mantine/dates'; import { Download, PlayCircle, @@ -30,6 +30,32 @@ import { notifications } from '@mantine/notifications'; import API from '../../api'; import ConfirmationDialog from '../ConfirmationDialog'; +import useLocalStorage from '../../hooks/useLocalStorage'; + +// Convert 24h time string to 12h format with period +function to12Hour(time24) { + if (!time24) return { time: '12:00', period: 'AM' }; + const [hours, minutes] = time24.split(':').map(Number); + const period = hours >= 12 ? 
'PM' : 'AM'; + const hours12 = hours % 12 || 12; + return { + time: `${hours12}:${String(minutes).padStart(2, '0')}`, + period, + }; +} + +// Convert 12h time + period to 24h format +function to24Hour(time12, period) { + if (!time12) return '00:00'; + const [hours, minutes] = time12.split(':').map(Number); + let hours24 = hours; + if (period === 'PM' && hours !== 12) { + hours24 = hours + 12; + } else if (period === 'AM' && hours === 12) { + hours24 = 0; + } + return `${String(hours24).padStart(2, '0')}:${String(minutes).padStart(2, '0')}`; +} const DAYS_OF_WEEK = [ { value: '0', label: 'Sunday' }, @@ -65,6 +91,10 @@ export default function BackupManager() { const [deleteConfirmOpen, setDeleteConfirmOpen] = useState(false); const [selectedBackup, setSelectedBackup] = useState(null); + // Read user's time format preference from settings + const [timeFormat] = useLocalStorage('time-format', '12h'); + const is12Hour = timeFormat === '12h'; + // Schedule state const [schedule, setSchedule] = useState({ enabled: false, @@ -77,6 +107,10 @@ export default function BackupManager() { const [scheduleSaving, setScheduleSaving] = useState(false); const [scheduleChanged, setScheduleChanged] = useState(false); + // For 12-hour display mode + const [displayTime, setDisplayTime] = useState('3:00'); + const [timePeriod, setTimePeriod] = useState('AM'); + const loadBackups = async () => { setLoading(true); try { @@ -99,6 +133,10 @@ export default function BackupManager() { const settings = await API.getBackupSchedule(); setSchedule(settings); setScheduleChanged(false); + // Initialize 12-hour display values from the loaded time + const { time, period } = to12Hour(settings.time); + setDisplayTime(time); + setTimePeriod(period); } catch (error) { // Ignore errors on initial load - settings may not exist yet } finally { @@ -116,6 +154,26 @@ export default function BackupManager() { setScheduleChanged(true); }; + // Handle time changes in 12-hour mode + const handleTimeChange12h = (newTime, newPeriod) => { + const time = newTime ?? displayTime; + const period = newPeriod ?? timePeriod; + setDisplayTime(time); + setTimePeriod(period); + // Convert to 24h and update schedule + const time24 = to24Hour(time, period); + handleScheduleChange('time', time24); + }; + + // Handle time changes in 24-hour mode + const handleTimeChange24h = (value) => { + handleScheduleChange('time', value); + // Also update 12h display state in case user switches formats + const { time, period } = to12Hour(value); + setDisplayTime(time); + setTimePeriod(period); + }; + const handleSaveSchedule = async () => { setScheduleSaving(true); try { @@ -290,12 +348,36 @@ export default function BackupManager() { ]} disabled={!schedule.enabled} /> - handleScheduleChange('time', e.currentTarget.value)} - disabled={!schedule.enabled} - /> + {is12Hour ? ( + + handleTimeChange12h(e.currentTarget.value, null)} + placeholder="3:00" + disabled={!schedule.enabled} + style={{ flex: 2 }} + /> + Date: Sun, 30 Nov 2025 00:39:30 +1100 Subject: [PATCH 004/220] fix: allow all IPv6 CIDRs by default This change ensures that by default, IPv6 clients can connect to the service unless explicitly denied. 
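As a minimal sketch of the behaviour this fixes (hypothetical helper name;
the real check is network_access_allowed() in dispatcharr/utils.py):
Python's ipaddress module never matches an address against a network of the
other address family, so "0.0.0.0/0" alone silently rejects every IPv6
client, and an IPv6 catch-all CIDR must be present as well.

    import ipaddress

    # Hypothetical standalone helper, for illustration only.
    def client_allowed(client_ip, cidrs=("0.0.0.0/0", "::/0")):
        addr = ipaddress.ip_address(client_ip)
        # Cross-family membership tests are always False, so each family
        # needs its own catch-all CIDR in the default list.
        return any(addr in ipaddress.ip_network(cidr) for cidr in cidrs)

    client_allowed("203.0.113.7")   # True, matched by 0.0.0.0/0
    client_allowed("2001:db8::1")   # True only because ::/0 is included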
Fixes #593 --- dispatcharr/utils.py | 2 +- frontend/src/pages/Settings.jsx | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/dispatcharr/utils.py b/dispatcharr/utils.py index 260515fc..56243b7a 100644 --- a/dispatcharr/utils.py +++ b/dispatcharr/utils.py @@ -44,7 +44,7 @@ def network_access_allowed(request, settings_key): cidrs = ( network_access[settings_key].split(",") if settings_key in network_access - else ["0.0.0.0/0"] + else ["0.0.0.0/0", "::/0"] ) network_allowed = False diff --git a/frontend/src/pages/Settings.jsx b/frontend/src/pages/Settings.jsx index 10f6f5a2..5c25897a 100644 --- a/frontend/src/pages/Settings.jsx +++ b/frontend/src/pages/Settings.jsx @@ -278,7 +278,7 @@ const SettingsPage = () => { const networkAccessForm = useForm({ mode: 'controlled', initialValues: Object.keys(NETWORK_ACCESS_OPTIONS).reduce((acc, key) => { - acc[key] = '0.0.0.0/0,::0/0'; + acc[key] = '0.0.0.0/0,::/0'; return acc; }, {}), validate: Object.keys(NETWORK_ACCESS_OPTIONS).reduce((acc, key) => { @@ -358,7 +358,7 @@ const SettingsPage = () => { ); networkAccessForm.setValues( Object.keys(NETWORK_ACCESS_OPTIONS).reduce((acc, key) => { - acc[key] = networkAccessSettings[key] || '0.0.0.0/0,::0/0'; + acc[key] = networkAccessSettings[key] || '0.0.0.0/0,::/0'; return acc; }, {}) ); From 43949c3ef432d14fb9add2c7dca56aa3709959a6 Mon Sep 17 00:00:00 2001 From: 3l3m3nt Date: Sun, 30 Nov 2025 19:30:47 +1300 Subject: [PATCH 005/220] Added IPv6 port bind to nginx.conf --- docker/nginx.conf | 1 + 1 file changed, 1 insertion(+) diff --git a/docker/nginx.conf b/docker/nginx.conf index 5e754d20..020bc99a 100644 --- a/docker/nginx.conf +++ b/docker/nginx.conf @@ -3,6 +3,7 @@ proxy_cache_path /app/logo_cache levels=1:2 keys_zone=logo_cache:10m server { listen NGINX_PORT; + listen [::]:NGINX_PORT; proxy_connect_timeout 75; proxy_send_timeout 300; From 641dcfc21e40cb05d42123f0276ebcc705c46608 Mon Sep 17 00:00:00 2001 From: GitHub Copilot Date: Sun, 30 Nov 2025 19:20:25 +0000 Subject: [PATCH 006/220] Add sorting functionality to Group and M3U columns in Streams table - Added m3u_account__name to backend ordering_fields in StreamViewSet - Implemented field mapping in frontend to convert column IDs to backend field names - Added sort buttons to both Group and M3U columns with proper icons - Sort buttons show current sort state (ascending/descending/none) - Maintains consistent UX with existing Name column sorting --- apps/channels/api_views.py | 2 +- .../src/components/tables/StreamsTable.jsx | 87 ++++++++++++------- 2 files changed, 56 insertions(+), 33 deletions(-) diff --git a/apps/channels/api_views.py b/apps/channels/api_views.py index bc920537..eccc5028 100644 --- a/apps/channels/api_views.py +++ b/apps/channels/api_views.py @@ -124,7 +124,7 @@ class StreamViewSet(viewsets.ModelViewSet): filter_backends = [DjangoFilterBackend, SearchFilter, OrderingFilter] filterset_class = StreamFilter search_fields = ["name", "channel_group__name"] - ordering_fields = ["name", "channel_group__name"] + ordering_fields = ["name", "channel_group__name", "m3u_account__name"] ordering = ["-name"] def get_permissions(self): diff --git a/frontend/src/components/tables/StreamsTable.jsx b/frontend/src/components/tables/StreamsTable.jsx index d309552c..a0ae1f5e 100644 --- a/frontend/src/components/tables/StreamsTable.jsx +++ b/frontend/src/components/tables/StreamsTable.jsx @@ -385,7 +385,14 @@ const StreamsTable = () => { // Apply sorting if (sorting.length > 0) { - const sortField = sorting[0].id; + const columnId = 
sorting[0].id; + // Map frontend column IDs to backend field names + const fieldMapping = { + name: 'name', + group: 'channel_group__name', + m3u: 'm3u_account__name', + }; + const sortField = fieldMapping[columnId] || columnId; const sortDirection = sorting[0].desc ? '-' : ''; params.append('ordering', `${sortDirection}${sortField}`); } @@ -747,41 +754,57 @@ const StreamsTable = () => { case 'group': return ( - - - + + + + +
+ {React.createElement(sortingIcon, { + onClick: () => onSortingChange('group'), + size: 14, + })} +
+
); case 'm3u': return ( - - ({ + label: playlist.name, + value: `${playlist.id}`, + }))} + variant="unstyled" + className="table-input-header" + /> + +
+ {React.createElement(sortingIcon, { + onClick: () => onSortingChange('m3u'), + size: 14, + })} +
+ ); } }; From cf08e54bd822e38e42779e7702d774807a92571f Mon Sep 17 00:00:00 2001 From: root Date: Mon, 1 Dec 2025 18:11:58 +0000 Subject: [PATCH 007/220] Fix sorting functionality for Group and M3U columns - Add missing header properties to group and m3u columns - Fix layout issues with sort buttons (proper flex layout, remove blocking onClick) - Fix sorting state initialization (use boolean instead of empty string) - Fix sorting comparison operators (use strict equality) - Fix 3rd click behavior to return to default sort instead of clearing - Map frontend column IDs to backend field names for proper API requests --- .../src/components/tables/StreamsTable.jsx | 23 +++++++++++-------- 1 file changed, 13 insertions(+), 10 deletions(-) diff --git a/frontend/src/components/tables/StreamsTable.jsx b/frontend/src/components/tables/StreamsTable.jsx index a0ae1f5e..ca3ee21f 100644 --- a/frontend/src/components/tables/StreamsTable.jsx +++ b/frontend/src/components/tables/StreamsTable.jsx @@ -182,7 +182,7 @@ const StreamsTable = () => { const [pageCount, setPageCount] = useState(0); const [paginationString, setPaginationString] = useState(''); const [isLoading, setIsLoading] = useState(true); - const [sorting, setSorting] = useState([{ id: 'name', desc: '' }]); + const [sorting, setSorting] = useState([{ id: 'name', desc: true }]); const [selectedStreamIds, setSelectedStreamIds] = useState([]); // Channel numbering modal state @@ -298,6 +298,7 @@ const StreamsTable = () => { ), }, { + header: 'Group', id: 'group', accessorFn: (row) => channelGroups[row.channel_group] @@ -319,6 +320,7 @@ const StreamsTable = () => { ), }, { + header: 'M3U', id: 'm3u', size: columnSizing.m3u || 150, accessorFn: (row) => @@ -698,8 +700,8 @@ const StreamsTable = () => { const sortField = sorting[0]?.id; const sortDirection = sorting[0]?.desc; - if (sortField == column) { - if (sortDirection == false) { + if (sortField === column) { + if (sortDirection === false) { setSorting([ { id: column, @@ -707,7 +709,8 @@ const StreamsTable = () => { }, ]); } else { - setSorting([]); + // Reset to default sort (name descending) instead of clearing + setSorting([{ id: 'name', desc: true }]); } } else { setSorting([ @@ -754,8 +757,8 @@ const StreamsTable = () => { case 'group': return ( - - + + { clearable /> -
+
{React.createElement(sortingIcon, { onClick: () => onSortingChange('group'), size: 14, @@ -780,8 +783,8 @@ const StreamsTable = () => { case 'm3u': return ( - - + + )} handleScheduleChange('retention_count', value || 0)} min={0} disabled={!schedule.enabled} /> - + - + )} - + - + {/* Backups List */} + + + + + + + + + + + + + + + + - - - Backups - - - - - - - - - {loading ? ( - - - - ) : backups.length === 0 ? ( - No backups found. Create one to get started! - ) : ( - - - - - - - - - - - {backups.map((backup) => ( - - - - - - - ))} - -
Filename Size Created Actions
- - {backup.name} - - - {formatBytes(backup.size)} - - {formatDate(backup.created)} - - - - - - - - - - - - -
- )} + {loading ? ( + + + + ) : backups.length === 0 ? ( + + No backups found. Create one to get started. + + ) : ( + + + + Filename + Size + Created + Actions + + + + {backups.map((backup) => ( + + + + {backup.name} + + + + {formatBytes(backup.size)} + + + {formatDate(backup.created)} + + + + + handleDownload(backup.name)} + loading={downloading === backup.name} + disabled={downloading !== null} + > + + + + + handleRestoreClick(backup)} + > + + + + + handleDeleteClick(backup)} + > + + + + + + + ))} + +
+ )} +
+ + - + - @@ -557,7 +592,6 @@ export default function BackupManager() { message={`Are you sure you want to restore from "${selectedBackup?.name}"? This will replace all current data with the backup data. This action cannot be undone.`} confirmLabel="Restore" cancelLabel="Cancel" - color="orange" /> ); From 70cf8928c457e162bb738480ac2ac80a1c4084af Mon Sep 17 00:00:00 2001 From: Jim McBride Date: Tue, 2 Dec 2025 22:01:59 -0600 Subject: [PATCH 011/220] Use CustomTable component for backup list --- .../src/components/backups/BackupManager.jsx | 174 +++++++++++------- 1 file changed, 112 insertions(+), 62 deletions(-) diff --git a/frontend/src/components/backups/BackupManager.jsx b/frontend/src/components/backups/BackupManager.jsx index f538d7da..1e4d4fb8 100644 --- a/frontend/src/components/backups/BackupManager.jsx +++ b/frontend/src/components/backups/BackupManager.jsx @@ -1,8 +1,9 @@ -import { useEffect, useState } from 'react'; +import { useEffect, useMemo, useState } from 'react'; import { ActionIcon, Box, Button, + Center, FileInput, Flex, Group, @@ -13,7 +14,6 @@ import { Select, Stack, Switch, - Table, Text, TextInput, Tooltip, @@ -31,6 +31,7 @@ import { notifications } from '@mantine/notifications'; import API from '../../api'; import ConfirmationDialog from '../ConfirmationDialog'; import useLocalStorage from '../../hooks/useLocalStorage'; +import { CustomTable, useTable } from '../tables/CustomTable'; // Convert 24h time string to 12h format with period function to12Hour(time24) { @@ -111,6 +112,112 @@ export default function BackupManager() { const [displayTime, setDisplayTime] = useState('3:00'); const [timePeriod, setTimePeriod] = useState('AM'); + const columns = useMemo( + () => [ + { + header: 'Filename', + accessorKey: 'name', + size: 250, + cell: ({ cell }) => ( + + {cell.getValue()} + + ), + }, + { + header: 'Size', + accessorKey: 'size', + size: 100, + cell: ({ cell }) => ( + {formatBytes(cell.getValue())} + ), + }, + { + header: 'Created', + accessorKey: 'created', + size: 175, + cell: ({ cell }) => ( + {formatDate(cell.getValue())} + ), + }, + { + id: 'actions', + header: 'Actions', + size: 150, + }, + ], + [] + ); + + const renderHeaderCell = (header) => { + if (header.id === 'actions') { + return ( +
+ {header.column.columnDef.header} +
+ ); + } + return ( + + {header.column.columnDef.header} + + ); + }; + + const renderBodyCell = ({ cell, row }) => { + if (cell.column.id === 'actions') { + return ( +
+ + handleDownload(row.original.name)} + loading={downloading === row.original.name} + disabled={downloading !== null} + > + + + + + handleRestoreClick(row.original)} + > + + + + + handleDeleteClick(row.original)} + > + + + +
+ ); + } + return null; + }; + + const table = useTable({ + columns, + data: backups, + allRowIds: backups.map((b) => b.name), + bodyCellRenderFns: { + actions: renderBodyCell, + }, + headerCellRenderFns: { + name: renderHeaderCell, + size: renderHeaderCell, + created: renderHeaderCell, + actions: renderHeaderCell, + }, + }); + const loadBackups = async () => { setLoading(true); try { @@ -483,66 +590,9 @@ export default function BackupManager() { No backups found. Create one to get started. ) : ( - - - - Filename - Size - Created - Actions - - - - {backups.map((backup) => ( - - - - {backup.name} - - - - {formatBytes(backup.size)} - - - {formatDate(backup.created)} - - - - - handleDownload(backup.name)} - loading={downloading === backup.name} - disabled={downloading !== null} - > - - - - - handleRestoreClick(backup)} - > - - - - - handleDeleteClick(backup)} - > - - - - - - - ))} - -
+
+ +
)} From e64002dfc4bb6de00d38256e77520b6fd83c8630 Mon Sep 17 00:00:00 2001 From: Jim McBride Date: Tue, 2 Dec 2025 22:19:20 -0600 Subject: [PATCH 012/220] Refactor BackupManager to match app table conventions --- .../src/components/backups/BackupManager.jsx | 143 ++++++++++-------- 1 file changed, 77 insertions(+), 66 deletions(-) diff --git a/frontend/src/components/backups/BackupManager.jsx b/frontend/src/components/backups/BackupManager.jsx index 1e4d4fb8..46bd0689 100644 --- a/frontend/src/components/backups/BackupManager.jsx +++ b/frontend/src/components/backups/BackupManager.jsx @@ -3,7 +3,6 @@ import { ActionIcon, Box, Button, - Center, FileInput, Flex, Group, @@ -33,6 +32,45 @@ import ConfirmationDialog from '../ConfirmationDialog'; import useLocalStorage from '../../hooks/useLocalStorage'; import { CustomTable, useTable } from '../tables/CustomTable'; +const RowActions = ({ row, handleDownload, handleRestoreClick, handleDeleteClick, downloading }) => { + return ( + + + handleDownload(row.original.name)} + loading={downloading === row.original.name} + disabled={downloading !== null} + > + + + + + handleRestoreClick(row.original)} + > + + + + + handleDeleteClick(row.original)} + > + + + + + ); +}; + // Convert 24h time string to 12h format with period function to12Hour(time24) { if (!time24) return { time: '12:00', period: 'AM' }; @@ -94,6 +132,7 @@ export default function BackupManager() { // Read user's time format preference from settings const [timeFormat] = useLocalStorage('time-format', '12h'); + const [tableSize] = useLocalStorage('table-size', 'default'); const is12Hour = timeFormat === '12h'; // Schedule state @@ -117,17 +156,23 @@ export default function BackupManager() { { header: 'Filename', accessorKey: 'name', - size: 250, + grow: true, cell: ({ cell }) => ( - +
{cell.getValue()} - +
), }, { header: 'Size', accessorKey: 'size', - size: 100, + size: 80, cell: ({ cell }) => ( {formatBytes(cell.getValue())} ), @@ -135,7 +180,7 @@ export default function BackupManager() { { header: 'Created', accessorKey: 'created', - size: 175, + size: 160, cell: ({ cell }) => ( {formatDate(cell.getValue())} ), @@ -143,20 +188,13 @@ export default function BackupManager() { { id: 'actions', header: 'Actions', - size: 150, + size: tableSize === 'compact' ? 75 : 100, }, ], - [] + [tableSize] ); const renderHeaderCell = (header) => { - if (header.id === 'actions') { - return ( -
- {header.column.columnDef.header} -
- ); - } return ( {header.column.columnDef.header} @@ -165,42 +203,18 @@ export default function BackupManager() { }; const renderBodyCell = ({ cell, row }) => { - if (cell.column.id === 'actions') { - return ( -
- - handleDownload(row.original.name)} - loading={downloading === row.original.name} - disabled={downloading !== null} - > - - - - - handleRestoreClick(row.original)} - > - - - - - handleDeleteClick(row.original)} - > - - - -
- ); + switch (cell.column.id) { + case 'actions': + return ( + + ); } - return null; }; const table = useTable({ @@ -435,7 +449,7 @@ export default function BackupManager() { ) : ( <> - + handleScheduleChange('day_of_week', parseInt(value, 10))} + data={DAYS_OF_WEEK} + disabled={!schedule.enabled} + /> + )} {is12Hour ? ( handleTimeChange12h(e.currentTarget.value, null)} placeholder="3:00" disabled={!schedule.enabled} - style={{ flex: 2 }} /> handleScheduleChange('day_of_week', parseInt(value, 10))} - data={DAYS_OF_WEEK} - disabled={!schedule.enabled} - /> - )} handleScheduleChange('retention_count', value || 0)} min={0} disabled={!schedule.enabled} /> - - - + )} From 81b657036611633a9828232a3cf3fd297c06604f Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Wed, 3 Dec 2025 17:03:58 -0600 Subject: [PATCH 013/220] Fix name not sorting. --- frontend/src/components/tables/StreamsTable.jsx | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/frontend/src/components/tables/StreamsTable.jsx b/frontend/src/components/tables/StreamsTable.jsx index db8b43bc..3e497f99 100644 --- a/frontend/src/components/tables/StreamsTable.jsx +++ b/frontend/src/components/tables/StreamsTable.jsx @@ -183,7 +183,7 @@ const StreamsTable = () => { const [pageCount, setPageCount] = useState(0); const [paginationString, setPaginationString] = useState(''); const [isLoading, setIsLoading] = useState(true); - const [sorting, setSorting] = useState([{ id: 'name', desc: true }]); + const [sorting, setSorting] = useState([{ id: 'name', desc: false }]); const [selectedStreamIds, setSelectedStreamIds] = useState([]); // Channel numbering modal state @@ -710,8 +710,8 @@ const StreamsTable = () => { }, ]); } else { - // Reset to default sort (name descending) instead of clearing - setSorting([{ id: 'name', desc: true }]); + // Reset to default sort (name ascending) instead of clearing + setSorting([{ id: 'name', desc: false }]); } } else { setSorting([ From 5fce83fb5126e9a76df5bbd12c03ef60d333b2a8 Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Wed, 3 Dec 2025 17:13:50 -0600 Subject: [PATCH 014/220] style: Adjust table header and input components for consistent width --- .../tables/CustomTable/CustomTableHeader.jsx | 1 + .../src/components/tables/StreamsTable.jsx | 102 ++++++++++-------- 2 files changed, 56 insertions(+), 47 deletions(-) diff --git a/frontend/src/components/tables/CustomTable/CustomTableHeader.jsx b/frontend/src/components/tables/CustomTable/CustomTableHeader.jsx index 92643fc9..004687dd 100644 --- a/frontend/src/components/tables/CustomTable/CustomTableHeader.jsx +++ b/frontend/src/components/tables/CustomTable/CustomTableHeader.jsx @@ -105,6 +105,7 @@ const CustomTableHeader = ({ ...(header.column.columnDef.style && header.column.columnDef.style), height: '100%', + width: '100%', paddingRight: header.column.getCanResize() ? '8px' : '0px', // Add padding for resize handle }} > diff --git a/frontend/src/components/tables/StreamsTable.jsx b/frontend/src/components/tables/StreamsTable.jsx index 3e497f99..f3f4dc20 100644 --- a/frontend/src/components/tables/StreamsTable.jsx +++ b/frontend/src/components/tables/StreamsTable.jsx @@ -736,7 +736,7 @@ const StreamsTable = () => { switch (header.id) { case 'name': return ( - + { variant="unstyled" className="table-input-header" leftSection={} - /> -
- {React.createElement(sortingIcon, { - onClick: () => onSortingChange('name'), + style={{ flex: 1, minWidth: 0 }} + rightSectionPointerEvents="auto" + rightSection={React.createElement(sortingIcon, { + onClick: (e) => { + e.stopPropagation(); + onSortingChange('name'); + }, size: 14, + style: { cursor: 'pointer' }, })} -
+ />
); case 'group': return ( - - - - -
- {React.createElement(sortingIcon, { - onClick: () => onSortingChange('group'), + + { + e.stopPropagation(); + onSortingChange('group'); + }, size: 14, + style: { cursor: 'pointer' }, })} -
+ />
); case 'm3u': return ( - - - ({ + label: playlist.name, + value: `${playlist.id}`, + }))} + variant="unstyled" + className="table-input-header" + style={{ flex: 1, minWidth: 0 }} + rightSectionPointerEvents="auto" + rightSection={React.createElement(sortingIcon, { + onClick: (e) => { + e.stopPropagation(); + onSortingChange('m3u'); + }, size: 14, + style: { cursor: 'pointer' }, })} -
+ /> ); } From 2de6ac5da1802566f220a7abb359943b8a2c61f3 Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Wed, 3 Dec 2025 17:31:16 -0600 Subject: [PATCH 015/220] changelog: Add sort buttons for 'Group' and 'M3U' columns in Streams table --- CHANGELOG.md | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 8501b122..501c0aea 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -7,6 +7,10 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 ## [Unreleased] +### Added + +- Sort buttons for 'Group' and 'M3U' columns in Streams table for improved stream organization and filtering - Thanks [@bobey6](https://github.com/bobey6) + ### Changed - IPv6 access now allowed by default with all IPv6 CIDRs accepted - Thanks [@adrianmace](https://github.com/adrianmace) From 2a8ba9125c053de988bb5a7a446118f83e3c5223 Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Thu, 4 Dec 2025 14:07:28 -0600 Subject: [PATCH 016/220] perf: optimize EPG program parsing for multi-channel sources Dramatically improve EPG refresh performance by parsing the XML file once per source instead of once per channel. The new implementation: - Pre-filters to only process EPG entries mapped to actual channels - Parses the entire XML file in a single pass - Uses O(1) set lookups to skip unmapped channel programmes - Skips non-mapped channels entirely with minimal overhead For EPG sources with many channels but few mapped (e.g., 10,000 channels with 100 mapped to channels), this provides approximately: - 99% reduction in file open operations - 99% reduction in XML file scans - Proportional reduction in CPU and I/O overhead The parse_programs_for_tvg_id() function is retained for single-channel use cases (e.g., when a new channel is mapped via signals). Fixes inefficient repeated file parsing that was occurring with large EPG sources. --- CHANGELOG.md | 1 + apps/epg/tasks.py | 279 ++++++++++++++++++++++++++++++++++------------ 2 files changed, 209 insertions(+), 71 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 501c0aea..65100539 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -13,6 +13,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 ### Changed +- **Performance**: EPG program parsing optimized for sources with many channels but only a fraction mapped. Now parses XML file once per source instead of once per channel, dramatically reducing I/O and CPU overhead. For sources with 10,000 channels and 100 mapped, this results in ~99x fewer file opens and ~100x fewer full file scans. - IPv6 access now allowed by default with all IPv6 CIDRs accepted - Thanks [@adrianmace](https://github.com/adrianmace) - nginx.conf updated to bind to both IPv4 and IPv6 ports - Thanks [@jordandalley](https://github.com/jordandalley) diff --git a/apps/epg/tasks.py b/apps/epg/tasks.py index 59d658b1..3ed222d9 100644 --- a/apps/epg/tasks.py +++ b/apps/epg/tasks.py @@ -1393,11 +1393,23 @@ def parse_programs_for_tvg_id(epg_id): def parse_programs_for_source(epg_source, tvg_id=None): + """ + Parse programs for all MAPPED channels from an EPG source in a single pass. + + This is an optimized version that: + 1. Only processes EPG entries that are actually mapped to channels + 2. Parses the XML file ONCE instead of once per channel + 3. Skips programmes for unmapped channels entirely during parsing + + This dramatically improves performance when an EPG source has many channels + but only a fraction are mapped. 
+ """ # Send initial programs parsing notification send_epg_update(epg_source.id, "parsing_programs", 0) should_log_memory = False process = None initial_memory = 0 + source_file = None # Add memory tracking only in trace mode or higher try: @@ -1417,82 +1429,199 @@ def parse_programs_for_source(epg_source, tvg_id=None): should_log_memory = False try: - # Process EPG entries in batches rather than all at once - batch_size = 20 # Process fewer channels at once to reduce memory usage - epg_count = EPGData.objects.filter(epg_source=epg_source).count() + # Only get EPG entries that are actually mapped to channels + mapped_epg_ids = set( + Channel.objects.filter( + epg_data__epg_source=epg_source, + epg_data__isnull=False + ).values_list('epg_data_id', flat=True) + ) - if epg_count == 0: - logger.info(f"No EPG entries found for source: {epg_source.name}") - # Update status - this is not an error, just no entries + if not mapped_epg_ids: + total_epg_count = EPGData.objects.filter(epg_source=epg_source).count() + logger.info(f"No channels mapped to any EPG entries from source: {epg_source.name} " + f"(source has {total_epg_count} EPG entries, 0 mapped)") + # Update status - this is not an error, just no mapped entries epg_source.status = 'success' - epg_source.save(update_fields=['status']) + epg_source.last_message = f"No channels mapped to this EPG source ({total_epg_count} entries available)" + epg_source.save(update_fields=['status', 'last_message']) send_epg_update(epg_source.id, "parsing_programs", 100, status="success") return True - logger.info(f"Parsing programs for {epg_count} EPG entries from source: {epg_source.name}") + # Get the mapped EPG entries with their tvg_ids + mapped_epgs = EPGData.objects.filter(id__in=mapped_epg_ids).values('id', 'tvg_id') + tvg_id_to_epg_id = {epg['tvg_id']: epg['id'] for epg in mapped_epgs if epg['tvg_id']} + mapped_tvg_ids = set(tvg_id_to_epg_id.keys()) - failed_entries = [] - program_count = 0 - channel_count = 0 - updated_count = 0 - processed = 0 - # Process in batches using cursor-based approach to limit memory usage - last_id = 0 - while True: - # Get a batch of EPG entries - batch_entries = list(EPGData.objects.filter( - epg_source=epg_source, - id__gt=last_id - ).order_by('id')[:batch_size]) + total_epg_count = EPGData.objects.filter(epg_source=epg_source).count() + mapped_count = len(mapped_tvg_ids) - if not batch_entries: - break # No more entries to process + logger.info(f"Parsing programs for {mapped_count} MAPPED channels from source: {epg_source.name} " + f"(skipping {total_epg_count - mapped_count} unmapped EPG entries)") - # Update last_id for next iteration - last_id = batch_entries[-1].id + # Get the file path + file_path = epg_source.extracted_file_path if epg_source.extracted_file_path else epg_source.file_path + if not file_path: + file_path = epg_source.get_cache_file() - # Process this batch - for epg in batch_entries: - if epg.tvg_id: - try: - result = parse_programs_for_tvg_id(epg.id) - if result == "Task already running": - logger.info(f"Program parse for {epg.id} already in progress, skipping") + # Check if the file exists + if not os.path.exists(file_path): + logger.error(f"EPG file not found at: {file_path}") - processed += 1 - progress = min(95, int((processed / epg_count) * 100)) if epg_count > 0 else 50 - send_epg_update(epg_source.id, "parsing_programs", progress) - except Exception as e: - logger.error(f"Error parsing programs for tvg_id={epg.tvg_id}: {e}", exc_info=True) - failed_entries.append(f"{epg.tvg_id}: 
{str(e)}") + if epg_source.url: + # Update the file path in the database + new_path = epg_source.get_cache_file() + logger.info(f"Updating file_path from '{file_path}' to '{new_path}'") + epg_source.file_path = new_path + epg_source.save(update_fields=['file_path']) + logger.info(f"Fetching new EPG data from URL: {epg_source.url}") - # Force garbage collection after each batch - batch_entries = None # Remove reference to help garbage collection + # Fetch new data before continuing + fetch_success = fetch_xmltv(epg_source) + + if not fetch_success: + logger.error(f"Failed to fetch EPG data for source: {epg_source.name}") + epg_source.status = 'error' + epg_source.last_message = f"Failed to download EPG data" + epg_source.save(update_fields=['status', 'last_message']) + send_epg_update(epg_source.id, "parsing_programs", 100, status="error", error="Failed to download EPG file") + return False + + # Update file_path with the new location + file_path = epg_source.extracted_file_path if epg_source.extracted_file_path else epg_source.file_path + else: + logger.error(f"No URL provided for EPG source {epg_source.name}, cannot fetch new data") + epg_source.status = 'error' + epg_source.last_message = f"No URL provided, cannot fetch EPG data" + epg_source.save(update_fields=['status', 'last_message']) + send_epg_update(epg_source.id, "parsing_programs", 100, status="error", error="No URL provided") + return False + + # Delete existing programs for all mapped EPGs in one query + logger.info(f"Deleting existing programs for {mapped_count} mapped EPG entries...") + deleted_count = ProgramData.objects.filter(epg_id__in=mapped_epg_ids).delete()[0] + logger.info(f"Deleted {deleted_count} existing programs") + + # SINGLE PASS PARSING: Parse the XML file once and process all mapped channels + programs_to_create = [] + programs_by_channel = {tvg_id: 0 for tvg_id in mapped_tvg_ids} # Track count per channel + total_programs = 0 + skipped_programs = 0 + batch_size = 1000 + last_progress_update = 0 + + try: + logger.debug(f"Opening file for single-pass parsing: {file_path}") + source_file = open(file_path, 'rb') + + # Stream parse the file using lxml's iterparse + program_parser = etree.iterparse(source_file, events=('end',), tag='programme', remove_blank_text=True, recover=True) + + for _, elem in program_parser: + channel_id = elem.get('channel') + + # Skip programmes for unmapped channels immediately + if channel_id not in mapped_tvg_ids: + skipped_programs += 1 + # Clear element to free memory + clear_element(elem) + continue + + # This programme is for a mapped channel - process it + try: + start_time = parse_xmltv_time(elem.get('start')) + end_time = parse_xmltv_time(elem.get('stop')) + title = None + desc = None + sub_title = None + + # Efficiently process child elements + for child in elem: + if child.tag == 'title': + title = child.text or 'No Title' + elif child.tag == 'desc': + desc = child.text or '' + elif child.tag == 'sub-title': + sub_title = child.text or '' + + if not title: + title = 'No Title' + + # Extract custom properties + custom_props = extract_custom_properties(elem) + custom_properties_json = custom_props if custom_props else None + + epg_id = tvg_id_to_epg_id[channel_id] + programs_to_create.append(ProgramData( + epg_id=epg_id, + start_time=start_time, + end_time=end_time, + title=title, + description=desc, + sub_title=sub_title, + tvg_id=channel_id, + custom_properties=custom_properties_json + )) + total_programs += 1 + programs_by_channel[channel_id] += 1 + + # Clear the element to 
free memory + clear_element(elem) + + # Batch processing + if len(programs_to_create) >= batch_size: + ProgramData.objects.bulk_create(programs_to_create) + logger.debug(f"Saved batch of {len(programs_to_create)} programs (total: {total_programs})") + programs_to_create = [] + + # Send progress update (estimate based on programs processed) + # We don't know total programs upfront, so use a rough estimate + if total_programs - last_progress_update >= 5000: + last_progress_update = total_programs + # Cap at 90% until we're done + progress = min(90, 10 + int((total_programs / max(total_programs + 10000, 1)) * 80)) + send_epg_update(epg_source.id, "parsing_programs", progress, + processed=total_programs, channels=mapped_count) + + # Periodic garbage collection + if total_programs % (batch_size * 5) == 0: + gc.collect() + + except Exception as e: + logger.error(f"Error processing program for {channel_id}: {e}", exc_info=True) + clear_element(elem) + continue + + # Process any remaining items + if programs_to_create: + ProgramData.objects.bulk_create(programs_to_create) + logger.debug(f"Saved final batch of {len(programs_to_create)} programs") + + except etree.XMLSyntaxError as xml_error: + logger.error(f"XML syntax error parsing program data: {xml_error}") + epg_source.status = EPGSource.STATUS_ERROR + epg_source.last_message = f"XML parsing error: {str(xml_error)}" + epg_source.save(update_fields=['status', 'last_message']) + send_epg_update(epg_source.id, "parsing_programs", 100, status="error", message=str(xml_error)) + return False + except Exception as e: + logger.error(f"Error parsing XML for programs: {e}", exc_info=True) + raise + finally: + if source_file: + source_file.close() + source_file = None gc.collect() - # If there were failures, include them in the message but continue - if failed_entries: - epg_source.status = EPGSource.STATUS_SUCCESS # Still mark as success if some processed - error_summary = f"Failed to parse {len(failed_entries)} of {epg_count} entries" - stats_summary = f"Processed {program_count} programs across {channel_count} channels. Updated: {updated_count}." - epg_source.last_message = f"{stats_summary} Warning: {error_summary}" - epg_source.updated_at = timezone.now() - epg_source.save(update_fields=['status', 'last_message', 'updated_at']) + # Count channels that actually got programs + channels_with_programs = sum(1 for count in programs_by_channel.values() if count > 0) - # Send completion notification with mixed status - send_epg_update(epg_source.id, "parsing_programs", 100, - status="success", - message=epg_source.last_message) - - # Explicitly release memory of large lists before returning - del failed_entries - gc.collect() - - return True - - # If all successful, set a comprehensive success message + # Success message epg_source.status = EPGSource.STATUS_SUCCESS - epg_source.last_message = f"Successfully processed {program_count} programs across {channel_count} channels. Updated: {updated_count}." 
+ epg_source.last_message = ( + f"Parsed {total_programs:,} programs for {channels_with_programs} channels " + f"(skipped {skipped_programs:,} programmes for {total_epg_count - mapped_count} unmapped channels)" + ) epg_source.updated_at = timezone.now() epg_source.save(update_fields=['status', 'last_message', 'updated_at']) @@ -1500,9 +1629,10 @@ def parse_programs_for_source(epg_source, tvg_id=None): log_system_event( event_type='epg_refresh', source_name=epg_source.name, - programs=program_count, - channels=channel_count, - updated=updated_count, + programs=total_programs, + channels=channels_with_programs, + skipped_programs=skipped_programs, + unmapped_channels=total_epg_count - mapped_count, ) # Send completion notification with status @@ -1510,7 +1640,9 @@ def parse_programs_for_source(epg_source, tvg_id=None): status="success", message=epg_source.last_message) - logger.info(f"Completed parsing all programs for source: {epg_source.name}") + logger.info(f"Completed parsing programs for source: {epg_source.name} - " + f"{total_programs:,} programs for {channels_with_programs} channels, " + f"skipped {skipped_programs:,} programmes for unmapped channels") return True except Exception as e: @@ -1525,14 +1657,19 @@ def parse_programs_for_source(epg_source, tvg_id=None): return False finally: # Final memory cleanup and tracking - + if source_file: + try: + source_file.close() + except: + pass + source_file = None # Explicitly release any remaining large data structures - failed_entries = None - program_count = None - channel_count = None - updated_count = None - processed = None + programs_to_create = None + programs_by_channel = None + mapped_epg_ids = None + mapped_tvg_ids = None + tvg_id_to_epg_id = None gc.collect() # Add comprehensive memory cleanup at the end From 256ac2f55ab64a6f56f041957f1ae73f8ef6357f Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Thu, 4 Dec 2025 14:25:44 -0600 Subject: [PATCH 017/220] Enhancement: Clean up orphaned programs for unmapped EPG entries --- CHANGELOG.md | 2 +- apps/epg/tasks.py | 11 +++++++++++ 2 files changed, 12 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 65100539..9bc21c7c 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -13,7 +13,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 ### Changed -- **Performance**: EPG program parsing optimized for sources with many channels but only a fraction mapped. Now parses XML file once per source instead of once per channel, dramatically reducing I/O and CPU overhead. For sources with 10,000 channels and 100 mapped, this results in ~99x fewer file opens and ~100x fewer full file scans. +- **Performance**: EPG program parsing optimized for sources with many channels but only a fraction mapped. Now parses XML file once per source instead of once per channel, dramatically reducing I/O and CPU overhead. For sources with 10,000 channels and 100 mapped, this results in ~99x fewer file opens and ~100x fewer full file scans. Orphaned programs for unmapped channels are also cleaned up during refresh to prevent database bloat. 
- IPv6 access now allowed by default with all IPv6 CIDRs accepted - Thanks [@adrianmace](https://github.com/adrianmace) - nginx.conf updated to bind to both IPv4 and IPv6 ports - Thanks [@jordandalley](https://github.com/jordandalley) diff --git a/apps/epg/tasks.py b/apps/epg/tasks.py index 3ed222d9..4a4adddd 100644 --- a/apps/epg/tasks.py +++ b/apps/epg/tasks.py @@ -1502,6 +1502,17 @@ def parse_programs_for_source(epg_source, tvg_id=None): deleted_count = ProgramData.objects.filter(epg_id__in=mapped_epg_ids).delete()[0] logger.info(f"Deleted {deleted_count} existing programs") + # Clean up orphaned programs for unmapped EPG entries + # These accumulate if a channel is unmapped after being mapped + unmapped_epg_ids = EPGData.objects.filter( + epg_source=epg_source + ).exclude(id__in=mapped_epg_ids).values_list('id', flat=True) + + if unmapped_epg_ids: + orphaned_count = ProgramData.objects.filter(epg_id__in=unmapped_epg_ids).delete()[0] + if orphaned_count > 0: + logger.info(f"Cleaned up {orphaned_count} orphaned programs for {len(unmapped_epg_ids)} unmapped EPG entries") + # SINGLE PASS PARSING: Parse the XML file once and process all mapped channels programs_to_create = [] programs_by_channel = {tvg_id: 0 for tvg_id in mapped_tvg_ids} # Track count per channel From 5693ee7f9e3431d1b1fe86df48bc5e9951439a2a Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Thu, 4 Dec 2025 14:57:57 -0600 Subject: [PATCH 018/220] perf: optimize EPG program parsing and implement atomic database updates to reduce I/O overhead and prevent partial data visibility --- CHANGELOG.md | 2 +- apps/epg/tasks.py | 103 +++++++++++++++++++++++++++------------------- 2 files changed, 62 insertions(+), 43 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 9bc21c7c..e46bffe1 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -13,7 +13,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 ### Changed -- **Performance**: EPG program parsing optimized for sources with many channels but only a fraction mapped. Now parses XML file once per source instead of once per channel, dramatically reducing I/O and CPU overhead. For sources with 10,000 channels and 100 mapped, this results in ~99x fewer file opens and ~100x fewer full file scans. Orphaned programs for unmapped channels are also cleaned up during refresh to prevent database bloat. +- **Performance**: EPG program parsing optimized for sources with many channels but only a fraction mapped. Now parses XML file once per source instead of once per channel, dramatically reducing I/O and CPU overhead. For sources with 10,000 channels and 100 mapped, this results in ~99x fewer file opens and ~100x fewer full file scans. Orphaned programs for unmapped channels are also cleaned up during refresh to prevent database bloat. Database updates are now atomic to prevent clients from seeing empty/partial EPG data during refresh. 
- IPv6 access now allowed by default with all IPv6 CIDRs accepted - Thanks [@adrianmace](https://github.com/adrianmace) - nginx.conf updated to bind to both IPv4 and IPv6 ports - Thanks [@jordandalley](https://github.com/jordandalley) diff --git a/apps/epg/tasks.py b/apps/epg/tasks.py index 4a4adddd..9fa999cd 100644 --- a/apps/epg/tasks.py +++ b/apps/epg/tasks.py @@ -1497,28 +1497,13 @@ def parse_programs_for_source(epg_source, tvg_id=None): send_epg_update(epg_source.id, "parsing_programs", 100, status="error", error="No URL provided") return False - # Delete existing programs for all mapped EPGs in one query - logger.info(f"Deleting existing programs for {mapped_count} mapped EPG entries...") - deleted_count = ProgramData.objects.filter(epg_id__in=mapped_epg_ids).delete()[0] - logger.info(f"Deleted {deleted_count} existing programs") - - # Clean up orphaned programs for unmapped EPG entries - # These accumulate if a channel is unmapped after being mapped - unmapped_epg_ids = EPGData.objects.filter( - epg_source=epg_source - ).exclude(id__in=mapped_epg_ids).values_list('id', flat=True) - - if unmapped_epg_ids: - orphaned_count = ProgramData.objects.filter(epg_id__in=unmapped_epg_ids).delete()[0] - if orphaned_count > 0: - logger.info(f"Cleaned up {orphaned_count} orphaned programs for {len(unmapped_epg_ids)} unmapped EPG entries") - - # SINGLE PASS PARSING: Parse the XML file once and process all mapped channels - programs_to_create = [] + # SINGLE PASS PARSING: Parse the XML file once and collect all programs in memory + # We parse FIRST, then do an atomic delete+insert to avoid race conditions + # where clients might see empty/partial EPG data during the transition + all_programs_to_create = [] programs_by_channel = {tvg_id: 0 for tvg_id in mapped_tvg_ids} # Track count per channel total_programs = 0 skipped_programs = 0 - batch_size = 1000 last_progress_update = 0 try: @@ -1563,7 +1548,7 @@ def parse_programs_for_source(epg_source, tvg_id=None): custom_properties_json = custom_props if custom_props else None epg_id = tvg_id_to_epg_id[channel_id] - programs_to_create.append(ProgramData( + all_programs_to_create.append(ProgramData( epg_id=epg_id, start_time=start_time, end_time=end_time, @@ -1579,35 +1564,23 @@ def parse_programs_for_source(epg_source, tvg_id=None): # Clear the element to free memory clear_element(elem) - # Batch processing - if len(programs_to_create) >= batch_size: - ProgramData.objects.bulk_create(programs_to_create) - logger.debug(f"Saved batch of {len(programs_to_create)} programs (total: {total_programs})") - programs_to_create = [] + # Send progress update (estimate based on programs processed) + if total_programs - last_progress_update >= 5000: + last_progress_update = total_programs + # Cap at 70% during parsing phase (save 30% for DB operations) + progress = min(70, 10 + int((total_programs / max(total_programs + 10000, 1)) * 60)) + send_epg_update(epg_source.id, "parsing_programs", progress, + processed=total_programs, channels=mapped_count) - # Send progress update (estimate based on programs processed) - # We don't know total programs upfront, so use a rough estimate - if total_programs - last_progress_update >= 5000: - last_progress_update = total_programs - # Cap at 90% until we're done - progress = min(90, 10 + int((total_programs / max(total_programs + 10000, 1)) * 80)) - send_epg_update(epg_source.id, "parsing_programs", progress, - processed=total_programs, channels=mapped_count) - - # Periodic garbage collection - if total_programs % (batch_size * 5) 
== 0: - gc.collect() + # Periodic garbage collection during parsing + if total_programs % 5000 == 0: + gc.collect() except Exception as e: logger.error(f"Error processing program for {channel_id}: {e}", exc_info=True) clear_element(elem) continue - # Process any remaining items - if programs_to_create: - ProgramData.objects.bulk_create(programs_to_create) - logger.debug(f"Saved final batch of {len(programs_to_create)} programs") - except etree.XMLSyntaxError as xml_error: logger.error(f"XML syntax error parsing program data: {xml_error}") epg_source.status = EPGSource.STATUS_ERROR @@ -1622,6 +1595,52 @@ def parse_programs_for_source(epg_source, tvg_id=None): if source_file: source_file.close() source_file = None + + # Now perform atomic delete + bulk insert + # This ensures clients never see empty/partial EPG data + logger.info(f"Parsed {total_programs} programs, performing atomic database update...") + send_epg_update(epg_source.id, "parsing_programs", 75, message="Updating database...") + + batch_size = 1000 + try: + with transaction.atomic(): + # Delete existing programs for mapped EPGs + deleted_count = ProgramData.objects.filter(epg_id__in=mapped_epg_ids).delete()[0] + logger.debug(f"Deleted {deleted_count} existing programs") + + # Clean up orphaned programs for unmapped EPG entries + unmapped_epg_ids = list(EPGData.objects.filter( + epg_source=epg_source + ).exclude(id__in=mapped_epg_ids).values_list('id', flat=True)) + + if unmapped_epg_ids: + orphaned_count = ProgramData.objects.filter(epg_id__in=unmapped_epg_ids).delete()[0] + if orphaned_count > 0: + logger.info(f"Cleaned up {orphaned_count} orphaned programs for {len(unmapped_epg_ids)} unmapped EPG entries") + + # Bulk insert all new programs in batches within the same transaction + for i in range(0, len(all_programs_to_create), batch_size): + batch = all_programs_to_create[i:i + batch_size] + ProgramData.objects.bulk_create(batch) + + # Update progress during insertion + progress = 75 + int((i / len(all_programs_to_create)) * 20) if all_programs_to_create else 95 + if i % (batch_size * 5) == 0: + send_epg_update(epg_source.id, "parsing_programs", min(95, progress), + message=f"Inserting programs... 
{i}/{len(all_programs_to_create)}") + + logger.info(f"Atomic update complete: deleted {deleted_count}, inserted {total_programs} programs") + + except Exception as db_error: + logger.error(f"Database error during atomic update: {db_error}", exc_info=True) + epg_source.status = EPGSource.STATUS_ERROR + epg_source.last_message = f"Database error: {str(db_error)}" + epg_source.save(update_fields=['status', 'last_message']) + send_epg_update(epg_source.id, "parsing_programs", 100, status="error", message=str(db_error)) + return False + finally: + # Clear the large list to free memory + all_programs_to_create = None gc.collect() # Count channels that actually got programs From 6c8270d0e582ea7667db75036b354de72e972673 Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Thu, 4 Dec 2025 15:28:21 -0600 Subject: [PATCH 019/220] Enhancement: Add support for 'extracting' status and display additional progress information in EPGsTable --- CHANGELOG.md | 1 + frontend/src/components/tables/EPGsTable.jsx | 63 +++++++++++++++++--- 2 files changed, 56 insertions(+), 8 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index e46bffe1..0de26314 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -14,6 +14,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 ### Changed - **Performance**: EPG program parsing optimized for sources with many channels but only a fraction mapped. Now parses XML file once per source instead of once per channel, dramatically reducing I/O and CPU overhead. For sources with 10,000 channels and 100 mapped, this results in ~99x fewer file opens and ~100x fewer full file scans. Orphaned programs for unmapped channels are also cleaned up during refresh to prevent database bloat. Database updates are now atomic to prevent clients from seeing empty/partial EPG data during refresh. 
+- EPG table now displays detailed status messages including refresh progress, success messages, and last message for idle sources (matching M3U table behavior) - IPv6 access now allowed by default with all IPv6 CIDRs accepted - Thanks [@adrianmace](https://github.com/adrianmace) - nginx.conf updated to bind to both IPv4 and IPv6 ports - Thanks [@jordandalley](https://github.com/jordandalley) diff --git a/frontend/src/components/tables/EPGsTable.jsx b/frontend/src/components/tables/EPGsTable.jsx index 71e920e0..b8dfeb6d 100644 --- a/frontend/src/components/tables/EPGsTable.jsx +++ b/frontend/src/components/tables/EPGsTable.jsx @@ -160,6 +160,9 @@ const EPGsTable = () => { case 'downloading': label = 'Downloading'; break; + case 'extracting': + label = 'Extracting'; + break; case 'parsing_channels': label = 'Parsing Channels'; break; @@ -170,6 +173,22 @@ const EPGsTable = () => { return null; } + // Build additional info string from progress data + let additionalInfo = ''; + if (progress.message) { + additionalInfo = progress.message; + } else if ( + progress.processed !== undefined && + progress.channels !== undefined + ) { + additionalInfo = `${progress.processed.toLocaleString()} programs for ${progress.channels} channels`; + } else if ( + progress.processed !== undefined && + progress.total !== undefined + ) { + additionalInfo = `${progress.processed.toLocaleString()} / ${progress.total.toLocaleString()}`; + } + return ( @@ -181,7 +200,14 @@ const EPGsTable = () => { style={{ margin: '2px 0' }} /> {progress.speed && ( - Speed: {parseInt(progress.speed)} KB/s + + Speed: {parseInt(progress.speed)} KB/s + + )} + {additionalInfo && ( + + {additionalInfo} + )} ); @@ -286,14 +312,35 @@ const EPGsTable = () => { // Show success message for successful sources if (data.status === 'success') { + const successMessage = + data.last_message || 'EPG data refreshed successfully'; return ( - - EPG data refreshed successfully - + + + {successMessage} + + + ); + } + + // Show last_message for idle sources (from previous refresh) + if (data.status === 'idle' && data.last_message) { + return ( + + + {data.last_message} + + ); } From 3b34fb11ef0d401c07be61ec8f0c76228fdf3485 Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Thu, 4 Dec 2025 15:43:33 -0600 Subject: [PATCH 020/220] Fix: Fixes bug where Updated column wouldn't update in the EPG table without a webui refresh. --- CHANGELOG.md | 3 ++- apps/epg/tasks.py | 3 ++- frontend/src/WebSocket.jsx | 10 ++++++++-- 3 files changed, 12 insertions(+), 4 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 0de26314..8a77b7b7 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -14,7 +14,8 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 ### Changed - **Performance**: EPG program parsing optimized for sources with many channels but only a fraction mapped. Now parses XML file once per source instead of once per channel, dramatically reducing I/O and CPU overhead. For sources with 10,000 channels and 100 mapped, this results in ~99x fewer file opens and ~100x fewer full file scans. Orphaned programs for unmapped channels are also cleaned up during refresh to prevent database bloat. Database updates are now atomic to prevent clients from seeing empty/partial EPG data during refresh. 
-- EPG table now displays detailed status messages including refresh progress, success messages, and last message for idle sources (matching M3U table behavior) +- EPG table now displays detailed status messages including refresh progress, success messages, and last message for idle sources (matching M3U table behavior) (Closes #214) +- EPG table "Updated" column now updates in real-time via WebSocket using the actual backend timestamp instead of requiring a page refresh - IPv6 access now allowed by default with all IPv6 CIDRs accepted - Thanks [@adrianmace](https://github.com/adrianmace) - nginx.conf updated to bind to both IPv4 and IPv6 ports - Thanks [@jordandalley](https://github.com/jordandalley) diff --git a/apps/epg/tasks.py b/apps/epg/tasks.py index 9fa999cd..c565dbf5 100644 --- a/apps/epg/tasks.py +++ b/apps/epg/tasks.py @@ -1668,7 +1668,8 @@ def parse_programs_for_source(epg_source, tvg_id=None): # Send completion notification with status send_epg_update(epg_source.id, "parsing_programs", 100, status="success", - message=epg_source.last_message) + message=epg_source.last_message, + updated_at=epg_source.updated_at.isoformat()) logger.info(f"Completed parsing programs for source: {epg_source.name} - " f"{total_programs:,} programs for {channels_with_programs} channels, " diff --git a/frontend/src/WebSocket.jsx b/frontend/src/WebSocket.jsx index f2e28ae9..40035d33 100644 --- a/frontend/src/WebSocket.jsx +++ b/frontend/src/WebSocket.jsx @@ -574,7 +574,7 @@ export const WebsocketProvider = ({ children }) => { const sourceId = parsedEvent.data.source || parsedEvent.data.account; const epg = epgs[sourceId]; - + // Only update progress if the EPG still exists in the store // This prevents crashes when receiving updates for deleted EPGs if (epg) { @@ -582,7 +582,9 @@ export const WebsocketProvider = ({ children }) => { updateEPGProgress(parsedEvent.data); } else { // EPG was deleted, ignore this update - console.debug(`Ignoring EPG refresh update for deleted EPG ${sourceId}`); + console.debug( + `Ignoring EPG refresh update for deleted EPG ${sourceId}` + ); break; } @@ -621,6 +623,10 @@ export const WebsocketProvider = ({ children }) => { status: parsedEvent.data.status || 'success', last_message: parsedEvent.data.message || epg.last_message, + // Use the timestamp from the backend if provided + ...(parsedEvent.data.updated_at && { + updated_at: parsedEvent.data.updated_at, + }), }); // Only show success notification if we've finished parsing programs and had no errors From 0d177e44f8ce8cdb2e7dba10bc9266f2b973849e Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Thu, 4 Dec 2025 15:45:09 -0600 Subject: [PATCH 021/220] changelog: Change updated change to bug fix instead of change. --- CHANGELOG.md | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 8a77b7b7..0b95749e 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -15,10 +15,13 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 - **Performance**: EPG program parsing optimized for sources with many channels but only a fraction mapped. Now parses XML file once per source instead of once per channel, dramatically reducing I/O and CPU overhead. For sources with 10,000 channels and 100 mapped, this results in ~99x fewer file opens and ~100x fewer full file scans. Orphaned programs for unmapped channels are also cleaned up during refresh to prevent database bloat. 
Database updates are now atomic to prevent clients from seeing empty/partial EPG data during refresh. - EPG table now displays detailed status messages including refresh progress, success messages, and last message for idle sources (matching M3U table behavior) (Closes #214) -- EPG table "Updated" column now updates in real-time via WebSocket using the actual backend timestamp instead of requiring a page refresh - IPv6 access now allowed by default with all IPv6 CIDRs accepted - Thanks [@adrianmace](https://github.com/adrianmace) - nginx.conf updated to bind to both IPv4 and IPv6 ports - Thanks [@jordandalley](https://github.com/jordandalley) +### Fixed + +- EPG table "Updated" column now updates in real-time via WebSocket using the actual backend timestamp instead of requiring a page refresh + ## [0.13.0] - 2025-12-02 ### Added From c1d960138e4f455543caac34e0b6ef8ca16911e8 Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Fri, 5 Dec 2025 09:02:03 -0600 Subject: [PATCH 022/220] Fix: Bulk channel editor confirmation dialog now shows the correct stream profile that will be set. --- CHANGELOG.md | 1 + frontend/src/components/forms/ChannelBatch.jsx | 6 ++++-- 2 files changed, 5 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 0b95749e..0b0223f9 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -21,6 +21,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 ### Fixed - EPG table "Updated" column now updates in real-time via WebSocket using the actual backend timestamp instead of requiring a page refresh +- Bulk channel editor confirmation dialog now displays the correct stream profile name that will be applied to the selected channels. ## [0.13.0] - 2025-12-02 diff --git a/frontend/src/components/forms/ChannelBatch.jsx b/frontend/src/components/forms/ChannelBatch.jsx index e42d418c..a1cebe54 100644 --- a/frontend/src/components/forms/ChannelBatch.jsx +++ b/frontend/src/components/forms/ChannelBatch.jsx @@ -135,8 +135,10 @@ const ChannelBatchForm = ({ channelIds, isOpen, onClose }) => { if (values.stream_profile_id === '0') { changes.push(`• Stream Profile: Use Default`); } else { - const profileName = - streamProfiles[values.stream_profile_id]?.name || 'Selected Profile'; + const profile = streamProfiles.find( + (p) => `${p.id}` === `${values.stream_profile_id}` + ); + const profileName = profile?.name || 'Selected Profile'; changes.push(`• Stream Profile: ${profileName}`); } } From 759569b871973253c89dd7b625e1272fe5e9c7eb Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Fri, 5 Dec 2025 09:54:11 -0600 Subject: [PATCH 023/220] Enhancement: Add a priority field to EPGSource and prefer higher-priority sources when matching channels. 
Also ignore EPG sources where is_active is false during matching, and update serializers/forms/frontend accordingly.(Closes #603, #672) --- CHANGELOG.md | 3 ++ apps/channels/tasks.py | 37 ++++++++++++++----- .../epg/migrations/0021_epgsource_priority.py | 18 +++++++++ apps/epg/models.py | 4 ++ apps/epg/serializers.py | 1 + frontend/src/components/forms/EPG.jsx | 28 ++++++++++---- 6 files changed, 73 insertions(+), 18 deletions(-) create mode 100644 apps/epg/migrations/0021_epgsource_priority.py diff --git a/CHANGELOG.md b/CHANGELOG.md index 0b0223f9..d58c0ce1 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -10,6 +10,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 ### Added - Sort buttons for 'Group' and 'M3U' columns in Streams table for improved stream organization and filtering - Thanks [@bobey6](https://github.com/bobey6) +- EPG source priority field for controlling which EPG source is preferred when multiple sources have matching entries for a channel (higher numbers = higher priority) (Closes #603) ### Changed @@ -17,6 +18,8 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 - EPG table now displays detailed status messages including refresh progress, success messages, and last message for idle sources (matching M3U table behavior) (Closes #214) - IPv6 access now allowed by default with all IPv6 CIDRs accepted - Thanks [@adrianmace](https://github.com/adrianmace) - nginx.conf updated to bind to both IPv4 and IPv6 ports - Thanks [@jordandalley](https://github.com/jordandalley) +- EPG matching now respects source priority and only uses active (enabled) EPG sources (Closes #672) +- EPG form API Key field now only visible when Schedules Direct source type is selected ### Fixed diff --git a/apps/channels/tasks.py b/apps/channels/tasks.py index 5a9528a7..7ca73ac2 100755 --- a/apps/channels/tasks.py +++ b/apps/channels/tasks.py @@ -295,7 +295,11 @@ def match_channels_to_epg(channels_data, epg_data, region_code=None, use_ml=True if score > 50: # Only show decent matches logger.debug(f" EPG '{row['name']}' (norm: '{row['norm_name']}') => score: {score} (base: {base_score}, bonus: {bonus})") - if score > best_score: + # When scores are equal, prefer higher priority EPG source + row_priority = row.get('epg_source_priority', 0) + best_priority = best_epg.get('epg_source_priority', 0) if best_epg else -1 + + if score > best_score or (score == best_score and row_priority > best_priority): best_score = score best_epg = row @@ -471,9 +475,9 @@ def match_epg_channels(): "norm_chan": normalize_name(channel.name) # Always use channel name for fuzzy matching! 
}) - # Get all EPG data + # Get all EPG data from active sources, ordered by source priority (highest first) so we prefer higher priority matches epg_data = [] - for epg in EPGData.objects.all(): + for epg in EPGData.objects.select_related('epg_source').filter(epg_source__is_active=True): normalized_tvg_id = epg.tvg_id.strip().lower() if epg.tvg_id else "" epg_data.append({ 'id': epg.id, @@ -482,9 +486,13 @@ def match_epg_channels(): 'name': epg.name, 'norm_name': normalize_name(epg.name), 'epg_source_id': epg.epg_source.id if epg.epg_source else None, + 'epg_source_priority': epg.epg_source.priority if epg.epg_source else 0, }) - logger.info(f"Processing {len(channels_data)} channels against {len(epg_data)} EPG entries") + # Sort EPG data by source priority (highest first) so we prefer higher priority matches + epg_data.sort(key=lambda x: x['epg_source_priority'], reverse=True) + + logger.info(f"Processing {len(channels_data)} channels against {len(epg_data)} EPG entries (from active sources only)") # Run EPG matching with progress updates - automatically uses conservative thresholds for bulk operations result = match_channels_to_epg(channels_data, epg_data, region_code, use_ml=True, send_progress=True) @@ -618,9 +626,9 @@ def match_selected_channels_epg(channel_ids): "norm_chan": normalize_name(channel.name) }) - # Get all EPG data + # Get all EPG data from active sources, ordered by source priority (highest first) so we prefer higher priority matches epg_data = [] - for epg in EPGData.objects.all(): + for epg in EPGData.objects.select_related('epg_source').filter(epg_source__is_active=True): normalized_tvg_id = epg.tvg_id.strip().lower() if epg.tvg_id else "" epg_data.append({ 'id': epg.id, @@ -629,9 +637,13 @@ def match_selected_channels_epg(channel_ids): 'name': epg.name, 'norm_name': normalize_name(epg.name), 'epg_source_id': epg.epg_source.id if epg.epg_source else None, + 'epg_source_priority': epg.epg_source.priority if epg.epg_source else 0, }) - logger.info(f"Processing {len(channels_data)} selected channels against {len(epg_data)} EPG entries") + # Sort EPG data by source priority (highest first) so we prefer higher priority matches + epg_data.sort(key=lambda x: x['epg_source_priority'], reverse=True) + + logger.info(f"Processing {len(channels_data)} selected channels against {len(epg_data)} EPG entries (from active sources only)") # Run EPG matching with progress updates - automatically uses appropriate thresholds result = match_channels_to_epg(channels_data, epg_data, region_code, use_ml=True, send_progress=True) @@ -749,9 +761,10 @@ def match_single_channel_epg(channel_id): test_normalized = normalize_name(test_name) logger.debug(f"DEBUG normalization example: '{test_name}' → '{test_normalized}' (call sign preserved)") - # Get all EPG data for matching - must include norm_name field + # Get all EPG data for matching from active sources - must include norm_name field + # Ordered by source priority (highest first) so we prefer higher priority matches epg_data_list = [] - for epg in EPGData.objects.filter(name__isnull=False).exclude(name=''): + for epg in EPGData.objects.select_related('epg_source').filter(epg_source__is_active=True, name__isnull=False).exclude(name=''): normalized_epg_tvg_id = epg.tvg_id.strip().lower() if epg.tvg_id else "" epg_data_list.append({ 'id': epg.id, @@ -760,10 +773,14 @@ def match_single_channel_epg(channel_id): 'name': epg.name, 'norm_name': normalize_name(epg.name), 'epg_source_id': epg.epg_source.id if epg.epg_source else None, + 
'epg_source_priority': epg.epg_source.priority if epg.epg_source else 0, }) + # Sort EPG data by source priority (highest first) so we prefer higher priority matches + epg_data_list.sort(key=lambda x: x['epg_source_priority'], reverse=True) + if not epg_data_list: - return {"matched": False, "message": "No EPG data available for matching"} + return {"matched": False, "message": "No EPG data available for matching (from active sources)"} logger.info(f"Matching single channel '{channel.name}' against {len(epg_data_list)} EPG entries") diff --git a/apps/epg/migrations/0021_epgsource_priority.py b/apps/epg/migrations/0021_epgsource_priority.py new file mode 100644 index 00000000..f2696d67 --- /dev/null +++ b/apps/epg/migrations/0021_epgsource_priority.py @@ -0,0 +1,18 @@ +# Generated by Django 5.2.4 on 2025-12-05 15:24 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('epg', '0020_migrate_time_to_starttime_placeholders'), + ] + + operations = [ + migrations.AddField( + model_name='epgsource', + name='priority', + field=models.PositiveIntegerField(default=0, help_text='Priority for EPG matching (higher numbers = higher priority). Used when multiple EPG sources have matching entries for a channel.'), + ), + ] diff --git a/apps/epg/models.py b/apps/epg/models.py index e5f3847b..b3696edc 100644 --- a/apps/epg/models.py +++ b/apps/epg/models.py @@ -45,6 +45,10 @@ class EPGSource(models.Model): null=True, help_text="Custom properties for dummy EPG configuration (regex patterns, timezone, duration, etc.)" ) + priority = models.PositiveIntegerField( + default=0, + help_text="Priority for EPG matching (higher numbers = higher priority). Used when multiple EPG sources have matching entries for a channel." + ) status = models.CharField( max_length=20, choices=STATUS_CHOICES, diff --git a/apps/epg/serializers.py b/apps/epg/serializers.py index bfb750fc..e4d5f466 100644 --- a/apps/epg/serializers.py +++ b/apps/epg/serializers.py @@ -24,6 +24,7 @@ class EPGSourceSerializer(serializers.ModelSerializer): 'is_active', 'file_path', 'refresh_interval', + 'priority', 'status', 'last_message', 'created_at', diff --git a/frontend/src/components/forms/EPG.jsx b/frontend/src/components/forms/EPG.jsx index db4f8310..50c8553c 100644 --- a/frontend/src/components/forms/EPG.jsx +++ b/frontend/src/components/forms/EPG.jsx @@ -29,6 +29,7 @@ const EPG = ({ epg = null, isOpen, onClose }) => { api_key: '', is_active: true, refresh_interval: 24, + priority: 0, }, validate: { @@ -69,6 +70,7 @@ const EPG = ({ epg = null, isOpen, onClose }) => { api_key: epg.api_key, is_active: epg.is_active, refresh_interval: epg.refresh_interval, + priority: epg.priority ?? 
0, }; form.setValues(values); setSourceType(epg.source_type); @@ -148,14 +150,24 @@ const EPG = ({ epg = null, isOpen, onClose }) => { key={form.key('url')} /> - + )} + + {/* Put checkbox at the same level as Refresh Interval */} From f3a901cb3a50f16f104598ec615cd6bd1a2ffc35 Mon Sep 17 00:00:00 2001 From: dekzter Date: Sat, 6 Dec 2025 13:40:10 -0500 Subject: [PATCH 024/220] Security Fix - generate JWT on application init --- dispatcharr/settings.py | 2 +- docker/entrypoint.sh | 18 +++++++++++++++++- 2 files changed, 18 insertions(+), 2 deletions(-) diff --git a/dispatcharr/settings.py b/dispatcharr/settings.py index d6c29dd9..5f8c23e2 100644 --- a/dispatcharr/settings.py +++ b/dispatcharr/settings.py @@ -4,7 +4,7 @@ from datetime import timedelta BASE_DIR = Path(__file__).resolve().parent.parent -SECRET_KEY = "REPLACE_ME_WITH_A_REAL_SECRET" +SECRET_KEY = os.environ.get("DJANGO_SECRET_KEY") REDIS_HOST = os.environ.get("REDIS_HOST", "localhost") REDIS_DB = os.environ.get("REDIS_DB", "0") diff --git a/docker/entrypoint.sh b/docker/entrypoint.sh index fa0eea01..9c3ec88c 100755 --- a/docker/entrypoint.sh +++ b/docker/entrypoint.sh @@ -40,6 +40,22 @@ export REDIS_DB=${REDIS_DB:-0} export DISPATCHARR_PORT=${DISPATCHARR_PORT:-9191} export LIBVA_DRIVERS_PATH='/usr/local/lib/x86_64-linux-gnu/dri' export LD_LIBRARY_PATH='/usr/local/lib' +export SECRET_FILE="/data/jwt" + +if [ ! -f "$SECRET_FILE" ]; then + umask 077 + tmpfile="$(mktemp "${SECRET_FILE}.XXXXXX")" || { echo "mktemp failed"; exit 1; } + python3 - <<'PY' >"$tmpfile" || { echo "secret generation failed"; rm -f "$tmpfile"; exit 1; } +import secrets +print(secrets.token_urlsafe(64)) +PY + mv -f "$tmpfile" "$SECRET_FILE" || { echo "move failed"; rm -f "$tmpfile"; exit 1; } +fi + +chown $PUID:$PGID "$SECRET_FILE" || true +chmod 600 "$SECRET_FILE" || true + +export DJANGO_SECRET_KEY="$(cat "$SECRET_FILE")" # Process priority configuration # UWSGI_NICE_LEVEL: Absolute nice value for uWSGI/streaming (default: 0 = normal priority) @@ -90,7 +106,7 @@ if [[ ! 
-f /etc/profile.d/dispatcharr.sh ]]; then DISPATCHARR_ENV DISPATCHARR_DEBUG DISPATCHARR_LOG_LEVEL REDIS_HOST REDIS_DB POSTGRES_DIR DISPATCHARR_PORT DISPATCHARR_VERSION DISPATCHARR_TIMESTAMP LIBVA_DRIVERS_PATH LIBVA_DRIVER_NAME LD_LIBRARY_PATH - CELERY_NICE_LEVEL UWSGI_NICE_LEVEL + CELERY_NICE_LEVEL UWSGI_NICE_LEVEL DJANGO_SECRET_KEY ) # Process each variable for both profile.d and environment From 10f329d67380eca9a619d1208d51ab88920c9601 Mon Sep 17 00:00:00 2001 From: dekzter Date: Sat, 6 Dec 2025 13:42:48 -0500 Subject: [PATCH 025/220] release notes for build --- CHANGELOG.md | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 2f082eb8..f9b7b450 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -7,6 +7,10 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 ## [Unreleased] +### Fixed + +- JWT signing key is now generated at startup so it is unique for each deployment + ## [0.13.0] - 2025-12-02 ### Added From a9120552551860b40ecff123fc68fc3a803234cc Mon Sep 17 00:00:00 2001 From: GitHub Actions Date: Sat, 6 Dec 2025 18:43:16 +0000 Subject: [PATCH 026/220] Release v0.13.1 --- CHANGELOG.md | 2 ++ version.py | 2 +- 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index f9b7b450..bf381879 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -7,6 +7,8 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 ## [Unreleased] +## [0.13.1] - 2025-12-06 + ### Fixed - JWT signing key is now generated at startup so it is unique for each deployment diff --git a/version.py b/version.py index b27fed86..f017df85 100644 --- a/version.py +++ b/version.py @@ -1,5 +1,5 @@ """ Dispatcharr version information. """ -__version__ = '0.13.0' # Follow semantic versioning (MAJOR.MINOR.PATCH) +__version__ = '0.13.1' # Follow semantic versioning (MAJOR.MINOR.PATCH) __timestamp__ = None # Set during CI/CD build process From d0edc3fa072f726b3f6a6117a1ea16b38f8eeda3 Mon Sep 17 00:00:00 2001 From: dekzter Date: Sun, 7 Dec 2025 07:54:30 -0500 Subject: [PATCH 027/220] remove permission lines to see if this resolves lack of django secret key in environment profile.d --- docker/entrypoint.sh | 3 --- 1 file changed, 3 deletions(-) diff --git a/docker/entrypoint.sh b/docker/entrypoint.sh index 9c3ec88c..df1584b0 100755 --- a/docker/entrypoint.sh +++ b/docker/entrypoint.sh @@ -52,9 +52,6 @@ PY mv -f "$tmpfile" "$SECRET_FILE" || { echo "move failed"; rm -f "$tmpfile"; exit 1; } fi -chown $PUID:$PGID "$SECRET_FILE" || true -chmod 600 "$SECRET_FILE" || true - export DJANGO_SECRET_KEY="$(cat "$SECRET_FILE")" # Process priority configuration From 3512c3a6233844b41ed2c8132bb0fdcfdf3f3740 Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Sun, 7 Dec 2025 19:05:31 -0600 Subject: [PATCH 028/220] Add DJANGO_SECRET_KEY environment variable to uwsgi configuration files --- docker/uwsgi.debug.ini | 2 +- docker/uwsgi.dev.ini | 1 + docker/uwsgi.ini | 1 + 3 files changed, 3 insertions(+), 1 deletion(-) diff --git a/docker/uwsgi.debug.ini b/docker/uwsgi.debug.ini index 3de890a5..1d7cca93 100644 --- a/docker/uwsgi.debug.ini +++ b/docker/uwsgi.debug.ini @@ -20,7 +20,7 @@ module = scripts.debug_wrapper:application virtualenv = /dispatcharrpy master = true env = DJANGO_SETTINGS_MODULE=dispatcharr.settings - +env = DJANGO_SECRET_KEY=$(DJANGO_SECRET_KEY) socket = /app/uwsgi.sock chmod-socket = 777 vacuum = true diff --git a/docker/uwsgi.dev.ini b/docker/uwsgi.dev.ini index e476e216..1ef9709e 100644 --- a/docker/uwsgi.dev.ini +++ b/docker/uwsgi.dev.ini @@ -22,6 +22,7 @@ module = 
dispatcharr.wsgi:application virtualenv = /dispatcharrpy master = true env = DJANGO_SETTINGS_MODULE=dispatcharr.settings +env = DJANGO_SECRET_KEY=$(DJANGO_SECRET_KEY) socket = /app/uwsgi.sock chmod-socket = 777 vacuum = true diff --git a/docker/uwsgi.ini b/docker/uwsgi.ini index f8fe8ab7..bb359b06 100644 --- a/docker/uwsgi.ini +++ b/docker/uwsgi.ini @@ -21,6 +21,7 @@ module = dispatcharr.wsgi:application virtualenv = /dispatcharrpy master = true env = DJANGO_SETTINGS_MODULE=dispatcharr.settings +env = DJANGO_SECRET_KEY=$(DJANGO_SECRET_KEY) socket = /app/uwsgi.sock chmod-socket = 777 vacuum = true From cf37c6fd9869e8210589a8a99331da702030c2db Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Sun, 7 Dec 2025 19:06:45 -0600 Subject: [PATCH 029/220] changelog: Updated changelog for 0.13.1 --- CHANGELOG.md | 2 ++ 1 file changed, 2 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index eb8324f1..90db90c6 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -30,6 +30,8 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 ### Fixed +- JWT signing key is now generated at startup so it is unique for each deployment + ## [0.13.0] - 2025-12-02 ### Added From 2155229d7f0ce6ad079e3b802588822f1ef1b6b4 Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Sun, 7 Dec 2025 19:40:32 -0600 Subject: [PATCH 030/220] Fix uwsgi command path in entrypoint script --- docker/entrypoint.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docker/entrypoint.sh b/docker/entrypoint.sh index df1584b0..088bcd1e 100755 --- a/docker/entrypoint.sh +++ b/docker/entrypoint.sh @@ -200,7 +200,7 @@ fi # Users can override via UWSGI_NICE_LEVEL environment variable in docker-compose # Start with nice as root, then use setpriv to drop privileges to dispatch user # This preserves both the nice value and environment variables -nice -n $UWSGI_NICE_LEVEL su -p - "$POSTGRES_USER" -c "cd /app && exec uwsgi $uwsgi_args" & uwsgi_pid=$! +nice -n $UWSGI_NICE_LEVEL su - "$POSTGRES_USER" -c "cd /app && exec /dispatcharrpy/bin/uwsgi $uwsgi_args" & uwsgi_pid=$! echo "✅ uwsgi started with PID $uwsgi_pid (nice $UWSGI_NICE_LEVEL)" pids+=("$uwsgi_pid") From e2736babaae4db7393560f7c1ae50d49a611baf9 Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Sun, 7 Dec 2025 20:04:58 -0600 Subject: [PATCH 031/220] Reset umask after creating secret file. --- docker/entrypoint.sh | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/docker/entrypoint.sh b/docker/entrypoint.sh index 088bcd1e..72eb5928 100755 --- a/docker/entrypoint.sh +++ b/docker/entrypoint.sh @@ -41,8 +41,10 @@ export DISPATCHARR_PORT=${DISPATCHARR_PORT:-9191} export LIBVA_DRIVERS_PATH='/usr/local/lib/x86_64-linux-gnu/dri' export LD_LIBRARY_PATH='/usr/local/lib' export SECRET_FILE="/data/jwt" - +# Ensure Django secret key exists or generate a new one if [ ! -f "$SECRET_FILE" ]; then + echo "Generating new Django secret key..."
+ old_umask=$(umask) umask 077 tmpfile="$(mktemp "${SECRET_FILE}.XXXXXX")" || { echo "mktemp failed"; exit 1; } python3 - <<'PY' >"$tmpfile" || { echo "secret generation failed"; rm -f "$tmpfile"; exit 1; } @@ -50,8 +52,8 @@ import secrets print(secrets.token_urlsafe(64)) PY mv -f "$tmpfile" "$SECRET_FILE" || { echo "move failed"; rm -f "$tmpfile"; exit 1; } + umask $old_umask fi - export DJANGO_SECRET_KEY="$(cat "$SECRET_FILE")" # Process priority configuration From ce70b04097cb5fa0e52f500035fad4f7dcab73f5 Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Sun, 7 Dec 2025 20:56:59 -0600 Subject: [PATCH 032/220] changelog: update changelog --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 90db90c6..8efdc30c 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -25,6 +25,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 - EPG table "Updated" column now updates in real-time via WebSocket using the actual backend timestamp instead of requiring a page refresh - Bulk channel editor confirmation dialog now displays the correct stream profile name that will be applied to the selected channels. +- uWSGI not found and 502 bad gateway on first startup ## [0.13.1] - 2025-12-06 From c03ddf60a09175631e868bd5d647ba1484426ad2 Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Sun, 7 Dec 2025 21:28:04 -0600 Subject: [PATCH 033/220] Fixed verbiage for epg parsing status. --- apps/epg/tasks.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/apps/epg/tasks.py b/apps/epg/tasks.py index c565dbf5..bd78c6a3 100644 --- a/apps/epg/tasks.py +++ b/apps/epg/tasks.py @@ -1650,7 +1650,7 @@ def parse_programs_for_source(epg_source, tvg_id=None): epg_source.status = EPGSource.STATUS_SUCCESS epg_source.last_message = ( f"Parsed {total_programs:,} programs for {channels_with_programs} channels " - f"(skipped {skipped_programs:,} programmes for {total_epg_count - mapped_count} unmapped channels)" + f"(skipped {skipped_programs:,} programs for {total_epg_count - mapped_count} unmapped channels)" ) epg_source.updated_at = timezone.now() epg_source.save(update_fields=['status', 'last_message', 'updated_at']) @@ -1672,8 +1672,8 @@ def parse_programs_for_source(epg_source, tvg_id=None): updated_at=epg_source.updated_at.isoformat()) logger.info(f"Completed parsing programs for source: {epg_source.name} - " - f"{total_programs:,} programs for {channels_with_programs} channels, " - f"skipped {skipped_programs:,} programmes for unmapped channels") + f"{total_programs:,} programs for {channels_with_programs} channels, " + f"skipped {skipped_programs:,} programs for unmapped channels") return True except Exception as e: From 43b55e2d9913a71ffec5a1998079e7ea2dd4be3c Mon Sep 17 00:00:00 2001 From: dekzter Date: Mon, 8 Dec 2025 08:38:39 -0500 Subject: [PATCH 034/220] first run at hiding disabled channels in channel profiles --- apps/channels/api_views.py | 21 ++++++++++++++++ .../src/components/tables/ChannelsTable.jsx | 24 +++++++++++++++---- .../ChannelsTable/ChannelTableHeader.jsx | 14 +++++++++++ 3 files changed, 54 insertions(+), 5 deletions(-) diff --git a/apps/channels/api_views.py b/apps/channels/api_views.py index eccc5028..4cfe9777 100644 --- a/apps/channels/api_views.py +++ b/apps/channels/api_views.py @@ -419,6 +419,27 @@ class ChannelViewSet(viewsets.ModelViewSet): group_names = channel_group.split(",") qs = qs.filter(channel_group__name__in=group_names) + channel_profile_id = 
self.request.query_params.get("channel_profile_id") + show_disabled_param = self.request.query_params.get("show_disabled", None) + + if channel_profile_id: + try: + profile_id_int = int(channel_profile_id) + # If show_disabled is present, include all memberships for that profile. + # If absent, restrict to enabled=True. + if show_disabled_param is None: + qs = qs.filter( + channelprofilemembership__channel_profile_id=profile_id_int, + channelprofilemembership__enabled=True, + ) + else: + qs = qs.filter( + channelprofilemembership__channel_profile_id=profile_id_int + ) + except (ValueError, TypeError): + # Ignore invalid profile id values + pass + if self.request.user.user_level < 10: qs = qs.filter(user_level__lte=self.request.user.user_level) diff --git a/frontend/src/components/tables/ChannelsTable.jsx b/frontend/src/components/tables/ChannelsTable.jsx index 9b9958f7..949b9760 100644 --- a/frontend/src/components/tables/ChannelsTable.jsx +++ b/frontend/src/components/tables/ChannelsTable.jsx @@ -68,7 +68,7 @@ const epgUrlBase = `${window.location.protocol}//${window.location.host}/output/ const hdhrUrlBase = `${window.location.protocol}//${window.location.host}/hdhr`; const ChannelEnabledSwitch = React.memo( - ({ rowId, selectedProfileId, selectedTableIds }) => { + ({ rowId, selectedProfileId, selectedTableIds, setSelectedTableIds }) => { // Directly extract the channels set once to avoid re-renders on every change. const isEnabled = useChannelsStore( useCallback( @@ -79,16 +79,20 @@ const ChannelEnabledSwitch = React.memo( ) ); - const handleToggle = () => { + const handleToggle = async () => { if (selectedTableIds.length > 1) { - API.updateProfileChannels( + await API.updateProfileChannels( selectedTableIds, selectedProfileId, !isEnabled ); } else { - API.updateProfileChannel(rowId, selectedProfileId, !isEnabled); + await API.updateProfileChannel(rowId, selectedProfileId, !isEnabled); } + + setSelectedTableIds([]); + + return API.requeryChannels(); }; return ( @@ -289,6 +293,7 @@ const ChannelsTable = ({}) => { const [selectedProfile, setSelectedProfile] = useState( profiles[selectedProfileId] ); + const [showDisabled, setShowDisabled] = useState(true); const [paginationString, setPaginationString] = useState(''); const [filters, setFilters] = useState({ @@ -369,6 +374,12 @@ const ChannelsTable = ({}) => { params.append('page', pagination.pageIndex + 1); params.append('page_size', pagination.pageSize); params.append('include_streams', 'true'); + if (selectedProfileId !== '0') { + params.append('channel_profile_id', selectedProfileId); + } + if (showDisabled === true) { + params.append('show_disabled', true); + } // Apply sorting if (sorting.length > 0) { @@ -401,7 +412,7 @@ const ChannelsTable = ({}) => { pageSize: pagination.pageSize, }); setAllRowIds(ids); - }, [pagination, sorting, debouncedFilters]); + }, [pagination, sorting, debouncedFilters, showDisabled, selectedProfileId]); const stopPropagation = useCallback((e) => { e.stopPropagation(); @@ -728,6 +739,7 @@ const ChannelsTable = ({}) => { rowId={row.original.id} selectedProfileId={selectedProfileId} selectedTableIds={table.getState().selectedTableIds} + setSelectedTableIds={table.setSelectedTableIds} /> ); }, @@ -1326,6 +1338,8 @@ const ChannelsTable = ({}) => { deleteChannels={deleteChannels} selectedTableIds={table.selectedTableIds} table={table} + showDisabled={showDisabled} + setShowDisabled={setShowDisabled} /> {/* Table or ghost empty state inside Paper */} diff --git 
a/frontend/src/components/tables/ChannelsTable/ChannelTableHeader.jsx b/frontend/src/components/tables/ChannelsTable/ChannelTableHeader.jsx index b7e04d7d..d3376b4d 100644 --- a/frontend/src/components/tables/ChannelsTable/ChannelTableHeader.jsx +++ b/frontend/src/components/tables/ChannelsTable/ChannelTableHeader.jsx @@ -26,6 +26,8 @@ import { SquarePen, SquarePlus, Settings, + Eye, + EyeOff, } from 'lucide-react'; import API from '../../../api'; import { notifications } from '@mantine/notifications'; @@ -102,6 +104,8 @@ const ChannelTableHeader = ({ editChannel, deleteChannels, selectedTableIds, + showDisabled, + setShowDisabled, }) => { const theme = useMantineTheme(); @@ -208,6 +212,10 @@ const ChannelTableHeader = ({ ); }; + const toggleShowDisabled = () => { + setShowDisabled(!showDisabled); + }; + return ( @@ -226,6 +234,12 @@ const ChannelTableHeader = ({ + + + + Date: Mon, 8 Dec 2025 17:27:07 -0600 Subject: [PATCH 035/220] Remove DJANGO_SECRET_KEY environment variable from uwsgi configuration files --- docker/uwsgi.debug.ini | 1 - docker/uwsgi.dev.ini | 1 - docker/uwsgi.ini | 1 - 3 files changed, 3 deletions(-) diff --git a/docker/uwsgi.debug.ini b/docker/uwsgi.debug.ini index 1d7cca93..69c040f2 100644 --- a/docker/uwsgi.debug.ini +++ b/docker/uwsgi.debug.ini @@ -20,7 +20,6 @@ module = scripts.debug_wrapper:application virtualenv = /dispatcharrpy master = true env = DJANGO_SETTINGS_MODULE=dispatcharr.settings -env = DJANGO_SECRET_KEY=$(DJANGO_SECRET_KEY) socket = /app/uwsgi.sock chmod-socket = 777 vacuum = true diff --git a/docker/uwsgi.dev.ini b/docker/uwsgi.dev.ini index 1ef9709e..e476e216 100644 --- a/docker/uwsgi.dev.ini +++ b/docker/uwsgi.dev.ini @@ -22,7 +22,6 @@ module = dispatcharr.wsgi:application virtualenv = /dispatcharrpy master = true env = DJANGO_SETTINGS_MODULE=dispatcharr.settings -env = DJANGO_SECRET_KEY=$(DJANGO_SECRET_KEY) socket = /app/uwsgi.sock chmod-socket = 777 vacuum = true diff --git a/docker/uwsgi.ini b/docker/uwsgi.ini index bb359b06..f8fe8ab7 100644 --- a/docker/uwsgi.ini +++ b/docker/uwsgi.ini @@ -21,7 +21,6 @@ module = dispatcharr.wsgi:application virtualenv = /dispatcharrpy master = true env = DJANGO_SETTINGS_MODULE=dispatcharr.settings -env = DJANGO_SECRET_KEY=$(DJANGO_SECRET_KEY) socket = /app/uwsgi.sock chmod-socket = 777 vacuum = true From 98b29f97a1df25395cf72cbb9612c54c06367870 Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Mon, 8 Dec 2025 17:49:40 -0600 Subject: [PATCH 036/220] changelog: Update verbiage --- CHANGELOG.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 8efdc30c..347bb22a 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -14,7 +14,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 ### Changed -- **Performance**: EPG program parsing optimized for sources with many channels but only a fraction mapped. Now parses XML file once per source instead of once per channel, dramatically reducing I/O and CPU overhead. For sources with 10,000 channels and 100 mapped, this results in ~99x fewer file opens and ~100x fewer full file scans. Orphaned programs for unmapped channels are also cleaned up during refresh to prevent database bloat. Database updates are now atomic to prevent clients from seeing empty/partial EPG data during refresh. +- EPG program parsing optimized for sources with many channels but only a fraction mapped. Now parses XML file once per source instead of once per channel, dramatically reducing I/O and CPU overhead. 
For sources with 10,000 channels and 100 mapped, this results in ~99x fewer file opens and ~100x fewer full file scans. Orphaned programs for unmapped channels are also cleaned up during refresh to prevent database bloat. Database updates are now atomic to prevent clients from seeing empty/partial EPG data during refresh. - EPG table now displays detailed status messages including refresh progress, success messages, and last message for idle sources (matching M3U table behavior) (Closes #214) - IPv6 access now allowed by default with all IPv6 CIDRs accepted - Thanks [@adrianmace](https://github.com/adrianmace) - nginx.conf updated to bind to both IPv4 and IPv6 ports - Thanks [@jordandalley](https://github.com/jordandalley) From 4df4e5f963606c66c137cc1b969355cdcf47e2bd Mon Sep 17 00:00:00 2001 From: GitHub Actions Date: Tue, 9 Dec 2025 00:01:50 +0000 Subject: [PATCH 037/220] Release v0.14.0 --- CHANGELOG.md | 2 ++ version.py | 2 +- 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 347bb22a..4716c250 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -7,6 +7,8 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 ## [Unreleased] +## [0.14.0] - 2025-12-09 + ### Added - Sort buttons for 'Group' and 'M3U' columns in Streams table for improved stream organization and filtering - Thanks [@bobey6](https://github.com/bobey6) diff --git a/version.py b/version.py index f017df85..807fc629 100644 --- a/version.py +++ b/version.py @@ -1,5 +1,5 @@ """ Dispatcharr version information. """ -__version__ = '0.13.1' # Follow semantic versioning (MAJOR.MINOR.PATCH) +__version__ = '0.14.0' # Follow semantic versioning (MAJOR.MINOR.PATCH) __timestamp__ = None # Set during CI/CD build process From 69f9ecd93c7868f57fd9e58c8339cc7adef969b9 Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Mon, 8 Dec 2025 20:12:44 -0600 Subject: [PATCH 038/220] Bug Fix: Remove ipv6 binding from nginx config if ipv6 is not available. 
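On hosts without IPv6 support, nginx fails to start when asked to bind a [::] listener, so the init script below detects IPv6 availability at startup and strips those directives when no inet6 address is present. A sketch of the effect on the generated site config (illustrative only - the real port is substituted from DISPATCHARR_PORT; 9191 is just its default):

    # Host with IPv6 available - both listen directives kept:
    listen 9191;
    listen [::]:9191;

    # IPv4-only host - the [::] line is removed at startup:
    listen 9191;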
--- CHANGELOG.md | 4 ++++ docker/init/03-init-dispatcharr.sh | 10 +++++++++- 2 files changed, 13 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 4716c250..389bb8ad 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -7,6 +7,10 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 ## [Unreleased] +### Fixed + +- nginx now gracefully handles hosts without IPv6 support by automatically disabling IPv6 binding at startup + ## [0.14.0] - 2025-12-09 ### Added diff --git a/docker/init/03-init-dispatcharr.sh b/docker/init/03-init-dispatcharr.sh index 5fbef23d..da7d4484 100644 --- a/docker/init/03-init-dispatcharr.sh +++ b/docker/init/03-init-dispatcharr.sh @@ -29,9 +29,17 @@ if [ "$(id -u)" = "0" ] && [ -d "/app" ]; then chown $PUID:$PGID /app fi fi - +# Configure nginx port sed -i "s/NGINX_PORT/${DISPATCHARR_PORT}/g" /etc/nginx/sites-enabled/default +# Configure nginx based on IPv6 availability +if ip -6 addr show | grep -q "inet6"; then + echo "✅ IPv6 is available, enabling IPv6 in nginx" +else + echo "⚠️ IPv6 not available, disabling IPv6 in nginx" + sed -i '/listen \[::\]:/d' /etc/nginx/sites-enabled/default +fi + # NOTE: mac doesn't run as root, so only manage permissions # if this script is running as root if [ "$(id -u)" = "0" ]; then From 514e7e06e4dfcdb8d24ed0eddfd3cf67cc2a7a49 Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Mon, 8 Dec 2025 20:50:50 -0600 Subject: [PATCH 039/220] Bug fix: EPG API now returns correct date/time format for start/end fields and proper string types for timestamps and channel_id --- CHANGELOG.md | 1 + apps/output/views.py | 12 ++++++------ 2 files changed, 7 insertions(+), 6 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 389bb8ad..e363135f 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -10,6 +10,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 ### Fixed - nginx now gracefully handles hosts without IPv6 support by automatically disabling IPv6 binding at startup +- XtreamCodes EPG API now returns correct date/time format for start/end fields and proper string types for timestamps and channel_id ## [0.14.0] - 2025-12-09 diff --git a/apps/output/views.py b/apps/output/views.py index bc2bace5..3a8406cb 100644 --- a/apps/output/views.py +++ b/apps/output/views.py @@ -2316,18 +2316,18 @@ def xc_get_epg(request, user, short=False): "epg_id": f"{epg_id}", "title": base64.b64encode(title.encode()).decode(), "lang": "", - "start": start.strftime("%Y%m%d%H%M%S"), - "end": end.strftime("%Y%m%d%H%M%S"), + "start": start.strftime("%Y-%m-%d %H:%M:%S"), + "end": end.strftime("%Y-%m-%d %H:%M:%S"), "description": base64.b64encode(description.encode()).decode(), - "channel_id": channel_num_int, - "start_timestamp": int(start.timestamp()), - "stop_timestamp": int(end.timestamp()), + "channel_id": str(channel_num_int), + "start_timestamp": str(int(start.timestamp())), + "stop_timestamp": str(int(end.timestamp())), "stream_id": f"{channel_id}", } if short == False: program_output["now_playing"] = 1 if start <= django_timezone.now() <= end else 0 - program_output["has_archive"] = "0" + program_output["has_archive"] = 0 output['epg_listings'].append(program_output) From e8fb01ebdd23071818da88040ec8bee7b08cebfc Mon Sep 17 00:00:00 2001 From: DawtCom Date: Mon, 8 Dec 2025 21:50:13 -0600 Subject: [PATCH 040/220] Removing unreachable code --- apps/output/views.py | 13 +------------ 1 file changed, 1 insertion(+), 12 deletions(-) diff --git a/apps/output/views.py b/apps/output/views.py 
index bc2bace5..07098268 100644 --- a/apps/output/views.py +++ b/apps/output/views.py @@ -161,18 +161,7 @@ def generate_m3u(request, profile_name=None, user=None): channelprofilemembership__enabled=True ).order_by('channel_number') else: - if profile_name is not None: - try: - channel_profile = ChannelProfile.objects.get(name=profile_name) - except ChannelProfile.DoesNotExist: - logger.warning("Requested channel profile (%s) during m3u generation does not exist", profile_name) - raise Http404(f"Channel profile '{profile_name}' not found") - channels = Channel.objects.filter( - channelprofilemembership__channel_profile=channel_profile, - channelprofilemembership__enabled=True, - ).order_by("channel_number") - else: - channels = Channel.objects.order_by("channel_number") + channels = Channel.objects.order_by("channel_number") # Check if the request wants to use direct logo URLs instead of cache use_cached_logos = request.GET.get('cachedlogos', 'true').lower() != 'false' From 806f78244df976bef7abd4f0402d763d1a1c36ee Mon Sep 17 00:00:00 2001 From: Jim McBride Date: Tue, 9 Dec 2025 07:49:31 -0600 Subject: [PATCH 041/220] Add proper ConfirmationDialog usage to BackupManager - Import useWarningsStore from warnings store - Add suppressWarning hook to component - Add actionKey props to restore and delete confirmation dialogs - Add onSuppressChange callback to enable "Don't ask again" functionality This aligns BackupManager with the project's standard confirmation dialog pattern used throughout the codebase (ChannelsTable, EPGsTable, etc). --- frontend/src/components/backups/BackupManager.jsx | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/frontend/src/components/backups/BackupManager.jsx b/frontend/src/components/backups/BackupManager.jsx index 46bd0689..0246d47f 100644 --- a/frontend/src/components/backups/BackupManager.jsx +++ b/frontend/src/components/backups/BackupManager.jsx @@ -30,6 +30,7 @@ import { notifications } from '@mantine/notifications'; import API from '../../api'; import ConfirmationDialog from '../ConfirmationDialog'; import useLocalStorage from '../../hooks/useLocalStorage'; +import useWarningsStore from '../../store/warnings'; import { CustomTable, useTable } from '../tables/CustomTable'; const RowActions = ({ row, handleDownload, handleRestoreClick, handleDeleteClick, downloading }) => { @@ -135,6 +136,9 @@ export default function BackupManager() { const [tableSize] = useLocalStorage('table-size', 'default'); const is12Hour = timeFormat === '12h'; + // Warning suppression for confirmation dialogs + const suppressWarning = useWarningsStore((s) => s.suppressWarning); + // Schedule state const [schedule, setSchedule] = useState({ enabled: false, @@ -653,6 +657,8 @@ export default function BackupManager() { message={`Are you sure you want to restore from "${selectedBackup?.name}"? This will replace all current data with the backup data. 
This action cannot be undone.`} confirmLabel="Restore" cancelLabel="Cancel" + actionKey="restore-backup" + onSuppressChange={suppressWarning} /> ); From d718e5a142c7374748efe5d0de2446c13ca56808 Mon Sep 17 00:00:00 2001 From: Jim McBride Date: Tue, 9 Dec 2025 07:52:53 -0600 Subject: [PATCH 042/220] Implement timezone-aware backup scheduling MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit - Add timezone conversion functions (utcToLocal, localToUtc) - Use user's configured timezone from Settings (localStorage 'time-zone') - Convert times to UTC when saving to backend - Convert times from UTC to local when loading from backend - Display timezone info showing user's timezone and scheduled time - Helper text shows: "Timezone: America/New_York • Backup will run at 03:00" This addresses maintainer feedback to handle timezone properly: backend stores/schedules in UTC, frontend displays/edits in user's local time. --- .../src/components/backups/BackupManager.jsx | 73 +++++++++++++++++-- 1 file changed, 67 insertions(+), 6 deletions(-) diff --git a/frontend/src/components/backups/BackupManager.jsx b/frontend/src/components/backups/BackupManager.jsx index 0246d47f..3732ae09 100644 --- a/frontend/src/components/backups/BackupManager.jsx +++ b/frontend/src/components/backups/BackupManager.jsx @@ -97,6 +97,47 @@ function to24Hour(time12, period) { return `${String(hours24).padStart(2, '0')}:${String(minutes).padStart(2, '0')}`; } +// Convert UTC time (HH:MM) to local time (HH:MM) +function utcToLocal(utcTime) { + if (!utcTime) return '00:00'; + const [hours, minutes] = utcTime.split(':').map(Number); + + // Create a date in UTC + const date = new Date(); + date.setUTCHours(hours, minutes, 0, 0); + + // Get local time components + const localHours = date.getHours(); + const localMinutes = date.getMinutes(); + + return `${String(localHours).padStart(2, '0')}:${String(localMinutes).padStart(2, '0')}`; +} + +// Convert local time (HH:MM) to UTC time (HH:MM) +function localToUtc(localTime) { + if (!localTime) return '00:00'; + const [hours, minutes] = localTime.split(':').map(Number); + + // Create a date in local time + const date = new Date(); + date.setHours(hours, minutes, 0, 0); + + // Get UTC time components + const utcHours = date.getUTCHours(); + const utcMinutes = date.getUTCMinutes(); + + return `${String(utcHours).padStart(2, '0')}:${String(utcMinutes).padStart(2, '0')}`; +} + +// Get default timezone (same as Settings page) +function getDefaultTimeZone() { + try { + return Intl.DateTimeFormat().resolvedOptions().timeZone || 'UTC'; + } catch { + return 'UTC'; + } +} + const DAYS_OF_WEEK = [ { value: '0', label: 'Sunday' }, { value: '1', label: 'Monday' }, @@ -131,9 +172,10 @@ export default function BackupManager() { const [deleteConfirmOpen, setDeleteConfirmOpen] = useState(false); const [selectedBackup, setSelectedBackup] = useState(null); - // Read user's time format preference from settings + // Read user's preferences from settings const [timeFormat] = useLocalStorage('time-format', '12h'); const [tableSize] = useLocalStorage('table-size', 'default'); + const [userTimezone] = useLocalStorage('time-zone', getDefaultTimeZone()); const is12Hour = timeFormat === '12h'; // Warning suppression for confirmation dialogs @@ -256,10 +298,16 @@ export default function BackupManager() { setScheduleLoading(true); try { const settings = await API.getBackupSchedule(); - setSchedule(settings); + + // Convert UTC time from backend to local time + const localTime = 
utcToLocal(settings.time); + + // Store with local time for display + setSchedule({ ...settings, time: localTime }); setScheduleChanged(false); - // Initialize 12-hour display values from the loaded time - const { time, period } = to12Hour(settings.time); + + // Initialize 12-hour display values from the local time + const { time, period } = to12Hour(localTime); setDisplayTime(time); setTimePeriod(period); } catch (error) { @@ -302,9 +350,17 @@ export default function BackupManager() { const handleSaveSchedule = async () => { setScheduleSaving(true); try { - const updated = await API.updateBackupSchedule(schedule); - setSchedule(updated); + // Convert local time to UTC before sending to backend + const utcTime = localToUtc(schedule.time); + const scheduleToSave = { ...schedule, time: utcTime }; + + const updated = await API.updateBackupSchedule(scheduleToSave); + + // Convert UTC time from backend response back to local time + const localTime = utcToLocal(updated.time); + setSchedule({ ...updated, time: localTime }); setScheduleChanged(false); + notifications.show({ title: 'Success', message: 'Backup schedule saved', @@ -518,6 +574,11 @@ export default function BackupManager() { Save + {schedule.enabled && schedule.time && ( + + Timezone: {userTimezone} • Backup will run at {schedule.time} + + )} )} From 5fbcaa91e0906069c95f5ae166a2a75a774cc26d Mon Sep 17 00:00:00 2001 From: Jim McBride Date: Tue, 9 Dec 2025 07:55:47 -0600 Subject: [PATCH 043/220] Add custom cron expression support for backup scheduling Frontend changes: - Add advanced mode toggle switch for cron expressions - Show cron expression input with helpful examples when enabled - Display format hints: "minute hour day month weekday" - Provide common examples (daily, weekly, every 6 hours, etc.) - Conditionally render simple or advanced scheduling UI - Support switching between simple and advanced modes Backend changes: - Add cron_expression to schedule settings (SETTING_KEYS, DEFAULTS) - Update get_schedule_settings to include cron_expression - Update update_schedule_settings to handle cron_expression - Extend _sync_periodic_task to parse and use cron expressions - Parse 5-part cron format: minute hour day_of_month month_of_year day_of_week - Create CrontabSchedule from cron expression or simple frequency - Add validation and error handling for invalid cron expressions This addresses maintainer feedback for "custom scheduler (cron style) for more control". Users can now schedule backups with full cron flexibility beyond daily/weekly. 
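For illustration, a minimal sketch of how a 5-part expression is expected to map
onto django-celery-beat's CrontabSchedule (the expression "0 */6 * * *" below is
a hypothetical example, not a value taken from this patch):

    from django_celery_beat.models import CrontabSchedule

    # Cron field order: minute hour day-of-month month day-of-week.
    # "0 */6 * * *" means "every 6 hours, on the hour".
    minute, hour, day_of_month, month_of_year, day_of_week = "0 */6 * * *".split()
    crontab, _ = CrontabSchedule.objects.get_or_create(
        minute=minute,                # "0"
        hour=hour,                    # "*/6"
        day_of_week=day_of_week,      # "*"
        day_of_month=day_of_month,    # "*"
        month_of_year=month_of_year,  # "*"
    )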
--- apps/backups/scheduler.py | 65 ++++--- .../src/components/backups/BackupManager.jsx | 160 +++++++++++++----- 2 files changed, 161 insertions(+), 64 deletions(-) diff --git a/apps/backups/scheduler.py b/apps/backups/scheduler.py index 52186e90..b0b37567 100644 --- a/apps/backups/scheduler.py +++ b/apps/backups/scheduler.py @@ -15,6 +15,7 @@ SETTING_KEYS = { "time": "backup_schedule_time", "day_of_week": "backup_schedule_day_of_week", "retention_count": "backup_retention_count", + "cron_expression": "backup_schedule_cron_expression", } DEFAULTS = { @@ -23,6 +24,7 @@ DEFAULTS = { "time": "03:00", "day_of_week": 0, # Sunday "retention_count": 0, + "cron_expression": "", } @@ -60,6 +62,7 @@ def get_schedule_settings() -> dict: "time": _get_setting("time"), "day_of_week": _get_setting("day_of_week"), "retention_count": _get_setting("retention_count"), + "cron_expression": _get_setting("cron_expression"), } @@ -88,7 +91,7 @@ def update_schedule_settings(data: dict) -> dict: raise ValueError("retention_count must be >= 0") # Update settings - for key in ("enabled", "frequency", "time", "day_of_week", "retention_count"): + for key in ("enabled", "frequency", "time", "day_of_week", "retention_count", "cron_expression"): if key in data: _set_setting(key, data[key]) @@ -108,26 +111,48 @@ def _sync_periodic_task() -> None: logger.info("Backup schedule disabled, removed periodic task") return - # Parse time - hour, minute = settings["time"].split(":") + # Check if using cron expression (advanced mode) + if settings["cron_expression"]: + # Parse cron expression: "minute hour day month weekday" + try: + parts = settings["cron_expression"].split() + if len(parts) != 5: + raise ValueError("Cron expression must have 5 parts: minute hour day month weekday") - # Build crontab based on frequency - if settings["frequency"] == "daily": - crontab, _ = CrontabSchedule.objects.get_or_create( - minute=minute, - hour=hour, - day_of_week="*", - day_of_month="*", - month_of_year="*", - ) - else: # weekly - crontab, _ = CrontabSchedule.objects.get_or_create( - minute=minute, - hour=hour, - day_of_week=str(settings["day_of_week"]), - day_of_month="*", - month_of_year="*", - ) + minute, hour, day_of_month, month_of_year, day_of_week = parts + + crontab, _ = CrontabSchedule.objects.get_or_create( + minute=minute, + hour=hour, + day_of_week=day_of_week, + day_of_month=day_of_month, + month_of_year=month_of_year, + ) + except Exception as e: + logger.error(f"Invalid cron expression '{settings['cron_expression']}': {e}") + raise ValueError(f"Invalid cron expression: {e}") + else: + # Use simple frequency-based scheduling + # Parse time + hour, minute = settings["time"].split(":") + + # Build crontab based on frequency + if settings["frequency"] == "daily": + crontab, _ = CrontabSchedule.objects.get_or_create( + minute=minute, + hour=hour, + day_of_week="*", + day_of_month="*", + month_of_year="*", + ) + else: # weekly + crontab, _ = CrontabSchedule.objects.get_or_create( + minute=minute, + hour=hour, + day_of_week=str(settings["day_of_week"]), + day_of_month="*", + month_of_year="*", + ) # Create or update the periodic task task, created = PeriodicTask.objects.update_or_create( diff --git a/frontend/src/components/backups/BackupManager.jsx b/frontend/src/components/backups/BackupManager.jsx index 3732ae09..dd47f732 100644 --- a/frontend/src/components/backups/BackupManager.jsx +++ b/frontend/src/components/backups/BackupManager.jsx @@ -188,10 +188,12 @@ export default function BackupManager() { time: '03:00', day_of_week: 
0, retention_count: 0, + cron_expression: '', }); const [scheduleLoading, setScheduleLoading] = useState(false); const [scheduleSaving, setScheduleSaving] = useState(false); const [scheduleChanged, setScheduleChanged] = useState(false); + const [advancedMode, setAdvancedMode] = useState(false); // For 12-hour display mode const [displayTime, setDisplayTime] = useState('3:00'); @@ -299,17 +301,24 @@ export default function BackupManager() { try { const settings = await API.getBackupSchedule(); - // Convert UTC time from backend to local time - const localTime = utcToLocal(settings.time); + // Check if using cron expression (advanced mode) + if (settings.cron_expression) { + setAdvancedMode(true); + setSchedule(settings); + } else { + // Convert UTC time from backend to local time + const localTime = utcToLocal(settings.time); + + // Store with local time for display + setSchedule({ ...settings, time: localTime }); + + // Initialize 12-hour display values from the local time + const { time, period } = to12Hour(localTime); + setDisplayTime(time); + setTimePeriod(period); + } - // Store with local time for display - setSchedule({ ...settings, time: localTime }); setScheduleChanged(false); - - // Initialize 12-hour display values from the local time - const { time, period } = to12Hour(localTime); - setDisplayTime(time); - setTimePeriod(period); } catch (error) { // Ignore errors on initial load - settings may not exist yet } finally { @@ -350,15 +359,27 @@ export default function BackupManager() { const handleSaveSchedule = async () => { setScheduleSaving(true); try { - // Convert local time to UTC before sending to backend - const utcTime = localToUtc(schedule.time); - const scheduleToSave = { ...schedule, time: utcTime }; + let scheduleToSave; + + if (advancedMode) { + // In advanced mode, send cron expression as-is + scheduleToSave = schedule; + } else { + // Convert local time to UTC before sending to backend + const utcTime = localToUtc(schedule.time); + scheduleToSave = { ...schedule, time: utcTime, cron_expression: '' }; + } const updated = await API.updateBackupSchedule(scheduleToSave); - // Convert UTC time from backend response back to local time - const localTime = utcToLocal(updated.time); - setSchedule({ ...updated, time: localTime }); + if (advancedMode) { + setSchedule(updated); + } else { + // Convert UTC time from backend response back to local time + const localTime = utcToLocal(updated.time); + setSchedule({ ...updated, time: localTime }); + } + setScheduleChanged(false); notifications.show({ @@ -509,17 +530,65 @@ export default function BackupManager() { ) : ( <> - - handleScheduleChange('frequency', value)} + data={[ + { value: 'daily', label: 'Daily' }, + { value: 'weekly', label: 'Weekly' }, + ]} + disabled={!schedule.enabled} + /> {schedule.frequency === 'weekly' && ( setNewStream(e.target.value)} - placeholder="Enter Stream" - /> - -

-      <h3>Streams:</h3>
-      <ul>
-        {state.streams.map((stream, index) => (
-          <li key={index}>{stream}</li>
-        ))}
-      </ul>
-    </div>
-  );
-};
-
-export default Dashboard;
diff --git a/frontend/src/pages/Home.jsx b/frontend/src/pages/Home.jsx
deleted file mode 100644
index e9751d8d..00000000
--- a/frontend/src/pages/Home.jsx
+++ /dev/null
@@ -1,14 +0,0 @@
-// src/components/Home.js
-import React, { useState } from 'react';
-
-const Home = () => {
-  const [newChannel, setNewChannel] = useState('');
-
-  return (
-    <div>
-      <h1>Home Page</h1>
-    </div>
- ); -}; - -export default Home; From 0070d9e50025783b3a625e94bd24a2c02e684d4a Mon Sep 17 00:00:00 2001 From: Nick Sandstrom <32273437+nick4810@users.noreply.github.com> Date: Wed, 10 Dec 2025 22:29:48 -0800 Subject: [PATCH 056/220] Added ErrorBoundary component --- frontend/src/components/ErrorBoundary.jsx | 18 ++++++++++++++++++ 1 file changed, 18 insertions(+) create mode 100644 frontend/src/components/ErrorBoundary.jsx diff --git a/frontend/src/components/ErrorBoundary.jsx b/frontend/src/components/ErrorBoundary.jsx new file mode 100644 index 00000000..60c4ba38 --- /dev/null +++ b/frontend/src/components/ErrorBoundary.jsx @@ -0,0 +1,18 @@ +import React from 'react'; + +class ErrorBoundary extends React.Component { + state = { hasError: false }; + + static getDerivedStateFromError(error) { + return { hasError: true }; + } + + render() { + if (this.state.hasError) { + return
<div>Something went wrong</div>
; + } + return this.props.children; + } +} + +export default ErrorBoundary; \ No newline at end of file From dd5ae8450dc9a1548ea41a3fd2b44d75ea78d6d7 Mon Sep 17 00:00:00 2001 From: Nick Sandstrom <32273437+nick4810@users.noreply.github.com> Date: Wed, 10 Dec 2025 22:31:04 -0800 Subject: [PATCH 057/220] Updated pages to utilize error boundary --- frontend/src/pages/Channels.jsx | 57 +++++++++++++++------------ frontend/src/pages/ContentSources.jsx | 48 +++++++++++----------- frontend/src/pages/Users.jsx | 54 ++++++------------------- 3 files changed, 69 insertions(+), 90 deletions(-) diff --git a/frontend/src/pages/Channels.jsx b/frontend/src/pages/Channels.jsx index 7663276d..d09f0c41 100644 --- a/frontend/src/pages/Channels.jsx +++ b/frontend/src/pages/Channels.jsx @@ -1,14 +1,17 @@ -import React from 'react'; +import React, { lazy, Suspense } from 'react'; import ChannelsTable from '../components/tables/ChannelsTable'; -import StreamsTable from '../components/tables/StreamsTable'; -import { Box } from '@mantine/core'; +const StreamsTable = lazy(() => import('../components/tables/StreamsTable')); +import { Box, Text } from '@mantine/core'; import { Allotment } from 'allotment'; import { USER_LEVELS } from '../constants'; import useAuthStore from '../store/auth'; import useLocalStorage from '../hooks/useLocalStorage'; +import ErrorBoundary from '../components/ErrorBoundary'; -const ChannelsPage = () => { +const PageContent = () => { const authUser = useAuthStore((s) => s.user); + if (!authUser.id) throw new Error() + const [allotmentSizes, setAllotmentSizes] = useLocalStorage( 'channels-splitter-sizes', [50, 50] @@ -22,9 +25,6 @@ const ChannelsPage = () => { setAllotmentSizes(sizes); }; - if (!authUser.id) { - return <>; - } if (authUser.user_level <= USER_LEVELS.STANDARD) { return ( @@ -34,34 +34,41 @@ const ChannelsPage = () => { } return ( -
-
-
+ + -
-
-
-
- -
-
+ + + + + + Loading...}> + + + + +
-
+
+ ); +}; + +const ChannelsPage = () => { + return ( + + + ); }; diff --git a/frontend/src/pages/ContentSources.jsx b/frontend/src/pages/ContentSources.jsx index 310abb7f..c9eaaffc 100644 --- a/frontend/src/pages/ContentSources.jsx +++ b/frontend/src/pages/ContentSources.jsx @@ -2,36 +2,38 @@ import useUserAgentsStore from '../store/userAgents'; import M3UsTable from '../components/tables/M3UsTable'; import EPGsTable from '../components/tables/EPGsTable'; import { Box, Stack } from '@mantine/core'; +import ErrorBoundary from '../components/ErrorBoundary' -const ErrorBoundary = ({ children }) => { +const PageContent = () => { const error = useUserAgentsStore((state) => state.error); - if (error) { - return
-    return <div>Error: {error}</div>;
-  }
-  return children;
+  if (error) throw new Error(error);
+
+  return (
+    <Stack>
+      <Box>
+        <M3UsTable />
+      </Box>
+      <Box>
+        <EPGsTable />
+      </Box>
+    </Stack>
+  );
+}

 const M3UPage = () => {
   return (
-    <ErrorBoundary>
-      <Stack>
-        <Box>
-          <M3UsTable />
-        </Box>
-        <Box>
-          <EPGsTable />
-        </Box>
-      </Stack>
-    </ErrorBoundary>
+    <ErrorBoundary>
+      <PageContent />
+    </ErrorBoundary>
   );
 }
diff --git a/frontend/src/pages/Users.jsx b/frontend/src/pages/Users.jsx
index 570e49c1..e69f07f8 100644
--- a/frontend/src/pages/Users.jsx
+++ b/frontend/src/pages/Users.jsx
@@ -1,55 +1,25 @@
-import React, { useState } from 'react';
 import UsersTable from '../components/tables/UsersTable';
 import { Box } from '@mantine/core';
 import useAuthStore from '../store/auth';
-import { USER_LEVELS } from '../constants';
+import ErrorBoundary from '../components/ErrorBoundary';

-const UsersPage = () => {
+const PageContent = () => {
   const authUser = useAuthStore((s) => s.user);
-
-  const [selectedUser, setSelectedUser] = useState(null);
-  const [userModalOpen, setUserModalOpen] = useState(false);
-  const [confirmDeleteOpen, setConfirmDeleteOpen] = useState(false);
-  const [deleteTarget, setDeleteTarget] = useState(null);
-  const [userToDelete, setUserToDelete] = useState(null);
-
-  if (!authUser.id) {
-    return <></>;
-  }
-
-  const closeUserModal = () => {
-    setSelectedUser(null);
-    setUserModalOpen(false);
-  };
-  const editUser = (user) => {
-    setSelectedUser(user);
-    setUserModalOpen(true);
-  };
-
-  const deleteUser = (id) => {
-    // Get user details for the confirmation dialog
-    const user = users.find((u) => u.id === id);
-    setUserToDelete(user);
-    setDeleteTarget(id);
-
-    // Skip warning if it's been suppressed
-    if (isWarningSuppressed('delete-user')) {
-      return executeDeleteUser(id);
-    }
-
-    setConfirmDeleteOpen(true);
-  };
-
-  const executeDeleteUser = async (id) => {
-    await API.deleteUser(id);
-    setConfirmDeleteOpen(false);
-  };
+  if (!authUser.id) throw new Error();

   return (
-    <Box><UsersTable /></Box>
+    <UsersTable />
   );
 }
+
+const UsersPage = () => {
+  return (
+    <ErrorBoundary>
+      <PageContent />
+    </ErrorBoundary>
+  );
+};

 export default UsersPage;
From ea38c0b4b88bac1d89c186f4d17cd9f1dde0ef6d Mon Sep 17 00:00:00 2001
From: dekzter
Date: Thu, 11 Dec 2025 11:54:41 -0500
Subject: [PATCH 058/220] advanced filtering for hiding disabled channels and
 viewing only empty channels

---
 apps/channels/api_views.py | 32 ++++++-----
 .../src/components/tables/ChannelsTable.jsx | 16 +++++-
 .../ChannelsTable/ChannelTableHeader.jsx | 53 +++++++++++++++----
 3 files changed, 78 insertions(+), 23 deletions(-)

diff --git a/apps/channels/api_views.py b/apps/channels/api_views.py
index 4cfe9777..40063245 100644
--- a/apps/channels/api_views.py
+++ b/apps/channels/api_views.py
@@ -8,6 +8,7 @@ from drf_yasg.utils import swagger_auto_schema
 from drf_yasg import openapi
 from django.shortcuts import get_object_or_404, get_list_or_404
 from django.db import transaction
+from django.db.models import Q
 import os, json, requests, logging
 from apps.accounts.permissions import (
     Authenticated,
@@ -419,31 +420,36 @@ class ChannelViewSet(viewsets.ModelViewSet):
             group_names = channel_group.split(",")
             qs = qs.filter(channel_group__name__in=group_names)

+        filters = {}
+        q_filters = Q()
+
         channel_profile_id = self.request.query_params.get("channel_profile_id")
         show_disabled_param = self.request.query_params.get("show_disabled", None)
+        only_streamless = self.request.query_params.get("only_streamless", None)

         if channel_profile_id:
             try:
                 profile_id_int = int(channel_profile_id)
-                # If show_disabled is present, include all memberships for that profile.
-                # If absent, restrict to enabled=True.
+ filters["channelprofilemembership__channel_profile_id"] = profile_id_int + if show_disabled_param is None: - qs = qs.filter( - channelprofilemembership__channel_profile_id=profile_id_int, - channelprofilemembership__enabled=True, - ) - else: - qs = qs.filter( - channelprofilemembership__channel_profile_id=profile_id_int - ) + filters["channelprofilemembership__enabled"] = True except (ValueError, TypeError): # Ignore invalid profile id values pass - if self.request.user.user_level < 10: - qs = qs.filter(user_level__lte=self.request.user.user_level) + if only_streamless: + q_filters &= Q(streams__isnull=True) - return qs + if self.request.user.user_level < 10: + filters["user_level__lte"] = self.request.user.user_level + + if filters: + qs = qs.filter(**filters) + if q_filters: + qs = qs.filter(q_filters) + + return qs.distinct() def get_serializer_context(self): context = super().get_serializer_context() diff --git a/frontend/src/components/tables/ChannelsTable.jsx b/frontend/src/components/tables/ChannelsTable.jsx index 949b9760..7f82140f 100644 --- a/frontend/src/components/tables/ChannelsTable.jsx +++ b/frontend/src/components/tables/ChannelsTable.jsx @@ -294,6 +294,8 @@ const ChannelsTable = ({}) => { profiles[selectedProfileId] ); const [showDisabled, setShowDisabled] = useState(true); + const [showOnlyStreamlessChannels, setShowOnlyStreamlessChannels] = + useState(false); const [paginationString, setPaginationString] = useState(''); const [filters, setFilters] = useState({ @@ -380,6 +382,9 @@ const ChannelsTable = ({}) => { if (showDisabled === true) { params.append('show_disabled', true); } + if (showOnlyStreamlessChannels === true) { + params.append('only_streamless', true); + } // Apply sorting if (sorting.length > 0) { @@ -412,7 +417,14 @@ const ChannelsTable = ({}) => { pageSize: pagination.pageSize, }); setAllRowIds(ids); - }, [pagination, sorting, debouncedFilters, showDisabled, selectedProfileId]); + }, [ + pagination, + sorting, + debouncedFilters, + showDisabled, + selectedProfileId, + showOnlyStreamlessChannels, + ]); const stopPropagation = useCallback((e) => { e.stopPropagation(); @@ -1340,6 +1352,8 @@ const ChannelsTable = ({}) => { table={table} showDisabled={showDisabled} setShowDisabled={setShowDisabled} + showOnlyStreamlessChannels={showOnlyStreamlessChannels} + setShowOnlyStreamlessChannels={setShowOnlyStreamlessChannels} /> {/* Table or ghost empty state inside Paper */} diff --git a/frontend/src/components/tables/ChannelsTable/ChannelTableHeader.jsx b/frontend/src/components/tables/ChannelsTable/ChannelTableHeader.jsx index d3376b4d..460ab12a 100644 --- a/frontend/src/components/tables/ChannelsTable/ChannelTableHeader.jsx +++ b/frontend/src/components/tables/ChannelsTable/ChannelTableHeader.jsx @@ -12,15 +12,12 @@ import { Text, TextInput, Tooltip, - UnstyledButton, useMantineTheme, } from '@mantine/core'; import { ArrowDown01, Binary, - Check, CircleCheck, - Ellipsis, EllipsisVertical, SquareMinus, SquarePen, @@ -28,6 +25,9 @@ import { Settings, Eye, EyeOff, + Filter, + Square, + SquareCheck, } from 'lucide-react'; import API from '../../../api'; import { notifications } from '@mantine/notifications'; @@ -106,6 +106,8 @@ const ChannelTableHeader = ({ selectedTableIds, showDisabled, setShowDisabled, + showOnlyStreamlessChannels, + setShowOnlyStreamlessChannels, }) => { const theme = useMantineTheme(); @@ -216,6 +218,10 @@ const ChannelTableHeader = ({ setShowDisabled(!showDisabled); }; + const toggleShowOnlyStreamlessChannels = () => { + 
setShowOnlyStreamlessChannels(!showOnlyStreamlessChannels); + }; + return ( @@ -234,12 +240,6 @@ const ChannelTableHeader = ({ - - - - + + + + + + + : + } + disabled={selectedProfileId === '0'} + > + + {showDisabled ? 'Hide Disabled' : 'Show Disabled'} + + + + + ) : ( + + ) + } + > + Only Empty Channels + + + + - - - - : - } - disabled={selectedProfileId === '0'} - > - - {showDisabled ? 'Hide Disabled' : 'Show Disabled'} - - - - - ) : ( - - ) - } - > - Only Empty Channels - - - - + )} + + {!isUpcoming && ( + + + + )} + {!isUpcoming && + customProps?.status === 'completed' && + (!customProps?.comskip || + customProps?.comskip?.status !== 'completed') && ( + + )} + + + + {/* If this card is a grouped upcoming series, show count */} + {recording._group_count > 1 && ( + + Next of {recording._group_count} + + )} + + ); + if (!isSeriesGroup) return MainCard; + + // Stacked look for series groups: render two shadow layers behind the main card + return ( + + setCancelOpen(false)} + title="Cancel Series" + centered + size="md" + zIndex={9999} + > + + This is a series rule. What would you like to cancel? + + + + + + + + + {MainCard} + + ); +}; \ No newline at end of file diff --git a/frontend/src/components/forms/RecordingDetailsModal.jsx b/frontend/src/components/forms/RecordingDetailsModal.jsx new file mode 100644 index 00000000..9b01945c --- /dev/null +++ b/frontend/src/components/forms/RecordingDetailsModal.jsx @@ -0,0 +1,429 @@ +import useChannelsStore from '../../store/channels.jsx'; +import { useDateTimeFormat, useTimeHelpers } from '../../utils/dateTimeUtils.js'; +import React from 'react'; +import API from '../../api.js'; +import { + Badge, + Button, + Card, + Flex, + Group, + Image, + Modal, + Stack, + Text, +} from '@mantine/core'; +import useVideoStore from '../../store/useVideoStore.jsx'; +import { notifications } from '@mantine/notifications'; + +export const RecordingDetailsModal = ({ + opened, + onClose, + recording, + channel, + posterUrl, + onWatchLive, + onWatchRecording, + env_mode, + onEdit, + }) => { + const allRecordings = useChannelsStore((s) => s.recordings); + const channelMap = useChannelsStore((s) => s.channels); + const { toUserTime, userNow } = useTimeHelpers(); + const [childOpen, setChildOpen] = React.useState(false); + const [childRec, setChildRec] = React.useState(null); + const [timeformat, dateformat] = useDateTimeFormat(); + + const safeRecording = recording || {}; + const customProps = safeRecording.custom_properties || {}; + const program = customProps.program || {}; + const recordingName = program.title || 'Custom Recording'; + const description = program.description || customProps.description || ''; + const start = toUserTime(safeRecording.start_time); + const end = toUserTime(safeRecording.end_time); + const stats = customProps.stream_info || {}; + + const statRows = [ + ['Video Codec', stats.video_codec], + [ + 'Resolution', + stats.resolution || + (stats.width && stats.height ? 
`${stats.width}x${stats.height}` : null), + ], + ['FPS', stats.source_fps], + ['Video Bitrate', stats.video_bitrate && `${stats.video_bitrate} kb/s`], + ['Audio Codec', stats.audio_codec], + ['Audio Channels', stats.audio_channels], + ['Sample Rate', stats.sample_rate && `${stats.sample_rate} Hz`], + ['Audio Bitrate', stats.audio_bitrate && `${stats.audio_bitrate} kb/s`], + ].filter(([, v]) => v !== null && v !== undefined && v !== ''); + + // Rating (if available) + const rating = + customProps.rating || + customProps.rating_value || + (program && program.custom_properties && program.custom_properties.rating); + const ratingSystem = customProps.rating_system || 'MPAA'; + + const fileUrl = customProps.file_url || customProps.output_file_url; + const canWatchRecording = + (customProps.status === 'completed' || + customProps.status === 'interrupted') && + Boolean(fileUrl); + + // Prefix in dev (Vite) if needed + let resolvedPosterUrl = posterUrl; + if ( + typeof import.meta !== 'undefined' && + import.meta.env && + import.meta.env.DEV + ) { + if (resolvedPosterUrl && resolvedPosterUrl.startsWith('/')) { + resolvedPosterUrl = `${window.location.protocol}//${window.location.hostname}:5656${resolvedPosterUrl}`; + } + } + + const isSeriesGroup = Boolean( + safeRecording._group_count && safeRecording._group_count > 1 + ); + const upcomingEpisodes = React.useMemo(() => { + if (!isSeriesGroup) return []; + const arr = Array.isArray(allRecordings) + ? allRecordings + : Object.values(allRecordings || {}); + const tvid = program.tvg_id || ''; + const titleKey = (program.title || '').toLowerCase(); + const filtered = arr.filter((r) => { + const cp = r.custom_properties || {}; + const pr = cp.program || {}; + if ((pr.tvg_id || '') !== tvid) return false; + if ((pr.title || '').toLowerCase() !== titleKey) return false; + const st = toUserTime(r.start_time); + return st.isAfter(userNow()); + }); + // Deduplicate by program.id if present, else by time+title + const seen = new Set(); + const deduped = []; + for (const r of filtered) { + const cp = r.custom_properties || {}; + const pr = cp.program || {}; + // Prefer season/episode or onscreen code; else fall back to sub_title; else program id/slot + const season = cp.season ?? pr?.custom_properties?.season; + const episode = cp.episode ?? pr?.custom_properties?.episode; + const onscreen = + cp.onscreen_episode ?? pr?.custom_properties?.onscreen_episode; + let key = null; + if (season != null && episode != null) key = `se:${season}:${episode}`; + else if (onscreen) key = `onscreen:${String(onscreen).toLowerCase()}`; + else if (pr.sub_title) key = `sub:${(pr.sub_title || '').toLowerCase()}`; + else if (pr.id != null) key = `id:${pr.id}`; + else + key = `slot:${r.channel}|${r.start_time}|${r.end_time}|${pr.title || ''}`; + if (seen.has(key)) continue; + seen.add(key); + deduped.push(r); + } + return deduped.sort( + (a, b) => toUserTime(a.start_time) - toUserTime(b.start_time) + ); + }, [ + allRecordings, + isSeriesGroup, + program.tvg_id, + program.title, + toUserTime, + userNow, + ]); + + if (!recording) return null; + + const EpisodeRow = ({ rec }) => { + const cp = rec.custom_properties || {}; + const pr = cp.program || {}; + const start = toUserTime(rec.start_time); + const end = toUserTime(rec.end_time); + const season = cp.season ?? pr?.custom_properties?.season; + const episode = cp.episode ?? pr?.custom_properties?.episode; + const onscreen = + cp.onscreen_episode ?? pr?.custom_properties?.onscreen_episode; + const se = + season && episode + ? 
`S${String(season).padStart(2, '0')}E${String(episode).padStart(2, '0')}` + : onscreen || null; + const posterLogoId = cp.poster_logo_id; + let purl = posterLogoId + ? `/api/channels/logos/${posterLogoId}/cache/` + : cp.poster_url || posterUrl || '/logo.png'; + if ( + typeof import.meta !== 'undefined' && + import.meta.env && + import.meta.env.DEV && + purl && + purl.startsWith('/') + ) { + purl = `${window.location.protocol}//${window.location.hostname}:5656${purl}`; + } + const onRemove = async (e) => { + e?.stopPropagation?.(); + try { + await API.deleteRecording(rec.id); + } catch (error) { + console.error('Failed to delete upcoming recording', error); + } + try { + await useChannelsStore.getState().fetchRecordings(); + } catch (error) { + console.error('Failed to refresh recordings after delete', error); + } + }; + return ( + { + setChildRec(rec); + setChildOpen(true); + }} + > + + {pr.title + + + + {pr.sub_title || pr.title} + + {se && ( + + {se} + + )} + + + {start.format(`${dateformat}, YYYY ${timeformat}`)} – {end.format(timeformat)} + + + + + + + + ); + }; + + return ( + + {isSeriesGroup ? ( + + {upcomingEpisodes.length === 0 && ( + + No upcoming episodes found + + )} + {upcomingEpisodes.map((ep) => ( + + ))} + {childOpen && childRec && ( + setChildOpen(false)} + recording={childRec} + channel={channelMap[childRec.channel]} + posterUrl={ + (childRec.custom_properties?.poster_logo_id + ? `/api/channels/logos/${childRec.custom_properties.poster_logo_id}/cache/` + : childRec.custom_properties?.poster_url || + channelMap[childRec.channel]?.logo?.cache_url) || + '/logo.png' + } + env_mode={env_mode} + onWatchLive={() => { + const rec = childRec; + const now = userNow(); + const s = toUserTime(rec.start_time); + const e = toUserTime(rec.end_time); + if (now.isAfter(s) && now.isBefore(e)) { + const ch = channelMap[rec.channel]; + if (!ch) return; + let url = `/proxy/ts/stream/${ch.uuid}`; + if (env_mode === 'dev') { + url = `${window.location.protocol}//${window.location.hostname}:5656${url}`; + } + useVideoStore.getState().showVideo(url, 'live'); + } + }} + onWatchRecording={() => { + let fileUrl = + childRec.custom_properties?.file_url || + childRec.custom_properties?.output_file_url; + if (!fileUrl) return; + if (env_mode === 'dev' && fileUrl.startsWith('/')) { + fileUrl = `${window.location.protocol}//${window.location.hostname}:5656${fileUrl}`; + } + useVideoStore.getState().showVideo(fileUrl, 'vod', { + name: + childRec.custom_properties?.program?.title || 'Recording', + logo: { + url: + (childRec.custom_properties?.poster_logo_id + ? `/api/channels/logos/${childRec.custom_properties.poster_logo_id}/cache/` + : channelMap[childRec.channel]?.logo?.cache_url) || + '/logo.png', + }, + }); + }} + /> + )} + + ) : ( + + {recordingName} + + + + {channel ? 
`${channel.channel_number} • ${channel.name}` : '—'} + + + {onWatchLive && ( + + )} + {onWatchRecording && ( + + )} + {onEdit && start.isAfter(userNow()) && ( + + )} + {customProps.status === 'completed' && + (!customProps?.comskip || + customProps?.comskip?.status !== 'completed') && ( + + )} + + + + {start.format(`${dateformat}, YYYY ${timeformat}`)} – {end.format(timeformat)} + + {rating && ( + + + {rating} + + + )} + {description && ( + + {description} + + )} + {statRows.length > 0 && ( + + + Stream Stats + + {statRows.map(([k, v]) => ( + + + {k} + + {v} + + ))} + + )} + + + )} + + ); +}; \ No newline at end of file diff --git a/frontend/src/components/forms/RecurringRuleModal.jsx b/frontend/src/components/forms/RecurringRuleModal.jsx new file mode 100644 index 00000000..590d4641 --- /dev/null +++ b/frontend/src/components/forms/RecurringRuleModal.jsx @@ -0,0 +1,396 @@ +import useChannelsStore from '../../store/channels.jsx'; +import { + parseDate, + RECURRING_DAY_OPTIONS, + toTimeString, + useDateTimeFormat, + useTimeHelpers, +} from '../../utils/dateTimeUtils.js'; +import React, { useEffect, useMemo, useState } from 'react'; +import { useForm } from '@mantine/form'; +import dayjs from 'dayjs'; +import API from '../../api.js'; +import { notifications } from '@mantine/notifications'; +import { Badge, Button, Card, Group, Modal, MultiSelect, Select, Stack, Switch, Text, TextInput } from '@mantine/core'; +import { DatePickerInput, TimeInput } from '@mantine/dates'; + +export const RecurringRuleModal = ({ opened, onClose, ruleId, onEditOccurrence }) => { + const channels = useChannelsStore((s) => s.channels); + const recurringRules = useChannelsStore((s) => s.recurringRules); + const fetchRecurringRules = useChannelsStore((s) => s.fetchRecurringRules); + const fetchRecordings = useChannelsStore((s) => s.fetchRecordings); + const recordings = useChannelsStore((s) => s.recordings); + const { toUserTime, userNow } = useTimeHelpers(); + const [timeformat, dateformat] = useDateTimeFormat(); + + const [saving, setSaving] = useState(false); + const [deleting, setDeleting] = useState(false); + const [busyOccurrence, setBusyOccurrence] = useState(null); + + const rule = recurringRules.find((r) => r.id === ruleId); + + const channelOptions = useMemo(() => { + const list = Object.values(channels || {}); + list.sort((a, b) => { + const aNum = Number(a.channel_number) || 0; + const bNum = Number(b.channel_number) || 0; + if (aNum === bNum) { + return (a.name || '').localeCompare(b.name || ''); + } + return aNum - bNum; + }); + return list.map((item) => ({ + value: `${item.id}`, + label: item.name || `Channel ${item.id}`, + })); + }, [channels]); + + const form = useForm({ + mode: 'controlled', + initialValues: { + channel_id: '', + days_of_week: [], + rule_name: '', + start_time: dayjs().startOf('hour').format('HH:mm'), + end_time: dayjs().startOf('hour').add(1, 'hour').format('HH:mm'), + start_date: dayjs().toDate(), + end_date: dayjs().toDate(), + enabled: true, + }, + validate: { + channel_id: (value) => (value ? null : 'Select a channel'), + days_of_week: (value) => + value && value.length ? 
null : 'Pick at least one day', + end_time: (value, values) => { + if (!value) return 'Select an end time'; + const startValue = dayjs( + values.start_time, + ['HH:mm', 'hh:mm A', 'h:mm A'], + true + ); + const endValue = dayjs(value, ['HH:mm', 'hh:mm A', 'h:mm A'], true); + if ( + startValue.isValid() && + endValue.isValid() && + endValue.diff(startValue, 'minute') === 0 + ) { + return 'End time must differ from start time'; + } + return null; + }, + end_date: (value, values) => { + const endDate = dayjs(value); + const startDate = dayjs(values.start_date); + if (!value) return 'Select an end date'; + if (startDate.isValid() && endDate.isBefore(startDate, 'day')) { + return 'End date cannot be before start date'; + } + return null; + }, + }, + }); + + useEffect(() => { + if (opened && rule) { + form.setValues({ + channel_id: `${rule.channel}`, + days_of_week: (rule.days_of_week || []).map((d) => String(d)), + rule_name: rule.name || '', + start_time: toTimeString(rule.start_time), + end_time: toTimeString(rule.end_time), + start_date: parseDate(rule.start_date) || dayjs().toDate(), + end_date: parseDate(rule.end_date), + enabled: Boolean(rule.enabled), + }); + } else { + form.reset(); + } + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [opened, ruleId, rule]); + + const upcomingOccurrences = useMemo(() => { + const list = Array.isArray(recordings) + ? recordings + : Object.values(recordings || {}); + const now = userNow(); + return list + .filter( + (rec) => + rec?.custom_properties?.rule?.id === ruleId && + toUserTime(rec.start_time).isAfter(now) + ) + .sort( + (a, b) => + toUserTime(a.start_time).valueOf() - + toUserTime(b.start_time).valueOf() + ); + }, [recordings, ruleId, toUserTime, userNow]); + + const handleSave = async (values) => { + if (!rule) return; + setSaving(true); + try { + await API.updateRecurringRule(ruleId, { + channel: values.channel_id, + days_of_week: (values.days_of_week || []).map((d) => Number(d)), + start_time: toTimeString(values.start_time), + end_time: toTimeString(values.end_time), + start_date: values.start_date + ? dayjs(values.start_date).format('YYYY-MM-DD') + : null, + end_date: values.end_date + ? dayjs(values.end_date).format('YYYY-MM-DD') + : null, + name: values.rule_name?.trim() || '', + enabled: Boolean(values.enabled), + }); + await Promise.all([fetchRecurringRules(), fetchRecordings()]); + notifications.show({ + title: 'Recurring rule updated', + message: 'Schedule adjustments saved', + color: 'green', + autoClose: 2500, + }); + onClose(); + } catch (error) { + console.error('Failed to update recurring rule', error); + } finally { + setSaving(false); + } + }; + + const handleDelete = async () => { + if (!rule) return; + setDeleting(true); + try { + await API.deleteRecurringRule(ruleId); + await Promise.all([fetchRecurringRules(), fetchRecordings()]); + notifications.show({ + title: 'Recurring rule removed', + message: 'All future occurrences were cancelled', + color: 'red', + autoClose: 2500, + }); + onClose(); + } catch (error) { + console.error('Failed to delete recurring rule', error); + } finally { + setDeleting(false); + } + }; + + const handleToggleEnabled = async (checked) => { + if (!rule) return; + setSaving(true); + try { + await API.updateRecurringRule(ruleId, { enabled: checked }); + await Promise.all([fetchRecurringRules(), fetchRecordings()]); + notifications.show({ + title: checked ? 'Recurring rule enabled' : 'Recurring rule paused', + message: checked + ? 
'Future occurrences will resume' + : 'Upcoming occurrences were removed', + color: checked ? 'green' : 'yellow', + autoClose: 2500, + }); + } catch (error) { + console.error('Failed to toggle recurring rule', error); + form.setFieldValue('enabled', !checked); + } finally { + setSaving(false); + } + }; + + const handleCancelOccurrence = async (occurrence) => { + setBusyOccurrence(occurrence.id); + try { + await API.deleteRecording(occurrence.id); + await fetchRecordings(); + notifications.show({ + title: 'Occurrence cancelled', + message: 'The selected airing was removed', + color: 'yellow', + autoClose: 2000, + }); + } catch (error) { + console.error('Failed to cancel occurrence', error); + } finally { + setBusyOccurrence(null); + } + }; + + if (!rule) { + return ( + + Recurring rule not found. + + ); + } + + return ( + + + + + {channels?.[rule.channel]?.name || `Channel ${rule.channel}`} + + { + form.setFieldValue('enabled', event.currentTarget.checked); + handleToggleEnabled(event.currentTarget.checked); + }} + label={form.values.enabled ? 'Enabled' : 'Paused'} + disabled={saving} + /> + +
+ + - - ({ - value: String(opt.value), - label: opt.label, - }))} - searchable - clearable - /> - - - form.setFieldValue('start_date', value || dayjs().toDate()) - } - valueFormat="MMM D, YYYY" - /> - form.setFieldValue('end_date', value)} - valueFormat="MMM D, YYYY" - minDate={form.values.start_date || undefined} - /> - - - - form.setFieldValue('start_time', toTimeString(value)) - } - withSeconds={false} - format="12" - amLabel="AM" - pmLabel="PM" - /> - - form.setFieldValue('end_time', toTimeString(value)) - } - withSeconds={false} - format="12" - amLabel="AM" - pmLabel="PM" - /> - - - - - - -
- - - - Upcoming occurrences - - {upcomingOccurrences.length} - - {upcomingOccurrences.length === 0 ? ( - - No future airings currently scheduled. - - ) : ( - - {upcomingOccurrences.map((occ) => { - const occStart = toUserTime(occ.start_time); - const occEnd = toUserTime(occ.end_time); - return ( - - - - - {occStart.format(`${dateformat}, YYYY`)} - - - {occStart.format(timeformat)} – {occEnd.format(timeformat)} - - - - - - - - - ); - })} - - )} - -
-
- ); -}; - -const RecordingCard = ({ recording, onOpenDetails, onOpenRecurring }) => { - const channels = useChannelsStore((s) => s.channels); - const env_mode = useSettingsStore((s) => s.environment.env_mode); - const showVideo = useVideoStore((s) => s.showVideo); - const fetchRecordings = useChannelsStore((s) => s.fetchRecordings); - const { toUserTime, userNow } = useTimeHelpers(); - const [timeformat, dateformat] = useDateTimeFormat(); - - const channel = channels?.[recording.channel]; - - const deleteRecording = (id) => { - // Optimistically remove immediately from UI - try { - useChannelsStore.getState().removeRecording(id); - } catch (error) { - console.error('Failed to optimistically remove recording', error); - } - // Fire-and-forget server delete; websocket will keep others in sync - API.deleteRecording(id).catch(() => { - // On failure, fallback to refetch to restore state - try { - useChannelsStore.getState().fetchRecordings(); - } catch (error) { - console.error('Failed to refresh recordings after delete', error); - } - }); - }; - - const customProps = recording.custom_properties || {}; - const program = customProps.program || {}; - const recordingName = program.title || 'Custom Recording'; - const subTitle = program.sub_title || ''; - const description = program.description || customProps.description || ''; - const isRecurringRule = customProps?.rule?.type === 'recurring'; - - // Poster or channel logo - const posterLogoId = customProps.poster_logo_id; - let posterUrl = posterLogoId - ? `/api/channels/logos/${posterLogoId}/cache/` - : customProps.poster_url || channel?.logo?.cache_url || '/logo.png'; - // Prefix API host in dev if using a relative path - if (env_mode === 'dev' && posterUrl && posterUrl.startsWith('/')) { - posterUrl = `${window.location.protocol}//${window.location.hostname}:5656${posterUrl}`; - } - - const start = toUserTime(recording.start_time); - const end = toUserTime(recording.end_time); - const now = userNow(); - const status = customProps.status; - const isTimeActive = now.isAfter(start) && now.isBefore(end); - const isInterrupted = status === 'interrupted'; - const isInProgress = isTimeActive; // Show as recording by time, regardless of status glitches - const isUpcoming = now.isBefore(start); - const isSeriesGroup = Boolean( - recording._group_count && recording._group_count > 1 - ); - // Season/Episode display if present - const season = customProps.season ?? program?.custom_properties?.season; - const episode = customProps.episode ?? program?.custom_properties?.episode; - const onscreen = - customProps.onscreen_episode ?? - program?.custom_properties?.onscreen_episode; - const seLabel = - season && episode - ? 
`S${String(season).padStart(2, '0')}E${String(episode).padStart(2, '0')}` - : onscreen || null; - - const handleWatchLive = () => { - if (!channel) return; - let url = `/proxy/ts/stream/${channel.uuid}`; - if (env_mode === 'dev') { - url = `${window.location.protocol}//${window.location.hostname}:5656${url}`; - } - showVideo(url, 'live'); - }; - - const handleWatchRecording = () => { - // Only enable if backend provides a playable file URL in custom properties - let fileUrl = customProps.file_url || customProps.output_file_url; - if (!fileUrl) return; - if (env_mode === 'dev' && fileUrl.startsWith('/')) { - fileUrl = `${window.location.protocol}//${window.location.hostname}:5656${fileUrl}`; - } - showVideo(fileUrl, 'vod', { - name: recordingName, - logo: { url: posterUrl }, - }); - }; - - const handleRunComskip = async (e) => { - e?.stopPropagation?.(); - try { - await API.runComskip(recording.id); - notifications.show({ - title: 'Removing commercials', - message: 'Queued comskip for this recording', - color: 'blue.5', - autoClose: 2000, - }); - } catch (error) { - console.error('Failed to queue comskip for recording', error); - } - }; - - // Cancel handling for series groups - const [cancelOpen, setCancelOpen] = React.useState(false); - const [busy, setBusy] = React.useState(false); - const handleCancelClick = (e) => { - e.stopPropagation(); - if (isRecurringRule) { - onOpenRecurring?.(recording, true); - return; - } - if (isSeriesGroup) { - setCancelOpen(true); - } else { - deleteRecording(recording.id); - } - }; - - const seriesInfo = (() => { - const cp = customProps || {}; - const pr = cp.program || {}; - return { tvg_id: pr.tvg_id, title: pr.title }; - })(); - - const removeUpcomingOnly = async () => { - try { - setBusy(true); - await API.deleteRecording(recording.id); - } finally { - setBusy(false); - setCancelOpen(false); - try { - await fetchRecordings(); - } catch (error) { - console.error('Failed to refresh recordings', error); - } - } - }; - - const removeSeriesAndRule = async () => { - try { - setBusy(true); - const { tvg_id, title } = seriesInfo; - if (tvg_id) { - try { - await API.bulkRemoveSeriesRecordings({ - tvg_id, - title, - scope: 'title', - }); - } catch (error) { - console.error('Failed to remove series recordings', error); - } - try { - await API.deleteSeriesRule(tvg_id); - } catch (error) { - console.error('Failed to delete series rule', error); - } - } - } finally { - setBusy(false); - setCancelOpen(false); - try { - await fetchRecordings(); - } catch (error) { - console.error( - 'Failed to refresh recordings after series removal', - error - ); - } - } - }; - - const MainCard = ( - { - if (isRecurringRule) { - onOpenRecurring?.(recording, false); - } else { - onOpenDetails?.(recording); - } - }} - > - - - - {isInterrupted - ? 'Interrupted' - : isInProgress - ? 'Recording' - : isUpcoming - ? 'Scheduled' - : 'Completed'} - - {isInterrupted && } - - - - {recordingName} - - {isSeriesGroup && ( - - Series - - )} - {isRecurringRule && ( - - Recurring - - )} - {seLabel && !isSeriesGroup && ( - - {seLabel} - - )} - - - - -
- - e.stopPropagation()} - onClick={handleCancelClick} - > - - - -
-
- - - {recordingName} - - {!isSeriesGroup && subTitle && ( - - - Episode - - - {subTitle} - - - )} - - - Channel - - - {channel ? `${channel.channel_number} • ${channel.name}` : '—'} - - - - - - {isSeriesGroup ? 'Next recording' : 'Time'} - - - {start.format(`${dateformat}, YYYY ${timeformat}`)} – {end.format(timeformat)} - - - - {!isSeriesGroup && description && ( - onOpenDetails?.(recording)} - /> - )} - - {isInterrupted && customProps.interrupted_reason && ( - - {customProps.interrupted_reason} - - )} - - - {isInProgress && ( - - )} - - {!isUpcoming && ( - - - - )} - {!isUpcoming && - customProps?.status === 'completed' && - (!customProps?.comskip || - customProps?.comskip?.status !== 'completed') && ( - - )} - - - - {/* If this card is a grouped upcoming series, show count */} - {recording._group_count > 1 && ( - - Next of {recording._group_count} - - )} -
- ); - if (!isSeriesGroup) return MainCard; - - // Stacked look for series groups: render two shadow layers behind the main card - return ( - - setCancelOpen(false)} - title="Cancel Series" - centered - size="md" - zIndex={9999} - > - - This is a series rule. What would you like to cancel? - - - - - - - - - {MainCard} - - ); -}; +import { + parseDate, + RECURRING_DAY_OPTIONS, + toTimeString, + useDateTimeFormat, + useTimeHelpers, +} from '../utils/dateTimeUtils.js'; +import { RecordingDetailsModal } from '../components/forms/RecordingDetailsModal.jsx'; +import { RecurringRuleModal } from '../components/forms/RecurringRuleModal.jsx'; +import { RecordingCard } from '../components/cards/RecordingCard.jsx'; +import { categorizeRecordings } from '../utils/pages/DVRUtils.js'; const DVRPage = () => { const theme = useMantineTheme(); @@ -1441,86 +116,67 @@ const DVRPage = () => { // Categorize recordings const { inProgress, upcoming, completed } = useMemo(() => { - const inProgress = []; - const upcoming = []; - const completed = []; - const list = Array.isArray(recordings) - ? recordings - : Object.values(recordings || {}); - - // ID-based dedupe guard in case store returns duplicates - const seenIds = new Set(); - for (const rec of list) { - if (rec && rec.id != null) { - const k = String(rec.id); - if (seenIds.has(k)) continue; - seenIds.add(k); - } - const s = toUserTime(rec.start_time); - const e = toUserTime(rec.end_time); - const status = rec.custom_properties?.status; - if (status === 'interrupted' || status === 'completed') { - completed.push(rec); - } else { - if (now.isAfter(s) && now.isBefore(e)) inProgress.push(rec); - else if (now.isBefore(s)) upcoming.push(rec); - else completed.push(rec); - } - } - - // Deduplicate in-progress and upcoming by program id or channel+slot - const dedupeByProgramOrSlot = (arr) => { - const out = []; - const sigs = new Set(); - for (const r of arr) { - const cp = r.custom_properties || {}; - const pr = cp.program || {}; - const sig = - pr?.id != null - ? 
`id:${pr.id}` - : `slot:${r.channel}|${r.start_time}|${r.end_time}|${pr.title || ''}`; - if (sigs.has(sig)) continue; - sigs.add(sig); - out.push(r); - } - return out; - }; - - const inProgressDedup = dedupeByProgramOrSlot(inProgress).sort( - (a, b) => toUserTime(b.start_time) - toUserTime(a.start_time) - ); - - // Group upcoming by series title+tvg_id (keep only next episode) - const grouped = new Map(); - const upcomingDedup = dedupeByProgramOrSlot(upcoming).sort( - (a, b) => toUserTime(a.start_time) - toUserTime(b.start_time) - ); - for (const rec of upcomingDedup) { - const cp = rec.custom_properties || {}; - const prog = cp.program || {}; - const key = `${prog.tvg_id || ''}|${(prog.title || '').toLowerCase()}`; - if (!grouped.has(key)) { - grouped.set(key, { rec, count: 1 }); - } else { - const entry = grouped.get(key); - entry.count += 1; - } - } - const upcomingGrouped = Array.from(grouped.values()).map((e) => { - const item = { ...e.rec }; - item._group_count = e.count; - return item; - }); - completed.sort((a, b) => toUserTime(b.end_time) - toUserTime(a.end_time)); - return { - inProgress: inProgressDedup, - upcoming: upcomingGrouped, - completed, - }; + return categorizeRecordings(recordings, toUserTime, now); }, [recordings, now, toUserTime]); + const RecordingList = (list) => { + return list.map((rec) => ( + + )); + } + + const getOnWatchLive = () => { + return () => { + const rec = detailsRecording; + const now = userNow(); + const s = toUserTime(rec.start_time); + const e = toUserTime(rec.end_time); + if (now.isAfter(s) && now.isBefore(e)) { + // call into child RecordingCard behavior by constructing a URL like there + const channel = channels[rec.channel]; + if (!channel) return; + let url = `/proxy/ts/stream/${channel.uuid}`; + if (useSettingsStore.getState().environment.env_mode === 'dev') { + url = `${window.location.protocol}//${window.location.hostname}:5656${url}`; + } + useVideoStore.getState().showVideo(url, 'live'); + } + }; + } + + const getOnWatchRecording = () => { + return () => { + let fileUrl = + detailsRecording.custom_properties?.file_url || + detailsRecording.custom_properties?.output_file_url; + if (!fileUrl) return; + if ( + useSettingsStore.getState().environment.env_mode === 'dev' && + fileUrl.startsWith('/') + ) { + fileUrl = `${window.location.protocol}//${window.location.hostname}:5656${fileUrl}`; + } + useVideoStore.getState().showVideo(fileUrl, 'vod', { + name: + detailsRecording.custom_properties?.program?.title || + 'Recording', + logo: { + url: + (detailsRecording.custom_properties?.poster_logo_id + ? `/api/channels/logos/${detailsRecording.custom_properties.poster_logo_id}/cache/` + : channels[detailsRecording.channel]?.logo?.cache_url) || + '/logo.png', + }, + }); + }; + } return ( - +
-          )}
-          <NumberInput
-            value={schedule.retention_count}
-            onChange={(value) =>
-              handleScheduleChange('retention_count', value || 0)
-            }
-            min={0}
-            disabled={!schedule.enabled}
-          />
-
+          )}

           {/* Timezone info - only show in simple mode */}
From bd6cf287dcb29526f0470e31f80aea1a014384a8 Mon Sep 17 00:00:00 2001
From: Jim McBride
Date: Sat, 13 Dec 2025 19:02:36 -0600
Subject: [PATCH 069/220] Clean up orphaned CrontabSchedule records

- Add _cleanup_orphaned_crontab() helper function
- Delete old crontab when disabling backup schedule
- Delete old crontab when schedule settings change
- Prevents database bloat from unused CrontabSchedule records

---
 apps/backups/scheduler.py | 27 ++++++++++++++++++++++++++-
 1 file changed, 26 insertions(+), 1 deletion(-)

diff --git a/apps/backups/scheduler.py b/apps/backups/scheduler.py
index 426d2c7e..011d63db 100644
--- a/apps/backups/scheduler.py
+++ b/apps/backups/scheduler.py
@@ -107,10 +107,22 @@ def _sync_periodic_task() -> None:

     if not settings["enabled"]:
         # Delete the task if it exists
-        PeriodicTask.objects.filter(name=BACKUP_SCHEDULE_TASK_NAME).delete()
+        task = PeriodicTask.objects.filter(name=BACKUP_SCHEDULE_TASK_NAME).first()
+        if task:
+            old_crontab = task.crontab
+            task.delete()
+            _cleanup_orphaned_crontab(old_crontab)
         logger.info("Backup schedule disabled, removed periodic task")
         return

+    # Get old crontab before creating new one
+    old_crontab = None
+    try:
+        old_task = PeriodicTask.objects.get(name=BACKUP_SCHEDULE_TASK_NAME)
+        old_crontab = old_task.crontab
+    except PeriodicTask.DoesNotExist:
+        pass
+
     # Check if using cron expression (advanced mode)
     if settings["cron_expression"]:
         # Parse cron expression: "minute hour day month weekday"
@@ -169,5 +181,18 @@ def _sync_periodic_task() -> None:
         },
     )

+    # Clean up old crontab if it changed and is orphaned
+    if old_crontab and old_crontab.id != crontab.id:
+        _cleanup_orphaned_crontab(old_crontab)
+
     action = "Created" if created else "Updated"
     logger.info(f"{action} backup schedule: {settings['frequency']} at {settings['time']}")
+
+
+def _cleanup_orphaned_crontab(crontab_schedule):
+    """Delete a CrontabSchedule once no PeriodicTask references it."""
+    if crontab_schedule is None:
+        return
+
+    # CrontabSchedule rows are shared via get_or_create, and PeriodicTask.crontab
+    # cascades on delete, so only drop the schedule when nothing else uses it.
+    if PeriodicTask.objects.filter(crontab=crontab_schedule).exists():
+        return
+
+    logger.debug(f"Cleaning up orphaned CrontabSchedule: {crontab_schedule.id}")
+    crontab_schedule.delete()
From 85390a078c2b40bf592f0d3ecfcd176adc3a5def Mon Sep 17 00:00:00 2001
From: Nick Sandstrom <32273437+nick4810@users.noreply.github.com>
Date: Mon, 15 Dec 2025 07:48:24 -0800
Subject: [PATCH 070/220] Removed unused imports

---
 frontend/src/pages/DVR.jsx | 29 +----------------------------
 1 file changed, 1 insertion(+), 28 deletions(-)

diff --git a/frontend/src/pages/DVR.jsx b/frontend/src/pages/DVR.jsx
index f300899d..81ef300a 100644
--- a/frontend/src/pages/DVR.jsx
+++ b/frontend/src/pages/DVR.jsx
@@ -1,50 +1,23 @@
-import React, { useMemo, useState, useEffect, useCallback } from 'react';
+import React, { useMemo, useState, useEffect } from 'react';
 import {
-  ActionIcon,
   Box,
   Button,
-  Card,
-  Center,
-  Flex,
   Badge,
   Group,
-  Image,
-  Modal,
   SimpleGrid,
   Stack,
   Text,
   Title,
-  Tooltip,
-  Switch,
-  Select,
-  MultiSelect,
-  TextInput,
   useMantineTheme,
 } from '@mantine/core';
 import {
-  AlertTriangle,
   SquarePlus,
-  SquareX,
 } from 'lucide-react';
-import dayjs from 'dayjs';
-import duration from 'dayjs/plugin/duration';
-import relativeTime from 'dayjs/plugin/relativeTime';
-import utc from 'dayjs/plugin/utc';
-import timezone from 'dayjs/plugin/timezone';
 import useChannelsStore from '../store/channels';
 import useSettingsStore from '../store/settings';
-import useLocalStorage from
'../hooks/useLocalStorage'; import useVideoStore from '../store/useVideoStore'; import RecordingForm from '../components/forms/Recording'; -import { notifications } from '@mantine/notifications'; -import API from '../api'; -import { DatePickerInput, TimeInput } from '@mantine/dates'; -import { useForm } from '@mantine/form'; import { - parseDate, - RECURRING_DAY_OPTIONS, - toTimeString, - useDateTimeFormat, useTimeHelpers, } from '../utils/dateTimeUtils.js'; import { RecordingDetailsModal } from '../components/forms/RecordingDetailsModal.jsx'; From 65dbc5498da3f14758c590f1b029a4e52b457edc Mon Sep 17 00:00:00 2001 From: Nick Sandstrom <32273437+nick4810@users.noreply.github.com> Date: Mon, 15 Dec 2025 08:21:00 -0800 Subject: [PATCH 071/220] Fixed handler arrow functions --- frontend/src/pages/DVR.jsx | 82 ++++++++++++++++++-------------------- 1 file changed, 39 insertions(+), 43 deletions(-) diff --git a/frontend/src/pages/DVR.jsx b/frontend/src/pages/DVR.jsx index 81ef300a..bca5e246 100644 --- a/frontend/src/pages/DVR.jsx +++ b/frontend/src/pages/DVR.jsx @@ -103,50 +103,46 @@ const DVRPage = () => { )); } - const getOnWatchLive = () => { - return () => { - const rec = detailsRecording; - const now = userNow(); - const s = toUserTime(rec.start_time); - const e = toUserTime(rec.end_time); - if (now.isAfter(s) && now.isBefore(e)) { - // call into child RecordingCard behavior by constructing a URL like there - const channel = channels[rec.channel]; - if (!channel) return; - let url = `/proxy/ts/stream/${channel.uuid}`; - if (useSettingsStore.getState().environment.env_mode === 'dev') { - url = `${window.location.protocol}//${window.location.hostname}:5656${url}`; - } - useVideoStore.getState().showVideo(url, 'live'); + const handleOnWatchLive = () => { + const rec = detailsRecording; + const now = userNow(); + const s = toUserTime(rec.start_time); + const e = toUserTime(rec.end_time); + if (now.isAfter(s) && now.isBefore(e)) { + // call into child RecordingCard behavior by constructing a URL like there + const channel = channels[rec.channel]; + if (!channel) return; + let url = `/proxy/ts/stream/${channel.uuid}`; + if (useSettingsStore.getState().environment.env_mode === 'dev') { + url = `${window.location.protocol}//${window.location.hostname}:5656${url}`; } - }; + useVideoStore.getState().showVideo(url, 'live'); + } } - const getOnWatchRecording = () => { - return () => { - let fileUrl = - detailsRecording.custom_properties?.file_url || - detailsRecording.custom_properties?.output_file_url; - if (!fileUrl) return; - if ( - useSettingsStore.getState().environment.env_mode === 'dev' && - fileUrl.startsWith('/') - ) { - fileUrl = `${window.location.protocol}//${window.location.hostname}:5656${fileUrl}`; - } - useVideoStore.getState().showVideo(fileUrl, 'vod', { - name: - detailsRecording.custom_properties?.program?.title || - 'Recording', - logo: { - url: - (detailsRecording.custom_properties?.poster_logo_id - ? 
`/api/channels/logos/${detailsRecording.custom_properties.poster_logo_id}/cache/` - : channels[detailsRecording.channel]?.logo?.cache_url) || - '/logo.png', - }, - }); - }; + const handleOnWatchRecording = () => { + let fileUrl = + detailsRecording.custom_properties?.file_url || + detailsRecording.custom_properties?.output_file_url; + if (!fileUrl) return; + if ( + useSettingsStore.getState().environment.env_mode === 'dev' && + fileUrl.startsWith('/') + ) { + fileUrl = `${window.location.protocol}//${window.location.hostname}:5656${fileUrl}`; + } + useVideoStore.getState().showVideo(fileUrl, 'vod', { + name: + detailsRecording.custom_properties?.program?.title || + 'Recording', + logo: { + url: + (detailsRecording.custom_properties?.poster_logo_id + ? `/api/channels/logos/${detailsRecording.custom_properties.poster_logo_id}/cache/` + : channels[detailsRecording.channel]?.logo?.cache_url) || + '/logo.png', + }, + }); } return ( @@ -269,8 +265,8 @@ const DVRPage = () => { '/logo.png' } env_mode={useSettingsStore.getState().environment.env_mode} - onWatchLive={getOnWatchLive()} - onWatchRecording={getOnWatchRecording()} + onWatchLive={handleOnWatchLive} + onWatchRecording={handleOnWatchRecording} onEdit={(rec) => { setEditRecording(rec); closeDetails(); From 3bf8ddf376d2f81b2b8d871ccde874784f26c68d Mon Sep 17 00:00:00 2001 From: Nick Sandstrom <32273437+nick4810@users.noreply.github.com> Date: Mon, 15 Dec 2025 09:19:54 -0800 Subject: [PATCH 072/220] Removed unused imports --- frontend/src/utils/dateTimeUtils.js | 36 +---------------------------- 1 file changed, 1 insertion(+), 35 deletions(-) diff --git a/frontend/src/utils/dateTimeUtils.js b/frontend/src/utils/dateTimeUtils.js index cd497f59..b7490f88 100644 --- a/frontend/src/utils/dateTimeUtils.js +++ b/frontend/src/utils/dateTimeUtils.js @@ -1,45 +1,11 @@ -import React, { useMemo, useState, useEffect, useCallback } from 'react'; -import { - ActionIcon, - Box, - Button, - Card, - Center, - Flex, - Badge, - Group, - Image, - Modal, - SimpleGrid, - Stack, - Text, - Title, - Tooltip, - Switch, - Select, - MultiSelect, - TextInput, - useMantineTheme, -} from '@mantine/core'; -import { - AlertTriangle, - SquarePlus, - SquareX, -} from 'lucide-react'; +import { useEffect, useCallback } from 'react'; import dayjs from 'dayjs'; import duration from 'dayjs/plugin/duration'; import relativeTime from 'dayjs/plugin/relativeTime'; import utc from 'dayjs/plugin/utc'; import timezone from 'dayjs/plugin/timezone'; -import useChannelsStore from '../store/channels'; import useSettingsStore from '../store/settings'; import useLocalStorage from '../hooks/useLocalStorage'; -import useVideoStore from '../store/useVideoStore'; -import RecordingForm from '../components/forms/Recording'; -import { notifications } from '@mantine/notifications'; -import API from '../api'; -import { DatePickerInput, TimeInput } from '@mantine/dates'; -import { useForm } from '@mantine/form'; dayjs.extend(duration); dayjs.extend(relativeTime); From 1b27472c8135b52be41d480d3dc8fafe23f878f6 Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Mon, 15 Dec 2025 16:22:38 -0600 Subject: [PATCH 073/220] changelog: Add automated configuration backup/restore system to changelog --- CHANGELOG.md | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 7766d7da..151018cb 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -7,6 +7,10 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 ## [Unreleased] +### Added + +- Automated configuration 
backup/restore system with scheduled backups, retention policies, and async task processing - Thanks [@stlalpha](https://github.com/stlalpha) (Closes #153) + ### Changed - Removed unreachable code path in m3u output - Thanks [@DawtCom](https://github.com/DawtCom) From 41642cd479f0d661f93a43c381b50ef6409b0cbc Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Mon, 15 Dec 2025 16:54:12 -0600 Subject: [PATCH 074/220] Improve orphaned CrontabSchedule cleanup logic to avoid deleting in-use schedules --- apps/backups/scheduler.py | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/apps/backups/scheduler.py b/apps/backups/scheduler.py index 011d63db..2dd9e828 100644 --- a/apps/backups/scheduler.py +++ b/apps/backups/scheduler.py @@ -190,9 +190,14 @@ def _sync_periodic_task() -> None: def _cleanup_orphaned_crontab(crontab_schedule): - """Delete old CrontabSchedule from backup task.""" + """Delete old CrontabSchedule if no other tasks are using it.""" if crontab_schedule is None: return - logger.debug(f"Cleaning up old CrontabSchedule: {crontab_schedule.id}") + # Check if any other tasks are using this crontab + if PeriodicTask.objects.filter(crontab=crontab_schedule).exists(): + logger.debug(f"CrontabSchedule {crontab_schedule.id} still in use, not deleting") + return + + logger.debug(f"Cleaning up orphaned CrontabSchedule: {crontab_schedule.id}") crontab_schedule.delete() From 3b7f6dadaab23e41c03d3f804fcf1f4bb2b27c0b Mon Sep 17 00:00:00 2001 From: Seth Van Niekerk Date: Fri, 12 Dec 2025 14:29:58 -0500 Subject: [PATCH 075/220] Add VLC packages and environment variables to DispatcharrBase --- docker/DispatcharrBase | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/docker/DispatcharrBase b/docker/DispatcharrBase index d37d8958..786ead1a 100644 --- a/docker/DispatcharrBase +++ b/docker/DispatcharrBase @@ -15,7 +15,8 @@ RUN apt-get update && apt-get install --no-install-recommends -y \ python-is-python3 python3-pip \ libpcre3 libpcre3-dev libpq-dev procps \ build-essential gcc pciutils \ - nginx streamlink comskip\ + nginx streamlink comskip \ + vlc-bin vlc-plugin-base vlc-plugin-access-extra \ && apt-get clean && rm -rf /var/lib/apt/lists/* # --- Create Python virtual environment --- @@ -25,6 +26,11 @@ RUN python3.13 -m venv $VIRTUAL_ENV && $VIRTUAL_ENV/bin/pip install --upgrade pi COPY requirements.txt /tmp/requirements.txt RUN $VIRTUAL_ENV/bin/pip install --no-cache-dir -r /tmp/requirements.txt && rm /tmp/requirements.txt +# --- Configure VLC for headless operation --- +# Set VLC environment variables for headless operation +ENV PULSE_SERVER=none \ + DBUS_SESSION_BUS_ADDRESS=/dev/null + # --- Set up Redis 7.x --- RUN curl -fsSL https://packages.redis.io/gpg | gpg --dearmor -o /usr/share/keyrings/redis-archive-keyring.gpg && \ echo "deb [signed-by=/usr/share/keyrings/redis-archive-keyring.gpg] https://packages.redis.io/deb $(lsb_release -cs) main" | \ From ee7a39fe21f11a02b45b98e66993645675451976 Mon Sep 17 00:00:00 2001 From: Seth Van Niekerk Date: Fri, 12 Dec 2025 14:29:58 -0500 Subject: [PATCH 076/220] Add VLC stream profile migration with correct parameters --- .../migrations/0019_add_vlc_stream_profile.py | 42 +++++++++++++++++++ 1 file changed, 42 insertions(+) create mode 100644 core/migrations/0019_add_vlc_stream_profile.py diff --git a/core/migrations/0019_add_vlc_stream_profile.py b/core/migrations/0019_add_vlc_stream_profile.py new file mode 100644 index 00000000..5d794647 --- /dev/null +++ b/core/migrations/0019_add_vlc_stream_profile.py @@ 
-0,0 +1,42 @@ +# Generated migration to add VLC stream profile + +from django.db import migrations + +def add_vlc_profile(apps, schema_editor): + StreamProfile = apps.get_model("core", "StreamProfile") + UserAgent = apps.get_model("core", "UserAgent") + + # Check if VLC profile already exists + if not StreamProfile.objects.filter(name="vlc").exists(): + # Get the TiviMate user agent (should be pk=1) + try: + tivimate_ua = UserAgent.objects.get(pk=1) + except UserAgent.DoesNotExist: + # Fallback: get first available user agent + tivimate_ua = UserAgent.objects.first() + if not tivimate_ua: + # No user agents exist, skip creating profile + return + + StreamProfile.objects.create( + name="vlc", + command="cvlc", + parameters="-vv -I dummy --no-video-title-show --http-user-agent {userAgent} {streamUrl} --sout #standard{access=file,mux=ts,dst=-}", + is_active=True, + user_agent=tivimate_ua, + locked=True, # Make it read-only like ffmpeg/streamlink + ) + +def remove_vlc_profile(apps, schema_editor): + StreamProfile = apps.get_model("core", "StreamProfile") + StreamProfile.objects.filter(name="vlc").delete() + +class Migration(migrations.Migration): + + dependencies = [ + ('core', '0018_alter_systemevent_event_type'), + ] + + operations = [ + migrations.RunPython(add_vlc_profile, remove_vlc_profile), + ] From 1ad8d6cdfdb24f886fcd3ac2d7ebb4739e572ede Mon Sep 17 00:00:00 2001 From: Seth Van Niekerk Date: Fri, 12 Dec 2025 14:29:59 -0500 Subject: [PATCH 077/220] Add VLC profile to fixtures with correct parameter order --- core/fixtures/initial_data.json | 11 +++++++++++ fixtures.json | 10 ++++++++++ 2 files changed, 21 insertions(+) diff --git a/core/fixtures/initial_data.json b/core/fixtures/initial_data.json index c037fa78..49ecf080 100644 --- a/core/fixtures/initial_data.json +++ b/core/fixtures/initial_data.json @@ -40,5 +40,16 @@ "is_active": true, "user_agent": "1" } + }, + { + "model": "core.streamprofile", + "pk": 3, + "fields": { + "name": "vlc", + "command": "cvlc", + "parameters": "-vv -I dummy --no-video-title-show --http-user-agent {userAgent} {streamUrl} --sout #standard{access=file,mux=ts,dst=-}", + "is_active": true, + "user_agent": "1" + } } ] diff --git a/fixtures.json b/fixtures.json index 2d42f84e..c0f5e0ea 100644 --- a/fixtures.json +++ b/fixtures.json @@ -53,6 +53,16 @@ "user_agent": "1" } }, + { + "model": "core.streamprofile", + "fields": { + "profile_name": "vlc", + "command": "cvlc", + "parameters": "-vv -I dummy --no-video-title-show --http-user-agent {userAgent} {streamUrl} --sout #standard{access=file,mux=ts,dst=-}", + "is_active": true, + "user_agent": "1" + } + }, { "model": "core.coresettings", "fields": { From 88c10e85c3abe2421c1fc7d1d6d20fcb63820aa1 Mon Sep 17 00:00:00 2001 From: Seth Van Niekerk Date: Fri, 12 Dec 2025 14:29:59 -0500 Subject: [PATCH 078/220] Add VLC TS demux output detection for codec parsing --- apps/proxy/ts_proxy/stream_manager.py | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/apps/proxy/ts_proxy/stream_manager.py b/apps/proxy/ts_proxy/stream_manager.py index bbeb4bb7..9b0d9ada 100644 --- a/apps/proxy/ts_proxy/stream_manager.py +++ b/apps/proxy/ts_proxy/stream_manager.py @@ -645,6 +645,16 @@ class StreamManager: if content_lower.startswith('output #') or 'encoder' in content_lower: self.ffmpeg_input_phase = False + # Parse VLC-specific output - look for TS demux type info for codec detection + if 'ts demux debug' in content_lower and 'type=' in content_lower and ('video' in content_lower or 'audio' in content_lower): + from 
.services.channel_service import ChannelService + ChannelService.parse_and_store_stream_info(self.channel_id, content, "vlc", self.current_stream_id) + + # Parse streamlink-specific output + if 'opening stream:' in content_lower or 'available streams:' in content_lower: + from .services.channel_service import ChannelService + ChannelService.parse_and_store_stream_info(self.channel_id, content, "streamlink", self.current_stream_id) + # Only parse stream info if we're still in the input phase if ("stream #" in content_lower and ("video:" in content_lower or "audio:" in content_lower) and From bc72b2d4a357134d69139b70ffd1d2ed053344ed Mon Sep 17 00:00:00 2001 From: Seth Van Niekerk Date: Fri, 12 Dec 2025 14:29:59 -0500 Subject: [PATCH 079/220] Add VLC and streamlink codec parsing support --- .../ts_proxy/services/channel_service.py | 57 ++++++++++++++++++- 1 file changed, 54 insertions(+), 3 deletions(-) diff --git a/apps/proxy/ts_proxy/services/channel_service.py b/apps/proxy/ts_proxy/services/channel_service.py index 6484cd3f..cea0d957 100644 --- a/apps/proxy/ts_proxy/services/channel_service.py +++ b/apps/proxy/ts_proxy/services/channel_service.py @@ -419,12 +419,12 @@ class ChannelService: @staticmethod def parse_and_store_stream_info(channel_id, stream_info_line, stream_type="video", stream_id=None): - """Parse FFmpeg stream info line and store in Redis metadata and database""" + """Parse FFmpeg/VLC/streamlink stream info line and store in Redis metadata and database""" try: if stream_type == "input": # Example lines: - # Input #0, mpegts, from 'http://example.com/stream.ts': - # Input #0, hls, from 'http://example.com/stream.m3u8': + # FFmpeg: Input #0, mpegts, from 'http://example.com/stream.ts': + # FFmpeg: Input #0, hls, from 'http://example.com/stream.m3u8': # Extract input format (e.g., "mpegts", "hls", "flv", etc.) 
input_match = re.search(r'Input #\d+,\s*([^,]+)', stream_info_line) @@ -439,6 +439,57 @@ class ChannelService: logger.debug(f"Input format info - Format: {input_format} for channel {channel_id}") + elif stream_type == "vlc": + # VLC parsing - extract codecs from TS demux output (no resolution/fps in stream-copy mode) + lower = stream_info_line.lower() + + # Video codec detection + video_codec_map = { + ('avc', 'h.264', 'type=0x1b'): "h264", + ('hevc', 'h.265', 'type=0x24'): "hevc", + ('mpeg-2', 'type=0x02'): "mpeg2video", + ('mpeg-4', 'type=0x10'): "mpeg4" + } + for patterns, codec in video_codec_map.items(): + if any(p in lower for p in patterns): + ChannelService._update_stream_info_in_redis(channel_id, codec, None, None, None, None, None, None, None, None, None, None, None) + if stream_id: + ChannelService._update_stream_stats_in_db(stream_id, video_codec=codec) + break + + # Audio codec detection + audio_codec_map = { + ('type=0xf', 'adts'): "aac", + ('type=0x03', 'type=0x04'): "mp3", + ('type=0x06', 'type=0x81'): "ac3", + ('type=0x0b', 'lpcm'): "pcm" + } + for patterns, codec in audio_codec_map.items(): + if any(p in lower for p in patterns): + ChannelService._update_stream_info_in_redis(channel_id, None, None, None, None, None, None, None, codec, None, None, None, None) + if stream_id: + ChannelService._update_stream_stats_in_db(stream_id, audio_codec=codec) + break + + elif stream_type == "streamlink": + # Streamlink parsing - extract quality/resolution + quality_match = re.search(r'(\d+p|\d+x\d+)', stream_info_line) + if quality_match: + quality = quality_match.group(1) + if 'x' in quality: + resolution = quality + width, height = map(int, quality.split('x')) + else: + resolutions = { + '2160p': ('3840x2160', 3840, 2160), '1080p': ('1920x1080', 1920, 1080), + '720p': ('1280x720', 1280, 720), '480p': ('854x480', 854, 480), '360p': ('640x360', 640, 360) + } + resolution, width, height = resolutions.get(quality, ('1920x1080', 1920, 1080)) + + ChannelService._update_stream_info_in_redis(channel_id, "h264", resolution, width, height, None, "yuv420p", None, None, None, None, None, None) + if stream_id: + ChannelService._update_stream_stats_in_db(stream_id, video_codec="h264", resolution=resolution, pixel_format="yuv420p") + elif stream_type == "video": # Example line: # Stream #0:0: Video: h264 (Main), yuv420p(tv, progressive), 1280x720 [SAR 1:1 DAR 16:9], q=2-31, 2000 kb/s, 29.97 fps, 90k tbn From 0ba22df233273ba2eb342b2c8f3db096ebc219fd Mon Sep 17 00:00:00 2001 From: Nick Sandstrom <32273437+nick4810@users.noreply.github.com> Date: Tue, 16 Dec 2025 11:53:26 -0800 Subject: [PATCH 080/220] Updated Component syntax --- frontend/src/pages/DVR.jsx | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/frontend/src/pages/DVR.jsx b/frontend/src/pages/DVR.jsx index bca5e246..1f3b98f5 100644 --- a/frontend/src/pages/DVR.jsx +++ b/frontend/src/pages/DVR.jsx @@ -175,7 +175,7 @@ const DVRPage = () => { { maxWidth: '36rem', cols: 1 }, ]} > - {RecordingList(inProgress)} + {} {inProgress.length === 0 && ( Nothing recording right now. @@ -197,7 +197,7 @@ const DVRPage = () => { { maxWidth: '36rem', cols: 1 }, ]} > - {RecordingList(upcoming)} + {} {upcoming.length === 0 && ( No upcoming recordings. @@ -219,7 +219,7 @@ const DVRPage = () => { { maxWidth: '36rem', cols: 1 }, ]} > - {RecordingList(completed)} + {} {completed.length === 0 && ( No completed recordings yet. 
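Patch 080 above switches the three DVR grid call sites from invoking `RecordingList(...)` as a plain function to rendering it as a JSX element with a `list` prop (`<RecordingList list={inProgress} />`, and likewise for `upcoming` and `completed`). A minimal sketch of the resulting pattern — the `RecordingCard` child and its import path appear elsewhere in this series, but the exact props here are illustrative only:

import React from 'react';
import { RecordingCard } from '../components/cards/RecordingCard.jsx';

// Rendered as <RecordingList list={inProgress} />. As a real component,
// React reconciles it by type and hooks could safely be added later.
// The destructured { list } prop signature is the piece that patch 086
// below fixes up to match these call sites.
const RecordingList = ({ list }) =>
  list.map((rec) => <RecordingCard key={rec.id} recording={rec} />);
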
From ffd8d9fe6b0ad091355f7ac9434778f55b0c117b Mon Sep 17 00:00:00 2001 From: Nick Sandstrom <32273437+nick4810@users.noreply.github.com> Date: Tue, 16 Dec 2025 11:53:45 -0800 Subject: [PATCH 081/220] Using util for getPosterUrl --- frontend/src/pages/DVR.jsx | 23 +++++++++++------------ 1 file changed, 11 insertions(+), 12 deletions(-) diff --git a/frontend/src/pages/DVR.jsx b/frontend/src/pages/DVR.jsx index 1f3b98f5..023457e3 100644 --- a/frontend/src/pages/DVR.jsx +++ b/frontend/src/pages/DVR.jsx @@ -24,6 +24,7 @@ import { RecordingDetailsModal } from '../components/forms/RecordingDetailsModal import { RecurringRuleModal } from '../components/forms/RecurringRuleModal.jsx'; import { RecordingCard } from '../components/cards/RecordingCard.jsx'; import { categorizeRecordings } from '../utils/pages/DVRUtils.js'; +import { getPosterUrl } from '../utils/cards/RecordingCardUtils.js'; const DVRPage = () => { const theme = useMantineTheme(); @@ -136,11 +137,11 @@ const DVRPage = () => { detailsRecording.custom_properties?.program?.title || 'Recording', logo: { - url: - (detailsRecording.custom_properties?.poster_logo_id - ? `/api/channels/logos/${detailsRecording.custom_properties.poster_logo_id}/cache/` - : channels[detailsRecording.channel]?.logo?.cache_url) || - '/logo.png', + url: getPosterUrl( + detailsRecording.custom_properties?.poster_logo_id, + undefined, + channels[detailsRecording.channel]?.logo?.cache_url + ) }, }); } @@ -257,13 +258,11 @@ const DVRPage = () => { onClose={closeDetails} recording={detailsRecording} channel={channels[detailsRecording.channel]} - posterUrl={ - (detailsRecording.custom_properties?.poster_logo_id - ? `/api/channels/logos/${detailsRecording.custom_properties.poster_logo_id}/cache/` - : detailsRecording.custom_properties?.poster_url || - channels[detailsRecording.channel]?.logo?.cache_url) || - '/logo.png' - } + posterUrl={getPosterUrl( + detailsRecording.custom_properties?.poster_logo_id, + detailsRecording.custom_properties, + channels[detailsRecording.channel]?.logo?.cache_url + )} env_mode={useSettingsStore.getState().environment.env_mode} onWatchLive={handleOnWatchLive} onWatchRecording={handleOnWatchRecording} From 6c1b0f9a60be6a337f80f266861bc548d5db8206 Mon Sep 17 00:00:00 2001 From: Nick Sandstrom <32273437+nick4810@users.noreply.github.com> Date: Tue, 16 Dec 2025 11:55:22 -0800 Subject: [PATCH 082/220] Extracted component and util logic --- .../src/components/cards/RecordingCard.jsx | 184 ++++------ .../forms/RecordingDetailsModal.jsx | 319 +++++++----------- .../src/utils/cards/RecordingCardUtils.js | 92 +++++ .../utils/forms/RecordingDetailsModalUtils.js | 87 +++++ 4 files changed, 373 insertions(+), 309 deletions(-) create mode 100644 frontend/src/utils/cards/RecordingCardUtils.js create mode 100644 frontend/src/utils/forms/RecordingDetailsModalUtils.js diff --git a/frontend/src/components/cards/RecordingCard.jsx b/frontend/src/components/cards/RecordingCard.jsx index 1a0fe307..96dcea11 100644 --- a/frontend/src/components/cards/RecordingCard.jsx +++ b/frontend/src/components/cards/RecordingCard.jsx @@ -2,7 +2,6 @@ import useChannelsStore from '../../store/channels.jsx'; import useSettingsStore from '../../store/settings.jsx'; import useVideoStore from '../../store/useVideoStore.jsx'; import { useDateTimeFormat, useTimeHelpers } from '../../utils/dateTimeUtils.js'; -import API from '../../api.js'; import { notifications } from '@mantine/notifications'; import React from 'react'; import { @@ -22,6 +21,17 @@ import { } from '@mantine/core'; 
import { AlertTriangle, SquareX } from 'lucide-react'; import { RecordingSynopsis } from '../RecordingSynopsis.jsx'; +import { + deleteRecordingById, + deleteSeriesAndRule, + getPosterUrl, + getRecordingUrl, + getSeasonLabel, + getSeriesInfo, + getShowVideoUrl, + removeRecording, + runComSkip, +} from './../../utils/cards/RecordingCardUtils.js'; export const RecordingCard = ({ recording, onOpenDetails, onOpenRecurring }) => { const channels = useChannelsStore((s) => s.channels); @@ -33,24 +43,6 @@ export const RecordingCard = ({ recording, onOpenDetails, onOpenRecurring }) => const channel = channels?.[recording.channel]; - const deleteRecording = (id) => { - // Optimistically remove immediately from UI - try { - useChannelsStore.getState().removeRecording(id); - } catch (error) { - console.error('Failed to optimistically remove recording', error); - } - // Fire-and-forget server delete; websocket will keep others in sync - API.deleteRecording(id).catch(() => { - // On failure, fallback to refetch to restore state - try { - useChannelsStore.getState().fetchRecordings(); - } catch (error) { - console.error('Failed to refresh recordings after delete', error); - } - }); - }; - const customProps = recording.custom_properties || {}; const program = customProps.program || {}; const recordingName = program.title || 'Custom Recording'; @@ -60,13 +52,7 @@ export const RecordingCard = ({ recording, onOpenDetails, onOpenRecurring }) => // Poster or channel logo const posterLogoId = customProps.poster_logo_id; - let posterUrl = posterLogoId - ? `/api/channels/logos/${posterLogoId}/cache/` - : customProps.poster_url || channel?.logo?.cache_url || '/logo.png'; - // Prefix API host in dev if using a relative path - if (env_mode === 'dev' && posterUrl && posterUrl.startsWith('/')) { - posterUrl = `${window.location.protocol}//${window.location.hostname}:5656${posterUrl}`; - } + const posterUrl = getPosterUrl(posterLogoId, customProps, channel, env_mode); const start = toUserTime(recording.start_time); const end = toUserTime(recording.end_time); @@ -85,27 +71,18 @@ export const RecordingCard = ({ recording, onOpenDetails, onOpenRecurring }) => const onscreen = customProps.onscreen_episode ?? program?.custom_properties?.onscreen_episode; - const seLabel = - season && episode - ? 
`S${String(season).padStart(2, '0')}E${String(episode).padStart(2, '0')}` - : onscreen || null; + const seLabel = getSeasonLabel(season, episode, onscreen); const handleWatchLive = () => { if (!channel) return; - let url = `/proxy/ts/stream/${channel.uuid}`; - if (env_mode === 'dev') { - url = `${window.location.protocol}//${window.location.hostname}:5656${url}`; - } - showVideo(url, 'live'); + showVideo(getShowVideoUrl(channel, env_mode), 'live'); }; const handleWatchRecording = () => { // Only enable if backend provides a playable file URL in custom properties - let fileUrl = customProps.file_url || customProps.output_file_url; + const fileUrl = getRecordingUrl(customProps, env_mode); if (!fileUrl) return; - if (env_mode === 'dev' && fileUrl.startsWith('/')) { - fileUrl = `${window.location.protocol}//${window.location.hostname}:5656${fileUrl}`; - } + showVideo(fileUrl, 'vod', { name: recordingName, logo: { url: posterUrl }, @@ -115,7 +92,7 @@ export const RecordingCard = ({ recording, onOpenDetails, onOpenRecurring }) => const handleRunComskip = async (e) => { e?.stopPropagation?.(); try { - await API.runComskip(recording.id); + await runComSkip(recording); notifications.show({ title: 'Removing commercials', message: 'Queued comskip for this recording', @@ -139,20 +116,16 @@ export const RecordingCard = ({ recording, onOpenDetails, onOpenRecurring }) => if (isSeriesGroup) { setCancelOpen(true); } else { - deleteRecording(recording.id); + removeRecording(recording.id); } }; - const seriesInfo = (() => { - const cp = customProps || {}; - const pr = cp.program || {}; - return { tvg_id: pr.tvg_id, title: pr.title }; - })(); + const seriesInfo = getSeriesInfo(customProps); const removeUpcomingOnly = async () => { try { setBusy(true); - await API.deleteRecording(recording.id); + await deleteRecordingById(recording.id); } finally { setBusy(false); setCancelOpen(false); @@ -167,23 +140,7 @@ export const RecordingCard = ({ recording, onOpenDetails, onOpenRecurring }) => const removeSeriesAndRule = async () => { try { setBusy(true); - const { tvg_id, title } = seriesInfo; - if (tvg_id) { - try { - await API.bulkRemoveSeriesRecordings({ - tvg_id, - title, - scope: 'title', - }); - } catch (error) { - console.error('Failed to remove series recordings', error); - } - try { - await API.deleteSeriesRule(tvg_id); - } catch (error) { - console.error('Failed to delete series rule', error); - } - } + await deleteSeriesAndRule(seriesInfo); } finally { setBusy(false); setCancelOpen(false); @@ -198,6 +155,51 @@ export const RecordingCard = ({ recording, onOpenDetails, onOpenRecurring }) => } }; + const handleOnMainCardClick = () => { + if (isRecurringRule) { + onOpenRecurring?.(recording, false); + } else { + onOpenDetails?.(recording); + } + } + + const WatchLive = () => { + return ; + } + + const WatchRecording = () => { + return + + ; + } + const MainCard = ( height: '100%', cursor: 'pointer', }} - onClick={() => { - if (isRecurringRule) { - onOpenRecurring?.(recording, false); - } else { - onOpenDetails?.(recording); - } - }} + onClick={handleOnMainCardClick} > - - + + : 'Completed'} {isInterrupted && } - + {recordingName} @@ -289,7 +285,7 @@ export const RecordingCard = ({ recording, onOpenDetails, onOpenRecurring }) => alt={recordingName} fallbackSrc="/logo.png" /> - + {!isSeriesGroup && subTitle && ( @@ -332,43 +328,9 @@ export const RecordingCard = ({ recording, onOpenDetails, onOpenRecurring }) => )} - {isInProgress && ( - - )} + {isInProgress && } - {!isUpcoming && ( - - - - )} + {!isUpcoming && } 
{!isUpcoming && customProps?.status === 'completed' && (!customProps?.comskip || diff --git a/frontend/src/components/forms/RecordingDetailsModal.jsx b/frontend/src/components/forms/RecordingDetailsModal.jsx index 9b01945c..36410b6f 100644 --- a/frontend/src/components/forms/RecordingDetailsModal.jsx +++ b/frontend/src/components/forms/RecordingDetailsModal.jsx @@ -1,20 +1,19 @@ import useChannelsStore from '../../store/channels.jsx'; import { useDateTimeFormat, useTimeHelpers } from '../../utils/dateTimeUtils.js'; import React from 'react'; -import API from '../../api.js'; -import { - Badge, - Button, - Card, - Flex, - Group, - Image, - Modal, - Stack, - Text, -} from '@mantine/core'; +import { Badge, Button, Card, Flex, Group, Image, Modal, Stack, Text, } from '@mantine/core'; import useVideoStore from '../../store/useVideoStore.jsx'; import { notifications } from '@mantine/notifications'; +import { + deleteRecordingById, + getPosterUrl, getRecordingUrl, + getSeasonLabel, getShowVideoUrl, runComSkip, +} from '../../utils/cards/RecordingCardUtils.js'; +import { + getRating, + getStatRows, + getUpcomingEpisodes, +} from '../../utils/forms/RecordingDetailsModalUtils.js'; export const RecordingDetailsModal = ({ opened, @@ -43,26 +42,10 @@ export const RecordingDetailsModal = ({ const end = toUserTime(safeRecording.end_time); const stats = customProps.stream_info || {}; - const statRows = [ - ['Video Codec', stats.video_codec], - [ - 'Resolution', - stats.resolution || - (stats.width && stats.height ? `${stats.width}x${stats.height}` : null), - ], - ['FPS', stats.source_fps], - ['Video Bitrate', stats.video_bitrate && `${stats.video_bitrate} kb/s`], - ['Audio Codec', stats.audio_codec], - ['Audio Channels', stats.audio_channels], - ['Sample Rate', stats.sample_rate && `${stats.sample_rate} Hz`], - ['Audio Bitrate', stats.audio_bitrate && `${stats.audio_bitrate} kb/s`], - ].filter(([, v]) => v !== null && v !== undefined && v !== ''); + const statRows = getStatRows(stats); // Rating (if available) - const rating = - customProps.rating || - customProps.rating_value || - (program && program.custom_properties && program.custom_properties.rating); + const rating = getRating(customProps, program); const ratingSystem = customProps.rating_system || 'MPAA'; const fileUrl = customProps.file_url || customProps.output_file_url; @@ -71,61 +54,11 @@ export const RecordingDetailsModal = ({ customProps.status === 'interrupted') && Boolean(fileUrl); - // Prefix in dev (Vite) if needed - let resolvedPosterUrl = posterUrl; - if ( - typeof import.meta !== 'undefined' && - import.meta.env && - import.meta.env.DEV - ) { - if (resolvedPosterUrl && resolvedPosterUrl.startsWith('/')) { - resolvedPosterUrl = `${window.location.protocol}//${window.location.hostname}:5656${resolvedPosterUrl}`; - } - } - const isSeriesGroup = Boolean( safeRecording._group_count && safeRecording._group_count > 1 ); const upcomingEpisodes = React.useMemo(() => { - if (!isSeriesGroup) return []; - const arr = Array.isArray(allRecordings) - ? 
allRecordings - : Object.values(allRecordings || {}); - const tvid = program.tvg_id || ''; - const titleKey = (program.title || '').toLowerCase(); - const filtered = arr.filter((r) => { - const cp = r.custom_properties || {}; - const pr = cp.program || {}; - if ((pr.tvg_id || '') !== tvid) return false; - if ((pr.title || '').toLowerCase() !== titleKey) return false; - const st = toUserTime(r.start_time); - return st.isAfter(userNow()); - }); - // Deduplicate by program.id if present, else by time+title - const seen = new Set(); - const deduped = []; - for (const r of filtered) { - const cp = r.custom_properties || {}; - const pr = cp.program || {}; - // Prefer season/episode or onscreen code; else fall back to sub_title; else program id/slot - const season = cp.season ?? pr?.custom_properties?.season; - const episode = cp.episode ?? pr?.custom_properties?.episode; - const onscreen = - cp.onscreen_episode ?? pr?.custom_properties?.onscreen_episode; - let key = null; - if (season != null && episode != null) key = `se:${season}:${episode}`; - else if (onscreen) key = `onscreen:${String(onscreen).toLowerCase()}`; - else if (pr.sub_title) key = `sub:${(pr.sub_title || '').toLowerCase()}`; - else if (pr.id != null) key = `id:${pr.id}`; - else - key = `slot:${r.channel}|${r.start_time}|${r.end_time}|${pr.title || ''}`; - if (seen.has(key)) continue; - seen.add(key); - deduped.push(r); - } - return deduped.sort( - (a, b) => toUserTime(a.start_time) - toUserTime(b.start_time) - ); + return getUpcomingEpisodes(isSeriesGroup, allRecordings, program, toUserTime, userNow); }, [ allRecordings, isSeriesGroup, @@ -146,27 +79,14 @@ export const RecordingDetailsModal = ({ const episode = cp.episode ?? pr?.custom_properties?.episode; const onscreen = cp.onscreen_episode ?? pr?.custom_properties?.onscreen_episode; - const se = - season && episode - ? `S${String(season).padStart(2, '0')}E${String(episode).padStart(2, '0')}` - : onscreen || null; + const se = getSeasonLabel(season, episode, onscreen); const posterLogoId = cp.poster_logo_id; - let purl = posterLogoId - ? 
`/api/channels/logos/${posterLogoId}/cache/` - : cp.poster_url || posterUrl || '/logo.png'; - if ( - typeof import.meta !== 'undefined' && - import.meta.env && - import.meta.env.DEV && - purl && - purl.startsWith('/') - ) { - purl = `${window.location.protocol}//${window.location.hostname}:5656${purl}`; - } + const purl = getPosterUrl(posterLogoId, cp, posterUrl); + const onRemove = async (e) => { e?.stopPropagation?.(); try { - await API.deleteRecording(rec.id); + await deleteRecordingById(rec.id); } catch (error) { console.error('Failed to delete upcoming recording', error); } @@ -176,16 +96,18 @@ export const RecordingDetailsModal = ({ console.error('Failed to refresh recordings after delete', error); } }; + + const handleOnMainCardClick = () => { + setChildRec(rec); + setChildOpen(true); + } return ( { - setChildRec(rec); - setChildOpen(true); - }} + onClick={handleOnMainCardClick} > {pr.title - + { + const rec = childRec; + const now = userNow(); + const s = toUserTime(rec.start_time); + const e = toUserTime(rec.end_time); + + if (now.isAfter(s) && now.isBefore(e)) { + if (!channelMap[rec.channel]) return; + useVideoStore.getState().showVideo(getShowVideoUrl(channelMap[rec.channel], env_mode), 'live'); + } + } + + const handleOnWatchRecording = () => { + let fileUrl = getRecordingUrl(childRec.custom_properties, env_mode) + if (!fileUrl) return; + + useVideoStore.getState().showVideo(fileUrl, 'vod', { + name: + childRec.custom_properties?.program?.title || 'Recording', + logo: { + url: getPosterUrl( + childRec.custom_properties?.poster_logo_id, + undefined, + channelMap[childRec.channel]?.logo?.cache_url + ) + }, + }); + } + + const WatchLive = () => { + return ; + } + + const WatchRecording = () => { + return ; + } + + const Edit = () => { + return ; + } + + const handleRunComskip = async (e) => { + e.stopPropagation?.(); + try { + await runComSkip(recording) + notifications.show({ + title: 'Removing commercials', + message: 'Queued comskip for this recording', + color: 'blue.5', + autoClose: 2000, + }); + } catch (error) { + console.error('Failed to run comskip', error); + } + } return ( setChildOpen(false)} recording={childRec} channel={channelMap[childRec.channel]} - posterUrl={ - (childRec.custom_properties?.poster_logo_id - ? 
`/api/channels/logos/${childRec.custom_properties.poster_logo_id}/cache/` - : childRec.custom_properties?.poster_url || - channelMap[childRec.channel]?.logo?.cache_url) || - '/logo.png' - } + posterUrl={getPosterUrl( + childRec.custom_properties?.poster_logo_id, + childRec.custom_properties, + channelMap[childRec.channel]?.logo?.cache_url + )} env_mode={env_mode} - onWatchLive={() => { - const rec = childRec; - const now = userNow(); - const s = toUserTime(rec.start_time); - const e = toUserTime(rec.end_time); - if (now.isAfter(s) && now.isBefore(e)) { - const ch = channelMap[rec.channel]; - if (!ch) return; - let url = `/proxy/ts/stream/${ch.uuid}`; - if (env_mode === 'dev') { - url = `${window.location.protocol}//${window.location.hostname}:5656${url}`; - } - useVideoStore.getState().showVideo(url, 'live'); - } - }} - onWatchRecording={() => { - let fileUrl = - childRec.custom_properties?.file_url || - childRec.custom_properties?.output_file_url; - if (!fileUrl) return; - if (env_mode === 'dev' && fileUrl.startsWith('/')) { - fileUrl = `${window.location.protocol}//${window.location.hostname}:5656${fileUrl}`; - } - useVideoStore.getState().showVideo(fileUrl, 'vod', { - name: - childRec.custom_properties?.program?.title || 'Recording', - logo: { - url: - (childRec.custom_properties?.poster_logo_id - ? `/api/channels/logos/${childRec.custom_properties.poster_logo_id}/cache/` - : channelMap[childRec.channel]?.logo?.cache_url) || - '/logo.png', - }, - }); - }} + onWatchLive={handleOnWatchLive} + onWatchRecording={handleOnWatchRecording} /> )} ) : ( - {onWatchLive && ( - - )} - {onWatchRecording && ( - - )} - {onEdit && start.isAfter(userNow()) && ( - - )} + {onWatchLive && } + {onWatchRecording && } + {onEdit && start.isAfter(userNow()) && } {customProps.status === 'completed' && (!customProps?.comskip || customProps?.comskip?.status !== 'completed') && ( @@ -371,20 +307,7 @@ export const RecordingDetailsModal = ({ size="xs" variant="light" color="teal" - onClick={async (e) => { - e.stopPropagation?.(); - try { - await API.runComskip(recording.id); - notifications.show({ - title: 'Removing commercials', - message: 'Queued comskip for this recording', - color: 'blue.5', - autoClose: 2000, - }); - } catch (error) { - console.error('Failed to run comskip', error); - } - }} + onClick={handleRunComskip} > Remove commercials diff --git a/frontend/src/utils/cards/RecordingCardUtils.js b/frontend/src/utils/cards/RecordingCardUtils.js new file mode 100644 index 00000000..65b3da3a --- /dev/null +++ b/frontend/src/utils/cards/RecordingCardUtils.js @@ -0,0 +1,92 @@ +import API from '../../api.js'; +import useChannelsStore from '../../store/channels.jsx'; + +export const removeRecording = (id) => { + // Optimistically remove immediately from UI + try { + useChannelsStore.getState().removeRecording(id); + } catch (error) { + console.error('Failed to optimistically remove recording', error); + } + // Fire-and-forget server delete; websocket will keep others in sync + API.deleteRecording(id).catch(() => { + // On failure, fallback to refetch to restore state + try { + useChannelsStore.getState().fetchRecordings(); + } catch (error) { + console.error('Failed to refresh recordings after delete', error); + } + }); +}; + +export const getPosterUrl = (posterLogoId, customProperties, posterUrl) => { + let purl = posterLogoId + ? 
`/api/channels/logos/${posterLogoId}/cache/` + : customProperties?.poster_url || posterUrl || '/logo.png'; + if ( + typeof import.meta !== 'undefined' && + import.meta.env && + import.meta.env.DEV && + purl && + purl.startsWith('/') + ) { + purl = `${window.location.protocol}//${window.location.hostname}:5656${purl}`; + } + return purl; +}; + +export const getShowVideoUrl = (channel, env_mode) => { + let url = `/proxy/ts/stream/${channel.uuid}`; + if (env_mode === 'dev') { + url = `${window.location.protocol}//${window.location.hostname}:5656${url}`; + } + return url; +}; + +export const runComSkip = async (recording) => { + await API.runComskip(recording.id); +}; + +export const deleteRecordingById = async (recordingId) => { + await API.deleteRecording(recordingId); +}; + +export const deleteSeriesAndRule = async (seriesInfo) => { + const { tvg_id, title } = seriesInfo; + if (tvg_id) { + try { + await API.bulkRemoveSeriesRecordings({ + tvg_id, + title, + scope: 'title', + }); + } catch (error) { + console.error('Failed to remove series recordings', error); + } + try { + await API.deleteSeriesRule(tvg_id); + } catch (error) { + console.error('Failed to delete series rule', error); + } + } +}; + +export const getRecordingUrl = (customProps, env_mode) => { + let fileUrl = customProps?.file_url || customProps?.output_file_url; + if (fileUrl && env_mode === 'dev' && fileUrl.startsWith('/')) { + fileUrl = `${window.location.protocol}//${window.location.hostname}:5656${fileUrl}`; + } + return fileUrl; +}; + +export const getSeasonLabel = (season, episode, onscreen) => { + return season && episode + ? `S${String(season).padStart(2, '0')}E${String(episode).padStart(2, '0')}` + : onscreen || null; +}; + +export const getSeriesInfo = (customProps) => { + const cp = customProps || {}; + const pr = cp.program || {}; + return { tvg_id: pr.tvg_id, title: pr.title }; +}; \ No newline at end of file diff --git a/frontend/src/utils/forms/RecordingDetailsModalUtils.js b/frontend/src/utils/forms/RecordingDetailsModalUtils.js new file mode 100644 index 00000000..805bc006 --- /dev/null +++ b/frontend/src/utils/forms/RecordingDetailsModalUtils.js @@ -0,0 +1,87 @@ +export const getStatRows = (stats) => { + return [ + ['Video Codec', stats.video_codec], + [ + 'Resolution', + stats.resolution || + (stats.width && stats.height ? 
`${stats.width}x${stats.height}` : null), + ], + ['FPS', stats.source_fps], + ['Video Bitrate', stats.video_bitrate && `${stats.video_bitrate} kb/s`], + ['Audio Codec', stats.audio_codec], + ['Audio Channels', stats.audio_channels], + ['Sample Rate', stats.sample_rate && `${stats.sample_rate} Hz`], + ['Audio Bitrate', stats.audio_bitrate && `${stats.audio_bitrate} kb/s`], + ].filter(([, v]) => v !== null && v !== undefined && v !== ''); +}; + +export const getRating = (customProps, program) => { + return ( + customProps.rating || + customProps.rating_value || + (program && program.custom_properties && program.custom_properties.rating) + ); +}; + +const filterByUpcoming = (arr, tvid, titleKey, toUserTime, userNow) => { + return arr.filter((r) => { + const cp = r.custom_properties || {}; + const pr = cp.program || {}; + + if ((pr.tvg_id || '') !== tvid) return false; + if ((pr.title || '').toLowerCase() !== titleKey) return false; + const st = toUserTime(r.start_time); + return st.isAfter(userNow()); + }); +} + +const dedupeByProgram = (filtered) => { + // Deduplicate by program.id if present, else by time+title + const seen = new Set(); + const deduped = []; + + for (const r of filtered) { + const cp = r.custom_properties || {}; + const pr = cp.program || {}; + // Prefer season/episode or onscreen code; else fall back to sub_title; else program id/slot + const season = cp.season ?? pr?.custom_properties?.season; + const episode = cp.episode ?? pr?.custom_properties?.episode; + const onscreen = + cp.onscreen_episode ?? pr?.custom_properties?.onscreen_episode; + + let key = null; + if (season != null && episode != null) key = `se:${season}:${episode}`; + else if (onscreen) key = `onscreen:${String(onscreen).toLowerCase()}`; + else if (pr.sub_title) key = `sub:${(pr.sub_title || '').toLowerCase()}`; + else if (pr.id != null) key = `id:${pr.id}`; + else + key = `slot:${r.channel}|${r.start_time}|${r.end_time}|${pr.title || ''}`; + + if (seen.has(key)) continue; + seen.add(key); + deduped.push(r); + } + return deduped; +} + +export const getUpcomingEpisodes = ( + isSeriesGroup, + allRecordings, + program, + toUserTime, + userNow +) => { + if (!isSeriesGroup) return []; + + const arr = Array.isArray(allRecordings) + ? allRecordings + : Object.values(allRecordings || {}); + const tvid = program.tvg_id || ''; + const titleKey = (program.title || '').toLowerCase(); + + const filtered = filterByUpcoming(arr, tvid, titleKey, toUserTime, userNow); + + return dedupeByProgram(filtered).sort( + (a, b) => toUserTime(a.start_time) - toUserTime(b.start_time) + ); +}; From 48e7060cdbded51efc5064c65c5f1ecd27921b27 Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Tue, 16 Dec 2025 15:24:16 -0600 Subject: [PATCH 083/220] Bug Fix: VOD episode processing now correctly handles duplicate episodes from the same provider. 
(Fixes #556) --- CHANGELOG.md | 3 ++- apps/vod/tasks.py | 57 +++++++++++++++++++++++++++++++++++++++-------- 2 files changed, 50 insertions(+), 10 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 151018cb..6b8eea3a 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -17,7 +17,8 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 ### Fixed -- nginx now gracefully handles hosts without IPv6 support by automatically disabling IPv6 binding at startup (Closes #744) +- VOD episode processing now correctly handles duplicate episodes (same episode in multiple languages/qualities) by reusing Episode records across multiple M3UEpisodeRelation entries instead of attempting to create duplicates (Fixes #556) +- nginx now gracefully handles hosts without IPv6 support by automatically disabling IPv6 binding at startup (Fixes #744) - XtreamCodes EPG API now returns correct date/time format for start/end fields and proper string types for timestamps and channel_id - XtreamCodes EPG API now handles None values for title and description fields to prevent AttributeError diff --git a/apps/vod/tasks.py b/apps/vod/tasks.py index 1170543a..d42be946 100644 --- a/apps/vod/tasks.py +++ b/apps/vod/tasks.py @@ -1232,7 +1232,13 @@ def refresh_series_episodes(account, series, external_series_id, episodes_data=N def batch_process_episodes(account, series, episodes_data, scan_start_time=None): - """Process episodes in batches for better performance""" + """Process episodes in batches for better performance. + + Note: Multiple streams can represent the same episode (e.g., different languages + or qualities). Each stream has a unique stream_id, but they share the same + season/episode number. We create one Episode record per (series, season, episode) + and multiple M3UEpisodeRelation records pointing to it. 
+ """ if not episodes_data: return @@ -1249,12 +1255,13 @@ def batch_process_episodes(account, series, episodes_data, scan_start_time=None) logger.info(f"Batch processing {len(all_episodes_data)} episodes for series {series.name}") # Extract episode identifiers - episode_keys = [] + # Note: episode_keys may have duplicates when multiple streams represent same episode + episode_keys = set() # Use set to track unique episode keys episode_ids = [] for episode_data in all_episodes_data: season_num = episode_data['_season_number'] episode_num = episode_data.get('episode_num', 0) - episode_keys.append((series.id, season_num, episode_num)) + episode_keys.add((series.id, season_num, episode_num)) episode_ids.append(str(episode_data.get('id'))) # Pre-fetch existing episodes @@ -1277,6 +1284,10 @@ def batch_process_episodes(account, series, episodes_data, scan_start_time=None) relations_to_create = [] relations_to_update = [] + # Track episodes we're creating in this batch to avoid duplicates + # Key: (series_id, season_number, episode_number) -> Episode object + episodes_pending_creation = {} + for episode_data in all_episodes_data: try: episode_id = str(episode_data.get('id')) @@ -1306,10 +1317,15 @@ def batch_process_episodes(account, series, episodes_data, scan_start_time=None) if backdrop: custom_props['backdrop_path'] = [backdrop] - # Find existing episode + # Find existing episode - check DB first, then pending creations episode_key = (series.id, season_number, episode_number) episode = existing_episodes.get(episode_key) + # Check if we already have this episode pending creation (multiple streams for same episode) + if not episode and episode_key in episodes_pending_creation: + episode = episodes_pending_creation[episode_key] + logger.debug(f"Reusing pending episode for S{season_number:02d}E{episode_number:02d} (stream_id: {episode_id})") + if episode: # Update existing episode updated = False @@ -1338,7 +1354,9 @@ def batch_process_episodes(account, series, episodes_data, scan_start_time=None) episode.custom_properties = custom_props if custom_props else None updated = True - if updated: + # Only add to update list if episode has a PK (exists in DB) and isn't already in list + # Episodes pending creation don't have PKs yet and will be created via bulk_create + if updated and episode.pk and episode not in episodes_to_update: episodes_to_update.append(episode) else: # Create new episode @@ -1356,6 +1374,8 @@ def batch_process_episodes(account, series, episodes_data, scan_start_time=None) custom_properties=custom_props if custom_props else None ) episodes_to_create.append(episode) + # Track this episode so subsequent streams with same season/episode can reuse it + episodes_pending_creation[episode_key] = episode # Handle episode relation if episode_id in existing_relations: @@ -1389,9 +1409,28 @@ def batch_process_episodes(account, series, episodes_data, scan_start_time=None) # Execute batch operations with transaction.atomic(): - # Create new episodes + # Create new episodes - use ignore_conflicts in case of race conditions if episodes_to_create: - Episode.objects.bulk_create(episodes_to_create) + Episode.objects.bulk_create(episodes_to_create, ignore_conflicts=True) + + # Re-fetch the created episodes to get their PKs + # We need to do this because bulk_create with ignore_conflicts doesn't set PKs + created_episode_keys = [ + (ep.series_id, ep.season_number, ep.episode_number) + for ep in episodes_to_create + ] + db_episodes = Episode.objects.filter(series=series) + episode_pk_map = { + 
(ep.series_id, ep.season_number, ep.episode_number): ep
+                for ep in db_episodes
+            }
+
+            # Update relations to point to the actual DB episodes with PKs
+            for relation in relations_to_create:
+                ep = relation.episode
+                key = (ep.series_id, ep.season_number, ep.episode_number)
+                if key in episode_pk_map:
+                    relation.episode = episode_pk_map[key]

         # Update existing episodes
         if episodes_to_update:
@@ -1400,9 +1439,9 @@
             'tmdb_id', 'imdb_id', 'custom_properties'
         ])

-        # Create new episode relations
+        # Create new episode relations - use ignore_conflicts for stream_id duplicates
         if relations_to_create:
-            M3UEpisodeRelation.objects.bulk_create(relations_to_create)
+            M3UEpisodeRelation.objects.bulk_create(relations_to_create, ignore_conflicts=True)

         # Update existing episode relations
         if relations_to_update:

From 748d5dc72dae9aa421e6c11dae23f3c8d1ddb379 Mon Sep 17 00:00:00 2001
From: SergeantPanda
Date: Tue, 16 Dec 2025 15:44:42 -0600
Subject: [PATCH 084/220] Bug Fix: When multiple M3UEpisodeRelation records existed for a requested episode, the XC API would fail. (Fixes #569)

---
 CHANGELOG.md         | 1 +
 apps/output/views.py | 2 +-
 2 files changed, 2 insertions(+), 1 deletion(-)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index 6b8eea3a..e77c8ea6 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -18,6 +18,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0

 ### Fixed

 - VOD episode processing now correctly handles duplicate episodes (same episode in multiple languages/qualities) by reusing Episode records across multiple M3UEpisodeRelation entries instead of attempting to create duplicates (Fixes #556)
+- XtreamCodes series streaming endpoint now correctly handles episodes with multiple streams (different languages/qualities) by selecting the best available stream based on account priority (Fixes #569)
 - nginx now gracefully handles hosts without IPv6 support by automatically disabling IPv6 binding at startup (Fixes #744)
 - XtreamCodes EPG API now returns correct date/time format for start/end fields and proper string types for timestamps and channel_id
 - XtreamCodes EPG API now handles None values for title and description fields to prevent AttributeError

diff --git a/apps/output/views.py b/apps/output/views.py
index 2f8eac3b..2966b5b5 100644
--- a/apps/output/views.py
+++ b/apps/output/views.py
@@ -2878,7 +2878,7 @@ def xc_series_stream(request, username, password, stream_id, extension):
     filters = {"episode_id": stream_id, "m3u_account__is_active": True}

     try:
-        episode_relation = M3UEpisodeRelation.objects.select_related('episode').get(**filters)
+        episode_relation = M3UEpisodeRelation.objects.select_related('episode').filter(**filters).order_by('-m3u_account__priority', 'id').first()
     except M3UEpisodeRelation.DoesNotExist:
         return JsonResponse({"error": "Episode not found"}, status=404)

From 7c4554233211f2eb937e1b9457f5f6c40e21b28b Mon Sep 17 00:00:00 2001
From: Nick Sandstrom <32273437+nick4810@users.noreply.github.com>
Date: Tue, 16 Dec 2025 13:43:39 -0800
Subject: [PATCH 085/220] Fixed cache_url fallback

---
 frontend/src/components/cards/RecordingCard.jsx | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/frontend/src/components/cards/RecordingCard.jsx b/frontend/src/components/cards/RecordingCard.jsx
index 96dcea11..6a25259b 100644
--- a/frontend/src/components/cards/RecordingCard.jsx
+++ b/frontend/src/components/cards/RecordingCard.jsx
@@ -51,8 +51,8 @@ export const RecordingCard = ({
recording, onOpenDetails, onOpenRecurring }) => const isRecurringRule = customProps?.rule?.type === 'recurring'; // Poster or channel logo - const posterLogoId = customProps.poster_logo_id; - const posterUrl = getPosterUrl(posterLogoId, customProps, channel, env_mode); + const posterUrl = getPosterUrl( + customProps.poster_logo_id, customProps, channel?.logo?.cache_url, env_mode); const start = toUserTime(recording.start_time); const end = toUserTime(recording.end_time); From 38033da90f1e56aaad05bcf70567d5f58f7cda35 Mon Sep 17 00:00:00 2001 From: Nick Sandstrom <32273437+nick4810@users.noreply.github.com> Date: Tue, 16 Dec 2025 13:43:55 -0800 Subject: [PATCH 086/220] Fixed component syntax --- frontend/src/pages/DVR.jsx | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/frontend/src/pages/DVR.jsx b/frontend/src/pages/DVR.jsx index 023457e3..711c9ba1 100644 --- a/frontend/src/pages/DVR.jsx +++ b/frontend/src/pages/DVR.jsx @@ -93,7 +93,7 @@ const DVRPage = () => { return categorizeRecordings(recordings, toUserTime, now); }, [recordings, now, toUserTime]); - const RecordingList = (list) => { + const RecordingList = ({ list }) => { return list.map((rec) => ( { onOpenRecurring={openRuleModal} /> )); - } + }; const handleOnWatchLive = () => { const rec = detailsRecording; From dd75b5b21aedec5e62ed5bc2b5d20577e99b9909 Mon Sep 17 00:00:00 2001 From: Nick Sandstrom <32273437+nick4810@users.noreply.github.com> Date: Tue, 16 Dec 2025 13:44:11 -0800 Subject: [PATCH 087/220] Added correct import for Text component --- frontend/src/components/RecordingSynopsis.jsx | 2 ++ 1 file changed, 2 insertions(+) diff --git a/frontend/src/components/RecordingSynopsis.jsx b/frontend/src/components/RecordingSynopsis.jsx index aa870258..1b6ec5ab 100644 --- a/frontend/src/components/RecordingSynopsis.jsx +++ b/frontend/src/components/RecordingSynopsis.jsx @@ -1,3 +1,5 @@ +import { Text, } from '@mantine/core'; + // Short preview that triggers the details modal when clicked export const RecordingSynopsis = ({ description, onOpen }) => { const truncated = description?.length > 140; From 36ec2fb1b02c79fc600f4634db34ab81a77d8a27 Mon Sep 17 00:00:00 2001 From: Nick Sandstrom <32273437+nick4810@users.noreply.github.com> Date: Tue, 16 Dec 2025 13:44:55 -0800 Subject: [PATCH 088/220] Extracted component and util logic --- .../components/forms/RecurringRuleModal.jsx | 199 ++++++++---------- .../utils/forms/RecurringRuleModalUtils.js | 66 ++++++ 2 files changed, 157 insertions(+), 108 deletions(-) create mode 100644 frontend/src/utils/forms/RecurringRuleModalUtils.js diff --git a/frontend/src/components/forms/RecurringRuleModal.jsx b/frontend/src/components/forms/RecurringRuleModal.jsx index 590d4641..1fbdd549 100644 --- a/frontend/src/components/forms/RecurringRuleModal.jsx +++ b/frontend/src/components/forms/RecurringRuleModal.jsx @@ -9,10 +9,17 @@ import { import React, { useEffect, useMemo, useState } from 'react'; import { useForm } from '@mantine/form'; import dayjs from 'dayjs'; -import API from '../../api.js'; import { notifications } from '@mantine/notifications'; import { Badge, Button, Card, Group, Modal, MultiSelect, Select, Stack, Switch, Text, TextInput } from '@mantine/core'; import { DatePickerInput, TimeInput } from '@mantine/dates'; +import { deleteRecordingById } from '../../utils/cards/RecordingCardUtils.js'; +import { + deleteRecurringRuleById, + getChannelOptions, + getUpcomingOccurrences, + updateRecurringRule, + updateRecurringRuleEnabled, +} from 
'../../utils/forms/RecurringRuleModalUtils.js'; export const RecurringRuleModal = ({ opened, onClose, ruleId, onEditOccurrence }) => { const channels = useChannelsStore((s) => s.channels); @@ -30,19 +37,7 @@ export const RecurringRuleModal = ({ opened, onClose, ruleId, onEditOccurrence } const rule = recurringRules.find((r) => r.id === ruleId); const channelOptions = useMemo(() => { - const list = Object.values(channels || {}); - list.sort((a, b) => { - const aNum = Number(a.channel_number) || 0; - const bNum = Number(b.channel_number) || 0; - if (aNum === bNum) { - return (a.name || '').localeCompare(b.name || ''); - } - return aNum - bNum; - }); - return list.map((item) => ({ - value: `${item.id}`, - label: item.name || `Channel ${item.id}`, - })); + return getChannelOptions(channels); }, [channels]); const form = useForm({ @@ -109,41 +104,14 @@ export const RecurringRuleModal = ({ opened, onClose, ruleId, onEditOccurrence } }, [opened, ruleId, rule]); const upcomingOccurrences = useMemo(() => { - const list = Array.isArray(recordings) - ? recordings - : Object.values(recordings || {}); - const now = userNow(); - return list - .filter( - (rec) => - rec?.custom_properties?.rule?.id === ruleId && - toUserTime(rec.start_time).isAfter(now) - ) - .sort( - (a, b) => - toUserTime(a.start_time).valueOf() - - toUserTime(b.start_time).valueOf() - ); + return getUpcomingOccurrences(recordings, userNow, ruleId, toUserTime); }, [recordings, ruleId, toUserTime, userNow]); const handleSave = async (values) => { if (!rule) return; setSaving(true); try { - await API.updateRecurringRule(ruleId, { - channel: values.channel_id, - days_of_week: (values.days_of_week || []).map((d) => Number(d)), - start_time: toTimeString(values.start_time), - end_time: toTimeString(values.end_time), - start_date: values.start_date - ? dayjs(values.start_date).format('YYYY-MM-DD') - : null, - end_date: values.end_date - ? dayjs(values.end_date).format('YYYY-MM-DD') - : null, - name: values.rule_name?.trim() || '', - enabled: Boolean(values.enabled), - }); + await updateRecurringRule(ruleId, values); await Promise.all([fetchRecurringRules(), fetchRecordings()]); notifications.show({ title: 'Recurring rule updated', @@ -163,7 +131,7 @@ export const RecurringRuleModal = ({ opened, onClose, ruleId, onEditOccurrence } if (!rule) return; setDeleting(true); try { - await API.deleteRecurringRule(ruleId); + await deleteRecurringRuleById(ruleId); await Promise.all([fetchRecurringRules(), fetchRecordings()]); notifications.show({ title: 'Recurring rule removed', @@ -183,7 +151,7 @@ export const RecurringRuleModal = ({ opened, onClose, ruleId, onEditOccurrence } if (!rule) return; setSaving(true); try { - await API.updateRecurringRule(ruleId, { enabled: checked }); + await updateRecurringRuleEnabled(ruleId, checked); await Promise.all([fetchRecurringRules(), fetchRecordings()]); notifications.show({ title: checked ? 
'Recurring rule enabled' : 'Recurring rule paused', @@ -204,7 +172,7 @@ export const RecurringRuleModal = ({ opened, onClose, ruleId, onEditOccurrence } const handleCancelOccurrence = async (occurrence) => { setBusyOccurrence(occurrence.id); try { - await API.deleteRecording(occurrence.id); + await deleteRecordingById(occurrence.id); await fetchRecordings(); notifications.show({ title: 'Occurrence cancelled', @@ -227,6 +195,77 @@ export const RecurringRuleModal = ({ opened, onClose, ruleId, onEditOccurrence } ); } + const handleEnableChange = (event) => { + form.setFieldValue('enabled', event.currentTarget.checked); + handleToggleEnabled(event.currentTarget.checked); + } + + const handleStartDateChange = (value) => { + form.setFieldValue('start_date', value || dayjs().toDate()); + } + + const handleEndDateChange = (value) => { + form.setFieldValue('end_date', value); + } + + const handleStartTimeChange = (value) => { + form.setFieldValue('start_time', toTimeString(value)); + } + + const handleEndTimeChange = (value) => { + form.setFieldValue('end_time', toTimeString(value)); + } + + const UpcomingList = () => { + return + {upcomingOccurrences.map((occ) => { + const occStart = toUserTime(occ.start_time); + const occEnd = toUserTime(occ.end_time); + + return ( + + + + + {occStart.format(`${dateformat}, YYYY`)} + + + {occStart.format(timeformat)} – {occEnd.format(timeformat)} + + + + + + + + + ); + })} + ; + } + return ( { - form.setFieldValue('enabled', event.currentTarget.checked); - handleToggleEnabled(event.currentTarget.checked); - }} + onChange={handleEnableChange} label={form.values.enabled ? 'Enabled' : 'Paused'} disabled={saving} /> @@ -278,15 +314,13 @@ export const RecurringRuleModal = ({ opened, onClose, ruleId, onEditOccurrence } - form.setFieldValue('start_date', value || dayjs().toDate()) - } + onChange={handleStartDateChange} valueFormat="MMM D, YYYY" /> form.setFieldValue('end_date', value)} + onChange={handleEndDateChange} valueFormat="MMM D, YYYY" minDate={form.values.start_date || undefined} /> @@ -295,9 +329,7 @@ export const RecurringRuleModal = ({ opened, onClose, ruleId, onEditOccurrence } - form.setFieldValue('start_time', toTimeString(value)) - } + onChange={handleStartTimeChange} withSeconds={false} format="12" amLabel="AM" @@ -306,9 +338,7 @@ export const RecurringRuleModal = ({ opened, onClose, ruleId, onEditOccurrence } - form.setFieldValue('end_time', toTimeString(value)) - } + onChange={handleEndTimeChange} withSeconds={false} format="12" amLabel="AM" @@ -341,54 +371,7 @@ export const RecurringRuleModal = ({ opened, onClose, ruleId, onEditOccurrence } No future airings currently scheduled. 
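The helpers this patch extracts (defined in the new RecurringRuleModalUtils.js file later in this diff) are plain functions over store data, so they can be exercised without mounting the modal. A minimal illustrative sketch; the sample channels/recordings and the import path are invented for demonstration, not taken from the patch:

// Hypothetical usage of the extracted helpers.
import dayjs from 'dayjs';
import {
  getChannelOptions,
  getUpcomingOccurrences,
} from './RecurringRuleModalUtils.js';

const channels = {
  7: { id: 7, channel_number: '2', name: 'News' },
  9: { id: 9, channel_number: '1', name: 'Movies' },
};
// Sorted by channel number and shaped for a Select:
// [{ value: '9', label: 'Movies' }, { value: '7', label: 'News' }]
console.log(getChannelOptions(channels));

const recordings = [
  { id: 1, start_time: '2099-01-01T20:00:00Z', custom_properties: { rule: { id: 3 } } },
  { id: 2, start_time: '2000-01-01T20:00:00Z', custom_properties: { rule: { id: 3 } } },
];
// Keeps only future occurrences of rule 3, soonest first -> recording 1 only.
console.log(getUpcomingOccurrences(recordings, () => dayjs(), 3, (t) => dayjs(t)));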
- ) : ( - - {upcomingOccurrences.map((occ) => { - const occStart = toUserTime(occ.start_time); - const occEnd = toUserTime(occ.end_time); - return ( - - - - - {occStart.format(`${dateformat}, YYYY`)} - - - {occStart.format(timeformat)} – {occEnd.format(timeformat)} - - - - - - - - - ); - })} - - )} + ) : } diff --git a/frontend/src/utils/forms/RecurringRuleModalUtils.js b/frontend/src/utils/forms/RecurringRuleModalUtils.js new file mode 100644 index 00000000..1eb9194a --- /dev/null +++ b/frontend/src/utils/forms/RecurringRuleModalUtils.js @@ -0,0 +1,66 @@ +import API from '../../api.js'; +import { toTimeString } from '../dateTimeUtils.js'; +import dayjs from 'dayjs'; + +export const getChannelOptions = (channels) => { + return Object.values(channels || {}) + .sort((a, b) => { + const aNum = Number(a.channel_number) || 0; + const bNum = Number(b.channel_number) || 0; + if (aNum === bNum) { + return (a.name || '').localeCompare(b.name || ''); + } + return aNum - bNum; + }) + .map((item) => ({ + value: `${item.id}`, + label: item.name || `Channel ${item.id}`, + })); +}; + +export const getUpcomingOccurrences = ( + recordings, + userNow, + ruleId, + toUserTime +) => { + const list = Array.isArray(recordings) + ? recordings + : Object.values(recordings || {}); + const now = userNow(); + return list + .filter( + (rec) => + rec?.custom_properties?.rule?.id === ruleId && + toUserTime(rec.start_time).isAfter(now) + ) + .sort( + (a, b) => + toUserTime(a.start_time).valueOf() - toUserTime(b.start_time).valueOf() + ); +}; + +export const updateRecurringRule = async (ruleId, values) => { + await API.updateRecurringRule(ruleId, { + channel: values.channel_id, + days_of_week: (values.days_of_week || []).map((d) => Number(d)), + start_time: toTimeString(values.start_time), + end_time: toTimeString(values.end_time), + start_date: values.start_date + ? dayjs(values.start_date).format('YYYY-MM-DD') + : null, + end_date: values.end_date + ? 
dayjs(values.end_date).format('YYYY-MM-DD') + : null, + name: values.rule_name?.trim() || '', + enabled: Boolean(values.enabled), + }); +}; + +export const deleteRecurringRuleById = async (ruleId) => { + await API.deleteRecurringRule(ruleId); +}; + +export const updateRecurringRuleEnabled = async (ruleId, checked) => { + await API.updateRecurringRule(ruleId, { enabled: checked }); +}; \ No newline at end of file From 98a016a41865cc14b7cdbfec67a1f561932b7505 Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Tue, 16 Dec 2025 15:54:33 -0600 Subject: [PATCH 089/220] Enhance series info retrieval to return unique episodes and improve relation handling for active M3U accounts --- CHANGELOG.md | 1 + apps/output/views.py | 51 +++++++++++++++++++++++++++----------------- 2 files changed, 32 insertions(+), 20 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index e77c8ea6..019f7893 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -19,6 +19,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 - VOD episode processing now correctly handles duplicate episodes (same episode in multiple languages/qualities) by reusing Episode records across multiple M3UEpisodeRelation entries instead of attempting to create duplicates (Fixes #556) - XtreamCodes series streaming endpoint now correctly handles episodes with multiple streams (different languages/qualities) by selecting the best available stream based on account priority (Fixes #569) +- XtreamCodes series info API now returns unique episodes instead of duplicate entries when multiple streams exist for the same episode (different languages/qualities) - nginx now gracefully handles hosts without IPv6 support by automatically disabling IPv6 binding at startup (Fixes #744) - XtreamCodes EPG API now returns correct date/time format for start/end fields and proper string types for timestamps and channel_id - XtreamCodes EPG API now handles None values for title and description fields to prevent AttributeError diff --git a/apps/output/views.py b/apps/output/views.py index 2966b5b5..1710fa4d 100644 --- a/apps/output/views.py +++ b/apps/output/views.py @@ -2521,34 +2521,45 @@ def xc_get_series_info(request, user, series_id): except Exception as e: logger.error(f"Error refreshing series data for relation {series_relation.id}: {str(e)}") - # Get episodes for this series from the same M3U account - episode_relations = M3UEpisodeRelation.objects.filter( - episode__series=series, - m3u_account=series_relation.m3u_account - ).select_related('episode').order_by('episode__season_number', 'episode__episode_number') + # Get unique episodes for this series that have relations from any active M3U account + # We query episodes directly to avoid duplicates when multiple relations exist + # (e.g., same episode in different languages/qualities) + from apps.vod.models import Episode + episodes = Episode.objects.filter( + series=series, + m3u_relations__m3u_account__is_active=True + ).distinct().order_by('season_number', 'episode_number') # Group episodes by season seasons = {} - for relation in episode_relations: - episode = relation.episode + for episode in episodes: season_num = episode.season_number or 1 if season_num not in seasons: seasons[season_num] = [] - # Try to get the highest priority related M3UEpisodeRelation for this episode (for video/audio/bitrate) + # Get the highest priority relation for this episode (for container_extension, video/audio/bitrate) from apps.vod.models import M3UEpisodeRelation - first_relation = 
M3UEpisodeRelation.objects.filter( - episode=episode + best_relation = M3UEpisodeRelation.objects.filter( + episode=episode, + m3u_account__is_active=True ).select_related('m3u_account').order_by('-m3u_account__priority', 'id').first() + video = audio = bitrate = None - if first_relation and first_relation.custom_properties: - info = first_relation.custom_properties.get('info') - if info and isinstance(info, dict): - info_info = info.get('info') - if info_info and isinstance(info_info, dict): - video = info_info.get('video', {}) - audio = info_info.get('audio', {}) - bitrate = info_info.get('bitrate', 0) + container_extension = "mp4" + added_timestamp = str(int(episode.created_at.timestamp())) + + if best_relation: + container_extension = best_relation.container_extension or "mp4" + added_timestamp = str(int(best_relation.created_at.timestamp())) + if best_relation.custom_properties: + info = best_relation.custom_properties.get('info') + if info and isinstance(info, dict): + info_info = info.get('info') + if info_info and isinstance(info_info, dict): + video = info_info.get('video', {}) + audio = info_info.get('audio', {}) + bitrate = info_info.get('bitrate', 0) + if video is None: video = episode.custom_properties.get('video', {}) if episode.custom_properties else {} if audio is None: @@ -2561,8 +2572,8 @@ def xc_get_series_info(request, user, series_id): "season": season_num, "episode_num": episode.episode_number or 0, "title": episode.name, - "container_extension": relation.container_extension or "mp4", - "added": str(int(relation.created_at.timestamp())), + "container_extension": container_extension, + "added": added_timestamp, "custom_sid": None, "direct_source": "", "info": { From 7ea843956bae427282f06123285e8c1b1bf56732 Mon Sep 17 00:00:00 2001 From: Dispatcharr Date: Tue, 16 Dec 2025 21:52:35 -0600 Subject: [PATCH 090/220] Updated FloatingVideo.jsx Added resizing of the floating video Fixed floating video dragging --- frontend/src/components/FloatingVideo.jsx | 249 +++++++++++++++++++++- 1 file changed, 242 insertions(+), 7 deletions(-) diff --git a/frontend/src/components/FloatingVideo.jsx b/frontend/src/components/FloatingVideo.jsx index 6aaeecda..611d7e2a 100644 --- a/frontend/src/components/FloatingVideo.jsx +++ b/frontend/src/components/FloatingVideo.jsx @@ -1,5 +1,5 @@ // frontend/src/components/FloatingVideo.js -import React, { useEffect, useRef, useState } from 'react'; +import React, { useCallback, useEffect, useRef, useState } from 'react'; import Draggable from 'react-draggable'; import useVideoStore from '../store/useVideoStore'; import mpegts from 'mpegts.js'; @@ -17,7 +17,19 @@ export default function FloatingVideo() { const [isLoading, setIsLoading] = useState(false); const [loadError, setLoadError] = useState(null); const [showOverlay, setShowOverlay] = useState(true); + const [videoSize, setVideoSize] = useState({ width: 320, height: 180 }); + const [isResizing, setIsResizing] = useState(false); + const resizeStateRef = useRef(null); const overlayTimeoutRef = useRef(null); + const aspectRatioRef = useRef(320 / 180); + const [dragPosition, setDragPosition] = useState(null); + const dragPositionRef = useRef(null); + const dragOffsetRef = useRef({ x: 0, y: 0 }); + const initialPositionRef = useRef(null); + + const MIN_WIDTH = 220; + const MIN_HEIGHT = 124; + const VISIBLE_MARGIN = 48; // keep part of the window visible when dragging // Safely destroy the mpegts player to prevent errors const safeDestroyPlayer = () => { @@ -315,24 +327,223 @@ export default function 
FloatingVideo() { }, 50); }; + const clampToVisible = useCallback( + (x, y) => { + if (typeof window === 'undefined') return { x, y }; + + const minX = -(videoSize.width - VISIBLE_MARGIN); + const minY = -(videoSize.height - VISIBLE_MARGIN); + const maxX = window.innerWidth - VISIBLE_MARGIN; + const maxY = window.innerHeight - VISIBLE_MARGIN; + + return { + x: Math.min(Math.max(x, minX), maxX), + y: Math.min(Math.max(y, minY), maxY), + }; + }, + [VISIBLE_MARGIN, videoSize.height, videoSize.width] + ); + + const handleResizeMove = useCallback( + (event) => { + if (!resizeStateRef.current) return; + + const clientX = + event.touches && event.touches.length ? event.touches[0].clientX : event.clientX; + const clientY = + event.touches && event.touches.length ? event.touches[0].clientY : event.clientY; + + const deltaX = clientX - resizeStateRef.current.startX; + const deltaY = clientY - resizeStateRef.current.startY; + const aspectRatio = resizeStateRef.current.aspectRatio || aspectRatioRef.current; + + // Derive width/height while keeping the original aspect ratio + let nextWidth = resizeStateRef.current.startWidth + deltaX; + let nextHeight = nextWidth / aspectRatio; + + // Allow vertical-driven resize if the user drags mostly vertically + if (Math.abs(deltaY) > Math.abs(deltaX)) { + nextHeight = resizeStateRef.current.startHeight + deltaY; + nextWidth = nextHeight * aspectRatio; + } + + // Respect minimums while keeping the ratio + if (nextWidth < MIN_WIDTH) { + nextWidth = MIN_WIDTH; + nextHeight = nextWidth / aspectRatio; + } + + if (nextHeight < MIN_HEIGHT) { + nextHeight = MIN_HEIGHT; + nextWidth = nextHeight * aspectRatio; + } + + // Keep within viewport with a margin based on current position + const posX = dragPositionRef.current?.x ?? 0; + const posY = dragPositionRef.current?.y ?? 0; + const margin = VISIBLE_MARGIN; + + const maxWidth = Math.max(MIN_WIDTH, window.innerWidth - posX - margin); + const maxHeight = Math.max(MIN_HEIGHT, window.innerHeight - posY - margin); + + if (nextWidth > maxWidth) { + nextWidth = maxWidth; + nextHeight = nextWidth / aspectRatio; + } + + if (nextHeight > maxHeight) { + nextHeight = maxHeight; + nextWidth = nextHeight * aspectRatio; + } + + // Final pass to honor both bounds while keeping the ratio + if (nextWidth > maxWidth) { + nextWidth = maxWidth; + nextHeight = nextWidth / aspectRatio; + } + + setVideoSize({ + width: Math.round(nextWidth), + height: Math.round(nextHeight), + }); + }, + [MIN_HEIGHT, MIN_WIDTH, VISIBLE_MARGIN] + ); + + const endResize = useCallback(() => { + setIsResizing(false); + resizeStateRef.current = null; + window.removeEventListener('mousemove', handleResizeMove); + window.removeEventListener('mouseup', endResize); + window.removeEventListener('touchmove', handleResizeMove); + window.removeEventListener('touchend', endResize); + }, [handleResizeMove]); + + const startResize = (event) => { + event.stopPropagation(); + event.preventDefault(); + + const clientX = + event.touches && event.touches.length ? event.touches[0].clientX : event.clientX; + const clientY = + event.touches && event.touches.length ? event.touches[0].clientY : event.clientY; + + const aspectRatio = + videoSize.height > 0 ? 
videoSize.width / videoSize.height : aspectRatioRef.current; + aspectRatioRef.current = aspectRatio; + + resizeStateRef.current = { + startX: clientX, + startY: clientY, + startWidth: videoSize.width, + startHeight: videoSize.height, + aspectRatio, + }; + + setIsResizing(true); + + window.addEventListener('mousemove', handleResizeMove); + window.addEventListener('mouseup', endResize); + window.addEventListener('touchmove', handleResizeMove); + window.addEventListener('touchend', endResize); + }; + + useEffect(() => { + return () => { + endResize(); + }; + }, [endResize]); + + useEffect(() => { + dragPositionRef.current = dragPosition; + }, [dragPosition]); + + // Initialize the floating window near bottom-right once + useEffect(() => { + if (initialPositionRef.current || typeof window === 'undefined') return; + + const initialX = Math.max(10, window.innerWidth - videoSize.width - 20); + const initialY = Math.max(10, window.innerHeight - videoSize.height - 20); + const pos = clampToVisible(initialX, initialY); + + initialPositionRef.current = pos; + setDragPosition(pos); + dragPositionRef.current = pos; + }, [clampToVisible, videoSize.height, videoSize.width]); + + const handleDragStart = useCallback( + (event, data) => { + const clientX = event.touches?.[0]?.clientX ?? event.clientX; + const clientY = event.touches?.[0]?.clientY ?? event.clientY; + const rect = videoContainerRef.current?.getBoundingClientRect(); + + if (clientX != null && clientY != null && rect) { + dragOffsetRef.current = { + x: clientX - rect.left, + y: clientY - rect.top, + }; + } else { + dragOffsetRef.current = { x: 0, y: 0 }; + } + + const clamped = clampToVisible(data?.x ?? 0, data?.y ?? 0); + setDragPosition(clamped); + dragPositionRef.current = clamped; + }, + [clampToVisible] + ); + + const handleDrag = useCallback( + (event) => { + const clientX = event.touches?.[0]?.clientX ?? event.clientX; + const clientY = event.touches?.[0]?.clientY ?? event.clientY; + if (clientX == null || clientY == null) return; + + const nextX = clientX - (dragOffsetRef.current?.x ?? 0); + const nextY = clientY - (dragOffsetRef.current?.y ?? 0); + const clamped = clampToVisible(nextX, nextY); + setDragPosition(clamped); + dragPositionRef.current = clamped; + }, + [clampToVisible] + ); + + const handleDragStop = useCallback( + (_, data) => { + const clamped = clampToVisible(data?.x ?? 0, data?.y ?? 0); + setDragPosition(clamped); + dragPositionRef.current = clamped; + }, + [clampToVisible] + ); + // If the floating video is hidden or no URL is selected, do not render if (!isVisible || !streamUrl) { return null; } return ( - +
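The drag handlers above clamp the window's top-left coordinate so at least VISIBLE_MARGIN pixels of the player remain reachable on every side. A self-contained sketch of that invariant (viewport and window sizes are sample values, not from the patch):

// Keep at least `margin` px of a w x h window inside a vw x vh viewport.
const clampCorner = (x, y, w, h, vw, vh, margin) => ({
  x: Math.min(Math.max(x, -(w - margin)), vw - margin),
  y: Math.min(Math.max(y, -(h - margin)), vh - margin),
});

// A 320x180 window dragged far off the left edge of a 1280x720 viewport
// snaps back so 48px stay visible: x = -(320 - 48) = -272.
console.log(clampCorner(-1000, 100, 320, 180, 1280, 720, 48)); // { x: -272, y: 100 }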
@@ -378,10 +589,12 @@ export default function FloatingVideo() {
 
         )}
 
+      {connection && stopVODClient && (
+        /* [JSX lost in extraction: an overlay control whose onClick
+           calls stopVODClient(connection.client_id)] */
+      )}
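The markup for that control did not survive extraction, but the surviving diff lines pin down its behavior: render only when a VOD connection and a stopVODClient callback exist, and disconnect that client on click. A hypothetical stand-in, with the component name and labels as placeholders rather than the original markup:

// Placeholder reconstruction; only the guard and the onClick call are
// taken from the surviving diff lines above.
function DisconnectVodButton({ connection, stopVODClient }) {
  if (!connection || !stopVODClient) return null;
  return (
    <button
      title="Disconnect VOD client"
      onClick={() => stopVODClient(connection.client_id)}
    >
      Disconnect
    </button>
  );
}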
@@ -1297,6 +1310,12 @@ const ChannelsPage = () => { await API.stopClient(channelId, clientId); }; + const stopVODClient = async (clientId) => { + await API.stopVODClient(clientId); + // Refresh VOD stats after stopping to update the UI + fetchVODStats(); + }; + // Function to fetch channel stats from API const fetchChannelStats = useCallback(async () => { try { @@ -1585,7 +1604,11 @@ const ChannelsPage = () => { ); } else if (connection.type === 'vod') { return ( - + ); } return null; From bd148a7f140bc0e29de3ecf2b5b8b253ea5d36dd Mon Sep 17 00:00:00 2001 From: Nick Sandstrom <32273437+nick4810@users.noreply.github.com> Date: Thu, 18 Dec 2025 07:46:21 -0800 Subject: [PATCH 096/220] Reverted Channels change for initial render --- frontend/src/pages/Channels.jsx | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/frontend/src/pages/Channels.jsx b/frontend/src/pages/Channels.jsx index d09f0c41..8f5cae26 100644 --- a/frontend/src/pages/Channels.jsx +++ b/frontend/src/pages/Channels.jsx @@ -10,7 +10,6 @@ import ErrorBoundary from '../components/ErrorBoundary'; const PageContent = () => { const authUser = useAuthStore((s) => s.user); - if (!authUser.id) throw new Error() const [allotmentSizes, setAllotmentSizes] = useLocalStorage( 'channels-splitter-sizes', @@ -25,6 +24,8 @@ const PageContent = () => { setAllotmentSizes(sizes); }; + if (!authUser.id) return <>; + if (authUser.user_level <= USER_LEVELS.STANDARD) { return ( From 2b1d5622a64378d1d3c50dd1649aa00ab1cb2b71 Mon Sep 17 00:00:00 2001 From: Nick Sandstrom <32273437+nick4810@users.noreply.github.com> Date: Thu, 18 Dec 2025 07:47:18 -0800 Subject: [PATCH 097/220] Setting User before fetch settings completes --- frontend/src/store/auth.jsx | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/frontend/src/store/auth.jsx b/frontend/src/store/auth.jsx index b1d60a1a..7f92f669 100644 --- a/frontend/src/store/auth.jsx +++ b/frontend/src/store/auth.jsx @@ -43,6 +43,8 @@ const useAuthStore = create((set, get) => ({ throw new Error('Unauthorized'); } + set({ user, isAuthenticated: true }); + // Ensure settings are loaded first await useSettingsStore.getState().fetchSettings(); @@ -62,8 +64,6 @@ const useAuthStore = create((set, get) => ({ if (user.user_level >= USER_LEVELS.ADMIN) { await Promise.all([useUsersStore.getState().fetchUsers()]); } - - set({ user, isAuthenticated: true }); } catch (error) { console.error('Error initializing data:', error); } From f43de44946000ddb56468b929bb6208876240ec2 Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Thu, 18 Dec 2025 10:58:48 -0600 Subject: [PATCH 098/220] Enhancement: Refactor Docker workflows to use docker/metadata-action for cleaner OCI label management --- .github/workflows/base-image.yml | 87 +++++++++++++++++++++++++++-- .github/workflows/ci.yml | 87 +++++++++++++++++++++++++++-- .github/workflows/release.yml | 95 ++++++++++++++++++++++++++++++-- CHANGELOG.md | 1 + 4 files changed, 257 insertions(+), 13 deletions(-) diff --git a/.github/workflows/base-image.yml b/.github/workflows/base-image.yml index f926d892..d290d49a 100644 --- a/.github/workflows/base-image.yml +++ b/.github/workflows/base-image.yml @@ -101,6 +101,28 @@ jobs: username: ${{ secrets.DOCKERHUB_USERNAME }} password: ${{ secrets.DOCKERHUB_TOKEN }} + - name: Extract metadata for Docker + id: meta + uses: docker/metadata-action@v5 + with: + images: | + ghcr.io/${{ needs.prepare.outputs.repo_owner }}/${{ needs.prepare.outputs.repo_name }} + docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${{ 
needs.prepare.outputs.repo_name }} + labels: | + org.opencontainers.image.title=${{ needs.prepare.outputs.repo_name }} + org.opencontainers.image.description=Your ultimate IPTV & stream Management companion. + org.opencontainers.image.url=https://github.com/${{ github.repository }} + org.opencontainers.image.source=https://github.com/${{ github.repository }} + org.opencontainers.image.version=${{ needs.prepare.outputs.branch_tag }}-${{ needs.prepare.outputs.timestamp }} + org.opencontainers.image.created=${{ needs.prepare.outputs.timestamp }} + org.opencontainers.image.revision=${{ github.sha }} + org.opencontainers.image.licenses=See repository + org.opencontainers.image.documentation=https://dispatcharr.github.io/Dispatcharr-Docs/ + org.opencontainers.image.vendor=${{ needs.prepare.outputs.repo_owner }} + org.opencontainers.image.authors=${{ github.actor }} + maintainer=${{ github.actor }} + build_version=DispatcharrBase version: ${{ needs.prepare.outputs.branch_tag }}-${{ needs.prepare.outputs.timestamp }} + - name: Build and push Docker base image uses: docker/build-push-action@v4 with: @@ -113,6 +135,7 @@ jobs: ghcr.io/${{ needs.prepare.outputs.repo_owner }}/${{ needs.prepare.outputs.repo_name }}:${{ needs.prepare.outputs.branch_tag }}-${{ needs.prepare.outputs.timestamp }}-${{ matrix.platform }} docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${{ needs.prepare.outputs.repo_name }}:${{ needs.prepare.outputs.branch_tag }}-${{ matrix.platform }} docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${{ needs.prepare.outputs.repo_name }}:${{ needs.prepare.outputs.branch_tag }}-${{ needs.prepare.outputs.timestamp }}-${{ matrix.platform }} + labels: ${{ steps.meta.outputs.labels }} build-args: | REPO_OWNER=${{ needs.prepare.outputs.repo_owner }} REPO_NAME=${{ needs.prepare.outputs.repo_name }} @@ -154,18 +177,74 @@ jobs: # GitHub Container Registry manifests # branch tag (e.g. base or base-dev) - docker buildx imagetools create --tag ghcr.io/${OWNER}/${REPO}:${BRANCH_TAG} \ + docker buildx imagetools create \ + --annotation "index:org.opencontainers.image.title=${{ needs.prepare.outputs.repo_name }}" \ + --annotation "index:org.opencontainers.image.description=Your ultimate IPTV & stream Management companion." 
\ + --annotation "index:org.opencontainers.image.url=https://github.com/${{ github.repository }}" \ + --annotation "index:org.opencontainers.image.source=https://github.com/${{ github.repository }}" \ + --annotation "index:org.opencontainers.image.version=${BRANCH_TAG}-${TIMESTAMP}" \ + --annotation "index:org.opencontainers.image.created=${TIMESTAMP}" \ + --annotation "index:org.opencontainers.image.revision=${{ github.sha }}" \ + --annotation "index:org.opencontainers.image.licenses=See repository" \ + --annotation "index:org.opencontainers.image.documentation=https://dispatcharr.github.io/Dispatcharr-Docs/" \ + --annotation "index:org.opencontainers.image.vendor=${OWNER}" \ + --annotation "index:org.opencontainers.image.authors=${{ github.actor }}" \ + --annotation "index:maintainer=${{ github.actor }}" \ + --annotation "index:build_version=DispatcharrBase version: ${BRANCH_TAG}-${TIMESTAMP}" \ + --tag ghcr.io/${OWNER}/${REPO}:${BRANCH_TAG} \ ghcr.io/${OWNER}/${REPO}:${BRANCH_TAG}-amd64 ghcr.io/${OWNER}/${REPO}:${BRANCH_TAG}-arm64 # branch + timestamp tag - docker buildx imagetools create --tag ghcr.io/${OWNER}/${REPO}:${BRANCH_TAG}-${TIMESTAMP} \ + docker buildx imagetools create \ + --annotation "index:org.opencontainers.image.title=${{ needs.prepare.outputs.repo_name }}" \ + --annotation "index:org.opencontainers.image.description=Your ultimate IPTV & stream Management companion." \ + --annotation "index:org.opencontainers.image.url=https://github.com/${{ github.repository }}" \ + --annotation "index:org.opencontainers.image.source=https://github.com/${{ github.repository }}" \ + --annotation "index:org.opencontainers.image.version=${BRANCH_TAG}-${TIMESTAMP}" \ + --annotation "index:org.opencontainers.image.created=${TIMESTAMP}" \ + --annotation "index:org.opencontainers.image.revision=${{ github.sha }}" \ + --annotation "index:org.opencontainers.image.licenses=See repository" \ + --annotation "index:org.opencontainers.image.documentation=https://dispatcharr.github.io/Dispatcharr-Docs/" \ + --annotation "index:org.opencontainers.image.vendor=${OWNER}" \ + --annotation "index:org.opencontainers.image.authors=${{ github.actor }}" \ + --annotation "index:maintainer=${{ github.actor }}" \ + --annotation "index:build_version=DispatcharrBase version: ${BRANCH_TAG}-${TIMESTAMP}" \ + --tag ghcr.io/${OWNER}/${REPO}:${BRANCH_TAG}-${TIMESTAMP} \ ghcr.io/${OWNER}/${REPO}:${BRANCH_TAG}-${TIMESTAMP}-amd64 ghcr.io/${OWNER}/${REPO}:${BRANCH_TAG}-${TIMESTAMP}-arm64 # Docker Hub manifests # branch tag (e.g. base or base-dev) - docker buildx imagetools create --tag docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${REPO}:${BRANCH_TAG} \ + docker buildx imagetools create \ + --annotation "index:org.opencontainers.image.title=${{ needs.prepare.outputs.repo_name }}" \ + --annotation "index:org.opencontainers.image.description=Your ultimate IPTV & stream Management companion." 
\ + --annotation "index:org.opencontainers.image.url=https://github.com/${{ github.repository }}" \ + --annotation "index:org.opencontainers.image.source=https://github.com/${{ github.repository }}" \ + --annotation "index:org.opencontainers.image.version=${BRANCH_TAG}-${TIMESTAMP}" \ + --annotation "index:org.opencontainers.image.created=${TIMESTAMP}" \ + --annotation "index:org.opencontainers.image.revision=${{ github.sha }}" \ + --annotation "index:org.opencontainers.image.licenses=See repository" \ + --annotation "index:org.opencontainers.image.documentation=https://dispatcharr.github.io/Dispatcharr-Docs/" \ + --annotation "index:org.opencontainers.image.vendor=${OWNER}" \ + --annotation "index:org.opencontainers.image.authors=${{ github.actor }}" \ + --annotation "index:maintainer=${{ github.actor }}" \ + --annotation "index:build_version=DispatcharrBase version: ${BRANCH_TAG}-${TIMESTAMP}" \ + --tag docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${REPO}:${BRANCH_TAG} \ docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${REPO}:${BRANCH_TAG}-amd64 docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${REPO}:${BRANCH_TAG}-arm64 # branch + timestamp tag - docker buildx imagetools create --tag docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${REPO}:${BRANCH_TAG}-${TIMESTAMP} \ + docker buildx imagetools create \ + --annotation "index:org.opencontainers.image.title=${{ needs.prepare.outputs.repo_name }}" \ + --annotation "index:org.opencontainers.image.description=Your ultimate IPTV & stream Management companion." \ + --annotation "index:org.opencontainers.image.url=https://github.com/${{ github.repository }}" \ + --annotation "index:org.opencontainers.image.source=https://github.com/${{ github.repository }}" \ + --annotation "index:org.opencontainers.image.version=${BRANCH_TAG}-${TIMESTAMP}" \ + --annotation "index:org.opencontainers.image.created=${TIMESTAMP}" \ + --annotation "index:org.opencontainers.image.revision=${{ github.sha }}" \ + --annotation "index:org.opencontainers.image.licenses=See repository" \ + --annotation "index:org.opencontainers.image.documentation=https://dispatcharr.github.io/Dispatcharr-Docs/" \ + --annotation "index:org.opencontainers.image.vendor=${OWNER}" \ + --annotation "index:org.opencontainers.image.authors=${{ github.actor }}" \ + --annotation "index:maintainer=${{ github.actor }}" \ + --annotation "index:build_version=DispatcharrBase version: ${BRANCH_TAG}-${TIMESTAMP}" \ + --tag docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${REPO}:${BRANCH_TAG}-${TIMESTAMP} \ docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${REPO}:${BRANCH_TAG}-${TIMESTAMP}-amd64 docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${REPO}:${BRANCH_TAG}-${TIMESTAMP}-arm64 diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 80bd8984..d8f4a3a7 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -119,7 +119,27 @@ jobs: username: ${{ secrets.DOCKERHUB_USERNAME }} password: ${{ secrets.DOCKERHUB_TOKEN }} - # use metadata from the prepare job + - name: Extract metadata for Docker + id: meta + uses: docker/metadata-action@v5 + with: + images: | + ghcr.io/${{ needs.prepare.outputs.repo_owner }}/${{ needs.prepare.outputs.repo_name }} + docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${{ needs.prepare.outputs.repo_name }} + labels: | + org.opencontainers.image.title=${{ needs.prepare.outputs.repo_name }} + org.opencontainers.image.description=Your ultimate IPTV & stream Management companion. 
+ org.opencontainers.image.url=https://github.com/${{ github.repository }} + org.opencontainers.image.source=https://github.com/${{ github.repository }} + org.opencontainers.image.version=${{ needs.prepare.outputs.version }}-${{ needs.prepare.outputs.timestamp }} + org.opencontainers.image.created=${{ needs.prepare.outputs.timestamp }} + org.opencontainers.image.revision=${{ github.sha }} + org.opencontainers.image.licenses=See repository + org.opencontainers.image.documentation=https://dispatcharr.github.io/Dispatcharr-Docs/ + org.opencontainers.image.vendor=${{ needs.prepare.outputs.repo_owner }} + org.opencontainers.image.authors=${{ github.actor }} + maintainer=${{ github.actor }} + build_version=Dispatcharr version: ${{ needs.prepare.outputs.version }}-${{ needs.prepare.outputs.timestamp }} - name: Build and push Docker image uses: docker/build-push-action@v4 @@ -137,6 +157,7 @@ jobs: ghcr.io/${{ needs.prepare.outputs.repo_owner }}/${{ needs.prepare.outputs.repo_name }}:${{ needs.prepare.outputs.version }}-${{ needs.prepare.outputs.timestamp }}-${{ matrix.platform }} docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${{ needs.prepare.outputs.repo_name }}:${{ needs.prepare.outputs.branch_tag }}-${{ matrix.platform }} docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${{ needs.prepare.outputs.repo_name }}:${{ needs.prepare.outputs.version }}-${{ needs.prepare.outputs.timestamp }}-${{ matrix.platform }} + labels: ${{ steps.meta.outputs.labels }} build-args: | REPO_OWNER=${{ needs.prepare.outputs.repo_owner }} REPO_NAME=${{ needs.prepare.outputs.repo_name }} @@ -181,16 +202,72 @@ jobs: echo "Creating multi-arch manifest for ${OWNER}/${REPO}" # branch tag (e.g. latest or dev) - docker buildx imagetools create --tag ghcr.io/${OWNER}/${REPO}:${BRANCH_TAG} \ + docker buildx imagetools create \ + --annotation "index:org.opencontainers.image.title=${{ needs.prepare.outputs.repo_name }}" \ + --annotation "index:org.opencontainers.image.description=Your ultimate IPTV & stream Management companion." \ + --annotation "index:org.opencontainers.image.url=https://github.com/${{ github.repository }}" \ + --annotation "index:org.opencontainers.image.source=https://github.com/${{ github.repository }}" \ + --annotation "index:org.opencontainers.image.version=${BRANCH_TAG}" \ + --annotation "index:org.opencontainers.image.created=${TIMESTAMP}" \ + --annotation "index:org.opencontainers.image.revision=${{ github.sha }}" \ + --annotation "index:org.opencontainers.image.licenses=See repository" \ + --annotation "index:org.opencontainers.image.documentation=https://dispatcharr.github.io/Dispatcharr-Docs/" \ + --annotation "index:org.opencontainers.image.vendor=${OWNER}" \ + --annotation "index:org.opencontainers.image.authors=${{ github.actor }}" \ + --annotation "index:maintainer=${{ github.actor }}" \ + --annotation "index:build_version=Dispatcharr version: ${VERSION}-${TIMESTAMP}" \ + --tag ghcr.io/${OWNER}/${REPO}:${BRANCH_TAG} \ ghcr.io/${OWNER}/${REPO}:${BRANCH_TAG}-amd64 ghcr.io/${OWNER}/${REPO}:${BRANCH_TAG}-arm64 # version + timestamp tag - docker buildx imagetools create --tag ghcr.io/${OWNER}/${REPO}:${VERSION}-${TIMESTAMP} \ + docker buildx imagetools create \ + --annotation "index:org.opencontainers.image.title=${{ needs.prepare.outputs.repo_name }}" \ + --annotation "index:org.opencontainers.image.description=Your ultimate IPTV & stream Management companion." 
\ + --annotation "index:org.opencontainers.image.url=https://github.com/${{ github.repository }}" \ + --annotation "index:org.opencontainers.image.source=https://github.com/${{ github.repository }}" \ + --annotation "index:org.opencontainers.image.version=${VERSION}-${TIMESTAMP}" \ + --annotation "index:org.opencontainers.image.created=${TIMESTAMP}" \ + --annotation "index:org.opencontainers.image.revision=${{ github.sha }}" \ + --annotation "index:org.opencontainers.image.licenses=See repository" \ + --annotation "index:org.opencontainers.image.documentation=https://dispatcharr.github.io/Dispatcharr-Docs/" \ + --annotation "index:org.opencontainers.image.vendor=${OWNER}" \ + --annotation "index:org.opencontainers.image.authors=${{ github.actor }}" \ + --annotation "index:maintainer=${{ github.actor }}" \ + --annotation "index:build_version=Dispatcharr version: ${VERSION}-${TIMESTAMP}" \ + --tag ghcr.io/${OWNER}/${REPO}:${VERSION}-${TIMESTAMP} \ ghcr.io/${OWNER}/${REPO}:${VERSION}-${TIMESTAMP}-amd64 ghcr.io/${OWNER}/${REPO}:${VERSION}-${TIMESTAMP}-arm64 # also create Docker Hub manifests using the same username - docker buildx imagetools create --tag docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${REPO}:${BRANCH_TAG} \ + docker buildx imagetools create \ + --annotation "index:org.opencontainers.image.title=${{ needs.prepare.outputs.repo_name }}" \ + --annotation "index:org.opencontainers.image.description=Your ultimate IPTV & stream Management companion." \ + --annotation "index:org.opencontainers.image.url=https://github.com/${{ github.repository }}" \ + --annotation "index:org.opencontainers.image.source=https://github.com/${{ github.repository }}" \ + --annotation "index:org.opencontainers.image.version=${BRANCH_TAG}" \ + --annotation "index:org.opencontainers.image.created=${TIMESTAMP}" \ + --annotation "index:org.opencontainers.image.revision=${{ github.sha }}" \ + --annotation "index:org.opencontainers.image.licenses=See repository" \ + --annotation "index:org.opencontainers.image.documentation=https://dispatcharr.github.io/Dispatcharr-Docs/" \ + --annotation "index:org.opencontainers.image.vendor=${OWNER}" \ + --annotation "index:org.opencontainers.image.authors=${{ github.actor }}" \ + --annotation "index:maintainer=${{ github.actor }}" \ + --annotation "index:build_version=Dispatcharr version: ${VERSION}-${TIMESTAMP}" \ + --tag docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${REPO}:${BRANCH_TAG} \ docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${REPO}:${BRANCH_TAG}-amd64 docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${REPO}:${BRANCH_TAG}-arm64 - docker buildx imagetools create --tag docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${REPO}:${VERSION}-${TIMESTAMP} \ + docker buildx imagetools create \ + --annotation "index:org.opencontainers.image.title=${{ needs.prepare.outputs.repo_name }}" \ + --annotation "index:org.opencontainers.image.description=Your ultimate IPTV & stream Management companion." 
\ + --annotation "index:org.opencontainers.image.url=https://github.com/${{ github.repository }}" \ + --annotation "index:org.opencontainers.image.source=https://github.com/${{ github.repository }}" \ + --annotation "index:org.opencontainers.image.version=${VERSION}-${TIMESTAMP}" \ + --annotation "index:org.opencontainers.image.created=${TIMESTAMP}" \ + --annotation "index:org.opencontainers.image.revision=${{ github.sha }}" \ + --annotation "index:org.opencontainers.image.licenses=See repository" \ + --annotation "index:org.opencontainers.image.documentation=https://dispatcharr.github.io/Dispatcharr-Docs/" \ + --annotation "index:org.opencontainers.image.vendor=${OWNER}" \ + --annotation "index:org.opencontainers.image.authors=${{ github.actor }}" \ + --annotation "index:maintainer=${{ github.actor }}" \ + --annotation "index:build_version=Dispatcharr version: ${VERSION}-${TIMESTAMP}" \ + --tag docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${REPO}:${VERSION}-${TIMESTAMP} \ docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${REPO}:${VERSION}-${TIMESTAMP}-amd64 docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${REPO}:${VERSION}-${TIMESTAMP}-arm64 diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index e9734eb4..a1cb27bb 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -25,6 +25,7 @@ jobs: new_version: ${{ steps.update_version.outputs.new_version }} repo_owner: ${{ steps.meta.outputs.repo_owner }} repo_name: ${{ steps.meta.outputs.repo_name }} + timestamp: ${{ steps.timestamp.outputs.timestamp }} steps: - uses: actions/checkout@v3 with: @@ -56,6 +57,12 @@ jobs: REPO_NAME=$(echo "${{ github.repository }}" | cut -d '/' -f 2 | tr '[:upper:]' '[:lower:]') echo "repo_name=${REPO_NAME}" >> $GITHUB_OUTPUT + - name: Generate timestamp for build + id: timestamp + run: | + TIMESTAMP=$(date -u +'%Y%m%d%H%M%S') + echo "timestamp=${TIMESTAMP}" >> $GITHUB_OUTPUT + - name: Commit and Tag run: | git add version.py CHANGELOG.md @@ -104,6 +111,28 @@ jobs: username: ${{ secrets.DOCKERHUB_USERNAME }} password: ${{ secrets.DOCKERHUB_TOKEN }} + - name: Extract metadata for Docker + id: meta + uses: docker/metadata-action@v5 + with: + images: | + ghcr.io/${{ needs.prepare.outputs.repo_owner }}/${{ needs.prepare.outputs.repo_name }} + docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${{ needs.prepare.outputs.repo_name }} + labels: | + org.opencontainers.image.title=${{ needs.prepare.outputs.repo_name }} + org.opencontainers.image.description=Your ultimate IPTV & stream Management companion. 
+ org.opencontainers.image.url=https://github.com/${{ github.repository }} + org.opencontainers.image.source=https://github.com/${{ github.repository }} + org.opencontainers.image.version=${{ needs.prepare.outputs.new_version }} + org.opencontainers.image.created=${{ needs.prepare.outputs.timestamp }} + org.opencontainers.image.revision=${{ github.sha }} + org.opencontainers.image.licenses=See repository + org.opencontainers.image.documentation=https://dispatcharr.github.io/Dispatcharr-Docs/ + org.opencontainers.image.vendor=${{ needs.prepare.outputs.repo_owner }} + org.opencontainers.image.authors=${{ github.actor }} + maintainer=${{ github.actor }} + build_version=Dispatcharr version: ${{ needs.prepare.outputs.new_version }} Build date: ${{ needs.prepare.outputs.timestamp }} + - name: Build and push Docker image uses: docker/build-push-action@v4 with: @@ -115,6 +144,7 @@ jobs: ghcr.io/${{ needs.prepare.outputs.repo_owner }}/${{ needs.prepare.outputs.repo_name }}:${{ needs.prepare.outputs.new_version }}-${{ matrix.platform }} docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${{ needs.prepare.outputs.repo_name }}:latest-${{ matrix.platform }} docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${{ needs.prepare.outputs.repo_name }}:${{ needs.prepare.outputs.new_version }}-${{ matrix.platform }} + labels: ${{ steps.meta.outputs.labels }} build-args: | REPO_OWNER=${{ needs.prepare.outputs.repo_owner }} REPO_NAME=${{ needs.prepare.outputs.repo_name }} @@ -149,25 +179,82 @@ jobs: OWNER=${{ needs.prepare.outputs.repo_owner }} REPO=${{ needs.prepare.outputs.repo_name }} VERSION=${{ needs.prepare.outputs.new_version }} + TIMESTAMP=${{ needs.prepare.outputs.timestamp }} echo "Creating multi-arch manifest for ${OWNER}/${REPO}" # GitHub Container Registry manifests # latest tag - docker buildx imagetools create --tag ghcr.io/${OWNER}/${REPO}:latest \ + docker buildx imagetools create \ + --annotation "index:org.opencontainers.image.title=${{ needs.prepare.outputs.repo_name }}" \ + --annotation "index:org.opencontainers.image.description=Your ultimate IPTV & stream Management companion." \ + --annotation "index:org.opencontainers.image.url=https://github.com/${{ github.repository }}" \ + --annotation "index:org.opencontainers.image.source=https://github.com/${{ github.repository }}" \ + --annotation "index:org.opencontainers.image.version=latest" \ + --annotation "index:org.opencontainers.image.created=${TIMESTAMP}" \ + --annotation "index:org.opencontainers.image.revision=${{ github.sha }}" \ + --annotation "index:org.opencontainers.image.licenses=See repository" \ + --annotation "index:org.opencontainers.image.documentation=https://dispatcharr.github.io/Dispatcharr-Docs/" \ + --annotation "index:org.opencontainers.image.vendor=${OWNER}" \ + --annotation "index:org.opencontainers.image.authors=${{ github.actor }}" \ + --annotation "index:maintainer=${{ github.actor }}" \ + --annotation "index:build_version=Dispatcharr version: ${VERSION} Build date: ${TIMESTAMP}" \ + --tag ghcr.io/${OWNER}/${REPO}:latest \ ghcr.io/${OWNER}/${REPO}:latest-amd64 ghcr.io/${OWNER}/${REPO}:latest-arm64 # version tag - docker buildx imagetools create --tag ghcr.io/${OWNER}/${REPO}:${VERSION} \ + docker buildx imagetools create \ + --annotation "index:org.opencontainers.image.title=${{ needs.prepare.outputs.repo_name }}" \ + --annotation "index:org.opencontainers.image.description=Your ultimate IPTV & stream Management companion." 
\ + --annotation "index:org.opencontainers.image.url=https://github.com/${{ github.repository }}" \ + --annotation "index:org.opencontainers.image.source=https://github.com/${{ github.repository }}" \ + --annotation "index:org.opencontainers.image.version=${VERSION}" \ + --annotation "index:org.opencontainers.image.created=${TIMESTAMP}" \ + --annotation "index:org.opencontainers.image.revision=${{ github.sha }}" \ + --annotation "index:org.opencontainers.image.licenses=See repository" \ + --annotation "index:org.opencontainers.image.documentation=https://dispatcharr.github.io/Dispatcharr-Docs/" \ + --annotation "index:org.opencontainers.image.vendor=${OWNER}" \ + --annotation "index:org.opencontainers.image.authors=${{ github.actor }}" \ + --annotation "index:maintainer=${{ github.actor }}" \ + --annotation "index:build_version=Dispatcharr version: ${VERSION} Build date: ${TIMESTAMP}" \ + --tag ghcr.io/${OWNER}/${REPO}:${VERSION} \ ghcr.io/${OWNER}/${REPO}:${VERSION}-amd64 ghcr.io/${OWNER}/${REPO}:${VERSION}-arm64 # Docker Hub manifests # latest tag - docker buildx imagetools create --tag docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${REPO}:latest \ + docker buildx imagetools create \ + --annotation "index:org.opencontainers.image.title=${{ needs.prepare.outputs.repo_name }}" \ + --annotation "index:org.opencontainers.image.description=Your ultimate IPTV & stream Management companion." \ + --annotation "index:org.opencontainers.image.url=https://github.com/${{ github.repository }}" \ + --annotation "index:org.opencontainers.image.source=https://github.com/${{ github.repository }}" \ + --annotation "index:org.opencontainers.image.version=latest" \ + --annotation "index:org.opencontainers.image.created=${TIMESTAMP}" \ + --annotation "index:org.opencontainers.image.revision=${{ github.sha }}" \ + --annotation "index:org.opencontainers.image.licenses=See repository" \ + --annotation "index:org.opencontainers.image.documentation=https://dispatcharr.github.io/Dispatcharr-Docs/" \ + --annotation "index:org.opencontainers.image.vendor=${OWNER}" \ + --annotation "index:org.opencontainers.image.authors=${{ github.actor }}" \ + --annotation "index:maintainer=${{ github.actor }}" \ + --annotation "index:build_version=Dispatcharr version: ${VERSION} Build date: ${TIMESTAMP}" \ + --tag docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${REPO}:latest \ docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${REPO}:latest-amd64 docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${REPO}:latest-arm64 # version tag - docker buildx imagetools create --tag docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${REPO}:${VERSION} \ + docker buildx imagetools create \ + --annotation "index:org.opencontainers.image.title=${{ needs.prepare.outputs.repo_name }}" \ + --annotation "index:org.opencontainers.image.description=Your ultimate IPTV & stream Management companion." 
\ + --annotation "index:org.opencontainers.image.url=https://github.com/${{ github.repository }}" \ + --annotation "index:org.opencontainers.image.source=https://github.com/${{ github.repository }}" \ + --annotation "index:org.opencontainers.image.version=${VERSION}" \ + --annotation "index:org.opencontainers.image.created=${TIMESTAMP}" \ + --annotation "index:org.opencontainers.image.revision=${{ github.sha }}" \ + --annotation "index:org.opencontainers.image.licenses=See repository" \ + --annotation "index:org.opencontainers.image.documentation=https://dispatcharr.github.io/Dispatcharr-Docs/" \ + --annotation "index:org.opencontainers.image.vendor=${OWNER}" \ + --annotation "index:org.opencontainers.image.authors=${{ github.actor }}" \ + --annotation "index:maintainer=${{ github.actor }}" \ + --annotation "index:build_version=Dispatcharr version: ${VERSION} Build date: ${TIMESTAMP}" \ + --tag docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${REPO}:${VERSION} \ docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${REPO}:${VERSION}-amd64 docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${REPO}:${VERSION}-arm64 create-release: diff --git a/CHANGELOG.md b/CHANGELOG.md index 6ed00250..906987b0 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -15,6 +15,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 ### Changed - Removed unreachable code path in m3u output - Thanks [@DawtCom](https://github.com/DawtCom) +- GitHub Actions workflows now use `docker/metadata-action` for cleaner and more maintainable OCI-compliant image label generation across all build pipelines (ci.yml, base-image.yml, release.yml). Labels are applied to both platform-specific images and multi-arch manifests with proper annotation formatting. ### Fixed From 3ddcadb50deb2d61f8419d6c1dff3fb203d1266b Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Thu, 18 Dec 2025 11:07:13 -0600 Subject: [PATCH 099/220] changelog: Give acknowledgement and reference issue. --- CHANGELOG.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 24f88ba5..8d12ee80 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -15,7 +15,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 ### Changed - Removed unreachable code path in m3u output - Thanks [@DawtCom](https://github.com/DawtCom) -- GitHub Actions workflows now use `docker/metadata-action` for cleaner and more maintainable OCI-compliant image label generation across all build pipelines (ci.yml, base-image.yml, release.yml). Labels are applied to both platform-specific images and multi-arch manifests with proper annotation formatting. +- GitHub Actions workflows now use `docker/metadata-action` for cleaner and more maintainable OCI-compliant image label generation across all build pipelines (ci.yml, base-image.yml, release.yml). Labels are applied to both platform-specific images and multi-arch manifests with proper annotation formatting. - Thanks [@mrdynamo]https://github.com/mrdynamo) (Closes #724) - Update docker/dev-build.sh to support private registries, multiple architectures and pushing. 
Now you can do things like `dev-build.sh -p -r my.private.registry -a linux/arm64,linux/amd64` - Thanks [@jdblack](https://github.com/jdblack)
 
 ### Fixed
 
From 9623dff6b199270b50dc2a7ea00a6d0a7fe43c9d Mon Sep 17 00:00:00 2001
From: SergeantPanda
Date: Thu, 18 Dec 2025 13:19:18 -0600
Subject: [PATCH 100/220] Enhancement: Updated dependencies: Django (5.2.4 →
 5.2.9) includes CVE security patch, psycopg2-binary (2.9.10 → 2.9.11),
 celery (5.5.3 → 5.6.0), djangorestframework (3.16.0 → 3.16.1), requests
 (2.32.4 → 2.32.5), psutil (7.0.0 → 7.1.3), gevent (25.5.1 → 25.9.1),
 rapidfuzz (3.13.0 → 3.14.3), torch (2.7.1 → 2.9.1), sentence-transformers
 (5.1.0 → 5.2.0), lxml (6.0.0 → 6.0.2) (Closes #662)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 CHANGELOG.md     |  1 +
 requirements.txt | 24 ++++++++++++------------
 2 files changed, 13 insertions(+), 12 deletions(-)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index 8d12ee80..d95ba566 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -17,6 +17,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
 - Removed unreachable code path in m3u output - Thanks [@DawtCom](https://github.com/DawtCom)
 - GitHub Actions workflows now use `docker/metadata-action` for cleaner and more maintainable OCI-compliant image label generation across all build pipelines (ci.yml, base-image.yml, release.yml). Labels are applied to both platform-specific images and multi-arch manifests with proper annotation formatting. - Thanks [@mrdynamo](https://github.com/mrdynamo) (Closes #724)
 - Update docker/dev-build.sh to support private registries, multiple architectures and pushing.
Now you can do things like `dev-build.sh -p -r my.private.registry -a linux/arm64,linux/amd64` - Thanks [@jdblack](https://github.com/jdblack)
+- Updated dependencies: Django (5.2.4 → 5.2.9) includes CVE security patch, psycopg2-binary (2.9.10 → 2.9.11), celery (5.5.3 → 5.6.0), djangorestframework (3.16.0 → 3.16.1), requests (2.32.4 → 2.32.5), psutil (7.0.0 → 7.1.3), gevent (25.5.1 → 25.9.1), rapidfuzz (3.13.0 → 3.14.3), torch (2.7.1 → 2.9.1), sentence-transformers (5.1.0 → 5.2.0), lxml (6.0.0 → 6.0.2) (Closes #662)
 
 ### Fixed
 
diff --git a/requirements.txt b/requirements.txt
index 9d7c1965..3416804d 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -1,32 +1,32 @@
-Django==5.2.4
-psycopg2-binary==2.9.10
-celery[redis]==5.5.3
-djangorestframework==3.16.0
-requests==2.32.4
-psutil==7.0.0
+Django==5.2.9
+psycopg2-binary==2.9.11
+celery[redis]==5.6.0
+djangorestframework==3.16.1
+requests==2.32.5
+psutil==7.1.3
 pillow
-drf-yasg>=1.20.0
+drf-yasg>=1.21.11
 streamlink
 python-vlc
 yt-dlp
-gevent==25.5.1
+gevent==25.9.1
 daphne
 uwsgi
 django-cors-headers
 djangorestframework-simplejwt
 m3u8
-rapidfuzz==3.13.0
+rapidfuzz==3.14.3
 regex # Required by transformers but also used for advanced regex features
 tzlocal
 # PyTorch dependencies (CPU only)
 --extra-index-url https://download.pytorch.org/whl/cpu/
-torch==2.7.1+cpu
+torch==2.9.1+cpu
 # ML/NLP dependencies
-sentence-transformers==5.1.0
+sentence-transformers==5.2.0
 channels
 channels-redis==4.3.0
 django-filter
 django-celery-beat
-lxml==6.0.0
+lxml==6.0.2

From 1510197bf0e24dc0928d7c393162526de81b5bea Mon Sep 17 00:00:00 2001
From: Dispatcharr
Date: Thu, 18 Dec 2025 14:19:51 -0600
Subject: [PATCH 101/220] Floating Video

Added handles on the corners of FloatingVideo to resize

---
 CHANGELOG.md                              |   1 +
 frontend/src/components/FloatingVideo.jsx | 228 +++++++++++++++++-----
 2 files changed, 185 insertions(+), 44 deletions(-)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index d95ba566..f2f361bd 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -18,6 +18,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
 - GitHub Actions workflows now use `docker/metadata-action` for cleaner and more maintainable OCI-compliant image label generation across all build pipelines (ci.yml, base-image.yml, release.yml). Labels are applied to both platform-specific images and multi-arch manifests with proper annotation formatting. - Thanks [@mrdynamo](https://github.com/mrdynamo) (Closes #724)
 - Update docker/dev-build.sh to support private registries, multiple architectures and pushing. Now you can do things like `dev-build.sh -p -r my.private.registry -a linux/arm64,linux/amd64` - Thanks [@jdblack](https://github.com/jdblack)
 - Updated dependencies: Django (5.2.4 → 5.2.9) includes CVE security patch, psycopg2-binary (2.9.10 → 2.9.11), celery (5.5.3 → 5.6.0), djangorestframework (3.16.0 → 3.16.1), requests (2.32.4 → 2.32.5), psutil (7.0.0 → 7.1.3), gevent (25.5.1 → 25.9.1), rapidfuzz (3.13.0 → 3.14.3), torch (2.7.1 → 2.9.1), sentence-transformers (5.1.0 → 5.2.0), lxml (6.0.0 → 6.0.2) (Closes #662)
+- Floating video player now supports resizing via drag handles, with minimum size enforcement and viewport/page boundary constraints to keep it visible.
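The corner-handle resize described by that entry (implemented in the FloatingVideo.jsx diff below) keeps the aspect ratio locked: whichever drag axis moved more drives the resize, and the other dimension follows. A simplified sketch, with the per-corner direction bookkeeping omitted; only the minimum sizes (220x124) come from the patch:

// Width-driven unless the drag is mostly vertical; both minimums are
// honored while preserving the original ratio.
const resizeKeepRatio = (w, h, dx, dy, minW = 220, minH = 124) => {
  const ratio = w / h;
  let nextW = w + dx;
  let nextH = nextW / ratio;
  if (Math.abs(dy) > Math.abs(dx)) {
    nextH = h + dy;
    nextW = nextH * ratio;
  }
  if (nextW < minW) { nextW = minW; nextH = nextW / ratio; }
  if (nextH < minH) { nextH = minH; nextW = nextH * ratio; }
  return { width: Math.round(nextW), height: Math.round(nextH) };
};

// Dragging a bottom-right handle 100px right on a 320x180 window:
console.log(resizeKeepRatio(320, 180, 100, 10)); // { width: 420, height: 236 }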
### Fixed diff --git a/frontend/src/components/FloatingVideo.jsx b/frontend/src/components/FloatingVideo.jsx index 611d7e2a..857f67aa 100644 --- a/frontend/src/components/FloatingVideo.jsx +++ b/frontend/src/components/FloatingVideo.jsx @@ -30,6 +30,79 @@ export default function FloatingVideo() { const MIN_WIDTH = 220; const MIN_HEIGHT = 124; const VISIBLE_MARGIN = 48; // keep part of the window visible when dragging + const HANDLE_SIZE = 18; + const HANDLE_OFFSET = 0; + const resizeHandleBaseStyle = { + position: 'absolute', + width: HANDLE_SIZE, + height: HANDLE_SIZE, + backgroundColor: 'transparent', + borderRadius: 6, + zIndex: 8, + touchAction: 'none', + }; + const resizeHandles = [ + { + id: 'bottom-right', + cursor: 'nwse-resize', + xDir: 1, + yDir: 1, + isLeft: false, + isTop: false, + style: { + bottom: HANDLE_OFFSET, + right: HANDLE_OFFSET, + borderBottom: '2px solid rgba(255, 255, 255, 0.9)', + borderRight: '2px solid rgba(255, 255, 255, 0.9)', + borderRadius: '0 0 6px 0', + }, + }, + { + id: 'bottom-left', + cursor: 'nesw-resize', + xDir: -1, + yDir: 1, + isLeft: true, + isTop: false, + style: { + bottom: HANDLE_OFFSET, + left: HANDLE_OFFSET, + borderBottom: '2px solid rgba(255, 255, 255, 0.9)', + borderLeft: '2px solid rgba(255, 255, 255, 0.9)', + borderRadius: '0 0 0 6px', + }, + }, + { + id: 'top-right', + cursor: 'nesw-resize', + xDir: 1, + yDir: -1, + isLeft: false, + isTop: true, + style: { + top: HANDLE_OFFSET, + right: HANDLE_OFFSET, + borderTop: '2px solid rgba(255, 255, 255, 0.9)', + borderRight: '2px solid rgba(255, 255, 255, 0.9)', + borderRadius: '0 6px 0 0', + }, + }, + { + id: 'top-left', + cursor: 'nwse-resize', + xDir: -1, + yDir: -1, + isLeft: true, + isTop: true, + style: { + top: HANDLE_OFFSET, + left: HANDLE_OFFSET, + borderTop: '2px solid rgba(255, 255, 255, 0.9)', + borderLeft: '2px solid rgba(255, 255, 255, 0.9)', + borderRadius: '6px 0 0 0', + }, + }, + ]; // Safely destroy the mpegts player to prevent errors const safeDestroyPlayer = () => { @@ -344,6 +417,23 @@ export default function FloatingVideo() { [VISIBLE_MARGIN, videoSize.height, videoSize.width] ); + const clampToVisibleWithSize = useCallback( + (x, y, width, height) => { + if (typeof window === 'undefined') return { x, y }; + + const minX = -(width - VISIBLE_MARGIN); + const minY = -(height - VISIBLE_MARGIN); + const maxX = window.innerWidth - VISIBLE_MARGIN; + const maxY = window.innerHeight - VISIBLE_MARGIN; + + return { + x: Math.min(Math.max(x, minX), maxX), + y: Math.min(Math.max(y, minY), maxY), + }; + }, + [VISIBLE_MARGIN] + ); + const handleResizeMove = useCallback( (event) => { if (!resizeStateRef.current) return; @@ -353,61 +443,111 @@ export default function FloatingVideo() { const clientY = event.touches && event.touches.length ? 
event.touches[0].clientY : event.clientY; - const deltaX = clientX - resizeStateRef.current.startX; - const deltaY = clientY - resizeStateRef.current.startY; - const aspectRatio = resizeStateRef.current.aspectRatio || aspectRatioRef.current; + const { + startX, + startY, + startWidth, + startHeight, + startPos, + handle, + aspectRatio, + } = resizeStateRef.current; + const deltaX = clientX - startX; + const deltaY = clientY - startY; + const widthDelta = deltaX * handle.xDir; + const heightDelta = deltaY * handle.yDir; + const ratio = aspectRatio || aspectRatioRef.current; // Derive width/height while keeping the original aspect ratio - let nextWidth = resizeStateRef.current.startWidth + deltaX; - let nextHeight = nextWidth / aspectRatio; + let nextWidth = startWidth + widthDelta; + let nextHeight = nextWidth / ratio; // Allow vertical-driven resize if the user drags mostly vertically if (Math.abs(deltaY) > Math.abs(deltaX)) { - nextHeight = resizeStateRef.current.startHeight + deltaY; - nextWidth = nextHeight * aspectRatio; + nextHeight = startHeight + heightDelta; + nextWidth = nextHeight * ratio; } // Respect minimums while keeping the ratio if (nextWidth < MIN_WIDTH) { nextWidth = MIN_WIDTH; - nextHeight = nextWidth / aspectRatio; + nextHeight = nextWidth / ratio; } if (nextHeight < MIN_HEIGHT) { nextHeight = MIN_HEIGHT; - nextWidth = nextHeight * aspectRatio; + nextWidth = nextHeight * ratio; } // Keep within viewport with a margin based on current position - const posX = dragPositionRef.current?.x ?? 0; - const posY = dragPositionRef.current?.y ?? 0; + const posX = startPos?.x ?? 0; + const posY = startPos?.y ?? 0; const margin = VISIBLE_MARGIN; + let maxWidth = null; + let maxHeight = null; - const maxWidth = Math.max(MIN_WIDTH, window.innerWidth - posX - margin); - const maxHeight = Math.max(MIN_HEIGHT, window.innerHeight - posY - margin); - - if (nextWidth > maxWidth) { - nextWidth = maxWidth; - nextHeight = nextWidth / aspectRatio; + if (!handle.isLeft) { + maxWidth = Math.max(MIN_WIDTH, window.innerWidth - posX - margin); } - if (nextHeight > maxHeight) { + if (!handle.isTop) { + maxHeight = Math.max(MIN_HEIGHT, window.innerHeight - posY - margin); + } + + if (maxWidth != null && nextWidth > maxWidth) { + nextWidth = maxWidth; + nextHeight = nextWidth / ratio; + } + + if (maxHeight != null && nextHeight > maxHeight) { nextHeight = maxHeight; - nextWidth = nextHeight * aspectRatio; + nextWidth = nextHeight * ratio; } // Final pass to honor both bounds while keeping the ratio - if (nextWidth > maxWidth) { + if (maxWidth != null && nextWidth > maxWidth) { nextWidth = maxWidth; - nextHeight = nextWidth / aspectRatio; + nextHeight = nextWidth / ratio; } setVideoSize({ width: Math.round(nextWidth), height: Math.round(nextHeight), }); + + if (handle.isLeft || handle.isTop) { + let nextX = posX; + let nextY = posY; + + if (handle.isLeft) { + nextX = posX + (startWidth - nextWidth); + } + + if (handle.isTop) { + nextY = posY + (startHeight - nextHeight); + } + + const clamped = clampToVisibleWithSize( + nextX, + nextY, + nextWidth, + nextHeight + ); + + if (handle.isLeft) { + nextX = clamped.x; + } + + if (handle.isTop) { + nextY = clamped.y; + } + + const nextPos = { x: nextX, y: nextY }; + setDragPosition(nextPos); + dragPositionRef.current = nextPos; + } }, - [MIN_HEIGHT, MIN_WIDTH, VISIBLE_MARGIN] + [MIN_HEIGHT, MIN_WIDTH, VISIBLE_MARGIN, clampToVisibleWithSize] ); const endResize = useCallback(() => { @@ -419,7 +559,7 @@ export default function FloatingVideo() { 
window.removeEventListener('touchend', endResize); }, [handleResizeMove]); - const startResize = (event) => { + const startResize = (event, handle) => { event.stopPropagation(); event.preventDefault(); @@ -431,6 +571,10 @@ export default function FloatingVideo() { const aspectRatio = videoSize.height > 0 ? videoSize.width / videoSize.height : aspectRatioRef.current; aspectRatioRef.current = aspectRatio; + const startPos = + dragPositionRef.current || + initialPositionRef.current || + { x: 0, y: 0 }; resizeStateRef.current = { startX: clientX, @@ -438,6 +582,8 @@ startWidth: videoSize.width, startHeight: videoSize.height, aspectRatio, + startPos, + handle, }; setIsResizing(true); @@ -666,27 +812,6 @@ export default function FloatingVideo() { )} - {/* Resize handle */} - {/* Error message below video - doesn't block controls */} @@ -703,6 +828,21 @@ )} + + {/* Resize handles */} + {resizeHandles.map((handle) => ( + <div + key={handle.id} + onMouseDown={(event) => startResize(event, handle)} + onTouchStart={(event) => startResize(event, handle)} + style={{ + ...resizeHandleBaseStyle, + ...handle.style, + cursor: handle.cursor, + }} + /> + ))} ); From 45ea63e9cfd71dc31dcafa2d5ed813458f26503e Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Thu, 18 Dec 2025 14:45:55 -0600 Subject: [PATCH 102/220] chore: update dependencies in package.json - Bump eslint from ^9.21.0 to ^9.27.0 - Upgrade vite from ^6.2.0 to ^7.1.7 - Add overrides for js-yaml to ^4.1.1 --- CHANGELOG.md | 1 + frontend/package-lock.json | 1317 ++++++++++++++++++------------------ frontend/package.json | 7 +- 3 files changed, 680 insertions(+), 645 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index d95ba566..d745ab6f 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -18,6 +18,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 - GitHub Actions workflows now use `docker/metadata-action` for cleaner and more maintainable OCI-compliant image label generation across all build pipelines (ci.yml, base-image.yml, release.yml). Labels are applied to both platform-specific images and multi-arch manifests with proper annotation formatting. - Thanks [@mrdynamo](https://github.com/mrdynamo) (Closes #724) - Update docker/dev-build.sh to support private registries, multiple architectures, and pushing. Now you can do things like `dev-build.sh -p -r my.private.registry -a linux/arm64,linux/amd64` - Thanks [@jdblack](https://github.com/jdblack) - Updated dependencies: Django (5.2.4 → 5.2.9) includes CVE security patch, psycopg2-binary (2.9.10 → 2.9.11), celery (5.5.3 → 5.6.0), djangorestframework (3.16.0 → 3.16.1), requests (2.32.4 → 2.32.5), psutil (7.0.0 → 7.1.3), gevent (25.5.1 → 25.9.1), rapidfuzz (3.13.0 → 3.14.3), torch (2.7.1 → 2.9.1), sentence-transformers (5.1.0 → 5.2.0), lxml (6.0.0 → 6.0.2) (Closes #662) +- Frontend dependencies updated: Vite (6.2.0 → 7.1.7), ESLint (9.21.0 → 9.27.0), and related packages; added npm `overrides` to enforce js-yaml@^4.1.1 for a transitive security fix. All 6 reported vulnerabilities resolved with `npm audit fix`.
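For context on the `overrides` entry this changelog bullet describes: npm (v8.3 and later) reads a top-level `overrides` field in package.json and forces the pinned version onto transitive dependencies, not just direct ones. A minimal sketch of the relevant slice of frontend/package.json under this patch's versions; the surrounding fields are assumed for illustration and are not the complete file:

```jsonc
{
  // assumed context: only the fields touched by this patch are shown
  "devDependencies": {
    "eslint": "^9.27.0",
    "vite": "^7.1.7"
  },
  // force every transitive js-yaml resolution to the patched range
  "overrides": {
    "js-yaml": "^4.1.1"
  }
}
```

Because `overrides` reaches through the whole dependency tree, consumers of js-yaml are rewritten in the lockfile as well, which is why the package-lock.json hunks below show @eslint/eslintrc moving from `"js-yaml": "^4.1.0"` to `"js-yaml": "^4.1.1"`.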
### Fixed diff --git a/frontend/package-lock.json b/frontend/package-lock.json index 780aabe1..84d18989 100644 --- a/frontend/package-lock.json +++ b/frontend/package-lock.json @@ -50,16 +50,23 @@ "@types/react": "^19.1.0", "@types/react-dom": "^19.1.0", "@vitejs/plugin-react-swc": "^4.1.0", - "eslint": "^9.21.0", + "eslint": "^9.27.0", "eslint-plugin-react-hooks": "^5.1.0", "eslint-plugin-react-refresh": "^0.4.19", "globals": "^15.15.0", "jsdom": "^27.0.0", "prettier": "^3.5.3", - "vite": "^6.2.0", + "vite": "^7.1.7", "vitest": "^3.2.4" } }, + "node_modules/@acemir/cssom": { + "version": "0.9.29", + "resolved": "https://registry.npmjs.org/@acemir/cssom/-/cssom-0.9.29.tgz", + "integrity": "sha512-G90x0VW+9nW4dFajtjCoT+NM0scAfH9Mb08IcjgFHYbfiL/lU04dTF9JuVOi3/OH+DJCQdcIseSXkdCB9Ky6JA==", + "dev": true, + "license": "MIT" + }, "node_modules/@adobe/css-tools": { "version": "4.4.4", "resolved": "https://registry.npmjs.org/@adobe/css-tools/-/css-tools-4.4.4.tgz", @@ -68,30 +75,31 @@ "license": "MIT" }, "node_modules/@asamuzakjp/css-color": { - "version": "4.0.4", - "resolved": "https://registry.npmjs.org/@asamuzakjp/css-color/-/css-color-4.0.4.tgz", - "integrity": "sha512-cKjSKvWGmAziQWbCouOsFwb14mp1betm8Y7Fn+yglDMUUu3r9DCbJ9iJbeFDenLMqFbIMC0pQP8K+B8LAxX3OQ==", + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/@asamuzakjp/css-color/-/css-color-4.1.1.tgz", + "integrity": "sha512-B0Hv6G3gWGMn0xKJ0txEi/jM5iFpT3MfDxmhZFb4W047GvytCf1DHQ1D69W3zHI4yWe2aTZAA0JnbMZ7Xc8DuQ==", "dev": true, "license": "MIT", "dependencies": { "@csstools/css-calc": "^2.1.4", - "@csstools/css-color-parser": "^3.0.10", + "@csstools/css-color-parser": "^3.1.0", "@csstools/css-parser-algorithms": "^3.0.5", "@csstools/css-tokenizer": "^3.0.4", - "lru-cache": "^11.1.0" + "lru-cache": "^11.2.4" } }, "node_modules/@asamuzakjp/dom-selector": { - "version": "6.5.5", - "resolved": "https://registry.npmjs.org/@asamuzakjp/dom-selector/-/dom-selector-6.5.5.tgz", - "integrity": "sha512-kI2MX9pmImjxWT8nxDZY+MuN6r1jJGe7WxizEbsAEPB/zxfW5wYLIiPG1v3UKgEOOP8EsDkp0ZL99oRFAdPM8g==", + "version": "6.7.6", + "resolved": "https://registry.npmjs.org/@asamuzakjp/dom-selector/-/dom-selector-6.7.6.tgz", + "integrity": "sha512-hBaJER6A9MpdG3WgdlOolHmbOYvSk46y7IQN/1+iqiCuUu6iWdQrs9DGKF8ocqsEqWujWf/V7b7vaDgiUmIvUg==", "dev": true, "license": "MIT", "dependencies": { "@asamuzakjp/nwsapi": "^2.3.9", "bidi-js": "^1.0.3", "css-tree": "^3.1.0", - "is-potential-custom-element-name": "^1.0.1" + "is-potential-custom-element-name": "^1.0.1", + "lru-cache": "^11.2.4" } }, "node_modules/@asamuzakjp/nwsapi": { @@ -102,73 +110,82 @@ "license": "MIT" }, "node_modules/@babel/code-frame": { - "version": "7.26.2", - "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.26.2.tgz", - "integrity": "sha512-RJlIHRueQgwWitWgF8OdFYGZX328Ax5BCemNGlqHfplnRT9ESi8JkFlvaVYbS+UubVY6dpv87Fs2u5M29iNFVQ==", + "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.27.1.tgz", + "integrity": "sha512-cjQ7ZlQ0Mv3b47hABuTevyTuYN4i+loJKGeV9flcCgIK37cCXRh+L1bd3iBHlynerhQ7BhCkn2BPbQUL+rGqFg==", "license": "MIT", "dependencies": { - "@babel/helper-validator-identifier": "^7.25.9", + "@babel/helper-validator-identifier": "^7.27.1", "js-tokens": "^4.0.0", - "picocolors": "^1.0.0" + "picocolors": "^1.1.1" }, "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/generator": { - "version": "7.26.10", - "resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.26.10.tgz", - "integrity": 
"sha512-rRHT8siFIXQrAYOYqZQVsAr8vJ+cBNqcVAY6m5V8/4QqzaPl+zDBe6cLEPRDuNOUf3ww8RfJVlOyQMoSI+5Ang==", + "version": "7.28.5", + "resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.28.5.tgz", + "integrity": "sha512-3EwLFhZ38J4VyIP6WNtt2kUdW9dokXA9Cr4IVIFHuCpZ3H8/YFOl5JjZHisrn1fATPBmKKqXzDFvh9fUwHz6CQ==", "license": "MIT", "dependencies": { - "@babel/parser": "^7.26.10", - "@babel/types": "^7.26.10", - "@jridgewell/gen-mapping": "^0.3.5", - "@jridgewell/trace-mapping": "^0.3.25", + "@babel/parser": "^7.28.5", + "@babel/types": "^7.28.5", + "@jridgewell/gen-mapping": "^0.3.12", + "@jridgewell/trace-mapping": "^0.3.28", "jsesc": "^3.0.2" }, "engines": { "node": ">=6.9.0" } }, + "node_modules/@babel/helper-globals": { + "version": "7.28.0", + "resolved": "https://registry.npmjs.org/@babel/helper-globals/-/helper-globals-7.28.0.tgz", + "integrity": "sha512-+W6cISkXFa1jXsDEdYA8HeevQT/FULhxzR99pxphltZcVaugps53THCeiWA8SguxxpSp3gKPiuYfSWopkLQ4hw==", + "license": "MIT", + "engines": { + "node": ">=6.9.0" + } + }, "node_modules/@babel/helper-module-imports": { - "version": "7.25.9", - "resolved": "https://registry.npmjs.org/@babel/helper-module-imports/-/helper-module-imports-7.25.9.tgz", - "integrity": "sha512-tnUA4RsrmflIM6W6RFTLFSXITtl0wKjgpnLgXyowocVPrbYrLUXSBXDgTs8BlbmIzIdlBySRQjINYs2BAkiLtw==", + "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/helper-module-imports/-/helper-module-imports-7.27.1.tgz", + "integrity": "sha512-0gSFWUPNXNopqtIPQvlD5WgXYI5GY2kP2cCvoT8kczjbfcfuIljTbcWrulD1CIPIX2gt1wghbDy08yE1p+/r3w==", "license": "MIT", "dependencies": { - "@babel/traverse": "^7.25.9", - "@babel/types": "^7.25.9" + "@babel/traverse": "^7.27.1", + "@babel/types": "^7.27.1" }, "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/helper-string-parser": { - "version": "7.25.9", - "resolved": "https://registry.npmjs.org/@babel/helper-string-parser/-/helper-string-parser-7.25.9.tgz", - "integrity": "sha512-4A/SCr/2KLd5jrtOMFzaKjVtAei3+2r/NChoBNoZ3EyP/+GlhoaEGoWOZUmFmoITP7zOJyHIMm+DYRd8o3PvHA==", + "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/helper-string-parser/-/helper-string-parser-7.27.1.tgz", + "integrity": "sha512-qMlSxKbpRlAridDExk92nSobyDdpPijUq2DW6oDnUqd0iOGxmQjyqhMIihI9+zv4LPyZdRje2cavWPbCbWm3eA==", "license": "MIT", "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/helper-validator-identifier": { - "version": "7.25.9", - "resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.25.9.tgz", - "integrity": "sha512-Ed61U6XJc3CVRfkERJWDz4dJwKe7iLmmJsbOGu9wSloNSFttHV0I8g6UAgb7qnK5ly5bGLPd4oXZlxCdANBOWQ==", + "version": "7.28.5", + "resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.28.5.tgz", + "integrity": "sha512-qSs4ifwzKJSV39ucNjsvc6WVHs6b7S03sOh2OcHF9UHfVPqWWALUsNUVzhSBiItjRZoLHx7nIarVjqKVusUZ1Q==", "license": "MIT", "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/parser": { - "version": "7.26.10", - "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.26.10.tgz", - "integrity": "sha512-6aQR2zGE/QFi8JpDLjUZEPYOs7+mhKXm86VaKFiLP35JQwQb6bwUE+XbvkH0EptsYhbNBSUGaUBLKqxH1xSgsA==", + "version": "7.28.5", + "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.28.5.tgz", + "integrity": "sha512-KKBU1VGYR7ORr3At5HAtUQ+TV3SzRCXmA/8OdDZiLDBIZxVyzXuztPjfLd3BV1PRAQGCMWWSHYhL0F8d5uHBDQ==", "license": "MIT", "dependencies": { - "@babel/types": "^7.26.10" + "@babel/types": "^7.28.5" }, "bin": { "parser": 
"bin/babel-parser.js" @@ -178,66 +195,54 @@ } }, "node_modules/@babel/runtime": { - "version": "7.26.10", - "resolved": "https://registry.npmjs.org/@babel/runtime/-/runtime-7.26.10.tgz", - "integrity": "sha512-2WJMeRQPHKSPemqk/awGrAiuFfzBmOIPXKizAsVhWH9YJqLZ0H+HS4c8loHGgW6utJ3E/ejXQUsiGaQy2NZ9Fw==", + "version": "7.28.4", + "resolved": "https://registry.npmjs.org/@babel/runtime/-/runtime-7.28.4.tgz", + "integrity": "sha512-Q/N6JNWvIvPnLDvjlE1OUBLPQHH6l3CltCEsHIujp45zQUSSh8K+gHnaEX45yAT1nyngnINhvWtzN+Nb9D8RAQ==", "license": "MIT", - "dependencies": { - "regenerator-runtime": "^0.14.0" - }, "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/template": { - "version": "7.26.9", - "resolved": "https://registry.npmjs.org/@babel/template/-/template-7.26.9.tgz", - "integrity": "sha512-qyRplbeIpNZhmzOysF/wFMuP9sctmh2cFzRAZOn1YapxBsE1i9bJIY586R/WBLfLcmcBlM8ROBiQURnnNy+zfA==", + "version": "7.27.2", + "resolved": "https://registry.npmjs.org/@babel/template/-/template-7.27.2.tgz", + "integrity": "sha512-LPDZ85aEJyYSd18/DkjNh4/y1ntkE5KwUHWTiqgRxruuZL2F1yuHligVHLvcHY2vMHXttKFpJn6LwfI7cw7ODw==", "license": "MIT", "dependencies": { - "@babel/code-frame": "^7.26.2", - "@babel/parser": "^7.26.9", - "@babel/types": "^7.26.9" + "@babel/code-frame": "^7.27.1", + "@babel/parser": "^7.27.2", + "@babel/types": "^7.27.1" }, "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/traverse": { - "version": "7.26.10", - "resolved": "https://registry.npmjs.org/@babel/traverse/-/traverse-7.26.10.tgz", - "integrity": "sha512-k8NuDrxr0WrPH5Aupqb2LCVURP/S0vBEn5mK6iH+GIYob66U5EtoZvcdudR2jQ4cmTwhEwW1DLB+Yyas9zjF6A==", + "version": "7.28.5", + "resolved": "https://registry.npmjs.org/@babel/traverse/-/traverse-7.28.5.tgz", + "integrity": "sha512-TCCj4t55U90khlYkVV/0TfkJkAkUg3jZFA3Neb7unZT8CPok7iiRfaX0F+WnqWqt7OxhOn0uBKXCw4lbL8W0aQ==", "license": "MIT", "dependencies": { - "@babel/code-frame": "^7.26.2", - "@babel/generator": "^7.26.10", - "@babel/parser": "^7.26.10", - "@babel/template": "^7.26.9", - "@babel/types": "^7.26.10", - "debug": "^4.3.1", - "globals": "^11.1.0" + "@babel/code-frame": "^7.27.1", + "@babel/generator": "^7.28.5", + "@babel/helper-globals": "^7.28.0", + "@babel/parser": "^7.28.5", + "@babel/template": "^7.27.2", + "@babel/types": "^7.28.5", + "debug": "^4.3.1" }, "engines": { "node": ">=6.9.0" } }, - "node_modules/@babel/traverse/node_modules/globals": { - "version": "11.12.0", - "resolved": "https://registry.npmjs.org/globals/-/globals-11.12.0.tgz", - "integrity": "sha512-WOBp/EEGUiIsJSp7wcv/y6MO+lV9UoncWqxuFfm8eBwzWNgyfBd6Gz+IeKQ9jCmyhoH99g15M3T+QaVHFjizVA==", - "license": "MIT", - "engines": { - "node": ">=4" - } - }, "node_modules/@babel/types": { - "version": "7.26.10", - "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.26.10.tgz", - "integrity": "sha512-emqcG3vHrpxUKTrxcblR36dcrcoRDvKmnL/dCL6ZsHaShW80qxCAcNhzQZrpeM765VzEos+xOi4s+r4IXzTwdQ==", + "version": "7.28.5", + "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.28.5.tgz", + "integrity": "sha512-qQ5m48eI/MFLQ5PxQj4PFaprjyCTLI37ElWMmNs0K8Lk3dVeOdNpB3ks8jc7yM5CDmVC73eMVk/trk3fgmrUpA==", "license": "MIT", "dependencies": { - "@babel/helper-string-parser": "^7.25.9", - "@babel/helper-validator-identifier": "^7.25.9" + "@babel/helper-string-parser": "^7.27.1", + "@babel/helper-validator-identifier": "^7.28.5" }, "engines": { "node": ">=6.9.0" @@ -339,9 +344,9 @@ } }, "node_modules/@csstools/css-syntax-patches-for-csstree": { - "version": "1.0.14", - "resolved": 
"https://registry.npmjs.org/@csstools/css-syntax-patches-for-csstree/-/css-syntax-patches-for-csstree-1.0.14.tgz", - "integrity": "sha512-zSlIxa20WvMojjpCSy8WrNpcZ61RqfTfX3XTaOeVlGJrt/8HF3YbzgFZa01yTbT4GWQLwfTcC3EB8i3XnB647Q==", + "version": "1.0.21", + "resolved": "https://registry.npmjs.org/@csstools/css-syntax-patches-for-csstree/-/css-syntax-patches-for-csstree-1.0.21.tgz", + "integrity": "sha512-plP8N8zKfEZ26figX4Nvajx8DuzfuRpLTqglQ5d0chfnt35Qt3X+m6ASZ+rG0D0kxe/upDVNwSIVJP5n4FuNfw==", "dev": true, "funding": [ { @@ -356,9 +361,6 @@ "license": "MIT-0", "engines": { "node": ">=18" - }, - "peerDependencies": { - "postcss": "^8.4" } }, "node_modules/@csstools/css-tokenizer": { @@ -487,9 +489,9 @@ "license": "MIT" }, "node_modules/@emotion/is-prop-valid": { - "version": "1.3.1", - "resolved": "https://registry.npmjs.org/@emotion/is-prop-valid/-/is-prop-valid-1.3.1.tgz", - "integrity": "sha512-/ACwoqx7XQi9knQs/G0qKvv5teDMhD7bXYns9N/wM8ah8iNb8jZ2uNO0YOgiq2o2poIvVtJS2YALasQuMSQ7Kw==", + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/@emotion/is-prop-valid/-/is-prop-valid-1.4.0.tgz", + "integrity": "sha512-QgD4fyscGcbbKwJmqNvUMSE02OsHUa+lAWKdEUIJKgqe5IwRSKd7+KhibEWdaKwgjLj0DRSHA9biAIqGBk05lw==", "license": "MIT", "dependencies": { "@emotion/memoize": "^0.9.0" @@ -545,9 +547,9 @@ "license": "MIT" }, "node_modules/@emotion/styled": { - "version": "11.14.0", - "resolved": "https://registry.npmjs.org/@emotion/styled/-/styled-11.14.0.tgz", - "integrity": "sha512-XxfOnXFffatap2IyCeJyNov3kiDQWoR08gPUQxvbL7fxKryGBKUZUkG6Hz48DZwVrJSVh9sJboyV1Ds4OW6SgA==", + "version": "11.14.1", + "resolved": "https://registry.npmjs.org/@emotion/styled/-/styled-11.14.1.tgz", + "integrity": "sha512-qEEJt42DuToa3gurlH4Qqc1kVpNq8wO8cJtDzU46TjlzWjDlsVyevtYCRijVq3SrHsROS+gVQ8Fnea108GnKzw==", "license": "MIT", "dependencies": { "@babel/runtime": "^7.18.3", @@ -595,9 +597,9 @@ "license": "MIT" }, "node_modules/@esbuild/aix-ppc64": { - "version": "0.25.1", - "resolved": "https://registry.npmjs.org/@esbuild/aix-ppc64/-/aix-ppc64-0.25.1.tgz", - "integrity": "sha512-kfYGy8IdzTGy+z0vFGvExZtxkFlA4zAxgKEahG9KE1ScBjpQnFsNOX8KTU5ojNru5ed5CVoJYXFtoxaq5nFbjQ==", + "version": "0.27.2", + "resolved": "https://registry.npmjs.org/@esbuild/aix-ppc64/-/aix-ppc64-0.27.2.tgz", + "integrity": "sha512-GZMB+a0mOMZs4MpDbj8RJp4cw+w1WV5NYD6xzgvzUJ5Ek2jerwfO2eADyI6ExDSUED+1X8aMbegahsJi+8mgpw==", "cpu": [ "ppc64" ], @@ -612,9 +614,9 @@ } }, "node_modules/@esbuild/android-arm": { - "version": "0.25.1", - "resolved": "https://registry.npmjs.org/@esbuild/android-arm/-/android-arm-0.25.1.tgz", - "integrity": "sha512-dp+MshLYux6j/JjdqVLnMglQlFu+MuVeNrmT5nk6q07wNhCdSnB7QZj+7G8VMUGh1q+vj2Bq8kRsuyA00I/k+Q==", + "version": "0.27.2", + "resolved": "https://registry.npmjs.org/@esbuild/android-arm/-/android-arm-0.27.2.tgz", + "integrity": "sha512-DVNI8jlPa7Ujbr1yjU2PfUSRtAUZPG9I1RwW4F4xFB1Imiu2on0ADiI/c3td+KmDtVKNbi+nffGDQMfcIMkwIA==", "cpu": [ "arm" ], @@ -629,9 +631,9 @@ } }, "node_modules/@esbuild/android-arm64": { - "version": "0.25.1", - "resolved": "https://registry.npmjs.org/@esbuild/android-arm64/-/android-arm64-0.25.1.tgz", - "integrity": "sha512-50tM0zCJW5kGqgG7fQ7IHvQOcAn9TKiVRuQ/lN0xR+T2lzEFvAi1ZcS8DiksFcEpf1t/GYOeOfCAgDHFpkiSmA==", + "version": "0.27.2", + "resolved": "https://registry.npmjs.org/@esbuild/android-arm64/-/android-arm64-0.27.2.tgz", + "integrity": "sha512-pvz8ZZ7ot/RBphf8fv60ljmaoydPU12VuXHImtAs0XhLLw+EXBi2BLe3OYSBslR4rryHvweW5gmkKFwTiFy6KA==", "cpu": [ "arm64" ], @@ -646,9 +648,9 @@ } }, 
"node_modules/@esbuild/android-x64": { - "version": "0.25.1", - "resolved": "https://registry.npmjs.org/@esbuild/android-x64/-/android-x64-0.25.1.tgz", - "integrity": "sha512-GCj6WfUtNldqUzYkN/ITtlhwQqGWu9S45vUXs7EIYf+7rCiiqH9bCloatO9VhxsL0Pji+PF4Lz2XXCES+Q8hDw==", + "version": "0.27.2", + "resolved": "https://registry.npmjs.org/@esbuild/android-x64/-/android-x64-0.27.2.tgz", + "integrity": "sha512-z8Ank4Byh4TJJOh4wpz8g2vDy75zFL0TlZlkUkEwYXuPSgX8yzep596n6mT7905kA9uHZsf/o2OJZubl2l3M7A==", "cpu": [ "x64" ], @@ -663,9 +665,9 @@ } }, "node_modules/@esbuild/darwin-arm64": { - "version": "0.25.1", - "resolved": "https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.25.1.tgz", - "integrity": "sha512-5hEZKPf+nQjYoSr/elb62U19/l1mZDdqidGfmFutVUjjUZrOazAtwK+Kr+3y0C/oeJfLlxo9fXb1w7L+P7E4FQ==", + "version": "0.27.2", + "resolved": "https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.27.2.tgz", + "integrity": "sha512-davCD2Zc80nzDVRwXTcQP/28fiJbcOwvdolL0sOiOsbwBa72kegmVU0Wrh1MYrbuCL98Omp5dVhQFWRKR2ZAlg==", "cpu": [ "arm64" ], @@ -680,9 +682,9 @@ } }, "node_modules/@esbuild/darwin-x64": { - "version": "0.25.1", - "resolved": "https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.25.1.tgz", - "integrity": "sha512-hxVnwL2Dqs3fM1IWq8Iezh0cX7ZGdVhbTfnOy5uURtao5OIVCEyj9xIzemDi7sRvKsuSdtCAhMKarxqtlyVyfA==", + "version": "0.27.2", + "resolved": "https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.27.2.tgz", + "integrity": "sha512-ZxtijOmlQCBWGwbVmwOF/UCzuGIbUkqB1faQRf5akQmxRJ1ujusWsb3CVfk/9iZKr2L5SMU5wPBi1UWbvL+VQA==", "cpu": [ "x64" ], @@ -697,9 +699,9 @@ } }, "node_modules/@esbuild/freebsd-arm64": { - "version": "0.25.1", - "resolved": "https://registry.npmjs.org/@esbuild/freebsd-arm64/-/freebsd-arm64-0.25.1.tgz", - "integrity": "sha512-1MrCZs0fZa2g8E+FUo2ipw6jw5qqQiH+tERoS5fAfKnRx6NXH31tXBKI3VpmLijLH6yriMZsxJtaXUyFt/8Y4A==", + "version": "0.27.2", + "resolved": "https://registry.npmjs.org/@esbuild/freebsd-arm64/-/freebsd-arm64-0.27.2.tgz", + "integrity": "sha512-lS/9CN+rgqQ9czogxlMcBMGd+l8Q3Nj1MFQwBZJyoEKI50XGxwuzznYdwcav6lpOGv5BqaZXqvBSiB/kJ5op+g==", "cpu": [ "arm64" ], @@ -714,9 +716,9 @@ } }, "node_modules/@esbuild/freebsd-x64": { - "version": "0.25.1", - "resolved": "https://registry.npmjs.org/@esbuild/freebsd-x64/-/freebsd-x64-0.25.1.tgz", - "integrity": "sha512-0IZWLiTyz7nm0xuIs0q1Y3QWJC52R8aSXxe40VUxm6BB1RNmkODtW6LHvWRrGiICulcX7ZvyH6h5fqdLu4gkww==", + "version": "0.27.2", + "resolved": "https://registry.npmjs.org/@esbuild/freebsd-x64/-/freebsd-x64-0.27.2.tgz", + "integrity": "sha512-tAfqtNYb4YgPnJlEFu4c212HYjQWSO/w/h/lQaBK7RbwGIkBOuNKQI9tqWzx7Wtp7bTPaGC6MJvWI608P3wXYA==", "cpu": [ "x64" ], @@ -731,9 +733,9 @@ } }, "node_modules/@esbuild/linux-arm": { - "version": "0.25.1", - "resolved": "https://registry.npmjs.org/@esbuild/linux-arm/-/linux-arm-0.25.1.tgz", - "integrity": "sha512-NdKOhS4u7JhDKw9G3cY6sWqFcnLITn6SqivVArbzIaf3cemShqfLGHYMx8Xlm/lBit3/5d7kXvriTUGa5YViuQ==", + "version": "0.27.2", + "resolved": "https://registry.npmjs.org/@esbuild/linux-arm/-/linux-arm-0.27.2.tgz", + "integrity": "sha512-vWfq4GaIMP9AIe4yj1ZUW18RDhx6EPQKjwe7n8BbIecFtCQG4CfHGaHuh7fdfq+y3LIA2vGS/o9ZBGVxIDi9hw==", "cpu": [ "arm" ], @@ -748,9 +750,9 @@ } }, "node_modules/@esbuild/linux-arm64": { - "version": "0.25.1", - "resolved": "https://registry.npmjs.org/@esbuild/linux-arm64/-/linux-arm64-0.25.1.tgz", - "integrity": "sha512-jaN3dHi0/DDPelk0nLcXRm1q7DNJpjXy7yWaWvbfkPvI+7XNSc/lDOnCLN7gzsyzgu6qSAmgSvP9oXAhP973uQ==", + "version": "0.27.2", + "resolved": 
"https://registry.npmjs.org/@esbuild/linux-arm64/-/linux-arm64-0.27.2.tgz", + "integrity": "sha512-hYxN8pr66NsCCiRFkHUAsxylNOcAQaxSSkHMMjcpx0si13t1LHFphxJZUiGwojB1a/Hd5OiPIqDdXONia6bhTw==", "cpu": [ "arm64" ], @@ -765,9 +767,9 @@ } }, "node_modules/@esbuild/linux-ia32": { - "version": "0.25.1", - "resolved": "https://registry.npmjs.org/@esbuild/linux-ia32/-/linux-ia32-0.25.1.tgz", - "integrity": "sha512-OJykPaF4v8JidKNGz8c/q1lBO44sQNUQtq1KktJXdBLn1hPod5rE/Hko5ugKKZd+D2+o1a9MFGUEIUwO2YfgkQ==", + "version": "0.27.2", + "resolved": "https://registry.npmjs.org/@esbuild/linux-ia32/-/linux-ia32-0.27.2.tgz", + "integrity": "sha512-MJt5BRRSScPDwG2hLelYhAAKh9imjHK5+NE/tvnRLbIqUWa+0E9N4WNMjmp/kXXPHZGqPLxggwVhz7QP8CTR8w==", "cpu": [ "ia32" ], @@ -782,9 +784,9 @@ } }, "node_modules/@esbuild/linux-loong64": { - "version": "0.25.1", - "resolved": "https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.25.1.tgz", - "integrity": "sha512-nGfornQj4dzcq5Vp835oM/o21UMlXzn79KobKlcs3Wz9smwiifknLy4xDCLUU0BWp7b/houtdrgUz7nOGnfIYg==", + "version": "0.27.2", + "resolved": "https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.27.2.tgz", + "integrity": "sha512-lugyF1atnAT463aO6KPshVCJK5NgRnU4yb3FUumyVz+cGvZbontBgzeGFO1nF+dPueHD367a2ZXe1NtUkAjOtg==", "cpu": [ "loong64" ], @@ -799,9 +801,9 @@ } }, "node_modules/@esbuild/linux-mips64el": { - "version": "0.25.1", - "resolved": "https://registry.npmjs.org/@esbuild/linux-mips64el/-/linux-mips64el-0.25.1.tgz", - "integrity": "sha512-1osBbPEFYwIE5IVB/0g2X6i1qInZa1aIoj1TdL4AaAb55xIIgbg8Doq6a5BzYWgr+tEcDzYH67XVnTmUzL+nXg==", + "version": "0.27.2", + "resolved": "https://registry.npmjs.org/@esbuild/linux-mips64el/-/linux-mips64el-0.27.2.tgz", + "integrity": "sha512-nlP2I6ArEBewvJ2gjrrkESEZkB5mIoaTswuqNFRv/WYd+ATtUpe9Y09RnJvgvdag7he0OWgEZWhviS1OTOKixw==", "cpu": [ "mips64el" ], @@ -816,9 +818,9 @@ } }, "node_modules/@esbuild/linux-ppc64": { - "version": "0.25.1", - "resolved": "https://registry.npmjs.org/@esbuild/linux-ppc64/-/linux-ppc64-0.25.1.tgz", - "integrity": "sha512-/6VBJOwUf3TdTvJZ82qF3tbLuWsscd7/1w+D9LH0W/SqUgM5/JJD0lrJ1fVIfZsqB6RFmLCe0Xz3fmZc3WtyVg==", + "version": "0.27.2", + "resolved": "https://registry.npmjs.org/@esbuild/linux-ppc64/-/linux-ppc64-0.27.2.tgz", + "integrity": "sha512-C92gnpey7tUQONqg1n6dKVbx3vphKtTHJaNG2Ok9lGwbZil6DrfyecMsp9CrmXGQJmZ7iiVXvvZH6Ml5hL6XdQ==", "cpu": [ "ppc64" ], @@ -833,9 +835,9 @@ } }, "node_modules/@esbuild/linux-riscv64": { - "version": "0.25.1", - "resolved": "https://registry.npmjs.org/@esbuild/linux-riscv64/-/linux-riscv64-0.25.1.tgz", - "integrity": "sha512-nSut/Mx5gnilhcq2yIMLMe3Wl4FK5wx/o0QuuCLMtmJn+WeWYoEGDN1ipcN72g1WHsnIbxGXd4i/MF0gTcuAjQ==", + "version": "0.27.2", + "resolved": "https://registry.npmjs.org/@esbuild/linux-riscv64/-/linux-riscv64-0.27.2.tgz", + "integrity": "sha512-B5BOmojNtUyN8AXlK0QJyvjEZkWwy/FKvakkTDCziX95AowLZKR6aCDhG7LeF7uMCXEJqwa8Bejz5LTPYm8AvA==", "cpu": [ "riscv64" ], @@ -850,9 +852,9 @@ } }, "node_modules/@esbuild/linux-s390x": { - "version": "0.25.1", - "resolved": "https://registry.npmjs.org/@esbuild/linux-s390x/-/linux-s390x-0.25.1.tgz", - "integrity": "sha512-cEECeLlJNfT8kZHqLarDBQso9a27o2Zd2AQ8USAEoGtejOrCYHNtKP8XQhMDJMtthdF4GBmjR2au3x1udADQQQ==", + "version": "0.27.2", + "resolved": "https://registry.npmjs.org/@esbuild/linux-s390x/-/linux-s390x-0.27.2.tgz", + "integrity": "sha512-p4bm9+wsPwup5Z8f4EpfN63qNagQ47Ua2znaqGH6bqLlmJ4bx97Y9JdqxgGZ6Y8xVTixUnEkoKSHcpRlDnNr5w==", "cpu": [ "s390x" ], @@ -867,9 +869,9 @@ } }, "node_modules/@esbuild/linux-x64": { - "version": 
"0.25.1", - "resolved": "https://registry.npmjs.org/@esbuild/linux-x64/-/linux-x64-0.25.1.tgz", - "integrity": "sha512-xbfUhu/gnvSEg+EGovRc+kjBAkrvtk38RlerAzQxvMzlB4fXpCFCeUAYzJvrnhFtdeyVCDANSjJvOvGYoeKzFA==", + "version": "0.27.2", + "resolved": "https://registry.npmjs.org/@esbuild/linux-x64/-/linux-x64-0.27.2.tgz", + "integrity": "sha512-uwp2Tip5aPmH+NRUwTcfLb+W32WXjpFejTIOWZFw/v7/KnpCDKG66u4DLcurQpiYTiYwQ9B7KOeMJvLCu/OvbA==", "cpu": [ "x64" ], @@ -884,9 +886,9 @@ } }, "node_modules/@esbuild/netbsd-arm64": { - "version": "0.25.1", - "resolved": "https://registry.npmjs.org/@esbuild/netbsd-arm64/-/netbsd-arm64-0.25.1.tgz", - "integrity": "sha512-O96poM2XGhLtpTh+s4+nP7YCCAfb4tJNRVZHfIE7dgmax+yMP2WgMd2OecBuaATHKTHsLWHQeuaxMRnCsH8+5g==", + "version": "0.27.2", + "resolved": "https://registry.npmjs.org/@esbuild/netbsd-arm64/-/netbsd-arm64-0.27.2.tgz", + "integrity": "sha512-Kj6DiBlwXrPsCRDeRvGAUb/LNrBASrfqAIok+xB0LxK8CHqxZ037viF13ugfsIpePH93mX7xfJp97cyDuTZ3cw==", "cpu": [ "arm64" ], @@ -901,9 +903,9 @@ } }, "node_modules/@esbuild/netbsd-x64": { - "version": "0.25.1", - "resolved": "https://registry.npmjs.org/@esbuild/netbsd-x64/-/netbsd-x64-0.25.1.tgz", - "integrity": "sha512-X53z6uXip6KFXBQ+Krbx25XHV/NCbzryM6ehOAeAil7X7oa4XIq+394PWGnwaSQ2WRA0KI6PUO6hTO5zeF5ijA==", + "version": "0.27.2", + "resolved": "https://registry.npmjs.org/@esbuild/netbsd-x64/-/netbsd-x64-0.27.2.tgz", + "integrity": "sha512-HwGDZ0VLVBY3Y+Nw0JexZy9o/nUAWq9MlV7cahpaXKW6TOzfVno3y3/M8Ga8u8Yr7GldLOov27xiCnqRZf0tCA==", "cpu": [ "x64" ], @@ -918,9 +920,9 @@ } }, "node_modules/@esbuild/openbsd-arm64": { - "version": "0.25.1", - "resolved": "https://registry.npmjs.org/@esbuild/openbsd-arm64/-/openbsd-arm64-0.25.1.tgz", - "integrity": "sha512-Na9T3szbXezdzM/Kfs3GcRQNjHzM6GzFBeU1/6IV/npKP5ORtp9zbQjvkDJ47s6BCgaAZnnnu/cY1x342+MvZg==", + "version": "0.27.2", + "resolved": "https://registry.npmjs.org/@esbuild/openbsd-arm64/-/openbsd-arm64-0.27.2.tgz", + "integrity": "sha512-DNIHH2BPQ5551A7oSHD0CKbwIA/Ox7+78/AWkbS5QoRzaqlev2uFayfSxq68EkonB+IKjiuxBFoV8ESJy8bOHA==", "cpu": [ "arm64" ], @@ -935,9 +937,9 @@ } }, "node_modules/@esbuild/openbsd-x64": { - "version": "0.25.1", - "resolved": "https://registry.npmjs.org/@esbuild/openbsd-x64/-/openbsd-x64-0.25.1.tgz", - "integrity": "sha512-T3H78X2h1tszfRSf+txbt5aOp/e7TAz3ptVKu9Oyir3IAOFPGV6O9c2naym5TOriy1l0nNf6a4X5UXRZSGX/dw==", + "version": "0.27.2", + "resolved": "https://registry.npmjs.org/@esbuild/openbsd-x64/-/openbsd-x64-0.27.2.tgz", + "integrity": "sha512-/it7w9Nb7+0KFIzjalNJVR5bOzA9Vay+yIPLVHfIQYG/j+j9VTH84aNB8ExGKPU4AzfaEvN9/V4HV+F+vo8OEg==", "cpu": [ "x64" ], @@ -951,10 +953,27 @@ "node": ">=18" } }, + "node_modules/@esbuild/openharmony-arm64": { + "version": "0.27.2", + "resolved": "https://registry.npmjs.org/@esbuild/openharmony-arm64/-/openharmony-arm64-0.27.2.tgz", + "integrity": "sha512-LRBbCmiU51IXfeXk59csuX/aSaToeG7w48nMwA6049Y4J4+VbWALAuXcs+qcD04rHDuSCSRKdmY63sruDS5qag==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "openharmony" + ], + "engines": { + "node": ">=18" + } + }, "node_modules/@esbuild/sunos-x64": { - "version": "0.25.1", - "resolved": "https://registry.npmjs.org/@esbuild/sunos-x64/-/sunos-x64-0.25.1.tgz", - "integrity": "sha512-2H3RUvcmULO7dIE5EWJH8eubZAI4xw54H1ilJnRNZdeo8dTADEZ21w6J22XBkXqGJbe0+wnNJtw3UXRoLJnFEg==", + "version": "0.27.2", + "resolved": "https://registry.npmjs.org/@esbuild/sunos-x64/-/sunos-x64-0.27.2.tgz", + "integrity": 
"sha512-kMtx1yqJHTmqaqHPAzKCAkDaKsffmXkPHThSfRwZGyuqyIeBvf08KSsYXl+abf5HDAPMJIPnbBfXvP2ZC2TfHg==", "cpu": [ "x64" ], @@ -969,9 +988,9 @@ } }, "node_modules/@esbuild/win32-arm64": { - "version": "0.25.1", - "resolved": "https://registry.npmjs.org/@esbuild/win32-arm64/-/win32-arm64-0.25.1.tgz", - "integrity": "sha512-GE7XvrdOzrb+yVKB9KsRMq+7a2U/K5Cf/8grVFRAGJmfADr/e/ODQ134RK2/eeHqYV5eQRFxb1hY7Nr15fv1NQ==", + "version": "0.27.2", + "resolved": "https://registry.npmjs.org/@esbuild/win32-arm64/-/win32-arm64-0.27.2.tgz", + "integrity": "sha512-Yaf78O/B3Kkh+nKABUF++bvJv5Ijoy9AN1ww904rOXZFLWVc5OLOfL56W+C8F9xn5JQZa3UX6m+IktJnIb1Jjg==", "cpu": [ "arm64" ], @@ -986,9 +1005,9 @@ } }, "node_modules/@esbuild/win32-ia32": { - "version": "0.25.1", - "resolved": "https://registry.npmjs.org/@esbuild/win32-ia32/-/win32-ia32-0.25.1.tgz", - "integrity": "sha512-uOxSJCIcavSiT6UnBhBzE8wy3n0hOkJsBOzy7HDAuTDE++1DJMRRVCPGisULScHL+a/ZwdXPpXD3IyFKjA7K8A==", + "version": "0.27.2", + "resolved": "https://registry.npmjs.org/@esbuild/win32-ia32/-/win32-ia32-0.27.2.tgz", + "integrity": "sha512-Iuws0kxo4yusk7sw70Xa2E2imZU5HoixzxfGCdxwBdhiDgt9vX9VUCBhqcwY7/uh//78A1hMkkROMJq9l27oLQ==", "cpu": [ "ia32" ], @@ -1003,9 +1022,9 @@ } }, "node_modules/@esbuild/win32-x64": { - "version": "0.25.1", - "resolved": "https://registry.npmjs.org/@esbuild/win32-x64/-/win32-x64-0.25.1.tgz", - "integrity": "sha512-Y1EQdcfwMSeQN/ujR5VayLOJ1BHaK+ssyk0AEzPjC+t1lITgsnccPqFjb6V+LsTp/9Iov4ysfjxLaGJ9RPtkVg==", + "version": "0.27.2", + "resolved": "https://registry.npmjs.org/@esbuild/win32-x64/-/win32-x64-0.27.2.tgz", + "integrity": "sha512-sRdU18mcKf7F+YgheI/zGf5alZatMUTKj/jNS6l744f9u3WFu4v7twcUI9vu4mknF4Y9aDlblIie0IM+5xxaqQ==", "cpu": [ "x64" ], @@ -1020,9 +1039,9 @@ } }, "node_modules/@eslint-community/eslint-utils": { - "version": "4.5.0", - "resolved": "https://registry.npmjs.org/@eslint-community/eslint-utils/-/eslint-utils-4.5.0.tgz", - "integrity": "sha512-RoV8Xs9eNwiDvhv7M+xcL4PWyRyIXRY/FLp3buU4h1EYfdF7unWUy3dOjPqb3C7rMUewIcqwW850PgS8h1o1yg==", + "version": "4.9.0", + "resolved": "https://registry.npmjs.org/@eslint-community/eslint-utils/-/eslint-utils-4.9.0.tgz", + "integrity": "sha512-ayVFHdtZ+hsq1t2Dy24wCmGXGe4q9Gu3smhLYALJrr473ZH27MsnSL+LKUlimp4BWJqMDMLmPpx/Q9R3OAlL4g==", "dev": true, "license": "MIT", "dependencies": { @@ -1052,9 +1071,9 @@ } }, "node_modules/@eslint-community/regexpp": { - "version": "4.12.1", - "resolved": "https://registry.npmjs.org/@eslint-community/regexpp/-/regexpp-4.12.1.tgz", - "integrity": "sha512-CCZCDJuduB9OUkFkY2IgppNZMi2lBQgD2qzwXkEia16cge2pijY/aXi96CJMquDMn3nJdlPV1A5KrJEXwfLNzQ==", + "version": "4.12.2", + "resolved": "https://registry.npmjs.org/@eslint-community/regexpp/-/regexpp-4.12.2.tgz", + "integrity": "sha512-EriSTlt5OC9/7SXkRSCAhfSxxoSUgBm33OH+IkwbdpgoqsSsUg7y3uh+IICI/Qg4BBWr3U2i39RpmycbxMq4ew==", "dev": true, "license": "MIT", "engines": { @@ -1062,13 +1081,13 @@ } }, "node_modules/@eslint/config-array": { - "version": "0.19.2", - "resolved": "https://registry.npmjs.org/@eslint/config-array/-/config-array-0.19.2.tgz", - "integrity": "sha512-GNKqxfHG2ySmJOBSHg7LxeUx4xpuCoFjacmlCoYWEbaPXLwvfIjixRI12xCQZeULksQb23uiA8F40w5TojpV7w==", + "version": "0.21.1", + "resolved": "https://registry.npmjs.org/@eslint/config-array/-/config-array-0.21.1.tgz", + "integrity": "sha512-aw1gNayWpdI/jSYVgzN5pL0cfzU02GT3NBpeT/DXbx1/1x7ZKxFPd9bwrzygx/qiwIQiJ1sw/zD8qY/kRvlGHA==", "dev": true, "license": "Apache-2.0", "dependencies": { - "@eslint/object-schema": "^2.1.6", + "@eslint/object-schema": "^2.1.7", "debug": 
"^4.3.1", "minimatch": "^3.1.2" }, @@ -1077,19 +1096,22 @@ } }, "node_modules/@eslint/config-helpers": { - "version": "0.1.0", - "resolved": "https://registry.npmjs.org/@eslint/config-helpers/-/config-helpers-0.1.0.tgz", - "integrity": "sha512-kLrdPDJE1ckPo94kmPPf9Hfd0DU0Jw6oKYrhe+pwSC0iTUInmTa+w6fw8sGgcfkFJGNdWOUeOaDM4quW4a7OkA==", + "version": "0.4.2", + "resolved": "https://registry.npmjs.org/@eslint/config-helpers/-/config-helpers-0.4.2.tgz", + "integrity": "sha512-gBrxN88gOIf3R7ja5K9slwNayVcZgK6SOUORm2uBzTeIEfeVaIhOpCtTox3P6R7o2jLFwLFTLnC7kU/RGcYEgw==", "dev": true, "license": "Apache-2.0", + "dependencies": { + "@eslint/core": "^0.17.0" + }, "engines": { "node": "^18.18.0 || ^20.9.0 || >=21.1.0" } }, "node_modules/@eslint/core": { - "version": "0.12.0", - "resolved": "https://registry.npmjs.org/@eslint/core/-/core-0.12.0.tgz", - "integrity": "sha512-cmrR6pytBuSMTaBweKoGMwu3EiHiEC+DoyupPmlZ0HxBJBtIxwe+j/E4XPIKNx+Q74c8lXKPwYawBf5glsTkHg==", + "version": "0.17.0", + "resolved": "https://registry.npmjs.org/@eslint/core/-/core-0.17.0.tgz", + "integrity": "sha512-yL/sLrpmtDaFEiUj1osRP4TI2MDz1AddJL+jZ7KSqvBuliN4xqYY54IfdN8qD8Toa6g1iloph1fxQNkjOxrrpQ==", "dev": true, "license": "Apache-2.0", "dependencies": { @@ -1100,9 +1122,9 @@ } }, "node_modules/@eslint/eslintrc": { - "version": "3.3.0", - "resolved": "https://registry.npmjs.org/@eslint/eslintrc/-/eslintrc-3.3.0.tgz", - "integrity": "sha512-yaVPAiNAalnCZedKLdR21GOGILMLKPyqSLWaAjQFvYA2i/ciDi8ArYVr69Anohb6cH2Ukhqti4aFnYyPm8wdwQ==", + "version": "3.3.3", + "resolved": "https://registry.npmjs.org/@eslint/eslintrc/-/eslintrc-3.3.3.tgz", + "integrity": "sha512-Kr+LPIUVKz2qkx1HAMH8q1q6azbqBAsXJUxBl/ODDuVPX45Z9DfwB8tPjTi6nNZ8BuM3nbJxC5zCAg5elnBUTQ==", "dev": true, "license": "MIT", "dependencies": { @@ -1112,7 +1134,7 @@ "globals": "^14.0.0", "ignore": "^5.2.0", "import-fresh": "^3.2.1", - "js-yaml": "^4.1.0", + "js-yaml": "^4.1.1", "minimatch": "^3.1.2", "strip-json-comments": "^3.1.1" }, @@ -1137,19 +1159,22 @@ } }, "node_modules/@eslint/js": { - "version": "9.22.0", - "resolved": "https://registry.npmjs.org/@eslint/js/-/js-9.22.0.tgz", - "integrity": "sha512-vLFajx9o8d1/oL2ZkpMYbkLv8nDB6yaIwFNt7nI4+I80U/z03SxmfOMsLbvWr3p7C+Wnoh//aOu2pQW8cS0HCQ==", + "version": "9.39.2", + "resolved": "https://registry.npmjs.org/@eslint/js/-/js-9.39.2.tgz", + "integrity": "sha512-q1mjIoW1VX4IvSocvM/vbTiveKC4k9eLrajNEuSsmjymSDEbpGddtpfOoN7YGAqBK3NG+uqo8ia4PDTt8buCYA==", "dev": true, "license": "MIT", "engines": { "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "url": "https://eslint.org/donate" } }, "node_modules/@eslint/object-schema": { - "version": "2.1.6", - "resolved": "https://registry.npmjs.org/@eslint/object-schema/-/object-schema-2.1.6.tgz", - "integrity": "sha512-RBMg5FRL0I0gs51M/guSAj5/e14VQ4tpZnQNWwuDT66P14I43ItmPfIZRhO9fUVIPOAQXU47atlywZ/czoqFPA==", + "version": "2.1.7", + "resolved": "https://registry.npmjs.org/@eslint/object-schema/-/object-schema-2.1.7.tgz", + "integrity": "sha512-VtAOaymWVfZcmZbp6E2mympDIHvyjXs/12LqWYjVw6qjrfF+VK+fyG33kChz3nnK+SU5/NeHOqrTEHS8sXO3OA==", "dev": true, "license": "Apache-2.0", "engines": { @@ -1157,13 +1182,13 @@ } }, "node_modules/@eslint/plugin-kit": { - "version": "0.2.7", - "resolved": "https://registry.npmjs.org/@eslint/plugin-kit/-/plugin-kit-0.2.7.tgz", - "integrity": "sha512-JubJ5B2pJ4k4yGxaNLdbjrnk9d/iDz6/q8wOilpIowd6PJPgaxCuHBnBszq7Ce2TyMrywm5r4PnKm6V3iiZF+g==", + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/@eslint/plugin-kit/-/plugin-kit-0.4.1.tgz", + "integrity": 
"sha512-43/qtrDUokr7LJqoF2c3+RInu/t4zfrpYdoSDfYyhg52rwLV6TnOvdG4fXm7IkSB3wErkcmJS9iEhjVtOSEjjA==", "dev": true, "license": "Apache-2.0", "dependencies": { - "@eslint/core": "^0.12.0", + "@eslint/core": "^0.17.0", "levn": "^0.4.1" }, "engines": { @@ -1234,33 +1259,19 @@ } }, "node_modules/@humanfs/node": { - "version": "0.16.6", - "resolved": "https://registry.npmjs.org/@humanfs/node/-/node-0.16.6.tgz", - "integrity": "sha512-YuI2ZHQL78Q5HbhDiBA1X4LmYdXCKCMQIfw0pw7piHJwyREFebJUvrQN4cMssyES6x+vfUbx1CIpaQUKYdQZOw==", + "version": "0.16.7", + "resolved": "https://registry.npmjs.org/@humanfs/node/-/node-0.16.7.tgz", + "integrity": "sha512-/zUx+yOsIrG4Y43Eh2peDeKCxlRt/gET6aHfaKpuq267qXdYDFViVHfMaLyygZOnl0kGWxFIgsBy8QFuTLUXEQ==", "dev": true, "license": "Apache-2.0", "dependencies": { "@humanfs/core": "^0.19.1", - "@humanwhocodes/retry": "^0.3.0" + "@humanwhocodes/retry": "^0.4.0" }, "engines": { "node": ">=18.18.0" } }, - "node_modules/@humanfs/node/node_modules/@humanwhocodes/retry": { - "version": "0.3.1", - "resolved": "https://registry.npmjs.org/@humanwhocodes/retry/-/retry-0.3.1.tgz", - "integrity": "sha512-JBxkERygn7Bv/GbN5Rv8Ul6LVknS+5Bp6RgDC/O8gEBU/yeH5Ui5C/OlWrTb6qct7LjjfT6Re2NxB0ln0yYybA==", - "dev": true, - "license": "Apache-2.0", - "engines": { - "node": ">=18.18" - }, - "funding": { - "type": "github", - "url": "https://github.com/sponsors/nzakas" - } - }, "node_modules/@humanwhocodes/module-importer": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/@humanwhocodes/module-importer/-/module-importer-1.0.1.tgz", @@ -1276,9 +1287,9 @@ } }, "node_modules/@humanwhocodes/retry": { - "version": "0.4.2", - "resolved": "https://registry.npmjs.org/@humanwhocodes/retry/-/retry-0.4.2.tgz", - "integrity": "sha512-xeO57FpIu4p1Ri3Jq/EXq4ClRm86dVF2z/+kvFnyqVYRavTZmaFaUBbWCOuuTh0o/g7DSsk6kc2vrS4Vl5oPOQ==", + "version": "0.4.3", + "resolved": "https://registry.npmjs.org/@humanwhocodes/retry/-/retry-0.4.3.tgz", + "integrity": "sha512-bV0Tgo9K4hfPCek+aMAn81RppFKv2ySDQeMoSZuvTASywNTnVJCArCZE2FWqpvIatKu7VMRLWlR1EazvVhDyhQ==", "dev": true, "license": "Apache-2.0", "engines": { @@ -1290,17 +1301,13 @@ } }, "node_modules/@jridgewell/gen-mapping": { - "version": "0.3.8", - "resolved": "https://registry.npmjs.org/@jridgewell/gen-mapping/-/gen-mapping-0.3.8.tgz", - "integrity": "sha512-imAbBGkb+ebQyxKgzv5Hu2nmROxoDOXHh80evxdoXNOrvAnVx7zimzc1Oo5h9RlfV4vPXaE2iM5pOFbvOCClWA==", + "version": "0.3.13", + "resolved": "https://registry.npmjs.org/@jridgewell/gen-mapping/-/gen-mapping-0.3.13.tgz", + "integrity": "sha512-2kkt/7niJ6MgEPxF0bYdQ6etZaA+fQvDcLKckhy1yIQOzaoKjBBjSj63/aLVjYE3qhRt5dvM+uUyfCg6UKCBbA==", "license": "MIT", "dependencies": { - "@jridgewell/set-array": "^1.2.1", - "@jridgewell/sourcemap-codec": "^1.4.10", + "@jridgewell/sourcemap-codec": "^1.5.0", "@jridgewell/trace-mapping": "^0.3.24" - }, - "engines": { - "node": ">=6.0.0" } }, "node_modules/@jridgewell/resolve-uri": { @@ -1312,15 +1319,6 @@ "node": ">=6.0.0" } }, - "node_modules/@jridgewell/set-array": { - "version": "1.2.1", - "resolved": "https://registry.npmjs.org/@jridgewell/set-array/-/set-array-1.2.1.tgz", - "integrity": "sha512-R8gLRTZeyp03ymzP/6Lil/28tGeGEzhx1q2k703KGWRAI1VdvPIXdG70VJc2pAMw3NA6JKL5hhFu1sJX0Mnn/A==", - "license": "MIT", - "engines": { - "node": ">=6.0.0" - } - }, "node_modules/@jridgewell/sourcemap-codec": { "version": "1.5.5", "resolved": "https://registry.npmjs.org/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.5.5.tgz", @@ -1328,9 +1326,9 @@ "license": "MIT" }, "node_modules/@jridgewell/trace-mapping": { - 
"version": "0.3.25", - "resolved": "https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.25.tgz", - "integrity": "sha512-vNk6aEwybGtawWmy/PzwnGDOjCkLWSD2wqvjGGAgOAwCGWySYXfYoxt00IJkTF+8Lb57DwOb3Aa0o9CApepiYQ==", + "version": "0.3.31", + "resolved": "https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.31.tgz", + "integrity": "sha512-zzNR+SdQSDJzc8joaeP8QQoCQr8NuYx2dIIytl1QeBEZHJ9uW6hebsrYgbz8hJwUQao3TWCMtmfV8Nu1twOLAw==", "license": "MIT", "dependencies": { "@jridgewell/resolve-uri": "^3.1.0", @@ -1344,22 +1342,22 @@ "license": "Apache-2.0" }, "node_modules/@mantine/charts": { - "version": "8.0.1", - "resolved": "https://registry.npmjs.org/@mantine/charts/-/charts-8.0.1.tgz", - "integrity": "sha512-yntk4siXpQGSj83tDwftJw6fHTOBS6c/VWinjvTW29ptEdjBCxbKFfyyDc9UGVVuO7ovbdtpfCZBpuN2I7HPCA==", + "version": "8.0.2", + "resolved": "https://registry.npmjs.org/@mantine/charts/-/charts-8.0.2.tgz", + "integrity": "sha512-hVS1+CT+7e3+ZbU1xx7Nyx/5ZBSxzS+68SKeVLeOZPGl9Wx35CY1oLn0n53vQPWV2WFKd0u0Bq3d1iuaDpkzGA==", "license": "MIT", "peerDependencies": { - "@mantine/core": "8.0.1", - "@mantine/hooks": "8.0.1", + "@mantine/core": "8.0.2", + "@mantine/hooks": "8.0.2", "react": "^18.x || ^19.x", "react-dom": "^18.x || ^19.x", "recharts": "^2.13.3" } }, "node_modules/@mantine/core": { - "version": "8.0.1", - "resolved": "https://registry.npmjs.org/@mantine/core/-/core-8.0.1.tgz", - "integrity": "sha512-4ezaxKjChSPtawamQ3KrJq+x506uTouXlL0Z5fP+t105KnyxMrAJUENhbh2ivD4pq9Zh1BFiD9IWzyu3IXFR8w==", + "version": "8.0.2", + "resolved": "https://registry.npmjs.org/@mantine/core/-/core-8.0.2.tgz", + "integrity": "sha512-2Ps7bRTeTbRwAKTCL9xdflPz0pwOlTq6ohyTbDZMCADqecf09GHI7GiX+HJatqbPZ2t8jK0fN1b48YhjJaxTqg==", "license": "MIT", "dependencies": { "@floating-ui/react": "^0.26.28", @@ -1370,46 +1368,46 @@ "type-fest": "^4.27.0" }, "peerDependencies": { - "@mantine/hooks": "8.0.1", + "@mantine/hooks": "8.0.2", "react": "^18.x || ^19.x", "react-dom": "^18.x || ^19.x" } }, "node_modules/@mantine/dates": { - "version": "8.0.1", - "resolved": "https://registry.npmjs.org/@mantine/dates/-/dates-8.0.1.tgz", - "integrity": "sha512-YCmV5jiGE9Ts2uhNS217IA1Hd5kAa8oaEtfnU0bS1sL36zKEf2s6elmzY718XdF8tFil0jJWAj0jiCrA3/udMg==", + "version": "8.0.2", + "resolved": "https://registry.npmjs.org/@mantine/dates/-/dates-8.0.2.tgz", + "integrity": "sha512-V1xU00gECfykA4UFln8ulPsPHvaTncsg9zUbzCwqwEAYlZFG3Nnj5eBzzpV3IN1LNDPEVGb1gAOM6jZ+fi2uRQ==", "license": "MIT", "dependencies": { "clsx": "^2.1.1" }, "peerDependencies": { - "@mantine/core": "8.0.1", - "@mantine/hooks": "8.0.1", + "@mantine/core": "8.0.2", + "@mantine/hooks": "8.0.2", "dayjs": ">=1.0.0", "react": "^18.x || ^19.x", "react-dom": "^18.x || ^19.x" } }, "node_modules/@mantine/dropzone": { - "version": "8.0.1", - "resolved": "https://registry.npmjs.org/@mantine/dropzone/-/dropzone-8.0.1.tgz", - "integrity": "sha512-8PH5yrtA/ebCIwjs0m4J9qOvEyS/P4XmNlHrw0E389/qq64Ol7+/ZH7Xtiq64IaY8kvsMW1XHaV0c+bdYrijiA==", + "version": "8.0.2", + "resolved": "https://registry.npmjs.org/@mantine/dropzone/-/dropzone-8.0.2.tgz", + "integrity": "sha512-dWsz99QjWOQy7wDx4zzvBrPQ6l3201kg0iugk2Dm+MmN9mlboychz/LIZzoCGsodtQRLAsoTlN2zOqhsiggRfw==", "license": "MIT", "dependencies": { "react-dropzone": "14.3.8" }, "peerDependencies": { - "@mantine/core": "8.0.1", - "@mantine/hooks": "8.0.1", + "@mantine/core": "8.0.2", + "@mantine/hooks": "8.0.2", "react": "^18.x || ^19.x", "react-dom": "^18.x || ^19.x" } }, "node_modules/@mantine/form": { - "version": "8.0.1", - "resolved": 
"https://registry.npmjs.org/@mantine/form/-/form-8.0.1.tgz", - "integrity": "sha512-lQ94gn/9p60C+tKEW7psQ1tZHod58Q0bXLbRDadRKMwnqBb2WFoIuaQWPDo7ox+PqyOv28dtflgS+Lm95EbBhg==", + "version": "8.0.2", + "resolved": "https://registry.npmjs.org/@mantine/form/-/form-8.0.2.tgz", + "integrity": "sha512-vSp9BfrhC9o7RMRYMaND2UAflXO4i6c5F1qPkiM2FID6ye2RJxW8YHaGa3kA0VfBbhDw9sFBbl8p7ttE4RPzcw==", "license": "MIT", "dependencies": { "fast-deep-equal": "^3.1.3", @@ -1420,34 +1418,34 @@ } }, "node_modules/@mantine/hooks": { - "version": "8.0.1", - "resolved": "https://registry.npmjs.org/@mantine/hooks/-/hooks-8.0.1.tgz", - "integrity": "sha512-GvLdM4Ro3QcDyIgqrdXsUZmeeKye2TNL/k3mEr9JhM5KacHQjr83JPp0u9eLobn7kiyBqpLTYmVYAbmjJdCxHw==", + "version": "8.0.2", + "resolved": "https://registry.npmjs.org/@mantine/hooks/-/hooks-8.0.2.tgz", + "integrity": "sha512-0jpEdC0KIAZ54D5kd9rJudrEm6vkvnrL9yYHnkuNbxokXSzDdYA/wpHnKR5WW+u6fW4JF6A6A7gN1vXKeC9MSw==", "license": "MIT", "peerDependencies": { "react": "^18.x || ^19.x" } }, "node_modules/@mantine/notifications": { - "version": "8.0.1", - "resolved": "https://registry.npmjs.org/@mantine/notifications/-/notifications-8.0.1.tgz", - "integrity": "sha512-7TX9OyAmUcok3qffnheS7gTAMKDczETy8XEYDr38Sy/XIoXLjM+3CwO+a/vfd1F9oW2LvkahkHT0Ey+vBOVd0Q==", + "version": "8.0.2", + "resolved": "https://registry.npmjs.org/@mantine/notifications/-/notifications-8.0.2.tgz", + "integrity": "sha512-whSuoCCZxQF3VM40sumCte9tA79to8OCV/vv0z8PeVTj/eKlaTR+P9LKigO9ovhuNELrvvO3Rxcnno5aMBz0oA==", "license": "MIT", "dependencies": { - "@mantine/store": "8.0.1", + "@mantine/store": "8.0.2", "react-transition-group": "4.4.5" }, "peerDependencies": { - "@mantine/core": "8.0.1", - "@mantine/hooks": "8.0.1", + "@mantine/core": "8.0.2", + "@mantine/hooks": "8.0.2", "react": "^18.x || ^19.x", "react-dom": "^18.x || ^19.x" } }, "node_modules/@mantine/store": { - "version": "8.0.1", - "resolved": "https://registry.npmjs.org/@mantine/store/-/store-8.0.1.tgz", - "integrity": "sha512-3wfUDeiERXJEI+MGgRAbh+9aY35D9oE4UzquLqZh8cIiH5i5g64Y/eJx3PfjHgO5+Zeu6lbgTgL6k4lg4a2SBQ==", + "version": "8.0.2", + "resolved": "https://registry.npmjs.org/@mantine/store/-/store-8.0.2.tgz", + "integrity": "sha512-/LuizGWAXjVnLLZ55f0QYotiqb8GlHpIb4KRf4LqRkbsA6UAZEVb6beuk0vI2Azf6vfuh7sTHu1xVC5zI6C+Cw==", "license": "MIT", "peerDependencies": { "react": "^18.x || ^19.x" @@ -1464,16 +1462,16 @@ } }, "node_modules/@rolldown/pluginutils": { - "version": "1.0.0-beta.35", - "resolved": "https://registry.npmjs.org/@rolldown/pluginutils/-/pluginutils-1.0.0-beta.35.tgz", - "integrity": "sha512-slYrCpoxJUqzFDDNlvrOYRazQUNRvWPjXA17dAOISY3rDMxX6k8K4cj2H+hEYMHF81HO3uNd5rHVigAWRM5dSg==", + "version": "1.0.0-beta.47", + "resolved": "https://registry.npmjs.org/@rolldown/pluginutils/-/pluginutils-1.0.0-beta.47.tgz", + "integrity": "sha512-8QagwMH3kNCuzD8EWL8R2YPW5e4OrHNSAHRFDdmFqEwEaD/KcNKjVoumo+gP2vW5eKB2UPbM6vTYiGZX0ixLnw==", "dev": true, "license": "MIT" }, "node_modules/@rollup/rollup-android-arm-eabi": { - "version": "4.35.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm-eabi/-/rollup-android-arm-eabi-4.35.0.tgz", - "integrity": "sha512-uYQ2WfPaqz5QtVgMxfN6NpLD+no0MYHDBywl7itPYd3K5TjjSghNKmX8ic9S8NU8w81NVhJv/XojcHptRly7qQ==", + "version": "4.53.5", + "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm-eabi/-/rollup-android-arm-eabi-4.53.5.tgz", + "integrity": "sha512-iDGS/h7D8t7tvZ1t6+WPK04KD0MwzLZrG0se1hzBjSi5fyxlsiggoJHwh18PCFNn7tG43OWb6pdZ6Y+rMlmyNQ==", "cpu": [ "arm" ], @@ -1485,9 +1483,9 @@ ] }, 
"node_modules/@rollup/rollup-android-arm64": { - "version": "4.35.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm64/-/rollup-android-arm64-4.35.0.tgz", - "integrity": "sha512-FtKddj9XZudurLhdJnBl9fl6BwCJ3ky8riCXjEw3/UIbjmIY58ppWwPEvU3fNu+W7FUsAsB1CdH+7EQE6CXAPA==", + "version": "4.53.5", + "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm64/-/rollup-android-arm64-4.53.5.tgz", + "integrity": "sha512-wrSAViWvZHBMMlWk6EJhvg8/rjxzyEhEdgfMMjREHEq11EtJ6IP6yfcCH57YAEca2Oe3FNCE9DSTgU70EIGmVw==", "cpu": [ "arm64" ], @@ -1499,9 +1497,9 @@ ] }, "node_modules/@rollup/rollup-darwin-arm64": { - "version": "4.35.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-arm64/-/rollup-darwin-arm64-4.35.0.tgz", - "integrity": "sha512-Uk+GjOJR6CY844/q6r5DR/6lkPFOw0hjfOIzVx22THJXMxktXG6CbejseJFznU8vHcEBLpiXKY3/6xc+cBm65Q==", + "version": "4.53.5", + "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-arm64/-/rollup-darwin-arm64-4.53.5.tgz", + "integrity": "sha512-S87zZPBmRO6u1YXQLwpveZm4JfPpAa6oHBX7/ghSiGH3rz/KDgAu1rKdGutV+WUI6tKDMbaBJomhnT30Y2t4VQ==", "cpu": [ "arm64" ], @@ -1513,9 +1511,9 @@ ] }, "node_modules/@rollup/rollup-darwin-x64": { - "version": "4.35.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-x64/-/rollup-darwin-x64-4.35.0.tgz", - "integrity": "sha512-3IrHjfAS6Vkp+5bISNQnPogRAW5GAV1n+bNCrDwXmfMHbPl5EhTmWtfmwlJxFRUCBZ+tZ/OxDyU08aF6NI/N5Q==", + "version": "4.53.5", + "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-x64/-/rollup-darwin-x64-4.53.5.tgz", + "integrity": "sha512-YTbnsAaHo6VrAczISxgpTva8EkfQus0VPEVJCEaboHtZRIb6h6j0BNxRBOwnDciFTZLDPW5r+ZBmhL/+YpTZgA==", "cpu": [ "x64" ], @@ -1527,9 +1525,9 @@ ] }, "node_modules/@rollup/rollup-freebsd-arm64": { - "version": "4.35.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-freebsd-arm64/-/rollup-freebsd-arm64-4.35.0.tgz", - "integrity": "sha512-sxjoD/6F9cDLSELuLNnY0fOrM9WA0KrM0vWm57XhrIMf5FGiN8D0l7fn+bpUeBSU7dCgPV2oX4zHAsAXyHFGcQ==", + "version": "4.53.5", + "resolved": "https://registry.npmjs.org/@rollup/rollup-freebsd-arm64/-/rollup-freebsd-arm64-4.53.5.tgz", + "integrity": "sha512-1T8eY2J8rKJWzaznV7zedfdhD1BqVs1iqILhmHDq/bqCUZsrMt+j8VCTHhP0vdfbHK3e1IQ7VYx3jlKqwlf+vw==", "cpu": [ "arm64" ], @@ -1541,9 +1539,9 @@ ] }, "node_modules/@rollup/rollup-freebsd-x64": { - "version": "4.35.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-freebsd-x64/-/rollup-freebsd-x64-4.35.0.tgz", - "integrity": "sha512-2mpHCeRuD1u/2kruUiHSsnjWtHjqVbzhBkNVQ1aVD63CcexKVcQGwJ2g5VphOd84GvxfSvnnlEyBtQCE5hxVVw==", + "version": "4.53.5", + "resolved": "https://registry.npmjs.org/@rollup/rollup-freebsd-x64/-/rollup-freebsd-x64-4.53.5.tgz", + "integrity": "sha512-sHTiuXyBJApxRn+VFMaw1U+Qsz4kcNlxQ742snICYPrY+DDL8/ZbaC4DVIB7vgZmp3jiDaKA0WpBdP0aqPJoBQ==", "cpu": [ "x64" ], @@ -1555,9 +1553,9 @@ ] }, "node_modules/@rollup/rollup-linux-arm-gnueabihf": { - "version": "4.35.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-gnueabihf/-/rollup-linux-arm-gnueabihf-4.35.0.tgz", - "integrity": "sha512-mrA0v3QMy6ZSvEuLs0dMxcO2LnaCONs1Z73GUDBHWbY8tFFocM6yl7YyMu7rz4zS81NDSqhrUuolyZXGi8TEqg==", + "version": "4.53.5", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-gnueabihf/-/rollup-linux-arm-gnueabihf-4.53.5.tgz", + "integrity": "sha512-dV3T9MyAf0w8zPVLVBptVlzaXxka6xg1f16VAQmjg+4KMSTWDvhimI/Y6mp8oHwNrmnmVl9XxJ/w/mO4uIQONA==", "cpu": [ "arm" ], @@ -1569,9 +1567,9 @@ ] }, "node_modules/@rollup/rollup-linux-arm-musleabihf": { - "version": 
"4.35.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-musleabihf/-/rollup-linux-arm-musleabihf-4.35.0.tgz", - "integrity": "sha512-DnYhhzcvTAKNexIql8pFajr0PiDGrIsBYPRvCKlA5ixSS3uwo/CWNZxB09jhIapEIg945KOzcYEAGGSmTSpk7A==", + "version": "4.53.5", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-musleabihf/-/rollup-linux-arm-musleabihf-4.53.5.tgz", + "integrity": "sha512-wIGYC1x/hyjP+KAu9+ewDI+fi5XSNiUi9Bvg6KGAh2TsNMA3tSEs+Sh6jJ/r4BV/bx/CyWu2ue9kDnIdRyafcQ==", "cpu": [ "arm" ], @@ -1583,9 +1581,9 @@ ] }, "node_modules/@rollup/rollup-linux-arm64-gnu": { - "version": "4.35.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-gnu/-/rollup-linux-arm64-gnu-4.35.0.tgz", - "integrity": "sha512-uagpnH2M2g2b5iLsCTZ35CL1FgyuzzJQ8L9VtlJ+FckBXroTwNOaD0z0/UF+k5K3aNQjbm8LIVpxykUOQt1m/A==", + "version": "4.53.5", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-gnu/-/rollup-linux-arm64-gnu-4.53.5.tgz", + "integrity": "sha512-Y+qVA0D9d0y2FRNiG9oM3Hut/DgODZbU9I8pLLPwAsU0tUKZ49cyV1tzmB/qRbSzGvY8lpgGkJuMyuhH7Ma+Vg==", "cpu": [ "arm64" ], @@ -1597,9 +1595,9 @@ ] }, "node_modules/@rollup/rollup-linux-arm64-musl": { - "version": "4.35.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-musl/-/rollup-linux-arm64-musl-4.35.0.tgz", - "integrity": "sha512-XQxVOCd6VJeHQA/7YcqyV0/88N6ysSVzRjJ9I9UA/xXpEsjvAgDTgH3wQYz5bmr7SPtVK2TsP2fQ2N9L4ukoUg==", + "version": "4.53.5", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-musl/-/rollup-linux-arm64-musl-4.53.5.tgz", + "integrity": "sha512-juaC4bEgJsyFVfqhtGLz8mbopaWD+WeSOYr5E16y+1of6KQjc0BpwZLuxkClqY1i8sco+MdyoXPNiCkQou09+g==", "cpu": [ "arm64" ], @@ -1610,10 +1608,10 @@ "linux" ] }, - "node_modules/@rollup/rollup-linux-loongarch64-gnu": { - "version": "4.35.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-loongarch64-gnu/-/rollup-linux-loongarch64-gnu-4.35.0.tgz", - "integrity": "sha512-5pMT5PzfgwcXEwOaSrqVsz/LvjDZt+vQ8RT/70yhPU06PTuq8WaHhfT1LW+cdD7mW6i/J5/XIkX/1tCAkh1W6g==", + "node_modules/@rollup/rollup-linux-loong64-gnu": { + "version": "4.53.5", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-loong64-gnu/-/rollup-linux-loong64-gnu-4.53.5.tgz", + "integrity": "sha512-rIEC0hZ17A42iXtHX+EPJVL/CakHo+tT7W0pbzdAGuWOt2jxDFh7A/lRhsNHBcqL4T36+UiAgwO8pbmn3dE8wA==", "cpu": [ "loong64" ], @@ -1624,10 +1622,10 @@ "linux" ] }, - "node_modules/@rollup/rollup-linux-powerpc64le-gnu": { - "version": "4.35.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-powerpc64le-gnu/-/rollup-linux-powerpc64le-gnu-4.35.0.tgz", - "integrity": "sha512-c+zkcvbhbXF98f4CtEIP1EBA/lCic5xB0lToneZYvMeKu5Kamq3O8gqrxiYYLzlZH6E3Aq+TSW86E4ay8iD8EA==", + "node_modules/@rollup/rollup-linux-ppc64-gnu": { + "version": "4.53.5", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-ppc64-gnu/-/rollup-linux-ppc64-gnu-4.53.5.tgz", + "integrity": "sha512-T7l409NhUE552RcAOcmJHj3xyZ2h7vMWzcwQI0hvn5tqHh3oSoclf9WgTl+0QqffWFG8MEVZZP1/OBglKZx52Q==", "cpu": [ "ppc64" ], @@ -1639,9 +1637,23 @@ ] }, "node_modules/@rollup/rollup-linux-riscv64-gnu": { - "version": "4.35.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-gnu/-/rollup-linux-riscv64-gnu-4.35.0.tgz", - "integrity": "sha512-s91fuAHdOwH/Tad2tzTtPX7UZyytHIRR6V4+2IGlV0Cej5rkG0R61SX4l4y9sh0JBibMiploZx3oHKPnQBKe4g==", + "version": "4.53.5", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-gnu/-/rollup-linux-riscv64-gnu-4.53.5.tgz", + "integrity": 
"sha512-7OK5/GhxbnrMcxIFoYfhV/TkknarkYC1hqUw1wU2xUN3TVRLNT5FmBv4KkheSG2xZ6IEbRAhTooTV2+R5Tk0lQ==", + "cpu": [ + "riscv64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-riscv64-musl": { + "version": "4.53.5", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-musl/-/rollup-linux-riscv64-musl-4.53.5.tgz", + "integrity": "sha512-GwuDBE/PsXaTa76lO5eLJTyr2k8QkPipAyOrs4V/KJufHCZBJ495VCGJol35grx9xryk4V+2zd3Ri+3v7NPh+w==", "cpu": [ "riscv64" ], @@ -1653,9 +1665,9 @@ ] }, "node_modules/@rollup/rollup-linux-s390x-gnu": { - "version": "4.35.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-s390x-gnu/-/rollup-linux-s390x-gnu-4.35.0.tgz", - "integrity": "sha512-hQRkPQPLYJZYGP+Hj4fR9dDBMIM7zrzJDWFEMPdTnTy95Ljnv0/4w/ixFw3pTBMEuuEuoqtBINYND4M7ujcuQw==", + "version": "4.53.5", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-s390x-gnu/-/rollup-linux-s390x-gnu-4.53.5.tgz", + "integrity": "sha512-IAE1Ziyr1qNfnmiQLHBURAD+eh/zH1pIeJjeShleII7Vj8kyEm2PF77o+lf3WTHDpNJcu4IXJxNO0Zluro8bOw==", "cpu": [ "s390x" ], @@ -1667,9 +1679,9 @@ ] }, "node_modules/@rollup/rollup-linux-x64-gnu": { - "version": "4.35.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-gnu/-/rollup-linux-x64-gnu-4.35.0.tgz", - "integrity": "sha512-Pim1T8rXOri+0HmV4CdKSGrqcBWX0d1HoPnQ0uw0bdp1aP5SdQVNBy8LjYncvnLgu3fnnCt17xjWGd4cqh8/hA==", + "version": "4.53.5", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-gnu/-/rollup-linux-x64-gnu-4.53.5.tgz", + "integrity": "sha512-Pg6E+oP7GvZ4XwgRJBuSXZjcqpIW3yCBhK4BcsANvb47qMvAbCjR6E+1a/U2WXz1JJxp9/4Dno3/iSJLcm5auw==", "cpu": [ "x64" ], @@ -1681,9 +1693,9 @@ ] }, "node_modules/@rollup/rollup-linux-x64-musl": { - "version": "4.35.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-musl/-/rollup-linux-x64-musl-4.35.0.tgz", - "integrity": "sha512-QysqXzYiDvQWfUiTm8XmJNO2zm9yC9P/2Gkrwg2dH9cxotQzunBHYr6jk4SujCTqnfGxduOmQcI7c2ryuW8XVg==", + "version": "4.53.5", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-musl/-/rollup-linux-x64-musl-4.53.5.tgz", + "integrity": "sha512-txGtluxDKTxaMDzUduGP0wdfng24y1rygUMnmlUJ88fzCCULCLn7oE5kb2+tRB+MWq1QDZT6ObT5RrR8HFRKqg==", "cpu": [ "x64" ], @@ -1694,10 +1706,24 @@ "linux" ] }, + "node_modules/@rollup/rollup-openharmony-arm64": { + "version": "4.53.5", + "resolved": "https://registry.npmjs.org/@rollup/rollup-openharmony-arm64/-/rollup-openharmony-arm64-4.53.5.tgz", + "integrity": "sha512-3DFiLPnTxiOQV993fMc+KO8zXHTcIjgaInrqlG8zDp1TlhYl6WgrOHuJkJQ6M8zHEcntSJsUp1XFZSY8C1DYbg==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "openharmony" + ] + }, "node_modules/@rollup/rollup-win32-arm64-msvc": { - "version": "4.35.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-arm64-msvc/-/rollup-win32-arm64-msvc-4.35.0.tgz", - "integrity": "sha512-OUOlGqPkVJCdJETKOCEf1mw848ZyJ5w50/rZ/3IBQVdLfR5jk/6Sr5m3iO2tdPgwo0x7VcncYuOvMhBWZq8ayg==", + "version": "4.53.5", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-arm64-msvc/-/rollup-win32-arm64-msvc-4.53.5.tgz", + "integrity": "sha512-nggc/wPpNTgjGg75hu+Q/3i32R00Lq1B6N1DO7MCU340MRKL3WZJMjA9U4K4gzy3dkZPXm9E1Nc81FItBVGRlA==", "cpu": [ "arm64" ], @@ -1709,9 +1735,9 @@ ] }, "node_modules/@rollup/rollup-win32-ia32-msvc": { - "version": "4.35.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-ia32-msvc/-/rollup-win32-ia32-msvc-4.35.0.tgz", - "integrity": 
"sha512-2/lsgejMrtwQe44glq7AFFHLfJBPafpsTa6JvP2NGef/ifOa4KBoglVf7AKN7EV9o32evBPRqfg96fEHzWo5kw==", + "version": "4.53.5", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-ia32-msvc/-/rollup-win32-ia32-msvc-4.53.5.tgz", + "integrity": "sha512-U/54pTbdQpPLBdEzCT6NBCFAfSZMvmjr0twhnD9f4EIvlm9wy3jjQ38yQj1AGznrNO65EWQMgm/QUjuIVrYF9w==", "cpu": [ "ia32" ], @@ -1722,10 +1748,24 @@ "win32" ] }, + "node_modules/@rollup/rollup-win32-x64-gnu": { + "version": "4.53.5", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-x64-gnu/-/rollup-win32-x64-gnu-4.53.5.tgz", + "integrity": "sha512-2NqKgZSuLH9SXBBV2dWNRCZmocgSOx8OJSdpRaEcRlIfX8YrKxUT6z0F1NpvDVhOsl190UFTRh2F2WDWWCYp3A==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ] + }, "node_modules/@rollup/rollup-win32-x64-msvc": { - "version": "4.35.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-x64-msvc/-/rollup-win32-x64-msvc-4.35.0.tgz", - "integrity": "sha512-PIQeY5XDkrOysbQblSW7v3l1MDZzkTEzAfTPkj5VAu3FW8fS4ynyLg2sINp0fp3SjZ8xkRYpLqoKcYqAkhU1dw==", + "version": "4.53.5", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-x64-msvc/-/rollup-win32-x64-msvc-4.53.5.tgz", + "integrity": "sha512-JRpZUhCfhZ4keB5v0fe02gQJy05GqboPOaxvjugW04RLSYYoB/9t2lx2u/tMs/Na/1NXfY8QYjgRljRpN+MjTQ==", "cpu": [ "x64" ], @@ -1745,9 +1785,9 @@ "license": "Apache-2.0" }, "node_modules/@swc/wasm": { - "version": "1.13.20", - "resolved": "https://registry.npmjs.org/@swc/wasm/-/wasm-1.13.20.tgz", - "integrity": "sha512-NJzN+QrbdwXeVTfTYiHkqv13zleOCQA52NXBOrwKvjxWJQecRqakjUhUP2z8lqs7eWVthko4Cilqs+VeBrwo3Q==", + "version": "1.15.7", + "resolved": "https://registry.npmjs.org/@swc/wasm/-/wasm-1.15.7.tgz", + "integrity": "sha512-m1Cslgkp7gFIUB2ZiIUHMoUskwxOAi9uaf27inoKb7Oc8MkMjt+eNTeSyeGckkwRtMQiybKYTGGnA5imxSsedQ==", "dev": true, "license": "Apache-2.0" }, @@ -1805,9 +1845,9 @@ } }, "node_modules/@testing-library/jest-dom": { - "version": "6.8.0", - "resolved": "https://registry.npmjs.org/@testing-library/jest-dom/-/jest-dom-6.8.0.tgz", - "integrity": "sha512-WgXcWzVM6idy5JaftTVC8Vs83NKRmGJz4Hqs4oyOuO2J4r/y79vvKZsb+CaGyCSEbUPI6OsewfPd0G1A0/TUZQ==", + "version": "6.9.1", + "resolved": "https://registry.npmjs.org/@testing-library/jest-dom/-/jest-dom-6.9.1.tgz", + "integrity": "sha512-zIcONa+hVtVSSep9UT3jZ5rizo2BsxgyDYU7WFD5eICBE7no3881HGeb/QkGfsJs6JTkY1aQhT7rIPC7e+0nnA==", "dev": true, "license": "MIT", "dependencies": { @@ -1832,9 +1872,9 @@ "license": "MIT" }, "node_modules/@testing-library/react": { - "version": "16.3.0", - "resolved": "https://registry.npmjs.org/@testing-library/react/-/react-16.3.0.tgz", - "integrity": "sha512-kFSyxiEDwv1WLl2fgsq6pPBbw5aWKrsY2/noi1Id0TK0UParSF62oFQFGHXIyaG4pp2tEub/Zlel+fjjZILDsw==", + "version": "16.3.1", + "resolved": "https://registry.npmjs.org/@testing-library/react/-/react-16.3.1.tgz", + "integrity": "sha512-gr4KtAWqIOQoucWYD/f6ki+j5chXfcPc74Col/6poTyqTmn7zRmodWahWRCp8tYd+GMqBonw6hstNzqjbs6gjw==", "dev": true, "license": "MIT", "dependencies": { @@ -1881,19 +1921,20 @@ "license": "MIT" }, "node_modules/@types/chai": { - "version": "5.2.2", - "resolved": "https://registry.npmjs.org/@types/chai/-/chai-5.2.2.tgz", - "integrity": "sha512-8kB30R7Hwqf40JPiKhVzodJs2Qc1ZJ5zuT3uzw5Hq/dhNCl3G3l83jfpdI1e20BP348+fV7VIL/+FxaXkqBmWg==", + "version": "5.2.3", + "resolved": "https://registry.npmjs.org/@types/chai/-/chai-5.2.3.tgz", + "integrity": "sha512-Mw558oeA9fFbv65/y4mHtXDs9bPnFMZAL/jxdPFUpOHHIXX91mcgEHbS5Lahr+pwZFR8A7GQleRWeI6cGFC2UA==", "dev": 
true, "license": "MIT", "dependencies": { - "@types/deep-eql": "*" + "@types/deep-eql": "*", + "assertion-error": "^2.0.1" } }, "node_modules/@types/d3-array": { - "version": "3.2.1", - "resolved": "https://registry.npmjs.org/@types/d3-array/-/d3-array-3.2.1.tgz", - "integrity": "sha512-Y2Jn2idRrLzUfAKV2LyRImR+y4oa2AntrgID95SHJxuMUrkNXmanDSed71sRNZysveJVt1hLLemQZIady0FpEg==", + "version": "3.2.2", + "resolved": "https://registry.npmjs.org/@types/d3-array/-/d3-array-3.2.2.tgz", + "integrity": "sha512-hOLWVbm7uRza0BYXpIIW5pxfrKe0W+D5lrFiAEYR+pb6w3N2SwSMaJbXdUfSEv+dT4MfHBLtn5js0LAWaO6otw==", "license": "MIT" }, "node_modules/@types/d3-color": { @@ -1961,20 +2002,22 @@ "license": "MIT" }, "node_modules/@types/estree": { - "version": "1.0.6", - "resolved": "https://registry.npmjs.org/@types/estree/-/estree-1.0.6.tgz", - "integrity": "sha512-AYnb1nQyY49te+VRAVgmzfcgjYS91mY5P0TKUDCLEM+gNnA+3T6rWITXRLYCpahpqSQbN5cE+gHpnPyXjHWxcw==", + "version": "1.0.8", + "resolved": "https://registry.npmjs.org/@types/estree/-/estree-1.0.8.tgz", + "integrity": "sha512-dWHzHa2WqEXI/O1E9OjrocMTKJl2mSrEolh1Iomrv6U+JuNwaHXsXx9bLu5gG7BUWFIN0skIQJQ/L1rIex4X6w==", "dev": true, "license": "MIT" }, "node_modules/@types/hoist-non-react-statics": { - "version": "3.3.6", - "resolved": "https://registry.npmjs.org/@types/hoist-non-react-statics/-/hoist-non-react-statics-3.3.6.tgz", - "integrity": "sha512-lPByRJUer/iN/xa4qpyL0qmL11DqNW81iU/IG1S3uvRUq4oKagz8VCxZjiWkumgt66YT3vOdDgZ0o32sGKtCEw==", + "version": "3.3.7", + "resolved": "https://registry.npmjs.org/@types/hoist-non-react-statics/-/hoist-non-react-statics-3.3.7.tgz", + "integrity": "sha512-PQTyIulDkIDro8P+IHbKCsw7U2xxBYflVzW/FgWdCAePD9xGSidgA76/GeJ6lBKoblyhf9pBY763gbrN+1dI8g==", "license": "MIT", "dependencies": { - "@types/react": "*", "hoist-non-react-statics": "^3.3.0" + }, + "peerDependencies": { + "@types/react": "*" } }, "node_modules/@types/json-schema": { @@ -1991,28 +2034,28 @@ "license": "MIT" }, "node_modules/@types/react": { - "version": "19.1.16", - "resolved": "https://registry.npmjs.org/@types/react/-/react-19.1.16.tgz", - "integrity": "sha512-WBM/nDbEZmDUORKnh5i1bTnAz6vTohUf9b8esSMu+b24+srbaxa04UbJgWx78CVfNXA20sNu0odEIluZDFdCog==", + "version": "19.2.7", + "resolved": "https://registry.npmjs.org/@types/react/-/react-19.2.7.tgz", + "integrity": "sha512-MWtvHrGZLFttgeEj28VXHxpmwYbor/ATPYbBfSFZEIRK0ecCFLl2Qo55z52Hss+UV9CRN7trSeq1zbgx7YDWWg==", "license": "MIT", "dependencies": { - "csstype": "^3.0.2" + "csstype": "^3.2.2" } }, "node_modules/@types/react-dom": { - "version": "19.1.9", - "resolved": "https://registry.npmjs.org/@types/react-dom/-/react-dom-19.1.9.tgz", - "integrity": "sha512-qXRuZaOsAdXKFyOhRBg6Lqqc0yay13vN7KrIg4L7N4aaHN68ma9OK3NE1BoDFgFOTfM7zg+3/8+2n8rLUH3OKQ==", + "version": "19.2.3", + "resolved": "https://registry.npmjs.org/@types/react-dom/-/react-dom-19.2.3.tgz", + "integrity": "sha512-jp2L/eY6fn+KgVVQAOqYItbF0VY/YApe5Mz2F0aykSO8gx31bYCZyvSeYxCHKvzHG5eZjc+zyaS5BrBWya2+kQ==", "dev": true, "license": "MIT", "peerDependencies": { - "@types/react": "^19.0.0" + "@types/react": "^19.2.0" } }, "node_modules/@videojs/http-streaming": { - "version": "3.17.0", - "resolved": "https://registry.npmjs.org/@videojs/http-streaming/-/http-streaming-3.17.0.tgz", - "integrity": "sha512-Ch1P3tvvIEezeZXyK11UfWgp4cWKX4vIhZ30baN/lRinqdbakZ5hiAI3pGjRy3d+q/Epyc8Csz5xMdKNNGYpcw==", + "version": "3.17.2", + "resolved": "https://registry.npmjs.org/@videojs/http-streaming/-/http-streaming-3.17.2.tgz", + "integrity": 
"sha512-VBQ3W4wnKnVKb/limLdtSD2rAd5cmHN70xoMf4OmuDd0t2kfJX04G+sfw6u2j8oOm2BXYM9E1f4acHruqKnM1g==", "license": "Apache-2.0", "dependencies": { "@babel/runtime": "^7.12.5", @@ -2058,13 +2101,13 @@ } }, "node_modules/@vitejs/plugin-react-swc": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/@vitejs/plugin-react-swc/-/plugin-react-swc-4.1.0.tgz", - "integrity": "sha512-Ff690TUck0Anlh7wdIcnsVMhofeEVgm44Y4OYdeeEEPSKyZHzDI9gfVBvySEhDfXtBp8tLCbfsVKPWEMEjq8/g==", + "version": "4.2.2", + "resolved": "https://registry.npmjs.org/@vitejs/plugin-react-swc/-/plugin-react-swc-4.2.2.tgz", + "integrity": "sha512-x+rE6tsxq/gxrEJN3Nv3dIV60lFflPj94c90b+NNo6n1QV1QQUTLoL0MpaOVasUZ0zqVBn7ead1B5ecx1JAGfA==", "dev": true, "license": "MIT", "dependencies": { - "@rolldown/pluginutils": "1.0.0-beta.35", + "@rolldown/pluginutils": "1.0.0-beta.47", "@swc/core": "^1.13.5" }, "engines": { @@ -2190,18 +2233,18 @@ } }, "node_modules/@xmldom/xmldom": { - "version": "0.8.10", - "resolved": "https://registry.npmjs.org/@xmldom/xmldom/-/xmldom-0.8.10.tgz", - "integrity": "sha512-2WALfTl4xo2SkGCYRt6rDTFfk9R1czmBvUQy12gK2KuRKIpWEhcbbzy8EZXtz/jkRqHX8bFEc6FC1HjX4TUWYw==", + "version": "0.8.11", + "resolved": "https://registry.npmjs.org/@xmldom/xmldom/-/xmldom-0.8.11.tgz", + "integrity": "sha512-cQzWCtO6C8TQiYl1ruKNn2U6Ao4o4WBBcbL61yJl84x+j5sOWWFU9X7DpND8XZG3daDppSsigMdfAIl2upQBRw==", "license": "MIT", "engines": { "node": ">=10.0.0" } }, "node_modules/acorn": { - "version": "8.14.1", - "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.14.1.tgz", - "integrity": "sha512-OvQ/2pUDKmgfCg++xsTX1wGxfTaszcHVcTctW4UJB4hibJx2HXxxO5UmVgyjMa+ZDsiaf5wWLXYpRWMmBI0QHg==", + "version": "8.15.0", + "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.15.0.tgz", + "integrity": "sha512-NZyJarBfL7nWwIq+FDL6Zp/yHEhePMNnnJ0y3qfieCrmNvYct8uvtiV41UvlSe6apAfk0fY1FbWx+NwfmpvtTg==", "dev": true, "license": "MIT", "bin": { @@ -2278,6 +2321,19 @@ "react-dom": "^17.0.0 || ^18.0.0 || ^19.0.0" } }, + "node_modules/allotment/node_modules/use-resize-observer": { + "version": "9.1.0", + "resolved": "https://registry.npmjs.org/use-resize-observer/-/use-resize-observer-9.1.0.tgz", + "integrity": "sha512-R25VqO9Wb3asSD4eqtcxk8sJalvIOYBqS8MNZlpDSQ4l4xMQxC/J7Id9HoTqPq8FwULIn0PVW+OAqF2dyYbjow==", + "license": "MIT", + "dependencies": { + "@juggle/resize-observer": "^3.3.1" + }, + "peerDependencies": { + "react": "16.8.0 - 18", + "react-dom": "16.8.0 - 18" + } + }, "node_modules/ansi-regex": { "version": "5.0.1", "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", @@ -2373,9 +2429,9 @@ } }, "node_modules/brace-expansion": { - "version": "1.1.11", - "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", - "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", + "version": "1.1.12", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.12.tgz", + "integrity": "sha512-9T9UjW3r0UW5c1Q7GTwllptXwhvYmEzFhzMfZ9H7FQWt+uZePjZPjBP/W1ZEyZ1twGWom5/56TF4lPcqjnDHcg==", "dev": true, "license": "MIT", "dependencies": { @@ -2495,12 +2551,16 @@ "license": "MIT" }, "node_modules/cookie": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/cookie/-/cookie-1.0.2.tgz", - "integrity": "sha512-9Kr/j4O16ISv8zBBhJoi4bXOYNTkFLOqSL3UDB0njXxCXNezjeyVrJyGOWtgfs/q2km1gwBcfH8q1yEGoMYunA==", + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/cookie/-/cookie-1.1.1.tgz", + "integrity": 
"sha512-ei8Aos7ja0weRpFzJnEA9UHJ/7XQmqglbRwnf2ATjcB9Wq874VKH9kfjjirM6UhU2/E5fFYadylyhFldcqSidQ==", "license": "MIT", "engines": { "node": ">=18" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/express" } }, "node_modules/cosmiconfig": { @@ -2565,14 +2625,14 @@ "license": "MIT" }, "node_modules/cssstyle": { - "version": "5.3.1", - "resolved": "https://registry.npmjs.org/cssstyle/-/cssstyle-5.3.1.tgz", - "integrity": "sha512-g5PC9Aiph9eiczFpcgUhd9S4UUO3F+LHGRIi5NUMZ+4xtoIYbHNZwZnWA2JsFGe8OU8nl4WyaEFiZuGuxlutJQ==", + "version": "5.3.5", + "resolved": "https://registry.npmjs.org/cssstyle/-/cssstyle-5.3.5.tgz", + "integrity": "sha512-GlsEptulso7Jg0VaOZ8BXQi3AkYM5BOJKEO/rjMidSCq70FkIC5y0eawrCXeYzxgt3OCf4Ls+eoxN+/05vN0Ag==", "dev": true, "license": "MIT", "dependencies": { - "@asamuzakjp/css-color": "^4.0.3", - "@csstools/css-syntax-patches-for-csstree": "^1.0.14", + "@asamuzakjp/css-color": "^4.1.1", + "@csstools/css-syntax-patches-for-csstree": "^1.0.21", "css-tree": "^3.1.0" }, "engines": { @@ -2580,9 +2640,9 @@ } }, "node_modules/csstype": { - "version": "3.1.3", - "resolved": "https://registry.npmjs.org/csstype/-/csstype-3.1.3.tgz", - "integrity": "sha512-M1uQkMl8rQK/szD0LNhtqxIPLpimGm8sOBwU7lLnCpSbTyY3yeU1Vc7l4KT5zT4s/yOxHH5O7tIuuLOCnLADRw==", + "version": "3.2.3", + "resolved": "https://registry.npmjs.org/csstype/-/csstype-3.2.3.tgz", + "integrity": "sha512-z1HGKcYy2xA8AGQfwrn0PAy+PB7X/GSj3UVJW9qKyn43xWa+gl5nXmU4qqLMRzWVLFC8KusUX8T/0kCiOYpAIQ==", "license": "MIT" }, "node_modules/d3-array": { @@ -2721,9 +2781,9 @@ } }, "node_modules/dayjs": { - "version": "1.11.13", - "resolved": "https://registry.npmjs.org/dayjs/-/dayjs-1.11.13.tgz", - "integrity": "sha512-oaMBel6gjolK862uaPQOVTA7q3TZhuSvuMQAAglQDOWYO9A91IrAOUJEyKVlqJlHE0vq5p5UXxzdPfMH/x6xNg==", + "version": "1.11.19", + "resolved": "https://registry.npmjs.org/dayjs/-/dayjs-1.11.19.tgz", + "integrity": "sha512-t5EcLVS6QPBNqM2z8fakk/NKel+Xzshgt8FFKAn+qwlD1pzZWxh0nVCrvFK7ZDb6XucZeF9z8C7CBWTRIVApAw==", "license": "MIT" }, "node_modules/debug": { @@ -2834,9 +2894,9 @@ } }, "node_modules/error-ex": { - "version": "1.3.2", - "resolved": "https://registry.npmjs.org/error-ex/-/error-ex-1.3.2.tgz", - "integrity": "sha512-7dFHNmqeFSEt2ZBsCriorKnn3Z2pj+fd9kmI6QoWw4//DL+icEBfc0U7qJCisqrTsKTjw4fNFy2pW9OqStD84g==", + "version": "1.3.4", + "resolved": "https://registry.npmjs.org/error-ex/-/error-ex-1.3.4.tgz", + "integrity": "sha512-sqQamAnR14VgCr1A618A3sGrygcpK+HEbenA/HiEAkkUwcZIIB/tgWqHFxWgOyDh4nB4JCRimh79dR5Ywc9MDQ==", "license": "MIT", "dependencies": { "is-arrayish": "^0.2.1" @@ -2856,9 +2916,9 @@ "license": "MIT" }, "node_modules/esbuild": { - "version": "0.25.1", - "resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.25.1.tgz", - "integrity": "sha512-BGO5LtrGC7vxnqucAe/rmvKdJllfGaYWdyABvyMoXQlfYMb2bbRuReWR5tEGE//4LcNJj9XrkovTqNYRFZHAMQ==", + "version": "0.27.2", + "resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.27.2.tgz", + "integrity": "sha512-HyNQImnsOC7X9PMNaCIeAm4ISCQXs5a5YasTXVliKv4uuBo1dKrG0A+uQS8M5eXjVMnLg3WgXaKvprHlFJQffw==", "dev": true, "hasInstallScript": true, "license": "MIT", @@ -2869,31 +2929,32 @@ "node": ">=18" }, "optionalDependencies": { - "@esbuild/aix-ppc64": "0.25.1", - "@esbuild/android-arm": "0.25.1", - "@esbuild/android-arm64": "0.25.1", - "@esbuild/android-x64": "0.25.1", - "@esbuild/darwin-arm64": "0.25.1", - "@esbuild/darwin-x64": "0.25.1", - "@esbuild/freebsd-arm64": "0.25.1", - "@esbuild/freebsd-x64": "0.25.1", - "@esbuild/linux-arm": "0.25.1", - 
"@esbuild/linux-arm64": "0.25.1", - "@esbuild/linux-ia32": "0.25.1", - "@esbuild/linux-loong64": "0.25.1", - "@esbuild/linux-mips64el": "0.25.1", - "@esbuild/linux-ppc64": "0.25.1", - "@esbuild/linux-riscv64": "0.25.1", - "@esbuild/linux-s390x": "0.25.1", - "@esbuild/linux-x64": "0.25.1", - "@esbuild/netbsd-arm64": "0.25.1", - "@esbuild/netbsd-x64": "0.25.1", - "@esbuild/openbsd-arm64": "0.25.1", - "@esbuild/openbsd-x64": "0.25.1", - "@esbuild/sunos-x64": "0.25.1", - "@esbuild/win32-arm64": "0.25.1", - "@esbuild/win32-ia32": "0.25.1", - "@esbuild/win32-x64": "0.25.1" + "@esbuild/aix-ppc64": "0.27.2", + "@esbuild/android-arm": "0.27.2", + "@esbuild/android-arm64": "0.27.2", + "@esbuild/android-x64": "0.27.2", + "@esbuild/darwin-arm64": "0.27.2", + "@esbuild/darwin-x64": "0.27.2", + "@esbuild/freebsd-arm64": "0.27.2", + "@esbuild/freebsd-x64": "0.27.2", + "@esbuild/linux-arm": "0.27.2", + "@esbuild/linux-arm64": "0.27.2", + "@esbuild/linux-ia32": "0.27.2", + "@esbuild/linux-loong64": "0.27.2", + "@esbuild/linux-mips64el": "0.27.2", + "@esbuild/linux-ppc64": "0.27.2", + "@esbuild/linux-riscv64": "0.27.2", + "@esbuild/linux-s390x": "0.27.2", + "@esbuild/linux-x64": "0.27.2", + "@esbuild/netbsd-arm64": "0.27.2", + "@esbuild/netbsd-x64": "0.27.2", + "@esbuild/openbsd-arm64": "0.27.2", + "@esbuild/openbsd-x64": "0.27.2", + "@esbuild/openharmony-arm64": "0.27.2", + "@esbuild/sunos-x64": "0.27.2", + "@esbuild/win32-arm64": "0.27.2", + "@esbuild/win32-ia32": "0.27.2", + "@esbuild/win32-x64": "0.27.2" } }, "node_modules/escape-string-regexp": { @@ -2909,33 +2970,32 @@ } }, "node_modules/eslint": { - "version": "9.22.0", - "resolved": "https://registry.npmjs.org/eslint/-/eslint-9.22.0.tgz", - "integrity": "sha512-9V/QURhsRN40xuHXWjV64yvrzMjcz7ZyNoF2jJFmy9j/SLk0u1OLSZgXi28MrXjymnjEGSR80WCdab3RGMDveQ==", + "version": "9.39.2", + "resolved": "https://registry.npmjs.org/eslint/-/eslint-9.39.2.tgz", + "integrity": "sha512-LEyamqS7W5HB3ujJyvi0HQK/dtVINZvd5mAAp9eT5S/ujByGjiZLCzPcHVzuXbpJDJF/cxwHlfceVUDZ2lnSTw==", "dev": true, "license": "MIT", "dependencies": { - "@eslint-community/eslint-utils": "^4.2.0", + "@eslint-community/eslint-utils": "^4.8.0", "@eslint-community/regexpp": "^4.12.1", - "@eslint/config-array": "^0.19.2", - "@eslint/config-helpers": "^0.1.0", - "@eslint/core": "^0.12.0", - "@eslint/eslintrc": "^3.3.0", - "@eslint/js": "9.22.0", - "@eslint/plugin-kit": "^0.2.7", + "@eslint/config-array": "^0.21.1", + "@eslint/config-helpers": "^0.4.2", + "@eslint/core": "^0.17.0", + "@eslint/eslintrc": "^3.3.1", + "@eslint/js": "9.39.2", + "@eslint/plugin-kit": "^0.4.1", "@humanfs/node": "^0.16.6", "@humanwhocodes/module-importer": "^1.0.1", "@humanwhocodes/retry": "^0.4.2", "@types/estree": "^1.0.6", - "@types/json-schema": "^7.0.15", "ajv": "^6.12.4", "chalk": "^4.0.0", "cross-spawn": "^7.0.6", "debug": "^4.3.2", "escape-string-regexp": "^4.0.0", - "eslint-scope": "^8.3.0", - "eslint-visitor-keys": "^4.2.0", - "espree": "^10.3.0", + "eslint-scope": "^8.4.0", + "eslint-visitor-keys": "^4.2.1", + "espree": "^10.4.0", "esquery": "^1.5.0", "esutils": "^2.0.2", "fast-deep-equal": "^3.1.3", @@ -2983,9 +3043,9 @@ } }, "node_modules/eslint-plugin-react-refresh": { - "version": "0.4.19", - "resolved": "https://registry.npmjs.org/eslint-plugin-react-refresh/-/eslint-plugin-react-refresh-0.4.19.tgz", - "integrity": "sha512-eyy8pcr/YxSYjBoqIFSrlbn9i/xvxUFa8CjzAYo9cFjgGXqq1hyjihcpZvxRLalpaWmueWR81xn7vuKmAFijDQ==", + "version": "0.4.26", + "resolved": 
"https://registry.npmjs.org/eslint-plugin-react-refresh/-/eslint-plugin-react-refresh-0.4.26.tgz", + "integrity": "sha512-1RETEylht2O6FM/MvgnyvT+8K21wLqDNg4qD51Zj3guhjt433XbnnkVttHMyaVyAFD03QSV4LPS5iE3VQmO7XQ==", "dev": true, "license": "MIT", "peerDependencies": { @@ -2993,9 +3053,9 @@ } }, "node_modules/eslint-scope": { - "version": "8.3.0", - "resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-8.3.0.tgz", - "integrity": "sha512-pUNxi75F8MJ/GdeKtVLSbYg4ZI34J6C0C7sbL4YOp2exGwen7ZsuBqKzUhXd0qMQ362yET3z+uPwKeg/0C2XCQ==", + "version": "8.4.0", + "resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-8.4.0.tgz", + "integrity": "sha512-sNXOfKCn74rt8RICKMvJS7XKV/Xk9kA7DyJr8mJik3S7Cwgy3qlkkmyS2uQB3jiJg6VNdZd/pDBJu0nvG2NlTg==", "dev": true, "license": "BSD-2-Clause", "dependencies": { @@ -3010,9 +3070,9 @@ } }, "node_modules/eslint-visitor-keys": { - "version": "4.2.0", - "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-4.2.0.tgz", - "integrity": "sha512-UyLnSehNt62FFhSwjZlHmeokpRK59rcz29j+F1/aDgbkbRTk7wIc9XzdoasMUbRNKDM0qQt/+BJ4BrpFeABemw==", + "version": "4.2.1", + "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-4.2.1.tgz", + "integrity": "sha512-Uhdk5sfqcee/9H/rCOJikYz67o0a2Tw2hGRPOG2Y1R2dg7brRe1uG0yaNQDHu+TO/uQPF/5eCapvYSmHUjt7JQ==", "dev": true, "license": "Apache-2.0", "engines": { @@ -3023,15 +3083,15 @@ } }, "node_modules/espree": { - "version": "10.3.0", - "resolved": "https://registry.npmjs.org/espree/-/espree-10.3.0.tgz", - "integrity": "sha512-0QYC8b24HWY8zjRnDTL6RiHfDbAWn63qb4LMj1Z4b076A4une81+z03Kg7l7mn/48PUTqoLptSXez8oknU8Clg==", + "version": "10.4.0", + "resolved": "https://registry.npmjs.org/espree/-/espree-10.4.0.tgz", + "integrity": "sha512-j6PAQ2uUr79PZhBjP5C5fhl8e39FmRnOjsD5lGnWrFU8i2G776tBK7+nP8KuQUTTyAZUwfQqXAgrVH5MbH9CYQ==", "dev": true, "license": "BSD-2-Clause", "dependencies": { - "acorn": "^8.14.0", + "acorn": "^8.15.0", "acorn-jsx": "^5.3.2", - "eslint-visitor-keys": "^4.2.0" + "eslint-visitor-keys": "^4.2.1" }, "engines": { "node": "^18.18.0 || ^20.9.0 || >=21.1.0" @@ -3103,9 +3163,9 @@ "license": "MIT" }, "node_modules/expect-type": { - "version": "1.2.2", - "resolved": "https://registry.npmjs.org/expect-type/-/expect-type-1.2.2.tgz", - "integrity": "sha512-JhFGDVJ7tmDJItKhYgJCGLOWjuK9vPxiXoUFLwLDc99NlmklilbiQJwoctZtt13+xMw91MCk/REan6MWHqDjyA==", + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/expect-type/-/expect-type-1.3.0.tgz", + "integrity": "sha512-knvyeauYhqjOYvQ66MznSMs83wmHrCycNEN6Ao+2AeYEfxUIkuiVxdEa1qlGEPK+We3n0THiDciYSsCcgW/DoA==", "dev": true, "license": "Apache-2.0", "engines": { @@ -3119,9 +3179,9 @@ "license": "MIT" }, "node_modules/fast-equals": { - "version": "5.3.2", - "resolved": "https://registry.npmjs.org/fast-equals/-/fast-equals-5.3.2.tgz", - "integrity": "sha512-6rxyATwPCkaFIL3JLqw8qXqMpIZ942pTX/tbQFkRsDGblS8tNGtlUauA/+mt6RUfqn/4MoEr+WDkYoIQbibWuQ==", + "version": "5.4.0", + "resolved": "https://registry.npmjs.org/fast-equals/-/fast-equals-5.4.0.tgz", + "integrity": "sha512-jt2DW/aNFNwke7AUd+Z+e6pz39KO5rzdbbFCg2sGafS4mk13MI7Z8O5z9cADNn5lhGODIgLwug6TZO2ctf7kcw==", "license": "MIT", "engines": { "node": ">=6.0.0" @@ -3229,9 +3289,9 @@ "license": "ISC" }, "node_modules/formik": { - "version": "2.4.6", - "resolved": "https://registry.npmjs.org/formik/-/formik-2.4.6.tgz", - "integrity": "sha512-A+2EI7U7aG296q2TLGvNapDNTZp1khVt5Vk0Q/fyfSROss0V/V6+txt2aJnwEos44IxTCW/LYAi/zgWzlevj+g==", + "version": "2.4.9", + "resolved": 
"https://registry.npmjs.org/formik/-/formik-2.4.9.tgz", + "integrity": "sha512-5nI94BMnlFDdQRBY4Sz39WkhxajZJ57Fzs8wVbtsQlm5ScKIR1QLYqv/ultBnobObtlUyxpxoLodpixrsf36Og==", "funding": [ { "type": "individual", @@ -3345,9 +3405,9 @@ } }, "node_modules/hls.js": { - "version": "1.5.20", - "resolved": "https://registry.npmjs.org/hls.js/-/hls.js-1.5.20.tgz", - "integrity": "sha512-uu0VXUK52JhihhnN/MVVo1lvqNNuhoxkonqgO3IpjvQiGpJBdIXMGkofjQb/j9zvV7a1SW8U9g1FslWx/1HOiQ==", + "version": "1.6.15", + "resolved": "https://registry.npmjs.org/hls.js/-/hls.js-1.6.15.tgz", + "integrity": "sha512-E3a5VwgXimGHwpRGV+WxRTKeSp2DW5DI5MWv34ulL3t5UNmyJWCQ1KmLEHbYzcfThfXG8amBL+fCYPneGHC4VA==", "license": "Apache-2.0" }, "node_modules/hoist-non-react-statics": { @@ -3539,9 +3599,9 @@ "license": "MIT" }, "node_modules/js-yaml": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-4.1.0.tgz", - "integrity": "sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA==", + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-4.1.1.tgz", + "integrity": "sha512-qQKT4zQxXl8lLwBtHMWwaTcGfFOZviOJet3Oy/xmGk2gZH677CJM9EvtfdSkgWcATZhj/55JZ0rmy3myCT5lsA==", "dev": true, "license": "MIT", "dependencies": { @@ -3552,22 +3612,22 @@ } }, "node_modules/jsdom": { - "version": "27.0.0", - "resolved": "https://registry.npmjs.org/jsdom/-/jsdom-27.0.0.tgz", - "integrity": "sha512-lIHeR1qlIRrIN5VMccd8tI2Sgw6ieYXSVktcSHaNe3Z5nE/tcPQYQWOq00wxMvYOsz+73eAkNenVvmPC6bba9A==", + "version": "27.3.0", + "resolved": "https://registry.npmjs.org/jsdom/-/jsdom-27.3.0.tgz", + "integrity": "sha512-GtldT42B8+jefDUC4yUKAvsaOrH7PDHmZxZXNgF2xMmymjUbRYJvpAybZAKEmXDGTM0mCsz8duOa4vTm5AY2Kg==", "dev": true, "license": "MIT", "dependencies": { - "@asamuzakjp/dom-selector": "^6.5.4", - "cssstyle": "^5.3.0", + "@acemir/cssom": "^0.9.28", + "@asamuzakjp/dom-selector": "^6.7.6", + "cssstyle": "^5.3.4", "data-urls": "^6.0.0", - "decimal.js": "^10.5.0", + "decimal.js": "^10.6.0", "html-encoding-sniffer": "^4.0.0", "http-proxy-agent": "^7.0.2", "https-proxy-agent": "^7.0.6", "is-potential-custom-element-name": "^1.0.1", - "parse5": "^7.3.0", - "rrweb-cssom": "^0.8.0", + "parse5": "^8.0.0", "saxes": "^6.0.0", "symbol-tree": "^3.2.4", "tough-cookie": "^6.0.0", @@ -3575,12 +3635,12 @@ "webidl-conversions": "^8.0.0", "whatwg-encoding": "^3.1.1", "whatwg-mimetype": "^4.0.0", - "whatwg-url": "^15.0.0", - "ws": "^8.18.2", + "whatwg-url": "^15.1.0", + "ws": "^8.18.3", "xml-name-validator": "^5.0.0" }, "engines": { - "node": ">=20" + "node": "^20.19.0 || ^22.12.0 || >=24.0.0" }, "peerDependencies": { "canvas": "^3.0.0" @@ -3692,9 +3752,9 @@ "license": "MIT" }, "node_modules/lodash-es": { - "version": "4.17.21", - "resolved": "https://registry.npmjs.org/lodash-es/-/lodash-es-4.17.21.tgz", - "integrity": "sha512-mKnC+QJ9pWVzv+C4/U3rRsHapFfHvQFoFB92e52xeyGMcX6/OlIl78je1u8vePzYZSkkogMPJ2yjxxsb89cxyw==", + "version": "4.17.22", + "resolved": "https://registry.npmjs.org/lodash-es/-/lodash-es-4.17.22.tgz", + "integrity": "sha512-XEawp1t0gxSi9x01glktRZ5HDy0HXqrM0x5pXQM98EaI0NxO6jVM7omDOxsuEo5UIASAnm2bRp1Jt/e0a2XU8Q==", "license": "MIT" }, "node_modules/lodash.clamp": { @@ -3743,11 +3803,11 @@ "license": "MIT" }, "node_modules/lru-cache": { - "version": "11.2.1", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-11.2.1.tgz", - "integrity": "sha512-r8LA6i4LP4EeWOhqBaZZjDWwehd1xUJPCJd9Sv300H0ZmcUER4+JPh7bqqZeqs1o5pgtgvXm+d9UGrB5zZGDiQ==", + "version": "11.2.4", + "resolved": 
"https://registry.npmjs.org/lru-cache/-/lru-cache-11.2.4.tgz", + "integrity": "sha512-B5Y16Jr9LB9dHVkh6ZevG+vAbOsNOYCX+sXvFWFu7B3Iz5mijW3zdbMyhsh8ANd2mSWBYdJgnqi+mL7/LrOPYg==", "dev": true, - "license": "ISC", + "license": "BlueOak-1.0.0", "engines": { "node": "20 || >=22" } @@ -3783,9 +3843,9 @@ } }, "node_modules/magic-string": { - "version": "0.30.19", - "resolved": "https://registry.npmjs.org/magic-string/-/magic-string-0.30.19.tgz", - "integrity": "sha512-2N21sPY9Ws53PZvsEpVtNuSW+ScYbQdp4b9qUaL+9QkHUrGFKo56Lg9Emg5s9V/qrtNBmiR01sYhUOwu3H+VOw==", + "version": "0.30.21", + "resolved": "https://registry.npmjs.org/magic-string/-/magic-string-0.30.21.tgz", + "integrity": "sha512-vd2F4YUyEXKGcLHoq+TEyCjxueSeHnFxyyjNp80yg0XV4vUhnDer/lvvlqM/arB5bXQN5K2/3oinyCRyx8T2CQ==", "dev": true, "license": "MIT", "dependencies": { @@ -3806,9 +3866,10 @@ "license": "MIT" }, "node_modules/min-document": { - "version": "2.19.0", - "resolved": "https://registry.npmjs.org/min-document/-/min-document-2.19.0.tgz", - "integrity": "sha512-9Wy1B3m3f66bPPmU5hdA4DR4PB2OfDU/+GS3yAB7IQozE3tqXaVv2zOjgla7MEGSRv95+ILmOuvhLkOK6wJtCQ==", + "version": "2.19.2", + "resolved": "https://registry.npmjs.org/min-document/-/min-document-2.19.2.tgz", + "integrity": "sha512-8S5I8db/uZN8r9HSLFVWPdJCvYOejMcEC82VIzNUc6Zkklf/d1gg2psfE79/vyhWOj4+J8MtwmoOz3TmvaGu5A==", + "license": "MIT", "dependencies": { "dom-walk": "^0.1.0" } @@ -3885,9 +3946,9 @@ } }, "node_modules/nanoid": { - "version": "3.3.9", - "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-3.3.9.tgz", - "integrity": "sha512-SppoicMGpZvbF1l3z4x7No3OlIjP7QJvC9XR7AhZr1kL133KHnKPztkKDc+Ir4aJ/1VhTySrtKhrsycmrMQfvg==", + "version": "3.3.11", + "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-3.3.11.tgz", + "integrity": "sha512-N8SpfPUnUp1bK+PMYW8qSWdl9U+wwNWI4QKxOYDy9JAro3WMX7p2OeVRF9v+347pnakNevPmiHhNmZ2HbFA76w==", "dev": true, "funding": [ { @@ -4000,9 +4061,9 @@ } }, "node_modules/parse5": { - "version": "7.3.0", - "resolved": "https://registry.npmjs.org/parse5/-/parse5-7.3.0.tgz", - "integrity": "sha512-IInvU7fabl34qmi9gY8XOVxhYyMyuH2xUNpb2q8/Y+7552KlejkRvqvD19nMoUW/uQGGbqNpA6Tufu5FL5BZgw==", + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/parse5/-/parse5-8.0.0.tgz", + "integrity": "sha512-9m4m5GSgXjL4AjumKzq1Fgfp3Z8rsvjRNbnkVwfu2ImRqE5D0LnY2QfDen18FSY9C573YU5XxSapdHZTZ2WolA==", "dev": true, "license": "MIT", "dependencies": { @@ -4096,9 +4157,9 @@ } }, "node_modules/postcss": { - "version": "8.5.3", - "resolved": "https://registry.npmjs.org/postcss/-/postcss-8.5.3.tgz", - "integrity": "sha512-dle9A3yYxlBSrt8Fu+IpjGT8SY8hN0mlaA6GY8t0P5PjIOZemULz/E2Bnm/2dcUOena75OTNkHI76uZBNUUq3A==", + "version": "8.5.6", + "resolved": "https://registry.npmjs.org/postcss/-/postcss-8.5.6.tgz", + "integrity": "sha512-3Ybi1tAuwAP9s0r1UQ2J4n5Y0G05bJkpUIO0/bI9MhwmD70S5aTWbXGBwxHrelT+XM1k6dM0pk+SwNkpTRN7Pg==", "dev": true, "funding": [ { @@ -4116,7 +4177,7 @@ ], "license": "MIT", "dependencies": { - "nanoid": "^3.3.8", + "nanoid": "^3.3.11", "picocolors": "^1.1.1", "source-map-js": "^1.2.1" }, @@ -4135,9 +4196,9 @@ } }, "node_modules/prettier": { - "version": "3.5.3", - "resolved": "https://registry.npmjs.org/prettier/-/prettier-3.5.3.tgz", - "integrity": "sha512-QQtaxnoDJeAkDvDKWCLiwIXkTgRhwYDEQCghU9Z6q03iyek/rxRh/2lC3HB7P8sWT2xC/y5JDctPLBIGzHKbhw==", + "version": "3.7.4", + "resolved": "https://registry.npmjs.org/prettier/-/prettier-3.7.4.tgz", + "integrity": "sha512-v6UNi1+3hSlVvv8fSaoUbggEM5VErKmmpGA7Pl3HF8V6uKY7rvClBOJlH6yNwQtfTueNkGVpOv/mtWL9L4bgRA==", "dev": true, 
"license": "MIT", "bin": { @@ -4222,33 +4283,33 @@ } }, "node_modules/react": { - "version": "19.1.1", - "resolved": "https://registry.npmjs.org/react/-/react-19.1.1.tgz", - "integrity": "sha512-w8nqGImo45dmMIfljjMwOGtbmC/mk4CMYhWIicdSflH91J9TyCyczcPFXJzrZ/ZXcgGRFeP6BU0BEJTw6tZdfQ==", + "version": "19.2.3", + "resolved": "https://registry.npmjs.org/react/-/react-19.2.3.tgz", + "integrity": "sha512-Ku/hhYbVjOQnXDZFv2+RibmLFGwFdeeKHFcOTlrt7xplBnya5OGn/hIRDsqDiSUcfORsDC7MPxwork8jBwsIWA==", "license": "MIT", "engines": { "node": ">=0.10.0" } }, "node_modules/react-dom": { - "version": "19.1.1", - "resolved": "https://registry.npmjs.org/react-dom/-/react-dom-19.1.1.tgz", - "integrity": "sha512-Dlq/5LAZgF0Gaz6yiqZCf6VCcZs1ghAJyrsu84Q/GT0gV+mCxbfmKNoGRKBYMJ8IEdGPqu49YWXD02GCknEDkw==", + "version": "19.2.3", + "resolved": "https://registry.npmjs.org/react-dom/-/react-dom-19.2.3.tgz", + "integrity": "sha512-yELu4WmLPw5Mr/lmeEpox5rw3RETacE++JgHqQzd2dg+YbJuat3jH4ingc+WPZhxaoFzdv9y33G+F7Nl5O0GBg==", "license": "MIT", "dependencies": { - "scheduler": "^0.26.0" + "scheduler": "^0.27.0" }, "peerDependencies": { - "react": "^19.1.1" + "react": "^19.2.3" } }, "node_modules/react-draggable": { - "version": "4.4.6", - "resolved": "https://registry.npmjs.org/react-draggable/-/react-draggable-4.4.6.tgz", - "integrity": "sha512-LtY5Xw1zTPqHkVmtM3X8MUOxNDOUhv/khTgBgrUvwaS064bwVvxT+q5El0uUFNx5IEPKXuRejr7UqLwBIg5pdw==", + "version": "4.5.0", + "resolved": "https://registry.npmjs.org/react-draggable/-/react-draggable-4.5.0.tgz", + "integrity": "sha512-VC+HBLEZ0XJxnOxVAZsdRi8rD04Iz3SiiKOoYzamjylUcju/hP9np/aZdLHf/7WOD268WMoNJMvYfB5yAK45cw==", "license": "MIT", "dependencies": { - "clsx": "^1.1.1", + "clsx": "^2.1.1", "prop-types": "^15.8.1" }, "peerDependencies": { @@ -4256,15 +4317,6 @@ "react-dom": ">= 16.3.0" } }, - "node_modules/react-draggable/node_modules/clsx": { - "version": "1.2.1", - "resolved": "https://registry.npmjs.org/clsx/-/clsx-1.2.1.tgz", - "integrity": "sha512-EcR6r5a8bj6pu3ycsa/E/cKVGuTgZJZdsyUYHOksG/UHIiKfjxzRxYJpyVBwYaQeOvghal9fcc4PidlgzugAQg==", - "license": "MIT", - "engines": { - "node": ">=6" - } - }, "node_modules/react-dropzone": { "version": "14.3.8", "resolved": "https://registry.npmjs.org/react-dropzone/-/react-dropzone-14.3.8.tgz", @@ -4327,9 +4379,9 @@ } }, "node_modules/react-remove-scroll": { - "version": "2.6.3", - "resolved": "https://registry.npmjs.org/react-remove-scroll/-/react-remove-scroll-2.6.3.tgz", - "integrity": "sha512-pnAi91oOk8g8ABQKGF5/M9qxmmOPxaAnopyTHYfqYEwJhyFrbbBtHuSgtKEoH0jpcxx5o3hXqH1mNd9/Oi+8iQ==", + "version": "2.7.2", + "resolved": "https://registry.npmjs.org/react-remove-scroll/-/react-remove-scroll-2.7.2.tgz", + "integrity": "sha512-Iqb9NjCCTt6Hf+vOdNIZGdTiH1QSqr27H/Ek9sv/a97gfueI/5h1s3yRi1nngzMUaOOToin5dI1dXKdXiF+u0Q==", "license": "MIT", "dependencies": { "react-remove-scroll-bar": "^2.3.7", @@ -4374,9 +4426,9 @@ } }, "node_modules/react-router": { - "version": "7.6.0", - "resolved": "https://registry.npmjs.org/react-router/-/react-router-7.6.0.tgz", - "integrity": "sha512-GGufuHIVCJDbnIAXP3P9Sxzq3UUsddG3rrI3ut1q6m0FI6vxVBF3JoPQ38+W/blslLH4a5Yutp8drkEpXoddGQ==", + "version": "7.11.0", + "resolved": "https://registry.npmjs.org/react-router/-/react-router-7.11.0.tgz", + "integrity": "sha512-uI4JkMmjbWCZc01WVP2cH7ZfSzH91JAZUDd7/nIprDgWxBV1TkkmLToFh7EbMTcMak8URFRa2YoBL/W8GWnCTQ==", "license": "MIT", "dependencies": { "cookie": "^1.0.1", @@ -4396,12 +4448,12 @@ } }, "node_modules/react-router-dom": { - "version": "7.6.0", - "resolved": 
"https://registry.npmjs.org/react-router-dom/-/react-router-dom-7.6.0.tgz", - "integrity": "sha512-DYgm6RDEuKdopSyGOWZGtDfSm7Aofb8CCzgkliTjtu/eDuB0gcsv6qdFhhi8HdtmA+KHkt5MfZ5K2PdzjugYsA==", + "version": "7.11.0", + "resolved": "https://registry.npmjs.org/react-router-dom/-/react-router-dom-7.11.0.tgz", + "integrity": "sha512-e49Ir/kMGRzFOOrYQBdoitq3ULigw4lKbAyKusnvtDu2t4dBX4AGYPrzNvorXmVuOyeakai6FUPW5MmibvVG8g==", "license": "MIT", "dependencies": { - "react-router": "7.6.0" + "react-router": "7.11.0" }, "engines": { "node": ">=20.0.0" @@ -4536,9 +4588,9 @@ } }, "node_modules/recharts": { - "version": "2.15.1", - "resolved": "https://registry.npmjs.org/recharts/-/recharts-2.15.1.tgz", - "integrity": "sha512-v8PUTUlyiDe56qUj82w/EDVuzEFXwEHp9/xOowGAZwfLjB9uAy3GllQVIYMWF6nU+qibx85WF75zD7AjqoT54Q==", + "version": "2.15.4", + "resolved": "https://registry.npmjs.org/recharts/-/recharts-2.15.4.tgz", + "integrity": "sha512-UT/q6fwS3c1dHbXv2uFgYJ9BMFHu3fwnd7AYZaEQhXuYQ4hgsxLvsUXzGdKeZrW5xopzDCvuA2N41WJ88I7zIw==", "license": "MIT", "dependencies": { "clsx": "^2.0.0", @@ -4593,12 +4645,6 @@ "node": ">=8" } }, - "node_modules/regenerator-runtime": { - "version": "0.14.1", - "resolved": "https://registry.npmjs.org/regenerator-runtime/-/regenerator-runtime-0.14.1.tgz", - "integrity": "sha512-dYnhHh0nJoMfnkZs6GmmhFknAGRrLznOu5nc9ML+EJxGvrx6H7teuevqVqCuPcPK//3eDrrjQhehXVx9cnkGdw==", - "license": "MIT" - }, "node_modules/require-from-string": { "version": "2.0.2", "resolved": "https://registry.npmjs.org/require-from-string/-/require-from-string-2.0.2.tgz", @@ -4610,12 +4656,12 @@ } }, "node_modules/resolve": { - "version": "1.22.10", - "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.22.10.tgz", - "integrity": "sha512-NPRy+/ncIMeDlTAsuqwKIiferiawhefFJtkNSW0qZJEqMEb+qBt/77B/jGeeek+F0uOeN05CDa6HXbbIgtVX4w==", + "version": "1.22.11", + "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.22.11.tgz", + "integrity": "sha512-RfqAvLnMl313r7c9oclB1HhUEAezcpLjz95wFH4LVuhk9JF/r22qmVP9AMmOU4vMX7Q8pN8jwNg/CSpdFnMjTQ==", "license": "MIT", "dependencies": { - "is-core-module": "^2.16.0", + "is-core-module": "^2.16.1", "path-parse": "^1.0.7", "supports-preserve-symlinks-flag": "^1.0.0" }, @@ -4639,13 +4685,13 @@ } }, "node_modules/rollup": { - "version": "4.35.0", - "resolved": "https://registry.npmjs.org/rollup/-/rollup-4.35.0.tgz", - "integrity": "sha512-kg6oI4g+vc41vePJyO6dHt/yl0Rz3Thv0kJeVQ3D1kS3E5XSuKbPc29G4IpT/Kv1KQwgHVcN+HtyS+HYLNSvQg==", + "version": "4.53.5", + "resolved": "https://registry.npmjs.org/rollup/-/rollup-4.53.5.tgz", + "integrity": "sha512-iTNAbFSlRpcHeeWu73ywU/8KuU/LZmNCSxp6fjQkJBD3ivUb8tpDrXhIxEzA05HlYMEwmtaUnb3RP+YNv162OQ==", "dev": true, "license": "MIT", "dependencies": { - "@types/estree": "1.0.6" + "@types/estree": "1.0.8" }, "bin": { "rollup": "dist/bin/rollup" @@ -4655,35 +4701,31 @@ "npm": ">=8.0.0" }, "optionalDependencies": { - "@rollup/rollup-android-arm-eabi": "4.35.0", - "@rollup/rollup-android-arm64": "4.35.0", - "@rollup/rollup-darwin-arm64": "4.35.0", - "@rollup/rollup-darwin-x64": "4.35.0", - "@rollup/rollup-freebsd-arm64": "4.35.0", - "@rollup/rollup-freebsd-x64": "4.35.0", - "@rollup/rollup-linux-arm-gnueabihf": "4.35.0", - "@rollup/rollup-linux-arm-musleabihf": "4.35.0", - "@rollup/rollup-linux-arm64-gnu": "4.35.0", - "@rollup/rollup-linux-arm64-musl": "4.35.0", - "@rollup/rollup-linux-loongarch64-gnu": "4.35.0", - "@rollup/rollup-linux-powerpc64le-gnu": "4.35.0", - "@rollup/rollup-linux-riscv64-gnu": "4.35.0", - "@rollup/rollup-linux-s390x-gnu": "4.35.0", - 
"@rollup/rollup-linux-x64-gnu": "4.35.0", - "@rollup/rollup-linux-x64-musl": "4.35.0", - "@rollup/rollup-win32-arm64-msvc": "4.35.0", - "@rollup/rollup-win32-ia32-msvc": "4.35.0", - "@rollup/rollup-win32-x64-msvc": "4.35.0", + "@rollup/rollup-android-arm-eabi": "4.53.5", + "@rollup/rollup-android-arm64": "4.53.5", + "@rollup/rollup-darwin-arm64": "4.53.5", + "@rollup/rollup-darwin-x64": "4.53.5", + "@rollup/rollup-freebsd-arm64": "4.53.5", + "@rollup/rollup-freebsd-x64": "4.53.5", + "@rollup/rollup-linux-arm-gnueabihf": "4.53.5", + "@rollup/rollup-linux-arm-musleabihf": "4.53.5", + "@rollup/rollup-linux-arm64-gnu": "4.53.5", + "@rollup/rollup-linux-arm64-musl": "4.53.5", + "@rollup/rollup-linux-loong64-gnu": "4.53.5", + "@rollup/rollup-linux-ppc64-gnu": "4.53.5", + "@rollup/rollup-linux-riscv64-gnu": "4.53.5", + "@rollup/rollup-linux-riscv64-musl": "4.53.5", + "@rollup/rollup-linux-s390x-gnu": "4.53.5", + "@rollup/rollup-linux-x64-gnu": "4.53.5", + "@rollup/rollup-linux-x64-musl": "4.53.5", + "@rollup/rollup-openharmony-arm64": "4.53.5", + "@rollup/rollup-win32-arm64-msvc": "4.53.5", + "@rollup/rollup-win32-ia32-msvc": "4.53.5", + "@rollup/rollup-win32-x64-gnu": "4.53.5", + "@rollup/rollup-win32-x64-msvc": "4.53.5", "fsevents": "~2.3.2" } }, - "node_modules/rrweb-cssom": { - "version": "0.8.0", - "resolved": "https://registry.npmjs.org/rrweb-cssom/-/rrweb-cssom-0.8.0.tgz", - "integrity": "sha512-guoltQEx+9aMf2gDZ0s62EcV8lsXR+0w8915TC3ITdn2YueuNjdAYh/levpU9nFaoChh9RUS5ZdQMrKfVEN9tw==", - "dev": true, - "license": "MIT" - }, "node_modules/safer-buffer": { "version": "2.1.2", "resolved": "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz", @@ -4705,15 +4747,15 @@ } }, "node_modules/scheduler": { - "version": "0.26.0", - "resolved": "https://registry.npmjs.org/scheduler/-/scheduler-0.26.0.tgz", - "integrity": "sha512-NlHwttCI/l5gCPR3D1nNXtWABUmBwvZpEQiD4IXSbIDq8BzLIK/7Ir5gTFSGZDUu37K5cMNp0hFtzO38sC7gWA==", + "version": "0.27.0", + "resolved": "https://registry.npmjs.org/scheduler/-/scheduler-0.27.0.tgz", + "integrity": "sha512-eNv+WrVbKu1f3vbYJT/xtiF5syA5HPIMtf9IgY/nKg0sWqzAUEvqY/xm7OcZc/qafLx/iO9FgOmeSAp4v5ti/Q==", "license": "MIT" }, "node_modules/set-cookie-parser": { - "version": "2.7.1", - "resolved": "https://registry.npmjs.org/set-cookie-parser/-/set-cookie-parser-2.7.1.tgz", - "integrity": "sha512-IOc8uWeOZgnb3ptbCURJWNjWUPcO3ZnTTdzsurqERrP6nPyv+paC55vJM0LpOlT2ne+Ix+9+CRG1MNLlyZ4GjQ==", + "version": "2.7.2", + "resolved": "https://registry.npmjs.org/set-cookie-parser/-/set-cookie-parser-2.7.2.tgz", + "integrity": "sha512-oeM1lpU/UvhTxw+g3cIfxXHyJRc/uidd3yK1P242gzHds0udQBYzs3y8j4gCCW+ZJ7ad0yctld8RYO+bdurlvw==", "license": "MIT" }, "node_modules/shebang-command": { @@ -4773,9 +4815,9 @@ "license": "MIT" }, "node_modules/std-env": { - "version": "3.9.0", - "resolved": "https://registry.npmjs.org/std-env/-/std-env-3.9.0.tgz", - "integrity": "sha512-UGvjygr6F6tpH7o2qyqR6QYpwraIjKSdtzyBdyytFOHmPZY917kwdwLG0RbOjWOnKmnm3PeHjaoLLMie7kPLQw==", + "version": "3.10.0", + "resolved": "https://registry.npmjs.org/std-env/-/std-env-3.10.0.tgz", + "integrity": "sha512-5GS12FdOZNliM5mAOxFRg7Ir0pWz8MdpYm6AY6VPkGpbA7ZzmbzNcBJQ0GPvvyWgcY7QAhCgf9Uy89I03faLkg==", "dev": true, "license": "MIT" }, @@ -4806,9 +4848,9 @@ } }, "node_modules/strip-literal": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/strip-literal/-/strip-literal-3.0.0.tgz", - "integrity": "sha512-TcccoMhJOM3OebGhSBEmp3UZ2SfDMZUEBdRA/9ynfLi8yYajyWX3JiXArcJt4Umh4vISpspkQIY8ZZoCqjbviA==", + "version": "3.1.0", + 
"resolved": "https://registry.npmjs.org/strip-literal/-/strip-literal-3.1.0.tgz", + "integrity": "sha512-8r3mkIM/2+PpjHoOtiAW8Rg3jJLHaV7xPwG+YRGrv6FP0wwk/toTpATxWYOW0BKdWwl82VT2tFYi5DlROa0Mxg==", "dev": true, "license": "MIT", "dependencies": { @@ -4864,9 +4906,9 @@ "license": "MIT" }, "node_modules/tabbable": { - "version": "6.2.0", - "resolved": "https://registry.npmjs.org/tabbable/-/tabbable-6.2.0.tgz", - "integrity": "sha512-Cat63mxsVJlzYvN51JmVXIgNoUokrIaT2zLclCXjRd8boZ0004U4KCs/sToJ75C6sdlByWxpYnb5Boif1VSFew==", + "version": "6.3.0", + "resolved": "https://registry.npmjs.org/tabbable/-/tabbable-6.3.0.tgz", + "integrity": "sha512-EIHvdY5bPLuWForiR/AN2Bxngzpuwn1is4asboytXtpTgsArc+WmSJKVLlhdh71u7jFcryDqB2A8lQvj78MkyQ==", "license": "MIT" }, "node_modules/tiny-case": { @@ -4949,22 +4991,22 @@ } }, "node_modules/tldts": { - "version": "7.0.15", - "resolved": "https://registry.npmjs.org/tldts/-/tldts-7.0.15.tgz", - "integrity": "sha512-heYRCiGLhtI+U/D0V8YM3QRwPfsLJiP+HX+YwiHZTnWzjIKC+ZCxQRYlzvOoTEc6KIP62B1VeAN63diGCng2hg==", + "version": "7.0.19", + "resolved": "https://registry.npmjs.org/tldts/-/tldts-7.0.19.tgz", + "integrity": "sha512-8PWx8tvC4jDB39BQw1m4x8y5MH1BcQ5xHeL2n7UVFulMPH/3Q0uiamahFJ3lXA0zO2SUyRXuVVbWSDmstlt9YA==", "dev": true, "license": "MIT", "dependencies": { - "tldts-core": "^7.0.15" + "tldts-core": "^7.0.19" }, "bin": { "tldts": "bin/cli.js" } }, "node_modules/tldts-core": { - "version": "7.0.15", - "resolved": "https://registry.npmjs.org/tldts-core/-/tldts-core-7.0.15.tgz", - "integrity": "sha512-YBkp2VfS9VTRMPNL2PA6PMESmxV1JEVoAr5iBlZnB5JG3KUrWzNCB3yNNkRa2FZkqClaBgfNYCp8PgpYmpjkZw==", + "version": "7.0.19", + "resolved": "https://registry.npmjs.org/tldts-core/-/tldts-core-7.0.19.tgz", + "integrity": "sha512-lJX2dEWx0SGH4O6p+7FPwYmJ/bu1JbcGJ8RLaG9b7liIgZ85itUVEPbMtWRVrde/0fnDPEPHW10ZsKW3kVsE9A==", "dev": true, "license": "MIT" }, @@ -5020,9 +5062,9 @@ } }, "node_modules/type-fest": { - "version": "4.37.0", - "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-4.37.0.tgz", - "integrity": "sha512-S/5/0kFftkq27FPNye0XM1e2NsnoD/3FS+pBmbjmmtLT6I+i344KoOf7pvXreaFsDamWeaJX55nczA1m5PsBDg==", + "version": "4.41.0", + "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-4.41.0.tgz", + "integrity": "sha512-TeTSQ6H5YHvpqVwBRcnLDCBnDOHWYu7IvGbHT6N8AOymcr9PJGjc1GTtiWZTYg0NCgYwvnYWEkVChQAr9bjfwA==", "license": "(MIT OR CC0-1.0)", "engines": { "node": ">=16" @@ -5107,19 +5149,6 @@ } } }, - "node_modules/use-resize-observer": { - "version": "9.1.0", - "resolved": "https://registry.npmjs.org/use-resize-observer/-/use-resize-observer-9.1.0.tgz", - "integrity": "sha512-R25VqO9Wb3asSD4eqtcxk8sJalvIOYBqS8MNZlpDSQ4l4xMQxC/J7Id9HoTqPq8FwULIn0PVW+OAqF2dyYbjow==", - "license": "MIT", - "dependencies": { - "@juggle/resize-observer": "^3.3.1" - }, - "peerDependencies": { - "react": "16.8.0 - 18", - "react-dom": "16.8.0 - 18" - } - }, "node_modules/use-sidecar": { "version": "1.1.3", "resolved": "https://registry.npmjs.org/use-sidecar/-/use-sidecar-1.1.3.tgz", @@ -5165,13 +5194,13 @@ } }, "node_modules/video.js": { - "version": "8.22.0", - "resolved": "https://registry.npmjs.org/video.js/-/video.js-8.22.0.tgz", - "integrity": "sha512-xge2kpjsvC0zgFJ1cqt+wTqsi21+huFswlonPFh7qiplypsb4FN/D2Rz6bWdG/S9eQaPHfWHsarmJL/7D3DHoA==", + "version": "8.23.4", + "resolved": "https://registry.npmjs.org/video.js/-/video.js-8.23.4.tgz", + "integrity": "sha512-qI0VTlYmKzEqRsz1Nppdfcaww4RSxZAq77z2oNSl3cNg2h6do5C8Ffl0KqWQ1OpD8desWXsCrde7tKJ9gGTEyQ==", "license": "Apache-2.0", "dependencies": { 
"@babel/runtime": "^7.12.5", - "@videojs/http-streaming": "^3.17.0", + "@videojs/http-streaming": "^3.17.2", "@videojs/vhs-utils": "^4.1.1", "@videojs/xhr": "2.7.0", "aes-decrypter": "^4.0.2", @@ -5216,24 +5245,24 @@ } }, "node_modules/vite": { - "version": "6.3.5", - "resolved": "https://registry.npmjs.org/vite/-/vite-6.3.5.tgz", - "integrity": "sha512-cZn6NDFE7wdTpINgs++ZJ4N49W2vRp8LCKrn3Ob1kYNtOo21vfDoaV5GzBfLU4MovSAB8uNRm4jgzVQZ+mBzPQ==", + "version": "7.3.0", + "resolved": "https://registry.npmjs.org/vite/-/vite-7.3.0.tgz", + "integrity": "sha512-dZwN5L1VlUBewiP6H9s2+B3e3Jg96D0vzN+Ry73sOefebhYr9f94wwkMNN/9ouoU8pV1BqA1d1zGk8928cx0rg==", "dev": true, "license": "MIT", "dependencies": { - "esbuild": "^0.25.0", - "fdir": "^6.4.4", - "picomatch": "^4.0.2", - "postcss": "^8.5.3", - "rollup": "^4.34.9", - "tinyglobby": "^0.2.13" + "esbuild": "^0.27.0", + "fdir": "^6.5.0", + "picomatch": "^4.0.3", + "postcss": "^8.5.6", + "rollup": "^4.43.0", + "tinyglobby": "^0.2.15" }, "bin": { "vite": "bin/vite.js" }, "engines": { - "node": "^18.0.0 || ^20.0.0 || >=22.0.0" + "node": "^20.19.0 || >=22.12.0" }, "funding": { "url": "https://github.com/vitejs/vite?sponsor=1" @@ -5242,14 +5271,14 @@ "fsevents": "~2.3.3" }, "peerDependencies": { - "@types/node": "^18.0.0 || ^20.0.0 || >=22.0.0", + "@types/node": "^20.19.0 || >=22.12.0", "jiti": ">=1.21.0", - "less": "*", + "less": "^4.0.0", "lightningcss": "^1.21.0", - "sass": "*", - "sass-embedded": "*", - "stylus": "*", - "sugarss": "*", + "sass": "^1.70.0", + "sass-embedded": "^1.70.0", + "stylus": ">=0.54.8", + "sugarss": "^5.0.0", "terser": "^5.16.0", "tsx": "^4.8.1", "yaml": "^2.4.2" @@ -5411,8 +5440,7 @@ }, "node_modules/webworkify-webpack": { "version": "2.1.5", - "resolved": "git+ssh://git@github.com/xqq/webworkify-webpack.git", - "integrity": "sha512-W8Bg+iLq52d2GFvwabPNCIDCgMHcW3g68Tr8zwpJliEz2cKBIKYL3T0VdYeZWhz5rOxWRBBEdF931fquSO6iCQ==", + "resolved": "git+ssh://git@github.com/xqq/webworkify-webpack.git#24d1e719b4a6cac37a518b2bb10fe124527ef4ef", "license": "MIT" }, "node_modules/whatwg-encoding": { @@ -5535,9 +5563,9 @@ "license": "MIT" }, "node_modules/yaml": { - "version": "2.8.1", - "resolved": "https://registry.npmjs.org/yaml/-/yaml-2.8.1.tgz", - "integrity": "sha512-lcYcMxX2PO9XMGvAJkJ3OsNMw+/7FKes7/hgerGUYWIoWu5j/+YQqcZr5JnPZWzOsEBgMbSbiSTn/dv/69Mkpw==", + "version": "2.8.2", + "resolved": "https://registry.npmjs.org/yaml/-/yaml-2.8.2.tgz", + "integrity": "sha512-mplynKqc1C2hTVYxd0PU2xQAc22TI1vShAYGksCCfxbn/dFwnHTNi1bvYsBTkhdUNtGIf5xNOg938rrSSYvS9A==", "dev": true, "license": "ISC", "optional": true, @@ -5547,6 +5575,9 @@ }, "engines": { "node": ">= 14.6" + }, + "funding": { + "url": "https://github.com/sponsors/eemeli" } }, "node_modules/yocto-queue": { @@ -5563,9 +5594,9 @@ } }, "node_modules/yup": { - "version": "1.6.1", - "resolved": "https://registry.npmjs.org/yup/-/yup-1.6.1.tgz", - "integrity": "sha512-JED8pB50qbA4FOkDol0bYF/p60qSEDQqBD0/qeIrUCG1KbPBIQ776fCUNb9ldbPcSTxA69g/47XTo4TqWiuXOA==", + "version": "1.7.1", + "resolved": "https://registry.npmjs.org/yup/-/yup-1.7.1.tgz", + "integrity": "sha512-GKHFX2nXul2/4Dtfxhozv701jLQHdf6J34YDh2cEkpqoo8le5Mg6/LrdseVLrFarmFygZTlfIhHx/QKfb/QWXw==", "license": "MIT", "dependencies": { "property-expr": "^2.0.5", @@ -5587,9 +5618,9 @@ } }, "node_modules/zustand": { - "version": "5.0.3", - "resolved": "https://registry.npmjs.org/zustand/-/zustand-5.0.3.tgz", - "integrity": "sha512-14fwWQtU3pH4dE0dOpdMiWjddcH+QzKIgk1cl8epwSE7yag43k/AD/m4L6+K7DytAOr9gGBe3/EXj9g7cdostg==", + "version": "5.0.9", + 
"resolved": "https://registry.npmjs.org/zustand/-/zustand-5.0.9.tgz", + "integrity": "sha512-ALBtUj0AfjJt3uNRQoL1tL2tMvj6Gp/6e39dnfT6uzpelGru8v1tPOGBzayOWbPJvujM8JojDk3E1LxeFisBNg==", "license": "MIT", "engines": { "node": ">=12.20.0" diff --git a/frontend/package.json b/frontend/package.json index fea6b73e..ff5be72d 100644 --- a/frontend/package.json +++ b/frontend/package.json @@ -54,18 +54,21 @@ "@types/react": "^19.1.0", "@types/react-dom": "^19.1.0", "@vitejs/plugin-react-swc": "^4.1.0", - "eslint": "^9.21.0", + "eslint": "^9.27.0", "eslint-plugin-react-hooks": "^5.1.0", "eslint-plugin-react-refresh": "^0.4.19", "globals": "^15.15.0", "jsdom": "^27.0.0", "prettier": "^3.5.3", - "vite": "^6.2.0", + "vite": "^7.1.7", "vitest": "^3.2.4" }, "resolutions": { "vite": "7.1.7", "react": "19.1.0", "react-dom": "19.1.0" + }, + "overrides": { + "js-yaml": "^4.1.1" } } From 73956924f5f17740c0c4a059f43c7cfa13478240 Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Thu, 18 Dec 2025 15:26:08 -0600 Subject: [PATCH 103/220] =?UTF-8?q?Enhancement:=20Stream=20group=20as=20av?= =?UTF-8?q?ailable=20hash=20option:=20Users=20can=20now=20select=20'Group'?= =?UTF-8?q?=20as=20a=20hash=20key=20option=20in=20Settings=20=E2=86=92=20S?= =?UTF-8?q?tream=20Settings=20=E2=86=92=20M3U=20Hash=20Key,=20allowing=20s?= =?UTF-8?q?treams=20to=20be=20differentiated=20by=20their=20group=20member?= =?UTF-8?q?ship=20in=20addition=20to=20name,=20URL,=20TVG-ID,=20and=20M3U?= =?UTF-8?q?=20ID?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- CHANGELOG.md | 1 + apps/channels/models.py | 4 ++-- apps/m3u/tasks.py | 4 ++-- core/tasks.py | 3 ++- frontend/src/pages/Settings.jsx | 4 ++++ 5 files changed, 11 insertions(+), 5 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index d5532173..b6ff642d 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -11,6 +11,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 - VOD client stop button in Stats page: Users can now disconnect individual VOD clients from the Stats view, similar to the existing channel client disconnect functionality. 
- Automated configuration backup/restore system with scheduled backups, retention policies, and async task processing - Thanks [@stlalpha](https://github.com/stlalpha) (Closes #153) +- Stream group as available hash option: Users can now select 'Group' as a hash key option in Settings → Stream Settings → M3U Hash Key, allowing streams to be differentiated by their group membership in addition to name, URL, TVG-ID, and M3U ID ### Changed diff --git a/apps/channels/models.py b/apps/channels/models.py index 3dfb392b..88df3661 100644 --- a/apps/channels/models.py +++ b/apps/channels/models.py @@ -119,11 +119,11 @@ class Stream(models.Model): return self.name or self.url or f"Stream ID {self.id}" @classmethod - def generate_hash_key(cls, name, url, tvg_id, keys=None, m3u_id=None): + def generate_hash_key(cls, name, url, tvg_id, keys=None, m3u_id=None, group=None): if keys is None: keys = CoreSettings.get_m3u_hash_key().split(",") - stream_parts = {"name": name, "url": url, "tvg_id": tvg_id, "m3u_id": m3u_id} + stream_parts = {"name": name, "url": url, "tvg_id": tvg_id, "m3u_id": m3u_id, "group": group} hash_parts = {key: stream_parts[key] for key in keys if key in stream_parts} diff --git a/apps/m3u/tasks.py b/apps/m3u/tasks.py index cb82402e..87759ab9 100644 --- a/apps/m3u/tasks.py +++ b/apps/m3u/tasks.py @@ -792,7 +792,7 @@ def process_xc_category_direct(account_id, batch, groups, hash_keys): group_title = group_name stream_hash = Stream.generate_hash_key( - name, url, tvg_id, hash_keys, m3u_id=account_id + name, url, tvg_id, hash_keys, m3u_id=account_id, group=group_title ) stream_props = { "name": name, @@ -966,7 +966,7 @@ def process_m3u_batch_direct(account_id, batch, groups, hash_keys): ) continue - stream_hash = Stream.generate_hash_key(name, url, tvg_id, hash_keys, m3u_id=account_id) + stream_hash = Stream.generate_hash_key(name, url, tvg_id, hash_keys, m3u_id=account_id, group=group_title) stream_props = { "name": name, "url": url, diff --git a/core/tasks.py b/core/tasks.py index f757613b..207e7c5e 100644 --- a/core/tasks.py +++ b/core/tasks.py @@ -513,7 +513,8 @@ def rehash_streams(keys): for obj in batch: # Generate new hash - new_hash = Stream.generate_hash_key(obj.name, obj.url, obj.tvg_id, keys, m3u_id=obj.m3u_account_id) + group_name = obj.channel_group.name if obj.channel_group else None + new_hash = Stream.generate_hash_key(obj.name, obj.url, obj.tvg_id, keys, m3u_id=obj.m3u_account_id, group=group_name) # Check if this hash already exists in our tracking dict or in database if new_hash in hash_keys: diff --git a/frontend/src/pages/Settings.jsx b/frontend/src/pages/Settings.jsx index 74de842e..46ad4710 100644 --- a/frontend/src/pages/Settings.jsx +++ b/frontend/src/pages/Settings.jsx @@ -1056,6 +1056,10 @@ const SettingsPage = () => { value: 'm3u_id', label: 'M3U ID', }, + { + value: 'group', + label: 'Group', + }, ]} {...form.getInputProps('m3u-hash-key')} key={form.key('m3u-hash-key')} From e78c18c47333ea8ff813210e0a4e48a8e214e78b Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Thu, 18 Dec 2025 16:11:26 -0600 Subject: [PATCH 104/220] Bug Fix: XC get_simple_data_table now returns the id of the program in the database and epg_id the epg id from the matched epg. 
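An abridged, hypothetical sketch of one resulting epg_listings entry (field values below are invented for illustration; only the affected fields are shown, and the selection logic mirrors the diff that follows):

    # Hypothetical example -- before this fix, "id" and "epg_id" were always "0".
    listing = {
        "id": "4211",             # Program.id for database programs; a synthetic hash for generated dummy programs
        "epg_id": "17",           # EPGData.id of the channel's matched EPG source; "0" when unmatched
        "title": "U29tZSBTaG93",  # base64-encoded title ("Some Show"), unchanged by this fix
    }
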
--- CHANGELOG.md | 2 ++ apps/output/views.py | 18 ++++++++++++++---- 2 files changed, 16 insertions(+), 4 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index b6ff642d..15a176a8 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -30,6 +30,8 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 - nginx now gracefully handles hosts without IPv6 support by automatically disabling IPv6 binding at startup (Fixes #744) - XtreamCodes EPG API now returns correct date/time format for start/end fields and proper string types for timestamps and channel_id - XtreamCodes EPG API now handles None values for title and description fields to prevent AttributeError +- XtreamCodes EPG `id` field now provides unique identifiers per program listing instead of always returning "0" for better client EPG handling +- XtreamCodes EPG `epg_id` field now correctly returns the EPGData record ID (representing the EPG source/channel mapping) instead of a dummy value ## [0.14.0] - 2025-12-09 diff --git a/apps/output/views.py b/apps/output/views.py index 1710fa4d..635bb9d9 100644 --- a/apps/output/views.py +++ b/apps/output/views.py @@ -2292,17 +2292,27 @@ def xc_get_epg(request, user, short=False): output = {"epg_listings": []} for program in programs: - id = "0" - epg_id = "0" title = program['title'] if isinstance(program, dict) else program.title description = program['description'] if isinstance(program, dict) else program.description start = program["start_time"] if isinstance(program, dict) else program.start_time end = program["end_time"] if isinstance(program, dict) else program.end_time + # For database programs, use actual ID; for generated dummy programs, create synthetic ID + if isinstance(program, dict): + # Generated dummy program - create unique ID from channel + timestamp + program_id = str(abs(hash(f"{channel_id}_{int(start.timestamp())}"))) + else: + # Database program - use actual ID + program_id = str(program.id) + + # epg_id refers to the EPG source/channel mapping in XC panels + # Use the actual EPGData ID when available, otherwise fall back to 0 + epg_id = str(channel.epg_data.id) if channel.epg_data else "0" + program_output = { - "id": f"{id}", - "epg_id": f"{epg_id}", + "id": program_id, + "epg_id": epg_id, "title": base64.b64encode((title or "").encode()).decode(), "lang": "", "start": start.strftime("%Y-%m-%d %H:%M:%S"), From de31826137bb6545a5e1730958aba4600ce98a9c Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Thu, 18 Dec 2025 16:54:59 -0600 Subject: [PATCH 105/220] refactor: externalize Redis and Celery configuration via environment variables Replace hardcoded localhost:6379 values throughout codebase with environment-based configuration. Add REDIS_PORT support and allow REDIS_URL override for external Redis services. Configure Celery broker/result backend to use Redis settings with environment variable overrides. 
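A minimal sketch of the resulting resolution order, assuming an external Redis service (the hostname, port, and db below are example values, not defaults):

    # Hypothetical deployment values; any of these may also be left unset.
    import os
    os.environ.setdefault("REDIS_HOST", "redis.example.internal")
    os.environ.setdefault("REDIS_PORT", "6380")
    os.environ.setdefault("REDIS_DB", "1")
    # With no explicit REDIS_URL / CELERY_BROKER_URL / CELERY_RESULT_BACKEND set,
    # settings.py now derives all three from the components above:
    #   redis://redis.example.internal:6380/1

Setting REDIS_URL, CELERY_BROKER_URL, or CELERY_RESULT_BACKEND explicitly still takes precedence over the derived default.
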
Closes #762 --- CHANGELOG.md | 2 ++ apps/proxy/ts_proxy/client_manager.py | 6 ++++-- apps/proxy/vod_proxy/connection_manager.py | 6 +++++- apps/proxy/vod_proxy/views.py | 6 +++++- core/views.py | 4 +++- dispatcharr/persistent_lock.py | 8 ++++++-- dispatcharr/settings.py | 11 +++++++---- 7 files changed, 32 insertions(+), 11 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 15a176a8..370efa71 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -21,6 +21,8 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 - Updated dependencies: Django (5.2.4 → 5.2.9) includes CVE security patch, psycopg2-binary (2.9.10 → 2.9.11), celery (5.5.3 → 5.6.0), djangorestframework (3.16.0 → 3.16.1), requests (2.32.4 → 2.32.5), psutil (7.0.0 → 7.1.3), gevent (25.5.1 → 25.9.1), rapidfuzz (3.13.0 → 3.14.3), torch (2.7.1 → 2.9.1), sentence-transformers (5.1.0 → 5.2.0), lxml (6.0.0 → 6.0.2) (Closes #662) - Frontend dependencies updated: Vite (6.2.0 → 7.1.7), ESLint (9.21.0 → 9.27.0), and related packages; added npm `overrides` to enforce js-yaml@^4.1.1 for transitive security fix. All 6 reported vulnerabilities resolved with `npm audit fix`. - Floating video player now supports resizing via a drag handles, with minimum size enforcement and viewport/page boundary constraints to keep it visible. +- Redis connection settings now fully configurable via environment variables (`REDIS_HOST`, `REDIS_PORT`, `REDIS_DB`, `REDIS_URL`), replacing hardcoded `localhost:6379` values throughout the codebase. This enables use of external Redis services in production deployments. (Closes #762) +- Celery broker and result backend URLs now respect `REDIS_HOST`/`REDIS_PORT`/`REDIS_DB` settings as defaults, with `CELERY_BROKER_URL` and `CELERY_RESULT_BACKEND` environment variables available for override. 
### Fixed diff --git a/apps/proxy/ts_proxy/client_manager.py b/apps/proxy/ts_proxy/client_manager.py index bffecdde..a361bfa1 100644 --- a/apps/proxy/ts_proxy/client_manager.py +++ b/apps/proxy/ts_proxy/client_manager.py @@ -48,9 +48,11 @@ class ClientManager: # Import here to avoid potential import issues from apps.proxy.ts_proxy.channel_status import ChannelStatus import redis + from django.conf import settings - # Get all channels from Redis - redis_client = redis.Redis.from_url('redis://localhost:6379', decode_responses=True) + # Get all channels from Redis using settings + redis_url = getattr(settings, 'REDIS_URL', 'redis://localhost:6379/0') + redis_client = redis.Redis.from_url(redis_url, decode_responses=True) all_channels = [] cursor = 0 diff --git a/apps/proxy/vod_proxy/connection_manager.py b/apps/proxy/vod_proxy/connection_manager.py index dea5759b..ec0bffa5 100644 --- a/apps/proxy/vod_proxy/connection_manager.py +++ b/apps/proxy/vod_proxy/connection_manager.py @@ -97,7 +97,11 @@ class PersistentVODConnection: # First check if we have a pre-stored content length from HEAD request try: import redis - r = redis.StrictRedis(host='localhost', port=6379, db=0, decode_responses=True) + from django.conf import settings + redis_host = getattr(settings, 'REDIS_HOST', 'localhost') + redis_port = int(getattr(settings, 'REDIS_PORT', 6379)) + redis_db = int(getattr(settings, 'REDIS_DB', 0)) + r = redis.StrictRedis(host=redis_host, port=redis_port, db=redis_db, decode_responses=True) content_length_key = f"vod_content_length:{self.session_id}" stored_length = r.get(content_length_key) if stored_length: diff --git a/apps/proxy/vod_proxy/views.py b/apps/proxy/vod_proxy/views.py index f3aca3fc..2ec95cc3 100644 --- a/apps/proxy/vod_proxy/views.py +++ b/apps/proxy/vod_proxy/views.py @@ -329,7 +329,11 @@ class VODStreamView(View): # Store the total content length in Redis for the persistent connection to use try: import redis - r = redis.StrictRedis(host='localhost', port=6379, db=0, decode_responses=True) + from django.conf import settings + redis_host = getattr(settings, 'REDIS_HOST', 'localhost') + redis_port = int(getattr(settings, 'REDIS_PORT', 6379)) + redis_db = int(getattr(settings, 'REDIS_DB', 0)) + r = redis.StrictRedis(host=redis_host, port=redis_port, db=redis_db, decode_responses=True) content_length_key = f"vod_content_length:{session_id}" r.set(content_length_key, total_size, ex=1800) # Store for 30 minutes logger.info(f"[VOD-HEAD] Stored total content length {total_size} for session {session_id}") diff --git a/core/views.py b/core/views.py index d10df027..312d8836 100644 --- a/core/views.py +++ b/core/views.py @@ -37,7 +37,9 @@ def stream_view(request, channel_uuid): """ try: redis_host = getattr(settings, "REDIS_HOST", "localhost") - redis_client = redis.Redis(host=settings.REDIS_HOST, port=6379, db=int(getattr(settings, "REDIS_DB", "0"))) + redis_port = int(getattr(settings, "REDIS_PORT", 6379)) + redis_db = int(getattr(settings, "REDIS_DB", "0")) + redis_client = redis.Redis(host=redis_host, port=redis_port, db=redis_db) # Retrieve the channel by the provided stream_id. 
channel = Channel.objects.get(uuid=channel_uuid) diff --git a/dispatcharr/persistent_lock.py b/dispatcharr/persistent_lock.py index 360c9b5d..27d480be 100644 --- a/dispatcharr/persistent_lock.py +++ b/dispatcharr/persistent_lock.py @@ -73,8 +73,12 @@ class PersistentLock: # Example usage (for testing purposes only): if __name__ == "__main__": - # Connect to Redis on localhost; adjust connection parameters as needed. - client = redis.Redis(host="localhost", port=6379, db=0) + import os + # Connect to Redis using environment variables; adjust connection parameters as needed. + redis_host = os.environ.get("REDIS_HOST", "localhost") + redis_port = int(os.environ.get("REDIS_PORT", 6379)) + redis_db = int(os.environ.get("REDIS_DB", 0)) + client = redis.Redis(host=redis_host, port=redis_port, db=redis_db) lock = PersistentLock(client, "lock:example_account", lock_timeout=120) if lock.acquire(): diff --git a/dispatcharr/settings.py b/dispatcharr/settings.py index 556fb39d..1a9a1a44 100644 --- a/dispatcharr/settings.py +++ b/dispatcharr/settings.py @@ -6,6 +6,7 @@ BASE_DIR = Path(__file__).resolve().parent.parent SECRET_KEY = os.environ.get("DJANGO_SECRET_KEY") REDIS_HOST = os.environ.get("REDIS_HOST", "localhost") +REDIS_PORT = int(os.environ.get("REDIS_PORT", 6379)) REDIS_DB = os.environ.get("REDIS_DB", "0") # Set DEBUG to True for development, False for production @@ -118,7 +119,7 @@ CHANNEL_LAYERS = { "default": { "BACKEND": "channels_redis.core.RedisChannelLayer", "CONFIG": { - "hosts": [(REDIS_HOST, 6379, REDIS_DB)], # Ensure Redis is running + "hosts": [(REDIS_HOST, REDIS_PORT, REDIS_DB)], # Ensure Redis is running }, }, } @@ -184,8 +185,10 @@ STATICFILES_DIRS = [ DEFAULT_AUTO_FIELD = "django.db.models.BigAutoField" AUTH_USER_MODEL = "accounts.User" -CELERY_BROKER_URL = os.environ.get("CELERY_BROKER_URL", "redis://localhost:6379/0") -CELERY_RESULT_BACKEND = CELERY_BROKER_URL +# Build default Redis URL from components for Celery +_default_redis_url = f"redis://{REDIS_HOST}:{REDIS_PORT}/{REDIS_DB}" +CELERY_BROKER_URL = os.environ.get("CELERY_BROKER_URL", _default_redis_url) +CELERY_RESULT_BACKEND = os.environ.get("CELERY_RESULT_BACKEND", CELERY_BROKER_URL) # Configure Redis key prefix CELERY_RESULT_BACKEND_TRANSPORT_OPTIONS = { @@ -249,7 +252,7 @@ SIMPLE_JWT = { } # Redis connection settings -REDIS_URL = "redis://localhost:6379/0" +REDIS_URL = os.environ.get("REDIS_URL", f"redis://{REDIS_HOST}:{REDIS_PORT}/{REDIS_DB}") REDIS_SOCKET_TIMEOUT = 60 # Socket timeout in seconds REDIS_SOCKET_CONNECT_TIMEOUT = 5 # Connection timeout in seconds REDIS_HEALTH_CHECK_INTERVAL = 15 # Health check every 15 seconds From 601f7d02976fa3543be3b4b50c05f4dfe8ddfefb Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Thu, 18 Dec 2025 16:57:43 -0600 Subject: [PATCH 106/220] changelog: Update changelog for DVR bug fix. 
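For reference, a hedged sketch of the encoding behavior the changelog entry below records (the endpoint path here is illustrative only, not the project's actual route):

    # Hypothetical TVG ID containing a slash; without percent-encoding,
    # the URL router would split the final path segment at the "/".
    from urllib.parse import quote
    tvg_id = "sports/hd.uk"
    path = f"/api/dvr/series-rules/{quote(tvg_id, safe='')}/"
    # -> "/api/dvr/series-rules/sports%2Fhd.uk/"
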
--- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 370efa71..0109277c 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -26,6 +26,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 ### Fixed +- DVR series rule deletion now properly handles TVG IDs that contain slashes by encoding them in the URL path (Fixes #697) - VOD episode processing now correctly handles duplicate episodes (same episode in multiple languages/qualities) by reusing Episode records across multiple M3UEpisodeRelation entries instead of attempting to create duplicates (Fixes #556) - XtreamCodes series streaming endpoint now correctly handles episodes with multiple streams (different languages/qualities) by selecting the best available stream based on account priority (Fixes #569) - XtreamCodes series info API now returns unique episodes instead of duplicate entries when multiple streams exist for the same episode (different languages/qualities) From b83f12809fe7c7acaafc5eb13dc8af7e23438bb8 Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Thu, 18 Dec 2025 17:18:44 -0600 Subject: [PATCH 107/220] Enhancement: Add HEADER_HEIGHT and ERROR_HEIGHT constants for improved layout calculations in FloatingVideo component --- frontend/src/components/FloatingVideo.jsx | 64 ++++++++++++++++------- 1 file changed, 44 insertions(+), 20 deletions(-) diff --git a/frontend/src/components/FloatingVideo.jsx b/frontend/src/components/FloatingVideo.jsx index 857f67aa..557767ed 100644 --- a/frontend/src/components/FloatingVideo.jsx +++ b/frontend/src/components/FloatingVideo.jsx @@ -30,6 +30,8 @@ export default function FloatingVideo() { const MIN_WIDTH = 220; const MIN_HEIGHT = 124; const VISIBLE_MARGIN = 48; // keep part of the window visible when dragging + const HEADER_HEIGHT = 38; // height of the close button header area + const ERROR_HEIGHT = 45; // approximate height of error message area when displayed const HANDLE_SIZE = 18; const HANDLE_OFFSET = 0; const resizeHandleBaseStyle = { @@ -404,34 +406,42 @@ export default function FloatingVideo() { (x, y) => { if (typeof window === 'undefined') return { x, y }; + const totalHeight = videoSize.height + HEADER_HEIGHT + ERROR_HEIGHT; const minX = -(videoSize.width - VISIBLE_MARGIN); - const minY = -(videoSize.height - VISIBLE_MARGIN); - const maxX = window.innerWidth - VISIBLE_MARGIN; - const maxY = window.innerHeight - VISIBLE_MARGIN; + const minY = -(totalHeight - VISIBLE_MARGIN); + const maxX = window.innerWidth - videoSize.width; + const maxY = window.innerHeight - totalHeight; return { x: Math.min(Math.max(x, minX), maxX), y: Math.min(Math.max(y, minY), maxY), }; }, - [VISIBLE_MARGIN, videoSize.height, videoSize.width] + [ + VISIBLE_MARGIN, + HEADER_HEIGHT, + ERROR_HEIGHT, + videoSize.height, + videoSize.width, + ] ); const clampToVisibleWithSize = useCallback( (x, y, width, height) => { if (typeof window === 'undefined') return { x, y }; + const totalHeight = height + HEADER_HEIGHT + ERROR_HEIGHT; const minX = -(width - VISIBLE_MARGIN); - const minY = -(height - VISIBLE_MARGIN); - const maxX = window.innerWidth - VISIBLE_MARGIN; - const maxY = window.innerHeight - VISIBLE_MARGIN; + const minY = -(totalHeight - VISIBLE_MARGIN); + const maxX = window.innerWidth - width; + const maxY = window.innerHeight - totalHeight; return { x: Math.min(Math.max(x, minX), maxX), y: Math.min(Math.max(y, minY), maxY), }; }, - [VISIBLE_MARGIN] + [VISIBLE_MARGIN, HEADER_HEIGHT, ERROR_HEIGHT] ); const handleResizeMove = 
useCallback( @@ -439,9 +449,13 @@ export default function FloatingVideo() { if (!resizeStateRef.current) return; const clientX = - event.touches && event.touches.length ? event.touches[0].clientX : event.clientX; + event.touches && event.touches.length + ? event.touches[0].clientX + : event.clientX; const clientY = - event.touches && event.touches.length ? event.touches[0].clientY : event.clientY; + event.touches && event.touches.length + ? event.touches[0].clientY + : event.clientY; const { startX, @@ -564,17 +578,21 @@ export default function FloatingVideo() { event.preventDefault(); const clientX = - event.touches && event.touches.length ? event.touches[0].clientX : event.clientX; + event.touches && event.touches.length + ? event.touches[0].clientX + : event.clientX; const clientY = - event.touches && event.touches.length ? event.touches[0].clientY : event.clientY; + event.touches && event.touches.length + ? event.touches[0].clientY + : event.clientY; const aspectRatio = - videoSize.height > 0 ? videoSize.width / videoSize.height : aspectRatioRef.current; + videoSize.height > 0 + ? videoSize.width / videoSize.height + : aspectRatioRef.current; aspectRatioRef.current = aspectRatio; - const startPos = - dragPositionRef.current || - initialPositionRef.current || - { x: 0, y: 0 }; + const startPos = dragPositionRef.current || + initialPositionRef.current || { x: 0, y: 0 }; resizeStateRef.current = { startX: clientX, @@ -608,14 +626,21 @@ export default function FloatingVideo() { useEffect(() => { if (initialPositionRef.current || typeof window === 'undefined') return; + const totalHeight = videoSize.height + HEADER_HEIGHT + ERROR_HEIGHT; const initialX = Math.max(10, window.innerWidth - videoSize.width - 20); - const initialY = Math.max(10, window.innerHeight - videoSize.height - 20); + const initialY = Math.max(10, window.innerHeight - totalHeight - 20); const pos = clampToVisible(initialX, initialY); initialPositionRef.current = pos; setDragPosition(pos); dragPositionRef.current = pos; - }, [clampToVisible, videoSize.height, videoSize.width]); + }, [ + clampToVisible, + videoSize.height, + videoSize.width, + HEADER_HEIGHT, + ERROR_HEIGHT, + ]); const handleDragStart = useCallback( (event, data) => { @@ -811,7 +836,6 @@ export default function FloatingVideo() { )} - {/* Error message below video - doesn't block controls */} From 5371519d8a2abca3be8c26918f9e49df7edb25c9 Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Fri, 19 Dec 2025 10:40:56 -0600 Subject: [PATCH 108/220] Enhancement: Update default backup settings to enable backups and set retention count to 3 --- apps/backups/scheduler.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/apps/backups/scheduler.py b/apps/backups/scheduler.py index 2dd9e828..b5f99fe5 100644 --- a/apps/backups/scheduler.py +++ b/apps/backups/scheduler.py @@ -19,11 +19,11 @@ SETTING_KEYS = { } DEFAULTS = { - "enabled": False, + "enabled": True, "frequency": "daily", "time": "03:00", "day_of_week": 0, # Sunday - "retention_count": 0, + "retention_count": 3, "cron_expression": "", } From abc6ae94e5e806278b9d8836162e7f45c4e3d766 Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Fri, 19 Dec 2025 10:44:39 -0600 Subject: [PATCH 109/220] Enhancement: Update SuperuserForm to include logo, version info, and improved layout --- CHANGELOG.md | 1 + .../src/components/forms/SuperuserForm.jsx | 67 ++++++++++++++++--- 2 files changed, 60 insertions(+), 8 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 0109277c..a42843db 100644 --- 
a/CHANGELOG.md +++ b/CHANGELOG.md @@ -15,6 +15,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 ### Changed +- Initial super user creation page now matches the login page design with logo, welcome message, divider, and version display for a more consistent and polished first-time setup experience - Removed unreachable code path in m3u output - Thanks [@DawtCom](https://github.com/DawtCom) - GitHub Actions workflows now use `docker/metadata-action` for cleaner and more maintainable OCI-compliant image label generation across all build pipelines (ci.yml, base-image.yml, release.yml). Labels are applied to both platform-specific images and multi-arch manifests with proper annotation formatting. - Thanks [@mrdynamo](https://github.com/mrdynamo) (Closes #724) - Update docker/dev-build.sh to support private registries, multiple architectures and pushing. Now you can do things like `dev-build.sh -p -r my.private.registry -a linux/arm64,linux/amd64` - Thanks [@jdblack](https://github.com/jdblack) diff --git a/frontend/src/components/forms/SuperuserForm.jsx b/frontend/src/components/forms/SuperuserForm.jsx index fbcf0eaa..ca8c81fc 100644 --- a/frontend/src/components/forms/SuperuserForm.jsx +++ b/frontend/src/components/forms/SuperuserForm.jsx @@ -1,8 +1,19 @@ // frontend/src/components/forms/SuperuserForm.js -import React, { useState } from 'react'; -import { TextInput, Center, Button, Paper, Title, Stack } from '@mantine/core'; +import React, { useState, useEffect } from 'react'; +import { + TextInput, + Center, + Button, + Paper, + Title, + Stack, + Text, + Image, + Divider, +} from '@mantine/core'; import API from '../../api'; import useAuthStore from '../../store/auth'; +import logo from '../../assets/logo.png'; function SuperuserForm() { const [formData, setFormData] = useState({ @@ -11,8 +22,16 @@ function SuperuserForm() { email: '', }); const [error, setError] = useState(''); + const [version, setVersion] = useState(null); const setSuperuserExists = useAuthStore((s) => s.setSuperuserExists); + useEffect(() => { + // Fetch version info + API.getVersion().then((data) => { + setVersion(data?.version); + }); + }, []); + const handleChange = (e) => { setFormData((prev) => ({ ...prev, @@ -46,11 +65,29 @@ function SuperuserForm() { > - - Create your Super User Account - + + Dispatcharr Logo + + Dispatcharr + + + Welcome! Create your Super User Account to get started. + +
-
+ + {version && ( + + v{version} + + )}
); From 944736612bfa0940b16cb98e5f7e49724af3341a Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Fri, 19 Dec 2025 15:49:18 -0600 Subject: [PATCH 110/220] Bug Fix: M3U profile form resets local state for search and replace patterns after saving, preventing validation errors when adding multiple profiles in a row --- CHANGELOG.md | 1 + frontend/src/components/forms/M3UProfile.jsx | 3 +++ 2 files changed, 4 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index a42843db..2c365565 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -27,6 +27,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 ### Fixed +- M3U Profile form now properly resets local state for search and replace patterns after saving, preventing validation errors when adding multiple profiles in a row - DVR series rule deletion now properly handles TVG IDs that contain slashes by encoding them in the URL path (Fixes #697) - VOD episode processing now correctly handles duplicate episodes (same episode in multiple languages/qualities) by reusing Episode records across multiple M3UEpisodeRelation entries instead of attempting to create duplicates (Fixes #556) - XtreamCodes series streaming endpoint now correctly handles episodes with multiple streams (different languages/qualities) by selecting the best available stream based on account priority (Fixes #569) diff --git a/frontend/src/components/forms/M3UProfile.jsx b/frontend/src/components/forms/M3UProfile.jsx index 353e48d1..b225ec38 100644 --- a/frontend/src/components/forms/M3UProfile.jsx +++ b/frontend/src/components/forms/M3UProfile.jsx @@ -149,6 +149,9 @@ const RegexFormAndView = ({ profile = null, m3u, isOpen, onClose }) => { } resetForm(); + // Reset local state to sync with formik reset + setSearchPattern(''); + setReplacePattern(''); setSubmitting(false); onClose(); }, From 22527b085d94816b95c7a1087ff784ee0e540423 Mon Sep 17 00:00:00 2001 From: Nick Sandstrom <32273437+nick4810@users.noreply.github.com> Date: Fri, 19 Dec 2025 14:09:17 -0800 Subject: [PATCH 111/220] Checking if data has been fetched before displaying empty channels --- frontend/src/components/tables/ChannelsTable.jsx | 14 ++++++++++++-- 1 file changed, 12 insertions(+), 2 deletions(-) diff --git a/frontend/src/components/tables/ChannelsTable.jsx b/frontend/src/components/tables/ChannelsTable.jsx index 9b9958f7..eb16146d 100644 --- a/frontend/src/components/tables/ChannelsTable.jsx +++ b/frontend/src/components/tables/ChannelsTable.jsx @@ -47,6 +47,7 @@ import { Select, NumberInput, Tooltip, + LoadingOverlay, } from '@mantine/core'; import { getCoreRowModel, flexRender } from '@tanstack/react-table'; import './table.css'; @@ -289,6 +290,7 @@ const ChannelsTable = ({}) => { const [selectedProfile, setSelectedProfile] = useState( profiles[selectedProfileId] ); + const [hasFetchedData, setHasFetchedData] = useState(false); const [paginationString, setPaginationString] = useState(''); const [filters, setFilters] = useState({ @@ -361,10 +363,14 @@ const ChannelsTable = ({}) => { }); }); + const channelsTableLength = hasFetchedData ? 
Object.keys(data).length : undefined; + /** * Functions */ const fetchData = useCallback(async () => { + setIsLoading(true); + const params = new URLSearchParams(); params.append('page', pagination.pageIndex + 1); params.append('page_size', pagination.pageSize); @@ -397,6 +403,9 @@ const ChannelsTable = ({}) => { await API.getAllChannelIds(params), ]); + setIsLoading(false); + setHasFetchedData(true); + setTablePrefs({ pageSize: pagination.pageSize, }); @@ -1330,12 +1339,12 @@ const ChannelsTable = ({}) => { {/* Table or ghost empty state inside Paper */} - {Object.keys(channels).length === 0 && ( + {channelsTableLength === 0 && ( )} - {Object.keys(channels).length > 0 && ( + {channelsTableLength > 0 && ( { borderRadius: 'var(--mantine-radius-default)', }} > + From f0a9a3fc15889fe304c3682fcce9955282223c92 Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Fri, 19 Dec 2025 17:00:30 -0600 Subject: [PATCH 112/220] Bug Fix: Docker init script now validates DISPATCHARR_PORT is an integer before using it, preventing sed errors when Kubernetes sets it to a service URL like `tcp://10.98.37.10:80`. Falls back to default port 9191 when invalid (Fixes #737) --- CHANGELOG.md | 1 + docker/init/03-init-dispatcharr.sh | 4 ++++ 2 files changed, 5 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 2c365565..3d09a733 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -27,6 +27,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 ### Fixed +- Docker init script now validates DISPATCHARR_PORT is an integer before using it, preventing sed errors when Kubernetes sets it to a service URL like `tcp://10.98.37.10:80`. Falls back to default port 9191 when invalid (Fixes #737) - M3U Profile form now properly resets local state for search and replace patterns after saving, preventing validation errors when adding multiple profiles in a row - DVR series rule deletion now properly handles TVG IDs that contain slashes by encoding them in the URL path (Fixes #697) - VOD episode processing now correctly handles duplicate episodes (same episode in multiple languages/qualities) by reusing Episode records across multiple M3UEpisodeRelation entries instead of attempting to create duplicates (Fixes #556) diff --git a/docker/init/03-init-dispatcharr.sh b/docker/init/03-init-dispatcharr.sh index da7d4484..03fe6816 100644 --- a/docker/init/03-init-dispatcharr.sh +++ b/docker/init/03-init-dispatcharr.sh @@ -30,6 +30,10 @@ if [ "$(id -u)" = "0" ] && [ -d "/app" ]; then fi fi # Configure nginx port +if ! [[ "$DISPATCHARR_PORT" =~ ^[0-9]+$ ]]; then + echo "⚠️ Warning: DISPATCHARR_PORT is not a valid integer, using default port 9191" + DISPATCHARR_PORT=9191 +fi sed -i "s/NGINX_PORT/${DISPATCHARR_PORT}/g" /etc/nginx/sites-enabled/default # Configure nginx based on IPv6 availability From 05b62c22ad7a7ef04f3f46c7af6a04b27bdcb4ed Mon Sep 17 00:00:00 2001 From: GitHub Actions Date: Sat, 20 Dec 2025 00:08:41 +0000 Subject: [PATCH 113/220] Release v0.15.0 --- CHANGELOG.md | 2 ++ version.py | 2 +- 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 3d09a733..4155bb68 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -7,6 +7,8 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 ## [Unreleased] +## [0.15.0] - 2025-12-20 + ### Added - VOD client stop button in Stats page: Users can now disconnect individual VOD clients from the Stats view, similar to the existing channel client disconnect functionality. 
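The DISPATCHARR_PORT guard in the init script above accepts only plain integers, since Kubernetes service links can inject values like tcp://10.98.37.10:80. A rough Python equivalent of that shell regex check, assuming the same 9191 fallback (names here are illustrative):

    import os
    import re

    def resolve_port(raw, default=9191):
        # Mirror the init script's ^[0-9]+$ test: anything that is not a
        # plain unsigned integer (e.g. "tcp://10.98.37.10:80") is rejected.
        if raw is not None and re.fullmatch(r"[0-9]+", raw):
            return int(raw)
        return default

    port = resolve_port(os.environ.get("DISPATCHARR_PORT"))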
diff --git a/version.py b/version.py index 807fc629..07d3d4c7 100644 --- a/version.py +++ b/version.py @@ -1,5 +1,5 @@ """ Dispatcharr version information. """ -__version__ = '0.14.0' # Follow semantic versioning (MAJOR.MINOR.PATCH) +__version__ = '0.15.0' # Follow semantic versioning (MAJOR.MINOR.PATCH) __timestamp__ = None # Set during CI/CD build process From 4cd63bc8984551021c7d5cd3428449b9f6c27531 Mon Sep 17 00:00:00 2001 From: Nick Sandstrom <32273437+nick4810@users.noreply.github.com> Date: Fri, 19 Dec 2025 16:33:21 -0800 Subject: [PATCH 114/220] Reverted LoadingOverlay --- frontend/src/components/tables/ChannelsTable.jsx | 2 -- 1 file changed, 2 deletions(-) diff --git a/frontend/src/components/tables/ChannelsTable.jsx b/frontend/src/components/tables/ChannelsTable.jsx index 57f524f7..b025d2d5 100644 --- a/frontend/src/components/tables/ChannelsTable.jsx +++ b/frontend/src/components/tables/ChannelsTable.jsx @@ -47,7 +47,6 @@ import { Select, NumberInput, Tooltip, - LoadingOverlay, } from '@mantine/core'; import { getCoreRowModel, flexRender } from '@tanstack/react-table'; import './table.css'; @@ -1389,7 +1388,6 @@ const ChannelsTable = ({}) => { borderRadius: 'var(--mantine-radius-default)', }} > - From ee183a9f753ac1755933de0acf95df98d299c32d Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Fri, 19 Dec 2025 18:39:43 -0600 Subject: [PATCH 115/220] Bug Fix: XtreamCodes EPG `has_archive` field now returns integer `0` instead of string `"0"` for proper JSON type consistency --- CHANGELOG.md | 4 ++++ apps/output/views.py | 2 +- 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 4155bb68..adb9c748 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -7,6 +7,10 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 ## [Unreleased] +### Fixed + +- XtreamCodes EPG `has_archive` field now returns integer `0` instead of string `"0"` for proper JSON type consistency + ## [0.15.0] - 2025-12-20 ### Added diff --git a/apps/output/views.py b/apps/output/views.py index c0d72bfb..635bb9d9 100644 --- a/apps/output/views.py +++ b/apps/output/views.py @@ -2326,7 +2326,7 @@ def xc_get_epg(request, user, short=False): if short == False: program_output["now_playing"] = 1 if start <= django_timezone.now() <= end else 0 - program_output["has_archive"] = "0" + program_output["has_archive"] = 0 output['epg_listings'].append(program_output) From 1029eb5b5c3c806d16b6ca5a9e5f8912a78d3e6f Mon Sep 17 00:00:00 2001 From: Nick Sandstrom <32273437+nick4810@users.noreply.github.com> Date: Fri, 19 Dec 2025 19:19:04 -0800 Subject: [PATCH 116/220] Table length checking if data is already set --- frontend/src/components/tables/ChannelsTable.jsx | 16 ++++++++++++---- 1 file changed, 12 insertions(+), 4 deletions(-) diff --git a/frontend/src/components/tables/ChannelsTable.jsx b/frontend/src/components/tables/ChannelsTable.jsx index 1b2e73d7..b60ada56 100644 --- a/frontend/src/components/tables/ChannelsTable.jsx +++ b/frontend/src/components/tables/ChannelsTable.jsx @@ -1,4 +1,10 @@ -import React, { useEffect, useMemo, useState, useCallback } from 'react'; +import React, { + useEffect, + useMemo, + useState, + useCallback, + useRef, +} from 'react'; import useChannelsStore from '../../store/channels'; import useLogosStore from '../../store/logos'; import { notifications } from '@mantine/notifications'; @@ -289,7 +295,6 @@ const ChannelsTable = ({}) => { const [selectedProfile, setSelectedProfile] = useState( profiles[selectedProfileId] ); - const 
[hasFetchedData, setHasFetchedData] = useState(false); const [showDisabled, setShowDisabled] = useState(true); const [showOnlyStreamlessChannels, setShowOnlyStreamlessChannels] = useState(false); @@ -311,6 +316,8 @@ const ChannelsTable = ({}) => { const [isBulkDelete, setIsBulkDelete] = useState(false); const [channelToDelete, setChannelToDelete] = useState(null); + const hasFetchedData = useRef(false); + // Column sizing state for resizable columns // Store in localStorage but with empty object as default const [columnSizing, setColumnSizing] = useLocalStorage( @@ -365,7 +372,8 @@ const ChannelsTable = ({}) => { }); }); - const channelsTableLength = hasFetchedData ? Object.keys(data).length : undefined; + const channelsTableLength = (Object.keys(data).length > 0 || hasFetchedData.current) ? + Object.keys(data).length : undefined; /** * Functions @@ -406,7 +414,7 @@ const ChannelsTable = ({}) => { ]); setIsLoading(false); - setHasFetchedData(true); + hasFetchedData.current = true; setTablePrefs({ pageSize: pagination.pageSize, From aa5db6c3f4e1dd74186ce36e90eea11ca51b2f94 Mon Sep 17 00:00:00 2001 From: Seth Van Niekerk Date: Mon, 22 Dec 2025 15:14:46 -0500 Subject: [PATCH 117/220] Squash: Log Parsing Refactor & Enhancing --- .../ts_proxy/services/channel_service.py | 205 ++------- apps/proxy/ts_proxy/services/log_parsers.py | 410 ++++++++++++++++++ apps/proxy/ts_proxy/stream_manager.py | 32 +- 3 files changed, 464 insertions(+), 183 deletions(-) create mode 100644 apps/proxy/ts_proxy/services/log_parsers.py diff --git a/apps/proxy/ts_proxy/services/channel_service.py b/apps/proxy/ts_proxy/services/channel_service.py index cea0d957..4c4a73ac 100644 --- a/apps/proxy/ts_proxy/services/channel_service.py +++ b/apps/proxy/ts_proxy/services/channel_service.py @@ -15,6 +15,7 @@ from ..redis_keys import RedisKeys from ..constants import EventType, ChannelState, ChannelMetadataField from ..url_utils import get_stream_info_for_switch from core.utils import log_system_event +from .log_parsers import LogParserFactory logger = logging.getLogger("ts_proxy") @@ -419,175 +420,51 @@ class ChannelService: @staticmethod def parse_and_store_stream_info(channel_id, stream_info_line, stream_type="video", stream_id=None): - """Parse FFmpeg/VLC/streamlink stream info line and store in Redis metadata and database""" + """ + Parse stream info from FFmpeg/VLC/Streamlink logs and store in Redis/DB. + Uses specialized parsers for each streaming tool. + """ try: - if stream_type == "input": - # Example lines: - # FFmpeg: Input #0, mpegts, from 'http://example.com/stream.ts': - # FFmpeg: Input #0, hls, from 'http://example.com/stream.m3u8': + # Use factory to parse the line based on stream type + parsed_data = LogParserFactory.parse(stream_type, stream_info_line) + + if not parsed_data: + return - # Extract input format (e.g., "mpegts", "hls", "flv", etc.) 
- input_match = re.search(r'Input #\d+,\s*([^,]+)', stream_info_line) - input_format = input_match.group(1).strip() if input_match else None + # Update Redis and database with parsed data + ChannelService._update_stream_info_in_redis( + channel_id, + parsed_data.get('video_codec'), + parsed_data.get('resolution'), + parsed_data.get('width'), + parsed_data.get('height'), + parsed_data.get('source_fps'), + parsed_data.get('pixel_format'), + parsed_data.get('video_bitrate'), + parsed_data.get('audio_codec'), + parsed_data.get('sample_rate'), + parsed_data.get('audio_channels'), + parsed_data.get('audio_bitrate'), + parsed_data.get('stream_type') + ) - # Store in Redis if we have valid data - if input_format: - ChannelService._update_stream_info_in_redis(channel_id, None, None, None, None, None, None, None, None, None, None, None, input_format) - # Save to database if stream_id is provided - if stream_id: - ChannelService._update_stream_stats_in_db(stream_id, stream_type=input_format) - - logger.debug(f"Input format info - Format: {input_format} for channel {channel_id}") - - elif stream_type == "vlc": - # VLC parsing - extract codecs from TS demux output (no resolution/fps in stream-copy mode) - lower = stream_info_line.lower() - - # Video codec detection - video_codec_map = { - ('avc', 'h.264', 'type=0x1b'): "h264", - ('hevc', 'h.265', 'type=0x24'): "hevc", - ('mpeg-2', 'type=0x02'): "mpeg2video", - ('mpeg-4', 'type=0x10'): "mpeg4" - } - for patterns, codec in video_codec_map.items(): - if any(p in lower for p in patterns): - ChannelService._update_stream_info_in_redis(channel_id, codec, None, None, None, None, None, None, None, None, None, None, None) - if stream_id: - ChannelService._update_stream_stats_in_db(stream_id, video_codec=codec) - break - - # Audio codec detection - audio_codec_map = { - ('type=0xf', 'adts'): "aac", - ('type=0x03', 'type=0x04'): "mp3", - ('type=0x06', 'type=0x81'): "ac3", - ('type=0x0b', 'lpcm'): "pcm" - } - for patterns, codec in audio_codec_map.items(): - if any(p in lower for p in patterns): - ChannelService._update_stream_info_in_redis(channel_id, None, None, None, None, None, None, None, codec, None, None, None, None) - if stream_id: - ChannelService._update_stream_stats_in_db(stream_id, audio_codec=codec) - break - - elif stream_type == "streamlink": - # Streamlink parsing - extract quality/resolution - quality_match = re.search(r'(\d+p|\d+x\d+)', stream_info_line) - if quality_match: - quality = quality_match.group(1) - if 'x' in quality: - resolution = quality - width, height = map(int, quality.split('x')) - else: - resolutions = { - '2160p': ('3840x2160', 3840, 2160), '1080p': ('1920x1080', 1920, 1080), - '720p': ('1280x720', 1280, 720), '480p': ('854x480', 854, 480), '360p': ('640x360', 640, 360) - } - resolution, width, height = resolutions.get(quality, ('1920x1080', 1920, 1080)) - - ChannelService._update_stream_info_in_redis(channel_id, "h264", resolution, width, height, None, "yuv420p", None, None, None, None, None, None) - if stream_id: - ChannelService._update_stream_stats_in_db(stream_id, video_codec="h264", resolution=resolution, pixel_format="yuv420p") - - elif stream_type == "video": - # Example line: - # Stream #0:0: Video: h264 (Main), yuv420p(tv, progressive), 1280x720 [SAR 1:1 DAR 16:9], q=2-31, 2000 kb/s, 29.97 fps, 90k tbn - - # Extract video codec (e.g., "h264", "mpeg2video", etc.) 
- codec_match = re.search(r'Video:\s*([a-zA-Z0-9_]+)', stream_info_line) - video_codec = codec_match.group(1) if codec_match else None - - # Extract resolution (e.g., "1280x720") - be more specific to avoid hex values - # Look for resolution patterns that are realistic video dimensions - resolution_match = re.search(r'\b(\d{3,5})x(\d{3,5})\b', stream_info_line) - if resolution_match: - width = int(resolution_match.group(1)) - height = int(resolution_match.group(2)) - # Validate that these look like reasonable video dimensions - if 100 <= width <= 10000 and 100 <= height <= 10000: - resolution = f"{width}x{height}" - else: - width = height = resolution = None - else: - width = height = resolution = None - - # Extract source FPS (e.g., "29.97 fps") - fps_match = re.search(r'(\d+(?:\.\d+)?)\s*fps', stream_info_line) - source_fps = float(fps_match.group(1)) if fps_match else None - - # Extract pixel format (e.g., "yuv420p") - pixel_format_match = re.search(r'Video:\s*[^,]+,\s*([^,(]+)', stream_info_line) - pixel_format = None - if pixel_format_match: - pf = pixel_format_match.group(1).strip() - # Clean up pixel format (remove extra info in parentheses) - if '(' in pf: - pf = pf.split('(')[0].strip() - pixel_format = pf - - # Extract bitrate if present (e.g., "2000 kb/s") - video_bitrate = None - bitrate_match = re.search(r'(\d+(?:\.\d+)?)\s*kb/s', stream_info_line) - if bitrate_match: - video_bitrate = float(bitrate_match.group(1)) - - # Store in Redis if we have valid data - if any(x is not None for x in [video_codec, resolution, source_fps, pixel_format, video_bitrate]): - ChannelService._update_stream_info_in_redis(channel_id, video_codec, resolution, width, height, source_fps, pixel_format, video_bitrate, None, None, None, None, None) - # Save to database if stream_id is provided - if stream_id: - ChannelService._update_stream_stats_in_db( - stream_id, - video_codec=video_codec, - resolution=resolution, - source_fps=source_fps, - pixel_format=pixel_format, - video_bitrate=video_bitrate - ) - - logger.info(f"Video stream info - Codec: {video_codec}, Resolution: {resolution}, " - f"Source FPS: {source_fps}, Pixel Format: {pixel_format}, " - f"Video Bitrate: {video_bitrate} kb/s") - - elif stream_type == "audio": - # Example line: - # Stream #0:1[0x101]: Audio: aac (LC) ([15][0][0][0] / 0x000F), 48000 Hz, stereo, fltp, 64 kb/s - - # Extract audio codec (e.g., "aac", "mp3", etc.) 
- codec_match = re.search(r'Audio:\s*([a-zA-Z0-9_]+)', stream_info_line) - audio_codec = codec_match.group(1) if codec_match else None - - # Extract sample rate (e.g., "48000 Hz") - sample_rate_match = re.search(r'(\d+)\s*Hz', stream_info_line) - sample_rate = int(sample_rate_match.group(1)) if sample_rate_match else None - - # Extract channel layout (e.g., "stereo", "5.1", "mono") - # Look for common channel layouts - channel_match = re.search(r'\b(mono|stereo|5\.1|7\.1|quad|2\.1)\b', stream_info_line, re.IGNORECASE) - channels = channel_match.group(1) if channel_match else None - - # Extract audio bitrate if present (e.g., "64 kb/s") - audio_bitrate = None - bitrate_match = re.search(r'(\d+(?:\.\d+)?)\s*kb/s', stream_info_line) - if bitrate_match: - audio_bitrate = float(bitrate_match.group(1)) - - # Store in Redis if we have valid data - if any(x is not None for x in [audio_codec, sample_rate, channels, audio_bitrate]): - ChannelService._update_stream_info_in_redis(channel_id, None, None, None, None, None, None, None, audio_codec, sample_rate, channels, audio_bitrate, None) - # Save to database if stream_id is provided - if stream_id: - ChannelService._update_stream_stats_in_db( - stream_id, - audio_codec=audio_codec, - sample_rate=sample_rate, - audio_channels=channels, - audio_bitrate=audio_bitrate - ) + if stream_id: + ChannelService._update_stream_stats_in_db( + stream_id, + video_codec=parsed_data.get('video_codec'), + resolution=parsed_data.get('resolution'), + source_fps=parsed_data.get('source_fps'), + pixel_format=parsed_data.get('pixel_format'), + video_bitrate=parsed_data.get('video_bitrate'), + audio_codec=parsed_data.get('audio_codec'), + sample_rate=parsed_data.get('sample_rate'), + audio_channels=parsed_data.get('audio_channels'), + audio_bitrate=parsed_data.get('audio_bitrate'), + stream_type=parsed_data.get('stream_type') + ) except Exception as e: - logger.debug(f"Error parsing FFmpeg {stream_type} stream info: {e}") + logger.debug(f"Error parsing {stream_type} stream info: {e}") @staticmethod def _update_stream_info_in_redis(channel_id, codec, resolution, width, height, fps, pixel_format, video_bitrate, audio_codec=None, sample_rate=None, channels=None, audio_bitrate=None, input_format=None): diff --git a/apps/proxy/ts_proxy/services/log_parsers.py b/apps/proxy/ts_proxy/services/log_parsers.py new file mode 100644 index 00000000..95ee7a06 --- /dev/null +++ b/apps/proxy/ts_proxy/services/log_parsers.py @@ -0,0 +1,410 @@ +"""Log parsers for FFmpeg, Streamlink, and VLC output.""" +import re +import logging +from abc import ABC, abstractmethod +from typing import Optional, Dict, Any + +logger = logging.getLogger(__name__) + + +class BaseLogParser(ABC): + """Base class for log parsers""" + + # Map of stream_type -> method_name that this parser handles + STREAM_TYPE_METHODS: Dict[str, str] = {} + + @abstractmethod + def can_parse(self, line: str) -> Optional[str]: + """ + Check if this parser can handle the line. + Returns the stream_type if it can parse, None otherwise. 
+ e.g., 'video', 'audio', 'vlc_video', 'vlc_audio', 'streamlink' + """ + pass + + @abstractmethod + def parse_input_format(self, line: str) -> Optional[Dict[str, Any]]: + pass + + @abstractmethod + def parse_video_stream(self, line: str) -> Optional[Dict[str, Any]]: + pass + + @abstractmethod + def parse_audio_stream(self, line: str) -> Optional[Dict[str, Any]]: + pass + + +class FFmpegLogParser(BaseLogParser): + """Parser for FFmpeg log output""" + + STREAM_TYPE_METHODS = { + 'input': 'parse_input_format', + 'video': 'parse_video_stream', + 'audio': 'parse_audio_stream' + } + + def can_parse(self, line: str) -> Optional[str]: + """Check if this is an FFmpeg line we can parse""" + lower = line.lower() + + # Input format detection + if lower.startswith('input #'): + return 'input' + + # Stream info (only during input phase, but we'll let stream_manager handle phase tracking) + if 'stream #' in lower: + if 'video:' in lower: + return 'video' + elif 'audio:' in lower: + return 'audio' + + return None + + def parse_input_format(self, line: str) -> Optional[Dict[str, Any]]: + """Parse FFmpeg input format (e.g., mpegts, hls)""" + try: + input_match = re.search(r'Input #\d+,\s*([^,]+)', line) + input_format = input_match.group(1).strip() if input_match else None + + if input_format: + logger.debug(f"Input format info - Format: {input_format}") + return {'stream_type': input_format} + except Exception as e: + logger.debug(f"Error parsing FFmpeg input format: {e}") + + return None + + def parse_video_stream(self, line: str) -> Optional[Dict[str, Any]]: + """Parse FFmpeg video stream info""" + try: + result = {} + + # Extract codec, resolution, fps, pixel format, bitrate + codec_match = re.search(r'Video:\s*([a-zA-Z0-9_]+)', line) + if codec_match: + result['video_codec'] = codec_match.group(1) + + resolution_match = re.search(r'\b(\d{3,5})x(\d{3,5})\b', line) + if resolution_match: + width = int(resolution_match.group(1)) + height = int(resolution_match.group(2)) + if 100 <= width <= 10000 and 100 <= height <= 10000: + result['resolution'] = f"{width}x{height}" + result['width'] = width + result['height'] = height + + fps_match = re.search(r'(\d+(?:\.\d+)?)\s*fps', line) + if fps_match: + result['source_fps'] = float(fps_match.group(1)) + + pixel_format_match = re.search(r'Video:\s*[^,]+,\s*([^,(]+)', line) + if pixel_format_match: + pf = pixel_format_match.group(1).strip() + if '(' in pf: + pf = pf.split('(')[0].strip() + result['pixel_format'] = pf + + bitrate_match = re.search(r'(\d+(?:\.\d+)?)\s*kb/s', line) + if bitrate_match: + result['video_bitrate'] = float(bitrate_match.group(1)) + + if result: + logger.info(f"Video stream info - Codec: {result.get('video_codec')}, " + f"Resolution: {result.get('resolution')}, " + f"Source FPS: {result.get('source_fps')}, " + f"Pixel Format: {result.get('pixel_format')}, " + f"Video Bitrate: {result.get('video_bitrate')} kb/s") + return result + + except Exception as e: + logger.debug(f"Error parsing FFmpeg video stream info: {e}") + + return None + + def parse_audio_stream(self, line: str) -> Optional[Dict[str, Any]]: + """Parse FFmpeg audio stream info""" + try: + result = {} + + codec_match = re.search(r'Audio:\s*([a-zA-Z0-9_]+)', line) + if codec_match: + result['audio_codec'] = codec_match.group(1) + + sample_rate_match = re.search(r'(\d+)\s*Hz', line) + if sample_rate_match: + result['sample_rate'] = int(sample_rate_match.group(1)) + + channel_match = re.search(r'\b(mono|stereo|5\.1|7\.1|quad|2\.1)\b', line, re.IGNORECASE) + if channel_match: + 
result['audio_channels'] = channel_match.group(1) + + bitrate_match = re.search(r'(\d+(?:\.\d+)?)\s*kb/s', line) + if bitrate_match: + result['audio_bitrate'] = float(bitrate_match.group(1)) + + if result: + return result + + except Exception as e: + logger.debug(f"Error parsing FFmpeg audio stream info: {e}") + + return None + + +class VLCLogParser(BaseLogParser): + """Parser for VLC log output""" + + STREAM_TYPE_METHODS = { + 'vlc_video': 'parse_video_stream', + 'vlc_audio': 'parse_audio_stream' + } + + def can_parse(self, line: str) -> Optional[str]: + """Check if this is a VLC line we can parse""" + lower = line.lower() + + # VLC TS demux codec detection + if 'ts demux debug' in lower and 'type=' in lower: + if 'video' in lower: + return 'vlc_video' + elif 'audio' in lower: + return 'vlc_audio' + + # VLC decoder output + if 'decoder' in lower and ('channels:' in lower or 'samplerate:' in lower or 'x' in line or 'fps' in lower): + if 'audio' in lower or 'channels:' in lower or 'samplerate:' in lower: + return 'vlc_audio' + else: + return 'vlc_video' + + # VLC transcode output for resolution/FPS + if 'stream_out_transcode' in lower and ('source fps' in lower or ('source ' in lower and 'x' in line)): + return 'vlc_video' + + return None + + def parse_input_format(self, line: str) -> Optional[Dict[str, Any]]: + return None + + def parse_video_stream(self, line: str) -> Optional[Dict[str, Any]]: + """Parse VLC TS demux output and decoder info for video""" + try: + lower = line.lower() + result = {} + + # Codec detection from TS demux + video_codec_map = { + ('avc', 'h.264', 'type=0x1b'): "h264", + ('hevc', 'h.265', 'type=0x24'): "hevc", + ('mpeg-2', 'type=0x02'): "mpeg2video", + ('mpeg-4', 'type=0x10'): "mpeg4" + } + + for patterns, codec in video_codec_map.items(): + if any(p in lower for p in patterns): + result['video_codec'] = codec + break + + # Extract FPS from transcode output: "source fps 30/1" + fps_fraction_match = re.search(r'source fps\s+(\d+)/(\d+)', lower) + if fps_fraction_match: + numerator = int(fps_fraction_match.group(1)) + denominator = int(fps_fraction_match.group(2)) + if denominator > 0: + result['source_fps'] = numerator / denominator + + # Extract resolution from transcode output: "source 1280x720" + source_res_match = re.search(r'source\s+(\d{3,4})x(\d{3,4})', lower) + if source_res_match: + width = int(source_res_match.group(1)) + height = int(source_res_match.group(2)) + if 100 <= width <= 10000 and 100 <= height <= 10000: + result['resolution'] = f"{width}x{height}" + result['width'] = width + result['height'] = height + else: + # Fallback: generic resolution pattern + resolution_match = re.search(r'(\d{3,4})x(\d{3,4})', line) + if resolution_match: + width = int(resolution_match.group(1)) + height = int(resolution_match.group(2)) + if 100 <= width <= 10000 and 100 <= height <= 10000: + result['resolution'] = f"{width}x{height}" + result['width'] = width + result['height'] = height + + # Fallback: try to extract FPS from generic format + if 'source_fps' not in result: + fps_match = re.search(r'(\d+\.?\d*)\s*fps', lower) + if fps_match: + result['source_fps'] = float(fps_match.group(1)) + + return result if result else None + + except Exception as e: + logger.debug(f"Error parsing VLC video stream info: {e}") + + return None + + def parse_audio_stream(self, line: str) -> Optional[Dict[str, Any]]: + """Parse VLC TS demux output and decoder info for audio""" + try: + lower = line.lower() + result = {} + + # Codec detection from TS demux + audio_codec_map = { + 
('type=0xf', 'adts'): "aac", + ('type=0x03', 'type=0x04'): "mp3", + ('type=0x06', 'type=0x81'): "ac3", + ('type=0x0b', 'lpcm'): "pcm" + } + + for patterns, codec in audio_codec_map.items(): + if any(p in lower for p in patterns): + result['audio_codec'] = codec + break + + # VLC decoder format: "AAC channels: 2 samplerate: 48000" + if 'channels:' in lower: + channels_match = re.search(r'channels:\s*(\d+)', lower) + if channels_match: + num_channels = int(channels_match.group(1)) + # Convert number to name + channel_names = {1: 'mono', 2: 'stereo', 6: '5.1', 8: '7.1'} + result['audio_channels'] = channel_names.get(num_channels, str(num_channels)) + + if 'samplerate:' in lower: + samplerate_match = re.search(r'samplerate:\s*(\d+)', lower) + if samplerate_match: + result['sample_rate'] = int(samplerate_match.group(1)) + + # Try to extract sample rate (Hz format) + sample_rate_match = re.search(r'(\d+)\s*hz', lower) + if sample_rate_match and 'sample_rate' not in result: + result['sample_rate'] = int(sample_rate_match.group(1)) + + # Try to extract channels (word format) + if 'audio_channels' not in result: + channel_match = re.search(r'\b(mono|stereo|5\.1|7\.1|quad|2\.1)\b', lower) + if channel_match: + result['audio_channels'] = channel_match.group(1) + + return result if result else None + + except Exception as e: + logger.error(f"[VLC AUDIO PARSER] Error parsing VLC audio stream info: {e}") + + return None + + +class StreamlinkLogParser(BaseLogParser): + """Parser for Streamlink log output""" + + STREAM_TYPE_METHODS = { + 'streamlink': 'parse_video_stream' + } + + def can_parse(self, line: str) -> Optional[str]: + """Check if this is a Streamlink line we can parse""" + lower = line.lower() + + if 'opening stream:' in lower or 'available streams:' in lower: + return 'streamlink' + + return None + + def parse_input_format(self, line: str) -> Optional[Dict[str, Any]]: + return None + + def parse_video_stream(self, line: str) -> Optional[Dict[str, Any]]: + """Parse Streamlink quality/resolution""" + try: + quality_match = re.search(r'(\d+p|\d+x\d+)', line) + if quality_match: + quality = quality_match.group(1) + + if 'x' in quality: + resolution = quality + width, height = map(int, quality.split('x')) + else: + resolutions = { + '2160p': ('3840x2160', 3840, 2160), + '1080p': ('1920x1080', 1920, 1080), + '720p': ('1280x720', 1280, 720), + '480p': ('854x480', 854, 480), + '360p': ('640x360', 640, 360) + } + resolution, width, height = resolutions.get(quality, ('1920x1080', 1920, 1080)) + + return { + 'video_codec': 'h264', + 'resolution': resolution, + 'width': width, + 'height': height, + 'pixel_format': 'yuv420p' + } + + except Exception as e: + logger.debug(f"Error parsing Streamlink video info: {e}") + + return None + + def parse_audio_stream(self, line: str) -> Optional[Dict[str, Any]]: + return None + + +class LogParserFactory: + """Factory to get the appropriate log parser""" + + _parsers = { + 'ffmpeg': FFmpegLogParser(), + 'vlc': VLCLogParser(), + 'streamlink': StreamlinkLogParser() + } + + @classmethod + def _get_parser_and_method(cls, stream_type: str) -> Optional[tuple[BaseLogParser, str]]: + """Determine parser and method from stream_type""" + # Check each parser to see if it handles this stream_type + for parser in cls._parsers.values(): + method_name = parser.STREAM_TYPE_METHODS.get(stream_type) + if method_name: + return (parser, method_name) + + return None + + @classmethod + def parse(cls, stream_type: str, line: str) -> Optional[Dict[str, Any]]: + """ + Parse a log line based 
on stream type. + Returns parsed data or None if parsing fails. + """ + result = cls._get_parser_and_method(stream_type) + if not result: + return None + + parser, method_name = result + method = getattr(parser, method_name, None) + if method: + return method(line) + + return None + + @classmethod + def auto_parse(cls, line: str) -> Optional[tuple[str, Dict[str, Any]]]: + """ + Automatically detect which parser can handle this line and parse it. + Returns (stream_type, parsed_data) or None if no parser can handle it. + """ + # Try each parser to see if it can handle this line + for parser in cls._parsers.values(): + stream_type = parser.can_parse(line) + if stream_type: + # Parser can handle this line, now parse it + parsed_data = cls.parse(stream_type, line) + if parsed_data: + return (stream_type, parsed_data) + + return None diff --git a/apps/proxy/ts_proxy/stream_manager.py b/apps/proxy/ts_proxy/stream_manager.py index 9b0d9ada..da840f2d 100644 --- a/apps/proxy/ts_proxy/stream_manager.py +++ b/apps/proxy/ts_proxy/stream_manager.py @@ -645,26 +645,20 @@ class StreamManager: if content_lower.startswith('output #') or 'encoder' in content_lower: self.ffmpeg_input_phase = False - # Parse VLC-specific output - look for TS demux type info for codec detection - if 'ts demux debug' in content_lower and 'type=' in content_lower and ('video' in content_lower or 'audio' in content_lower): - from .services.channel_service import ChannelService - ChannelService.parse_and_store_stream_info(self.channel_id, content, "vlc", self.current_stream_id) + # Try to auto-parse with any available parser + from .services.log_parsers import LogParserFactory + from .services.channel_service import ChannelService - # Parse streamlink-specific output - if 'opening stream:' in content_lower or 'available streams:' in content_lower: - from .services.channel_service import ChannelService - ChannelService.parse_and_store_stream_info(self.channel_id, content, "streamlink", self.current_stream_id) - - # Only parse stream info if we're still in the input phase - if ("stream #" in content_lower and - ("video:" in content_lower or "audio:" in content_lower) and - self.ffmpeg_input_phase): - - from .services.channel_service import ChannelService - if "video:" in content_lower: - ChannelService.parse_and_store_stream_info(self.channel_id, content, "video", self.current_stream_id) - elif "audio:" in content_lower: - ChannelService.parse_and_store_stream_info(self.channel_id, content, "audio", self.current_stream_id) + parse_result = LogParserFactory.auto_parse(content) + if parse_result: + stream_type, parsed_data = parse_result + # For FFmpeg, only parse during input phase + if stream_type in ['video', 'audio', 'input']: + if self.ffmpeg_input_phase: + ChannelService.parse_and_store_stream_info(self.channel_id, content, stream_type, self.current_stream_id) + else: + # VLC and Streamlink can be parsed anytime + ChannelService.parse_and_store_stream_info(self.channel_id, content, stream_type, self.current_stream_id) # Determine log level based on content if any(keyword in content_lower for keyword in ['error', 'failed', 'cannot', 'invalid', 'corrupt']): From 18645fc08fbcf442329c32e4d090c0655a0570bd Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Mon, 22 Dec 2025 16:39:09 -0600 Subject: [PATCH 118/220] Bug Fix: Re-apply failed merge to fix clients that don't have ipv6 support. 
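The init script enables the nginx [::] listener only when the host actually has an IPv6 address, by grepping `ip -6 addr` for inet6 entries. A minimal sketch of an equivalent probe in Python, assuming a test bind on the IPv6 loopback is acceptable instead of parsing ip output:

    import socket

    def ipv6_available():
        # Only advertise IPv6 if the host can actually open an AF_INET6
        # socket; this catches kernels booted with ipv6.disable=1.
        if not socket.has_ipv6:
            return False
        try:
            with socket.socket(socket.AF_INET6, socket.SOCK_STREAM) as s:
                s.bind(("::1", 0))
            return True
        except OSError:
            return False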
--- CHANGELOG.md | 1 + docker/init/03-init-dispatcharr.sh | 8 ++++++++ 2 files changed, 9 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index adb9c748..d10635e8 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -10,6 +10,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 ### Fixed - XtreamCodes EPG `has_archive` field now returns integer `0` instead of string `"0"` for proper JSON type consistency +- nginx now gracefully handles hosts without IPv6 support by automatically disabling IPv6 binding at startup (Fixes #744) ## [0.15.0] - 2025-12-20 diff --git a/docker/init/03-init-dispatcharr.sh b/docker/init/03-init-dispatcharr.sh index c9eaf18b..03fe6816 100644 --- a/docker/init/03-init-dispatcharr.sh +++ b/docker/init/03-init-dispatcharr.sh @@ -36,6 +36,14 @@ if ! [[ "$DISPATCHARR_PORT" =~ ^[0-9]+$ ]]; then fi sed -i "s/NGINX_PORT/${DISPATCHARR_PORT}/g" /etc/nginx/sites-enabled/default +# Configure nginx based on IPv6 availability +if ip -6 addr show | grep -q "inet6"; then + echo "✅ IPv6 is available, enabling IPv6 in nginx" +else + echo "⚠️ IPv6 not available, disabling IPv6 in nginx" + sed -i '/listen \[::\]:/d' /etc/nginx/sites-enabled/default +fi + # NOTE: mac doesn't run as root, so only manage permissions # if this script is running as root if [ "$(id -u)" = "0" ]; then From c7590d204e2ca6fc6368696d08dfea66c402712a Mon Sep 17 00:00:00 2001 From: GitHub Actions Date: Mon, 22 Dec 2025 22:58:41 +0000 Subject: [PATCH 119/220] Release v0.15.1 --- CHANGELOG.md | 2 ++ version.py | 2 +- 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index d10635e8..0cb610fa 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -7,6 +7,8 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 ## [Unreleased] +## [0.15.1] - 2025-12-22 + ### Fixed - XtreamCodes EPG `has_archive` field now returns integer `0` instead of string `"0"` for proper JSON type consistency diff --git a/version.py b/version.py index 07d3d4c7..714a29fd 100644 --- a/version.py +++ b/version.py @@ -1,5 +1,5 @@ """ Dispatcharr version information. """ -__version__ = '0.15.0' # Follow semantic versioning (MAJOR.MINOR.PATCH) +__version__ = '0.15.1' # Follow semantic versioning (MAJOR.MINOR.PATCH) __timestamp__ = None # Set during CI/CD build process From eea84cfd8b8e9ccd69942f2a5f27536b88a2f8bd Mon Sep 17 00:00:00 2001 From: drnikcuk Date: Mon, 22 Dec 2025 23:33:26 +0000 Subject: [PATCH 120/220] Update Stats.jsx (#773) * Update Stats.jsx Adds fix for stats control arrows direction swap --- frontend/src/pages/Stats.jsx | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/frontend/src/pages/Stats.jsx b/frontend/src/pages/Stats.jsx index e7e3043a..8ec576a8 100644 --- a/frontend/src/pages/Stats.jsx +++ b/frontend/src/pages/Stats.jsx @@ -481,8 +481,8 @@ const VODCard = ({ vodContent, stopVODClient }) => { size={16} style={{ transform: isClientExpanded - ? 'rotate(180deg)' - : 'rotate(0deg)', + ? 
'rotate(0deg)' + : 'rotate(180deg)', transition: 'transform 0.2s', }} /> From 106ea72c9ddc5d1a62d7f9d9850ff595d9cd3796 Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Mon, 22 Dec 2025 17:38:55 -0600 Subject: [PATCH 121/220] Changelog: Fix event viewer arrow direction for corrected UI behavior --- CHANGELOG.md | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 0cb610fa..2e2e9003 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -7,6 +7,10 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 ## [Unreleased] +### Changed + +- Fixed event viewer arrow direction (previously inverted) — UI behavior corrected. Thanks [@drnikcuk](https://github.com/drnikcuk) (Closes #772) + ## [0.15.1] - 2025-12-22 ### Fixed From 904500906ca0f5d843225a3751aa3bf40c3b47d3 Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Tue, 23 Dec 2025 09:51:02 -0600 Subject: [PATCH 122/220] Bug Fix: Update stream validation to return original URL instead of redirected URL when using redirect profile. --- CHANGELOG.md | 4 ++++ apps/proxy/ts_proxy/url_utils.py | 6 +++--- 2 files changed, 7 insertions(+), 3 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 2e2e9003..a36db70a 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -11,6 +11,10 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 - Fixed event viewer arrow direction (previously inverted) — UI behavior corrected. Thanks [@drnikcuk](https://github.com/drnikcuk) (Closes #772) +### Fixed + +- Stream validation now returns original URL instead of redirected URL to prevent issues with temporary redirect URLs that expire before clients can connect + ## [0.15.1] - 2025-12-22 ### Fixed diff --git a/apps/proxy/ts_proxy/url_utils.py b/apps/proxy/ts_proxy/url_utils.py index 3b05c9f2..2afe2871 100644 --- a/apps/proxy/ts_proxy/url_utils.py +++ b/apps/proxy/ts_proxy/url_utils.py @@ -471,7 +471,7 @@ def validate_stream_url(url, user_agent=None, timeout=(5, 5)): # If HEAD not supported, server will return 405 or other error if 200 <= head_response.status_code < 300: # HEAD request successful - return True, head_response.url, head_response.status_code, "Valid (HEAD request)" + return True, url, head_response.status_code, "Valid (HEAD request)" # Try a GET request with stream=True to avoid downloading all content get_response = session.get( @@ -484,7 +484,7 @@ def validate_stream_url(url, user_agent=None, timeout=(5, 5)): # IMPORTANT: Check status code first before checking content if not (200 <= get_response.status_code < 300): logger.warning(f"Stream validation failed with HTTP status {get_response.status_code}") - return False, get_response.url, get_response.status_code, f"Invalid HTTP status: {get_response.status_code}" + return False, url, get_response.status_code, f"Invalid HTTP status: {get_response.status_code}" # Only check content if status code is valid try: @@ -538,7 +538,7 @@ def validate_stream_url(url, user_agent=None, timeout=(5, 5)): get_response.close() # If we have content, consider it valid even with unrecognized content type - return is_valid, get_response.url, get_response.status_code, message + return is_valid, url, get_response.status_code, message except requests.exceptions.Timeout: return False, url, 0, "Timeout connecting to stream" From 9c9cbab94cf0caaf58b96a20150335ef539d498d Mon Sep 17 00:00:00 2001 From: Nick Sandstrom <32273437+nick4810@users.noreply.github.com> Date: Tue, 23 Dec 2025 12:27:29 -0800 Subject: [PATCH 123/220] Reverted lazy load of 
StreamsTable --- frontend/src/pages/Channels.jsx | 12 ++++-------- 1 file changed, 4 insertions(+), 8 deletions(-) diff --git a/frontend/src/pages/Channels.jsx b/frontend/src/pages/Channels.jsx index 8f5cae26..26ed77fa 100644 --- a/frontend/src/pages/Channels.jsx +++ b/frontend/src/pages/Channels.jsx @@ -1,7 +1,7 @@ -import React, { lazy, Suspense } from 'react'; +import React from 'react'; import ChannelsTable from '../components/tables/ChannelsTable'; -const StreamsTable = lazy(() => import('../components/tables/StreamsTable')); -import { Box, Text } from '@mantine/core'; +import StreamsTable from '../components/tables/StreamsTable'; +import { Box, } from '@mantine/core'; import { Allotment } from 'allotment'; import { USER_LEVELS } from '../constants'; import useAuthStore from '../store/auth'; @@ -53,11 +53,7 @@ const PageContent = () => { - - Loading...}> - - - + From ff7298a93e77f76f250908fb7c2db431a903789d Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Tue, 23 Dec 2025 15:07:25 -0600 Subject: [PATCH 124/220] Enhance StreamManager for efficient log parsing and update VLC stream profile naming --- apps/proxy/ts_proxy/stream_manager.py | 41 +++++++++++++++++-- core/fixtures/initial_data.json | 6 +-- .../migrations/0019_add_vlc_stream_profile.py | 10 ++--- 3 files changed, 46 insertions(+), 11 deletions(-) diff --git a/apps/proxy/ts_proxy/stream_manager.py b/apps/proxy/ts_proxy/stream_manager.py index da840f2d..cbaa0bc0 100644 --- a/apps/proxy/ts_proxy/stream_manager.py +++ b/apps/proxy/ts_proxy/stream_manager.py @@ -107,6 +107,10 @@ class StreamManager: # Add this flag for tracking transcoding process status self.transcode_process_active = False + # Track stream command for efficient log parser routing + self.stream_command = None + self.parser_type = None # Will be set when transcode process starts + # Add tracking for data throughput self.bytes_processed = 0 self.last_bytes_update = time.time() @@ -476,6 +480,21 @@ class StreamManager: # Build and start transcode command self.transcode_cmd = stream_profile.build_command(self.url, self.user_agent) + # Store stream command for efficient log parser routing + self.stream_command = stream_profile.command + # Map actual commands to parser types for direct routing + command_to_parser = { + 'ffmpeg': 'ffmpeg', + 'cvlc': 'vlc', + 'vlc': 'vlc', + 'streamlink': 'streamlink' + } + self.parser_type = command_to_parser.get(self.stream_command.lower()) + if self.parser_type: + logger.debug(f"Using {self.parser_type} parser for log parsing (command: {self.stream_command})") + else: + logger.debug(f"Unknown stream command '{self.stream_command}', will use auto-detection for log parsing") + # For UDP streams, remove any user_agent parameters from the command if hasattr(self, 'stream_type') and self.stream_type == StreamType.UDP: # Filter out any arguments that contain the user_agent value or related headers @@ -645,11 +664,27 @@ class StreamManager: if content_lower.startswith('output #') or 'encoder' in content_lower: self.ffmpeg_input_phase = False - # Try to auto-parse with any available parser + # Route to appropriate parser based on known command type from .services.log_parsers import LogParserFactory from .services.channel_service import ChannelService - - parse_result = LogParserFactory.auto_parse(content) + + parse_result = None + + # If we know the parser type, use direct routing for efficiency + if self.parser_type: + # Get the appropriate parser and check what it can parse + parser = LogParserFactory._parsers.get(self.parser_type) + if 
parser: + stream_type = parser.can_parse(content) + if stream_type: + # Parser can handle this line, parse it directly + parsed_data = LogParserFactory.parse(stream_type, content) + if parsed_data: + parse_result = (stream_type, parsed_data) + else: + # Unknown command type - use auto-detection as fallback + parse_result = LogParserFactory.auto_parse(content) + if parse_result: stream_type, parsed_data = parse_result # For FFmpeg, only parse during input phase diff --git a/core/fixtures/initial_data.json b/core/fixtures/initial_data.json index 49ecf080..889f0d24 100644 --- a/core/fixtures/initial_data.json +++ b/core/fixtures/initial_data.json @@ -23,7 +23,7 @@ "model": "core.streamprofile", "pk": 1, "fields": { - "name": "ffmpeg", + "name": "FFmpeg", "command": "ffmpeg", "parameters": "-i {streamUrl} -c:v copy -c:a copy -f mpegts pipe:1", "is_active": true, @@ -34,7 +34,7 @@ "model": "core.streamprofile", "pk": 2, "fields": { - "name": "streamlink", + "name": "Streamlink", "command": "streamlink", "parameters": "{streamUrl} best --stdout", "is_active": true, @@ -45,7 +45,7 @@ "model": "core.streamprofile", "pk": 3, "fields": { - "name": "vlc", + "name": "VLC", "command": "cvlc", "parameters": "-vv -I dummy --no-video-title-show --http-user-agent {userAgent} {streamUrl} --sout #standard{access=file,mux=ts,dst=-}", "is_active": true, diff --git a/core/migrations/0019_add_vlc_stream_profile.py b/core/migrations/0019_add_vlc_stream_profile.py index 5d794647..c3f72592 100644 --- a/core/migrations/0019_add_vlc_stream_profile.py +++ b/core/migrations/0019_add_vlc_stream_profile.py @@ -5,9 +5,9 @@ from django.db import migrations def add_vlc_profile(apps, schema_editor): StreamProfile = apps.get_model("core", "StreamProfile") UserAgent = apps.get_model("core", "UserAgent") - + # Check if VLC profile already exists - if not StreamProfile.objects.filter(name="vlc").exists(): + if not StreamProfile.objects.filter(name="VLC").exists(): # Get the TiviMate user agent (should be pk=1) try: tivimate_ua = UserAgent.objects.get(pk=1) @@ -17,9 +17,9 @@ def add_vlc_profile(apps, schema_editor): if not tivimate_ua: # No user agents exist, skip creating profile return - + StreamProfile.objects.create( - name="vlc", + name="VLC", command="cvlc", parameters="-vv -I dummy --no-video-title-show --http-user-agent {userAgent} {streamUrl} --sout #standard{access=file,mux=ts,dst=-}", is_active=True, @@ -29,7 +29,7 @@ def add_vlc_profile(apps, schema_editor): def remove_vlc_profile(apps, schema_editor): StreamProfile = apps.get_model("core", "StreamProfile") - StreamProfile.objects.filter(name="vlc").delete() + StreamProfile.objects.filter(name="VLC").delete() class Migration(migrations.Migration): From 8f811f2ed3f9d9d9194e53fd7f8e8b8d4f524557 Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Tue, 23 Dec 2025 15:17:50 -0600 Subject: [PATCH 125/220] Correct profile name casing for FFmpeg, Streamlink, and VLC in fixtures.json --- fixtures.json | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/fixtures.json b/fixtures.json index c0f5e0ea..3c31f926 100644 --- a/fixtures.json +++ b/fixtures.json @@ -36,7 +36,7 @@ "model": "core.streamprofile", "pk": 1, "fields": { - "profile_name": "ffmpeg", + "profile_name": "FFmpeg", "command": "ffmpeg", "parameters": "-i {streamUrl} -c:a copy -c:v copy -f mpegts pipe:1", "is_active": true, @@ -46,7 +46,7 @@ { "model": "core.streamprofile", "fields": { - "profile_name": "streamlink", + "profile_name": "Streamlink", "command": "streamlink", "parameters": "{streamUrl} 
best --stdout", "is_active": true, @@ -56,7 +56,7 @@ { "model": "core.streamprofile", "fields": { - "profile_name": "vlc", + "profile_name": "VLC", "command": "cvlc", "parameters": "-vv -I dummy --no-video-title-show --http-user-agent {userAgent} {streamUrl} --sout #standard{access=file,mux=ts,dst=-}", "is_active": true, From daa919c76472d0a54522e67810e9cab4f021510a Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Tue, 23 Dec 2025 15:52:56 -0600 Subject: [PATCH 126/220] Refactor logging messages in StreamManager for clarity and consistency. Also removed redundant parsing. --- apps/proxy/ts_proxy/stream_manager.py | 14 +++++--------- 1 file changed, 5 insertions(+), 9 deletions(-) diff --git a/apps/proxy/ts_proxy/stream_manager.py b/apps/proxy/ts_proxy/stream_manager.py index cbaa0bc0..e7f752d8 100644 --- a/apps/proxy/ts_proxy/stream_manager.py +++ b/apps/proxy/ts_proxy/stream_manager.py @@ -697,22 +697,18 @@ class StreamManager: # Determine log level based on content if any(keyword in content_lower for keyword in ['error', 'failed', 'cannot', 'invalid', 'corrupt']): - logger.error(f"FFmpeg stderr for channel {self.channel_id}: {content}") + logger.error(f"Stream process error for channel {self.channel_id}: {content}") elif any(keyword in content_lower for keyword in ['warning', 'deprecated', 'ignoring']): - logger.warning(f"FFmpeg stderr for channel {self.channel_id}: {content}") + logger.warning(f"Stream process warning for channel {self.channel_id}: {content}") elif content.startswith('frame=') or 'fps=' in content or 'speed=' in content: # Stats lines - log at trace level to avoid spam - logger.trace(f"FFmpeg stats for channel {self.channel_id}: {content}") + logger.trace(f"Stream stats for channel {self.channel_id}: {content}") elif any(keyword in content_lower for keyword in ['input', 'output', 'stream', 'video', 'audio']): # Stream info - log at info level - logger.info(f"FFmpeg info for channel {self.channel_id}: {content}") - if content.startswith('Input #0'): - # If it's input 0, parse stream info - from .services.channel_service import ChannelService - ChannelService.parse_and_store_stream_info(self.channel_id, content, "input", self.current_stream_id) + logger.info(f"Stream info for channel {self.channel_id}: {content}") else: # Everything else at debug level - logger.debug(f"FFmpeg stderr for channel {self.channel_id}: {content}") + logger.debug(f"Stream process output for channel {self.channel_id}: {content}") except Exception as e: logger.error(f"Error logging stderr content for channel {self.channel_id}: {e}") From 48ebaffaddfbdeb1a10cf73d5c30c7e8a6ee7674 Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Tue, 23 Dec 2025 17:04:09 -0600 Subject: [PATCH 127/220] Cleanup dockerfile a bit. 
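The StreamManager change in the previous patch routes each line of process stderr to a log level by keyword matching. A minimal sketch of that routing idea, shown in JavaScript to match the frontend code in this series (the actual implementation above is Python, and the classifier below is an illustrative stand-in, not project code):

// Illustrative only: map one line of process output to a log level.
const LEVEL_RULES = [
  { level: 'error', keywords: ['error', 'failed', 'cannot', 'invalid', 'corrupt'] },
  { level: 'warning', keywords: ['warning', 'deprecated', 'ignoring'] },
];

function classifyProcessLine(content) {
  const lower = content.toLowerCase();
  for (const { level, keywords } of LEVEL_RULES) {
    if (keywords.some((k) => lower.includes(k))) return level;
  }
  // Progress stats like "frame=  240 fps= 60 speed=2.0x" would spam the log,
  // so they are demoted below debug.
  if (content.startsWith('frame=') || lower.includes('fps=') || lower.includes('speed=')) {
    return 'trace';
  }
  // Stream metadata (inputs, outputs, codecs) is worth surfacing.
  if (['input', 'output', 'stream', 'video', 'audio'].some((k) => lower.includes(k))) {
    return 'info';
  }
  return 'debug';
}

// classifyProcessLine('Invalid data found when processing input') -> 'error'
// classifyProcessLine('frame=  240 fps= 60 speed=2.0x') -> 'trace'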
--- docker/DispatcharrBase | 7 +------ 1 file changed, 1 insertion(+), 6 deletions(-) diff --git a/docker/DispatcharrBase b/docker/DispatcharrBase index 786ead1a..8bda1ed9 100644 --- a/docker/DispatcharrBase +++ b/docker/DispatcharrBase @@ -16,7 +16,7 @@ RUN apt-get update && apt-get install --no-install-recommends -y \ libpcre3 libpcre3-dev libpq-dev procps \ build-essential gcc pciutils \ nginx streamlink comskip \ - vlc-bin vlc-plugin-base vlc-plugin-access-extra \ + vlc-bin vlc-plugin-base \ && apt-get clean && rm -rf /var/lib/apt/lists/* # --- Create Python virtual environment --- @@ -26,11 +26,6 @@ RUN python3.13 -m venv $VIRTUAL_ENV && $VIRTUAL_ENV/bin/pip install --upgrade pi COPY requirements.txt /tmp/requirements.txt RUN $VIRTUAL_ENV/bin/pip install --no-cache-dir -r /tmp/requirements.txt && rm /tmp/requirements.txt -# --- Configure VLC for headless operation --- -# Set VLC environment variables for headless operation -ENV PULSE_SERVER=none \ - DBUS_SESSION_BUS_ADDRESS=/dev/null - # --- Set up Redis 7.x --- RUN curl -fsSL https://packages.redis.io/gpg | gpg --dearmor -o /usr/share/keyrings/redis-archive-keyring.gpg && \ echo "deb [signed-by=/usr/share/keyrings/redis-archive-keyring.gpg] https://packages.redis.io/deb $(lsb_release -cs) main" | \ From 44a122924fb98ca467d6c159d9708b0d187b40c8 Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Tue, 23 Dec 2025 17:37:38 -0600 Subject: [PATCH 128/220] advanced filtering for hiding disabled channels and viewing only empty channels (cherry picked from commit ea38c0b4b88bac1d89c186f4d17cd9f1dde0ef6d) Closes #182 --- CHANGELOG.md | 4 ++ apps/channels/api_views.py | 33 ++++++++++- .../src/components/tables/ChannelsTable.jsx | 25 ++++++++- .../ChannelsTable/ChannelTableHeader.jsx | 55 ++++++++++++++++++- 4 files changed, 110 insertions(+), 7 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index a36db70a..99784402 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -7,6 +7,10 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 ## [Unreleased] +### Added + +- Advanced filtering for Channels table: Filter menu now allows toggling disabled channels visibility (when a profile is selected) and filtering to show only empty channels without streams (Closes #182) + ### Changed - Fixed event viewer arrow direction (previously inverted) — UI behavior corrected. 
Thanks [@drnikcuk](https://github.com/drnikcuk) (Closes #772) diff --git a/apps/channels/api_views.py b/apps/channels/api_views.py index 1f98358e..aebb74a3 100644 --- a/apps/channels/api_views.py +++ b/apps/channels/api_views.py @@ -8,6 +8,7 @@ from drf_yasg.utils import swagger_auto_schema from drf_yasg import openapi from django.shortcuts import get_object_or_404, get_list_or_404 from django.db import transaction +from django.db.models import Q import os, json, requests, logging from urllib.parse import unquote from apps.accounts.permissions import ( @@ -420,10 +421,36 @@ class ChannelViewSet(viewsets.ModelViewSet): group_names = channel_group.split(",") qs = qs.filter(channel_group__name__in=group_names) - if self.request.user.user_level < 10: - qs = qs.filter(user_level__lte=self.request.user.user_level) + filters = {} + q_filters = Q() - return qs + channel_profile_id = self.request.query_params.get("channel_profile_id") + show_disabled_param = self.request.query_params.get("show_disabled", None) + only_streamless = self.request.query_params.get("only_streamless", None) + + if channel_profile_id: + try: + profile_id_int = int(channel_profile_id) + filters["channelprofilemembership__channel_profile_id"] = profile_id_int + + if show_disabled_param is None: + filters["channelprofilemembership__enabled"] = True + except (ValueError, TypeError): + # Ignore invalid profile id values + pass + + if only_streamless: + q_filters &= Q(streams__isnull=True) + + if self.request.user.user_level < 10: + filters["user_level__lte"] = self.request.user.user_level + + if filters: + qs = qs.filter(**filters) + if q_filters: + qs = qs.filter(q_filters) + + return qs.distinct() def get_serializer_context(self): context = super().get_serializer_context() diff --git a/frontend/src/components/tables/ChannelsTable.jsx b/frontend/src/components/tables/ChannelsTable.jsx index 9b9958f7..ee57dabf 100644 --- a/frontend/src/components/tables/ChannelsTable.jsx +++ b/frontend/src/components/tables/ChannelsTable.jsx @@ -289,6 +289,9 @@ const ChannelsTable = ({}) => { const [selectedProfile, setSelectedProfile] = useState( profiles[selectedProfileId] ); + const [showDisabled, setShowDisabled] = useState(true); + const [showOnlyStreamlessChannels, setShowOnlyStreamlessChannels] = + useState(false); const [paginationString, setPaginationString] = useState(''); const [filters, setFilters] = useState({ @@ -369,6 +372,15 @@ const ChannelsTable = ({}) => { params.append('page', pagination.pageIndex + 1); params.append('page_size', pagination.pageSize); params.append('include_streams', 'true'); + if (selectedProfileId !== '0') { + params.append('channel_profile_id', selectedProfileId); + } + if (showDisabled === true) { + params.append('show_disabled', true); + } + if (showOnlyStreamlessChannels === true) { + params.append('only_streamless', true); + } // Apply sorting if (sorting.length > 0) { @@ -401,7 +413,14 @@ const ChannelsTable = ({}) => { pageSize: pagination.pageSize, }); setAllRowIds(ids); - }, [pagination, sorting, debouncedFilters]); + }, [ + pagination, + sorting, + debouncedFilters, + showDisabled, + selectedProfileId, + showOnlyStreamlessChannels, + ]); const stopPropagation = useCallback((e) => { e.stopPropagation(); @@ -1326,6 +1345,10 @@ const ChannelsTable = ({}) => { deleteChannels={deleteChannels} selectedTableIds={table.selectedTableIds} table={table} + showDisabled={showDisabled} + setShowDisabled={setShowDisabled} + showOnlyStreamlessChannels={showOnlyStreamlessChannels} + 
setShowOnlyStreamlessChannels={setShowOnlyStreamlessChannels} /> {/* Table or ghost empty state inside Paper */} diff --git a/frontend/src/components/tables/ChannelsTable/ChannelTableHeader.jsx b/frontend/src/components/tables/ChannelsTable/ChannelTableHeader.jsx index b7e04d7d..460ab12a 100644 --- a/frontend/src/components/tables/ChannelsTable/ChannelTableHeader.jsx +++ b/frontend/src/components/tables/ChannelsTable/ChannelTableHeader.jsx @@ -12,20 +12,22 @@ import { Text, TextInput, Tooltip, - UnstyledButton, useMantineTheme, } from '@mantine/core'; import { ArrowDown01, Binary, - Check, CircleCheck, - Ellipsis, EllipsisVertical, SquareMinus, SquarePen, SquarePlus, Settings, + Eye, + EyeOff, + Filter, + Square, + SquareCheck, } from 'lucide-react'; import API from '../../../api'; import { notifications } from '@mantine/notifications'; @@ -102,6 +104,10 @@ const ChannelTableHeader = ({ editChannel, deleteChannels, selectedTableIds, + showDisabled, + setShowDisabled, + showOnlyStreamlessChannels, + setShowOnlyStreamlessChannels, }) => { const theme = useMantineTheme(); @@ -208,6 +214,14 @@ const ChannelTableHeader = ({ ); }; + const toggleShowDisabled = () => { + setShowDisabled(!showDisabled); + }; + + const toggleShowOnlyStreamlessChannels = () => { + setShowOnlyStreamlessChannels(!showOnlyStreamlessChannels); + }; + return ( @@ -236,6 +250,41 @@ const ChannelTableHeader = ({ }} > + + + + + + + : + } + disabled={selectedProfileId === '0'} + > + + {showDisabled ? 'Hide Disabled' : 'Show Disabled'} + + + + + ) : ( + + ) + } + > + Only Empty Channels + + + + @@ -1477,34 +1473,34 @@ export default function TVChannelGuide({ startDate, endDate }) { {/* Guide container with headers and scrollable content */} {/* Logo header - Sticky, non-scrollable */} {/* Logo header cell - sticky in both directions */} {/* Timeline header with its own scrollbar */} @@ -1512,26 +1508,26 @@ export default function TVChannelGuide({ startDate, endDate }) { style={{ flex: 1, overflow: 'hidden', - position: 'relative', }} + pos={'relative'} > {' '} {hourTimeline.map((hourData) => { @@ -1541,15 +1537,15 @@ export default function TVChannelGuide({ startDate, endDate }) { handleTimeClick(time, e)} > {/* Remove the special day label for new days since we'll show day for all hours */} @@ -1558,25 +1554,23 @@ export default function TVChannelGuide({ startDate, endDate }) { {/* Show day above time for every hour using the same format */} {formatDayLabel(time)}{' '} {/* Use same formatDayLabel function for all hours */} @@ -1590,38 +1584,38 @@ export default function TVChannelGuide({ startDate, endDate }) { {/* Hour boundary marker - more visible */} {/* Quarter hour tick marks */} {[15, 30, 45].map((minute) => ( ))} @@ -1638,22 +1632,22 @@ export default function TVChannelGuide({ startDate, endDate }) { ref={guideContainerRef} style={{ flex: 1, - position: 'relative', overflow: 'hidden', }} + pos={'relative'} > {nowPosition >= 0 && ( )} @@ -1674,13 +1668,7 @@ export default function TVChannelGuide({ startDate, endDate }) { {GuideRow} ) : ( - + No channels match your filters + + + + + + {recording && ( + <> + + + + )} + + {existingRuleMode && ( + + )} + + + ); +} diff --git a/frontend/src/components/forms/SeriesRecordingModal.jsx b/frontend/src/components/forms/SeriesRecordingModal.jsx new file mode 100644 index 00000000..1c10e4bd --- /dev/null +++ b/frontend/src/components/forms/SeriesRecordingModal.jsx @@ -0,0 +1,91 @@ +import React from 'react'; +import { Modal, Stack, Text, Flex, Group, Button } from '@mantine/core'; 
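// Aside on the ChannelViewSet filtering added in PATCH 128 above: the channel
// list endpoint now reads three extra query parameters. channel_profile_id
// narrows the queryset to one profile's memberships; when it is set and
// show_disabled is omitted, only enabled memberships are returned;
// only_streamless keeps just the channels with no streams attached. A hedged
// client-side sketch (the endpoint path and the bare fetch() call are
// assumptions for illustration, not this app's API module):
//
//   async function fetchEmptyChannels(profileId) {
//     const params = new URLSearchParams({ include_streams: 'true' });
//     params.append('channel_profile_id', String(profileId));
//     params.append('only_streamless', 'true');
//     // omit show_disabled so disabled channels stay hidden
//     const res = await fetch(`/api/channels/channels/?${params}`); // path assumed
//     return res.json();
//   }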
+import { notifications } from '@mantine/notifications'; +import useChannelsStore from '../../store/channels.jsx'; +import { deleteSeriesAndRule } from '../../utils/cards/RecordingCardUtils.js'; +import { evaluateSeriesRulesByTvgId, fetchRules } from '../../pages/guideUtils.js'; + +export default function SeriesRecordingModal({ + opened, + onClose, + rules, + onRulesUpdate +}) { + const handleEvaluateNow = async (r) => { + await evaluateSeriesRulesByTvgId(r.tvg_id); + try { + await useChannelsStore.getState().fetchRecordings(); + } catch (error) { + console.warn('Failed to refresh recordings after evaluation', error); + } + notifications.show({ + title: 'Evaluated', + message: 'Checked for episodes', + }); + }; + + const handleRemoveSeries = async (r) => { + await deleteSeriesAndRule({ tvg_id: r.tvg_id, title: r.title }); + try { + await useChannelsStore.getState().fetchRecordings(); + } catch (error) { + console.warn('Failed to refresh recordings after bulk removal', error); + } + const updated = await fetchRules(); + onRulesUpdate(updated); + }; + + return ( + + + {(!rules || rules.length === 0) && ( + + No series rules configured + + )} + {rules && rules.map((r) => ( + + + {r.title || r.tvg_id} —{' '} + {r.mode === 'new' ? 'New episodes' : 'Every episode'} + + + + + + + ))} + + + ); +} diff --git a/frontend/src/pages/Guide.jsx b/frontend/src/pages/Guide.jsx index 4a4ee71e..a382fffe 100644 --- a/frontend/src/pages/Guide.jsx +++ b/frontend/src/pages/Guide.jsx @@ -6,241 +6,80 @@ import React, { useRef, useCallback, } from 'react'; -import dayjs from 'dayjs'; -import API from '../api'; import useChannelsStore from '../store/channels'; import useLogosStore from '../store/logos'; -import logo from '../images/logo.png'; import useVideoStore from '../store/useVideoStore'; // NEW import import { notifications } from '@mantine/notifications'; import useSettingsStore from '../store/settings'; import { - Title, - Box, - Flex, - Button, - Text, - Paper, - Group, - TextInput, - Select, ActionIcon, + Box, + Button, + Flex, + Group, + Paper, + Select, + Text, + TextInput, + Title, Tooltip, - Transition, - Modal, - Stack, } from '@mantine/core'; -import { Search, X, Clock, Video, Calendar, Play } from 'lucide-react'; +import { Calendar, Clock, Search, Video, X } from 'lucide-react'; import './guide.css'; import useEPGsStore from '../store/epgs'; -import useLocalStorage from '../hooks/useLocalStorage'; import { useElementSize } from '@mantine/hooks'; import { VariableSizeList } from 'react-window'; import { - PROGRAM_HEIGHT, - EXPANDED_PROGRAM_HEIGHT, buildChannelIdMap, - mapProgramsByChannel, + calculateDesiredScrollPosition, + calculateEarliestProgramStart, + calculateEnd, + calculateHourTimeline, + calculateLatestProgramEnd, + calculateLeftScrollPosition, + calculateNowPosition, + calculateScrollPosition, + calculateScrollPositionByTimeClick, + calculateStart, + CHANNEL_WIDTH, computeRowHeights, + createRecording, + createSeriesRule, + evaluateSeriesRule, + EXPANDED_PROGRAM_HEIGHT, + fetchPrograms, + fetchRules, + filterGuideChannels, + formatTime, + getGroupOptions, + getProfileOptions, + getRuleByProgram, + HOUR_WIDTH, + mapChannelsById, + mapProgramsByChannel, + mapRecordingsByProgramId, + matchChannelByTvgId, + MINUTE_BLOCK_WIDTH, + MINUTE_INCREMENT, + PROGRAM_HEIGHT, + sortChannels, } from './guideUtils'; - -/** Layout constants */ -const CHANNEL_WIDTH = 120; // Width of the channel/logo column -const HOUR_WIDTH = 450; // Increased from 300 to 450 to make each program wider -const 
MINUTE_INCREMENT = 15; // For positioning programs every 15 min -const MINUTE_BLOCK_WIDTH = HOUR_WIDTH / (60 / MINUTE_INCREMENT); - -const GuideRow = React.memo(({ index, style, data }) => { - const { - filteredChannels, - programsByChannelId, - expandedProgramId, - rowHeights, - logos, - hoveredChannelId, - setHoveredChannelId, - renderProgram, - handleLogoClick, - contentWidth, - } = data; - - const channel = filteredChannels[index]; - if (!channel) { - return null; - } - - const channelPrograms = programsByChannelId.get(channel.id) || []; - const rowHeight = - rowHeights[index] ?? - (channelPrograms.some((program) => program.id === expandedProgramId) - ? EXPANDED_PROGRAM_HEIGHT - : PROGRAM_HEIGHT); - - return ( -
- - handleLogoClick(channel, event)} - onMouseEnter={() => setHoveredChannelId(channel.id)} - onMouseLeave={() => setHoveredChannelId(null)} - > - {hoveredChannelId === channel.id && ( - - - - )} - - - - {channel.name} - - - - {channel.channel_number || '-'} - - - - - - {channelPrograms.length > 0 ? ( - channelPrograms.map((program) => - renderProgram(program, undefined, channel) - ) - ) : ( - <> - {Array.from({ length: Math.ceil(24 / 2) }).map( - (_, placeholderIndex) => ( - - No program data - - ) - )} - - )} - - -
- ); -}); +import { + getShowVideoUrl, +} from '../utils/cards/RecordingCardUtils.js'; +import { + add, + convertToMs, + format, + getNow, + initializeTime, + startOfDay, + useDateTimeFormat, +} from '../utils/dateTimeUtils.js'; +import GuideRow from '../components/GuideRow.jsx'; +import HourTimeline from '../components/HourTimeline'; +import ProgramRecordingModal from '../components/forms/ProgramRecordingModal'; +import SeriesRecordingModal from '../components/forms/SeriesRecordingModal'; export default function TVChannelGuide({ startDate, endDate }) { const channels = useChannelsStore((s) => s.channels); @@ -254,8 +93,7 @@ export default function TVChannelGuide({ startDate, endDate }) { const [programs, setPrograms] = useState([]); const [guideChannels, setGuideChannels] = useState([]); - const [filteredChannels, setFilteredChannels] = useState([]); - const [now, setNow] = useState(dayjs()); + const [now, setNow] = useState(getNow()); const [expandedProgramId, setExpandedProgramId] = useState(null); // Track expanded program const [recordingForProgram, setRecordingForProgram] = useState(null); const [recordChoiceOpen, setRecordChoiceOpen] = useState(false); @@ -290,81 +128,29 @@ export default function TVChannelGuide({ startDate, endDate }) { // Load program data once useEffect(() => { - if (!Object.keys(channels).length === 0) { + if (Object.keys(channels).length === 0) { console.warn('No channels provided or empty channels array'); notifications.show({ title: 'No channels available', color: 'red.5' }); return; } - const fetchPrograms = async () => { - console.log('Fetching program grid...'); - const fetched = await API.getGrid(); // GETs your EPG grid - console.log(`Received ${fetched.length} programs`); + const sortedChannels = sortChannels(channels); - // Include ALL channels, sorted by channel number - don't filter by EPG data - const sortedChannels = Object.values(channels).sort( - (a, b) => - (a.channel_number || Infinity) - (b.channel_number || Infinity) - ); - - console.log(`Using all ${sortedChannels.length} available channels`); - - const processedPrograms = fetched.map((program) => { - const start = dayjs(program.start_time); - const end = dayjs(program.end_time); - return { - ...program, - startMs: start.valueOf(), - endMs: end.valueOf(), - }; - }); - - setGuideChannels(sortedChannels); - setFilteredChannels(sortedChannels); // Initialize filtered channels - setPrograms(processedPrograms); - }; - - fetchPrograms(); + setGuideChannels(sortedChannels); + fetchPrograms().then((data) => setPrograms(data)); }, [channels]); // Apply filters when search, group, or profile changes - useEffect(() => { - if (!guideChannels.length) return; + const filteredChannels = useMemo(() => { + if (!guideChannels.length) return []; - let result = [...guideChannels]; - - // Apply search filter - if (searchQuery) { - const query = searchQuery.toLowerCase(); - result = result.filter((channel) => - channel.name.toLowerCase().includes(query) - ); - } - - // Apply channel group filter - if (selectedGroupId !== 'all') { - result = result.filter( - (channel) => channel.channel_group_id === parseInt(selectedGroupId) - ); - } - - // Apply profile filter - if (selectedProfileId !== 'all') { - // Get the profile's enabled channels - const profileChannels = profiles[selectedProfileId]?.channels || []; - // Check if channels is a Set (from the error message, it likely is) - const enabledChannelIds = Array.isArray(profileChannels) - ? 
profileChannels.filter((pc) => pc.enabled).map((pc) => pc.id) - : profiles[selectedProfileId]?.channels instanceof Set - ? Array.from(profiles[selectedProfileId].channels) - : []; - - result = result.filter((channel) => - enabledChannelIds.includes(channel.id) - ); - } - - setFilteredChannels(result); + return filterGuideChannels( + guideChannels, + searchQuery, + selectedGroupId, + selectedProfileId, + profiles + ); }, [ searchQuery, selectedGroupId, @@ -374,61 +160,44 @@ export default function TVChannelGuide({ startDate, endDate }) { ]); // Use start/end from props or default to "today at midnight" +24h - const defaultStart = dayjs(startDate || dayjs().startOf('day')); - const defaultEnd = endDate ? dayjs(endDate) : defaultStart.add(24, 'hour'); + const defaultStart = initializeTime(startDate || startOfDay(getNow())); + const defaultEnd = endDate + ? initializeTime(endDate) + : add(defaultStart, 24, 'hour'); // Expand timeline if needed based on actual earliest/ latest program - const earliestProgramStart = useMemo(() => { - if (!programs.length) return defaultStart; - return programs.reduce((acc, p) => { - const s = dayjs(p.start_time); - return s.isBefore(acc) ? s : acc; - }, defaultStart); - }, [programs, defaultStart]); + const earliestProgramStart = useMemo( + () => calculateEarliestProgramStart(programs, defaultStart), + [programs, defaultStart] + ); - const latestProgramEnd = useMemo(() => { - if (!programs.length) return defaultEnd; - return programs.reduce((acc, p) => { - const e = dayjs(p.end_time); - return e.isAfter(acc) ? e : acc; - }, defaultEnd); - }, [programs, defaultEnd]); + const latestProgramEnd = useMemo( + () => calculateLatestProgramEnd(programs, defaultEnd), + [programs, defaultEnd] + ); - const start = earliestProgramStart.isBefore(defaultStart) - ? earliestProgramStart - : defaultStart; - const end = latestProgramEnd.isAfter(defaultEnd) - ? latestProgramEnd - : defaultEnd; + const start = calculateStart(earliestProgramStart, defaultStart); + const end = calculateEnd(latestProgramEnd, defaultEnd); const channelIdByTvgId = useMemo( () => buildChannelIdMap(guideChannels, tvgsById, epgs), [guideChannels, tvgsById, epgs] ); - const channelById = useMemo(() => { - const map = new Map(); - guideChannels.forEach((channel) => { - map.set(channel.id, channel); - }); - return map; - }, [guideChannels]); + const channelById = useMemo( + () => mapChannelsById(guideChannels), + [guideChannels] + ); const programsByChannelId = useMemo( () => mapProgramsByChannel(programs, channelIdByTvgId), [programs, channelIdByTvgId] ); - const recordingsByProgramId = useMemo(() => { - const map = new Map(); - (recordings || []).forEach((recording) => { - const programId = recording?.custom_properties?.program?.id; - if (programId != null) { - map.set(programId, recording); - } - }); - return map; - }, [recordings]); + const recordingsByProgramId = useMemo( + () => mapRecordingsByProgramId(recordings), + [recordings] + ); const rowHeights = useMemo( () => @@ -445,62 +214,19 @@ export default function TVChannelGuide({ startDate, endDate }) { [rowHeights] ); - const [timeFormatSetting] = useLocalStorage('time-format', '12h'); - const [dateFormatSetting] = useLocalStorage('date-format', 'mdy'); - // Use user preference for time format - const timeFormat = timeFormatSetting === '12h' ? 'h:mm A' : 'HH:mm'; - const dateFormat = dateFormatSetting === 'mdy' ? 
'MMMM D' : 'D MMMM'; + const [timeFormat, dateFormat] = useDateTimeFormat(); // Format day label using relative terms when possible (Today, Tomorrow, etc) const formatDayLabel = useCallback( - (time) => { - const today = dayjs().startOf('day'); - const tomorrow = today.add(1, 'day'); - const weekLater = today.add(7, 'day'); - - const day = time.startOf('day'); - - if (day.isSame(today, 'day')) { - return 'Today'; - } else if (day.isSame(tomorrow, 'day')) { - return 'Tomorrow'; - } else if (day.isBefore(weekLater)) { - // Within a week, show day name - return time.format('dddd'); - } else { - // Beyond a week, show month and day - return time.format(dateFormat); - } - }, + (time) => formatTime(time, dateFormat), [dateFormat] ); // Hourly marks with day labels - const hourTimeline = useMemo(() => { - const hours = []; - let current = start; - let currentDay = null; - - while (current.isBefore(end)) { - // Check if we're entering a new day - const day = current.startOf('day'); - const isNewDay = !currentDay || !day.isSame(currentDay, 'day'); - - if (isNewDay) { - currentDay = day; - } - - // Add day information to our hour object - hours.push({ - time: current, - isNewDay, - dayLabel: formatDayLabel(current), - }); - - current = current.add(1, 'hour'); - } - return hours; - }, [start, end, formatDayLabel]); + const hourTimeline = useMemo( + () => calculateHourTimeline(start, end, formatDayLabel), + [start, end, formatDayLabel] + ); useEffect(() => { const node = guideRef.current; @@ -542,17 +268,16 @@ export default function TVChannelGuide({ startDate, endDate }) { // Update "now" every second useEffect(() => { const interval = setInterval(() => { - setNow(dayjs()); + setNow(getNow()); }, 1000); return () => clearInterval(interval); }, []); // Pixel offset for the "now" vertical line - const nowPosition = useMemo(() => { - if (now.isBefore(start) || now.isAfter(end)) return -1; - const minutesSinceStart = now.diff(start, 'minute'); - return (minutesSinceStart / MINUTE_INCREMENT) * MINUTE_BLOCK_WIDTH; - }, [now, start, end]); + const nowPosition = useMemo( + () => calculateNowPosition(now, start, end), + [now, start, end] + ); useEffect(() => { const tvGuide = tvGuideRef.current; @@ -765,31 +490,14 @@ export default function TVChannelGuide({ startDate, endDate }) { // Scroll to the nearest half-hour mark ONLY on initial load useEffect(() => { if (programs.length > 0 && !initialScrollComplete) { - const roundedNow = - now.minute() < 30 - ? 
now.startOf('hour') - : now.startOf('hour').add(30, 'minute'); - const nowOffset = roundedNow.diff(start, 'minute'); - const scrollPosition = - (nowOffset / MINUTE_INCREMENT) * MINUTE_BLOCK_WIDTH - - MINUTE_BLOCK_WIDTH; - - const scrollPos = Math.max(scrollPosition, 0); - syncScrollLeft(scrollPos); + syncScrollLeft(calculateScrollPosition(now, start)); setInitialScrollComplete(true); } }, [programs, start, now, initialScrollComplete, syncScrollLeft]); const findChannelByTvgId = useCallback( - (tvgId) => { - const channelIds = channelIdByTvgId.get(String(tvgId)); - if (!channelIds || channelIds.length === 0) { - return null; - } - // Return the first channel that matches this TVG ID - return channelById.get(channelIds[0]) || null; - }, + (tvgId) => matchChannelByTvgId(channelIdByTvgId, channelById, tvgId), [channelById, channelIdByTvgId] ); @@ -798,19 +506,14 @@ export default function TVChannelGuide({ startDate, endDate }) { setRecordChoiceProgram(program); setRecordChoiceOpen(true); try { - const rules = await API.listSeriesRules(); - const rule = (rules || []).find( - (r) => - String(r.tvg_id) === String(program.tvg_id) && - (!r.title || r.title === program.title) - ); + const rules = await fetchRules(); + const rule = getRuleByProgram(rules, program); setExistingRuleMode(rule ? rule.mode : null); } catch (error) { console.warn('Failed to fetch series rules metadata', error); } - const existingRecording = recordingsByProgramId.get(program.id) || null; - setRecordingForProgram(existingRecording); + setRecordingForProgram(recordingsByProgramId.get(program.id) || null); }, [recordingsByProgramId] ); @@ -827,24 +530,15 @@ export default function TVChannelGuide({ startDate, endDate }) { return; } - await API.createRecording({ - channel: `${channel.id}`, - start_time: program.start_time, - end_time: program.end_time, - custom_properties: { program }, - }); + await createRecording(channel, program); notifications.show({ title: 'Recording scheduled' }); }, [findChannelByTvgId] ); const saveSeriesRule = useCallback(async (program, mode) => { - await API.createSeriesRule({ - tvg_id: program.tvg_id, - mode, - title: program.title, - }); - await API.evaluateSeriesRules(program.tvg_id); + await createSeriesRule(program, mode); + await evaluateSeriesRule(program); try { await useChannelsStore.getState().fetchRecordings(); } catch (error) { @@ -861,7 +555,7 @@ export default function TVChannelGuide({ startDate, endDate }) { const openRules = useCallback(async () => { setRulesOpen(true); try { - const r = await API.listSeriesRules(); + const r = await fetchRules(); setRules(r); } catch (error) { console.warn('Failed to load series rules', error); @@ -878,12 +572,7 @@ export default function TVChannelGuide({ startDate, endDate }) { return; } - let vidUrl = `/proxy/ts/stream/${matched.uuid}`; - if (env_mode === 'dev') { - vidUrl = `${window.location.protocol}//${window.location.hostname}:5656${vidUrl}`; - } - - showVideo(vidUrl); + showVideo(getShowVideoUrl(matched, env_mode)); }, [env_mode, findChannelByTvgId, showVideo] ); @@ -892,12 +581,7 @@ export default function TVChannelGuide({ startDate, endDate }) { (channel, event) => { event.stopPropagation(); - let vidUrl = `/proxy/ts/stream/${channel.uuid}`; - if (env_mode === 'dev') { - vidUrl = `${window.location.protocol}//${window.location.hostname}:5656${vidUrl}`; - } - - showVideo(vidUrl); + showVideo(getShowVideoUrl(channel, env_mode)); }, [env_mode, showVideo] ); @@ -906,13 +590,6 @@ export default function TVChannelGuide({ startDate, endDate }) { 
(program, event) => { event.stopPropagation(); - const programStartMs = - program.startMs ?? dayjs(program.start_time).valueOf(); - const startOffsetMinutes = (programStartMs - start.valueOf()) / 60000; - const leftPx = - (startOffsetMinutes / MINUTE_INCREMENT) * MINUTE_BLOCK_WIDTH; - const desiredScrollPosition = Math.max(0, leftPx - 20); - if (expandedProgramId === program.id) { setExpandedProgramId(null); setRecordingForProgram(null); @@ -921,6 +598,9 @@ export default function TVChannelGuide({ startDate, endDate }) { setRecordingForProgram(recordingsByProgramId.get(program.id) || null); } + const leftPx = calculateLeftScrollPosition(program, start); + const desiredScrollPosition = calculateDesiredScrollPosition(leftPx); + const guideNode = guideRef.current; if (guideNode) { const currentScrollPosition = guideNode.scrollLeft; @@ -948,16 +628,7 @@ export default function TVChannelGuide({ startDate, endDate }) { return; } - const roundedNow = - now.minute() < 30 - ? now.startOf('hour') - : now.startOf('hour').add(30, 'minute'); - const nowOffset = roundedNow.diff(start, 'minute'); - const scrollPosition = - (nowOffset / MINUTE_INCREMENT) * MINUTE_BLOCK_WIDTH - MINUTE_BLOCK_WIDTH; - - const scrollPos = Math.max(scrollPosition, 0); - syncScrollLeft(scrollPos, 'smooth'); + syncScrollLeft(calculateScrollPosition(now, start), 'smooth'); }, [now, nowPosition, start, syncScrollLeft]); const handleTimelineScroll = useCallback(() => { @@ -1000,44 +671,26 @@ export default function TVChannelGuide({ startDate, endDate }) { const handleTimeClick = useCallback( (clickedTime, event) => { - const rect = event.currentTarget.getBoundingClientRect(); - const clickPositionX = event.clientX - rect.left; - const percentageAcross = clickPositionX / rect.width; - const minuteWithinHour = Math.floor(percentageAcross * 60); - - let snappedMinute; - if (minuteWithinHour < 7.5) { - snappedMinute = 0; - } else if (minuteWithinHour < 22.5) { - snappedMinute = 15; - } else if (minuteWithinHour < 37.5) { - snappedMinute = 30; - } else if (minuteWithinHour < 52.5) { - snappedMinute = 45; - } else { - snappedMinute = 0; - clickedTime = clickedTime.add(1, 'hour'); - } - - const snappedTime = clickedTime.minute(snappedMinute); - const snappedOffset = snappedTime.diff(start, 'minute'); - const scrollPosition = - (snappedOffset / MINUTE_INCREMENT) * MINUTE_BLOCK_WIDTH; - - syncScrollLeft(scrollPosition, 'smooth'); + syncScrollLeft( + calculateScrollPositionByTimeClick(event, clickedTime, start), + 'smooth' + ); }, [start, syncScrollLeft] ); const renderProgram = useCallback( (program, channelStart = start, channel = null) => { - const programStartMs = - program.startMs ?? dayjs(program.start_time).valueOf(); - const programEndMs = program.endMs ?? 
dayjs(program.end_time).valueOf(); - const programStart = dayjs(programStartMs); - const programEnd = dayjs(programEndMs); + const { + programStart, + programEnd, + startMs: programStartMs, + endMs: programEndMs, + isLive, + isPast, + } = program; const startOffsetMinutes = - (programStartMs - channelStart.valueOf()) / 60000; + (programStartMs - convertToMs(channelStart)) / 60000; const durationMinutes = (programEndMs - programStartMs) / 60000; const leftPx = (startOffsetMinutes / MINUTE_INCREMENT) * MINUTE_BLOCK_WIDTH; @@ -1048,10 +701,7 @@ export default function TVChannelGuide({ startDate, endDate }) { const recording = recordingsByProgramId.get(program.id); - const isLive = now.isAfter(programStart) && now.isBefore(programEnd); - const isPast = now.isAfter(programEnd); const isExpanded = expandedProgramId === program.id; - const rowHeight = isExpanded ? EXPANDED_PROGRAM_HEIGHT : PROGRAM_HEIGHT; const MIN_EXPANDED_WIDTH = 450; const expandedWidthPx = Math.max(widthPx, MIN_EXPANDED_WIDTH); @@ -1069,6 +719,38 @@ export default function TVChannelGuide({ startDate, endDate }) { textOffsetLeft = Math.min(visibleStart, maxOffset); } + const RecordButton = () => { + return ( + + ); + }; + const WatchNow = () => { + return ( + + ); + }; return ( - {programStart.format(timeFormat)} -{' '} - {programEnd.format(timeFormat)} + {format(programStart, timeFormat)} -{' '} + {format(programEnd, timeFormat)} @@ -1183,35 +865,9 @@ export default function TVChannelGuide({ startDate, endDate }) { {isExpanded && ( - {!isPast && ( - - )} + {!isPast && } - {isLive && ( - - )} + {isLive && } )} @@ -1294,49 +950,13 @@ export default function TVChannelGuide({ startDate, endDate }) { }, [searchQuery, selectedGroupId, selectedProfileId]); // Create group options for dropdown - but only include groups used by guide channels - const groupOptions = useMemo(() => { - const options = [{ value: 'all', label: 'All Channel Groups' }]; - - if (channelGroups && guideChannels.length > 0) { - // Get unique channel group IDs from the channels that have program data - const usedGroupIds = new Set(); - guideChannels.forEach((channel) => { - if (channel.channel_group_id) { - usedGroupIds.add(channel.channel_group_id); - } - }); - // Only add groups that are actually used by channels in the guide - Object.values(channelGroups) - .filter((group) => usedGroupIds.has(group.id)) - .sort((a, b) => a.name.localeCompare(b.name)) // Sort alphabetically - .forEach((group) => { - options.push({ - value: group.id.toString(), - label: group.name, - }); - }); - } - return options; - }, [channelGroups, guideChannels]); + const groupOptions = useMemo( + () => getGroupOptions(channelGroups, guideChannels), + [channelGroups, guideChannels] + ); // Create profile options for dropdown - const profileOptions = useMemo(() => { - const options = [{ value: 'all', label: 'All Profiles' }]; - - if (profiles) { - Object.values(profiles).forEach((profile) => { - if (profile.id !== '0') { - // Skip the 'All' default profile - options.push({ - value: profile.id.toString(), - label: profile.name, - }); - } - }); - } - - return options; - }, [profiles]); + const profileOptions = useMemo(() => getProfileOptions(profiles), [profiles]); // Clear all filters const clearFilters = () => { @@ -1355,6 +975,13 @@ export default function TVChannelGuide({ startDate, endDate }) { setSelectedProfileId(value || 'all'); }; + const handleClearSearchQuery = () => { + setSearchQuery(''); + }; + const handleChangeSearchQuery = (e) => { + setSearchQuery(e.target.value); + }; + 
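// A worked check of the pixel math renderProgram and the scroll helpers
// delegate to guideUtils.js (the constants mirror the exported values; this
// block is illustrative only, not part of the component):
const EXAMPLE_HOUR_WIDTH = 450;
const EXAMPLE_MINUTE_INCREMENT = 15;
// 450 / (60 / 15) = 112.5px per 15-minute block
const EXAMPLE_BLOCK_WIDTH = EXAMPLE_HOUR_WIDTH / (60 / EXAMPLE_MINUTE_INCREMENT);

// A program starting 90 minutes after guide start and running 30 minutes:
const exampleLeftPx = (90 / EXAMPLE_MINUTE_INCREMENT) * EXAMPLE_BLOCK_WIDTH; // 6 * 112.5 = 675
const exampleWidthPx = (30 / EXAMPLE_MINUTE_INCREMENT) * EXAMPLE_BLOCK_WIDTH; // 2 * 112.5 = 225

// calculateScrollPositionByTimeClick snaps clicks the same way: a click 40%
// across an hour cell lands at minute 0.4 * 60 = 24, and
// Math.round(24 / 15) * 15 = 30, i.e. two blocks (225px) past that hour mark.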
return ( @@ -1373,10 +1000,10 @@ export default function TVChannelGuide({ startDate, endDate }) { direction="column" style={{ zIndex: 1000, + position: 'sticky' }} - c={'#fff'} + c='#ffffff' p={'12px 20px'} - pos={'sticky'} top={0} > {/* Title and current time */} @@ -1386,7 +1013,7 @@ export default function TVChannelGuide({ startDate, endDate }) { - {now.format(`dddd, ${dateFormat}, YYYY • ${timeFormat}`)} + {format(now, `dddd, ${dateFormat}, YYYY • ${timeFormat}`)} setSearchQuery(e.target.value)} + onChange={handleChangeSearchQuery} w={'250px'} // Reduced width from flex: 1 leftSection={} rightSection={ searchQuery ? ( setSearchQuery('')} + onClick={handleClearSearchQuery} variant="subtle" color="gray" size="sm" @@ -1458,12 +1085,12 @@ export default function TVChannelGuide({ startDate, endDate }) { backgroundColor: '#245043', }} bd={'1px solid #3BA882'} - c={'#FFFFFF'} + color='#FFFFFF' > Series Rules - + {filteredChannels.length}{' '} {filteredChannels.length === 1 ? 'channel' : 'channels'} @@ -1482,9 +1109,9 @@ export default function TVChannelGuide({ startDate, endDate }) { {/* Logo header cell - sticky in both directions */} @@ -1499,7 +1126,7 @@ export default function TVChannelGuide({ startDate, endDate }) { w={CHANNEL_WIDTH} miw={CHANNEL_WIDTH} h={'40px'} - pos={'sticky'} + pos='sticky' left={0} /> @@ -1509,7 +1136,7 @@ export default function TVChannelGuide({ startDate, endDate }) { flex: 1, overflow: 'hidden', }} - pos={'relative'} + pos='relative' > @@ -1529,99 +1156,12 @@ export default function TVChannelGuide({ startDate, endDate }) { display={'flex'} w={hourTimeline.length * HOUR_WIDTH} > - {' '} - {hourTimeline.map((hourData) => { - const { time, isNewDay } = hourData; - - return ( - handleTimeClick(time, e)} - > - {/* Remove the special day label for new days since we'll show day for all hours */} - - {/* Position time label at the left border of each hour block */} - - {/* Show day above time for every hour using the same format */} - - {formatDayLabel(time)}{' '} - {/* Use same formatDayLabel function for all hours */} - - {time.format(timeFormat)} - - {/*time.format('A')*/} - - - - {/* Hour boundary marker - more visible */} - - - {/* Quarter hour tick marks */} - - {[15, 30, 45].map((minute) => ( - - ))} - - - ); - })} + @@ -1634,7 +1174,7 @@ export default function TVChannelGuide({ startDate, endDate }) { flex: 1, overflow: 'hidden', }} - pos={'relative'} + pos='relative' > {nowPosition >= 0 && ( ) : ( - + No channels match your filters - - - {recordingForProgram && ( - <> - - - - )} - {existingRuleMode && ( - - )} - - + program={recordChoiceProgram} + recording={recordingForProgram} + existingRuleMode={existingRuleMode} + onRecordOne={() => recordOne(recordChoiceProgram)} + onRecordSeriesAll={() => saveSeriesRule(recordChoiceProgram, 'all')} + onRecordSeriesNew={() => saveSeriesRule(recordChoiceProgram, 'new')} + onExistingRuleModeChange={setExistingRuleMode} + /> )} {/* Series rules modal */} {rulesOpen && ( - setRulesOpen(false)} - title="Series Recording Rules" - centered - radius="md" - zIndex={9999} - overlayProps={{ color: '#000', backgroundOpacity: 0.55, blur: 0 }} - styles={{ - content: { backgroundColor: '#18181B', color: 'white' }, - header: { backgroundColor: '#18181B', color: 'white' }, - title: { color: 'white' }, - }} - > - - {(!rules || rules.length === 0) && ( - - No series rules configured - - )} - {rules && - rules.map((r) => ( - - - {r.title || r.tvg_id} —{' '} - {r.mode === 'new' ? 
'New episodes' : 'Every episode'} - - - - - - - ))} - - + rules={rules} + onRulesUpdate={setRules} + /> )} ); diff --git a/frontend/src/pages/guideUtils.js b/frontend/src/pages/guideUtils.js index 1f4ff671..68bb74b2 100644 --- a/frontend/src/pages/guideUtils.js +++ b/frontend/src/pages/guideUtils.js @@ -1,7 +1,26 @@ -import dayjs from 'dayjs'; +import { + convertToMs, + initializeTime, + startOfDay, + isBefore, + isAfter, + isSame, + add, + diff, + format, + getNow, + getNowMs, + roundToNearest +} from '../utils/dateTimeUtils.js'; +import API from '../api.js'; export const PROGRAM_HEIGHT = 90; export const EXPANDED_PROGRAM_HEIGHT = 180; +/** Layout constants */ +export const CHANNEL_WIDTH = 120; // Width of the channel/logo column +export const HOUR_WIDTH = 450; // Increased from 300 to 450 to make each program wider +export const MINUTE_INCREMENT = 15; // For positioning programs every 15 min +export const MINUTE_BLOCK_WIDTH = HOUR_WIDTH / (60 / MINUTE_INCREMENT); export function buildChannelIdMap(channels, tvgsById, epgs = {}) { const map = new Map(); @@ -38,25 +57,32 @@ export function buildChannelIdMap(channels, tvgsById, epgs = {}) { return map; } -export function mapProgramsByChannel(programs, channelIdByTvgId) { +export const mapProgramsByChannel = (programs, channelIdByTvgId) => { if (!programs?.length || !channelIdByTvgId?.size) { return new Map(); } const map = new Map(); + const nowMs = getNowMs(); + programs.forEach((program) => { const channelIds = channelIdByTvgId.get(String(program.tvg_id)); if (!channelIds || channelIds.length === 0) { return; } - const startMs = program.startMs ?? dayjs(program.start_time).valueOf(); - const endMs = program.endMs ?? dayjs(program.end_time).valueOf(); + const startMs = program.startMs ?? convertToMs(program.start_time); + const endMs = program.endMs ?? convertToMs(program.end_time); const programData = { ...program, startMs, endMs, + programStart: initializeTime(program.startMs), + programEnd: initializeTime(program.endMs), + // Precompute live/past status + isLive: nowMs >= program.startMs && nowMs < program.endMs, + isPast: nowMs >= program.endMs, }; // Add this program to all channels that share the same TVG ID @@ -73,7 +99,7 @@ export function mapProgramsByChannel(programs, channelIdByTvgId) { }); return map; -} +}; export function computeRowHeights( filteredChannels, @@ -94,3 +120,282 @@ export function computeRowHeights( return expanded ? 
expandedHeight : defaultHeight; }); } + +export const fetchPrograms = async () => { + console.log('Fetching program grid...'); + const fetched = await API.getGrid(); // GETs your EPG grid + console.log(`Received ${fetched.length} programs`); + + return fetched.map((program) => { + return { + ...program, + startMs: convertToMs(program.start_time), + endMs: convertToMs(program.end_time), + }; + }); +}; + +export const sortChannels = (channels) => { + // Include ALL channels, sorted by channel number - don't filter by EPG data + const sortedChannels = Object.values(channels).sort( + (a, b) => + (a.channel_number || Infinity) - (b.channel_number || Infinity) + ); + + console.log(`Using all ${sortedChannels.length} available channels`); + return sortedChannels; +} + +export const filterGuideChannels = (guideChannels, searchQuery, selectedGroupId, selectedProfileId, profiles) => { + return guideChannels.filter((channel) => { + // Search filter + if (searchQuery) { + if (!channel.name.toLowerCase().includes(searchQuery.toLowerCase())) return false; + } + + // Channel group filter + if (selectedGroupId !== 'all') { + if (channel.channel_group_id !== parseInt(selectedGroupId)) return false; + } + + // Profile filter + if (selectedProfileId !== 'all') { + const profileChannels = profiles[selectedProfileId]?.channels || []; + const enabledChannelIds = Array.isArray(profileChannels) + ? profileChannels.filter((pc) => pc.enabled).map((pc) => pc.id) + : profiles[selectedProfileId]?.channels instanceof Set + ? Array.from(profiles[selectedProfileId].channels) + : []; + + if (!enabledChannelIds.includes(channel.id)) return false; + } + + return true; + }); +} + +export const calculateEarliestProgramStart = (programs, defaultStart) => { + if (!programs.length) return defaultStart; + return programs.reduce((acc, p) => { + const s = initializeTime(p.start_time); + return isBefore(s, acc) ? s : acc; + }, defaultStart); +} + +export const calculateLatestProgramEnd = (programs, defaultEnd) => { + if (!programs.length) return defaultEnd; + return programs.reduce((acc, p) => { + const e = initializeTime(p.end_time); + return isAfter(e, acc) ? e : acc; + }, defaultEnd); +} + +export const calculateStart = (earliestProgramStart, defaultStart) => { + return isBefore(earliestProgramStart, defaultStart) + ? earliestProgramStart + : defaultStart; +} + +export const calculateEnd = (latestProgramEnd, defaultEnd) => { + return isAfter(latestProgramEnd, defaultEnd) ? 
latestProgramEnd : defaultEnd; +} + +export const mapChannelsById = (guideChannels) => { + const map = new Map(); + guideChannels.forEach((channel) => { + map.set(channel.id, channel); + }); + return map; +} + +export const mapRecordingsByProgramId = (recordings) => { + const map = new Map(); + (recordings || []).forEach((recording) => { + const programId = recording?.custom_properties?.program?.id; + if (programId != null) { + map.set(programId, recording); + } + }); + return map; +} + +export const formatTime = (time, dateFormat) => { + const today = startOfDay(getNow()); + const tomorrow = add(today, 1, 'day'); + const weekLater = add(today, 7, 'day'); + const day = startOfDay(time); + + if (isSame(day, today, 'day')) { + return 'Today'; + } else if (isSame(day, tomorrow, 'day')) { + return 'Tomorrow'; + } else if (isBefore(day, weekLater)) { + // Within a week, show day name + return format(time, 'dddd'); + } else { + // Beyond a week, show month and day + return format(time, dateFormat); + } +} + +export const calculateHourTimeline = (start, end, formatDayLabel) => { + const hours = []; + let current = start; + let currentDay = null; + + while (isBefore(current, end)) { + // Check if we're entering a new day + const day = startOfDay(current); + const isNewDay = !currentDay || !isSame(day, currentDay, 'day'); + + if (isNewDay) { + currentDay = day; + } + + // Add day information to our hour object + hours.push({ + time: current, + isNewDay, + dayLabel: formatDayLabel(current), + }); + + current = add(current, 1, 'hour'); + } + return hours; +} + +export const calculateNowPosition = (now, start, end) => { + if (isBefore(now, start) || isAfter(now, end)) return -1; + const minutesSinceStart = diff(now, start, 'minute'); + return (minutesSinceStart / MINUTE_INCREMENT) * MINUTE_BLOCK_WIDTH; +}; + +export const calculateScrollPosition = (now, start) => { + const roundedNow = roundToNearest(now, 30); + const nowOffset = diff(roundedNow, start, 'minute'); + const scrollPosition = + (nowOffset / MINUTE_INCREMENT) * MINUTE_BLOCK_WIDTH - MINUTE_BLOCK_WIDTH; + + return Math.max(scrollPosition, 0); +}; + +export const matchChannelByTvgId = (channelIdByTvgId, channelById, tvgId) => { + const channelIds = channelIdByTvgId.get(String(tvgId)); + if (!channelIds || channelIds.length === 0) { + return null; + } + // Return the first channel that matches this TVG ID + return channelById.get(channelIds[0]) || null; +} + +export const fetchRules = async () => { + return await API.listSeriesRules(); +} + +export const getRuleByProgram = (rules, program) => { + return (rules || []).find( + (r) => + String(r.tvg_id) === String(program.tvg_id) && + (!r.title || r.title === program.title) + ); +} + +export const createRecording = async (channel, program) => { + await API.createRecording({ + channel: `${channel.id}`, + start_time: program.start_time, + end_time: program.end_time, + custom_properties: { program }, + }); +} + +export const createSeriesRule = async (program, mode) => { + await API.createSeriesRule({ + tvg_id: program.tvg_id, + mode, + title: program.title, + }); +} + +export const evaluateSeriesRule = async (program) => { + await API.evaluateSeriesRules(program.tvg_id); +} + +export const calculateLeftScrollPosition = (program, start) => { + const programStartMs = + program.startMs ?? 
convertToMs(program.start_time); + const startOffsetMinutes = (programStartMs - convertToMs(start)) / 60000; + + return (startOffsetMinutes / MINUTE_INCREMENT) * MINUTE_BLOCK_WIDTH; +}; + +export const calculateDesiredScrollPosition = (leftPx) => { + return Math.max(0, leftPx - 20); +} + +export const calculateScrollPositionByTimeClick = (event, clickedTime, start) => { + const rect = event.currentTarget.getBoundingClientRect(); + const clickPositionX = event.clientX - rect.left; + const percentageAcross = clickPositionX / rect.width; + const minuteWithinHour = percentageAcross * 60; + + const snappedMinute = Math.round(minuteWithinHour / 15) * 15; + + const adjustedTime = (snappedMinute === 60) + ? add(clickedTime, 1, 'hour').minute(0) + : clickedTime.minute(snappedMinute); + + const snappedOffset = diff(adjustedTime, start, 'minute'); + return (snappedOffset / MINUTE_INCREMENT) * MINUTE_BLOCK_WIDTH; +}; + +export const getGroupOptions = (channelGroups, guideChannels) => { + const options = [{ value: 'all', label: 'All Channel Groups' }]; + + if (channelGroups && guideChannels.length > 0) { + // Get unique channel group IDs from the channels that have program data + const usedGroupIds = new Set(); + guideChannels.forEach((channel) => { + if (channel.channel_group_id) { + usedGroupIds.add(channel.channel_group_id); + } + }); + // Only add groups that are actually used by channels in the guide + Object.values(channelGroups) + .filter((group) => usedGroupIds.has(group.id)) + .sort((a, b) => a.name.localeCompare(b.name)) // Sort alphabetically + .forEach((group) => { + options.push({ + value: group.id.toString(), + label: group.name, + }); + }); + } + return options; +} + +export const getProfileOptions = (profiles) => { + const options = [{ value: 'all', label: 'All Profiles' }]; + + if (profiles) { + Object.values(profiles).forEach((profile) => { + if (profile.id !== '0') { + // Skip the 'All' default profile + options.push({ + value: profile.id.toString(), + label: profile.name, + }); + } + }); + } + + return options; +} + +export const deleteSeriesRuleByTvgId = async (tvg_id) => { + await API.deleteSeriesRule(tvg_id); +} + +export const evaluateSeriesRulesByTvgId = async (tvg_id) => { + await API.evaluateSeriesRules(tvg_id); +} \ No newline at end of file diff --git a/frontend/src/utils/dateTimeUtils.js b/frontend/src/utils/dateTimeUtils.js index b7490f88..d2d2ea63 100644 --- a/frontend/src/utils/dateTimeUtils.js +++ b/frontend/src/utils/dateTimeUtils.js @@ -12,6 +12,38 @@ dayjs.extend(relativeTime); dayjs.extend(utc); dayjs.extend(timezone); +export const convertToMs = (dateTime) => dayjs(dateTime).valueOf(); + +export const initializeTime = (dateTime) => dayjs(dateTime); + +export const startOfDay = (dateTime) => dayjs(dateTime).startOf('day'); + +export const isBefore = (date1, date2) => dayjs(date1).isBefore(date2); + +export const isAfter = (date1, date2) => dayjs(date1).isAfter(date2); + +export const isSame = (date1, date2, unit = 'day') => dayjs(date1).isSame(date2, unit); + +export const add = (dateTime, value, unit) => dayjs(dateTime).add(value, unit); + +export const diff = (date1, date2, unit = 'millisecond') => dayjs(date1).diff(date2, unit); + +export const format = (dateTime, formatStr) => dayjs(dateTime).format(formatStr); + +export const getNow = () => dayjs(); + +export const getNowMs = () => Date.now(); + +export const roundToNearest = (dateTime, minutes) => { + const current = initializeTime(dateTime); + const minute = current.minute(); + const snappedMinute = 
Math.round(minute / minutes) * minutes; + + return snappedMinute === 60 + ? current.add(1, 'hour').minute(0) + : current.minute(snappedMinute); +}; + export const useUserTimeZone = () => { const settings = useSettingsStore((s) => s.settings); const [timeZone, setTimeZone] = useLocalStorage( @@ -38,15 +70,15 @@ export const useTimeHelpers = () => { (value) => { if (!value) return dayjs.invalid(); try { - return dayjs(value).tz(timeZone); + return initializeTime(value).tz(timeZone); } catch (error) { - return dayjs(value); + return initializeTime(value); } }, [timeZone] ); - const userNow = useCallback(() => dayjs().tz(timeZone), [timeZone]); + const userNow = useCallback(() => getNow().tz(timeZone), [timeZone]); return { timeZone, toUserTime, userNow }; }; @@ -78,7 +110,7 @@ export const toTimeString = (value) => { if (parsed.isValid()) return parsed.format('HH:mm'); return value; } - const parsed = dayjs(value); + const parsed = initializeTime(value); return parsed.isValid() ? parsed.format('HH:mm') : '00:00'; }; From ca96adf7818f0c84a11e3743e563d837fcee42aa Mon Sep 17 00:00:00 2001 From: Nick Sandstrom <32273437+nick4810@users.noreply.github.com> Date: Wed, 24 Dec 2025 22:41:51 -0800 Subject: [PATCH 148/220] Extracted notification util --- frontend/src/components/forms/SeriesRecordingModal.jsx | 4 ++-- frontend/src/pages/Guide.jsx | 10 +++++----- frontend/src/utils/notificationUtils.js | 5 +++++ 3 files changed, 12 insertions(+), 7 deletions(-) create mode 100644 frontend/src/utils/notificationUtils.js diff --git a/frontend/src/components/forms/SeriesRecordingModal.jsx b/frontend/src/components/forms/SeriesRecordingModal.jsx index 1c10e4bd..3d890971 100644 --- a/frontend/src/components/forms/SeriesRecordingModal.jsx +++ b/frontend/src/components/forms/SeriesRecordingModal.jsx @@ -1,9 +1,9 @@ import React from 'react'; import { Modal, Stack, Text, Flex, Group, Button } from '@mantine/core'; -import { notifications } from '@mantine/notifications'; import useChannelsStore from '../../store/channels.jsx'; import { deleteSeriesAndRule } from '../../utils/cards/RecordingCardUtils.js'; import { evaluateSeriesRulesByTvgId, fetchRules } from '../../pages/guideUtils.js'; +import { showNotification } from '../../utils/notificationUtils.js'; export default function SeriesRecordingModal({ opened, @@ -18,7 +18,7 @@ export default function SeriesRecordingModal({ } catch (error) { console.warn('Failed to refresh recordings after evaluation', error); } - notifications.show({ + showNotification({ title: 'Evaluated', message: 'Checked for episodes', }); diff --git a/frontend/src/pages/Guide.jsx b/frontend/src/pages/Guide.jsx index a382fffe..214fc216 100644 --- a/frontend/src/pages/Guide.jsx +++ b/frontend/src/pages/Guide.jsx @@ -9,7 +9,6 @@ import React, { import useChannelsStore from '../store/channels'; import useLogosStore from '../store/logos'; import useVideoStore from '../store/useVideoStore'; // NEW import -import { notifications } from '@mantine/notifications'; import useSettingsStore from '../store/settings'; import { ActionIcon, @@ -80,6 +79,7 @@ import GuideRow from '../components/GuideRow.jsx'; import HourTimeline from '../components/HourTimeline'; import ProgramRecordingModal from '../components/forms/ProgramRecordingModal'; import SeriesRecordingModal from '../components/forms/SeriesRecordingModal'; +import { showNotification } from '../utils/notificationUtils.js'; export default function TVChannelGuide({ startDate, endDate }) { const channels = useChannelsStore((s) => s.channels); @@ -130,7 
+130,7 @@ export default function TVChannelGuide({ startDate, endDate }) { useEffect(() => { if (Object.keys(channels).length === 0) { console.warn('No channels provided or empty channels array'); - notifications.show({ title: 'No channels available', color: 'red.5' }); + showNotification({ title: 'No channels available', color: 'red.5' }); return; } @@ -522,7 +522,7 @@ export default function TVChannelGuide({ startDate, endDate }) { async (program) => { const channel = findChannelByTvgId(program.tvg_id); if (!channel) { - notifications.show({ + showNotification({ title: 'Unable to schedule recording', message: 'No channel found for this program.', color: 'red.6', @@ -531,7 +531,7 @@ export default function TVChannelGuide({ startDate, endDate }) { } await createRecording(channel, program); - notifications.show({ title: 'Recording scheduled' }); + showNotification({ title: 'Recording scheduled' }); }, [findChannelByTvgId] ); @@ -547,7 +547,7 @@ export default function TVChannelGuide({ startDate, endDate }) { error ); } - notifications.show({ + showNotification({ title: mode === 'new' ? 'Record new episodes' : 'Record all episodes', }); }, []); diff --git a/frontend/src/utils/notificationUtils.js b/frontend/src/utils/notificationUtils.js new file mode 100644 index 00000000..baf91b54 --- /dev/null +++ b/frontend/src/utils/notificationUtils.js @@ -0,0 +1,5 @@ +import { notifications } from '@mantine/notifications'; + +export function showNotification(notificationObject) { + notifications.show(notificationObject); +} \ No newline at end of file From a5688605cd998cc5ab60588a25831a8fa263bf8b Mon Sep 17 00:00:00 2001 From: Nick Sandstrom <32273437+nick4810@users.noreply.github.com> Date: Wed, 24 Dec 2025 23:13:07 -0800 Subject: [PATCH 149/220] Lazy-loading button modals --- frontend/src/pages/Guide.jsx | 51 ++++++++++++++++++++++-------------- 1 file changed, 32 insertions(+), 19 deletions(-) diff --git a/frontend/src/pages/Guide.jsx b/frontend/src/pages/Guide.jsx index 214fc216..2ae80012 100644 --- a/frontend/src/pages/Guide.jsx +++ b/frontend/src/pages/Guide.jsx @@ -5,6 +5,7 @@ import React, { useEffect, useRef, useCallback, + Suspense, } from 'react'; import useChannelsStore from '../store/channels'; import useLogosStore from '../store/logos'; @@ -16,6 +17,7 @@ import { Button, Flex, Group, + LoadingOverlay, Paper, Select, Text, @@ -77,9 +79,12 @@ import { } from '../utils/dateTimeUtils.js'; import GuideRow from '../components/GuideRow.jsx'; import HourTimeline from '../components/HourTimeline'; -import ProgramRecordingModal from '../components/forms/ProgramRecordingModal'; -import SeriesRecordingModal from '../components/forms/SeriesRecordingModal'; +const ProgramRecordingModal = React.lazy(() => + import('../components/forms/ProgramRecordingModal')); +const SeriesRecordingModal = React.lazy(() => + import('../components/forms/SeriesRecordingModal')); import { showNotification } from '../utils/notificationUtils.js'; +import ErrorBoundary from '../components/ErrorBoundary.jsx'; export default function TVChannelGuide({ startDate, endDate }) { const channels = useChannelsStore((s) => s.channels); @@ -1219,27 +1224,35 @@ export default function TVChannelGuide({ startDate, endDate }) { {/* Record choice modal */} {recordChoiceOpen && recordChoiceProgram && ( - setRecordChoiceOpen(false)} - program={recordChoiceProgram} - recording={recordingForProgram} - existingRuleMode={existingRuleMode} - onRecordOne={() => recordOne(recordChoiceProgram)} - onRecordSeriesAll={() => 
saveSeriesRule(recordChoiceProgram, 'all')} - onRecordSeriesNew={() => saveSeriesRule(recordChoiceProgram, 'new')} - onExistingRuleModeChange={setExistingRuleMode} - /> + + }> + setRecordChoiceOpen(false)} + program={recordChoiceProgram} + recording={recordingForProgram} + existingRuleMode={existingRuleMode} + onRecordOne={() => recordOne(recordChoiceProgram)} + onRecordSeriesAll={() => saveSeriesRule(recordChoiceProgram, 'all')} + onRecordSeriesNew={() => saveSeriesRule(recordChoiceProgram, 'new')} + onExistingRuleModeChange={setExistingRuleMode} + /> + + )} {/* Series rules modal */} {rulesOpen && ( - setRulesOpen(false)} - rules={rules} - onRulesUpdate={setRules} - /> + + }> + setRulesOpen(false)} + rules={rules} + onRulesUpdate={setRules} + /> + + )} ); From f97399de07761b46c55d1a070341cdfbc13adc7f Mon Sep 17 00:00:00 2001 From: Nick Sandstrom <32273437+nick4810@users.noreply.github.com> Date: Sat, 27 Dec 2025 22:35:43 -0800 Subject: [PATCH 150/220] Extracted component and util logic --- frontend/src/components/Field.jsx | 47 ++ frontend/src/components/cards/PluginCard.jsx | 258 +++++++ frontend/src/pages/Logos.jsx | 4 +- frontend/src/pages/Plugins.jsx | 770 ++++++------------- frontend/src/utils/cards/PluginCardUtils.js | 24 + frontend/src/utils/notificationUtils.js | 6 +- frontend/src/utils/pages/PluginsUtils.js | 17 + 7 files changed, 603 insertions(+), 523 deletions(-) create mode 100644 frontend/src/components/Field.jsx create mode 100644 frontend/src/components/cards/PluginCard.jsx create mode 100644 frontend/src/utils/cards/PluginCardUtils.js create mode 100644 frontend/src/utils/pages/PluginsUtils.js diff --git a/frontend/src/components/Field.jsx b/frontend/src/components/Field.jsx new file mode 100644 index 00000000..1293bf7b --- /dev/null +++ b/frontend/src/components/Field.jsx @@ -0,0 +1,47 @@ +import { NumberInput, Select, Switch, TextInput } from '@mantine/core'; +import React from 'react'; + +export const Field = ({ field, value, onChange }) => { + const common = { label: field.label, description: field.help_text }; + const effective = value ?? 
field.default; + switch (field.type) { + case 'boolean': + return ( + onChange(field.id, e.currentTarget.checked)} + label={field.label} + description={field.help_text} + /> + ); + case 'number': + return ( + onChange(field.id, v)} + {...common} + /> + ); + case 'select': + return ( + ({ - value: o.value + '', - label: o.label, - }))} - onChange={(v) => onChange(field.id, v)} - {...common} - /> - ); - case 'string': - default: - return ( - onChange(field.id, e.currentTarget.value)} - {...common} - /> - ); - } -}; +const PluginsList = ({ onRequestDelete, onRequireTrust, onRequestConfirm }) => { + const plugins = usePluginStore((state) => state.plugins); + const loading = usePluginStore((state) => state.loading); + const hasFetchedRef = useRef(false); -const PluginCard = ({ - plugin, - onSaveSettings, - onRunAction, - onToggleEnabled, - onRequireTrust, - onRequestDelete, -}) => { - const [settings, setSettings] = useState(plugin.settings || {}); - const [saving, setSaving] = useState(false); - const [running, setRunning] = useState(false); - const [enabled, setEnabled] = useState(!!plugin.enabled); - const [lastResult, setLastResult] = useState(null); - const [confirmOpen, setConfirmOpen] = useState(false); - const [confirmConfig, setConfirmConfig] = useState({ - title: '', - message: '', - onConfirm: null, - }); + useEffect(() => { + if (!hasFetchedRef.current) { + hasFetchedRef.current = true; + usePluginStore.getState().fetchPlugins(); + } + }, []); - // Keep local enabled state in sync with props (e.g., after import + enable) - React.useEffect(() => { - setEnabled(!!plugin.enabled); - }, [plugin.enabled]); - // Sync settings if plugin changes identity - React.useEffect(() => { - setSettings(plugin.settings || {}); - }, [plugin.key]); + const handleTogglePluginEnabled = async (key, next) => { + const resp = await setPluginEnabled(key, next); - const updateField = (id, val) => { - setSettings((prev) => ({ ...prev, [id]: val })); - }; - - const save = async () => { - setSaving(true); - try { - await onSaveSettings(plugin.key, settings); - notifications.show({ - title: 'Saved', - message: `${plugin.name} settings updated`, - color: 'green', + if (resp?.success) { + usePluginStore.getState().updatePlugin(key, { + enabled: next, + ever_enabled: resp?.ever_enabled, }); - } finally { - setSaving(false); } }; - const missing = plugin.missing; + if (loading && plugins.length === 0) { + return ; + } + return ( - - -
- {plugin.name} - - {plugin.description} + <> + {plugins.length > 0 && + + + }> + {plugins.map((p) => ( + + ))} + + + + } + + {plugins.length === 0 && ( + + + No plugins found. Drop a plugin into /data/plugins{' '} + and reload. -
- - onRequestDelete && onRequestDelete(plugin)} - > - - - - v{plugin.version || '1.0.0'} - - { - const next = e.currentTarget.checked; - if (next && !plugin.ever_enabled && onRequireTrust) { - const ok = await onRequireTrust(plugin); - if (!ok) { - // Revert - setEnabled(false); - return; - } - } - setEnabled(next); - const resp = await onToggleEnabled(plugin.key, next); - if (next && resp?.ever_enabled) { - plugin.ever_enabled = true; - } - }} - size="xs" - onLabel="On" - offLabel="Off" - disabled={missing} - /> - -
- - {missing && ( - - Missing plugin files. Re-import or delete this entry. - + )} - - {!missing && plugin.fields && plugin.fields.length > 0 && ( - - {plugin.fields.map((f) => ( - - ))} - - - - - )} - - {!missing && plugin.actions && plugin.actions.length > 0 && ( - <> - - - {plugin.actions.map((a) => ( - -
- {a.label} - {a.description && ( - - {a.description} - - )} -
- -
- ))} - {running && ( - - Running action… please wait - - )} - {!running && lastResult?.file && ( - - Output: {lastResult.file} - - )} - {!running && lastResult?.error && ( - - Error: {String(lastResult.error)} - - )} -
- - )} - { - setConfirmOpen(false); - setConfirmConfig({ title: '', message: '', onConfirm: null }); - }} - title={confirmConfig.title} - centered - > - - {confirmConfig.message} - - - - - - -
+ ); }; export default function PluginsPage() { - const [loading, setLoading] = useState(true); - const [plugins, setPlugins] = useState([]); const [importOpen, setImportOpen] = useState(false); const [importFile, setImportFile] = useState(null); const [importing, setImporting] = useState(false); @@ -358,118 +113,172 @@ export default function PluginsPage() { const [deleteOpen, setDeleteOpen] = useState(false); const [deleteTarget, setDeleteTarget] = useState(null); const [deleting, setDeleting] = useState(false); - const [uploadNoticeId, setUploadNoticeId] = useState(null); + const [confirmOpen, setConfirmOpen] = useState(false); + const [confirmConfig, setConfirmConfig] = useState({ + title: '', + message: '', + resolve: null, + }); - const load = async () => { - setLoading(true); - try { - const list = await API.getPlugins(); - setPlugins(list); - } finally { - setLoading(false); - } + const handleReload = () => { + usePluginStore.getState().invalidatePlugins(); }; - useEffect(() => { - load(); + const handleRequestDelete = useCallback((pl) => { + setDeleteTarget(pl); + setDeleteOpen(true); }, []); - const requireTrust = (plugin) => { + const requireTrust = useCallback((plugin) => { return new Promise((resolve) => { setTrustResolve(() => resolve); setTrustOpen(true); }); + }, []); + + const showImportForm = useCallback(() => { + setImportOpen(true); + setImported(null); + setImportFile(null); + setEnableAfterImport(false); + }, []); + + const requestConfirm = useCallback((title, message) => { + return new Promise((resolve) => { + setConfirmConfig({ title, message, resolve }); + setConfirmOpen(true); + }); + }, []); + + const handleImportPlugin = () => { + return async () => { + setImporting(true); + const id = showNotification({ + title: 'Uploading plugin', + message: 'Backend may restart; please wait…', + loading: true, + autoClose: false, + withCloseButton: false, + }); + try { + const resp = await importPlugin(importFile); + if (resp?.success && resp.plugin) { + setImported(resp.plugin); + usePluginStore.getState().invalidatePlugins(); + + updateNotification({ + id, + loading: false, + color: 'green', + title: 'Imported', + message: + 'Plugin imported. 
If the app briefly disconnected, it should be back now.', + autoClose: 3000, + }); + } else { + updateNotification({ + id, + loading: false, + color: 'red', + title: 'Import failed', + message: resp?.error || 'Unknown error', + autoClose: 5000, + }); + } + } catch (e) { + // API.importPlugin already showed a concise error; just update the loading notice + updateNotification({ + id, + loading: false, + color: 'red', + title: 'Import failed', + message: + (e?.body && (e.body.error || e.body.detail)) || + e?.message || + 'Failed', + autoClose: 5000, + }); + } finally { + setImporting(false); + } + }; }; + const handleEnablePlugin = () => { + return async () => { + if (!imported) return; + + const proceed = imported.ever_enabled || (await requireTrust(imported)); + if (proceed) { + const resp = await setPluginEnabled(imported.key, true); + if (resp?.success) { + usePluginStore.getState().updatePlugin(imported.key, { enabled: true, ever_enabled: true }); + + showNotification({ + title: imported.name, + message: 'Plugin enabled', + color: 'green', + }); + } + setImportOpen(false); + setImported(null); + setEnableAfterImport(false); + } + }; + }; + + const handleDeletePlugin = () => { + return async () => { + if (!deleteTarget) return; + setDeleting(true); + try { + const resp = await deletePluginByKey(deleteTarget.key); + if (resp?.success) { + usePluginStore.getState().removePlugin(deleteTarget.key); + + showNotification({ + title: deleteTarget.name, + message: 'Plugin deleted', + color: 'green', + }); + } + setDeleteOpen(false); + setDeleteTarget(null); + } finally { + setDeleting(false); + } + }; + }; + + const handleConfirm = useCallback((confirmed) => { + const resolver = confirmConfig.resolve; + setConfirmOpen(false); + setConfirmConfig({ title: '', message: '', resolve: null }); + if (resolver) resolver(confirmed); + }, [confirmConfig.resolve]); + return ( - + Plugins - - { - await API.reloadPlugins(); - await load(); - }} - title="Reload" - > + - {loading ? ( - - ) : ( - <> - - {plugins.map((p) => ( - { - const resp = await API.setPluginEnabled(key, next); - if (resp?.ever_enabled !== undefined) { - setPlugins((prev) => - prev.map((pl) => - pl.key === key - ? { - ...pl, - ever_enabled: resp.ever_enabled, - enabled: resp.enabled, - } - : pl - ) - ); - } else { - setPlugins((prev) => - prev.map((pl) => - pl.key === key ? { ...pl, enabled: next } : pl - ) - ); - } - return resp; - }} - onRequireTrust={requireTrust} - onRequestDelete={(plugin) => { - setDeleteTarget(plugin); - setDeleteOpen(true); - }} - /> - ))} - - {plugins.length === 0 && ( - - - No plugins found. Drop a plugin into /data/plugins{' '} - and reload. - - - )} - - )} + + {/* Import Plugin Modal */} { - setImporting(true); - const id = notifications.show({ - title: 'Uploading plugin', - message: 'Backend may restart; please wait…', - loading: true, - autoClose: false, - withCloseButton: false, - }); - setUploadNoticeId(id); - try { - const resp = await API.importPlugin(importFile); - if (resp?.success && resp.plugin) { - setImported(resp.plugin); - setPlugins((prev) => [ - resp.plugin, - ...prev.filter((p) => p.key !== resp.plugin.key), - ]); - notifications.update({ - id, - loading: false, - color: 'green', - title: 'Imported', - message: - 'Plugin imported. 
If the app briefly disconnected, it should be back now.', - autoClose: 3000, - }); - } else { - notifications.update({ - id, - loading: false, - color: 'red', - title: 'Import failed', - message: resp?.error || 'Unknown error', - autoClose: 5000, - }); - } - } catch (e) { - // API.importPlugin already showed a concise error; just update the loading notice - notifications.update({ - id, - loading: false, - color: 'red', - title: 'Import failed', - message: - (e?.body && (e.body.error || e.body.detail)) || - e?.message || - 'Failed', - autoClose: 5000, - }); - } finally { - setImporting(false); - setUploadNoticeId(null); - } - }} + onClick={handleImportPlugin()} > Upload @@ -612,36 +367,7 @@ export default function PluginsPage() { @@ -727,33 +453,37 @@ export default function PluginsPage() { size="xs" color="red" loading={deleting} - onClick={async () => { - if (!deleteTarget) return; - setDeleting(true); - try { - const resp = await API.deletePlugin(deleteTarget.key); - if (resp?.success) { - setPlugins((prev) => - prev.filter((p) => p.key !== deleteTarget.key) - ); - notifications.show({ - title: deleteTarget.name, - message: 'Plugin deleted', - color: 'green', - }); - } - setDeleteOpen(false); - setDeleteTarget(null); - } finally { - setDeleting(false); - } - }} + onClick={handleDeletePlugin()} > Delete
- + + {/* Confirmation modal */} + handleConfirm(false)} + title={confirmConfig.title} + centered + > + + {confirmConfig.message} + + + + + + + ); } diff --git a/frontend/src/utils/cards/PluginCardUtils.js b/frontend/src/utils/cards/PluginCardUtils.js new file mode 100644 index 00000000..8752e019 --- /dev/null +++ b/frontend/src/utils/cards/PluginCardUtils.js @@ -0,0 +1,24 @@ +export const getConfirmationDetails = (action, plugin, settings) => { + const actionConfirm = action.confirm; + const confirmField = (plugin.fields || []).find((f) => f.id === 'confirm'); + let requireConfirm = false; + let confirmTitle = `Run ${action.label}?`; + let confirmMessage = `You're about to run "${action.label}" from "${plugin.name}".`; + + if (actionConfirm) { + if (typeof actionConfirm === 'boolean') { + requireConfirm = actionConfirm; + } else if (typeof actionConfirm === 'object') { + requireConfirm = actionConfirm.required !== false; + if (actionConfirm.title) confirmTitle = actionConfirm.title; + if (actionConfirm.message) confirmMessage = actionConfirm.message; + } + } else if (confirmField) { + const settingVal = settings?.confirm; + const effectiveConfirm = + (settingVal !== undefined ? settingVal : confirmField.default) ?? false; + requireConfirm = !!effectiveConfirm; + } + + return { requireConfirm, confirmTitle, confirmMessage }; +}; diff --git a/frontend/src/utils/notificationUtils.js b/frontend/src/utils/notificationUtils.js index baf91b54..ba965343 100644 --- a/frontend/src/utils/notificationUtils.js +++ b/frontend/src/utils/notificationUtils.js @@ -1,5 +1,9 @@ import { notifications } from '@mantine/notifications'; export function showNotification(notificationObject) { - notifications.show(notificationObject); + return notifications.show(notificationObject); +} + +export function updateNotification(notificationId, notificationObject) { + return notifications.update(notificationId, notificationObject); } \ No newline at end of file diff --git a/frontend/src/utils/pages/PluginsUtils.js b/frontend/src/utils/pages/PluginsUtils.js new file mode 100644 index 00000000..bae98e93 --- /dev/null +++ b/frontend/src/utils/pages/PluginsUtils.js @@ -0,0 +1,17 @@ +import API from '../../api.js'; + +export const updatePluginSettings = async (key, settings) => { + return await API.updatePluginSettings(key, settings); +}; +export const runPluginAction = async (key, actionId) => { + return await API.runPluginAction(key, actionId); +}; +export const setPluginEnabled = async (key, next) => { + return await API.setPluginEnabled(key, next); +}; +export const importPlugin = async (importFile) => { + return await API.importPlugin(importFile); +}; +export const deletePluginByKey = (key) => { + return API.deletePlugin(key); +}; \ No newline at end of file From 26d9dbd246444a2ba1908d88493517c40d942dfa Mon Sep 17 00:00:00 2001 From: Nick Sandstrom <32273437+nick4810@users.noreply.github.com> Date: Sat, 27 Dec 2025 22:35:53 -0800 Subject: [PATCH 151/220] Added plugins store --- frontend/src/store/plugins.jsx | 41 ++++++++++++++++++++++++++++++++++ 1 file changed, 41 insertions(+) create mode 100644 frontend/src/store/plugins.jsx diff --git a/frontend/src/store/plugins.jsx b/frontend/src/store/plugins.jsx new file mode 100644 index 00000000..e8d0b065 --- /dev/null +++ b/frontend/src/store/plugins.jsx @@ -0,0 +1,41 @@ +import { create } from 'zustand'; +import API from '../api'; + +export const usePluginStore = create((set, get) => ({ + plugins: [], + loading: false, + error: null, + + fetchPlugins: async () => { + set({ 
loading: true, error: null }); + try { + const response = await API.getPlugins(); + set({ plugins: response || [], loading: false }); + } catch (error) { + set({ error, loading: false }); + } + }, + + updatePlugin: (key, updates) => { + set((state) => ({ + plugins: state.plugins.map((p) => + p.key === key ? { ...p, ...updates } : p + ), + })); + }, + + addPlugin: (plugin) => { + set((state) => ({ plugins: [...state.plugins, plugin] })); + }, + + removePlugin: (key) => { + set((state) => ({ + plugins: state.plugins.filter((p) => p.key !== key), + })); + }, + + invalidatePlugins: () => { + set({ plugins: [] }); + get().fetchPlugins(); + }, +})); \ No newline at end of file From ffa1331c3bad10c6309d8584b52f1837f68de00c Mon Sep 17 00:00:00 2001 From: Nick Sandstrom <32273437+nick4810@users.noreply.github.com> Date: Sat, 27 Dec 2025 23:17:42 -0800 Subject: [PATCH 152/220] Updated to use util functions --- frontend/src/pages/DVR.jsx | 27 +++++++++------------------ 1 file changed, 9 insertions(+), 18 deletions(-) diff --git a/frontend/src/pages/DVR.jsx b/frontend/src/pages/DVR.jsx index 8e39cf2c..b1cc1fe8 100644 --- a/frontend/src/pages/DVR.jsx +++ b/frontend/src/pages/DVR.jsx @@ -18,13 +18,14 @@ import useSettingsStore from '../store/settings'; import useVideoStore from '../store/useVideoStore'; import RecordingForm from '../components/forms/Recording'; import { + isAfter, isBefore, useTimeHelpers, } from '../utils/dateTimeUtils.js'; const RecordingDetailsModal = lazy(() => import('../components/forms/RecordingDetailsModal')); import RecurringRuleModal from '../components/forms/RecurringRuleModal.jsx'; import RecordingCard from '../components/cards/RecordingCard.jsx'; import { categorizeRecordings } from '../utils/pages/DVRUtils.js'; -import { getPosterUrl } from '../utils/cards/RecordingCardUtils.js'; +import { getPosterUrl, getRecordingUrl, getShowVideoUrl } from '../utils/cards/RecordingCardUtils.js'; import ErrorBoundary from '../components/ErrorBoundary.jsx'; const DVRPage = () => { @@ -110,30 +111,20 @@ const DVRPage = () => { const now = userNow(); const s = toUserTime(rec.start_time); const e = toUserTime(rec.end_time); - if (now.isAfter(s) && now.isBefore(e)) { + if(isAfter(now, s) && isBefore(now, e)) { // call into child RecordingCard behavior by constructing a URL like there const channel = channels[rec.channel]; if (!channel) return; - let url = `/proxy/ts/stream/${channel.uuid}`; - if (useSettingsStore.getState().environment.env_mode === 'dev') { - url = `${window.location.protocol}//${window.location.hostname}:5656${url}`; - } + const url = getShowVideoUrl(channel, useSettingsStore.getState().environment.env_mode); useVideoStore.getState().showVideo(url, 'live'); } } const handleOnWatchRecording = () => { - let fileUrl = - detailsRecording.custom_properties?.file_url || - detailsRecording.custom_properties?.output_file_url; - if (!fileUrl) return; - if ( - useSettingsStore.getState().environment.env_mode === 'dev' && - fileUrl.startsWith('/') - ) { - fileUrl = `${window.location.protocol}//${window.location.hostname}:5656${fileUrl}`; - } - useVideoStore.getState().showVideo(fileUrl, 'vod', { + const url = getRecordingUrl( + detailsRecording.custom_properties, useSettingsStore.getState().environment.env_mode); + if(!url) return; + useVideoStore.getState().showVideo(url, 'vod', { name: detailsRecording.custom_properties?.program?.title || 'Recording', @@ -163,7 +154,7 @@ const DVRPage = () => { > New Recording - +
Currently Recording From 43525ca32a6cf170f672a895f5df5de3c04019d0 Mon Sep 17 00:00:00 2001 From: Nick Sandstrom <32273437+nick4810@users.noreply.github.com> Date: Sat, 27 Dec 2025 23:49:06 -0800 Subject: [PATCH 153/220] Moved RecordingList outside of DVRPage Helps to prevent renders --- frontend/src/pages/DVR.jsx | 46 +++++++++++++++++++++++++------------- 1 file changed, 30 insertions(+), 16 deletions(-) diff --git a/frontend/src/pages/DVR.jsx b/frontend/src/pages/DVR.jsx index b1cc1fe8..7bd6e07f 100644 --- a/frontend/src/pages/DVR.jsx +++ b/frontend/src/pages/DVR.jsx @@ -18,16 +18,29 @@ import useSettingsStore from '../store/settings'; import useVideoStore from '../store/useVideoStore'; import RecordingForm from '../components/forms/Recording'; import { - isAfter, isBefore, + isAfter, + isBefore, useTimeHelpers, } from '../utils/dateTimeUtils.js'; -const RecordingDetailsModal = lazy(() => import('../components/forms/RecordingDetailsModal')); +const RecordingDetailsModal = lazy(() => + import('../components/forms/RecordingDetailsModal')); import RecurringRuleModal from '../components/forms/RecurringRuleModal.jsx'; import RecordingCard from '../components/cards/RecordingCard.jsx'; import { categorizeRecordings } from '../utils/pages/DVRUtils.js'; import { getPosterUrl, getRecordingUrl, getShowVideoUrl } from '../utils/cards/RecordingCardUtils.js'; import ErrorBoundary from '../components/ErrorBoundary.jsx'; +const RecordingList = ({ list, onOpenDetails, onOpenRecurring }) => { + return list.map((rec) => ( + + )); +}; + const DVRPage = () => { const theme = useMantineTheme(); const recordings = useChannelsStore((s) => s.recordings); @@ -95,17 +108,6 @@ const DVRPage = () => { return categorizeRecordings(recordings, toUserTime, now); }, [recordings, now, toUserTime]); - const RecordingList = ({ list }) => { - return list.map((rec) => ( - - )); - }; - const handleOnWatchLive = () => { const rec = detailsRecording; const now = userNow(); @@ -168,7 +170,11 @@ const DVRPage = () => { { maxWidth: '36rem', cols: 1 }, ]} > - {} + {} {inProgress.length === 0 && ( Nothing recording right now. @@ -190,7 +196,11 @@ const DVRPage = () => { { maxWidth: '36rem', cols: 1 }, ]} > - {} + {} {upcoming.length === 0 && ( No upcoming recordings. @@ -212,7 +222,11 @@ const DVRPage = () => { { maxWidth: '36rem', cols: 1 }, ]} > - {} + {} {completed.length === 0 && ( No completed recordings yet. 
From d9fc0e68d69fa683247edbff084909e22542b1b6 Mon Sep 17 00:00:00 2001 From: Nick Sandstrom <32273437+nick4810@users.noreply.github.com> Date: Mon, 29 Dec 2025 22:18:42 -0800 Subject: [PATCH 154/220] Signaling ready when no StreamTable rendered --- frontend/src/pages/Channels.jsx | 1 + 1 file changed, 1 insertion(+) diff --git a/frontend/src/pages/Channels.jsx b/frontend/src/pages/Channels.jsx index 0fe4f7a7..b7b87b17 100644 --- a/frontend/src/pages/Channels.jsx +++ b/frontend/src/pages/Channels.jsx @@ -65,6 +65,7 @@ const PageContent = () => { if (!authUser.id) return <>; if (authUser.user_level <= USER_LEVELS.STANDARD) { + handleStreamsReady(); return ( From b157159b8706aa91b5d8bb0a6b79eeb64fb6557d Mon Sep 17 00:00:00 2001 From: sethwv-alt Date: Wed, 31 Dec 2025 12:16:19 -0500 Subject: [PATCH 155/220] Fix root-owned __pycache__ by running Django commands as non-root user --- docker/Dockerfile | 3 --- docker/entrypoint.sh | 8 ++++---- docker/init/03-init-dispatcharr.sh | 1 + 3 files changed, 5 insertions(+), 7 deletions(-) diff --git a/docker/Dockerfile b/docker/Dockerfile index dc437227..bfb35c11 100644 --- a/docker/Dockerfile +++ b/docker/Dockerfile @@ -35,9 +35,6 @@ RUN rm -rf /app/frontend # Copy built frontend assets COPY --from=frontend-builder /app/frontend/dist /app/frontend/dist -# Run Django collectstatic -RUN python manage.py collectstatic --noinput - # Add timestamp argument ARG TIMESTAMP diff --git a/docker/entrypoint.sh b/docker/entrypoint.sh index 72eb5928..5de9bf0a 100755 --- a/docker/entrypoint.sh +++ b/docker/entrypoint.sh @@ -100,7 +100,7 @@ export POSTGRES_DIR=/data/db if [[ ! -f /etc/profile.d/dispatcharr.sh ]]; then # Define all variables to process variables=( - PATH VIRTUAL_ENV DJANGO_SETTINGS_MODULE PYTHONUNBUFFERED + PATH VIRTUAL_ENV DJANGO_SETTINGS_MODULE PYTHONUNBUFFERED PYTHONDONTWRITEBYTECODE POSTGRES_DB POSTGRES_USER POSTGRES_PASSWORD POSTGRES_HOST POSTGRES_PORT DISPATCHARR_ENV DISPATCHARR_DEBUG DISPATCHARR_LOG_LEVEL REDIS_HOST REDIS_DB POSTGRES_DIR DISPATCHARR_PORT @@ -174,9 +174,9 @@ else pids+=("$nginx_pid") fi -cd /app -python manage.py migrate --noinput -python manage.py collectstatic --noinput +# Run Django commands as non-root user to prevent permission issues +su - $POSTGRES_USER -c "cd /app && python manage.py migrate --noinput" +su - $POSTGRES_USER -c "cd /app && python manage.py collectstatic --noinput" # Select proper uwsgi config based on environment if [ "$DISPATCHARR_ENV" = "dev" ] && [ "$DISPATCHARR_DEBUG" != "true" ]; then diff --git a/docker/init/03-init-dispatcharr.sh b/docker/init/03-init-dispatcharr.sh index 03fe6816..0c317017 100644 --- a/docker/init/03-init-dispatcharr.sh +++ b/docker/init/03-init-dispatcharr.sh @@ -15,6 +15,7 @@ DATA_DIRS=( APP_DIRS=( "/app/logo_cache" "/app/media" + "/app/static" ) # Create all directories From a6361a07d2e42632736011ba51bcad9794ba7c73 Mon Sep 17 00:00:00 2001 From: Nick Sandstrom <32273437+nick4810@users.noreply.github.com> Date: Tue, 30 Dec 2025 23:31:29 -0800 Subject: [PATCH 156/220] Extracted component and util logic --- .../forms/settings/DvrSettingsForm.jsx | 263 +++ .../forms/settings/NetworkAccessForm.jsx | 161 ++ .../forms/settings/ProxySettingsForm.jsx | 166 ++ .../forms/settings/StreamSettingsForm.jsx | 306 ++++ .../forms/settings/SystemSettingsForm.jsx | 84 + .../forms/settings/UiSettingsForm.jsx | 142 ++ frontend/src/pages/Logos.jsx | 65 +- frontend/src/pages/Settings.jsx | 1474 ++--------------- frontend/src/utils/dateTimeUtils.js | 173 +- .../forms/settings/DvrSettingsFormUtils.js | 22 
+ .../forms/settings/NetworkAccessFormUtils.js | 29 + .../forms/settings/ProxySettingsFormUtils.js | 18 + .../forms/settings/StreamSettingsFormUtils.js | 19 + .../forms/settings/SystemSettingsFormUtils.js | 5 + .../forms/settings/UiSettingsFormUtils.js | 14 + frontend/src/utils/networkUtils.js | 4 + frontend/src/utils/pages/SettingsUtils.js | 104 ++ 17 files changed, 1660 insertions(+), 1389 deletions(-) create mode 100644 frontend/src/components/forms/settings/DvrSettingsForm.jsx create mode 100644 frontend/src/components/forms/settings/NetworkAccessForm.jsx create mode 100644 frontend/src/components/forms/settings/ProxySettingsForm.jsx create mode 100644 frontend/src/components/forms/settings/StreamSettingsForm.jsx create mode 100644 frontend/src/components/forms/settings/SystemSettingsForm.jsx create mode 100644 frontend/src/components/forms/settings/UiSettingsForm.jsx create mode 100644 frontend/src/utils/forms/settings/DvrSettingsFormUtils.js create mode 100644 frontend/src/utils/forms/settings/NetworkAccessFormUtils.js create mode 100644 frontend/src/utils/forms/settings/ProxySettingsFormUtils.js create mode 100644 frontend/src/utils/forms/settings/StreamSettingsFormUtils.js create mode 100644 frontend/src/utils/forms/settings/SystemSettingsFormUtils.js create mode 100644 frontend/src/utils/forms/settings/UiSettingsFormUtils.js create mode 100644 frontend/src/utils/networkUtils.js create mode 100644 frontend/src/utils/pages/SettingsUtils.js diff --git a/frontend/src/components/forms/settings/DvrSettingsForm.jsx b/frontend/src/components/forms/settings/DvrSettingsForm.jsx new file mode 100644 index 00000000..f03bdf66 --- /dev/null +++ b/frontend/src/components/forms/settings/DvrSettingsForm.jsx @@ -0,0 +1,263 @@ +import useSettingsStore from '../../../store/settings.jsx'; +import React, { useEffect, useState } from 'react'; +import { + getChangedSettings, + parseSettings, + saveChangedSettings, +} from '../../../utils/pages/SettingsUtils.js'; +import { showNotification } from '../../../utils/notificationUtils.js'; +import { + Alert, + Button, + FileInput, + Flex, + Group, + NumberInput, + Stack, + Switch, + Text, + TextInput, +} from '@mantine/core'; +import { + getComskipConfig, + getDvrSettingsFormInitialValues, + uploadComskipIni, +} from '../../../utils/forms/settings/DvrSettingsFormUtils.js'; +import { useForm } from '@mantine/form'; + +const DvrSettingsForm = React.memo(({ active }) => { + const settings = useSettingsStore((s) => s.settings); + const [saved, setSaved] = useState(false); + const [comskipFile, setComskipFile] = useState(null); + const [comskipUploadLoading, setComskipUploadLoading] = useState(false); + const [comskipConfig, setComskipConfig] = useState({ + path: '', + exists: false, + }); + + const form = useForm({ + mode: 'controlled', + initialValues: getDvrSettingsFormInitialValues(), + }); + + useEffect(() => { + if (!active) setSaved(false); + }, [active]); + + useEffect(() => { + if (settings) { + const formValues = parseSettings(settings); + + form.setValues(formValues); + + if (formValues['dvr-comskip-custom-path']) { + setComskipConfig((prev) => ({ + path: formValues['dvr-comskip-custom-path'], + exists: prev.exists, + })); + } + } + }, [settings]); + + useEffect(() => { + const loadComskipConfig = async () => { + try { + const response = await getComskipConfig(); + if (response) { + setComskipConfig({ + path: response.path || '', + exists: Boolean(response.exists), + }); + if (response.path) { + form.setFieldValue('dvr-comskip-custom-path', 
response.path); + } + } + } catch (error) { + console.error('Failed to load comskip config', error); + } + }; + loadComskipConfig(); + }, []); + + const onComskipUpload = async () => { + if (!comskipFile) { + return; + } + + setComskipUploadLoading(true); + try { + const response = await uploadComskipIni(comskipFile); + if (response?.path) { + showNotification({ + title: 'comskip.ini uploaded', + message: response.path, + autoClose: 3000, + color: 'green', + }); + form.setFieldValue('dvr-comskip-custom-path', response.path); + useSettingsStore.getState().updateSetting({ + ...(settings['dvr-comskip-custom-path'] || { + key: 'dvr-comskip-custom-path', + name: 'DVR Comskip Custom Path', + }), + value: response.path, + }); + setComskipConfig({ path: response.path, exists: true }); + } + } catch (error) { + console.error('Failed to upload comskip.ini', error); + } finally { + setComskipUploadLoading(false); + setComskipFile(null); + } + }; + + const onSubmit = async () => { + setSaved(false); + + const changedSettings = getChangedSettings(form.getValues(), settings); + + // Update each changed setting in the backend (create if missing) + try { + await saveChangedSettings(settings, changedSettings); + + setSaved(true); + } catch (error) { + // Error notifications are already shown by API functions + // Just don't show the success message + console.error('Error saving settings:', error); + } + }; + + return ( +
+ + {saved && ( + + )} + + + + + + + + {comskipConfig.exists && comskipConfig.path + ? `Using ${comskipConfig.path}` + : 'No custom comskip.ini uploaded.'} + + + + + + + + + + + +
+ ); +}); + +export default DvrSettingsForm; \ No newline at end of file diff --git a/frontend/src/components/forms/settings/NetworkAccessForm.jsx b/frontend/src/components/forms/settings/NetworkAccessForm.jsx new file mode 100644 index 00000000..1d2c42e7 --- /dev/null +++ b/frontend/src/components/forms/settings/NetworkAccessForm.jsx @@ -0,0 +1,161 @@ +import { NETWORK_ACCESS_OPTIONS } from '../../../constants.js'; +import useSettingsStore from '../../../store/settings.jsx'; +import React, { useEffect, useState } from 'react'; +import { useForm } from '@mantine/form'; +import { + checkSetting, + updateSetting, +} from '../../../utils/pages/SettingsUtils.js'; +import { Alert, Button, Flex, Stack, Text, TextInput } from '@mantine/core'; +import ConfirmationDialog from '../../ConfirmationDialog.jsx'; +import { + getNetworkAccessFormInitialValues, + getNetworkAccessFormValidation, +} from '../../../utils/forms/settings/NetworkAccessFormUtils.js'; + +const NetworkAccessForm = React.memo(({ active }) => { + const settings = useSettingsStore((s) => s.settings); + + const [networkAccessError, setNetworkAccessError] = useState(null); + const [saved, setSaved] = useState(false); + const [networkAccessConfirmOpen, setNetworkAccessConfirmOpen] = + useState(false); + const [netNetworkAccessConfirmCIDRs, setNetNetworkAccessConfirmCIDRs] = + useState([]); + const [clientIpAddress, setClientIpAddress] = useState(null); + + const networkAccessForm = useForm({ + mode: 'controlled', + initialValues: getNetworkAccessFormInitialValues(), + validate: getNetworkAccessFormValidation(), + }); + + useEffect(() => { + if(!active) setSaved(false); + }, [active]); + + useEffect(() => { + const networkAccessSettings = JSON.parse( + settings['network-access'].value || '{}' + ); + networkAccessForm.setValues( + Object.keys(NETWORK_ACCESS_OPTIONS).reduce((acc, key) => { + acc[key] = networkAccessSettings[key] || '0.0.0.0/0,::/0'; + return acc; + }, {}) + ); + }, [settings]); + + const onNetworkAccessSubmit = async () => { + setSaved(false); + setNetworkAccessError(null); + const check = await checkSetting({ + ...settings['network-access'], + value: JSON.stringify(networkAccessForm.getValues()), + }); + + if (check.error && check.message) { + setNetworkAccessError(`${check.message}: ${check.data}`); + return; + } + + // Store the client IP + setClientIpAddress(check.client_ip); + + // For now, only warn if we're blocking the UI + const blockedAccess = check.UI; + if (blockedAccess.length === 0) { + return saveNetworkAccess(); + } + + setNetNetworkAccessConfirmCIDRs(blockedAccess); + setNetworkAccessConfirmOpen(true); + }; + + const saveNetworkAccess = async () => { + setSaved(false); + try { + await updateSetting({ + ...settings['network-access'], + value: JSON.stringify(networkAccessForm.getValues()), + }); + setSaved(true); + setNetworkAccessConfirmOpen(false); + } catch (e) { + const errors = {}; + for (const key in e.body.value) { + errors[key] = `Invalid CIDR(s): ${e.body.value[key]}`; + } + networkAccessForm.setErrors(errors); + } + }; + + return ( + <> +
+ + {saved && ( + + )} + {networkAccessError && ( + + )} + + {Object.entries(NETWORK_ACCESS_OPTIONS).map(([key, config]) => ( + + ))} + + + + + +
+ + setNetworkAccessConfirmOpen(false)} + onConfirm={saveNetworkAccess} + title={`Confirm Network Access Blocks`} + message={ + <> + + Your client {clientIpAddress && `(${clientIpAddress}) `}is not + included in the allowed networks for the web UI. Are you sure you + want to proceed? + + +
    + {netNetworkAccessConfirmCIDRs.map((cidr) => ( +
  • {cidr}
  • + ))} +
+ + } + confirmLabel="Save" + cancelLabel="Cancel" + size="md" + /> + + ); +}); + +export default NetworkAccessForm; \ No newline at end of file diff --git a/frontend/src/components/forms/settings/ProxySettingsForm.jsx b/frontend/src/components/forms/settings/ProxySettingsForm.jsx new file mode 100644 index 00000000..7fc2d0cb --- /dev/null +++ b/frontend/src/components/forms/settings/ProxySettingsForm.jsx @@ -0,0 +1,166 @@ +import useSettingsStore from '../../../store/settings.jsx'; +import React, { useEffect, useState } from 'react'; +import { useForm } from '@mantine/form'; +import { updateSetting } from '../../../utils/pages/SettingsUtils.js'; +import { + Alert, + Button, + Flex, + NumberInput, + Stack, + TextInput, +} from '@mantine/core'; +import { PROXY_SETTINGS_OPTIONS } from '../../../constants.js'; +import { + getProxySettingDefaults, + getProxySettingsFormInitialValues, +} from '../../../utils/forms/settings/ProxySettingsFormUtils.js'; + +const ProxySettingsOptions = React.memo(({ proxySettingsForm }) => { + const isNumericField = (key) => { + // Determine if this field should be a NumberInput + return [ + 'buffering_timeout', + 'redis_chunk_ttl', + 'channel_shutdown_delay', + 'channel_init_grace_period', + ].includes(key); + }; + const isFloatField = (key) => { + return key === 'buffering_speed'; + }; + const getNumericFieldMax = (key) => { + return key === 'buffering_timeout' + ? 300 + : key === 'redis_chunk_ttl' + ? 3600 + : key === 'channel_shutdown_delay' + ? 300 + : 60; + }; + return ( + <> + {Object.entries(PROXY_SETTINGS_OPTIONS).map(([key, config]) => { + if (isNumericField(key)) { + return ( + + ); + } else if (isFloatField(key)) { + return ( + + ); + } else { + return ( + + ); + } + })} + + ); +}); + +const ProxySettingsForm = React.memo(({ active }) => { + const settings = useSettingsStore((s) => s.settings); + + const [saved, setSaved] = useState(false); + + const proxySettingsForm = useForm({ + mode: 'controlled', + initialValues: getProxySettingsFormInitialValues(), + }); + + useEffect(() => { + if(!active) setSaved(false); + }, [active]); + + useEffect(() => { + if (settings) { + if (settings['proxy-settings']?.value) { + try { + const proxySettings = JSON.parse(settings['proxy-settings'].value); + proxySettingsForm.setValues(proxySettings); + } catch (error) { + console.error('Error parsing proxy settings:', error); + } + } + } + }, [settings]); + + const resetProxySettingsToDefaults = () => { + proxySettingsForm.setValues(getProxySettingDefaults()); + }; + + const onProxySettingsSubmit = async () => { + setSaved(false); + + try { + const result = await updateSetting({ + ...settings['proxy-settings'], + value: JSON.stringify(proxySettingsForm.getValues()), + }); + // API functions return undefined on error + if (result) { + setSaved(true); + } + } catch (error) { + // Error notifications are already shown by API functions + console.error('Error saving proxy settings:', error); + } + }; + + return ( +
+ + {saved && ( + + )} + + + + + + + + +
+ ); +}); + +export default ProxySettingsForm; \ No newline at end of file diff --git a/frontend/src/components/forms/settings/StreamSettingsForm.jsx b/frontend/src/components/forms/settings/StreamSettingsForm.jsx new file mode 100644 index 00000000..1b6b466d --- /dev/null +++ b/frontend/src/components/forms/settings/StreamSettingsForm.jsx @@ -0,0 +1,306 @@ +import useSettingsStore from '../../../store/settings.jsx'; +import useWarningsStore from '../../../store/warnings.jsx'; +import useUserAgentsStore from '../../../store/userAgents.jsx'; +import useStreamProfilesStore from '../../../store/streamProfiles.jsx'; +import { REGION_CHOICES } from '../../../constants.js'; +import React, { useEffect, useState } from 'react'; +import { + getChangedSettings, + parseSettings, + rehashStreams, + saveChangedSettings, +} from '../../../utils/pages/SettingsUtils.js'; +import { + Alert, + Button, + Flex, + Group, + MultiSelect, + Select, + Switch, + Text, +} from '@mantine/core'; +import ConfirmationDialog from '../../ConfirmationDialog.jsx'; +import { useForm } from '@mantine/form'; +import { + getStreamSettingsFormInitialValues, + getStreamSettingsFormValidation, +} from '../../../utils/forms/settings/StreamSettingsFormUtils.js'; + +const StreamSettingsForm = React.memo(({ active }) => { + const settings = useSettingsStore((s) => s.settings); + const suppressWarning = useWarningsStore((s) => s.suppressWarning); + const isWarningSuppressed = useWarningsStore((s) => s.isWarningSuppressed); + const userAgents = useUserAgentsStore((s) => s.userAgents); + const streamProfiles = useStreamProfilesStore((s) => s.profiles); + const regionChoices = REGION_CHOICES; + + // Store pending changed settings when showing the dialog + const [pendingChangedSettings, setPendingChangedSettings] = useState(null); + + const [saved, setSaved] = useState(false); + const [rehashingStreams, setRehashingStreams] = useState(false); + const [rehashSuccess, setRehashSuccess] = useState(false); + const [rehashConfirmOpen, setRehashConfirmOpen] = useState(false); + + // Add a new state to track the dialog type + const [rehashDialogType, setRehashDialogType] = useState(null); // 'save' or 'rehash' + + const form = useForm({ + mode: 'controlled', + initialValues: getStreamSettingsFormInitialValues(), + validate: getStreamSettingsFormValidation(), + }); + + useEffect(() => { + if (!active) { + setSaved(false); + setRehashSuccess(false); + } + }, [active]); + + useEffect(() => { + if (settings) { + const formValues = parseSettings(settings); + + form.setValues(formValues); + } + }, [settings]); + + const executeSettingsSaveAndRehash = async () => { + setRehashConfirmOpen(false); + setSaved(false); + + // Use the stored pending values that were captured before the dialog was shown + const changedSettings = pendingChangedSettings || {}; + + // Update each changed setting in the backend (create if missing) + try { + await saveChangedSettings(settings, changedSettings); + + // Clear the pending values + setPendingChangedSettings(null); + setSaved(true); + } catch (error) { + // Error notifications are already shown by API functions + // Just don't show the success message + console.error('Error saving settings:', error); + setPendingChangedSettings(null); + } + }; + + const executeRehashStreamsOnly = async () => { + setRehashingStreams(true); + setRehashSuccess(false); + setRehashConfirmOpen(false); + + try { + await rehashStreams(); + setRehashSuccess(true); + setTimeout(() => setRehashSuccess(false), 5000); + } catch (error) { + 
console.error('Error rehashing streams:', error); + } finally { + setRehashingStreams(false); + } + }; + + const onRehashStreams = async () => { + // Skip warning if it's been suppressed + if (isWarningSuppressed('rehash-streams')) { + return executeRehashStreamsOnly(); + } + + setRehashDialogType('rehash'); // Set dialog type to rehash + setRehashConfirmOpen(true); + }; + + const handleRehashConfirm = () => { + if (rehashDialogType === 'save') { + executeSettingsSaveAndRehash(); + } else { + executeRehashStreamsOnly(); + } + }; + + const onSubmit = async () => { + setSaved(false); + + const values = form.getValues(); + const changedSettings = getChangedSettings(values, settings); + + const m3uHashKeyChanged = + settings['m3u-hash-key']?.value !== values['m3u-hash-key'].join(','); + + // If M3U hash key changed, show warning (unless suppressed) + if (m3uHashKeyChanged && !isWarningSuppressed('rehash-streams')) { + // Store the changed settings before showing dialog + setPendingChangedSettings(changedSettings); + setRehashDialogType('save'); // Set dialog type to save + setRehashConfirmOpen(true); + return; + } + + // Update each changed setting in the backend (create if missing) + try { + await saveChangedSettings(settings, changedSettings); + + setSaved(true); + } catch (error) { + // Error notifications are already shown by API functions + // Just don't show the success message + console.error('Error saving settings:', error); + } + }; + + return ( + <> +
+ {saved && ( + + )} + ({ + value: `${option.id}`, + label: option.name, + }))} + /> + onUISettingsChange('table-size', val)} + data={[ + { + value: 'default', + label: 'Default', + }, + { + value: 'compact', + label: 'Compact', + }, + { + value: 'large', + label: 'Large', + }, + ]} + /> + onUISettingsChange('date-format', val)} + data={[ + { + value: 'mdy', + label: 'MM/DD/YYYY', + }, + { + value: 'dmy', + label: 'DD/MM/YYYY', + }, + ]} + /> + onUISettingsChange('table-size', val)} - data={[ - { - value: 'default', - label: 'Default', - }, - { - value: 'compact', - label: 'Compact', - }, - { - value: 'large', - label: 'Large', - }, - ]} - /> - onUISettingsChange('date-format', val)} - data={[ - { - value: 'mdy', - label: 'MM/DD/YYYY', - }, - { - value: 'dmy', - label: 'DD/MM/YYYY', - }, - ]} - /> - ({ - value: `${option.id}`, - label: option.name, - }))} - /> - ({ - label: r.label, - value: `${r.value}`, - }))} - /> + + DVR + + + }> + + + + + - - - Auto-Import Mapped Files - - - + + Stream Settings + + + }> + + + + + - + + System Settings + + + }> + + + + + - {rehashSuccess && ( - - )} + + User-Agents + + + }> + + + + + - - - - - - - + + Stream Profiles + + + }> + + + + + - - System Settings - - - {generalSettingsSaved && ( - - )} - - Configure how many system events (channel start/stop, - buffering, etc.) to keep in the database. Events are - displayed on the Stats page. - - { - form.setFieldValue('max-system-events', value); - }} - min={10} - max={1000} - step={10} - /> - - - - - - - - - User-Agents - - - - - - - Stream Profiles - - - - - - - + + Network Access - {accordianValue == 'network-access' && ( + {accordianValue === 'network-access' && ( Comma-Delimited CIDR ranges )} - - -
- - {networkAccessSaved && ( - - )} - {networkAccessError && ( - - )} - {Object.entries(NETWORK_ACCESS_OPTIONS).map( - ([key, config]) => { - return ( - - ); - } - )} + + + + }> + + + + + - - - - -
-
-
- - - + + Proxy Settings - - -
- - {proxySettingsSaved && ( - - )} - {Object.entries(PROXY_SETTINGS_OPTIONS).map( - ([key, config]) => { - // Determine if this field should be a NumberInput - const isNumericField = [ - 'buffering_timeout', - 'redis_chunk_ttl', - 'channel_shutdown_delay', - 'channel_init_grace_period', - ].includes(key); + + + + }> + + + + + - const isFloatField = key === 'buffering_speed'; - - if (isNumericField) { - return ( - - ); - } else if (isFloatField) { - return ( - - ); - } else { - return ( - - ); - } - } - )} - - - - - - -
-
-
- - - Backup & Restore - - - - + + Backup & Restore + + + }> + + + + + )}
- - { - setRehashConfirmOpen(false); - setRehashDialogType(null); - // Clear pending values when dialog is cancelled - setPendingChangedSettings(null); - }} - onConfirm={handleRehashConfirm} - title={ - rehashDialogType === 'save' - ? 'Save Settings and Rehash Streams' - : 'Confirm Stream Rehash' - } - message={ -
- {`Are you sure you want to rehash all streams? - -This process may take a while depending on the number of streams. -Do not shut down Dispatcharr until the rehashing is complete. -M3U refreshes will be blocked until this process finishes. - -Please ensure you have time to let this complete before proceeding.`} -
- } - confirmLabel={ - rehashDialogType === 'save' ? 'Save and Rehash' : 'Start Rehash' - } - cancelLabel="Cancel" - actionKey="rehash-streams" - onSuppressChange={suppressWarning} - size="md" - /> - - setNetworkAccessConfirmOpen(false)} - onConfirm={saveNetworkAccess} - title={`Confirm Network Access Blocks`} - message={ - <> - - Your client {clientIpAddress && `(${clientIpAddress}) `}is not included in the allowed networks for the web - UI. Are you sure you want to proceed? - - -
    - {netNetworkAccessConfirmCIDRs.map((cidr) => ( -
  • {cidr}
  • - ))} -
- - } - confirmLabel="Save" - cancelLabel="Cancel" - size="md" - /> ); }; diff --git a/frontend/src/utils/dateTimeUtils.js b/frontend/src/utils/dateTimeUtils.js index b7490f88..7b6c6f2f 100644 --- a/frontend/src/utils/dateTimeUtils.js +++ b/frontend/src/utils/dateTimeUtils.js @@ -1,4 +1,4 @@ -import { useEffect, useCallback } from 'react'; +import { useCallback, useEffect } from 'react'; import dayjs from 'dayjs'; import duration from 'dayjs/plugin/duration'; import relativeTime from 'dayjs/plugin/relativeTime'; @@ -12,6 +12,41 @@ dayjs.extend(relativeTime); dayjs.extend(utc); dayjs.extend(timezone); +export const convertToMs = (dateTime) => dayjs(dateTime).valueOf(); + +export const initializeTime = (dateTime) => dayjs(dateTime); + +export const startOfDay = (dateTime) => dayjs(dateTime).startOf('day'); + +export const isBefore = (date1, date2) => dayjs(date1).isBefore(date2); + +export const isAfter = (date1, date2) => dayjs(date1).isAfter(date2); + +export const isSame = (date1, date2, unit = 'day') => + dayjs(date1).isSame(date2, unit); + +export const add = (dateTime, value, unit) => dayjs(dateTime).add(value, unit); + +export const diff = (date1, date2, unit = 'millisecond') => + dayjs(date1).diff(date2, unit); + +export const format = (dateTime, formatStr) => + dayjs(dateTime).format(formatStr); + +export const getNow = () => dayjs(); + +export const getNowMs = () => Date.now(); + +export const roundToNearest = (dateTime, minutes) => { + const current = initializeTime(dateTime); + const minute = current.minute(); + const snappedMinute = Math.round(minute / minutes) * minutes; + + return snappedMinute === 60 + ? current.add(1, 'hour').minute(0) + : current.minute(snappedMinute); +}; + export const useUserTimeZone = () => { const settings = useSettingsStore((s) => s.settings); const [timeZone, setTimeZone] = useLocalStorage( @@ -68,7 +103,7 @@ export const useDateTimeFormat = () => { const timeFormat = timeFormatSetting === '12h' ? 'h:mma' : 'HH:mm'; const dateFormat = dateFormatSetting === 'mdy' ? 'MMM D' : 'D MMM'; - return [timeFormat, dateFormat] + return [timeFormat, dateFormat]; }; export const toTimeString = (value) => { @@ -86,4 +121,138 @@ export const parseDate = (value) => { if (!value) return null; const parsed = dayjs(value, ['YYYY-MM-DD', dayjs.ISO_8601], true); return parsed.isValid() ? 
parsed.toDate() : null; +}; + +const TIMEZONE_FALLBACKS = [ + 'UTC', + 'America/New_York', + 'America/Chicago', + 'America/Denver', + 'America/Los_Angeles', + 'America/Phoenix', + 'America/Anchorage', + 'Pacific/Honolulu', + 'Europe/London', + 'Europe/Paris', + 'Europe/Berlin', + 'Europe/Madrid', + 'Europe/Warsaw', + 'Europe/Moscow', + 'Asia/Dubai', + 'Asia/Kolkata', + 'Asia/Shanghai', + 'Asia/Tokyo', + 'Asia/Seoul', + 'Australia/Sydney', +]; + +const getSupportedTimeZones = () => { + try { + if (typeof Intl.supportedValuesOf === 'function') { + return Intl.supportedValuesOf('timeZone'); + } + } catch (error) { + console.warn('Unable to enumerate supported time zones:', error); + } + return TIMEZONE_FALLBACKS; +}; + +const getTimeZoneOffsetMinutes = (date, timeZone) => { + try { + const dtf = new Intl.DateTimeFormat('en-US', { + timeZone, + year: 'numeric', + month: '2-digit', + day: '2-digit', + hour: '2-digit', + minute: '2-digit', + second: '2-digit', + hourCycle: 'h23', + }); + const parts = dtf.formatToParts(date).reduce((acc, part) => { + if (part.type !== 'literal') acc[part.type] = part.value; + return acc; + }, {}); + const asUTC = Date.UTC( + Number(parts.year), + Number(parts.month) - 1, + Number(parts.day), + Number(parts.hour), + Number(parts.minute), + Number(parts.second) + ); + return (asUTC - date.getTime()) / 60000; + } catch (error) { + console.warn(`Failed to compute offset for ${timeZone}:`, error); + return 0; + } +}; + +const formatOffset = (minutes) => { + const rounded = Math.round(minutes); + const sign = rounded < 0 ? '-' : '+'; + const absolute = Math.abs(rounded); + const hours = String(Math.floor(absolute / 60)).padStart(2, '0'); + const mins = String(absolute % 60).padStart(2, '0'); + return `UTC${sign}${hours}:${mins}`; +}; + +export const buildTimeZoneOptions = (preferredZone) => { + const zones = getSupportedTimeZones(); + const referenceYear = new Date().getUTCFullYear(); + const janDate = new Date(Date.UTC(referenceYear, 0, 1, 12, 0, 0)); + const julDate = new Date(Date.UTC(referenceYear, 6, 1, 12, 0, 0)); + + const options = zones + .map((zone) => { + const janOffset = getTimeZoneOffsetMinutes(janDate, zone); + const julOffset = getTimeZoneOffsetMinutes(julDate, zone); + const currentOffset = getTimeZoneOffsetMinutes(new Date(), zone); + const minOffset = Math.min(janOffset, julOffset); + const maxOffset = Math.max(janOffset, julOffset); + const usesDst = minOffset !== maxOffset; + const labelParts = [`now ${formatOffset(currentOffset)}`]; + if (usesDst) { + labelParts.push( + `DST range ${formatOffset(minOffset)} to ${formatOffset(maxOffset)}` + ); + } + return { + value: zone, + label: `${zone} (${labelParts.join(' | ')})`, + numericOffset: minOffset, + }; + }) + .sort((a, b) => { + if (a.numericOffset !== b.numericOffset) { + return a.numericOffset - b.numericOffset; + } + return a.value.localeCompare(b.value); + }); + if ( + preferredZone && + !options.some((option) => option.value === preferredZone) + ) { + const currentOffset = getTimeZoneOffsetMinutes(new Date(), preferredZone); + options.push({ + value: preferredZone, + label: `${preferredZone} (now ${formatOffset(currentOffset)})`, + numericOffset: currentOffset, + }); + options.sort((a, b) => { + if (a.numericOffset !== b.numericOffset) { + return a.numericOffset - b.numericOffset; + } + return a.value.localeCompare(b.value); + }); + } + return options; +}; + +export const getDefaultTimeZone = () => { + try { + return Intl.DateTimeFormat().resolvedOptions().timeZone || 'UTC'; + } catch (error) 
{ + return 'UTC'; + } }; \ No newline at end of file diff --git a/frontend/src/utils/forms/settings/DvrSettingsFormUtils.js b/frontend/src/utils/forms/settings/DvrSettingsFormUtils.js new file mode 100644 index 00000000..7fa272d0 --- /dev/null +++ b/frontend/src/utils/forms/settings/DvrSettingsFormUtils.js @@ -0,0 +1,22 @@ +import API from '../../../api.js'; + +export const getComskipConfig = async () => { + return await API.getComskipConfig(); +}; + +export const uploadComskipIni = async (file) => { + return await API.uploadComskipIni(file); +}; + +export const getDvrSettingsFormInitialValues = () => { + return { + 'dvr-tv-template': '', + 'dvr-movie-template': '', + 'dvr-tv-fallback-template': '', + 'dvr-movie-fallback-template': '', + 'dvr-comskip-enabled': false, + 'dvr-comskip-custom-path': '', + 'dvr-pre-offset-minutes': 0, + 'dvr-post-offset-minutes': 0, + }; +}; \ No newline at end of file diff --git a/frontend/src/utils/forms/settings/NetworkAccessFormUtils.js b/frontend/src/utils/forms/settings/NetworkAccessFormUtils.js new file mode 100644 index 00000000..fe1eea8a --- /dev/null +++ b/frontend/src/utils/forms/settings/NetworkAccessFormUtils.js @@ -0,0 +1,29 @@ +import { NETWORK_ACCESS_OPTIONS } from '../../../constants.js'; +import { IPV4_CIDR_REGEX, IPV6_CIDR_REGEX } from '../../networkUtils.js'; + +export const getNetworkAccessFormInitialValues = () => { + return Object.keys(NETWORK_ACCESS_OPTIONS).reduce((acc, key) => { + acc[key] = '0.0.0.0/0,::/0'; + return acc; + }, {}); +}; + +export const getNetworkAccessFormValidation = () => { + return Object.keys(NETWORK_ACCESS_OPTIONS).reduce((acc, key) => { + acc[key] = (value) => { + if ( + value + .split(',') + .some( + (cidr) => + !(cidr.match(IPV4_CIDR_REGEX) || cidr.match(IPV6_CIDR_REGEX)) + ) + ) { + return 'Invalid CIDR range'; + } + + return null; + }; + return acc; + }, {}); +}; \ No newline at end of file diff --git a/frontend/src/utils/forms/settings/ProxySettingsFormUtils.js b/frontend/src/utils/forms/settings/ProxySettingsFormUtils.js new file mode 100644 index 00000000..864dd9b1 --- /dev/null +++ b/frontend/src/utils/forms/settings/ProxySettingsFormUtils.js @@ -0,0 +1,18 @@ +import { PROXY_SETTINGS_OPTIONS } from '../../../constants.js'; + +export const getProxySettingsFormInitialValues = () => { + return Object.keys(PROXY_SETTINGS_OPTIONS).reduce((acc, key) => { + acc[key] = ''; + return acc; + }, {}); +}; + +export const getProxySettingDefaults = () => { + return { + buffering_timeout: 15, + buffering_speed: 1.0, + redis_chunk_ttl: 60, + channel_shutdown_delay: 0, + channel_init_grace_period: 5, + }; +}; \ No newline at end of file diff --git a/frontend/src/utils/forms/settings/StreamSettingsFormUtils.js b/frontend/src/utils/forms/settings/StreamSettingsFormUtils.js new file mode 100644 index 00000000..2ff5dd55 --- /dev/null +++ b/frontend/src/utils/forms/settings/StreamSettingsFormUtils.js @@ -0,0 +1,19 @@ +import { isNotEmpty } from '@mantine/form'; + +export const getStreamSettingsFormInitialValues = () => { + return { + 'default-user-agent': '', + 'default-stream-profile': '', + 'preferred-region': '', + 'auto-import-mapped-files': true, + 'm3u-hash-key': [], + }; +}; + +export const getStreamSettingsFormValidation = () => { + return { + 'default-user-agent': isNotEmpty('Select a user agent'), + 'default-stream-profile': isNotEmpty('Select a stream profile'), + 'preferred-region': isNotEmpty('Select a region'), + }; +}; \ No newline at end of file diff --git 
a/frontend/src/utils/forms/settings/SystemSettingsFormUtils.js b/frontend/src/utils/forms/settings/SystemSettingsFormUtils.js new file mode 100644 index 00000000..75c4f513 --- /dev/null +++ b/frontend/src/utils/forms/settings/SystemSettingsFormUtils.js @@ -0,0 +1,5 @@ +export const getSystemSettingsFormInitialValues = () => { + return { + 'max-system-events': 100, + }; +}; diff --git a/frontend/src/utils/forms/settings/UiSettingsFormUtils.js b/frontend/src/utils/forms/settings/UiSettingsFormUtils.js new file mode 100644 index 00000000..79e99d96 --- /dev/null +++ b/frontend/src/utils/forms/settings/UiSettingsFormUtils.js @@ -0,0 +1,14 @@ +import { createSetting, updateSetting } from '../../pages/SettingsUtils.js'; + +export const saveTimeZoneSetting = async (tzValue, settings) => { + const existing = settings['system-time-zone']; + if (existing?.id) { + await updateSetting({ ...existing, value: tzValue }); + } else { + await createSetting({ + key: 'system-time-zone', + name: 'System Time Zone', + value: tzValue, + }); + } +}; \ No newline at end of file diff --git a/frontend/src/utils/networkUtils.js b/frontend/src/utils/networkUtils.js new file mode 100644 index 00000000..8562face --- /dev/null +++ b/frontend/src/utils/networkUtils.js @@ -0,0 +1,4 @@ +export const IPV4_CIDR_REGEX = /^([0-9]{1,3}\.){3}[0-9]{1,3}\/\d+$/; + +export const IPV6_CIDR_REGEX = + /(?:(?:(?:[A-F0-9]{1,4}:){6}|(?=(?:[A-F0-9]{0,4}:){0,6}(?:[0-9]{1,3}\.){3}[0-9]{1,3}(?![:.\w]))(([0-9A-F]{1,4}:){0,5}|:)((:[0-9A-F]{1,4}){1,5}:|:)|::(?:[A-F0-9]{1,4}:){5})(?:(?:25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[1-9]?[0-9])\.){3}(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)|(?:[A-F0-9]{1,4}:){7}[A-F0-9]{1,4}|(?=(?:[A-F0-9]{0,4}:){0,7}[A-F0-9]{0,4}(?![:.\w]))(([0-9A-F]{1,4}:){1,7}|:)((:[0-9A-F]{1,4}){1,7}|:)|(?:[A-F0-9]{1,4}:){7}:|:(:[A-F0-9]{1,4}){7})(?![:.\w])\/(?:12[0-8]|1[01][0-9]|[1-9]?[0-9])/; diff --git a/frontend/src/utils/pages/SettingsUtils.js b/frontend/src/utils/pages/SettingsUtils.js new file mode 100644 index 00000000..e6179f06 --- /dev/null +++ b/frontend/src/utils/pages/SettingsUtils.js @@ -0,0 +1,104 @@ +import API from '../../api.js'; + +export const checkSetting = async (values) => { + return await API.checkSetting(values); +}; + +export const updateSetting = async (values) => { + return await API.updateSetting(values); +}; + +export const createSetting = async (values) => { + return await API.createSetting(values); +}; + +export const rehashStreams = async () => { + return await API.rehashStreams(); +}; + +export const saveChangedSettings = async (settings, changedSettings) => { + for (const updatedKey in changedSettings) { + const existing = settings[updatedKey]; + if (existing?.id) { + const result = await updateSetting({ + ...existing, + value: changedSettings[updatedKey], + }); + // API functions return undefined on error + if (!result) { + throw new Error('Failed to update setting'); + } + } else { + const result = await createSetting({ + key: updatedKey, + name: updatedKey.replace(/-/g, ' '), + value: changedSettings[updatedKey], + }); + // API functions return undefined on error + if (!result) { + throw new Error('Failed to create setting'); + } + } + } +}; + +export const getChangedSettings = (values, settings) => { + const changedSettings = {}; + + for (const settingKey in values) { + // Only compare against existing value if the setting exists + const existing = settings[settingKey]; + + // Convert array values (like m3u-hash-key) to comma-separated strings + const stringValue = Array.isArray(values[settingKey]) + ? 
values[settingKey].join(',') + : `${values[settingKey]}`; + + // Skip empty values to avoid validation errors + if (!stringValue) { + continue; + } + + if (!existing) { + // Create new setting on save + changedSettings[settingKey] = stringValue; + } else if (stringValue !== String(existing.value)) { + // If the user changed the setting's value from what's in the DB: + changedSettings[settingKey] = stringValue; + } + } + return changedSettings; +}; + +export const parseSettings = (settings) => { + return Object.entries(settings).reduce((acc, [key, value]) => { + // Modify each value based on its own properties + switch (value.value) { + case 'true': + value.value = true; + break; + case 'false': + value.value = false; + break; + } + + let val = null; + switch (key) { + case 'm3u-hash-key': + // Split comma-separated string, filter out empty strings + val = value.value ? value.value.split(',').filter((v) => v) : []; + break; + case 'dvr-pre-offset-minutes': + case 'dvr-post-offset-minutes': + val = Number.parseInt(value.value || '0', 10); + if (Number.isNaN(val)) val = 0; + break; + default: + val = value.value; + break; + } + + acc[key] = val; + return acc; + }, {}); +}; \ No newline at end of file From 7b1a85617f1933121377cbf29bdce838a9b609ea Mon Sep 17 00:00:00 2001 From: Nick Sandstrom <32273437+nick4810@users.noreply.github.com> Date: Wed, 31 Dec 2025 11:55:14 -0800 Subject: [PATCH 157/220] Minor changes Exporting UiSettingsForm as default Reverted admin level type check --- frontend/src/components/forms/settings/UiSettingsForm.jsx | 6 ++++-- frontend/src/pages/Settings.jsx | 4 ++-- 2 files changed, 6 insertions(+), 4 deletions(-) diff --git a/frontend/src/components/forms/settings/UiSettingsForm.jsx b/frontend/src/components/forms/settings/UiSettingsForm.jsx index 69feec74..c0f7b354 100644 --- a/frontend/src/components/forms/settings/UiSettingsForm.jsx +++ b/frontend/src/components/forms/settings/UiSettingsForm.jsx @@ -9,7 +9,7 @@ import { showNotification } from '../../../utils/notificationUtils.js'; import { Select } from '@mantine/core'; import { saveTimeZoneSetting } from '../../../utils/forms/settings/UiSettingsFormUtils.js'; -export const UiSettingsForm = React.memo(() => { +const UiSettingsForm = React.memo(() => { const settings = useSettingsStore((s) => s.settings); const [tableSize, setTableSize] = useLocalStorage('table-size', 'default'); @@ -139,4 +139,6 @@ export const UiSettingsForm = React.memo(() => { /> ); -}); \ No newline at end of file +}); + +export default UiSettingsForm; \ No newline at end of file diff --git a/frontend/src/pages/Settings.jsx b/frontend/src/pages/Settings.jsx index a1a54435..4ce519a3 100644 --- a/frontend/src/pages/Settings.jsx +++ b/frontend/src/pages/Settings.jsx @@ -17,7 +17,7 @@ const BackupManager = React.lazy(() => import('../components/backups/BackupManager.jsx')); import useAuthStore from '../store/auth'; import { USER_LEVELS } from '../constants'; -import { UiSettingsForm } from '../components/forms/settings/UiSettingsForm.jsx'; +import UiSettingsForm from '../components/forms/settings/UiSettingsForm.jsx'; import ErrorBoundary from '../components/ErrorBoundary.jsx'; const NetworkAccessForm = React.lazy(() => import('../components/forms/settings/NetworkAccessForm.jsx')); @@ -52,7 +52,7 @@ const SettingsPage = () => { - {authUser.user_level === USER_LEVELS.ADMIN && ( + {authUser.user_level == USER_LEVELS.ADMIN && ( <> DVR From b4b0774189bf71470c598958f8b00677e676b2bb Mon Sep 17 00:00:00 2001 From: Nick Sandstrom 
<32273437+nick4810@users.noreply.github.com> Date: Wed, 31 Dec 2025 13:20:09 -0800 Subject: [PATCH 158/220] Including notification util changes --- frontend/src/pages/Logos.jsx | 3 ++- frontend/src/utils/notificationUtils.js | 9 +++++++++ 2 files changed, 11 insertions(+), 1 deletion(-) create mode 100644 frontend/src/utils/notificationUtils.js diff --git a/frontend/src/pages/Logos.jsx b/frontend/src/pages/Logos.jsx index dd0bb5ad..f95212d6 100644 --- a/frontend/src/pages/Logos.jsx +++ b/frontend/src/pages/Logos.jsx @@ -4,6 +4,7 @@ import useLogosStore from '../store/logos'; import useVODLogosStore from '../store/vodLogos'; import LogosTable from '../components/tables/LogosTable'; import VODLogosTable from '../components/tables/VODLogosTable'; +import { showNotification } from '../utils/notificationUtils.js'; const LogosPage = () => { const logos = useLogosStore(s => s.logos); @@ -20,7 +21,7 @@ const LogosPage = () => { await useLogosStore.getState().fetchAllLogos(); } } catch (err) { - notifications.show({ + showNotification({ title: 'Error', message: 'Failed to load channel logos', color: 'red', diff --git a/frontend/src/utils/notificationUtils.js b/frontend/src/utils/notificationUtils.js new file mode 100644 index 00000000..ba965343 --- /dev/null +++ b/frontend/src/utils/notificationUtils.js @@ -0,0 +1,9 @@ +import { notifications } from '@mantine/notifications'; + +export function showNotification(notificationObject) { + return notifications.show(notificationObject); +} + +export function updateNotification(notificationId, notificationObject) { + return notifications.update(notificationId, notificationObject); +} \ No newline at end of file From c57f9fd7e7db1db5c30cbb4a9152dcc660bb74b3 Mon Sep 17 00:00:00 2001 From: patchy8736 <55986823+patchy8736@users.noreply.github.com> Date: Thu, 1 Jan 2026 15:57:27 +0100 Subject: [PATCH 159/220] Fix episode processing issues in VOD tasks - Ensure season and episode numbers are properly converted to integers with error handling - Remove zero-padding from debug log format for season/episode numbers - Add validation to filter out relations with unsaved episodes that have no primary key - Add proper logging for skipped relations when episode is not saved to database These changes address potential crashes when API returns string values instead of integers and prevent database errors when bulk creation operations fail silently due to conflicts. 
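For context, a minimal sketch of the failure modes the integer coercion guards against (illustrative values, not taken from real provider payloads):

    int("5")   # -> 5; XC APIs sometimes send numbers as strings
    int(None)  # -> TypeError; field missing from the payload
    int("")    # -> ValueError; field present but empty

Catching both ValueError and TypeError and defaulting to 0 keeps a single malformed episode from aborting the whole batch.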
Fixes issue #770 --- apps/vod/tasks.py | 28 +++++++++++++++++++++++++--- 1 file changed, 25 insertions(+), 3 deletions(-) diff --git a/apps/vod/tasks.py b/apps/vod/tasks.py index d42be946..6c874de1 100644 --- a/apps/vod/tasks.py +++ b/apps/vod/tasks.py @@ -1292,8 +1292,15 @@ def batch_process_episodes(account, series, episodes_data, scan_start_time=None) try: episode_id = str(episode_data.get('id')) episode_name = episode_data.get('title', 'Unknown Episode') - season_number = episode_data['_season_number'] - episode_number = episode_data.get('episode_num', 0) + # Ensure season and episode numbers are integers (API may return strings) + try: + season_number = int(episode_data['_season_number']) + except (ValueError, TypeError): + season_number = 0 + try: + episode_number = int(episode_data.get('episode_num', 0)) + except (ValueError, TypeError): + episode_number = 0 info = episode_data.get('info', {}) # Extract episode metadata @@ -1324,7 +1331,7 @@ def batch_process_episodes(account, series, episodes_data, scan_start_time=None) # Check if we already have this episode pending creation (multiple streams for same episode) if not episode and episode_key in episodes_pending_creation: episode = episodes_pending_creation[episode_key] - logger.debug(f"Reusing pending episode for S{season_number:02d}E{episode_number:02d} (stream_id: {episode_id})") + logger.debug(f"Reusing pending episode for S{season_number}E{episode_number} (stream_id: {episode_id})") if episode: # Update existing episode @@ -1432,6 +1439,21 @@ def batch_process_episodes(account, series, episodes_data, scan_start_time=None) if key in episode_pk_map: relation.episode = episode_pk_map[key] + # Filter out relations with unsaved episodes (no PK) + # This can happen if bulk_create had a conflict and ignore_conflicts=True didn't save the episode + valid_relations_to_create = [] + for relation in relations_to_create: + if relation.episode.pk is not None: + valid_relations_to_create.append(relation) + else: + season_num = relation.episode.season_number + episode_num = relation.episode.episode_number + logger.warning( + f"Skipping relation for episode S{season_num}E{episode_num} " + f"- episode not saved to database" + ) + relations_to_create = valid_relations_to_create + # Update existing episodes if episodes_to_update: Episode.objects.bulk_update(episodes_to_update, [ From 13e4b19960d8f412fda380bf1e5a123716151398 Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Thu, 1 Jan 2026 18:21:52 -0600 Subject: [PATCH 160/220] changelog: Add change for settings/logo refactor. 
--- CHANGELOG.md | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 00a87240..3a2dd297 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -32,6 +32,12 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 - Removed unused Dashboard and Home pages - Logo loading optimization: Logos now load only after both Channels and Streams tables complete loading to prevent blocking initial page render, with rendering gated by table readiness to ensure data loads before visual elements - M3U stream URLs now use `build_absolute_uri_with_port()` for consistency with EPG and logo URLs, ensuring uniform port handling across all M3U file URLs +- Settings and Logos page refactoring for improved readability and separation of concerns - Thanks [@nick4810](https://github.com/nick4810) (PR #795) + - Extracted individual settings forms (DVR, Network Access, Proxy, Stream, System, UI) into separate components with dedicated utility files + - Moved larger nested components into their own files + - Moved business logic into corresponding utils/ files + - Extracted larger in-line component logic into its own function + - Each panel in Settings now uses its own form state with the parent component handling active state management ### Fixed From e8c9432f650c781e12dd452f778d58648be8532a Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Thu, 1 Jan 2026 18:29:54 -0600 Subject: [PATCH 161/220] changelog: Update changelog for VOD category filtering. --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 3a2dd297..02c852d2 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -41,6 +41,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 ### Fixed +- VOD category filtering now correctly handles category names containing pipe "|" characters (e.g., "PL | BAJKI", "EN | MOVIES") by using `rsplit()` to split from the right instead of the left, ensuring the category type is correctly extracted as the last segment - Thanks [@Vitekant](https://github.com/Vitekant) - M3U and EPG URLs now correctly preserve non-standard HTTPS ports (e.g., `:8443`) when accessed behind reverse proxies that forward the port in headers — `get_host_and_port()` now properly checks `X-Forwarded-Port` header before falling back to other detection methods (Fixes #704) - M3U and EPG manager page no longer crashes when a playlist references a deleted channel group (Fixes screen blank on navigation) - Stream validation now returns original URL instead of redirected URL to prevent issues with temporary redirect URLs that expire before clients can connect From 6678311fa739952fafd8fe6787fc662410cb9387 Mon Sep 17 00:00:00 2001 From: Nick Sandstrom <32273437+nick4810@users.noreply.github.com> Date: Fri, 2 Jan 2026 02:03:50 -0800 Subject: [PATCH 162/220] Added loading overlay while programs are fetching --- frontend/src/pages/Guide.jsx | 16 ++++++++++++++-- 1 file changed, 14 insertions(+), 2 deletions(-) diff --git a/frontend/src/pages/Guide.jsx b/frontend/src/pages/Guide.jsx index 2ae80012..ac0fdf82 100644 --- a/frontend/src/pages/Guide.jsx +++ b/frontend/src/pages/Guide.jsx @@ -91,6 +91,8 @@ export default function TVChannelGuide({ startDate, endDate }) { const recordings = useChannelsStore((s) => s.recordings); const channelGroups = useChannelsStore((s) => s.channelGroups); const profiles = useChannelsStore((s) => s.profiles); + const isLoading = useChannelsStore((s) => s.isLoading); + const [isProgramsLoading, 
setIsProgramsLoading] = useState(true); const logos = useLogosStore((s) => s.logos); const tvgsById = useEPGsStore((s) => s.tvgsById); @@ -136,13 +138,22 @@ export default function TVChannelGuide({ startDate, endDate }) { if (Object.keys(channels).length === 0) { console.warn('No channels provided or empty channels array'); showNotification({ title: 'No channels available', color: 'red.5' }); + setIsProgramsLoading(false); return; } const sortedChannels = sortChannels(channels); - setGuideChannels(sortedChannels); - fetchPrograms().then((data) => setPrograms(data)); + + fetchPrograms() + .then((data) => { + setPrograms(data); + setIsProgramsLoading(false); + }) + .catch((error) => { + console.error('Failed to fetch programs:', error); + setIsProgramsLoading(false); + }); }, [channels]); // Apply filters when search, group, or profile changes @@ -1181,6 +1192,7 @@ export default function TVChannelGuide({ startDate, endDate }) { }} pos='relative' > + {nowPosition >= 0 && ( Date: Fri, 2 Jan 2026 09:53:45 -0600 Subject: [PATCH 163/220] Enhance error logging for invalid season and episode numbers in batch_process_episodes --- apps/vod/tasks.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/apps/vod/tasks.py b/apps/vod/tasks.py index 6c874de1..4eb9fadc 100644 --- a/apps/vod/tasks.py +++ b/apps/vod/tasks.py @@ -1295,11 +1295,13 @@ def batch_process_episodes(account, series, episodes_data, scan_start_time=None) # Ensure season and episode numbers are integers (API may return strings) try: season_number = int(episode_data['_season_number']) - except (ValueError, TypeError): + except (ValueError, TypeError) as e: + logger.warning(f"Invalid season_number '{episode_data.get('_season_number')}' for episode '{episode_name}': {e}") season_number = 0 try: episode_number = int(episode_data.get('episode_num', 0)) - except (ValueError, TypeError): + except (ValueError, TypeError) as e: + logger.warning(f"Invalid episode_num '{episode_data.get('episode_num')}' for episode '{episode_name}': {e}") episode_number = 0 info = episode_data.get('info', {}) From 6a985d7a7dc202b0c4ab025e381cbae8b0da0ec8 Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Fri, 2 Jan 2026 10:13:01 -0600 Subject: [PATCH 164/220] changelog: Update changelog for PR --- CHANGELOG.md | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 02c852d2..d41d3063 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -32,7 +32,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 - Removed unused Dashboard and Home pages - Logo loading optimization: Logos now load only after both Channels and Streams tables complete loading to prevent blocking initial page render, with rendering gated by table readiness to ensure data loads before visual elements - M3U stream URLs now use `build_absolute_uri_with_port()` for consistency with EPG and logo URLs, ensuring uniform port handling across all M3U file URLs -- Settings and Logos page refactoring for improved readability and separation of concerns - Thanks [@nick4810](https://github.com/nick4810) (PR #795) +- Settings and Logos page refactoring for improved readability and separation of concerns - Thanks [@nick4810](https://github.com/nick4810) - Extracted individual settings forms (DVR, Network Access, Proxy, Stream, System, UI) into separate components with dedicated utility files - Moved larger nested components into their own files - Moved business logic into corresponding utils/ files @@ -41,6 +41,8 @@ and 
this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 ### Fixed +- VOD episode processing now properly handles season and episode numbers from APIs that return string values instead of integers, with comprehensive error logging to track data quality issues - Thanks [@patchy8736](https://github.com/patchy8736) (Fixes #770) +- VOD episode-to-stream relations are now validated to ensure episodes have been saved to the database before creating relations, preventing integrity errors when bulk_create operations encounter conflicts - Thanks [@patchy8736](https://github.com/patchy8736) - VOD category filtering now correctly handles category names containing pipe "|" characters (e.g., "PL | BAJKI", "EN | MOVIES") by using `rsplit()` to split from the right instead of the left, ensuring the category type is correctly extracted as the last segment - Thanks [@Vitekant](https://github.com/Vitekant) - M3U and EPG URLs now correctly preserve non-standard HTTPS ports (e.g., `:8443`) when accessed behind reverse proxies that forward the port in headers — `get_host_and_port()` now properly checks `X-Forwarded-Port` header before falling back to other detection methods (Fixes #704) - M3U and EPG manager page no longer crashes when a playlist references a deleted channel group (Fixes screen blank on navigation) From 131ebf9f55894ac260542c2ba492d31892902219 Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Fri, 2 Jan 2026 11:29:01 -0600 Subject: [PATCH 165/220] changelog: Updated changelog for new refactor. --- CHANGELOG.md | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index d41d3063..516a9e4c 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -26,10 +26,12 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 - Stream log parsing refactored to use factory pattern: Simplified `ChannelService.parse_and_store_stream_info()` to route parsing through specialized log parsers instead of inline program-specific logic (~150 lines of code removed) - Stream profile names in fixtures updated to use proper capitalization (ffmpeg → FFmpeg, streamlink → Streamlink) - Frontend component refactoring for improved code organization and maintainability - Thanks [@nick4810](https://github.com/nick4810) - - Extracted large nested components into separate files (RecordingCard, RecordingDetailsModal, RecurringRuleModal, RecordingSynopsis) - - Moved business logic from components into dedicated utility files (dateTimeUtils, RecordingCardUtils, RecordingDetailsModalUtils, RecurringRuleModalUtils, DVRUtils) - - Lazy loaded heavy components (SuperuserForm, RecordingDetailsModal) with loading fallbacks + - Extracted large nested components into separate files (RecordingCard, RecordingDetailsModal, RecurringRuleModal, RecordingSynopsis, GuideRow, HourTimeline, PluginCard, ProgramRecordingModal, SeriesRecordingModal, Field) + - Moved business logic from components into dedicated utility files (dateTimeUtils, RecordingCardUtils, RecordingDetailsModalUtils, RecurringRuleModalUtils, DVRUtils, guideUtils, PluginsUtils, PluginCardUtils, notificationUtils) + - Lazy loaded heavy components (SuperuserForm, RecordingDetailsModal, ProgramRecordingModal, SeriesRecordingModal, PluginCard) with loading fallbacks - Removed unused Dashboard and Home pages + - Guide page refactoring: Extracted GuideRow and HourTimeline components, moved grid calculations and utility functions to guideUtils.js, added loading states for initial data fetching, improved 
performance through better memoization + - Plugins page refactoring: Extracted PluginCard and Field components, added Zustand store for plugin state management, improved plugin action confirmation handling, better separation of concerns between UI and business logic - Logo loading optimization: Logos now load only after both Channels and Streams tables complete loading to prevent blocking initial page render, with rendering gated by table readiness to ensure data loads before visual elements - M3U stream URLs now use `build_absolute_uri_with_port()` for consistency with EPG and logo URLs, ensuring uniform port handling across all M3U file URLs - Settings and Logos page refactoring for improved readability and separation of concerns - Thanks [@nick4810](https://github.com/nick4810) From 0cb189acba1b8c0495d40c2d29b2d3b921aa5122 Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Fri, 2 Jan 2026 12:03:42 -0600 Subject: [PATCH 166/220] changelog: Document Docker container file permissions update for Django management commands --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index d41d3063..c5264f9d 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -48,6 +48,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 - M3U and EPG manager page no longer crashes when a playlist references a deleted channel group (Fixes screen blank on navigation) - Stream validation now returns original URL instead of redirected URL to prevent issues with temporary redirect URLs that expire before clients can connect - XtreamCodes EPG limit parameter now properly converted to integer to prevent type errors when accessing EPG listings (Fixes #781) +- Docker container file permissions: Django management commands (`migrate`, `collectstatic`) now run as the non-root user to prevent root-owned `__pycache__` and static files from causing permission issues - Thanks [@sethwv](https://github.com/sethwv) - Stream validation now continues with GET request if HEAD request fails due to connection issues - Thanks [@kvnnap](https://github.com/kvnnap) (Fixes #782) - XtreamCodes M3U files now correctly set `x-tvg-url` and `url-tvg` headers to reference XC EPG URL (`xmltv.php`) instead of standard EPG endpoint when downloaded via XC API (Fixes #629) From 3f46f28a709dccbd60ae5c34053c8e0a913371c9 Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Fri, 2 Jan 2026 15:22:25 -0600 Subject: [PATCH 167/220] Bug Fix: Auto Channel Sync Force EPG Source feature not properly forcing "No EPG" assignment - When selecting "Force EPG Source" > "No EPG (Disabled)", channels were still being auto-matched to EPG data instead of forcing dummy/no EPG. Now correctly sets `force_dummy_epg` flag to prevent unwanted EPG assignment. (Fixes #788) --- CHANGELOG.md | 1 + .../src/components/forms/LiveGroupFilter.jsx | 133 ++++++++++++------ 2 files changed, 90 insertions(+), 44 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 381b5570..ef933e8f 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -43,6 +43,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 ### Fixed +- Auto Channel Sync Force EPG Source feature not properly forcing "No EPG" assignment - When selecting "Force EPG Source" > "No EPG (Disabled)", channels were still being auto-matched to EPG data instead of forcing dummy/no EPG. Now correctly sets `force_dummy_epg` flag to prevent unwanted EPG assignment. 
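For illustration, assuming a group's custom_properties JSON: choosing "No EPG (Disabled)" now persists `{"force_dummy_epg": true}` with no `custom_epg_id`, selecting a concrete EPG source persists `{"custom_epg_id": <id>}` and drops the dummy flag, and clearing the selector removes both keys.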
(Fixes #788) - VOD episode processing now properly handles season and episode numbers from APIs that return string values instead of integers, with comprehensive error logging to track data quality issues - Thanks [@patchy8736](https://github.com/patchy8736) (Fixes #770) - VOD episode-to-stream relations are now validated to ensure episodes have been saved to the database before creating relations, preventing integrity errors when bulk_create operations encounter conflicts - Thanks [@patchy8736](https://github.com/patchy8736) - VOD category filtering now correctly handles category names containing pipe "|" characters (e.g., "PL | BAJKI", "EN | MOVIES") by using `rsplit()` to split from the right instead of the left, ensuring the category type is correctly extracted as the last segment - Thanks [@Vitekant](https://github.com/Vitekant) diff --git a/frontend/src/components/forms/LiveGroupFilter.jsx b/frontend/src/components/forms/LiveGroupFilter.jsx index ef68bee8..b6e6494c 100644 --- a/frontend/src/components/forms/LiveGroupFilter.jsx +++ b/frontend/src/components/forms/LiveGroupFilter.jsx @@ -369,7 +369,8 @@ const LiveGroupFilter = ({ if ( group.custom_properties?.custom_epg_id !== undefined || - group.custom_properties?.force_dummy_epg + group.custom_properties?.force_dummy_epg || + group.custom_properties?.force_epg_selected ) { selectedValues.push('force_epg'); } @@ -432,23 +433,20 @@ const LiveGroupFilter = ({ // Handle force_epg if (selectedOptions.includes('force_epg')) { - // Migrate from old force_dummy_epg if present + // Set default to force_dummy_epg if no EPG settings exist yet if ( - newCustomProps.force_dummy_epg && - newCustomProps.custom_epg_id === undefined + newCustomProps.custom_epg_id === + undefined && + !newCustomProps.force_dummy_epg ) { - // Migrate: force_dummy_epg=true becomes custom_epg_id=null - newCustomProps.custom_epg_id = null; - delete newCustomProps.force_dummy_epg; - } else if ( - newCustomProps.custom_epg_id === undefined - ) { - // New configuration: initialize with null (no EPG/default dummy) - newCustomProps.custom_epg_id = null; + // Default to "No EPG (Disabled)" + newCustomProps.force_dummy_epg = true; } } else { - // Only remove custom_epg_id when deselected + // Remove all EPG settings when deselected delete newCustomProps.custom_epg_id; + delete newCustomProps.force_dummy_epg; + delete newCustomProps.force_epg_selected; } // Handle group_override @@ -1124,7 +1122,8 @@ const LiveGroupFilter = ({ {/* Show EPG selector when force_epg is selected */} {(group.custom_properties?.custom_epg_id !== undefined || - group.custom_properties?.force_dummy_epg) && ( + group.custom_properties?.force_dummy_epg || + group.custom_properties?.force_epg_selected) && ( { - // Handle migration from force_dummy_epg + // Show custom EPG if set if ( group.custom_properties?.custom_epg_id !== - undefined + undefined && + group.custom_properties?.custom_epg_id !== null ) { - // Convert to string, use '0' for null/no EPG - return group.custom_properties.custom_epg_id === - null - ? 
'0' - : group.custom_properties.custom_epg_id.toString(); - } else if ( - group.custom_properties?.force_dummy_epg - ) { - // Show "No EPG" for old force_dummy_epg configs + return group.custom_properties.custom_epg_id.toString(); + } + // Show "No EPG" if force_dummy_epg is set + if (group.custom_properties?.force_dummy_epg) { return '0'; } - return '0'; + // Otherwise show empty/placeholder + return null; })()} onChange={(value) => { - // Convert back: '0' means no EPG (null) - const newValue = - value === '0' ? null : parseInt(value); - setGroupStates( - groupStates.map((state) => { - if ( - state.channel_group === group.channel_group - ) { - return { - ...state, - custom_properties: { + if (value === '0') { + // "No EPG (Disabled)" selected - use force_dummy_epg + setGroupStates( + groupStates.map((state) => { + if ( + state.channel_group === + group.channel_group + ) { + const newProps = { ...state.custom_properties, - custom_epg_id: newValue, - }, - }; - } - return state; - }) - ); + }; + delete newProps.custom_epg_id; + delete newProps.force_epg_selected; + newProps.force_dummy_epg = true; + return { + ...state, + custom_properties: newProps, + }; + } + return state; + }) + ); + } else if (value) { + // Specific EPG source selected + const epgId = parseInt(value); + setGroupStates( + groupStates.map((state) => { + if ( + state.channel_group === + group.channel_group + ) { + const newProps = { + ...state.custom_properties, + }; + newProps.custom_epg_id = epgId; + delete newProps.force_dummy_epg; + delete newProps.force_epg_selected; + return { + ...state, + custom_properties: newProps, + }; + } + return state; + }) + ); + } else { + // Cleared - remove all EPG settings + setGroupStates( + groupStates.map((state) => { + if ( + state.channel_group === + group.channel_group + ) { + const newProps = { + ...state.custom_properties, + }; + delete newProps.custom_epg_id; + delete newProps.force_dummy_epg; + delete newProps.force_epg_selected; + return { + ...state, + custom_properties: newProps, + }; + } + return state; + }) + ); + } }} data={[ { value: '0', label: 'No EPG (Disabled)' }, From 9cc90354ee2fa0daa180ee015f84d4f52af87fd1 Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Fri, 2 Jan 2026 15:45:05 -0600 Subject: [PATCH 168/220] changelog: Update changelog for region code addition. --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index ef933e8f..9d75289e 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -22,6 +22,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 ### Changed - Fixed event viewer arrow direction (previously inverted) — UI behavior corrected. - Thanks [@drnikcuk](https://github.com/drnikcuk) (Closes #772) +- Region code options now intentionally include both `GB` (ISO 3166-1 standard) and `UK` (commonly used by EPG/XMLTV providers) to accommodate real-world EPG data variations. Many providers use `UK` in channel identifiers (e.g., `BBCOne.uk`) despite `GB` being the official ISO country code. 
Users should select the region code that matches their specific EPG provider's convention for optimal region-based EPG matching bonuses - Thanks [@bigpandaaaa](https://github.com/bigpandaaaa) - Channel number inputs in stream-to-channel creation modals no longer have a maximum value restriction, allowing users to enter any valid channel number supported by the database - Stream log parsing refactored to use factory pattern: Simplified `ChannelService.parse_and_store_stream_info()` to route parsing through specialized log parsers instead of inline program-specific logic (~150 lines of code removed) - Stream profile names in fixtures updated to use proper capitalization (ffmpeg → FFmpeg, streamlink → Streamlink) From e151da27b985bdca5de5c40ecc82b1ec10aa6a8c Mon Sep 17 00:00:00 2001 From: GitHub Actions Date: Sun, 4 Jan 2026 01:15:46 +0000 Subject: [PATCH 169/220] Release v0.16.0 --- CHANGELOG.md | 2 ++ version.py | 2 +- 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 9d75289e..114d42ce 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -7,6 +7,8 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 ## [Unreleased] +## [0.16.0] - 2026-01-04 + ### Added - Advanced filtering for Channels table: Filter menu now allows toggling disabled channels visibility (when a profile is selected) and filtering to show only empty channels without streams (Closes #182) diff --git a/version.py b/version.py index 714a29fd..0ac73ddd 100644 --- a/version.py +++ b/version.py @@ -1,5 +1,5 @@ """ Dispatcharr version information. """ -__version__ = '0.15.1' # Follow semantic versioning (MAJOR.MINOR.PATCH) +__version__ = '0.16.0' # Follow semantic versioning (MAJOR.MINOR.PATCH) __timestamp__ = None # Set during CI/CD build process From 48bdcfbd653336be9abef630b07ead0ef49e9281 Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Sun, 4 Jan 2026 12:05:01 -0600 Subject: [PATCH 170/220] Bug fix: Release workflow Docker tagging: Fixed issue where `latest` and version tags (e.g., `0.16.0`) were creating separate manifests instead of pointing to the same image digest, which caused old `latest` tags to become orphaned/untagged after new releases. Now creates a single multi-arch manifest with both tags, maintaining proper tag relationships and download statistics visibility on GitHub. --- .github/workflows/release.yml | 46 +++++------------------------------ CHANGELOG.md | 4 +++ 2 files changed, 10 insertions(+), 40 deletions(-) diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index a1cb27bb..9186541d 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -184,13 +184,13 @@ jobs: echo "Creating multi-arch manifest for ${OWNER}/${REPO}" # GitHub Container Registry manifests - # latest tag + # Create one manifest with both latest and version tags docker buildx imagetools create \ --annotation "index:org.opencontainers.image.title=${{ needs.prepare.outputs.repo_name }}" \ --annotation "index:org.opencontainers.image.description=Your ultimate IPTV & stream Management companion." 
\ --annotation "index:org.opencontainers.image.url=https://github.com/${{ github.repository }}" \ --annotation "index:org.opencontainers.image.source=https://github.com/${{ github.repository }}" \ - --annotation "index:org.opencontainers.image.version=latest" \ + --annotation "index:org.opencontainers.image.version=${VERSION}" \ --annotation "index:org.opencontainers.image.created=${TIMESTAMP}" \ --annotation "index:org.opencontainers.image.revision=${{ github.sha }}" \ --annotation "index:org.opencontainers.image.licenses=See repository" \ @@ -200,9 +200,11 @@ jobs: --annotation "index:maintainer=${{ github.actor }}" \ --annotation "index:build_version=Dispatcharr version: ${VERSION} Build date: ${TIMESTAMP}" \ --tag ghcr.io/${OWNER}/${REPO}:latest \ - ghcr.io/${OWNER}/${REPO}:latest-amd64 ghcr.io/${OWNER}/${REPO}:latest-arm64 + --tag ghcr.io/${OWNER}/${REPO}:${VERSION} \ + ghcr.io/${OWNER}/${REPO}:${VERSION}-amd64 ghcr.io/${OWNER}/${REPO}:${VERSION}-arm64 - # version tag + # Docker Hub manifests + # Create one manifest with both latest and version tags docker buildx imagetools create \ --annotation "index:org.opencontainers.image.title=${{ needs.prepare.outputs.repo_name }}" \ --annotation "index:org.opencontainers.image.description=Your ultimate IPTV & stream Management companion." \ @@ -217,43 +219,7 @@ jobs: --annotation "index:org.opencontainers.image.authors=${{ github.actor }}" \ --annotation "index:maintainer=${{ github.actor }}" \ --annotation "index:build_version=Dispatcharr version: ${VERSION} Build date: ${TIMESTAMP}" \ - --tag ghcr.io/${OWNER}/${REPO}:${VERSION} \ - ghcr.io/${OWNER}/${REPO}:${VERSION}-amd64 ghcr.io/${OWNER}/${REPO}:${VERSION}-arm64 - - # Docker Hub manifests - # latest tag - docker buildx imagetools create \ - --annotation "index:org.opencontainers.image.title=${{ needs.prepare.outputs.repo_name }}" \ - --annotation "index:org.opencontainers.image.description=Your ultimate IPTV & stream Management companion." \ - --annotation "index:org.opencontainers.image.url=https://github.com/${{ github.repository }}" \ - --annotation "index:org.opencontainers.image.source=https://github.com/${{ github.repository }}" \ - --annotation "index:org.opencontainers.image.version=latest" \ - --annotation "index:org.opencontainers.image.created=${TIMESTAMP}" \ - --annotation "index:org.opencontainers.image.revision=${{ github.sha }}" \ - --annotation "index:org.opencontainers.image.licenses=See repository" \ - --annotation "index:org.opencontainers.image.documentation=https://dispatcharr.github.io/Dispatcharr-Docs/" \ - --annotation "index:org.opencontainers.image.vendor=${OWNER}" \ - --annotation "index:org.opencontainers.image.authors=${{ github.actor }}" \ - --annotation "index:maintainer=${{ github.actor }}" \ - --annotation "index:build_version=Dispatcharr version: ${VERSION} Build date: ${TIMESTAMP}" \ --tag docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${REPO}:latest \ - docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${REPO}:latest-amd64 docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${REPO}:latest-arm64 - - # version tag - docker buildx imagetools create \ - --annotation "index:org.opencontainers.image.title=${{ needs.prepare.outputs.repo_name }}" \ - --annotation "index:org.opencontainers.image.description=Your ultimate IPTV & stream Management companion." 
\ - --annotation "index:org.opencontainers.image.url=https://github.com/${{ github.repository }}" \ - --annotation "index:org.opencontainers.image.source=https://github.com/${{ github.repository }}" \ - --annotation "index:org.opencontainers.image.version=${VERSION}" \ - --annotation "index:org.opencontainers.image.created=${TIMESTAMP}" \ - --annotation "index:org.opencontainers.image.revision=${{ github.sha }}" \ - --annotation "index:org.opencontainers.image.licenses=See repository" \ - --annotation "index:org.opencontainers.image.documentation=https://dispatcharr.github.io/Dispatcharr-Docs/" \ - --annotation "index:org.opencontainers.image.vendor=${OWNER}" \ - --annotation "index:org.opencontainers.image.authors=${{ github.actor }}" \ - --annotation "index:maintainer=${{ github.actor }}" \ - --annotation "index:build_version=Dispatcharr version: ${VERSION} Build date: ${TIMESTAMP}" \ --tag docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${REPO}:${VERSION} \ docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${REPO}:${VERSION}-amd64 docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${REPO}:${VERSION}-arm64 diff --git a/CHANGELOG.md b/CHANGELOG.md index 114d42ce..ade00702 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -7,6 +7,10 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 ## [Unreleased] +### Fixed + +- Release workflow Docker tagging: Fixed issue where `latest` and version tags (e.g., `0.16.0`) were creating separate manifests instead of pointing to the same image digest, which caused old `latest` tags to become orphaned/untagged after new releases. Now creates a single multi-arch manifest with both tags, maintaining proper tag relationships and download statistics visibility on GitHub. + ## [0.16.0] - 2026-01-04 ### Added From 8ae1a98a3b681bf088b6384149a0d20b23e7b13d Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Sun, 4 Jan 2026 14:05:30 -0600 Subject: [PATCH 171/220] Bug Fix: Fixed onboarding message appearing in the Channels Table when filtered results are empty. The onboarding message now only displays when there are no channels created at all, not when channels exist but are filtered out by current filters. --- CHANGELOG.md | 1 + frontend/src/components/tables/ChannelsTable.jsx | 9 +++++---- 2 files changed, 6 insertions(+), 4 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index ade00702..62f57a3a 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -10,6 +10,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 ### Fixed - Release workflow Docker tagging: Fixed issue where `latest` and version tags (e.g., `0.16.0`) were creating separate manifests instead of pointing to the same image digest, which caused old `latest` tags to become orphaned/untagged after new releases. Now creates a single multi-arch manifest with both tags, maintaining proper tag relationships and download statistics visibility on GitHub. +- Fixed onboarding message appearing in the Channels Table when filtered results are empty. The onboarding message now only displays when there are no channels created at all, not when channels exist but are filtered out by current filters. 
## [0.16.0] - 2026-01-04 diff --git a/frontend/src/components/tables/ChannelsTable.jsx b/frontend/src/components/tables/ChannelsTable.jsx index efaf5ca7..80599a6e 100644 --- a/frontend/src/components/tables/ChannelsTable.jsx +++ b/frontend/src/components/tables/ChannelsTable.jsx @@ -1380,12 +1380,13 @@ const ChannelsTable = ({ onReady }) => { {/* Table or ghost empty state inside Paper */} - {channelsTableLength === 0 && ( - - )} + {channelsTableLength === 0 && + Object.keys(channels).length === 0 && ( + + )} - {channelsTableLength > 0 && ( + {(channelsTableLength > 0 || Object.keys(channels).length > 0) && ( Date: Sun, 4 Jan 2026 14:36:03 -0600 Subject: [PATCH 172/220] Bug Fix: `M3UMovieRelation.get_stream_url()` and `M3UEpisodeRelation.get_stream_url()` to use XC client's `_normalize_url()` method instead of simple `rstrip('/')`. This properly handles malformed M3U account URLs (e.g., containing `/player_api.php` or query parameters) before constructing VOD stream endpoints, matching behavior of live channel URL building. (Closes #722) --- CHANGELOG.md | 1 + apps/vod/models.py | 13 +++++++++---- 2 files changed, 10 insertions(+), 4 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 62f57a3a..a66ed26f 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -11,6 +11,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 - Release workflow Docker tagging: Fixed issue where `latest` and version tags (e.g., `0.16.0`) were creating separate manifests instead of pointing to the same image digest, which caused old `latest` tags to become orphaned/untagged after new releases. Now creates a single multi-arch manifest with both tags, maintaining proper tag relationships and download statistics visibility on GitHub. - Fixed onboarding message appearing in the Channels Table when filtered results are empty. The onboarding message now only displays when there are no channels created at all, not when channels exist but are filtered out by current filters. +- Fixed `M3UMovieRelation.get_stream_url()` and `M3UEpisodeRelation.get_stream_url()` to use XC client's `_normalize_url()` method instead of simple `rstrip('/')`. This properly handles malformed M3U account URLs (e.g., containing `/player_api.php` or query parameters) before constructing VOD stream endpoints, matching behavior of live channel URL building. 
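For example, with a hypothetical account URL: a server_url stored as `http://host:8080/player_api.php?username=x` survives `rstrip('/')` unchanged and previously yielded `http://host:8080/player_api.php?username=x/movie/<user>/<pass>/<id>.mp4`; normalizing the URL first strips the API path and query string so the endpoint is built from the server root, e.g. `http://host:8080/movie/<user>/<pass>/<id>.mp4`.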
(Closes #722) ## [0.16.0] - 2026-01-04 diff --git a/apps/vod/models.py b/apps/vod/models.py index 69aed808..7067856e 100644 --- a/apps/vod/models.py +++ b/apps/vod/models.py @@ -245,10 +245,13 @@ class M3UMovieRelation(models.Model): """Get the full stream URL for this movie from this provider""" # Build URL dynamically for XtreamCodes accounts if self.m3u_account.account_type == 'XC': - server_url = self.m3u_account.server_url.rstrip('/') + from core.xtream_codes import Client as XCClient + # Use XC client's URL normalization to handle malformed URLs + # (e.g., URLs with /player_api.php or query parameters) + normalized_url = XCClient(self.m3u_account.server_url, '', '')._normalize_url(self.m3u_account.server_url) username = self.m3u_account.username password = self.m3u_account.password - return f"{server_url}/movie/{username}/{password}/{self.stream_id}.{self.container_extension or 'mp4'}" + return f"{normalized_url}/movie/{username}/{password}/{self.stream_id}.{self.container_extension or 'mp4'}" else: # For other account types, we would need another way to build URLs return None @@ -285,10 +288,12 @@ class M3UEpisodeRelation(models.Model): if self.m3u_account.account_type == 'XC': # For XtreamCodes accounts, build the URL dynamically - server_url = self.m3u_account.server_url.rstrip('/') + # Use XC client's URL normalization to handle malformed URLs + # (e.g., URLs with /player_api.php or query parameters) + normalized_url = XtreamCodesClient(self.m3u_account.server_url, '', '')._normalize_url(self.m3u_account.server_url) username = self.m3u_account.username password = self.m3u_account.password - return f"{server_url}/series/{username}/{password}/{self.stream_id}.{self.container_extension or 'mp4'}" + return f"{normalized_url}/series/{username}/{password}/{self.stream_id}.{self.container_extension or 'mp4'}" else: # We might support non XC accounts in the future # For now, return None From 4e65ffd113b4db37a519d176e0a75d6247ad8a33 Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Sun, 4 Jan 2026 15:00:08 -0600 Subject: [PATCH 173/220] Bug fix: Fixed VOD profile connection count not being decremented when stream connection fails (timeout, 404, etc.), preventing profiles from reaching capacity limits and rejecting valid stream requests --- CHANGELOG.md | 1 + .../multi_worker_connection_manager.py | 18 ++++++++++++++++++ 2 files changed, 19 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index a66ed26f..fe0f2964 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -9,6 +9,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 ### Fixed +- Fixed VOD profile connection count not being decremented when stream connection fails (timeout, 404, etc.), preventing profiles from reaching capacity limits and rejecting valid stream requests - Release workflow Docker tagging: Fixed issue where `latest` and version tags (e.g., `0.16.0`) were creating separate manifests instead of pointing to the same image digest, which caused old `latest` tags to become orphaned/untagged after new releases. Now creates a single multi-arch manifest with both tags, maintaining proper tag relationships and download statistics visibility on GitHub. - Fixed onboarding message appearing in the Channels Table when filtered results are empty. The onboarding message now only displays when there are no channels created at all, not when channels exist but are filtered out by current filters. 
- Fixed `M3UMovieRelation.get_stream_url()` and `M3UEpisodeRelation.get_stream_url()` to use XC client's `_normalize_url()` method instead of simple `rstrip('/')`. This properly handles malformed M3U account URLs (e.g., containing `/player_api.php` or query parameters) before constructing VOD stream endpoints, matching behavior of live channel URL building. (Closes #722) diff --git a/apps/proxy/vod_proxy/multi_worker_connection_manager.py b/apps/proxy/vod_proxy/multi_worker_connection_manager.py index 251721c5..decda351 100644 --- a/apps/proxy/vod_proxy/multi_worker_connection_manager.py +++ b/apps/proxy/vod_proxy/multi_worker_connection_manager.py @@ -712,6 +712,10 @@ class MultiWorkerVODConnectionManager: content_name = content_obj.name if hasattr(content_obj, 'name') else str(content_obj) client_id = session_id + # Track whether we incremented profile connections (for cleanup on error) + profile_connections_incremented = False + redis_connection = None + logger.info(f"[{client_id}] Worker {self.worker_id} - Redis-backed streaming request for {content_type} {content_name}") try: @@ -802,6 +806,7 @@ class MultiWorkerVODConnectionManager: # Increment profile connections after successful connection creation self._increment_profile_connections(m3u_profile) + profile_connections_incremented = True logger.info(f"[{client_id}] Worker {self.worker_id} - Created consolidated connection with session metadata") else: @@ -1024,6 +1029,19 @@ class MultiWorkerVODConnectionManager: except Exception as e: logger.error(f"[{client_id}] Worker {self.worker_id} - Error in Redis-backed stream_content_with_session: {e}", exc_info=True) + + # Decrement profile connections if we incremented them but failed before streaming started + if profile_connections_incremented: + logger.info(f"[{client_id}] Connection error occurred after profile increment - decrementing profile connections") + self._decrement_profile_connections(m3u_profile.id) + + # Also clean up the Redis connection state since we won't be using it + if redis_connection: + try: + redis_connection.cleanup(connection_manager=self, current_worker_id=self.worker_id) + except Exception as cleanup_error: + logger.error(f"[{client_id}] Error during cleanup after connection failure: {cleanup_error}") + return HttpResponse(f"Streaming error: {str(e)}", status=500) def _apply_timeshift_parameters(self, original_url, utc_start=None, utc_end=None, offset=None): From 9612a674120992e574eb2e25fb83848b38040dff Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Sun, 4 Jan 2026 15:21:22 -0600 Subject: [PATCH 174/220] Change: VOD upstream read timeout reduced from 30 seconds to 10 seconds to minimize lock hold time when clients disconnect during connection phase --- CHANGELOG.md | 4 ++++ apps/proxy/vod_proxy/multi_worker_connection_manager.py | 4 ++-- 2 files changed, 6 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index fe0f2964..59b7487b 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -7,6 +7,10 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 ## [Unreleased] +### Changed + +- VOD upstream read timeout reduced from 30 seconds to 10 seconds to minimize lock hold time when clients disconnect during connection phase + ### Fixed - Fixed VOD profile connection count not being decremented when stream connection fails (timeout, 404, etc.), preventing profiles from reaching capacity limits and rejecting valid stream requests diff --git a/apps/proxy/vod_proxy/multi_worker_connection_manager.py 
b/apps/proxy/vod_proxy/multi_worker_connection_manager.py index decda351..1534f761 100644 --- a/apps/proxy/vod_proxy/multi_worker_connection_manager.py +++ b/apps/proxy/vod_proxy/multi_worker_connection_manager.py @@ -357,12 +357,12 @@ class RedisBackedVODConnection: logger.info(f"[{self.session_id}] Making request #{state.request_count} to {'final' if state.final_url else 'original'} URL") - # Make request + # Make request (10s connect, 10s read timeout - keeps lock time reasonable if client disconnects) response = self.local_session.get( target_url, headers=headers, stream=True, - timeout=(10, 30), + timeout=(10, 10), allow_redirects=allow_redirects ) response.raise_for_status() From 16bbc1d87531cc991be4d47b1455c4551c95e3f0 Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Sun, 4 Jan 2026 20:40:16 -0600 Subject: [PATCH 175/220] Refactor forms to use react-hook-form and Yup for validation - Replaced Formik with react-hook-form in Logo, M3UGroupFilter, M3UProfile, Stream, StreamProfile, and UserAgent components. - Integrated Yup for schema validation in all updated forms. - Updated form submission logic to accommodate new form handling methods. - Adjusted state management and error handling to align with react-hook-form's API. - Ensured compatibility with existing functionality while improving code readability and maintainability. --- apps/channels/api_views.py | 25 +- apps/channels/serializers.py | 12 +- frontend/package-lock.json | 100 ++---- frontend/package.json | 3 +- frontend/src/components/forms/Channel.jsx | 322 ++++++++---------- frontend/src/components/forms/Logo.jsx | 283 +++++++-------- .../src/components/forms/M3UGroupFilter.jsx | 1 - frontend/src/components/forms/M3UProfile.jsx | 232 ++++++------- frontend/src/components/forms/Stream.jsx | 148 ++++---- .../src/components/forms/StreamProfile.jsx | 109 +++--- frontend/src/components/forms/UserAgent.jsx | 108 +++--- 11 files changed, 635 insertions(+), 708 deletions(-) diff --git a/apps/channels/api_views.py b/apps/channels/api_views.py index aebb74a3..e162f63a 100644 --- a/apps/channels/api_views.py +++ b/apps/channels/api_views.py @@ -236,12 +236,8 @@ class ChannelGroupViewSet(viewsets.ModelViewSet): return [Authenticated()] def get_queryset(self): - """Add annotation for association counts""" - from django.db.models import Count - return ChannelGroup.objects.annotate( - channel_count=Count('channels', distinct=True), - m3u_account_count=Count('m3u_accounts', distinct=True) - ) + """Return channel groups with prefetched relations for efficient counting""" + return ChannelGroup.objects.prefetch_related('channels', 'm3u_accounts').all() def update(self, request, *args, **kwargs): """Override update to check M3U associations""" @@ -277,15 +273,20 @@ class ChannelGroupViewSet(viewsets.ModelViewSet): @action(detail=False, methods=["post"], url_path="cleanup") def cleanup_unused_groups(self, request): """Delete all channel groups with no channels or M3U account associations""" - from django.db.models import Count + from django.db.models import Q, Exists, OuterRef + + # Find groups with no channels and no M3U account associations using Exists subqueries + from .models import Channel, ChannelGroupM3UAccount + + has_channels = Channel.objects.filter(channel_group_id=OuterRef('pk')) + has_accounts = ChannelGroupM3UAccount.objects.filter(channel_group_id=OuterRef('pk')) - # Find groups with no channels and no M3U account associations unused_groups = ChannelGroup.objects.annotate( - channel_count=Count('channels', distinct=True), - 
m3u_account_count=Count('m3u_accounts', distinct=True) + has_channels=Exists(has_channels), + has_accounts=Exists(has_accounts) ).filter( - channel_count=0, - m3u_account_count=0 + has_channels=False, + has_accounts=False ) deleted_count = unused_groups.count() diff --git a/apps/channels/serializers.py b/apps/channels/serializers.py index 635281d5..8847050d 100644 --- a/apps/channels/serializers.py +++ b/apps/channels/serializers.py @@ -179,8 +179,8 @@ class ChannelGroupM3UAccountSerializer(serializers.ModelSerializer): # Channel Group # class ChannelGroupSerializer(serializers.ModelSerializer): - channel_count = serializers.IntegerField(read_only=True) - m3u_account_count = serializers.IntegerField(read_only=True) + channel_count = serializers.SerializerMethodField() + m3u_account_count = serializers.SerializerMethodField() m3u_accounts = ChannelGroupM3UAccountSerializer( many=True, read_only=True @@ -190,6 +190,14 @@ class ChannelGroupSerializer(serializers.ModelSerializer): model = ChannelGroup fields = ["id", "name", "channel_count", "m3u_account_count", "m3u_accounts"] + def get_channel_count(self, obj): + """Get count of channels in this group""" + return obj.channels.count() + + def get_m3u_account_count(self, obj): + """Get count of M3U accounts associated with this group""" + return obj.m3u_accounts.count() + class ChannelProfileSerializer(serializers.ModelSerializer): channels = serializers.SerializerMethodField() diff --git a/frontend/package-lock.json b/frontend/package-lock.json index 84d18989..ed9e6010 100644 --- a/frontend/package-lock.json +++ b/frontend/package-lock.json @@ -12,6 +12,7 @@ "@dnd-kit/modifiers": "^9.0.0", "@dnd-kit/sortable": "^10.0.0", "@dnd-kit/utilities": "^3.2.2", + "@hookform/resolvers": "^5.2.2", "@mantine/charts": "~8.0.1", "@mantine/core": "~8.0.1", "@mantine/dates": "~8.0.1", @@ -22,13 +23,13 @@ "@tanstack/react-table": "^8.21.2", "allotment": "^1.20.4", "dayjs": "^1.11.13", - "formik": "^2.4.6", "hls.js": "^1.5.20", "lucide-react": "^0.511.0", "mpegts.js": "^1.8.0", "react": "^19.1.0", "react-dom": "^19.1.0", "react-draggable": "^4.4.6", + "react-hook-form": "^7.70.0", "react-pro-sidebar": "^1.1.0", "react-router-dom": "^7.3.0", "react-virtualized": "^9.22.6", @@ -1248,6 +1249,18 @@ "integrity": "sha512-aGTxbpbg8/b5JfU1HXSrbH3wXZuLPJcNEcZQFMxLs3oSzgtVu6nFPkbbGGUvBcUjKV2YyB9Wxxabo+HEH9tcRQ==", "license": "MIT" }, + "node_modules/@hookform/resolvers": { + "version": "5.2.2", + "resolved": "https://registry.npmjs.org/@hookform/resolvers/-/resolvers-5.2.2.tgz", + "integrity": "sha512-A/IxlMLShx3KjV/HeTcTfaMxdwy690+L/ZADoeaTltLx+CVuzkeVIPuybK3jrRfw7YZnmdKsVVHAlEPIAEUNlA==", + "license": "MIT", + "dependencies": { + "@standard-schema/utils": "^0.3.0" + }, + "peerDependencies": { + "react-hook-form": "^7.55.0" + } + }, "node_modules/@humanfs/core": { "version": "0.19.1", "resolved": "https://registry.npmjs.org/@humanfs/core/-/core-0.19.1.tgz", @@ -1776,6 +1789,12 @@ "win32" ] }, + "node_modules/@standard-schema/utils": { + "version": "0.3.0", + "resolved": "https://registry.npmjs.org/@standard-schema/utils/-/utils-0.3.0.tgz", + "integrity": "sha512-e7Mew686owMaPJVNNLs55PUvgz371nKgwsc4vxE49zsODpJEnxgxRo2y/OKrqueavXgZNMDVj3DdHFlaSAeU8g==", + "license": "MIT" + }, "node_modules/@swc/core": { "name": "@swc/wasm", "version": "1.13.20", @@ -2008,18 +2027,6 @@ "dev": true, "license": "MIT" }, - "node_modules/@types/hoist-non-react-statics": { - "version": "3.3.7", - "resolved": 
"https://registry.npmjs.org/@types/hoist-non-react-statics/-/hoist-non-react-statics-3.3.7.tgz", - "integrity": "sha512-PQTyIulDkIDro8P+IHbKCsw7U2xxBYflVzW/FgWdCAePD9xGSidgA76/GeJ6lBKoblyhf9pBY763gbrN+1dI8g==", - "license": "MIT", - "dependencies": { - "hoist-non-react-statics": "^3.3.0" - }, - "peerDependencies": { - "@types/react": "*" - } - }, "node_modules/@types/json-schema": { "version": "7.0.15", "resolved": "https://registry.npmjs.org/@types/json-schema/-/json-schema-7.0.15.tgz", @@ -2037,6 +2044,7 @@ "version": "19.2.7", "resolved": "https://registry.npmjs.org/@types/react/-/react-19.2.7.tgz", "integrity": "sha512-MWtvHrGZLFttgeEj28VXHxpmwYbor/ATPYbBfSFZEIRK0ecCFLl2Qo55z52Hss+UV9CRN7trSeq1zbgx7YDWWg==", + "devOptional": true, "license": "MIT", "dependencies": { "csstype": "^3.2.2" @@ -2833,15 +2841,6 @@ "dev": true, "license": "MIT" }, - "node_modules/deepmerge": { - "version": "2.2.1", - "resolved": "https://registry.npmjs.org/deepmerge/-/deepmerge-2.2.1.tgz", - "integrity": "sha512-R9hc1Xa/NOBi9WRVUWg19rl1UB7Tt4kuPd+thNJgFZoxXsTz7ncaPaeIm+40oSGuP33DfMb4sZt1QIGiJzC4EA==", - "license": "MIT", - "engines": { - "node": ">=0.10.0" - } - }, "node_modules/dequal": { "version": "2.0.3", "resolved": "https://registry.npmjs.org/dequal/-/dequal-2.0.3.tgz", @@ -3288,31 +3287,6 @@ "dev": true, "license": "ISC" }, - "node_modules/formik": { - "version": "2.4.9", - "resolved": "https://registry.npmjs.org/formik/-/formik-2.4.9.tgz", - "integrity": "sha512-5nI94BMnlFDdQRBY4Sz39WkhxajZJ57Fzs8wVbtsQlm5ScKIR1QLYqv/ultBnobObtlUyxpxoLodpixrsf36Og==", - "funding": [ - { - "type": "individual", - "url": "https://opencollective.com/formik" - } - ], - "license": "Apache-2.0", - "dependencies": { - "@types/hoist-non-react-statics": "^3.3.1", - "deepmerge": "^2.1.1", - "hoist-non-react-statics": "^3.3.0", - "lodash": "^4.17.21", - "lodash-es": "^4.17.21", - "react-fast-compare": "^2.0.1", - "tiny-warning": "^1.0.2", - "tslib": "^2.0.0" - }, - "peerDependencies": { - "react": ">=16.8.0" - } - }, "node_modules/fsevents": { "version": "2.3.3", "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.3.tgz", @@ -3751,12 +3725,6 @@ "integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==", "license": "MIT" }, - "node_modules/lodash-es": { - "version": "4.17.22", - "resolved": "https://registry.npmjs.org/lodash-es/-/lodash-es-4.17.22.tgz", - "integrity": "sha512-XEawp1t0gxSi9x01glktRZ5HDy0HXqrM0x5pXQM98EaI0NxO6jVM7omDOxsuEo5UIASAnm2bRp1Jt/e0a2XU8Q==", - "license": "MIT" - }, "node_modules/lodash.clamp": { "version": "4.0.3", "resolved": "https://registry.npmjs.org/lodash.clamp/-/lodash.clamp-4.0.3.tgz", @@ -4334,11 +4302,21 @@ "react": ">= 16.8 || 18.0.0" } }, - "node_modules/react-fast-compare": { - "version": "2.0.4", - "resolved": "https://registry.npmjs.org/react-fast-compare/-/react-fast-compare-2.0.4.tgz", - "integrity": "sha512-suNP+J1VU1MWFKcyt7RtjiSWUjvidmQSlqu+eHslq+342xCbGTYmC0mEhPCOHxlW0CywylOC1u2DFAT+bv4dBw==", - "license": "MIT" + "node_modules/react-hook-form": { + "version": "7.70.0", + "resolved": "https://registry.npmjs.org/react-hook-form/-/react-hook-form-7.70.0.tgz", + "integrity": "sha512-COOMajS4FI3Wuwrs3GPpi/Jeef/5W1DRR84Yl5/ShlT3dKVFUfoGiEZ/QE6Uw8P4T2/CLJdcTVYKvWBMQTEpvw==", + "license": "MIT", + "engines": { + "node": ">=18.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/react-hook-form" + }, + "peerDependencies": { + "react": "^16.8.0 || ^17 || ^18 || ^19" + } }, 
"node_modules/react-is": { "version": "16.13.1", @@ -4923,12 +4901,6 @@ "integrity": "sha512-+FbBPE1o9QAYvviau/qC5SE3caw21q3xkvWKBtja5vgqOWIHHJ3ioaq1VPfn/Szqctz2bU/oYeKd9/z5BL+PVg==", "license": "MIT" }, - "node_modules/tiny-warning": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/tiny-warning/-/tiny-warning-1.0.3.tgz", - "integrity": "sha512-lBN9zLN/oAf68o3zNXYrdCt1kP8WsiGW8Oo2ka41b2IM5JL/S1CTyX1rW0mb/zSuJun0ZUrDxx4sqvYS2FWzPA==", - "license": "MIT" - }, "node_modules/tinybench": { "version": "2.9.0", "resolved": "https://registry.npmjs.org/tinybench/-/tinybench-2.9.0.tgz", diff --git a/frontend/package.json b/frontend/package.json index ff5be72d..7b2d5927 100644 --- a/frontend/package.json +++ b/frontend/package.json @@ -23,11 +23,12 @@ "@mantine/form": "~8.0.1", "@mantine/hooks": "~8.0.1", "@mantine/notifications": "~8.0.1", + "@hookform/resolvers": "^5.2.2", "@tanstack/react-table": "^8.21.2", "allotment": "^1.20.4", "dayjs": "^1.11.13", - "formik": "^2.4.6", "hls.js": "^1.5.20", + "react-hook-form": "^7.70.0", "lucide-react": "^0.511.0", "mpegts.js": "^1.8.0", "react": "^19.1.0", diff --git a/frontend/src/components/forms/Channel.jsx b/frontend/src/components/forms/Channel.jsx index cc6c5f47..d9eb3f9d 100644 --- a/frontend/src/components/forms/Channel.jsx +++ b/frontend/src/components/forms/Channel.jsx @@ -1,5 +1,6 @@ import React, { useState, useEffect, useRef, useMemo } from 'react'; -import { useFormik } from 'formik'; +import { useForm } from 'react-hook-form'; +import { yupResolver } from '@hookform/resolvers/yup'; import * as Yup from 'yup'; import useChannelsStore from '../../store/channels'; import API from '../../api'; @@ -42,6 +43,11 @@ import useEPGsStore from '../../store/epgs'; import { FixedSizeList as List } from 'react-window'; import { USER_LEVELS, USER_LEVEL_LABELS } from '../../constants'; +const validationSchema = Yup.object({ + name: Yup.string().required('Name is required'), + channel_group_id: Yup.string().required('Channel group is required'), +}); + const ChannelForm = ({ channel = null, isOpen, onClose }) => { const theme = useMantineTheme(); @@ -100,7 +106,7 @@ const ChannelForm = ({ channel = null, isOpen, onClose }) => { const handleLogoSuccess = ({ logo }) => { if (logo && logo.id) { - formik.setFieldValue('logo_id', logo.id); + setValue('logo_id', logo.id); ensureLogosLoaded(); // Refresh logos } setLogoModalOpen(false); @@ -124,7 +130,7 @@ const ChannelForm = ({ channel = null, isOpen, onClose }) => { if (response.matched) { // Update the form with the new EPG data if (response.channel && response.channel.epg_data_id) { - formik.setFieldValue('epg_data_id', response.channel.epg_data_id); + setValue('epg_data_id', response.channel.epg_data_id); } notifications.show({ @@ -152,7 +158,7 @@ const ChannelForm = ({ channel = null, isOpen, onClose }) => { }; const handleSetNameFromEpg = () => { - const epgDataId = formik.values.epg_data_id; + const epgDataId = watch('epg_data_id'); if (!epgDataId) { notifications.show({ title: 'No EPG Selected', @@ -164,7 +170,7 @@ const ChannelForm = ({ channel = null, isOpen, onClose }) => { const tvg = tvgsById[epgDataId]; if (tvg && tvg.name) { - formik.setFieldValue('name', tvg.name); + setValue('name', tvg.name); notifications.show({ title: 'Success', message: `Channel name set to "${tvg.name}"`, @@ -180,7 +186,7 @@ const ChannelForm = ({ channel = null, isOpen, onClose }) => { }; const handleSetLogoFromEpg = async () => { - const epgDataId = formik.values.epg_data_id; + const epgDataId = 
watch('epg_data_id'); if (!epgDataId) { notifications.show({ title: 'No EPG Selected', @@ -207,7 +213,7 @@ const ChannelForm = ({ channel = null, isOpen, onClose }) => { ); if (matchingLogo) { - formik.setFieldValue('logo_id', matchingLogo.id); + setValue('logo_id', matchingLogo.id); notifications.show({ title: 'Success', message: `Logo set to "${matchingLogo.name}"`, @@ -231,7 +237,7 @@ const ChannelForm = ({ channel = null, isOpen, onClose }) => { // Create logo by calling the Logo API directly const newLogo = await API.createLogo(newLogoData); - formik.setFieldValue('logo_id', newLogo.id); + setValue('logo_id', newLogo.id); notifications.update({ id: 'creating-logo', @@ -264,7 +270,7 @@ const ChannelForm = ({ channel = null, isOpen, onClose }) => { }; const handleSetTvgIdFromEpg = () => { - const epgDataId = formik.values.epg_data_id; + const epgDataId = watch('epg_data_id'); if (!epgDataId) { notifications.show({ title: 'No EPG Selected', @@ -276,7 +282,7 @@ const ChannelForm = ({ channel = null, isOpen, onClose }) => { const tvg = tvgsById[epgDataId]; if (tvg && tvg.tvg_id) { - formik.setFieldValue('tvg_id', tvg.tvg_id); + setValue('tvg_id', tvg.tvg_id); notifications.show({ title: 'Success', message: `TVG-ID set to "${tvg.tvg_id}"`, @@ -291,130 +297,130 @@ const ChannelForm = ({ channel = null, isOpen, onClose }) => { } }; - const formik = useFormik({ - initialValues: { - name: '', - channel_number: '', // Change from 0 to empty string for consistency - channel_group_id: - Object.keys(channelGroups).length > 0 + const defaultValues = useMemo( + () => ({ + name: channel?.name || '', + channel_number: + channel?.channel_number !== null && + channel?.channel_number !== undefined + ? channel.channel_number + : '', + channel_group_id: channel?.channel_group_id + ? `${channel.channel_group_id}` + : Object.keys(channelGroups).length > 0 ? Object.keys(channelGroups)[0] : '', - stream_profile_id: '0', - tvg_id: '', - tvc_guide_stationid: '', - epg_data_id: '', - logo_id: '', - user_level: '0', - }, - validationSchema: Yup.object({ - name: Yup.string().required('Name is required'), - channel_group_id: Yup.string().required('Channel group is required'), + stream_profile_id: channel?.stream_profile_id + ? `${channel.stream_profile_id}` + : '0', + tvg_id: channel?.tvg_id || '', + tvc_guide_stationid: channel?.tvc_guide_stationid || '', + epg_data_id: channel?.epg_data_id ?? '', + logo_id: channel?.logo_id ? `${channel.logo_id}` : '', + user_level: `${channel?.user_level ?? 
'0'}`, }), - onSubmit: async (values, { setSubmitting }) => { - let response; + [channel, channelGroups] + ); - try { - const formattedValues = { ...values }; + const { + register, + handleSubmit, + setValue, + watch, + reset, + formState: { errors, isSubmitting }, + } = useForm({ + defaultValues, + resolver: yupResolver(validationSchema), + }); - // Convert empty or "0" stream_profile_id to null for the API - if ( - !formattedValues.stream_profile_id || - formattedValues.stream_profile_id === '0' - ) { - formattedValues.stream_profile_id = null; - } + const onSubmit = async (values) => { + let response; - // Ensure tvg_id is properly included (no empty strings) - formattedValues.tvg_id = formattedValues.tvg_id || null; + try { + const formattedValues = { ...values }; - // Ensure tvc_guide_stationid is properly included (no empty strings) - formattedValues.tvc_guide_stationid = - formattedValues.tvc_guide_stationid || null; + // Convert empty or "0" stream_profile_id to null for the API + if ( + !formattedValues.stream_profile_id || + formattedValues.stream_profile_id === '0' + ) { + formattedValues.stream_profile_id = null; + } - if (channel) { - // If there's an EPG to set, use our enhanced endpoint - if (values.epg_data_id !== (channel.epg_data_id ?? '')) { - // Use the special endpoint to set EPG and trigger refresh - const epgResponse = await API.setChannelEPG( - channel.id, - values.epg_data_id - ); + // Ensure tvg_id is properly included (no empty strings) + formattedValues.tvg_id = formattedValues.tvg_id || null; - // Remove epg_data_id from values since we've handled it separately - const { epg_data_id, ...otherValues } = formattedValues; + // Ensure tvc_guide_stationid is properly included (no empty strings) + formattedValues.tvc_guide_stationid = + formattedValues.tvc_guide_stationid || null; - // Update other channel fields if needed - if (Object.keys(otherValues).length > 0) { - response = await API.updateChannel({ - id: channel.id, - ...otherValues, - streams: channelStreams.map((stream) => stream.id), - }); - } - } else { - // No EPG change, regular update + if (channel) { + // If there's an EPG to set, use our enhanced endpoint + if (values.epg_data_id !== (channel.epg_data_id ?? 
'')) { + // Use the special endpoint to set EPG and trigger refresh + const epgResponse = await API.setChannelEPG( + channel.id, + values.epg_data_id + ); + + // Remove epg_data_id from values since we've handled it separately + const { epg_data_id, ...otherValues } = formattedValues; + + // Update other channel fields if needed + if (Object.keys(otherValues).length > 0) { response = await API.updateChannel({ id: channel.id, - ...formattedValues, + ...otherValues, streams: channelStreams.map((stream) => stream.id), }); } } else { - // New channel creation - use the standard method - response = await API.addChannel({ + // No EPG change, regular update + response = await API.updateChannel({ + id: channel.id, ...formattedValues, streams: channelStreams.map((stream) => stream.id), }); } - } catch (error) { - console.error('Error saving channel:', error); + } else { + // New channel creation - use the standard method + response = await API.addChannel({ + ...formattedValues, + streams: channelStreams.map((stream) => stream.id), + }); } + } catch (error) { + console.error('Error saving channel:', error); + } - formik.resetForm(); - API.requeryChannels(); + reset(); + API.requeryChannels(); - // Refresh channel profiles to update the membership information - useChannelsStore.getState().fetchChannelProfiles(); + // Refresh channel profiles to update the membership information + useChannelsStore.getState().fetchChannelProfiles(); - setSubmitting(false); - setTvgFilter(''); - setLogoFilter(''); - onClose(); - }, - }); + setTvgFilter(''); + setLogoFilter(''); + onClose(); + }; useEffect(() => { - if (channel) { - if (channel.epg_data_id) { - const epgSource = epgs[tvgsById[channel.epg_data_id]?.epg_source]; - setSelectedEPG(epgSource ? `${epgSource.id}` : ''); - } + reset(defaultValues); + setChannelStreams(channel?.streams || []); - formik.setValues({ - name: channel.name || '', - channel_number: - channel.channel_number !== null ? channel.channel_number : '', - channel_group_id: channel.channel_group_id - ? `${channel.channel_group_id}` - : '', - stream_profile_id: channel.stream_profile_id - ? `${channel.stream_profile_id}` - : '0', - tvg_id: channel.tvg_id || '', - tvc_guide_stationid: channel.tvc_guide_stationid || '', - epg_data_id: channel.epg_data_id ?? '', - logo_id: channel.logo_id ? `${channel.logo_id}` : '', - user_level: `${channel.user_level}`, - }); - - setChannelStreams(channel.streams || []); + if (channel?.epg_data_id) { + const epgSource = epgs[tvgsById[channel.epg_data_id]?.epg_source]; + setSelectedEPG(epgSource ? `${epgSource.id}` : ''); } else { - formik.resetForm(); + setSelectedEPG(''); + } + + if (!channel) { setTvgFilter(''); setLogoFilter(''); - setChannelStreams([]); // Ensure streams are cleared when adding a new channel } - }, [channel, tvgsById, channelGroups]); + }, [defaultValues, channel, reset, epgs, tvgsById]); // Memoize logo options to prevent infinite re-renders during background loading const logoOptions = useMemo(() => { @@ -431,10 +437,7 @@ const ChannelForm = ({ channel = null, isOpen, onClose }) => { // If a new group was created and returned, update the form with it if (newGroup && newGroup.id) { // Preserve all current form values while updating just the channel_group_id - formik.setValues({ - ...formik.values, - channel_group_id: `${newGroup.id}`, - }); + setValue('channel_group_id', `${newGroup.id}`); } }; @@ -472,7 +475,7 @@ const ChannelForm = ({ channel = null, isOpen, onClose }) => { } styles={{ content: { '--mantine-color-body': '#27272A' } }} > -
+ { label={ Channel Name - {formik.values.epg_data_id && ( + {watch('epg_data_id') && ( @@ -933,7 +906,7 @@ const ChannelForm = ({ channel = null, isOpen, onClose }) => { } readOnly value={(() => { - const tvg = tvgsById[formik.values.epg_data_id]; + const tvg = tvgsById[watch('epg_data_id')]; const epgSource = tvg && epgs[tvg.epg_source]; const tvgLabel = tvg ? tvg.name || tvg.id : ''; if (epgSource && tvgLabel) { @@ -953,7 +926,7 @@ const ChannelForm = ({ channel = null, isOpen, onClose }) => { color="white" onClick={(e) => { e.stopPropagation(); - formik.setFieldValue('epg_data_id', null); + setValue('epg_data_id', null); }} title="Create new group" size="small" @@ -1012,12 +985,9 @@ const ChannelForm = ({ channel = null, isOpen, onClose }) => { size="xs" onClick={() => { if (filteredTvgs[index].id == '0') { - formik.setFieldValue('epg_data_id', null); + setValue('epg_data_id', null); } else { - formik.setFieldValue( - 'epg_data_id', - filteredTvgs[index].id - ); + setValue('epg_data_id', filteredTvgs[index].id); // Also update selectedEPG to match the EPG source of the selected tvg if (filteredTvgs[index].epg_source) { setSelectedEPG( @@ -1047,11 +1017,11 @@ const ChannelForm = ({ channel = null, isOpen, onClose }) => { diff --git a/frontend/src/components/forms/Logo.jsx b/frontend/src/components/forms/Logo.jsx index 8362b891..c6e63ba6 100644 --- a/frontend/src/components/forms/Logo.jsx +++ b/frontend/src/components/forms/Logo.jsx @@ -1,5 +1,6 @@ -import React, { useState, useEffect } from 'react'; -import { useFormik } from 'formik'; +import React, { useState, useEffect, useMemo } from 'react'; +import { useForm } from 'react-hook-form'; +import { yupResolver } from '@hookform/resolvers/yup'; import * as Yup from 'yup'; import { Modal, @@ -18,143 +19,148 @@ import { Upload, FileImage, X } from 'lucide-react'; import { notifications } from '@mantine/notifications'; import API from '../../api'; +const schema = Yup.object({ + name: Yup.string().required('Name is required'), + url: Yup.string() + .required('URL is required') + .test( + 'valid-url-or-path', + 'Must be a valid URL or local file path', + (value) => { + if (!value) return false; + // Allow local file paths starting with /data/logos/ + if (value.startsWith('/data/logos/')) return true; + // Allow valid URLs + try { + new URL(value); + return true; + } catch { + return false; + } + } + ), +}); + const LogoForm = ({ logo = null, isOpen, onClose, onSuccess }) => { const [logoPreview, setLogoPreview] = useState(null); const [uploading, setUploading] = useState(false); const [selectedFile, setSelectedFile] = useState(null); // Store selected file - const formik = useFormik({ - initialValues: { - name: '', - url: '', - }, - validationSchema: Yup.object({ - name: Yup.string().required('Name is required'), - url: Yup.string() - .required('URL is required') - .test( - 'valid-url-or-path', - 'Must be a valid URL or local file path', - (value) => { - if (!value) return false; - // Allow local file paths starting with /data/logos/ - if (value.startsWith('/data/logos/')) return true; - // Allow valid URLs - try { - new URL(value); - return true; - } catch { - return false; - } - } - ), + const defaultValues = useMemo( + () => ({ + name: logo?.name || '', + url: logo?.url || '', }), - onSubmit: async (values, { setSubmitting }) => { - try { - setUploading(true); - let uploadResponse = null; // Store upload response for later use + [logo] + ); - // If we have a selected file, upload it first - if (selectedFile) { - try { - uploadResponse = 
await API.uploadLogo(selectedFile, values.name); - // Use the uploaded file data instead of form values - values.name = uploadResponse.name; - values.url = uploadResponse.url; - } catch (uploadError) { - let errorMessage = 'Failed to upload logo file'; - - if ( - uploadError.code === 'NETWORK_ERROR' || - uploadError.message?.includes('timeout') - ) { - errorMessage = 'Upload timed out. Please try again.'; - } else if (uploadError.status === 413) { - errorMessage = 'File too large. Please choose a smaller file.'; - } else if (uploadError.body?.error) { - errorMessage = uploadError.body.error; - } - - notifications.show({ - title: 'Upload Error', - message: errorMessage, - color: 'red', - }); - return; // Don't proceed with creation if upload fails - } - } - - // Now create or update the logo with the final values - // Only proceed if we don't already have a logo from file upload - if (logo) { - const updatedLogo = await API.updateLogo(logo.id, values); - notifications.show({ - title: 'Success', - message: 'Logo updated successfully', - color: 'green', - }); - onSuccess?.({ type: 'update', logo: updatedLogo }); // Call onSuccess for updates - } else if (!selectedFile) { - // Only create a new logo entry if we're not uploading a file - // (file upload already created the logo entry) - const newLogo = await API.createLogo(values); - notifications.show({ - title: 'Success', - message: 'Logo created successfully', - color: 'green', - }); - onSuccess?.({ type: 'create', logo: newLogo }); // Call onSuccess for creates - } else { - // File was uploaded and logo was already created - notifications.show({ - title: 'Success', - message: 'Logo uploaded successfully', - color: 'green', - }); - onSuccess?.({ type: 'create', logo: uploadResponse }); - } - onClose(); - } catch (error) { - let errorMessage = logo - ? 'Failed to update logo' - : 'Failed to create logo'; - - // Handle specific timeout errors - if ( - error.code === 'NETWORK_ERROR' || - error.message?.includes('timeout') - ) { - errorMessage = 'Request timed out. Please try again.'; - } else if (error.response?.data?.error) { - errorMessage = error.response.data.error; - } - - notifications.show({ - title: 'Error', - message: errorMessage, - color: 'red', - }); - } finally { - setSubmitting(false); - setUploading(false); - } - }, + const { + register, + handleSubmit, + formState: { errors, isSubmitting }, + reset, + setValue, + watch, + } = useForm({ + defaultValues, + resolver: yupResolver(schema), }); - useEffect(() => { - if (logo) { - formik.setValues({ - name: logo.name || '', - url: logo.url || '', + const onSubmit = async (values) => { + try { + setUploading(true); + let uploadResponse = null; // Store upload response for later use + + // If we have a selected file, upload it first + if (selectedFile) { + try { + uploadResponse = await API.uploadLogo(selectedFile, values.name); + // Use the uploaded file data instead of form values + values.name = uploadResponse.name; + values.url = uploadResponse.url; + } catch (uploadError) { + let errorMessage = 'Failed to upload logo file'; + + if ( + uploadError.code === 'NETWORK_ERROR' || + uploadError.message?.includes('timeout') + ) { + errorMessage = 'Upload timed out. Please try again.'; + } else if (uploadError.status === 413) { + errorMessage = 'File too large. 
Please choose a smaller file.'; + } else if (uploadError.body?.error) { + errorMessage = uploadError.body.error; + } + + notifications.show({ + title: 'Upload Error', + message: errorMessage, + color: 'red', + }); + return; // Don't proceed with creation if upload fails + } + } + + // Now create or update the logo with the final values + // Only proceed if we don't already have a logo from file upload + if (logo) { + const updatedLogo = await API.updateLogo(logo.id, values); + notifications.show({ + title: 'Success', + message: 'Logo updated successfully', + color: 'green', + }); + onSuccess?.({ type: 'update', logo: updatedLogo }); // Call onSuccess for updates + } else if (!selectedFile) { + // Only create a new logo entry if we're not uploading a file + // (file upload already created the logo entry) + const newLogo = await API.createLogo(values); + notifications.show({ + title: 'Success', + message: 'Logo created successfully', + color: 'green', + }); + onSuccess?.({ type: 'create', logo: newLogo }); // Call onSuccess for creates + } else { + // File was uploaded and logo was already created + notifications.show({ + title: 'Success', + message: 'Logo uploaded successfully', + color: 'green', + }); + onSuccess?.({ type: 'create', logo: uploadResponse }); + } + onClose(); + } catch (error) { + let errorMessage = logo + ? 'Failed to update logo' + : 'Failed to create logo'; + + // Handle specific timeout errors + if ( + error.code === 'NETWORK_ERROR' || + error.message?.includes('timeout') + ) { + errorMessage = 'Request timed out. Please try again.'; + } else if (error.response?.data?.error) { + errorMessage = error.response.data.error; + } + + notifications.show({ + title: 'Error', + message: errorMessage, + color: 'red', }); - setLogoPreview(logo.cache_url); - } else { - formik.resetForm(); - setLogoPreview(null); + } finally { + setUploading(false); } - // Clear any selected file when logo changes + }; + + useEffect(() => { + reset(defaultValues); + setLogoPreview(logo?.cache_url || null); setSelectedFile(null); - }, [logo, isOpen]); + }, [defaultValues, logo, reset]); const handleFileSelect = (files) => { if (files.length === 0) return; @@ -180,18 +186,19 @@ const LogoForm = ({ logo = null, isOpen, onClose, onSuccess }) => { setLogoPreview(previewUrl); // Auto-fill the name field if empty - if (!formik.values.name) { + const currentName = watch('name'); + if (!currentName) { const nameWithoutExtension = file.name.replace(/\.[^/.]+$/, ''); - formik.setFieldValue('name', nameWithoutExtension); + setValue('name', nameWithoutExtension); } // Set a placeholder URL (will be replaced after upload) - formik.setFieldValue('url', 'file://pending-upload'); + setValue('url', 'file://pending-upload'); }; const handleUrlChange = (event) => { const url = event.target.value; - formik.setFieldValue('url', url); + setValue('url', url); // Clear any selected file when manually entering URL if (selectedFile) { @@ -219,7 +226,7 @@ const LogoForm = ({ logo = null, isOpen, onClose, onSuccess }) => { const filename = pathname.substring(pathname.lastIndexOf('/') + 1); const nameWithoutExtension = filename.replace(/\.[^/.]+$/, ''); if (nameWithoutExtension) { - formik.setFieldValue('name', nameWithoutExtension); + setValue('name', nameWithoutExtension); } } catch (error) { // If the URL is invalid, do nothing. @@ -244,7 +251,7 @@ const LogoForm = ({ logo = null, isOpen, onClose, onSuccess }) => { title={logo ? 'Edit Logo' : 'Add Logo'} size="md" > -
+ {/* Logo Preview */} {logoPreview && ( @@ -338,18 +345,18 @@ const LogoForm = ({ logo = null, isOpen, onClose, onSuccess }) => { {selectedFile && ( @@ -363,7 +370,7 @@ const LogoForm = ({ logo = null, isOpen, onClose, onSuccess }) => { - diff --git a/frontend/src/components/forms/M3UGroupFilter.jsx b/frontend/src/components/forms/M3UGroupFilter.jsx index 542fc88a..0a7dc224 100644 --- a/frontend/src/components/forms/M3UGroupFilter.jsx +++ b/frontend/src/components/forms/M3UGroupFilter.jsx @@ -1,6 +1,5 @@ // Modal.js import React, { useState, useEffect, forwardRef } from 'react'; -import { useFormik } from 'formik'; import * as Yup from 'yup'; import API from '../../api'; import M3UProfiles from './M3UProfiles'; diff --git a/frontend/src/components/forms/M3UProfile.jsx b/frontend/src/components/forms/M3UProfile.jsx index b225ec38..025d3cae 100644 --- a/frontend/src/components/forms/M3UProfile.jsx +++ b/frontend/src/components/forms/M3UProfile.jsx @@ -1,5 +1,6 @@ -import React, { useState, useEffect } from 'react'; -import { useFormik } from 'formik'; +import React, { useState, useEffect, useMemo } from 'react'; +import { useForm } from 'react-hook-form'; +import { yupResolver } from '@hookform/resolvers/yup'; import * as Yup from 'yup'; import API from '../../api'; import { @@ -31,6 +32,89 @@ const RegexFormAndView = ({ profile = null, m3u, isOpen, onClose }) => { const [sampleInput, setSampleInput] = useState(''); const isDefaultProfile = profile?.is_default; + const defaultValues = useMemo( + () => ({ + name: profile?.name || '', + max_streams: profile?.max_streams || 0, + search_pattern: profile?.search_pattern || '', + replace_pattern: profile?.replace_pattern || '', + notes: profile?.custom_properties?.notes || '', + }), + [profile] + ); + + const schema = Yup.object({ + name: Yup.string().required('Name is required'), + search_pattern: Yup.string().when([], { + is: () => !isDefaultProfile, + then: (schema) => schema.required('Search pattern is required'), + otherwise: (schema) => schema.notRequired(), + }), + replace_pattern: Yup.string().when([], { + is: () => !isDefaultProfile, + then: (schema) => schema.required('Replace pattern is required'), + otherwise: (schema) => schema.notRequired(), + }), + notes: Yup.string(), // Optional field + }); + + const { + register, + handleSubmit, + formState: { errors, isSubmitting }, + reset, + setValue, + watch, + } = useForm({ + defaultValues, + resolver: yupResolver(schema), + }); + + const onSubmit = async (values) => { + console.log('submitting'); + + // For default profiles, only send name and custom_properties (notes) + let submitValues; + if (isDefaultProfile) { + submitValues = { + name: values.name, + custom_properties: { + // Preserve existing custom_properties and add/update notes + ...(profile?.custom_properties || {}), + notes: values.notes || '', + }, + }; + } else { + // For regular profiles, send all fields + submitValues = { + name: values.name, + max_streams: values.max_streams, + search_pattern: values.search_pattern, + replace_pattern: values.replace_pattern, + custom_properties: { + // Preserve existing custom_properties and add/update notes + ...(profile?.custom_properties || {}), + notes: values.notes || '', + }, + }; + } + + if (profile?.id) { + await API.updateM3UProfile(m3u.id, { + id: profile.id, + ...submitValues, + }); + } else { + await API.addM3UProfile(m3u.id, submitValues); + } + + reset(); + // Reset local state to sync with form reset + setSearchPattern(''); + setReplacePattern(''); + onClose(); + }; +
useEffect(() => { async function fetchStreamUrl() { try { @@ -79,99 +163,22 @@ const RegexFormAndView = ({ profile = null, m3u, isOpen, onClose }) => { }, [searchPattern, replacePattern]); const onSearchPatternUpdate = (e) => { - formik.handleChange(e); - setSearchPattern(e.target.value); + const value = e.target.value; + setSearchPattern(value); + setValue('search_pattern', value); }; const onReplacePatternUpdate = (e) => { - formik.handleChange(e); - setReplacePattern(e.target.value); + const value = e.target.value; + setReplacePattern(value); + setValue('replace_pattern', value); }; - const formik = useFormik({ - initialValues: { - name: '', - max_streams: 0, - search_pattern: '', - replace_pattern: '', - notes: '', - }, - validationSchema: Yup.object({ - name: Yup.string().required('Name is required'), - search_pattern: Yup.string().when([], { - is: () => !isDefaultProfile, - then: (schema) => schema.required('Search pattern is required'), - otherwise: (schema) => schema.notRequired(), - }), - replace_pattern: Yup.string().when([], { - is: () => !isDefaultProfile, - then: (schema) => schema.required('Replace pattern is required'), - otherwise: (schema) => schema.notRequired(), - }), - notes: Yup.string(), // Optional field - }), - onSubmit: async (values, { setSubmitting, resetForm }) => { - console.log('submiting'); - - // For default profiles, only send name and custom_properties (notes) - let submitValues; - if (isDefaultProfile) { - submitValues = { - name: values.name, - custom_properties: { - // Preserve existing custom_properties and add/update notes - ...(profile?.custom_properties || {}), - notes: values.notes || '', - }, - }; - } else { - // For regular profiles, send all fields - submitValues = { - name: values.name, - max_streams: values.max_streams, - search_pattern: values.search_pattern, - replace_pattern: values.replace_pattern, - custom_properties: { - // Preserve existing custom_properties and add/update notes - ...(profile?.custom_properties || {}), - notes: values.notes || '', - }, - }; - } - - if (profile?.id) { - await API.updateM3UProfile(m3u.id, { - id: profile.id, - ...submitValues, - }); - } else { - await API.addM3UProfile(m3u.id, submitValues); - } - - resetForm(); - // Reset local state to sync with formik reset - setSearchPattern(''); - setReplacePattern(''); - setSubmitting(false); - onClose(); - }, - }); - useEffect(() => { - if (profile) { - setSearchPattern(profile.search_pattern); - setReplacePattern(profile.replace_pattern); - formik.setValues({ - name: profile.name, - max_streams: profile.max_streams, - search_pattern: profile.search_pattern, - replace_pattern: profile.replace_pattern, - notes: profile.custom_properties?.notes || '', - }); - } else { - formik.resetForm(); - } - }, [profile]); // eslint-disable-line react-hooks/exhaustive-deps + reset(defaultValues); + setSearchPattern(profile?.search_pattern || ''); + setReplacePattern(profile?.replace_pattern || ''); + }, [defaultValues, profile, reset]); const handleSampleInputChange = (e) => { setSampleInput(e.target.value); @@ -212,27 +219,21 @@ const RegexFormAndView = ({ profile = null, m3u, isOpen, onClose }) => { } size="lg" > - + {/* Only show max streams field for non-default profiles */} {!isDefaultProfile && ( - formik.setFieldValue('max_streams', value || 0) - } - error={formik.errors.max_streams ? 
formik.touched.max_streams : ''} + {...register('max_streams')} + value={watch('max_streams')} + onChange={(value) => setValue('max_streams', value || 0)} + error={errors.max_streams?.message} min={0} placeholder="0 = unlimited" /> @@ -242,40 +243,25 @@ const RegexFormAndView = ({ profile = null, m3u, isOpen, onClose }) => { {!isDefaultProfile && ( <> )}
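
Note on the form migrations: the Channel.jsx, Logo.jsx, and M3UProfile.jsx hunks above all move from formik to react-hook-form with the same shape: defaultValues memoized from the record being edited, a Yup schema wired in through @hookform/resolvers, and a reset() effect that re-syncs the form when the record changes. A minimal sketch of that shared pattern follows; MyForm, record, and the lone name field are illustrative stand-ins, not code from this patch.

    import React, { useEffect, useMemo } from 'react';
    import { useForm } from 'react-hook-form';
    import { yupResolver } from '@hookform/resolvers/yup';
    import * as Yup from 'yup';

    const schema = Yup.object({
      name: Yup.string().required('Name is required'),
    });

    const MyForm = ({ record = null, onClose }) => {
      // Memoizing keeps the defaults' identity stable, so the reset effect
      // below only fires when a different record is actually loaded.
      const defaultValues = useMemo(
        () => ({ name: record?.name || '' }),
        [record]
      );

      const {
        register,
        handleSubmit,
        setValue,
        watch,
        reset,
        formState: { errors, isSubmitting },
      } = useForm({ defaultValues, resolver: yupResolver(schema) });

      // Replaces the formik.setValues(...) calls when switching between
      // the add and edit cases.
      useEffect(() => {
        reset(defaultValues);
      }, [defaultValues, reset]);

      const onSubmit = async (values) => {
        // persist `values` via the API here, then close the modal
        onClose();
      };

      return (
        <form onSubmit={handleSubmit(onSubmit)}>
          {/* register() replaces formik's handleChange/value wiring */}
          <input {...register('name')} placeholder="Name" />
          {errors.name && <span>{errors.name.message}</span>}
          {/* watch()/setValue() stand in for formik.values.X/setFieldValue */}
          <button type="button" onClick={() => setValue('name', watch('name').trim())}>
            Trim name
          </button>
          <button type="submit" disabled={isSubmitting}>
            Save
          </button>
        </form>
      );
    };

    export default MyForm;

One behavioural difference worth noting: react-hook-form only applies defaultValues on mount, which is why each migrated form gains the explicit reset(defaultValues) effect instead of relying on re-render.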
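M3UProfile.jsx keeps its conditional validation by calling Yup's .when() with an empty dependency array: no sibling field is consulted, and the captured isDefaultProfile flag alone decides whether the patterns are required. A standalone sketch of that trick, with the flag hard-coded since in the patch it comes from component scope:

    import * as Yup from 'yup';

    // In the component this is derived from props; hard-coded for the sketch.
    const isDefaultProfile = false;

    const schema = Yup.object({
      name: Yup.string().required('Name is required'),
      // Empty dependency list: `is` ignores sibling fields and simply
      // evaluates the captured flag each time validation runs.
      search_pattern: Yup.string().when([], {
        is: () => !isDefaultProfile,
        then: (s) => s.required('Search pattern is required'),
        otherwise: (s) => s.notRequired(),
      }),
    });

    // Rejects while isDefaultProfile is false: search_pattern is missing.
    schema
      .validate({ name: 'Example profile' })
      .then(() => console.log('valid'))
      .catch((err) => console.log(err.errors));

Because the component rebuilds the schema on every render, the closure always sees the current flag, so default profiles skip the pattern requirements without any extra form state.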