diff --git a/.dockerignore b/.dockerignore index c79ca7b4..296537de 100755 --- a/.dockerignore +++ b/.dockerignore @@ -31,3 +31,4 @@ LICENSE README.md data/ +docker/data/ diff --git a/CHANGELOG.md b/CHANGELOG.md index 67c27667..01207a18 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -7,7 +7,24 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 ## [Unreleased] -- Update docker/dev-build.sh to support private registries, multiple architectures and pushing. Now [@jdblack](https://github.com/jblack). Now you can do things like `dev-build.sh -p -r my.private.registry -a linux/arm64,linux/amd64`. +### Added + +- VOD client stop button in Stats page: Users can now disconnect individual VOD clients from the Stats view, similar to the existing channel client disconnect functionality. +- Automated configuration backup/restore system with scheduled backups, retention policies, and async task processing - Thanks [@stlalpha](https://github.com/stlalpha) (Closes #153) + +### Changed + +- Removed unreachable code path in m3u output - Thanks [@DawtCom](https://github.com/DawtCom) +- Update docker/dev-build.sh to support private registries, multiple architectures and pushing. Now you can do things like `dev-build.sh -p -r my.private.registry -a linux/arm64,linux/amd64` - Thanks [@jdblack](https://github.com/jblack) + +### Fixed + +- VOD episode processing now correctly handles duplicate episodes (same episode in multiple languages/qualities) by reusing Episode records across multiple M3UEpisodeRelation entries instead of attempting to create duplicates (Fixes #556) +- XtreamCodes series streaming endpoint now correctly handles episodes with multiple streams (different languages/qualities) by selecting the best available stream based on account priority (Fixes #569) +- XtreamCodes series info API now returns unique episodes instead of duplicate entries when multiple streams exist for the same episode (different languages/qualities) +- nginx now gracefully handles hosts without IPv6 support by automatically disabling IPv6 binding at startup (Fixes #744) +- XtreamCodes EPG API now returns correct date/time format for start/end fields and proper string types for timestamps and channel_id +- XtreamCodes EPG API now handles None values for title and description fields to prevent AttributeError ## [0.14.0] - 2025-12-09 diff --git a/apps/api/urls.py b/apps/api/urls.py index 7d9edb52..4c92c70a 100644 --- a/apps/api/urls.py +++ b/apps/api/urls.py @@ -27,6 +27,7 @@ urlpatterns = [ path('core/', include(('core.api_urls', 'core'), namespace='core')), path('plugins/', include(('apps.plugins.api_urls', 'plugins'), namespace='plugins')), path('vod/', include(('apps.vod.api_urls', 'vod'), namespace='vod')), + path('backups/', include(('apps.backups.api_urls', 'backups'), namespace='backups')), # path('output/', include(('apps.output.api_urls', 'output'), namespace='output')), #path('player/', include(('apps.player.api_urls', 'player'), namespace='player')), #path('settings/', include(('apps.settings.api_urls', 'settings'), namespace='settings')), diff --git a/apps/backups/__init__.py b/apps/backups/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/apps/backups/api_urls.py b/apps/backups/api_urls.py new file mode 100644 index 00000000..226758cc --- /dev/null +++ b/apps/backups/api_urls.py @@ -0,0 +1,18 @@ +from django.urls import path + +from . 
import api_views
+
+app_name = "backups"
+
+urlpatterns = [
+    path("", api_views.list_backups, name="backup-list"),
+    path("create/", api_views.create_backup, name="backup-create"),
+    path("upload/", api_views.upload_backup, name="backup-upload"),
+    path("schedule/", api_views.get_schedule, name="backup-schedule-get"),
+    path("schedule/update/", api_views.update_schedule, name="backup-schedule-update"),
+    path("status/<str:task_id>/", api_views.backup_status, name="backup-status"),
+    path("<str:filename>/download-token/", api_views.get_download_token, name="backup-download-token"),
+    path("<str:filename>/download/", api_views.download_backup, name="backup-download"),
+    path("<str:filename>/delete/", api_views.delete_backup, name="backup-delete"),
+    path("<str:filename>/restore/", api_views.restore_backup, name="backup-restore"),
+]
diff --git a/apps/backups/api_views.py b/apps/backups/api_views.py
new file mode 100644
index 00000000..c6ff7d26
--- /dev/null
+++ b/apps/backups/api_views.py
@@ -0,0 +1,364 @@
+import hashlib
+import hmac
+import logging
+import os
+from pathlib import Path
+
+from celery.result import AsyncResult
+from django.conf import settings
+from django.http import HttpResponse, StreamingHttpResponse, Http404
+from rest_framework import status
+from rest_framework.decorators import api_view, permission_classes, parser_classes
+from rest_framework.permissions import IsAdminUser, AllowAny
+from rest_framework.parsers import MultiPartParser, FormParser
+from rest_framework.response import Response
+
+from . import services
+from .tasks import create_backup_task, restore_backup_task
+from .scheduler import get_schedule_settings, update_schedule_settings
+
+logger = logging.getLogger(__name__)
+
+
+def _generate_task_token(task_id: str) -> str:
+    """Generate a signed token for task status access without auth."""
+    secret = settings.SECRET_KEY.encode()
+    return hmac.new(secret, task_id.encode(), hashlib.sha256).hexdigest()[:32]
+
+
+def _verify_task_token(task_id: str, token: str) -> bool:
+    """Verify a task token is valid."""
+    expected = _generate_task_token(task_id)
+    return hmac.compare_digest(expected, token)
+
+
+@api_view(["GET"])
+@permission_classes([IsAdminUser])
+def list_backups(request):
+    """List all available backup files."""
+    try:
+        backups = services.list_backups()
+        return Response(backups, status=status.HTTP_200_OK)
+    except Exception as e:
+        return Response(
+            {"detail": f"Failed to list backups: {str(e)}"},
+            status=status.HTTP_500_INTERNAL_SERVER_ERROR,
+        )
+
+
+@api_view(["POST"])
+@permission_classes([IsAdminUser])
+def create_backup(request):
+    """Create a new backup (async via Celery)."""
+    try:
+        task = create_backup_task.delay()
+        return Response(
+            {
+                "detail": "Backup started",
+                "task_id": task.id,
+                "task_token": _generate_task_token(task.id),
+            },
+            status=status.HTTP_202_ACCEPTED,
+        )
+    except Exception as e:
+        return Response(
+            {"detail": f"Failed to start backup: {str(e)}"},
+            status=status.HTTP_500_INTERNAL_SERVER_ERROR,
+        )
+
+
+@api_view(["GET"])
+@permission_classes([AllowAny])
+def backup_status(request, task_id):
+    """Check the status of a backup/restore task.
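+
+    Typical flow, as a rough illustration (response values are placeholders;
+    endpoint paths are the ones registered in api_urls.py):
+
+        POST /api/backups/create/                        -> {"task_id": "...", "task_token": "..."}
+        GET  /api/backups/status/<task_id>/?token=<task_token>
+                                                         -> {"state": "pending"} until the task finishes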
+ + Requires either: + - Valid admin authentication, OR + - Valid task_token query parameter + """ + # Check for token-based auth (for restore when session is invalidated) + token = request.query_params.get("token") + if token: + if not _verify_task_token(task_id, token): + return Response( + {"detail": "Invalid task token"}, + status=status.HTTP_403_FORBIDDEN, + ) + else: + # Fall back to admin auth check + if not request.user.is_authenticated or not request.user.is_staff: + return Response( + {"detail": "Authentication required"}, + status=status.HTTP_401_UNAUTHORIZED, + ) + + try: + result = AsyncResult(task_id) + + if result.ready(): + task_result = result.get() + if task_result.get("status") == "completed": + return Response({ + "state": "completed", + "result": task_result, + }) + else: + return Response({ + "state": "failed", + "error": task_result.get("error", "Unknown error"), + }) + elif result.failed(): + return Response({ + "state": "failed", + "error": str(result.result), + }) + else: + return Response({ + "state": result.state.lower(), + }) + except Exception as e: + return Response( + {"detail": f"Failed to get task status: {str(e)}"}, + status=status.HTTP_500_INTERNAL_SERVER_ERROR, + ) + + +@api_view(["GET"]) +@permission_classes([IsAdminUser]) +def get_download_token(request, filename): + """Get a signed token for downloading a backup file.""" + try: + # Security: prevent path traversal + if ".." in filename or "/" in filename or "\\" in filename: + raise Http404("Invalid filename") + + backup_dir = services.get_backup_dir() + backup_file = backup_dir / filename + + if not backup_file.exists(): + raise Http404("Backup file not found") + + token = _generate_task_token(filename) + return Response({"token": token}) + except Http404: + raise + except Exception as e: + return Response( + {"detail": f"Failed to generate token: {str(e)}"}, + status=status.HTTP_500_INTERNAL_SERVER_ERROR, + ) + + +@api_view(["GET"]) +@permission_classes([AllowAny]) +def download_backup(request, filename): + """Download a backup file. + + Requires either: + - Valid admin authentication, OR + - Valid download_token query parameter + """ + # Check for token-based auth (avoids CORS preflight issues) + token = request.query_params.get("token") + if token: + if not _verify_task_token(filename, token): + return Response( + {"detail": "Invalid download token"}, + status=status.HTTP_403_FORBIDDEN, + ) + else: + # Fall back to admin auth check + if not request.user.is_authenticated or not request.user.is_staff: + return Response( + {"detail": "Authentication required"}, + status=status.HTTP_401_UNAUTHORIZED, + ) + + try: + # Security: prevent path traversal by checking for suspicious characters + if ".." 
in filename or "/" in filename or "\\" in filename: + raise Http404("Invalid filename") + + backup_dir = services.get_backup_dir() + backup_file = (backup_dir / filename).resolve() + + # Security: ensure the resolved path is still within backup_dir + if not str(backup_file).startswith(str(backup_dir.resolve())): + raise Http404("Invalid filename") + + if not backup_file.exists() or not backup_file.is_file(): + raise Http404("Backup file not found") + + file_size = backup_file.stat().st_size + + # Use X-Accel-Redirect for nginx (AIO container) - nginx serves file directly + # Fall back to streaming for non-nginx deployments + use_nginx_accel = os.environ.get("USE_NGINX_ACCEL", "").lower() == "true" + logger.info(f"[DOWNLOAD] File: {filename}, Size: {file_size}, USE_NGINX_ACCEL: {use_nginx_accel}") + + if use_nginx_accel: + # X-Accel-Redirect: Django returns immediately, nginx serves file + logger.info(f"[DOWNLOAD] Using X-Accel-Redirect: /protected-backups/{filename}") + response = HttpResponse() + response["X-Accel-Redirect"] = f"/protected-backups/{filename}" + response["Content-Type"] = "application/zip" + response["Content-Length"] = file_size + response["Content-Disposition"] = f'attachment; filename="{filename}"' + return response + else: + # Streaming fallback for non-nginx deployments + logger.info(f"[DOWNLOAD] Using streaming fallback (no nginx)") + def file_iterator(file_path, chunk_size=2 * 1024 * 1024): + with open(file_path, "rb") as f: + while chunk := f.read(chunk_size): + yield chunk + + response = StreamingHttpResponse( + file_iterator(backup_file), + content_type="application/zip", + ) + response["Content-Length"] = file_size + response["Content-Disposition"] = f'attachment; filename="{filename}"' + return response + except Http404: + raise + except Exception as e: + return Response( + {"detail": f"Download failed: {str(e)}"}, + status=status.HTTP_500_INTERNAL_SERVER_ERROR, + ) + + +@api_view(["DELETE"]) +@permission_classes([IsAdminUser]) +def delete_backup(request, filename): + """Delete a backup file.""" + try: + # Security: prevent path traversal + if ".." 
in filename or "/" in filename or "\\" in filename: + raise Http404("Invalid filename") + + services.delete_backup(filename) + return Response( + {"detail": "Backup deleted successfully"}, + status=status.HTTP_204_NO_CONTENT, + ) + except FileNotFoundError: + raise Http404("Backup file not found") + except Exception as e: + return Response( + {"detail": f"Delete failed: {str(e)}"}, + status=status.HTTP_500_INTERNAL_SERVER_ERROR, + ) + + +@api_view(["POST"]) +@permission_classes([IsAdminUser]) +@parser_classes([MultiPartParser, FormParser]) +def upload_backup(request): + """Upload a backup file for restoration.""" + uploaded = request.FILES.get("file") + if not uploaded: + return Response( + {"detail": "No file uploaded"}, + status=status.HTTP_400_BAD_REQUEST, + ) + + try: + backup_dir = services.get_backup_dir() + filename = uploaded.name or "uploaded-backup.zip" + + # Ensure unique filename + backup_file = backup_dir / filename + counter = 1 + while backup_file.exists(): + name_parts = filename.rsplit(".", 1) + if len(name_parts) == 2: + backup_file = backup_dir / f"{name_parts[0]}-{counter}.{name_parts[1]}" + else: + backup_file = backup_dir / f"{filename}-{counter}" + counter += 1 + + # Save uploaded file + with backup_file.open("wb") as f: + for chunk in uploaded.chunks(): + f.write(chunk) + + return Response( + { + "detail": "Backup uploaded successfully", + "filename": backup_file.name, + }, + status=status.HTTP_201_CREATED, + ) + except Exception as e: + return Response( + {"detail": f"Upload failed: {str(e)}"}, + status=status.HTTP_500_INTERNAL_SERVER_ERROR, + ) + + +@api_view(["POST"]) +@permission_classes([IsAdminUser]) +def restore_backup(request, filename): + """Restore from a backup file (async via Celery). WARNING: This will flush the database!""" + try: + # Security: prevent path traversal + if ".." 
in filename or "/" in filename or "\\" in filename: + raise Http404("Invalid filename") + + backup_dir = services.get_backup_dir() + backup_file = backup_dir / filename + + if not backup_file.exists(): + raise Http404("Backup file not found") + + task = restore_backup_task.delay(filename) + return Response( + { + "detail": "Restore started", + "task_id": task.id, + "task_token": _generate_task_token(task.id), + }, + status=status.HTTP_202_ACCEPTED, + ) + except Http404: + raise + except Exception as e: + return Response( + {"detail": f"Failed to start restore: {str(e)}"}, + status=status.HTTP_500_INTERNAL_SERVER_ERROR, + ) + + +@api_view(["GET"]) +@permission_classes([IsAdminUser]) +def get_schedule(request): + """Get backup schedule settings.""" + try: + settings = get_schedule_settings() + return Response(settings) + except Exception as e: + return Response( + {"detail": f"Failed to get schedule: {str(e)}"}, + status=status.HTTP_500_INTERNAL_SERVER_ERROR, + ) + + +@api_view(["PUT"]) +@permission_classes([IsAdminUser]) +def update_schedule(request): + """Update backup schedule settings.""" + try: + settings = update_schedule_settings(request.data) + return Response(settings) + except ValueError as e: + return Response( + {"detail": str(e)}, + status=status.HTTP_400_BAD_REQUEST, + ) + except Exception as e: + return Response( + {"detail": f"Failed to update schedule: {str(e)}"}, + status=status.HTTP_500_INTERNAL_SERVER_ERROR, + ) diff --git a/apps/backups/apps.py b/apps/backups/apps.py new file mode 100644 index 00000000..ee644149 --- /dev/null +++ b/apps/backups/apps.py @@ -0,0 +1,7 @@ +from django.apps import AppConfig + + +class BackupsConfig(AppConfig): + default_auto_field = "django.db.models.BigAutoField" + name = "apps.backups" + verbose_name = "Backups" diff --git a/apps/backups/migrations/__init__.py b/apps/backups/migrations/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/apps/backups/models.py b/apps/backups/models.py new file mode 100644 index 00000000..e69de29b diff --git a/apps/backups/scheduler.py b/apps/backups/scheduler.py new file mode 100644 index 00000000..2dd9e828 --- /dev/null +++ b/apps/backups/scheduler.py @@ -0,0 +1,203 @@ +import json +import logging + +from django_celery_beat.models import PeriodicTask, CrontabSchedule + +from core.models import CoreSettings + +logger = logging.getLogger(__name__) + +BACKUP_SCHEDULE_TASK_NAME = "backup-scheduled-task" + +SETTING_KEYS = { + "enabled": "backup_schedule_enabled", + "frequency": "backup_schedule_frequency", + "time": "backup_schedule_time", + "day_of_week": "backup_schedule_day_of_week", + "retention_count": "backup_retention_count", + "cron_expression": "backup_schedule_cron_expression", +} + +DEFAULTS = { + "enabled": False, + "frequency": "daily", + "time": "03:00", + "day_of_week": 0, # Sunday + "retention_count": 0, + "cron_expression": "", +} + + +def _get_setting(key: str, default=None): + """Get a backup setting from CoreSettings.""" + try: + setting = CoreSettings.objects.get(key=SETTING_KEYS[key]) + value = setting.value + if key == "enabled": + return value.lower() == "true" + elif key in ("day_of_week", "retention_count"): + return int(value) + return value + except CoreSettings.DoesNotExist: + return default if default is not None else DEFAULTS.get(key) + + +def _set_setting(key: str, value) -> None: + """Set a backup setting in CoreSettings.""" + str_value = str(value).lower() if isinstance(value, bool) else str(value) + CoreSettings.objects.update_or_create( + 
key=SETTING_KEYS[key], + defaults={ + "name": f"Backup {key.replace('_', ' ').title()}", + "value": str_value, + }, + ) + + +def get_schedule_settings() -> dict: + """Get all backup schedule settings.""" + return { + "enabled": _get_setting("enabled"), + "frequency": _get_setting("frequency"), + "time": _get_setting("time"), + "day_of_week": _get_setting("day_of_week"), + "retention_count": _get_setting("retention_count"), + "cron_expression": _get_setting("cron_expression"), + } + + +def update_schedule_settings(data: dict) -> dict: + """Update backup schedule settings and sync the PeriodicTask.""" + # Validate + if "frequency" in data and data["frequency"] not in ("daily", "weekly"): + raise ValueError("frequency must be 'daily' or 'weekly'") + + if "time" in data: + try: + hour, minute = data["time"].split(":") + int(hour) + int(minute) + except (ValueError, AttributeError): + raise ValueError("time must be in HH:MM format") + + if "day_of_week" in data: + day = int(data["day_of_week"]) + if day < 0 or day > 6: + raise ValueError("day_of_week must be 0-6 (Sunday-Saturday)") + + if "retention_count" in data: + count = int(data["retention_count"]) + if count < 0: + raise ValueError("retention_count must be >= 0") + + # Update settings + for key in ("enabled", "frequency", "time", "day_of_week", "retention_count", "cron_expression"): + if key in data: + _set_setting(key, data[key]) + + # Sync the periodic task + _sync_periodic_task() + + return get_schedule_settings() + + +def _sync_periodic_task() -> None: + """Create, update, or delete the scheduled backup task based on settings.""" + settings = get_schedule_settings() + + if not settings["enabled"]: + # Delete the task if it exists + task = PeriodicTask.objects.filter(name=BACKUP_SCHEDULE_TASK_NAME).first() + if task: + old_crontab = task.crontab + task.delete() + _cleanup_orphaned_crontab(old_crontab) + logger.info("Backup schedule disabled, removed periodic task") + return + + # Get old crontab before creating new one + old_crontab = None + try: + old_task = PeriodicTask.objects.get(name=BACKUP_SCHEDULE_TASK_NAME) + old_crontab = old_task.crontab + except PeriodicTask.DoesNotExist: + pass + + # Check if using cron expression (advanced mode) + if settings["cron_expression"]: + # Parse cron expression: "minute hour day month weekday" + try: + parts = settings["cron_expression"].split() + if len(parts) != 5: + raise ValueError("Cron expression must have 5 parts: minute hour day month weekday") + + minute, hour, day_of_month, month_of_year, day_of_week = parts + + crontab, _ = CrontabSchedule.objects.get_or_create( + minute=minute, + hour=hour, + day_of_week=day_of_week, + day_of_month=day_of_month, + month_of_year=month_of_year, + timezone=CoreSettings.get_system_time_zone(), + ) + except Exception as e: + logger.error(f"Invalid cron expression '{settings['cron_expression']}': {e}") + raise ValueError(f"Invalid cron expression: {e}") + else: + # Use simple frequency-based scheduling + # Parse time + hour, minute = settings["time"].split(":") + + # Build crontab based on frequency + system_tz = CoreSettings.get_system_time_zone() + if settings["frequency"] == "daily": + crontab, _ = CrontabSchedule.objects.get_or_create( + minute=minute, + hour=hour, + day_of_week="*", + day_of_month="*", + month_of_year="*", + timezone=system_tz, + ) + else: # weekly + crontab, _ = CrontabSchedule.objects.get_or_create( + minute=minute, + hour=hour, + day_of_week=str(settings["day_of_week"]), + day_of_month="*", + month_of_year="*", + timezone=system_tz, 
+ ) + + # Create or update the periodic task + task, created = PeriodicTask.objects.update_or_create( + name=BACKUP_SCHEDULE_TASK_NAME, + defaults={ + "task": "apps.backups.tasks.scheduled_backup_task", + "crontab": crontab, + "enabled": True, + "kwargs": json.dumps({"retention_count": settings["retention_count"]}), + }, + ) + + # Clean up old crontab if it changed and is orphaned + if old_crontab and old_crontab.id != crontab.id: + _cleanup_orphaned_crontab(old_crontab) + + action = "Created" if created else "Updated" + logger.info(f"{action} backup schedule: {settings['frequency']} at {settings['time']}") + + +def _cleanup_orphaned_crontab(crontab_schedule): + """Delete old CrontabSchedule if no other tasks are using it.""" + if crontab_schedule is None: + return + + # Check if any other tasks are using this crontab + if PeriodicTask.objects.filter(crontab=crontab_schedule).exists(): + logger.debug(f"CrontabSchedule {crontab_schedule.id} still in use, not deleting") + return + + logger.debug(f"Cleaning up orphaned CrontabSchedule: {crontab_schedule.id}") + crontab_schedule.delete() diff --git a/apps/backups/services.py b/apps/backups/services.py new file mode 100644 index 00000000..b99fab6d --- /dev/null +++ b/apps/backups/services.py @@ -0,0 +1,320 @@ +import datetime +import json +import os +import shutil +import subprocess +import tempfile +from pathlib import Path +from zipfile import ZipFile, ZIP_DEFLATED +import logging +import pytz + +from django.conf import settings +from core.models import CoreSettings + +logger = logging.getLogger(__name__) + + +def get_backup_dir() -> Path: + """Get the backup directory, creating it if necessary.""" + backup_dir = Path(settings.BACKUP_ROOT) + backup_dir.mkdir(parents=True, exist_ok=True) + return backup_dir + + +def _is_postgresql() -> bool: + """Check if we're using PostgreSQL.""" + return settings.DATABASES["default"]["ENGINE"] == "django.db.backends.postgresql" + + +def _get_pg_env() -> dict: + """Get environment variables for PostgreSQL commands.""" + db_config = settings.DATABASES["default"] + env = os.environ.copy() + env["PGPASSWORD"] = db_config.get("PASSWORD", "") + return env + + +def _get_pg_args() -> list[str]: + """Get common PostgreSQL command arguments.""" + db_config = settings.DATABASES["default"] + return [ + "-h", db_config.get("HOST", "localhost"), + "-p", str(db_config.get("PORT", 5432)), + "-U", db_config.get("USER", "postgres"), + "-d", db_config.get("NAME", "dispatcharr"), + ] + + +def _dump_postgresql(output_file: Path) -> None: + """Dump PostgreSQL database using pg_dump.""" + logger.info("Dumping PostgreSQL database with pg_dump...") + + cmd = [ + "pg_dump", + *_get_pg_args(), + "-Fc", # Custom format for pg_restore + "-v", # Verbose + "-f", str(output_file), + ] + + result = subprocess.run( + cmd, + env=_get_pg_env(), + capture_output=True, + text=True, + ) + + if result.returncode != 0: + logger.error(f"pg_dump failed: {result.stderr}") + raise RuntimeError(f"pg_dump failed: {result.stderr}") + + logger.debug(f"pg_dump output: {result.stderr}") + + +def _restore_postgresql(dump_file: Path) -> None: + """Restore PostgreSQL database using pg_restore.""" + logger.info("[PG_RESTORE] Starting pg_restore...") + logger.info(f"[PG_RESTORE] Dump file: {dump_file}") + + pg_args = _get_pg_args() + logger.info(f"[PG_RESTORE] Connection args: {pg_args}") + + cmd = [ + "pg_restore", + "--clean", # Clean (drop) database objects before recreating + *pg_args, + "-v", # Verbose + str(dump_file), + ] + + logger.info(f"[PG_RESTORE] 
Running command: {' '.join(cmd)}") + + result = subprocess.run( + cmd, + env=_get_pg_env(), + capture_output=True, + text=True, + ) + + logger.info(f"[PG_RESTORE] Return code: {result.returncode}") + + # pg_restore may return non-zero even on partial success + # Check for actual errors vs warnings + if result.returncode != 0: + # Some errors during restore are expected (e.g., "does not exist" when cleaning) + # Only fail on critical errors + stderr = result.stderr.lower() + if "fatal" in stderr or "could not connect" in stderr: + logger.error(f"[PG_RESTORE] Failed critically: {result.stderr}") + raise RuntimeError(f"pg_restore failed: {result.stderr}") + else: + logger.warning(f"[PG_RESTORE] Completed with warnings: {result.stderr[:500]}...") + + logger.info("[PG_RESTORE] Completed successfully") + + +def _dump_sqlite(output_file: Path) -> None: + """Dump SQLite database using sqlite3 .backup command.""" + logger.info("Dumping SQLite database with sqlite3 .backup...") + db_path = Path(settings.DATABASES["default"]["NAME"]) + + if not db_path.exists(): + raise FileNotFoundError(f"SQLite database not found: {db_path}") + + # Use sqlite3 .backup command via stdin for reliable execution + result = subprocess.run( + ["sqlite3", str(db_path)], + input=f".backup '{output_file}'\n", + capture_output=True, + text=True, + ) + + if result.returncode != 0: + logger.error(f"sqlite3 backup failed: {result.stderr}") + raise RuntimeError(f"sqlite3 backup failed: {result.stderr}") + + # Verify the backup file was created + if not output_file.exists(): + raise RuntimeError("sqlite3 backup failed: output file not created") + + logger.info(f"sqlite3 backup completed successfully: {output_file}") + + +def _restore_sqlite(dump_file: Path) -> None: + """Restore SQLite database by replacing the database file.""" + logger.info("Restoring SQLite database...") + db_path = Path(settings.DATABASES["default"]["NAME"]) + backup_current = None + + # Backup current database before overwriting + if db_path.exists(): + backup_current = db_path.with_suffix(".db.bak") + shutil.copy2(db_path, backup_current) + logger.info(f"Backed up current database to {backup_current}") + + # Ensure parent directory exists + db_path.parent.mkdir(parents=True, exist_ok=True) + + # The backup file from _dump_sqlite is a complete SQLite database file + # We can simply copy it over the existing database + shutil.copy2(dump_file, db_path) + + # Verify the restore worked by checking if sqlite3 can read it + result = subprocess.run( + ["sqlite3", str(db_path)], + input=".tables\n", + capture_output=True, + text=True, + ) + + if result.returncode != 0: + logger.error(f"sqlite3 verification failed: {result.stderr}") + # Try to restore from backup + if backup_current and backup_current.exists(): + shutil.copy2(backup_current, db_path) + logger.info("Restored original database from backup") + raise RuntimeError(f"sqlite3 restore verification failed: {result.stderr}") + + logger.info("sqlite3 restore completed successfully") + + +def create_backup() -> Path: + """ + Create a backup archive containing database dump and data directories. + Returns the path to the created backup file. 
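+
+    Archive layout sketch (metadata values shown here are illustrative):
+
+        database.dump (PostgreSQL) or database.sqlite3 (SQLite)
+        metadata.json, e.g. {"format": "dispatcharr-backup", "version": 2,
+                             "database_type": "postgresql",
+                             "database_file": "database.dump",
+                             "created_at": "<UTC ISO timestamp>"}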
+ """ + backup_dir = get_backup_dir() + + # Use system timezone for filename (user-friendly), but keep internal timestamps as UTC + system_tz_name = CoreSettings.get_system_time_zone() + try: + system_tz = pytz.timezone(system_tz_name) + now_local = datetime.datetime.now(datetime.UTC).astimezone(system_tz) + timestamp = now_local.strftime("%Y.%m.%d.%H.%M.%S") + except Exception as e: + logger.warning(f"Failed to use system timezone {system_tz_name}: {e}, falling back to UTC") + timestamp = datetime.datetime.now(datetime.UTC).strftime("%Y.%m.%d.%H.%M.%S") + + backup_name = f"dispatcharr-backup-{timestamp}.zip" + backup_file = backup_dir / backup_name + + logger.info(f"Creating backup: {backup_name}") + + with tempfile.TemporaryDirectory(prefix="dispatcharr-backup-") as temp_dir: + temp_path = Path(temp_dir) + + # Determine database type and dump accordingly + if _is_postgresql(): + db_dump_file = temp_path / "database.dump" + _dump_postgresql(db_dump_file) + db_type = "postgresql" + else: + db_dump_file = temp_path / "database.sqlite3" + _dump_sqlite(db_dump_file) + db_type = "sqlite" + + # Create ZIP archive with compression and ZIP64 support for large files + with ZipFile(backup_file, "w", compression=ZIP_DEFLATED, allowZip64=True) as zip_file: + # Add database dump + zip_file.write(db_dump_file, db_dump_file.name) + + # Add metadata + metadata = { + "format": "dispatcharr-backup", + "version": 2, + "database_type": db_type, + "database_file": db_dump_file.name, + "created_at": datetime.datetime.now(datetime.UTC).isoformat(), + } + zip_file.writestr("metadata.json", json.dumps(metadata, indent=2)) + + logger.info(f"Backup created successfully: {backup_file}") + return backup_file + + +def restore_backup(backup_file: Path) -> None: + """ + Restore from a backup archive. + WARNING: This will overwrite the database! 
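+
+    Minimal usage sketch (the archive name below is illustrative):
+
+        from apps.backups import services
+
+        backup = services.get_backup_dir() / "dispatcharr-backup-2025.01.01.03.00.00.zip"
+        services.restore_backup(backup)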
+ """ + if not backup_file.exists(): + raise FileNotFoundError(f"Backup file not found: {backup_file}") + + logger.info(f"Restoring from backup: {backup_file}") + + with tempfile.TemporaryDirectory(prefix="dispatcharr-restore-") as temp_dir: + temp_path = Path(temp_dir) + + # Extract backup + logger.debug("Extracting backup archive...") + with ZipFile(backup_file, "r") as zip_file: + zip_file.extractall(temp_path) + + # Read metadata + metadata_file = temp_path / "metadata.json" + if not metadata_file.exists(): + raise ValueError("Invalid backup: missing metadata.json") + + with open(metadata_file) as f: + metadata = json.load(f) + + # Restore database + _restore_database(temp_path, metadata) + + logger.info("Restore completed successfully") + + +def _restore_database(temp_path: Path, metadata: dict) -> None: + """Restore database from backup.""" + db_type = metadata.get("database_type", "postgresql") + db_file = metadata.get("database_file", "database.dump") + dump_file = temp_path / db_file + + if not dump_file.exists(): + raise ValueError(f"Invalid backup: missing {db_file}") + + current_db_type = "postgresql" if _is_postgresql() else "sqlite" + + if db_type != current_db_type: + raise ValueError( + f"Database type mismatch: backup is {db_type}, " + f"but current database is {current_db_type}" + ) + + if db_type == "postgresql": + _restore_postgresql(dump_file) + else: + _restore_sqlite(dump_file) + + +def list_backups() -> list[dict]: + """List all available backup files with metadata.""" + backup_dir = get_backup_dir() + backups = [] + + for backup_file in sorted(backup_dir.glob("dispatcharr-backup-*.zip"), reverse=True): + # Use UTC timezone so frontend can convert to user's local time + created_time = datetime.datetime.fromtimestamp(backup_file.stat().st_mtime, datetime.UTC) + backups.append({ + "name": backup_file.name, + "size": backup_file.stat().st_size, + "created": created_time.isoformat(), + }) + + return backups + + +def delete_backup(filename: str) -> None: + """Delete a backup file.""" + backup_dir = get_backup_dir() + backup_file = backup_dir / filename + + if not backup_file.exists(): + raise FileNotFoundError(f"Backup file not found: {filename}") + + if not backup_file.is_file(): + raise ValueError(f"Invalid backup file: {filename}") + + backup_file.unlink() + logger.info(f"Deleted backup: {filename}") diff --git a/apps/backups/tasks.py b/apps/backups/tasks.py new file mode 100644 index 00000000..f531fef8 --- /dev/null +++ b/apps/backups/tasks.py @@ -0,0 +1,106 @@ +import logging +import traceback +from celery import shared_task + +from . import services + +logger = logging.getLogger(__name__) + + +def _cleanup_old_backups(retention_count: int) -> int: + """Delete old backups, keeping only the most recent N. 
Returns count deleted.""" + if retention_count <= 0: + return 0 + + backups = services.list_backups() + if len(backups) <= retention_count: + return 0 + + # Backups are sorted newest first, so delete from the end + to_delete = backups[retention_count:] + deleted = 0 + + for backup in to_delete: + try: + services.delete_backup(backup["name"]) + deleted += 1 + logger.info(f"[CLEANUP] Deleted old backup: {backup['name']}") + except Exception as e: + logger.error(f"[CLEANUP] Failed to delete {backup['name']}: {e}") + + return deleted + + +@shared_task(bind=True) +def create_backup_task(self): + """Celery task to create a backup asynchronously.""" + try: + logger.info(f"[BACKUP] Starting backup task {self.request.id}") + backup_file = services.create_backup() + logger.info(f"[BACKUP] Task {self.request.id} completed: {backup_file.name}") + return { + "status": "completed", + "filename": backup_file.name, + "size": backup_file.stat().st_size, + } + except Exception as e: + logger.error(f"[BACKUP] Task {self.request.id} failed: {str(e)}") + logger.error(f"[BACKUP] Traceback: {traceback.format_exc()}") + return { + "status": "failed", + "error": str(e), + } + + +@shared_task(bind=True) +def restore_backup_task(self, filename: str): + """Celery task to restore a backup asynchronously.""" + try: + logger.info(f"[RESTORE] Starting restore task {self.request.id} for {filename}") + backup_dir = services.get_backup_dir() + backup_file = backup_dir / filename + logger.info(f"[RESTORE] Backup file path: {backup_file}") + services.restore_backup(backup_file) + logger.info(f"[RESTORE] Task {self.request.id} completed successfully") + return { + "status": "completed", + "filename": filename, + } + except Exception as e: + logger.error(f"[RESTORE] Task {self.request.id} failed: {str(e)}") + logger.error(f"[RESTORE] Traceback: {traceback.format_exc()}") + return { + "status": "failed", + "error": str(e), + } + + +@shared_task(bind=True) +def scheduled_backup_task(self, retention_count: int = 0): + """Celery task for scheduled backups with optional retention cleanup.""" + try: + logger.info(f"[SCHEDULED] Starting scheduled backup task {self.request.id}") + + # Create backup + backup_file = services.create_backup() + logger.info(f"[SCHEDULED] Backup created: {backup_file.name}") + + # Cleanup old backups if retention is set + deleted = 0 + if retention_count > 0: + deleted = _cleanup_old_backups(retention_count) + logger.info(f"[SCHEDULED] Cleanup complete, deleted {deleted} old backup(s)") + + return { + "status": "completed", + "filename": backup_file.name, + "size": backup_file.stat().st_size, + "deleted_count": deleted, + } + except Exception as e: + logger.error(f"[SCHEDULED] Task {self.request.id} failed: {str(e)}") + logger.error(f"[SCHEDULED] Traceback: {traceback.format_exc()}") + return { + "status": "failed", + "error": str(e), + } diff --git a/apps/backups/tests.py b/apps/backups/tests.py new file mode 100644 index 00000000..dc8a5136 --- /dev/null +++ b/apps/backups/tests.py @@ -0,0 +1,1163 @@ +import json +import tempfile +from io import BytesIO +from pathlib import Path +from zipfile import ZipFile +from unittest.mock import patch, MagicMock + +from django.test import TestCase +from django.contrib.auth import get_user_model +from rest_framework.test import APIClient +from rest_framework_simplejwt.tokens import RefreshToken + +from . 
import services + +User = get_user_model() + + +class BackupServicesTestCase(TestCase): + """Test cases for backup services""" + + def setUp(self): + self.temp_backup_dir = tempfile.mkdtemp() + + def tearDown(self): + import shutil + if Path(self.temp_backup_dir).exists(): + shutil.rmtree(self.temp_backup_dir) + + @patch('apps.backups.services.settings') + def test_get_backup_dir_creates_directory(self, mock_settings): + """Test that get_backup_dir creates the directory if it doesn't exist""" + mock_settings.BACKUP_ROOT = self.temp_backup_dir + + with patch('apps.backups.services.Path') as mock_path: + mock_path_instance = MagicMock() + mock_path_instance.mkdir = MagicMock() + mock_path.return_value = mock_path_instance + + services.get_backup_dir() + mock_path_instance.mkdir.assert_called_once_with(parents=True, exist_ok=True) + + @patch('apps.backups.services.get_backup_dir') + @patch('apps.backups.services._is_postgresql') + @patch('apps.backups.services._dump_sqlite') + def test_create_backup_success_sqlite(self, mock_dump_sqlite, mock_is_pg, mock_get_backup_dir): + """Test successful backup creation with SQLite""" + mock_get_backup_dir.return_value = Path(self.temp_backup_dir) + mock_is_pg.return_value = False + + # Mock SQLite dump to create a temp file + def mock_dump(output_file): + output_file.write_text("sqlite dump") + + mock_dump_sqlite.side_effect = mock_dump + + result = services.create_backup() + + self.assertIsInstance(result, Path) + self.assertTrue(result.exists()) + self.assertTrue(result.name.startswith('dispatcharr-backup-')) + self.assertTrue(result.name.endswith('.zip')) + + # Verify the backup contains expected files + with ZipFile(result, 'r') as zf: + names = zf.namelist() + self.assertIn('database.sqlite3', names) + self.assertIn('metadata.json', names) + + # Check metadata + metadata = json.loads(zf.read('metadata.json')) + self.assertEqual(metadata['version'], 2) + self.assertEqual(metadata['database_type'], 'sqlite') + + @patch('apps.backups.services.get_backup_dir') + @patch('apps.backups.services._is_postgresql') + @patch('apps.backups.services._dump_postgresql') + def test_create_backup_success_postgresql(self, mock_dump_pg, mock_is_pg, mock_get_backup_dir): + """Test successful backup creation with PostgreSQL""" + mock_get_backup_dir.return_value = Path(self.temp_backup_dir) + mock_is_pg.return_value = True + + # Mock PostgreSQL dump to create a temp file + def mock_dump(output_file): + output_file.write_bytes(b"pg dump data") + + mock_dump_pg.side_effect = mock_dump + + result = services.create_backup() + + self.assertIsInstance(result, Path) + self.assertTrue(result.exists()) + + # Verify the backup contains expected files + with ZipFile(result, 'r') as zf: + names = zf.namelist() + self.assertIn('database.dump', names) + self.assertIn('metadata.json', names) + + # Check metadata + metadata = json.loads(zf.read('metadata.json')) + self.assertEqual(metadata['version'], 2) + self.assertEqual(metadata['database_type'], 'postgresql') + + @patch('apps.backups.services.get_backup_dir') + def test_list_backups_empty(self, mock_get_backup_dir): + """Test listing backups when none exist""" + mock_get_backup_dir.return_value = Path(self.temp_backup_dir) + + result = services.list_backups() + + self.assertEqual(result, []) + + @patch('apps.backups.services.get_backup_dir') + def test_list_backups_with_files(self, mock_get_backup_dir): + """Test listing backups with existing backup files""" + backup_dir = Path(self.temp_backup_dir) + mock_get_backup_dir.return_value 
= backup_dir + + # Create a fake backup file + test_backup = backup_dir / "dispatcharr-backup-2025.01.01.12.00.00.zip" + test_backup.write_text("fake backup content") + + result = services.list_backups() + + self.assertEqual(len(result), 1) + self.assertEqual(result[0]['name'], test_backup.name) + self.assertIn('size', result[0]) + self.assertIn('created', result[0]) + + @patch('apps.backups.services.get_backup_dir') + def test_delete_backup_success(self, mock_get_backup_dir): + """Test successful backup deletion""" + backup_dir = Path(self.temp_backup_dir) + mock_get_backup_dir.return_value = backup_dir + + # Create a fake backup file + test_backup = backup_dir / "dispatcharr-backup-test.zip" + test_backup.write_text("fake backup content") + + self.assertTrue(test_backup.exists()) + + services.delete_backup(test_backup.name) + + self.assertFalse(test_backup.exists()) + + @patch('apps.backups.services.get_backup_dir') + def test_delete_backup_not_found(self, mock_get_backup_dir): + """Test deleting a non-existent backup raises error""" + mock_get_backup_dir.return_value = Path(self.temp_backup_dir) + + with self.assertRaises(FileNotFoundError): + services.delete_backup("nonexistent-backup.zip") + + @patch('apps.backups.services.get_backup_dir') + @patch('apps.backups.services._is_postgresql') + @patch('apps.backups.services._restore_postgresql') + def test_restore_backup_postgresql(self, mock_restore_pg, mock_is_pg, mock_get_backup_dir): + """Test successful restoration of PostgreSQL backup""" + backup_dir = Path(self.temp_backup_dir) + mock_get_backup_dir.return_value = backup_dir + mock_is_pg.return_value = True + + # Create PostgreSQL backup file + backup_file = backup_dir / "test-backup.zip" + with ZipFile(backup_file, 'w') as zf: + zf.writestr('database.dump', b'pg dump data') + zf.writestr('metadata.json', json.dumps({ + 'version': 2, + 'database_type': 'postgresql', + 'database_file': 'database.dump' + })) + + services.restore_backup(backup_file) + + mock_restore_pg.assert_called_once() + + @patch('apps.backups.services.get_backup_dir') + @patch('apps.backups.services._is_postgresql') + @patch('apps.backups.services._restore_sqlite') + def test_restore_backup_sqlite(self, mock_restore_sqlite, mock_is_pg, mock_get_backup_dir): + """Test successful restoration of SQLite backup""" + backup_dir = Path(self.temp_backup_dir) + mock_get_backup_dir.return_value = backup_dir + mock_is_pg.return_value = False + + # Create SQLite backup file + backup_file = backup_dir / "test-backup.zip" + with ZipFile(backup_file, 'w') as zf: + zf.writestr('database.sqlite3', 'sqlite data') + zf.writestr('metadata.json', json.dumps({ + 'version': 2, + 'database_type': 'sqlite', + 'database_file': 'database.sqlite3' + })) + + services.restore_backup(backup_file) + + mock_restore_sqlite.assert_called_once() + + @patch('apps.backups.services.get_backup_dir') + @patch('apps.backups.services._is_postgresql') + def test_restore_backup_database_type_mismatch(self, mock_is_pg, mock_get_backup_dir): + """Test restore fails when database type doesn't match""" + backup_dir = Path(self.temp_backup_dir) + mock_get_backup_dir.return_value = backup_dir + mock_is_pg.return_value = True # Current system is PostgreSQL + + # Create SQLite backup file + backup_file = backup_dir / "test-backup.zip" + with ZipFile(backup_file, 'w') as zf: + zf.writestr('database.sqlite3', 'sqlite data') + zf.writestr('metadata.json', json.dumps({ + 'version': 2, + 'database_type': 'sqlite', # Backup is SQLite + 'database_file': 'database.sqlite3' 
+ })) + + with self.assertRaises(ValueError) as context: + services.restore_backup(backup_file) + + self.assertIn('mismatch', str(context.exception).lower()) + + def test_restore_backup_not_found(self): + """Test restoring from non-existent backup file""" + fake_path = Path("/tmp/nonexistent-backup-12345.zip") + + with self.assertRaises(FileNotFoundError): + services.restore_backup(fake_path) + + @patch('apps.backups.services.get_backup_dir') + def test_restore_backup_missing_metadata(self, mock_get_backup_dir): + """Test restoring from backup without metadata.json""" + backup_dir = Path(self.temp_backup_dir) + mock_get_backup_dir.return_value = backup_dir + + # Create a backup file missing metadata.json + backup_file = backup_dir / "invalid-backup.zip" + with ZipFile(backup_file, 'w') as zf: + zf.writestr('database.dump', b'fake dump data') + + with self.assertRaises(ValueError) as context: + services.restore_backup(backup_file) + + self.assertIn('metadata.json', str(context.exception)) + + @patch('apps.backups.services.get_backup_dir') + @patch('apps.backups.services._is_postgresql') + def test_restore_backup_missing_database(self, mock_is_pg, mock_get_backup_dir): + """Test restoring from backup missing database dump""" + backup_dir = Path(self.temp_backup_dir) + mock_get_backup_dir.return_value = backup_dir + mock_is_pg.return_value = True + + # Create backup file missing database dump + backup_file = backup_dir / "invalid-backup.zip" + with ZipFile(backup_file, 'w') as zf: + zf.writestr('metadata.json', json.dumps({ + 'version': 2, + 'database_type': 'postgresql', + 'database_file': 'database.dump' + })) + + with self.assertRaises(ValueError) as context: + services.restore_backup(backup_file) + + self.assertIn('database.dump', str(context.exception)) + + +class BackupAPITestCase(TestCase): + """Test cases for backup API endpoints""" + + def setUp(self): + self.client = APIClient() + self.user = User.objects.create_user( + username='testuser', + email='test@example.com', + password='testpass123' + ) + self.admin_user = User.objects.create_superuser( + username='admin', + email='admin@example.com', + password='adminpass123' + ) + self.temp_backup_dir = tempfile.mkdtemp() + + def get_auth_header(self, user): + """Helper method to get JWT auth header for a user""" + refresh = RefreshToken.for_user(user) + return f'Bearer {str(refresh.access_token)}' + + def tearDown(self): + import shutil + if Path(self.temp_backup_dir).exists(): + shutil.rmtree(self.temp_backup_dir) + + def test_list_backups_requires_admin(self): + """Test that listing backups requires admin privileges""" + url = '/api/backups/' + + # Unauthenticated request + response = self.client.get(url) + self.assertIn(response.status_code, [401, 403]) + + # Regular user request + response = self.client.get(url, HTTP_AUTHORIZATION=self.get_auth_header(self.user)) + self.assertIn(response.status_code, [401, 403]) + + @patch('apps.backups.services.list_backups') + def test_list_backups_success(self, mock_list_backups): + """Test successful backup listing""" + mock_list_backups.return_value = [ + { + 'name': 'backup-test.zip', + 'size': 1024, + 'created': '2025-01-01T12:00:00' + } + ] + + auth_header = self.get_auth_header(self.admin_user) + url = '/api/backups/' + response = self.client.get(url, HTTP_AUTHORIZATION=auth_header) + + self.assertEqual(response.status_code, 200) + data = response.json() + self.assertEqual(len(data), 1) + self.assertEqual(data[0]['name'], 'backup-test.zip') + + def test_create_backup_requires_admin(self): + 
"""Test that creating backups requires admin privileges""" + url = '/api/backups/create/' + + # Unauthenticated request + response = self.client.post(url) + self.assertIn(response.status_code, [401, 403]) + + # Regular user request + response = self.client.post(url, HTTP_AUTHORIZATION=self.get_auth_header(self.user)) + self.assertIn(response.status_code, [401, 403]) + + @patch('apps.backups.tasks.create_backup_task.delay') + def test_create_backup_success(self, mock_create_task): + """Test successful backup creation via API (async task)""" + mock_task = MagicMock() + mock_task.id = 'test-task-id-123' + mock_create_task.return_value = mock_task + + auth_header = self.get_auth_header(self.admin_user) + url = '/api/backups/create/' + response = self.client.post(url, HTTP_AUTHORIZATION=auth_header) + + self.assertEqual(response.status_code, 202) + data = response.json() + self.assertIn('task_id', data) + self.assertIn('task_token', data) + self.assertEqual(data['task_id'], 'test-task-id-123') + + @patch('apps.backups.tasks.create_backup_task.delay') + def test_create_backup_failure(self, mock_create_task): + """Test backup creation failure handling""" + mock_create_task.side_effect = Exception("Failed to start task") + + auth_header = self.get_auth_header(self.admin_user) + url = '/api/backups/create/' + response = self.client.post(url, HTTP_AUTHORIZATION=auth_header) + + self.assertEqual(response.status_code, 500) + data = response.json() + self.assertIn('detail', data) + + @patch('apps.backups.services.get_backup_dir') + def test_download_backup_success(self, mock_get_backup_dir): + """Test successful backup download""" + backup_dir = Path(self.temp_backup_dir) + mock_get_backup_dir.return_value = backup_dir + + # Create a test backup file + backup_file = backup_dir / "test-backup.zip" + backup_file.write_text("test backup content") + + auth_header = self.get_auth_header(self.admin_user) + url = '/api/backups/test-backup.zip/download/' + response = self.client.get(url, HTTP_AUTHORIZATION=auth_header) + + self.assertEqual(response.status_code, 200) + self.assertEqual(response['Content-Type'], 'application/zip') + + @patch('apps.backups.services.get_backup_dir') + def test_download_backup_not_found(self, mock_get_backup_dir): + """Test downloading non-existent backup""" + mock_get_backup_dir.return_value = Path(self.temp_backup_dir) + + auth_header = self.get_auth_header(self.admin_user) + url = '/api/backups/nonexistent.zip/download/' + response = self.client.get(url, HTTP_AUTHORIZATION=auth_header) + + self.assertEqual(response.status_code, 404) + + @patch('apps.backups.services.delete_backup') + def test_delete_backup_success(self, mock_delete_backup): + """Test successful backup deletion via API""" + mock_delete_backup.return_value = None + + auth_header = self.get_auth_header(self.admin_user) + url = '/api/backups/test-backup.zip/delete/' + response = self.client.delete(url, HTTP_AUTHORIZATION=auth_header) + + self.assertEqual(response.status_code, 204) + mock_delete_backup.assert_called_once_with('test-backup.zip') + + @patch('apps.backups.services.delete_backup') + def test_delete_backup_not_found(self, mock_delete_backup): + """Test deleting non-existent backup via API""" + mock_delete_backup.side_effect = FileNotFoundError("Not found") + + auth_header = self.get_auth_header(self.admin_user) + url = '/api/backups/nonexistent.zip/delete/' + response = self.client.delete(url, HTTP_AUTHORIZATION=auth_header) + + self.assertEqual(response.status_code, 404) + + def 
test_upload_backup_requires_file(self): + """Test that upload requires a file""" + auth_header = self.get_auth_header(self.admin_user) + url = '/api/backups/upload/' + response = self.client.post(url, HTTP_AUTHORIZATION=auth_header) + + self.assertEqual(response.status_code, 400) + data = response.json() + self.assertIn('No file uploaded', data['detail']) + + @patch('apps.backups.services.get_backup_dir') + def test_upload_backup_success(self, mock_get_backup_dir): + """Test successful backup upload""" + mock_get_backup_dir.return_value = Path(self.temp_backup_dir) + + # Create a fake backup file + fake_backup = BytesIO(b"fake backup content") + fake_backup.name = 'uploaded-backup.zip' + + auth_header = self.get_auth_header(self.admin_user) + url = '/api/backups/upload/' + response = self.client.post(url, {'file': fake_backup}, HTTP_AUTHORIZATION=auth_header) + + self.assertEqual(response.status_code, 201) + data = response.json() + self.assertIn('filename', data) + + @patch('apps.backups.services.get_backup_dir') + @patch('apps.backups.tasks.restore_backup_task.delay') + def test_restore_backup_success(self, mock_restore_task, mock_get_backup_dir): + """Test successful backup restoration via API (async task)""" + backup_dir = Path(self.temp_backup_dir) + mock_get_backup_dir.return_value = backup_dir + + mock_task = MagicMock() + mock_task.id = 'test-restore-task-456' + mock_restore_task.return_value = mock_task + + # Create a test backup file + backup_file = backup_dir / "test-backup.zip" + backup_file.write_text("test backup content") + + auth_header = self.get_auth_header(self.admin_user) + url = '/api/backups/test-backup.zip/restore/' + response = self.client.post(url, HTTP_AUTHORIZATION=auth_header) + + self.assertEqual(response.status_code, 202) + data = response.json() + self.assertIn('task_id', data) + self.assertIn('task_token', data) + self.assertEqual(data['task_id'], 'test-restore-task-456') + + @patch('apps.backups.services.get_backup_dir') + def test_restore_backup_not_found(self, mock_get_backup_dir): + """Test restoring from non-existent backup via API""" + mock_get_backup_dir.return_value = Path(self.temp_backup_dir) + + auth_header = self.get_auth_header(self.admin_user) + url = '/api/backups/nonexistent.zip/restore/' + response = self.client.post(url, HTTP_AUTHORIZATION=auth_header) + + self.assertEqual(response.status_code, 404) + + # --- Backup Status Endpoint Tests --- + + def test_backup_status_requires_auth_or_token(self): + """Test that backup_status requires auth or valid token""" + url = '/api/backups/status/fake-task-id/' + + # Unauthenticated request without token + response = self.client.get(url) + self.assertEqual(response.status_code, 401) + + def test_backup_status_invalid_token(self): + """Test that backup_status rejects invalid tokens""" + url = '/api/backups/status/fake-task-id/?token=invalid-token' + response = self.client.get(url) + self.assertEqual(response.status_code, 403) + + @patch('apps.backups.api_views.AsyncResult') + def test_backup_status_with_admin_auth(self, mock_async_result): + """Test backup_status with admin authentication""" + mock_result = MagicMock() + mock_result.ready.return_value = False + mock_result.failed.return_value = False + mock_result.state = 'PENDING' + mock_async_result.return_value = mock_result + + auth_header = self.get_auth_header(self.admin_user) + url = '/api/backups/status/test-task-id/' + response = self.client.get(url, HTTP_AUTHORIZATION=auth_header) + + self.assertEqual(response.status_code, 200) + data = 
response.json() + self.assertEqual(data['state'], 'pending') + + @patch('apps.backups.api_views.AsyncResult') + @patch('apps.backups.api_views._verify_task_token') + def test_backup_status_with_valid_token(self, mock_verify, mock_async_result): + """Test backup_status with valid token""" + mock_verify.return_value = True + mock_result = MagicMock() + mock_result.ready.return_value = True + mock_result.get.return_value = {'status': 'completed', 'filename': 'test.zip'} + mock_async_result.return_value = mock_result + + url = '/api/backups/status/test-task-id/?token=valid-token' + response = self.client.get(url) + + self.assertEqual(response.status_code, 200) + data = response.json() + self.assertEqual(data['state'], 'completed') + + @patch('apps.backups.api_views.AsyncResult') + def test_backup_status_task_failed(self, mock_async_result): + """Test backup_status when task failed""" + mock_result = MagicMock() + mock_result.ready.return_value = True + mock_result.get.return_value = {'status': 'failed', 'error': 'Something went wrong'} + mock_async_result.return_value = mock_result + + auth_header = self.get_auth_header(self.admin_user) + url = '/api/backups/status/test-task-id/' + response = self.client.get(url, HTTP_AUTHORIZATION=auth_header) + + self.assertEqual(response.status_code, 200) + data = response.json() + self.assertEqual(data['state'], 'failed') + self.assertIn('Something went wrong', data['error']) + + # --- Download Token Endpoint Tests --- + + def test_get_download_token_requires_admin(self): + """Test that get_download_token requires admin privileges""" + url = '/api/backups/test.zip/download-token/' + + response = self.client.get(url) + self.assertIn(response.status_code, [401, 403]) + + response = self.client.get(url, HTTP_AUTHORIZATION=self.get_auth_header(self.user)) + self.assertIn(response.status_code, [401, 403]) + + @patch('apps.backups.services.get_backup_dir') + def test_get_download_token_success(self, mock_get_backup_dir): + """Test successful download token generation""" + backup_dir = Path(self.temp_backup_dir) + mock_get_backup_dir.return_value = backup_dir + + # Create a test backup file + backup_file = backup_dir / "test-backup.zip" + backup_file.write_text("test content") + + auth_header = self.get_auth_header(self.admin_user) + url = '/api/backups/test-backup.zip/download-token/' + response = self.client.get(url, HTTP_AUTHORIZATION=auth_header) + + self.assertEqual(response.status_code, 200) + data = response.json() + self.assertIn('token', data) + self.assertEqual(len(data['token']), 32) + + @patch('apps.backups.services.get_backup_dir') + def test_get_download_token_not_found(self, mock_get_backup_dir): + """Test download token for non-existent file""" + mock_get_backup_dir.return_value = Path(self.temp_backup_dir) + + auth_header = self.get_auth_header(self.admin_user) + url = '/api/backups/nonexistent.zip/download-token/' + response = self.client.get(url, HTTP_AUTHORIZATION=auth_header) + + self.assertEqual(response.status_code, 404) + + # --- Download with Token Auth Tests --- + + @patch('apps.backups.services.get_backup_dir') + @patch('apps.backups.api_views._verify_task_token') + def test_download_backup_with_valid_token(self, mock_verify, mock_get_backup_dir): + """Test downloading backup with valid token (no auth header)""" + backup_dir = Path(self.temp_backup_dir) + mock_get_backup_dir.return_value = backup_dir + mock_verify.return_value = True + + # Create a test backup file + backup_file = backup_dir / "test-backup.zip" + 
backup_file.write_text("test backup content") + + url = '/api/backups/test-backup.zip/download/?token=valid-token' + response = self.client.get(url) + + self.assertEqual(response.status_code, 200) + + @patch('apps.backups.services.get_backup_dir') + def test_download_backup_invalid_token(self, mock_get_backup_dir): + """Test downloading backup with invalid token""" + mock_get_backup_dir.return_value = Path(self.temp_backup_dir) + + url = '/api/backups/test-backup.zip/download/?token=invalid-token' + response = self.client.get(url) + + self.assertEqual(response.status_code, 403) + + @patch('apps.backups.services.get_backup_dir') + @patch('apps.backups.tasks.restore_backup_task.delay') + def test_restore_backup_task_start_failure(self, mock_restore_task, mock_get_backup_dir): + """Test restore task start failure via API""" + backup_dir = Path(self.temp_backup_dir) + mock_get_backup_dir.return_value = backup_dir + mock_restore_task.side_effect = Exception("Failed to start restore task") + + # Create a test backup file + backup_file = backup_dir / "test-backup.zip" + backup_file.write_text("test content") + + auth_header = self.get_auth_header(self.admin_user) + url = '/api/backups/test-backup.zip/restore/' + response = self.client.post(url, HTTP_AUTHORIZATION=auth_header) + + self.assertEqual(response.status_code, 500) + data = response.json() + self.assertIn('detail', data) + + def test_get_schedule_requires_admin(self): + """Test that getting schedule requires admin privileges""" + url = '/api/backups/schedule/' + + # Unauthenticated request + response = self.client.get(url) + self.assertIn(response.status_code, [401, 403]) + + # Regular user request + response = self.client.get(url, HTTP_AUTHORIZATION=self.get_auth_header(self.user)) + self.assertIn(response.status_code, [401, 403]) + + @patch('apps.backups.api_views.get_schedule_settings') + def test_get_schedule_success(self, mock_get_settings): + """Test successful schedule retrieval""" + mock_get_settings.return_value = { + 'enabled': True, + 'frequency': 'daily', + 'time': '03:00', + 'day_of_week': 0, + 'retention_count': 5, + 'cron_expression': '', + } + + auth_header = self.get_auth_header(self.admin_user) + url = '/api/backups/schedule/' + response = self.client.get(url, HTTP_AUTHORIZATION=auth_header) + + self.assertEqual(response.status_code, 200) + data = response.json() + self.assertEqual(data['enabled'], True) + self.assertEqual(data['frequency'], 'daily') + self.assertEqual(data['retention_count'], 5) + + def test_update_schedule_requires_admin(self): + """Test that updating schedule requires admin privileges""" + url = '/api/backups/schedule/update/' + + # Unauthenticated request + response = self.client.put(url, {}, content_type='application/json') + self.assertIn(response.status_code, [401, 403]) + + # Regular user request + response = self.client.put( + url, + {}, + content_type='application/json', + HTTP_AUTHORIZATION=self.get_auth_header(self.user) + ) + self.assertIn(response.status_code, [401, 403]) + + @patch('apps.backups.api_views.update_schedule_settings') + def test_update_schedule_success(self, mock_update_settings): + """Test successful schedule update""" + mock_update_settings.return_value = { + 'enabled': True, + 'frequency': 'weekly', + 'time': '02:00', + 'day_of_week': 1, + 'retention_count': 10, + 'cron_expression': '', + } + + auth_header = self.get_auth_header(self.admin_user) + url = '/api/backups/schedule/update/' + response = self.client.put( + url, + {'enabled': True, 'frequency': 'weekly', 'time': 
'02:00', 'day_of_week': 1, 'retention_count': 10}, + content_type='application/json', + HTTP_AUTHORIZATION=auth_header + ) + + self.assertEqual(response.status_code, 200) + data = response.json() + self.assertEqual(data['frequency'], 'weekly') + self.assertEqual(data['day_of_week'], 1) + + @patch('apps.backups.api_views.update_schedule_settings') + def test_update_schedule_validation_error(self, mock_update_settings): + """Test schedule update with invalid data""" + mock_update_settings.side_effect = ValueError("frequency must be 'daily' or 'weekly'") + + auth_header = self.get_auth_header(self.admin_user) + url = '/api/backups/schedule/update/' + response = self.client.put( + url, + {'frequency': 'invalid'}, + content_type='application/json', + HTTP_AUTHORIZATION=auth_header + ) + + self.assertEqual(response.status_code, 400) + data = response.json() + self.assertIn('frequency', data['detail']) + + +class BackupSchedulerTestCase(TestCase): + """Test cases for backup scheduler""" + + databases = {'default'} + + @classmethod + def setUpClass(cls): + pass + + @classmethod + def tearDownClass(cls): + pass + + def setUp(self): + from core.models import CoreSettings + # Clean up any existing settings + CoreSettings.objects.filter(key__startswith='backup_').delete() + + def tearDown(self): + from core.models import CoreSettings + from django_celery_beat.models import PeriodicTask + CoreSettings.objects.filter(key__startswith='backup_').delete() + PeriodicTask.objects.filter(name='backup-scheduled-task').delete() + + def test_get_schedule_settings_defaults(self): + """Test that get_schedule_settings returns defaults when no settings exist""" + from . import scheduler + + settings = scheduler.get_schedule_settings() + + self.assertEqual(settings['enabled'], False) + self.assertEqual(settings['frequency'], 'daily') + self.assertEqual(settings['time'], '03:00') + self.assertEqual(settings['day_of_week'], 0) + self.assertEqual(settings['retention_count'], 0) + self.assertEqual(settings['cron_expression'], '') + + def test_update_schedule_settings_stores_values(self): + """Test that update_schedule_settings stores values correctly""" + from . import scheduler + + result = scheduler.update_schedule_settings({ + 'enabled': True, + 'frequency': 'weekly', + 'time': '04:30', + 'day_of_week': 3, + 'retention_count': 7, + }) + + self.assertEqual(result['enabled'], True) + self.assertEqual(result['frequency'], 'weekly') + self.assertEqual(result['time'], '04:30') + self.assertEqual(result['day_of_week'], 3) + self.assertEqual(result['retention_count'], 7) + + # Verify persistence + settings = scheduler.get_schedule_settings() + self.assertEqual(settings['enabled'], True) + self.assertEqual(settings['frequency'], 'weekly') + + def test_update_schedule_settings_invalid_frequency(self): + """Test that invalid frequency raises ValueError""" + from . import scheduler + + with self.assertRaises(ValueError) as context: + scheduler.update_schedule_settings({'frequency': 'monthly'}) + + self.assertIn('frequency', str(context.exception).lower()) + + def test_update_schedule_settings_invalid_time(self): + """Test that invalid time raises ValueError""" + from . import scheduler + + with self.assertRaises(ValueError) as context: + scheduler.update_schedule_settings({'time': 'invalid'}) + + self.assertIn('HH:MM', str(context.exception)) + + def test_update_schedule_settings_invalid_day_of_week(self): + """Test that invalid day_of_week raises ValueError""" + from . 
import scheduler + + with self.assertRaises(ValueError) as context: + scheduler.update_schedule_settings({'day_of_week': 7}) + + self.assertIn('day_of_week', str(context.exception).lower()) + + def test_update_schedule_settings_invalid_retention(self): + """Test that negative retention_count raises ValueError""" + from . import scheduler + + with self.assertRaises(ValueError) as context: + scheduler.update_schedule_settings({'retention_count': -1}) + + self.assertIn('retention_count', str(context.exception).lower()) + + def test_sync_creates_periodic_task_when_enabled(self): + """Test that enabling schedule creates a PeriodicTask""" + from . import scheduler + from django_celery_beat.models import PeriodicTask + + scheduler.update_schedule_settings({ + 'enabled': True, + 'frequency': 'daily', + 'time': '05:00', + }) + + task = PeriodicTask.objects.get(name='backup-scheduled-task') + self.assertTrue(task.enabled) + self.assertEqual(task.crontab.hour, '05') + self.assertEqual(task.crontab.minute, '00') + + def test_sync_deletes_periodic_task_when_disabled(self): + """Test that disabling schedule removes PeriodicTask""" + from . import scheduler + from django_celery_beat.models import PeriodicTask + + # First enable + scheduler.update_schedule_settings({ + 'enabled': True, + 'frequency': 'daily', + 'time': '05:00', + }) + + self.assertTrue(PeriodicTask.objects.filter(name='backup-scheduled-task').exists()) + + # Then disable + scheduler.update_schedule_settings({'enabled': False}) + + self.assertFalse(PeriodicTask.objects.filter(name='backup-scheduled-task').exists()) + + def test_weekly_schedule_sets_day_of_week(self): + """Test that weekly schedule sets correct day_of_week in crontab""" + from . import scheduler + from django_celery_beat.models import PeriodicTask + + scheduler.update_schedule_settings({ + 'enabled': True, + 'frequency': 'weekly', + 'time': '06:00', + 'day_of_week': 3, # Wednesday + }) + + task = PeriodicTask.objects.get(name='backup-scheduled-task') + self.assertEqual(task.crontab.day_of_week, '3') + + def test_cron_expression_stores_value(self): + """Test that cron_expression is stored and retrieved correctly""" + from . import scheduler + + result = scheduler.update_schedule_settings({ + 'enabled': True, + 'cron_expression': '*/5 * * * *', + }) + + self.assertEqual(result['cron_expression'], '*/5 * * * *') + + # Verify persistence + settings = scheduler.get_schedule_settings() + self.assertEqual(settings['cron_expression'], '*/5 * * * *') + + def test_cron_expression_creates_correct_schedule(self): + """Test that cron expression creates correct CrontabSchedule""" + from . import scheduler + from django_celery_beat.models import PeriodicTask + + scheduler.update_schedule_settings({ + 'enabled': True, + 'cron_expression': '*/15 2 * * 1-5', # Every 15 mins during 2 AM hour on weekdays + }) + + task = PeriodicTask.objects.get(name='backup-scheduled-task') + self.assertEqual(task.crontab.minute, '*/15') + self.assertEqual(task.crontab.hour, '2') + self.assertEqual(task.crontab.day_of_month, '*') + self.assertEqual(task.crontab.month_of_year, '*') + self.assertEqual(task.crontab.day_of_week, '1-5') + + def test_cron_expression_invalid_format(self): + """Test that invalid cron expression raises ValueError""" + from . 
import scheduler + + # Too few parts + with self.assertRaises(ValueError) as context: + scheduler.update_schedule_settings({ + 'enabled': True, + 'cron_expression': '0 3 *', + }) + self.assertIn('5 parts', str(context.exception)) + + def test_cron_expression_empty_uses_simple_mode(self): + """Test that empty cron_expression falls back to simple frequency mode""" + from . import scheduler + from django_celery_beat.models import PeriodicTask + + scheduler.update_schedule_settings({ + 'enabled': True, + 'frequency': 'daily', + 'time': '04:00', + 'cron_expression': '', # Empty, should use simple mode + }) + + task = PeriodicTask.objects.get(name='backup-scheduled-task') + self.assertEqual(task.crontab.minute, '00') + self.assertEqual(task.crontab.hour, '04') + self.assertEqual(task.crontab.day_of_week, '*') + + def test_cron_expression_overrides_simple_settings(self): + """Test that cron_expression takes precedence over frequency/time""" + from . import scheduler + from django_celery_beat.models import PeriodicTask + + scheduler.update_schedule_settings({ + 'enabled': True, + 'frequency': 'daily', + 'time': '03:00', + 'cron_expression': '0 */6 * * *', # Every 6 hours (should override daily at 3 AM) + }) + + task = PeriodicTask.objects.get(name='backup-scheduled-task') + self.assertEqual(task.crontab.minute, '0') + self.assertEqual(task.crontab.hour, '*/6') + self.assertEqual(task.crontab.day_of_week, '*') + + def test_periodic_task_uses_system_timezone(self): + """Test that CrontabSchedule is created with the system timezone""" + from . import scheduler + from django_celery_beat.models import PeriodicTask + from core.models import CoreSettings + + original_tz = CoreSettings.get_system_time_zone() + + try: + # Set a non-UTC timezone + CoreSettings.set_system_time_zone('America/New_York') + + scheduler.update_schedule_settings({ + 'enabled': True, + 'frequency': 'daily', + 'time': '03:00', + }) + + task = PeriodicTask.objects.get(name='backup-scheduled-task') + self.assertEqual(str(task.crontab.timezone), 'America/New_York') + finally: + scheduler.update_schedule_settings({'enabled': False}) + CoreSettings.set_system_time_zone(original_tz) + + def test_periodic_task_timezone_updates_with_schedule(self): + """Test that CrontabSchedule timezone is updated when schedule is modified""" + from . import scheduler + from django_celery_beat.models import PeriodicTask + from core.models import CoreSettings + + original_tz = CoreSettings.get_system_time_zone() + + try: + # Create initial schedule with one timezone + CoreSettings.set_system_time_zone('America/Los_Angeles') + scheduler.update_schedule_settings({ + 'enabled': True, + 'frequency': 'daily', + 'time': '02:00', + }) + + task = PeriodicTask.objects.get(name='backup-scheduled-task') + self.assertEqual(str(task.crontab.timezone), 'America/Los_Angeles') + + # Change system timezone and update schedule + CoreSettings.set_system_time_zone('Europe/London') + scheduler.update_schedule_settings({ + 'enabled': True, + 'time': '04:00', + }) + + task.refresh_from_db() + self.assertEqual(str(task.crontab.timezone), 'Europe/London') + finally: + scheduler.update_schedule_settings({'enabled': False}) + CoreSettings.set_system_time_zone(original_tz) + + def test_orphaned_crontab_cleanup(self): + """Test that old CrontabSchedule is deleted when schedule changes""" + from . 
import scheduler + from django_celery_beat.models import PeriodicTask, CrontabSchedule + + # Create initial daily schedule + scheduler.update_schedule_settings({ + 'enabled': True, + 'frequency': 'daily', + 'time': '03:00', + }) + + task = PeriodicTask.objects.get(name='backup-scheduled-task') + first_crontab_id = task.crontab.id + initial_count = CrontabSchedule.objects.count() + + # Change to weekly schedule (different crontab) + scheduler.update_schedule_settings({ + 'enabled': True, + 'frequency': 'weekly', + 'day_of_week': 3, + 'time': '03:00', + }) + + task.refresh_from_db() + second_crontab_id = task.crontab.id + + # Verify old crontab was deleted + self.assertNotEqual(first_crontab_id, second_crontab_id) + self.assertFalse(CrontabSchedule.objects.filter(id=first_crontab_id).exists()) + self.assertEqual(CrontabSchedule.objects.count(), initial_count) + + # Cleanup + scheduler.update_schedule_settings({'enabled': False}) + + +class BackupTasksTestCase(TestCase): + """Test cases for backup Celery tasks""" + + def setUp(self): + self.temp_backup_dir = tempfile.mkdtemp() + + def tearDown(self): + import shutil + if Path(self.temp_backup_dir).exists(): + shutil.rmtree(self.temp_backup_dir) + + @patch('apps.backups.tasks.services.list_backups') + @patch('apps.backups.tasks.services.delete_backup') + def test_cleanup_old_backups_keeps_recent(self, mock_delete, mock_list): + """Test that cleanup keeps the most recent backups""" + from .tasks import _cleanup_old_backups + + mock_list.return_value = [ + {'name': 'backup-3.zip'}, # newest + {'name': 'backup-2.zip'}, + {'name': 'backup-1.zip'}, # oldest + ] + + deleted = _cleanup_old_backups(retention_count=2) + + self.assertEqual(deleted, 1) + mock_delete.assert_called_once_with('backup-1.zip') + + @patch('apps.backups.tasks.services.list_backups') + @patch('apps.backups.tasks.services.delete_backup') + def test_cleanup_old_backups_does_nothing_when_under_limit(self, mock_delete, mock_list): + """Test that cleanup does nothing when under retention limit""" + from .tasks import _cleanup_old_backups + + mock_list.return_value = [ + {'name': 'backup-2.zip'}, + {'name': 'backup-1.zip'}, + ] + + deleted = _cleanup_old_backups(retention_count=5) + + self.assertEqual(deleted, 0) + mock_delete.assert_not_called() + + @patch('apps.backups.tasks.services.list_backups') + @patch('apps.backups.tasks.services.delete_backup') + def test_cleanup_old_backups_zero_retention_keeps_all(self, mock_delete, mock_list): + """Test that retention_count=0 keeps all backups""" + from .tasks import _cleanup_old_backups + + mock_list.return_value = [ + {'name': 'backup-3.zip'}, + {'name': 'backup-2.zip'}, + {'name': 'backup-1.zip'}, + ] + + deleted = _cleanup_old_backups(retention_count=0) + + self.assertEqual(deleted, 0) + mock_delete.assert_not_called() + + @patch('apps.backups.tasks.services.create_backup') + @patch('apps.backups.tasks._cleanup_old_backups') + def test_scheduled_backup_task_success(self, mock_cleanup, mock_create): + """Test scheduled backup task success""" + from .tasks import scheduled_backup_task + + mock_backup_file = MagicMock() + mock_backup_file.name = 'scheduled-backup.zip' + mock_backup_file.stat.return_value.st_size = 1024 + mock_create.return_value = mock_backup_file + mock_cleanup.return_value = 2 + + result = scheduled_backup_task(retention_count=5) + + self.assertEqual(result['status'], 'completed') + self.assertEqual(result['filename'], 'scheduled-backup.zip') + self.assertEqual(result['size'], 1024) + 
self.assertEqual(result['deleted_count'], 2) + mock_cleanup.assert_called_once_with(5) + + @patch('apps.backups.tasks.services.create_backup') + @patch('apps.backups.tasks._cleanup_old_backups') + def test_scheduled_backup_task_no_cleanup_when_retention_zero(self, mock_cleanup, mock_create): + """Test scheduled backup skips cleanup when retention is 0""" + from .tasks import scheduled_backup_task + + mock_backup_file = MagicMock() + mock_backup_file.name = 'scheduled-backup.zip' + mock_backup_file.stat.return_value.st_size = 1024 + mock_create.return_value = mock_backup_file + + result = scheduled_backup_task(retention_count=0) + + self.assertEqual(result['status'], 'completed') + self.assertEqual(result['deleted_count'], 0) + mock_cleanup.assert_not_called() + + @patch('apps.backups.tasks.services.create_backup') + def test_scheduled_backup_task_failure(self, mock_create): + """Test scheduled backup task handles failure""" + from .tasks import scheduled_backup_task + + mock_create.side_effect = Exception("Backup failed") + + result = scheduled_backup_task(retention_count=5) + + self.assertEqual(result['status'], 'failed') + self.assertIn('Backup failed', result['error']) diff --git a/apps/channels/api_urls.py b/apps/channels/api_urls.py index 7999abd9..bd53ae45 100644 --- a/apps/channels/api_urls.py +++ b/apps/channels/api_urls.py @@ -47,7 +47,7 @@ urlpatterns = [ path('series-rules/', SeriesRulesAPIView.as_view(), name='series_rules'), path('series-rules/evaluate/', EvaluateSeriesRulesAPIView.as_view(), name='evaluate_series_rules'), path('series-rules/bulk-remove/', BulkRemoveSeriesRecordingsAPIView.as_view(), name='bulk_remove_series_recordings'), - path('series-rules//', DeleteSeriesRuleAPIView.as_view(), name='delete_series_rule'), + path('series-rules//', DeleteSeriesRuleAPIView.as_view(), name='delete_series_rule'), path('recordings/bulk-delete-upcoming/', BulkDeleteUpcomingRecordingsAPIView.as_view(), name='bulk_delete_upcoming_recordings'), path('dvr/comskip-config/', ComskipConfigAPIView.as_view(), name='comskip_config'), ] diff --git a/apps/channels/api_views.py b/apps/channels/api_views.py index eccc5028..aebb74a3 100644 --- a/apps/channels/api_views.py +++ b/apps/channels/api_views.py @@ -8,7 +8,9 @@ from drf_yasg.utils import swagger_auto_schema from drf_yasg import openapi from django.shortcuts import get_object_or_404, get_list_or_404 from django.db import transaction +from django.db.models import Q import os, json, requests, logging +from urllib.parse import unquote from apps.accounts.permissions import ( Authenticated, IsAdmin, @@ -419,10 +421,36 @@ class ChannelViewSet(viewsets.ModelViewSet): group_names = channel_group.split(",") qs = qs.filter(channel_group__name__in=group_names) - if self.request.user.user_level < 10: - qs = qs.filter(user_level__lte=self.request.user.user_level) + filters = {} + q_filters = Q() - return qs + channel_profile_id = self.request.query_params.get("channel_profile_id") + show_disabled_param = self.request.query_params.get("show_disabled", None) + only_streamless = self.request.query_params.get("only_streamless", None) + + if channel_profile_id: + try: + profile_id_int = int(channel_profile_id) + filters["channelprofilemembership__channel_profile_id"] = profile_id_int + + if show_disabled_param is None: + filters["channelprofilemembership__enabled"] = True + except (ValueError, TypeError): + # Ignore invalid profile id values + pass + + if only_streamless: + q_filters &= Q(streams__isnull=True) + + if self.request.user.user_level < 10: + 
filters["user_level__lte"] = self.request.user.user_level + + if filters: + qs = qs.filter(**filters) + if q_filters: + qs = qs.filter(q_filters) + + return qs.distinct() def get_serializer_context(self): context = super().get_serializer_context() @@ -2026,7 +2054,7 @@ class DeleteSeriesRuleAPIView(APIView): return [Authenticated()] def delete(self, request, tvg_id): - tvg_id = str(tvg_id) + tvg_id = unquote(str(tvg_id or "")) rules = [r for r in CoreSettings.get_dvr_series_rules() if str(r.get("tvg_id")) != tvg_id] CoreSettings.set_dvr_series_rules(rules) return Response({"success": True, "rules": rules}) diff --git a/apps/output/views.py b/apps/output/views.py index bc2bace5..1710fa4d 100644 --- a/apps/output/views.py +++ b/apps/output/views.py @@ -161,18 +161,7 @@ def generate_m3u(request, profile_name=None, user=None): channelprofilemembership__enabled=True ).order_by('channel_number') else: - if profile_name is not None: - try: - channel_profile = ChannelProfile.objects.get(name=profile_name) - except ChannelProfile.DoesNotExist: - logger.warning("Requested channel profile (%s) during m3u generation does not exist", profile_name) - raise Http404(f"Channel profile '{profile_name}' not found") - channels = Channel.objects.filter( - channelprofilemembership__channel_profile=channel_profile, - channelprofilemembership__enabled=True, - ).order_by("channel_number") - else: - channels = Channel.objects.order_by("channel_number") + channels = Channel.objects.order_by("channel_number") # Check if the request wants to use direct logo URLs instead of cache use_cached_logos = request.GET.get('cachedlogos', 'true').lower() != 'false' @@ -2314,20 +2303,20 @@ def xc_get_epg(request, user, short=False): program_output = { "id": f"{id}", "epg_id": f"{epg_id}", - "title": base64.b64encode(title.encode()).decode(), + "title": base64.b64encode((title or "").encode()).decode(), "lang": "", - "start": start.strftime("%Y%m%d%H%M%S"), - "end": end.strftime("%Y%m%d%H%M%S"), - "description": base64.b64encode(description.encode()).decode(), - "channel_id": channel_num_int, - "start_timestamp": int(start.timestamp()), - "stop_timestamp": int(end.timestamp()), + "start": start.strftime("%Y-%m-%d %H:%M:%S"), + "end": end.strftime("%Y-%m-%d %H:%M:%S"), + "description": base64.b64encode((description or "").encode()).decode(), + "channel_id": str(channel_num_int), + "start_timestamp": str(int(start.timestamp())), + "stop_timestamp": str(int(end.timestamp())), "stream_id": f"{channel_id}", } if short == False: program_output["now_playing"] = 1 if start <= django_timezone.now() <= end else 0 - program_output["has_archive"] = "0" + program_output["has_archive"] = 0 output['epg_listings'].append(program_output) @@ -2532,34 +2521,45 @@ def xc_get_series_info(request, user, series_id): except Exception as e: logger.error(f"Error refreshing series data for relation {series_relation.id}: {str(e)}") - # Get episodes for this series from the same M3U account - episode_relations = M3UEpisodeRelation.objects.filter( - episode__series=series, - m3u_account=series_relation.m3u_account - ).select_related('episode').order_by('episode__season_number', 'episode__episode_number') + # Get unique episodes for this series that have relations from any active M3U account + # We query episodes directly to avoid duplicates when multiple relations exist + # (e.g., same episode in different languages/qualities) + from apps.vod.models import Episode + episodes = Episode.objects.filter( + series=series, + 
m3u_relations__m3u_account__is_active=True + ).distinct().order_by('season_number', 'episode_number') # Group episodes by season seasons = {} - for relation in episode_relations: - episode = relation.episode + for episode in episodes: season_num = episode.season_number or 1 if season_num not in seasons: seasons[season_num] = [] - # Try to get the highest priority related M3UEpisodeRelation for this episode (for video/audio/bitrate) + # Get the highest priority relation for this episode (for container_extension, video/audio/bitrate) from apps.vod.models import M3UEpisodeRelation - first_relation = M3UEpisodeRelation.objects.filter( - episode=episode + best_relation = M3UEpisodeRelation.objects.filter( + episode=episode, + m3u_account__is_active=True ).select_related('m3u_account').order_by('-m3u_account__priority', 'id').first() + video = audio = bitrate = None - if first_relation and first_relation.custom_properties: - info = first_relation.custom_properties.get('info') - if info and isinstance(info, dict): - info_info = info.get('info') - if info_info and isinstance(info_info, dict): - video = info_info.get('video', {}) - audio = info_info.get('audio', {}) - bitrate = info_info.get('bitrate', 0) + container_extension = "mp4" + added_timestamp = str(int(episode.created_at.timestamp())) + + if best_relation: + container_extension = best_relation.container_extension or "mp4" + added_timestamp = str(int(best_relation.created_at.timestamp())) + if best_relation.custom_properties: + info = best_relation.custom_properties.get('info') + if info and isinstance(info, dict): + info_info = info.get('info') + if info_info and isinstance(info_info, dict): + video = info_info.get('video', {}) + audio = info_info.get('audio', {}) + bitrate = info_info.get('bitrate', 0) + if video is None: video = episode.custom_properties.get('video', {}) if episode.custom_properties else {} if audio is None: @@ -2572,8 +2572,8 @@ def xc_get_series_info(request, user, series_id): "season": season_num, "episode_num": episode.episode_number or 0, "title": episode.name, - "container_extension": relation.container_extension or "mp4", - "added": str(int(relation.created_at.timestamp())), + "container_extension": container_extension, + "added": added_timestamp, "custom_sid": None, "direct_source": "", "info": { @@ -2889,7 +2889,7 @@ def xc_series_stream(request, username, password, stream_id, extension): filters = {"episode_id": stream_id, "m3u_account__is_active": True} try: - episode_relation = M3UEpisodeRelation.objects.select_related('episode').get(**filters) + episode_relation = M3UEpisodeRelation.objects.select_related('episode').filter(**filters).order_by('-m3u_account__priority', 'id').first() except M3UEpisodeRelation.DoesNotExist: return JsonResponse({"error": "Episode not found"}, status=404) diff --git a/apps/proxy/vod_proxy/multi_worker_connection_manager.py b/apps/proxy/vod_proxy/multi_worker_connection_manager.py index fefc8739..251721c5 100644 --- a/apps/proxy/vod_proxy/multi_worker_connection_manager.py +++ b/apps/proxy/vod_proxy/multi_worker_connection_manager.py @@ -24,6 +24,11 @@ from apps.m3u.models import M3UAccountProfile logger = logging.getLogger("vod_proxy") +def get_vod_client_stop_key(client_id): + """Get the Redis key for signaling a VOD client to stop""" + return f"vod_proxy:client:{client_id}:stop" + + def infer_content_type_from_url(url: str) -> Optional[str]: """ Infer MIME type from file extension in URL @@ -832,6 +837,7 @@ class MultiWorkerVODConnectionManager: # Create streaming generator 
def stream_generator(): decremented = False + stop_signal_detected = False try: logger.info(f"[{client_id}] Worker {self.worker_id} - Starting Redis-backed stream") @@ -846,14 +852,25 @@ class MultiWorkerVODConnectionManager: bytes_sent = 0 chunk_count = 0 + # Get the stop signal key for this client + stop_key = get_vod_client_stop_key(client_id) + for chunk in upstream_response.iter_content(chunk_size=8192): if chunk: yield chunk bytes_sent += len(chunk) chunk_count += 1 - # Update activity every 100 chunks in consolidated connection state + # Check for stop signal every 100 chunks if chunk_count % 100 == 0: + # Check if stop signal has been set + if self.redis_client and self.redis_client.exists(stop_key): + logger.info(f"[{client_id}] Worker {self.worker_id} - Stop signal detected, terminating stream") + # Delete the stop key + self.redis_client.delete(stop_key) + stop_signal_detected = True + break + # Update the connection state logger.debug(f"Client: [{client_id}] Worker: {self.worker_id} sent {chunk_count} chunks for VOD: {content_name}") if redis_connection._acquire_lock(): @@ -867,7 +884,10 @@ class MultiWorkerVODConnectionManager: finally: redis_connection._release_lock() - logger.info(f"[{client_id}] Worker {self.worker_id} - Redis-backed stream completed: {bytes_sent} bytes sent") + if stop_signal_detected: + logger.info(f"[{client_id}] Worker {self.worker_id} - Stream stopped by signal: {bytes_sent} bytes sent") + else: + logger.info(f"[{client_id}] Worker {self.worker_id} - Redis-backed stream completed: {bytes_sent} bytes sent") redis_connection.decrement_active_streams() decremented = True diff --git a/apps/proxy/vod_proxy/urls.py b/apps/proxy/vod_proxy/urls.py index c06426ce..f48f70e0 100644 --- a/apps/proxy/vod_proxy/urls.py +++ b/apps/proxy/vod_proxy/urls.py @@ -21,4 +21,7 @@ urlpatterns = [ # VOD Stats path('stats/', views.VODStatsView.as_view(), name='vod_stats'), + + # Stop VOD client connection + path('stop_client/', views.stop_vod_client, name='stop_vod_client'), ] diff --git a/apps/proxy/vod_proxy/views.py b/apps/proxy/vod_proxy/views.py index 00ed8a10..f3aca3fc 100644 --- a/apps/proxy/vod_proxy/views.py +++ b/apps/proxy/vod_proxy/views.py @@ -15,7 +15,7 @@ from django.views import View from apps.vod.models import Movie, Series, Episode from apps.m3u.models import M3UAccount, M3UAccountProfile from apps.proxy.vod_proxy.connection_manager import VODConnectionManager -from apps.proxy.vod_proxy.multi_worker_connection_manager import MultiWorkerVODConnectionManager, infer_content_type_from_url +from apps.proxy.vod_proxy.multi_worker_connection_manager import MultiWorkerVODConnectionManager, infer_content_type_from_url, get_vod_client_stop_key from .utils import get_client_info, create_vod_response logger = logging.getLogger(__name__) @@ -1011,3 +1011,59 @@ class VODStatsView(View): except Exception as e: logger.error(f"Error getting VOD stats: {e}") return JsonResponse({'error': str(e)}, status=500) + + +from rest_framework.decorators import api_view, permission_classes +from apps.accounts.permissions import IsAdmin + + +@csrf_exempt +@api_view(["POST"]) +@permission_classes([IsAdmin]) +def stop_vod_client(request): + """Stop a specific VOD client connection using stop signal mechanism""" + try: + # Parse request body + import json + try: + data = json.loads(request.body) + except json.JSONDecodeError: + return JsonResponse({'error': 'Invalid JSON'}, status=400) + + client_id = data.get('client_id') + if not client_id: + return JsonResponse({'error': 'No client_id 
provided'}, status=400) + + logger.info(f"Request to stop VOD client: {client_id}") + + # Get Redis client + connection_manager = MultiWorkerVODConnectionManager.get_instance() + redis_client = connection_manager.redis_client + + if not redis_client: + return JsonResponse({'error': 'Redis not available'}, status=500) + + # Check if connection exists + connection_key = f"vod_persistent_connection:{client_id}" + connection_data = redis_client.hgetall(connection_key) + if not connection_data: + logger.warning(f"VOD connection not found: {client_id}") + return JsonResponse({'error': 'Connection not found'}, status=404) + + # Set a stop signal key that the worker will check + stop_key = get_vod_client_stop_key(client_id) + redis_client.setex(stop_key, 60, "true") # 60 second TTL + + logger.info(f"Set stop signal for VOD client: {client_id}") + + return JsonResponse({ + 'message': 'VOD client stop signal sent', + 'client_id': client_id, + 'stop_key': stop_key + }) + + except Exception as e: + logger.error(f"Error stopping VOD client: {e}", exc_info=True) + return JsonResponse({'error': str(e)}, status=500) + + diff --git a/apps/vod/tasks.py b/apps/vod/tasks.py index 1170543a..d42be946 100644 --- a/apps/vod/tasks.py +++ b/apps/vod/tasks.py @@ -1232,7 +1232,13 @@ def refresh_series_episodes(account, series, external_series_id, episodes_data=N def batch_process_episodes(account, series, episodes_data, scan_start_time=None): - """Process episodes in batches for better performance""" + """Process episodes in batches for better performance. + + Note: Multiple streams can represent the same episode (e.g., different languages + or qualities). Each stream has a unique stream_id, but they share the same + season/episode number. We create one Episode record per (series, season, episode) + and multiple M3UEpisodeRelation records pointing to it. 
+ """ if not episodes_data: return @@ -1249,12 +1255,13 @@ def batch_process_episodes(account, series, episodes_data, scan_start_time=None) logger.info(f"Batch processing {len(all_episodes_data)} episodes for series {series.name}") # Extract episode identifiers - episode_keys = [] + # Note: episode_keys may have duplicates when multiple streams represent same episode + episode_keys = set() # Use set to track unique episode keys episode_ids = [] for episode_data in all_episodes_data: season_num = episode_data['_season_number'] episode_num = episode_data.get('episode_num', 0) - episode_keys.append((series.id, season_num, episode_num)) + episode_keys.add((series.id, season_num, episode_num)) episode_ids.append(str(episode_data.get('id'))) # Pre-fetch existing episodes @@ -1277,6 +1284,10 @@ def batch_process_episodes(account, series, episodes_data, scan_start_time=None) relations_to_create = [] relations_to_update = [] + # Track episodes we're creating in this batch to avoid duplicates + # Key: (series_id, season_number, episode_number) -> Episode object + episodes_pending_creation = {} + for episode_data in all_episodes_data: try: episode_id = str(episode_data.get('id')) @@ -1306,10 +1317,15 @@ def batch_process_episodes(account, series, episodes_data, scan_start_time=None) if backdrop: custom_props['backdrop_path'] = [backdrop] - # Find existing episode + # Find existing episode - check DB first, then pending creations episode_key = (series.id, season_number, episode_number) episode = existing_episodes.get(episode_key) + # Check if we already have this episode pending creation (multiple streams for same episode) + if not episode and episode_key in episodes_pending_creation: + episode = episodes_pending_creation[episode_key] + logger.debug(f"Reusing pending episode for S{season_number:02d}E{episode_number:02d} (stream_id: {episode_id})") + if episode: # Update existing episode updated = False @@ -1338,7 +1354,9 @@ def batch_process_episodes(account, series, episodes_data, scan_start_time=None) episode.custom_properties = custom_props if custom_props else None updated = True - if updated: + # Only add to update list if episode has a PK (exists in DB) and isn't already in list + # Episodes pending creation don't have PKs yet and will be created via bulk_create + if updated and episode.pk and episode not in episodes_to_update: episodes_to_update.append(episode) else: # Create new episode @@ -1356,6 +1374,8 @@ def batch_process_episodes(account, series, episodes_data, scan_start_time=None) custom_properties=custom_props if custom_props else None ) episodes_to_create.append(episode) + # Track this episode so subsequent streams with same season/episode can reuse it + episodes_pending_creation[episode_key] = episode # Handle episode relation if episode_id in existing_relations: @@ -1389,9 +1409,28 @@ def batch_process_episodes(account, series, episodes_data, scan_start_time=None) # Execute batch operations with transaction.atomic(): - # Create new episodes + # Create new episodes - use ignore_conflicts in case of race conditions if episodes_to_create: - Episode.objects.bulk_create(episodes_to_create) + Episode.objects.bulk_create(episodes_to_create, ignore_conflicts=True) + + # Re-fetch the created episodes to get their PKs + # We need to do this because bulk_create with ignore_conflicts doesn't set PKs + created_episode_keys = [ + (ep.series_id, ep.season_number, ep.episode_number) + for ep in episodes_to_create + ] + db_episodes = Episode.objects.filter(series=series) + episode_pk_map = { + 
(ep.series_id, ep.season_number, ep.episode_number): ep + for ep in db_episodes + } + + # Update relations to point to the actual DB episodes with PKs + for relation in relations_to_create: + ep = relation.episode + key = (ep.series_id, ep.season_number, ep.episode_number) + if key in episode_pk_map: + relation.episode = episode_pk_map[key] # Update existing episodes if episodes_to_update: @@ -1400,9 +1439,9 @@ def batch_process_episodes(account, series, episodes_data, scan_start_time=None) 'tmdb_id', 'imdb_id', 'custom_properties' ]) - # Create new episode relations + # Create new episode relations - use ignore_conflicts for stream_id duplicates if relations_to_create: - M3UEpisodeRelation.objects.bulk_create(relations_to_create) + M3UEpisodeRelation.objects.bulk_create(relations_to_create, ignore_conflicts=True) # Update existing episode relations if relations_to_update: diff --git a/dispatcharr/settings.py b/dispatcharr/settings.py index 5f8c23e2..556fb39d 100644 --- a/dispatcharr/settings.py +++ b/dispatcharr/settings.py @@ -226,6 +226,13 @@ CELERY_BEAT_SCHEDULE = { MEDIA_ROOT = BASE_DIR / "media" MEDIA_URL = "/media/" +# Backup settings +BACKUP_ROOT = os.environ.get("BACKUP_ROOT", "/data/backups") +BACKUP_DATA_DIRS = [ + os.environ.get("LOGOS_DIR", "/data/logos"), + os.environ.get("UPLOADS_DIR", "/data/uploads"), + os.environ.get("PLUGINS_DIR", "/data/plugins"), +] SERVER_IP = "127.0.0.1" diff --git a/docker/init/03-init-dispatcharr.sh b/docker/init/03-init-dispatcharr.sh index 5fbef23d..da7d4484 100644 --- a/docker/init/03-init-dispatcharr.sh +++ b/docker/init/03-init-dispatcharr.sh @@ -29,9 +29,17 @@ if [ "$(id -u)" = "0" ] && [ -d "/app" ]; then chown $PUID:$PGID /app fi fi - +# Configure nginx port sed -i "s/NGINX_PORT/${DISPATCHARR_PORT}/g" /etc/nginx/sites-enabled/default +# Configure nginx based on IPv6 availability +if ip -6 addr show | grep -q "inet6"; then + echo "✅ IPv6 is available, enabling IPv6 in nginx" +else + echo "⚠️ IPv6 not available, disabling IPv6 in nginx" + sed -i '/listen \[::\]:/d' /etc/nginx/sites-enabled/default +fi + # NOTE: mac doesn't run as root, so only manage permissions # if this script is running as root if [ "$(id -u)" = "0" ]; then diff --git a/docker/nginx.conf b/docker/nginx.conf index 020bc99a..406d587c 100644 --- a/docker/nginx.conf +++ b/docker/nginx.conf @@ -35,6 +35,13 @@ server { root /data; } + # Internal location for X-Accel-Redirect backup downloads + # Django handles auth, nginx serves the file directly + location /protected-backups/ { + internal; + alias /data/backups/; + } + location /api/logos/(?\d+)/cache/ { proxy_pass http://127.0.0.1:5656; proxy_cache logo_cache; diff --git a/docker/uwsgi.ini b/docker/uwsgi.ini index f8fe8ab7..d831adfc 100644 --- a/docker/uwsgi.ini +++ b/docker/uwsgi.ini @@ -21,6 +21,7 @@ module = dispatcharr.wsgi:application virtualenv = /dispatcharrpy master = true env = DJANGO_SETTINGS_MODULE=dispatcharr.settings +env = USE_NGINX_ACCEL=true socket = /app/uwsgi.sock chmod-socket = 777 vacuum = true diff --git a/frontend/src/api.js b/frontend/src/api.js index 7eda6a3f..64ce4d77 100644 --- a/frontend/src/api.js +++ b/frontend/src/api.js @@ -1349,6 +1349,183 @@ export default class API { } } + // Backup API (async with Celery task polling) + static async listBackups() { + try { + const response = await request(`${host}/api/backups/`); + return response || []; + } catch (e) { + errorNotification('Failed to load backups', e); + throw e; + } + } + + static async getBackupStatus(taskId, token = null) { + try { + let 
url = `${host}/api/backups/status/${taskId}/`; + if (token) { + url += `?token=${encodeURIComponent(token)}`; + } + const response = await request(url, { auth: !token }); + return response; + } catch (e) { + throw e; + } + } + + static async waitForBackupTask(taskId, onProgress, token = null) { + const pollInterval = 2000; // Poll every 2 seconds + const maxAttempts = 300; // Max 10 minutes (300 * 2s) + + for (let attempt = 0; attempt < maxAttempts; attempt++) { + try { + const status = await API.getBackupStatus(taskId, token); + + if (onProgress) { + onProgress(status); + } + + if (status.state === 'completed') { + return status.result; + } else if (status.state === 'failed') { + throw new Error(status.error || 'Task failed'); + } + } catch (e) { + throw e; + } + + // Wait before next poll + await new Promise((resolve) => setTimeout(resolve, pollInterval)); + } + + throw new Error('Task timed out'); + } + + static async createBackup(onProgress) { + try { + // Start the backup task + const response = await request(`${host}/api/backups/create/`, { + method: 'POST', + }); + + // Wait for the task to complete using token for auth + const result = await API.waitForBackupTask(response.task_id, onProgress, response.task_token); + return result; + } catch (e) { + errorNotification('Failed to create backup', e); + throw e; + } + } + + static async uploadBackup(file) { + try { + const formData = new FormData(); + formData.append('file', file); + + const response = await request( + `${host}/api/backups/upload/`, + { + method: 'POST', + body: formData, + } + ); + return response; + } catch (e) { + errorNotification('Failed to upload backup', e); + throw e; + } + } + + static async deleteBackup(filename) { + try { + const encodedFilename = encodeURIComponent(filename); + await request(`${host}/api/backups/${encodedFilename}/delete/`, { + method: 'DELETE', + }); + } catch (e) { + errorNotification('Failed to delete backup', e); + throw e; + } + } + + static async getDownloadToken(filename) { + // Get a download token from the server + try { + const response = await request(`${host}/api/backups/${encodeURIComponent(filename)}/download-token/`); + return response.token; + } catch (e) { + throw e; + } + } + + static async downloadBackup(filename) { + try { + // Get a download token first (requires auth) + const token = await API.getDownloadToken(filename); + const encodedFilename = encodeURIComponent(filename); + + // Build the download URL with token + const downloadUrl = `${host}/api/backups/${encodedFilename}/download/?token=${encodeURIComponent(token)}`; + + // Use direct browser navigation instead of fetch to avoid CORS issues + const link = document.createElement('a'); + link.href = downloadUrl; + link.download = filename; + document.body.appendChild(link); + link.click(); + document.body.removeChild(link); + + return { filename }; + } catch (e) { + errorNotification('Failed to download backup', e); + throw e; + } + } + + static async restoreBackup(filename, onProgress) { + try { + // Start the restore task + const encodedFilename = encodeURIComponent(filename); + const response = await request( + `${host}/api/backups/${encodedFilename}/restore/`, + { + method: 'POST', + } + ); + + // Wait for the task to complete using token for auth + // Token-based auth allows status polling even after DB restore invalidates user sessions + const result = await API.waitForBackupTask(response.task_id, onProgress, response.task_token); + return result; + } catch (e) { + errorNotification('Failed to restore backup', 
e); + throw e; + } + } + + static async getBackupSchedule() { + try { + const response = await request(`${host}/api/backups/schedule/`); + return response; + } catch (e) { + errorNotification('Failed to get backup schedule', e); + throw e; + } + } + + static async updateBackupSchedule(settings) { + try { + const response = await request(`${host}/api/backups/schedule/update/`, { + method: 'PUT', + body: settings, + }); + return response; + } catch (e) { + errorNotification('Failed to update backup schedule', e); + throw e; + } + } + static async getVersion() { try { const response = await request(`${host}/api/core/version/`, { @@ -1514,6 +1691,19 @@ export default class API { } } + static async stopVODClient(clientId) { + try { + const response = await request(`${host}/proxy/vod/stop_client/`, { + method: 'POST', + body: { client_id: clientId }, + }); + + return response; + } catch (e) { + errorNotification('Failed to stop VOD client', e); + } + } + static async stopChannel(id) { try { const response = await request(`${host}/proxy/ts/stop/${id}`, { @@ -2131,7 +2321,8 @@ export default class API { static async deleteSeriesRule(tvgId) { try { - await request(`${host}/api/channels/series-rules/${tvgId}/`, { method: 'DELETE' }); + const encodedTvgId = encodeURIComponent(tvgId); + await request(`${host}/api/channels/series-rules/${encodedTvgId}/`, { method: 'DELETE' }); notifications.show({ title: 'Series rule removed' }); } catch (e) { errorNotification('Failed to remove series rule', e); diff --git a/frontend/src/components/FloatingVideo.jsx b/frontend/src/components/FloatingVideo.jsx index 6aaeecda..611d7e2a 100644 --- a/frontend/src/components/FloatingVideo.jsx +++ b/frontend/src/components/FloatingVideo.jsx @@ -1,5 +1,5 @@ // frontend/src/components/FloatingVideo.js -import React, { useEffect, useRef, useState } from 'react'; +import React, { useCallback, useEffect, useRef, useState } from 'react'; import Draggable from 'react-draggable'; import useVideoStore from '../store/useVideoStore'; import mpegts from 'mpegts.js'; @@ -17,7 +17,19 @@ export default function FloatingVideo() { const [isLoading, setIsLoading] = useState(false); const [loadError, setLoadError] = useState(null); const [showOverlay, setShowOverlay] = useState(true); + const [videoSize, setVideoSize] = useState({ width: 320, height: 180 }); + const [isResizing, setIsResizing] = useState(false); + const resizeStateRef = useRef(null); const overlayTimeoutRef = useRef(null); + const aspectRatioRef = useRef(320 / 180); + const [dragPosition, setDragPosition] = useState(null); + const dragPositionRef = useRef(null); + const dragOffsetRef = useRef({ x: 0, y: 0 }); + const initialPositionRef = useRef(null); + + const MIN_WIDTH = 220; + const MIN_HEIGHT = 124; + const VISIBLE_MARGIN = 48; // keep part of the window visible when dragging // Safely destroy the mpegts player to prevent errors const safeDestroyPlayer = () => { @@ -315,24 +327,223 @@ export default function FloatingVideo() { }, 50); }; + const clampToVisible = useCallback( + (x, y) => { + if (typeof window === 'undefined') return { x, y }; + + const minX = -(videoSize.width - VISIBLE_MARGIN); + const minY = -(videoSize.height - VISIBLE_MARGIN); + const maxX = window.innerWidth - VISIBLE_MARGIN; + const maxY = window.innerHeight - VISIBLE_MARGIN; + + return { + x: Math.min(Math.max(x, minX), maxX), + y: Math.min(Math.max(y, minY), maxY), + }; + }, + [VISIBLE_MARGIN, videoSize.height, videoSize.width] + ); + + const handleResizeMove = useCallback( + (event) => { + if 
(!resizeStateRef.current) return; + + const clientX = + event.touches && event.touches.length ? event.touches[0].clientX : event.clientX; + const clientY = + event.touches && event.touches.length ? event.touches[0].clientY : event.clientY; + + const deltaX = clientX - resizeStateRef.current.startX; + const deltaY = clientY - resizeStateRef.current.startY; + const aspectRatio = resizeStateRef.current.aspectRatio || aspectRatioRef.current; + + // Derive width/height while keeping the original aspect ratio + let nextWidth = resizeStateRef.current.startWidth + deltaX; + let nextHeight = nextWidth / aspectRatio; + + // Allow vertical-driven resize if the user drags mostly vertically + if (Math.abs(deltaY) > Math.abs(deltaX)) { + nextHeight = resizeStateRef.current.startHeight + deltaY; + nextWidth = nextHeight * aspectRatio; + } + + // Respect minimums while keeping the ratio + if (nextWidth < MIN_WIDTH) { + nextWidth = MIN_WIDTH; + nextHeight = nextWidth / aspectRatio; + } + + if (nextHeight < MIN_HEIGHT) { + nextHeight = MIN_HEIGHT; + nextWidth = nextHeight * aspectRatio; + } + + // Keep within viewport with a margin based on current position + const posX = dragPositionRef.current?.x ?? 0; + const posY = dragPositionRef.current?.y ?? 0; + const margin = VISIBLE_MARGIN; + + const maxWidth = Math.max(MIN_WIDTH, window.innerWidth - posX - margin); + const maxHeight = Math.max(MIN_HEIGHT, window.innerHeight - posY - margin); + + if (nextWidth > maxWidth) { + nextWidth = maxWidth; + nextHeight = nextWidth / aspectRatio; + } + + if (nextHeight > maxHeight) { + nextHeight = maxHeight; + nextWidth = nextHeight * aspectRatio; + } + + // Final pass to honor both bounds while keeping the ratio + if (nextWidth > maxWidth) { + nextWidth = maxWidth; + nextHeight = nextWidth / aspectRatio; + } + + setVideoSize({ + width: Math.round(nextWidth), + height: Math.round(nextHeight), + }); + }, + [MIN_HEIGHT, MIN_WIDTH, VISIBLE_MARGIN] + ); + + const endResize = useCallback(() => { + setIsResizing(false); + resizeStateRef.current = null; + window.removeEventListener('mousemove', handleResizeMove); + window.removeEventListener('mouseup', endResize); + window.removeEventListener('touchmove', handleResizeMove); + window.removeEventListener('touchend', endResize); + }, [handleResizeMove]); + + const startResize = (event) => { + event.stopPropagation(); + event.preventDefault(); + + const clientX = + event.touches && event.touches.length ? event.touches[0].clientX : event.clientX; + const clientY = + event.touches && event.touches.length ? event.touches[0].clientY : event.clientY; + + const aspectRatio = + videoSize.height > 0 ? 
videoSize.width / videoSize.height : aspectRatioRef.current; + aspectRatioRef.current = aspectRatio; + + resizeStateRef.current = { + startX: clientX, + startY: clientY, + startWidth: videoSize.width, + startHeight: videoSize.height, + aspectRatio, + }; + + setIsResizing(true); + + window.addEventListener('mousemove', handleResizeMove); + window.addEventListener('mouseup', endResize); + window.addEventListener('touchmove', handleResizeMove); + window.addEventListener('touchend', endResize); + }; + + useEffect(() => { + return () => { + endResize(); + }; + }, [endResize]); + + useEffect(() => { + dragPositionRef.current = dragPosition; + }, [dragPosition]); + + // Initialize the floating window near bottom-right once + useEffect(() => { + if (initialPositionRef.current || typeof window === 'undefined') return; + + const initialX = Math.max(10, window.innerWidth - videoSize.width - 20); + const initialY = Math.max(10, window.innerHeight - videoSize.height - 20); + const pos = clampToVisible(initialX, initialY); + + initialPositionRef.current = pos; + setDragPosition(pos); + dragPositionRef.current = pos; + }, [clampToVisible, videoSize.height, videoSize.width]); + + const handleDragStart = useCallback( + (event, data) => { + const clientX = event.touches?.[0]?.clientX ?? event.clientX; + const clientY = event.touches?.[0]?.clientY ?? event.clientY; + const rect = videoContainerRef.current?.getBoundingClientRect(); + + if (clientX != null && clientY != null && rect) { + dragOffsetRef.current = { + x: clientX - rect.left, + y: clientY - rect.top, + }; + } else { + dragOffsetRef.current = { x: 0, y: 0 }; + } + + const clamped = clampToVisible(data?.x ?? 0, data?.y ?? 0); + setDragPosition(clamped); + dragPositionRef.current = clamped; + }, + [clampToVisible] + ); + + const handleDrag = useCallback( + (event) => { + const clientX = event.touches?.[0]?.clientX ?? event.clientX; + const clientY = event.touches?.[0]?.clientY ?? event.clientY; + if (clientX == null || clientY == null) return; + + const nextX = clientX - (dragOffsetRef.current?.x ?? 0); + const nextY = clientY - (dragOffsetRef.current?.y ?? 0); + const clamped = clampToVisible(nextX, nextY); + setDragPosition(clamped); + dragPositionRef.current = clamped; + }, + [clampToVisible] + ); + + const handleDragStop = useCallback( + (_, data) => { + const clamped = clampToVisible(data?.x ?? 0, data?.y ?? 0); + setDragPosition(clamped); + dragPositionRef.current = clamped; + }, + [clampToVisible] + ); + // If the floating video is hidden or no URL is selected, do not render if (!isVisible || !streamUrl) { return null; } return ( - +
@@ -378,10 +589,12 @@ export default function FloatingVideo() {