Merge branch 'dev' into Media-Server

Dispatcharr 2025-12-16 20:35:55 -06:00
commit 2456999052
24 changed files with 3496 additions and 56 deletions


@@ -31,3 +31,4 @@
 LICENSE
 README.md
 data/
+docker/data/


@@ -7,10 +7,22 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
## [Unreleased]
### Added
- Automated configuration backup/restore system with scheduled backups, retention policies, and async task processing - Thanks [@stlalpha](https://github.com/stlalpha) (Closes #153)
### Changed
- Removed unreachable code path in m3u output - Thanks [@DawtCom](https://github.com/DawtCom)
### Fixed
- VOD episode processing now correctly handles duplicate episodes (same episode in multiple languages/qualities) by reusing Episode records across multiple M3UEpisodeRelation entries instead of attempting to create duplicates (Fixes #556)
- XtreamCodes series streaming endpoint now correctly handles episodes with multiple streams (different languages/qualities) by selecting the best available stream based on account priority (Fixes #569)
- XtreamCodes series info API now returns unique episodes instead of duplicate entries when multiple streams exist for the same episode (different languages/qualities)
- nginx now gracefully handles hosts without IPv6 support by automatically disabling IPv6 binding at startup (Fixes #744)
- XtreamCodes EPG API now returns correct date/time format for start/end fields and proper string types for timestamps and channel_id
- XtreamCodes EPG API now handles None values for title and description fields to prevent AttributeError
## [0.14.0] - 2025-12-09


@@ -28,6 +28,7 @@ urlpatterns = [
     path('plugins/', include(('apps.plugins.api_urls', 'plugins'), namespace='plugins')),
     path('vod/', include(('apps.vod.api_urls', 'vod'), namespace='vod')),
     path('media/', include(('apps.media_library.api_urls', 'media'), namespace='media')),
+    path('backups/', include(('apps.backups.api_urls', 'backups'), namespace='backups')),
     # path('output/', include(('apps.output.api_urls', 'output'), namespace='output')),
     #path('player/', include(('apps.player.api_urls', 'player'), namespace='player')),
     #path('settings/', include(('apps.settings.api_urls', 'settings'), namespace='settings')),

apps/backups/__init__.py (new file, empty)

apps/backups/api_urls.py (new file, 18 lines)

@@ -0,0 +1,18 @@
from django.urls import path
from . import api_views
app_name = "backups"
urlpatterns = [
path("", api_views.list_backups, name="backup-list"),
path("create/", api_views.create_backup, name="backup-create"),
path("upload/", api_views.upload_backup, name="backup-upload"),
path("schedule/", api_views.get_schedule, name="backup-schedule-get"),
path("schedule/update/", api_views.update_schedule, name="backup-schedule-update"),
path("status/<str:task_id>/", api_views.backup_status, name="backup-status"),
path("<str:filename>/download-token/", api_views.get_download_token, name="backup-download-token"),
path("<str:filename>/download/", api_views.download_backup, name="backup-download"),
path("<str:filename>/delete/", api_views.delete_backup, name="backup-delete"),
path("<str:filename>/restore/", api_views.restore_backup, name="backup-restore"),
]
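These routes cover the whole backup lifecycle: list, create, upload, schedule management, status polling, and token-gated download/restore. A minimal client sketch, assuming the `requests` package, a server at `http://localhost:5656`, and a `Bearer` admin token (all three are assumptions, not part of the commit), that starts a backup and polls its status with the returned task token:

import time
import requests

BASE = "http://localhost:5656/api/backups"         # assumed host/port
HEADERS = {"Authorization": "Bearer <admin-jwt>"}  # hypothetical auth header

# POST /create/ returns 202 with a Celery task id plus a signed task_token
# that lets the client poll status without session auth.
resp = requests.post(f"{BASE}/create/", headers=HEADERS)
resp.raise_for_status()
task = resp.json()

# GET /status/<task_id>/?token=... until the task settles.
while True:
    state = requests.get(
        f"{BASE}/status/{task['task_id']}/",
        params={"token": task["task_token"]},
    ).json()
    if state["state"] in ("completed", "failed"):
        break
    time.sleep(2)
print(state)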

apps/backups/api_views.py (new file, 364 lines)

@@ -0,0 +1,364 @@
import hashlib
import hmac
import logging
import os
from pathlib import Path
from celery.result import AsyncResult
from django.conf import settings
from django.http import HttpResponse, StreamingHttpResponse, Http404
from rest_framework import status
from rest_framework.decorators import api_view, permission_classes, parser_classes
from rest_framework.permissions import IsAdminUser, AllowAny
from rest_framework.parsers import MultiPartParser, FormParser
from rest_framework.response import Response
from . import services
from .tasks import create_backup_task, restore_backup_task
from .scheduler import get_schedule_settings, update_schedule_settings
logger = logging.getLogger(__name__)
def _generate_task_token(task_id: str) -> str:
"""Generate a signed token for task status access without auth."""
secret = settings.SECRET_KEY.encode()
return hmac.new(secret, task_id.encode(), hashlib.sha256).hexdigest()[:32]
def _verify_task_token(task_id: str, token: str) -> bool:
"""Verify a task token is valid."""
expected = _generate_task_token(task_id)
return hmac.compare_digest(expected, token)
@api_view(["GET"])
@permission_classes([IsAdminUser])
def list_backups(request):
"""List all available backup files."""
try:
backups = services.list_backups()
return Response(backups, status=status.HTTP_200_OK)
except Exception as e:
return Response(
{"detail": f"Failed to list backups: {str(e)}"},
status=status.HTTP_500_INTERNAL_SERVER_ERROR,
)
@api_view(["POST"])
@permission_classes([IsAdminUser])
def create_backup(request):
"""Create a new backup (async via Celery)."""
try:
task = create_backup_task.delay()
return Response(
{
"detail": "Backup started",
"task_id": task.id,
"task_token": _generate_task_token(task.id),
},
status=status.HTTP_202_ACCEPTED,
)
except Exception as e:
return Response(
{"detail": f"Failed to start backup: {str(e)}"},
status=status.HTTP_500_INTERNAL_SERVER_ERROR,
)
@api_view(["GET"])
@permission_classes([AllowAny])
def backup_status(request, task_id):
"""Check the status of a backup/restore task.
Requires either:
- Valid admin authentication, OR
- Valid task_token query parameter
"""
# Check for token-based auth (for restore when session is invalidated)
token = request.query_params.get("token")
if token:
if not _verify_task_token(task_id, token):
return Response(
{"detail": "Invalid task token"},
status=status.HTTP_403_FORBIDDEN,
)
else:
# Fall back to admin auth check
if not request.user.is_authenticated or not request.user.is_staff:
return Response(
{"detail": "Authentication required"},
status=status.HTTP_401_UNAUTHORIZED,
)
try:
result = AsyncResult(task_id)
if result.ready():
task_result = result.get()
if task_result.get("status") == "completed":
return Response({
"state": "completed",
"result": task_result,
})
else:
return Response({
"state": "failed",
"error": task_result.get("error", "Unknown error"),
})
elif result.failed():
return Response({
"state": "failed",
"error": str(result.result),
})
else:
return Response({
"state": result.state.lower(),
})
except Exception as e:
return Response(
{"detail": f"Failed to get task status: {str(e)}"},
status=status.HTTP_500_INTERNAL_SERVER_ERROR,
)
@api_view(["GET"])
@permission_classes([IsAdminUser])
def get_download_token(request, filename):
"""Get a signed token for downloading a backup file."""
try:
# Security: prevent path traversal
if ".." in filename or "/" in filename or "\\" in filename:
raise Http404("Invalid filename")
backup_dir = services.get_backup_dir()
backup_file = backup_dir / filename
if not backup_file.exists():
raise Http404("Backup file not found")
token = _generate_task_token(filename)
return Response({"token": token})
except Http404:
raise
except Exception as e:
return Response(
{"detail": f"Failed to generate token: {str(e)}"},
status=status.HTTP_500_INTERNAL_SERVER_ERROR,
)
@api_view(["GET"])
@permission_classes([AllowAny])
def download_backup(request, filename):
"""Download a backup file.
Requires either:
- Valid admin authentication, OR
- Valid download_token query parameter
"""
# Check for token-based auth (avoids CORS preflight issues)
token = request.query_params.get("token")
if token:
if not _verify_task_token(filename, token):
return Response(
{"detail": "Invalid download token"},
status=status.HTTP_403_FORBIDDEN,
)
else:
# Fall back to admin auth check
if not request.user.is_authenticated or not request.user.is_staff:
return Response(
{"detail": "Authentication required"},
status=status.HTTP_401_UNAUTHORIZED,
)
try:
# Security: prevent path traversal by checking for suspicious characters
if ".." in filename or "/" in filename or "\\" in filename:
raise Http404("Invalid filename")
backup_dir = services.get_backup_dir()
backup_file = (backup_dir / filename).resolve()
# Security: ensure the resolved path is still within backup_dir
if not str(backup_file).startswith(str(backup_dir.resolve())):
raise Http404("Invalid filename")
if not backup_file.exists() or not backup_file.is_file():
raise Http404("Backup file not found")
file_size = backup_file.stat().st_size
# Use X-Accel-Redirect for nginx (AIO container) - nginx serves file directly
# Fall back to streaming for non-nginx deployments
use_nginx_accel = os.environ.get("USE_NGINX_ACCEL", "").lower() == "true"
logger.info(f"[DOWNLOAD] File: {filename}, Size: {file_size}, USE_NGINX_ACCEL: {use_nginx_accel}")
if use_nginx_accel:
# X-Accel-Redirect: Django returns immediately, nginx serves file
logger.info(f"[DOWNLOAD] Using X-Accel-Redirect: /protected-backups/{filename}")
response = HttpResponse()
response["X-Accel-Redirect"] = f"/protected-backups/{filename}"
response["Content-Type"] = "application/zip"
response["Content-Length"] = file_size
response["Content-Disposition"] = f'attachment; filename="{filename}"'
return response
else:
# Streaming fallback for non-nginx deployments
logger.info(f"[DOWNLOAD] Using streaming fallback (no nginx)")
def file_iterator(file_path, chunk_size=2 * 1024 * 1024):
with open(file_path, "rb") as f:
while chunk := f.read(chunk_size):
yield chunk
response = StreamingHttpResponse(
file_iterator(backup_file),
content_type="application/zip",
)
response["Content-Length"] = file_size
response["Content-Disposition"] = f'attachment; filename="{filename}"'
return response
except Http404:
raise
except Exception as e:
return Response(
{"detail": f"Download failed: {str(e)}"},
status=status.HTTP_500_INTERNAL_SERVER_ERROR,
)
@api_view(["DELETE"])
@permission_classes([IsAdminUser])
def delete_backup(request, filename):
"""Delete a backup file."""
try:
# Security: prevent path traversal
if ".." in filename or "/" in filename or "\\" in filename:
raise Http404("Invalid filename")
services.delete_backup(filename)
return Response(
{"detail": "Backup deleted successfully"},
status=status.HTTP_204_NO_CONTENT,
)
except FileNotFoundError:
raise Http404("Backup file not found")
except Exception as e:
return Response(
{"detail": f"Delete failed: {str(e)}"},
status=status.HTTP_500_INTERNAL_SERVER_ERROR,
)
@api_view(["POST"])
@permission_classes([IsAdminUser])
@parser_classes([MultiPartParser, FormParser])
def upload_backup(request):
"""Upload a backup file for restoration."""
uploaded = request.FILES.get("file")
if not uploaded:
return Response(
{"detail": "No file uploaded"},
status=status.HTTP_400_BAD_REQUEST,
)
try:
backup_dir = services.get_backup_dir()
filename = uploaded.name or "uploaded-backup.zip"
# Ensure unique filename
backup_file = backup_dir / filename
counter = 1
while backup_file.exists():
name_parts = filename.rsplit(".", 1)
if len(name_parts) == 2:
backup_file = backup_dir / f"{name_parts[0]}-{counter}.{name_parts[1]}"
else:
backup_file = backup_dir / f"{filename}-{counter}"
counter += 1
# Save uploaded file
with backup_file.open("wb") as f:
for chunk in uploaded.chunks():
f.write(chunk)
return Response(
{
"detail": "Backup uploaded successfully",
"filename": backup_file.name,
},
status=status.HTTP_201_CREATED,
)
except Exception as e:
return Response(
{"detail": f"Upload failed: {str(e)}"},
status=status.HTTP_500_INTERNAL_SERVER_ERROR,
)
@api_view(["POST"])
@permission_classes([IsAdminUser])
def restore_backup(request, filename):
"""Restore from a backup file (async via Celery). WARNING: This will flush the database!"""
try:
# Security: prevent path traversal
if ".." in filename or "/" in filename or "\\" in filename:
raise Http404("Invalid filename")
backup_dir = services.get_backup_dir()
backup_file = backup_dir / filename
if not backup_file.exists():
raise Http404("Backup file not found")
task = restore_backup_task.delay(filename)
return Response(
{
"detail": "Restore started",
"task_id": task.id,
"task_token": _generate_task_token(task.id),
},
status=status.HTTP_202_ACCEPTED,
)
except Http404:
raise
except Exception as e:
return Response(
{"detail": f"Failed to start restore: {str(e)}"},
status=status.HTTP_500_INTERNAL_SERVER_ERROR,
)
@api_view(["GET"])
@permission_classes([IsAdminUser])
def get_schedule(request):
"""Get backup schedule settings."""
try:
settings = get_schedule_settings()
return Response(settings)
except Exception as e:
return Response(
{"detail": f"Failed to get schedule: {str(e)}"},
status=status.HTTP_500_INTERNAL_SERVER_ERROR,
)
@api_view(["PUT"])
@permission_classes([IsAdminUser])
def update_schedule(request):
"""Update backup schedule settings."""
try:
settings = update_schedule_settings(request.data)
return Response(settings)
except ValueError as e:
return Response(
{"detail": str(e)},
status=status.HTTP_400_BAD_REQUEST,
)
except Exception as e:
return Response(
{"detail": f"Failed to update schedule: {str(e)}"},
status=status.HTTP_500_INTERNAL_SERVER_ERROR,
)
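The task and download tokens above are nothing more than a truncated HMAC-SHA256 of the identifier under the server secret. A standalone sketch of the same round trip, with Django's settings.SECRET_KEY replaced by a local constant for illustration:

import hashlib
import hmac

SECRET_KEY = b"example-secret"  # stands in for settings.SECRET_KEY

def generate_task_token(task_id: str) -> str:
    # First 32 hex characters of HMAC-SHA256(secret, task_id)
    return hmac.new(SECRET_KEY, task_id.encode(), hashlib.sha256).hexdigest()[:32]

def verify_task_token(task_id: str, token: str) -> bool:
    # Constant-time comparison, as in the views above
    return hmac.compare_digest(generate_task_token(task_id), token)

token = generate_task_token("c0ffee-task-id")
assert verify_task_token("c0ffee-task-id", token)
assert not verify_task_token("another-task-id", token)

Because the token is derived only from the identifier and the server secret, it survives a database restore that invalidates user sessions, which is why restore polling relies on it.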

apps/backups/apps.py (new file, 7 lines)

@@ -0,0 +1,7 @@
from django.apps import AppConfig
class BackupsConfig(AppConfig):
default_auto_field = "django.db.models.BigAutoField"
name = "apps.backups"
verbose_name = "Backups"


apps/backups/models.py (new file, empty)

apps/backups/scheduler.py (new file, 203 lines)

@@ -0,0 +1,203 @@
import json
import logging
from django_celery_beat.models import PeriodicTask, CrontabSchedule
from core.models import CoreSettings
logger = logging.getLogger(__name__)
BACKUP_SCHEDULE_TASK_NAME = "backup-scheduled-task"
SETTING_KEYS = {
"enabled": "backup_schedule_enabled",
"frequency": "backup_schedule_frequency",
"time": "backup_schedule_time",
"day_of_week": "backup_schedule_day_of_week",
"retention_count": "backup_retention_count",
"cron_expression": "backup_schedule_cron_expression",
}
DEFAULTS = {
"enabled": False,
"frequency": "daily",
"time": "03:00",
"day_of_week": 0, # Sunday
"retention_count": 0,
"cron_expression": "",
}
def _get_setting(key: str, default=None):
"""Get a backup setting from CoreSettings."""
try:
setting = CoreSettings.objects.get(key=SETTING_KEYS[key])
value = setting.value
if key == "enabled":
return value.lower() == "true"
elif key in ("day_of_week", "retention_count"):
return int(value)
return value
except CoreSettings.DoesNotExist:
return default if default is not None else DEFAULTS.get(key)
def _set_setting(key: str, value) -> None:
"""Set a backup setting in CoreSettings."""
str_value = str(value).lower() if isinstance(value, bool) else str(value)
CoreSettings.objects.update_or_create(
key=SETTING_KEYS[key],
defaults={
"name": f"Backup {key.replace('_', ' ').title()}",
"value": str_value,
},
)
def get_schedule_settings() -> dict:
"""Get all backup schedule settings."""
return {
"enabled": _get_setting("enabled"),
"frequency": _get_setting("frequency"),
"time": _get_setting("time"),
"day_of_week": _get_setting("day_of_week"),
"retention_count": _get_setting("retention_count"),
"cron_expression": _get_setting("cron_expression"),
}
def update_schedule_settings(data: dict) -> dict:
"""Update backup schedule settings and sync the PeriodicTask."""
# Validate
if "frequency" in data and data["frequency"] not in ("daily", "weekly"):
raise ValueError("frequency must be 'daily' or 'weekly'")
if "time" in data:
try:
hour, minute = data["time"].split(":")
int(hour)
int(minute)
except (ValueError, AttributeError):
raise ValueError("time must be in HH:MM format")
if "day_of_week" in data:
day = int(data["day_of_week"])
if day < 0 or day > 6:
raise ValueError("day_of_week must be 0-6 (Sunday-Saturday)")
if "retention_count" in data:
count = int(data["retention_count"])
if count < 0:
raise ValueError("retention_count must be >= 0")
# Update settings
for key in ("enabled", "frequency", "time", "day_of_week", "retention_count", "cron_expression"):
if key in data:
_set_setting(key, data[key])
# Sync the periodic task
_sync_periodic_task()
return get_schedule_settings()
def _sync_periodic_task() -> None:
"""Create, update, or delete the scheduled backup task based on settings."""
settings = get_schedule_settings()
if not settings["enabled"]:
# Delete the task if it exists
task = PeriodicTask.objects.filter(name=BACKUP_SCHEDULE_TASK_NAME).first()
if task:
old_crontab = task.crontab
task.delete()
_cleanup_orphaned_crontab(old_crontab)
logger.info("Backup schedule disabled, removed periodic task")
return
# Get old crontab before creating new one
old_crontab = None
try:
old_task = PeriodicTask.objects.get(name=BACKUP_SCHEDULE_TASK_NAME)
old_crontab = old_task.crontab
except PeriodicTask.DoesNotExist:
pass
# Check if using cron expression (advanced mode)
if settings["cron_expression"]:
# Parse cron expression: "minute hour day month weekday"
try:
parts = settings["cron_expression"].split()
if len(parts) != 5:
raise ValueError("Cron expression must have 5 parts: minute hour day month weekday")
minute, hour, day_of_month, month_of_year, day_of_week = parts
crontab, _ = CrontabSchedule.objects.get_or_create(
minute=minute,
hour=hour,
day_of_week=day_of_week,
day_of_month=day_of_month,
month_of_year=month_of_year,
timezone=CoreSettings.get_system_time_zone(),
)
except Exception as e:
logger.error(f"Invalid cron expression '{settings['cron_expression']}': {e}")
raise ValueError(f"Invalid cron expression: {e}")
else:
# Use simple frequency-based scheduling
# Parse time
hour, minute = settings["time"].split(":")
# Build crontab based on frequency
system_tz = CoreSettings.get_system_time_zone()
if settings["frequency"] == "daily":
crontab, _ = CrontabSchedule.objects.get_or_create(
minute=minute,
hour=hour,
day_of_week="*",
day_of_month="*",
month_of_year="*",
timezone=system_tz,
)
else: # weekly
crontab, _ = CrontabSchedule.objects.get_or_create(
minute=minute,
hour=hour,
day_of_week=str(settings["day_of_week"]),
day_of_month="*",
month_of_year="*",
timezone=system_tz,
)
# Create or update the periodic task
task, created = PeriodicTask.objects.update_or_create(
name=BACKUP_SCHEDULE_TASK_NAME,
defaults={
"task": "apps.backups.tasks.scheduled_backup_task",
"crontab": crontab,
"enabled": True,
"kwargs": json.dumps({"retention_count": settings["retention_count"]}),
},
)
# Clean up old crontab if it changed and is orphaned
if old_crontab and old_crontab.id != crontab.id:
_cleanup_orphaned_crontab(old_crontab)
action = "Created" if created else "Updated"
logger.info(f"{action} backup schedule: {settings['frequency']} at {settings['time']}")
def _cleanup_orphaned_crontab(crontab_schedule):
"""Delete old CrontabSchedule if no other tasks are using it."""
if crontab_schedule is None:
return
# Check if any other tasks are using this crontab
if PeriodicTask.objects.filter(crontab=crontab_schedule).exists():
logger.debug(f"CrontabSchedule {crontab_schedule.id} still in use, not deleting")
return
logger.debug(f"Cleaning up orphaned CrontabSchedule: {crontab_schedule.id}")
crontab_schedule.delete()
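In simple mode the module maps frequency/time/day onto crontab fields; in advanced mode the five fields come straight from the expression. A sketch of that mapping for a weekly run at 03:30 on Monday (illustrative values only, no live database call):

# Simple mode: {"frequency": "weekly", "time": "03:30", "day_of_week": 1}
time_setting = "03:30"
hour, minute = time_setting.split(":")

crontab_fields = {
    "minute": minute,        # "30"
    "hour": hour,            # "03"
    "day_of_week": "1",      # Monday (0 = Sunday, per DEFAULTS)
    "day_of_month": "*",
    "month_of_year": "*",
}

# Advanced mode: the same schedule as a raw cron expression.
minute, hour, day_of_month, month_of_year, day_of_week = "30 3 * * 1".split()
assert (minute, hour, day_of_week) == ("30", "3", "1")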

apps/backups/services.py (new file, 320 lines)

@@ -0,0 +1,320 @@
import datetime
import json
import os
import shutil
import subprocess
import tempfile
from pathlib import Path
from zipfile import ZipFile, ZIP_DEFLATED
import logging
import pytz
from django.conf import settings
from core.models import CoreSettings
logger = logging.getLogger(__name__)
def get_backup_dir() -> Path:
"""Get the backup directory, creating it if necessary."""
backup_dir = Path(settings.BACKUP_ROOT)
backup_dir.mkdir(parents=True, exist_ok=True)
return backup_dir
def _is_postgresql() -> bool:
"""Check if we're using PostgreSQL."""
return settings.DATABASES["default"]["ENGINE"] == "django.db.backends.postgresql"
def _get_pg_env() -> dict:
"""Get environment variables for PostgreSQL commands."""
db_config = settings.DATABASES["default"]
env = os.environ.copy()
env["PGPASSWORD"] = db_config.get("PASSWORD", "")
return env
def _get_pg_args() -> list[str]:
"""Get common PostgreSQL command arguments."""
db_config = settings.DATABASES["default"]
return [
"-h", db_config.get("HOST", "localhost"),
"-p", str(db_config.get("PORT", 5432)),
"-U", db_config.get("USER", "postgres"),
"-d", db_config.get("NAME", "dispatcharr"),
]
def _dump_postgresql(output_file: Path) -> None:
"""Dump PostgreSQL database using pg_dump."""
logger.info("Dumping PostgreSQL database with pg_dump...")
cmd = [
"pg_dump",
*_get_pg_args(),
"-Fc", # Custom format for pg_restore
"-v", # Verbose
"-f", str(output_file),
]
result = subprocess.run(
cmd,
env=_get_pg_env(),
capture_output=True,
text=True,
)
if result.returncode != 0:
logger.error(f"pg_dump failed: {result.stderr}")
raise RuntimeError(f"pg_dump failed: {result.stderr}")
logger.debug(f"pg_dump output: {result.stderr}")
def _restore_postgresql(dump_file: Path) -> None:
"""Restore PostgreSQL database using pg_restore."""
logger.info("[PG_RESTORE] Starting pg_restore...")
logger.info(f"[PG_RESTORE] Dump file: {dump_file}")
pg_args = _get_pg_args()
logger.info(f"[PG_RESTORE] Connection args: {pg_args}")
cmd = [
"pg_restore",
"--clean", # Clean (drop) database objects before recreating
*pg_args,
"-v", # Verbose
str(dump_file),
]
logger.info(f"[PG_RESTORE] Running command: {' '.join(cmd)}")
result = subprocess.run(
cmd,
env=_get_pg_env(),
capture_output=True,
text=True,
)
logger.info(f"[PG_RESTORE] Return code: {result.returncode}")
# pg_restore may return non-zero even on partial success
# Check for actual errors vs warnings
if result.returncode != 0:
# Some errors during restore are expected (e.g., "does not exist" when cleaning)
# Only fail on critical errors
stderr = result.stderr.lower()
if "fatal" in stderr or "could not connect" in stderr:
logger.error(f"[PG_RESTORE] Failed critically: {result.stderr}")
raise RuntimeError(f"pg_restore failed: {result.stderr}")
else:
logger.warning(f"[PG_RESTORE] Completed with warnings: {result.stderr[:500]}...")
logger.info("[PG_RESTORE] Completed successfully")
def _dump_sqlite(output_file: Path) -> None:
"""Dump SQLite database using sqlite3 .backup command."""
logger.info("Dumping SQLite database with sqlite3 .backup...")
db_path = Path(settings.DATABASES["default"]["NAME"])
if not db_path.exists():
raise FileNotFoundError(f"SQLite database not found: {db_path}")
# Use sqlite3 .backup command via stdin for reliable execution
result = subprocess.run(
["sqlite3", str(db_path)],
input=f".backup '{output_file}'\n",
capture_output=True,
text=True,
)
if result.returncode != 0:
logger.error(f"sqlite3 backup failed: {result.stderr}")
raise RuntimeError(f"sqlite3 backup failed: {result.stderr}")
# Verify the backup file was created
if not output_file.exists():
raise RuntimeError("sqlite3 backup failed: output file not created")
logger.info(f"sqlite3 backup completed successfully: {output_file}")
def _restore_sqlite(dump_file: Path) -> None:
"""Restore SQLite database by replacing the database file."""
logger.info("Restoring SQLite database...")
db_path = Path(settings.DATABASES["default"]["NAME"])
backup_current = None
# Backup current database before overwriting
if db_path.exists():
backup_current = db_path.with_suffix(".db.bak")
shutil.copy2(db_path, backup_current)
logger.info(f"Backed up current database to {backup_current}")
# Ensure parent directory exists
db_path.parent.mkdir(parents=True, exist_ok=True)
# The backup file from _dump_sqlite is a complete SQLite database file
# We can simply copy it over the existing database
shutil.copy2(dump_file, db_path)
# Verify the restore worked by checking if sqlite3 can read it
result = subprocess.run(
["sqlite3", str(db_path)],
input=".tables\n",
capture_output=True,
text=True,
)
if result.returncode != 0:
logger.error(f"sqlite3 verification failed: {result.stderr}")
# Try to restore from backup
if backup_current and backup_current.exists():
shutil.copy2(backup_current, db_path)
logger.info("Restored original database from backup")
raise RuntimeError(f"sqlite3 restore verification failed: {result.stderr}")
logger.info("sqlite3 restore completed successfully")
def create_backup() -> Path:
"""
Create a backup archive containing database dump and data directories.
Returns the path to the created backup file.
"""
backup_dir = get_backup_dir()
# Use system timezone for filename (user-friendly), but keep internal timestamps as UTC
system_tz_name = CoreSettings.get_system_time_zone()
try:
system_tz = pytz.timezone(system_tz_name)
now_local = datetime.datetime.now(datetime.UTC).astimezone(system_tz)
timestamp = now_local.strftime("%Y.%m.%d.%H.%M.%S")
except Exception as e:
logger.warning(f"Failed to use system timezone {system_tz_name}: {e}, falling back to UTC")
timestamp = datetime.datetime.now(datetime.UTC).strftime("%Y.%m.%d.%H.%M.%S")
backup_name = f"dispatcharr-backup-{timestamp}.zip"
backup_file = backup_dir / backup_name
logger.info(f"Creating backup: {backup_name}")
with tempfile.TemporaryDirectory(prefix="dispatcharr-backup-") as temp_dir:
temp_path = Path(temp_dir)
# Determine database type and dump accordingly
if _is_postgresql():
db_dump_file = temp_path / "database.dump"
_dump_postgresql(db_dump_file)
db_type = "postgresql"
else:
db_dump_file = temp_path / "database.sqlite3"
_dump_sqlite(db_dump_file)
db_type = "sqlite"
# Create ZIP archive with compression and ZIP64 support for large files
with ZipFile(backup_file, "w", compression=ZIP_DEFLATED, allowZip64=True) as zip_file:
# Add database dump
zip_file.write(db_dump_file, db_dump_file.name)
# Add metadata
metadata = {
"format": "dispatcharr-backup",
"version": 2,
"database_type": db_type,
"database_file": db_dump_file.name,
"created_at": datetime.datetime.now(datetime.UTC).isoformat(),
}
zip_file.writestr("metadata.json", json.dumps(metadata, indent=2))
logger.info(f"Backup created successfully: {backup_file}")
return backup_file
def restore_backup(backup_file: Path) -> None:
"""
Restore from a backup archive.
WARNING: This will overwrite the database!
"""
if not backup_file.exists():
raise FileNotFoundError(f"Backup file not found: {backup_file}")
logger.info(f"Restoring from backup: {backup_file}")
with tempfile.TemporaryDirectory(prefix="dispatcharr-restore-") as temp_dir:
temp_path = Path(temp_dir)
# Extract backup
logger.debug("Extracting backup archive...")
with ZipFile(backup_file, "r") as zip_file:
zip_file.extractall(temp_path)
# Read metadata
metadata_file = temp_path / "metadata.json"
if not metadata_file.exists():
raise ValueError("Invalid backup: missing metadata.json")
with open(metadata_file) as f:
metadata = json.load(f)
# Restore database
_restore_database(temp_path, metadata)
logger.info("Restore completed successfully")
def _restore_database(temp_path: Path, metadata: dict) -> None:
"""Restore database from backup."""
db_type = metadata.get("database_type", "postgresql")
db_file = metadata.get("database_file", "database.dump")
dump_file = temp_path / db_file
if not dump_file.exists():
raise ValueError(f"Invalid backup: missing {db_file}")
current_db_type = "postgresql" if _is_postgresql() else "sqlite"
if db_type != current_db_type:
raise ValueError(
f"Database type mismatch: backup is {db_type}, "
f"but current database is {current_db_type}"
)
if db_type == "postgresql":
_restore_postgresql(dump_file)
else:
_restore_sqlite(dump_file)
def list_backups() -> list[dict]:
"""List all available backup files with metadata."""
backup_dir = get_backup_dir()
backups = []
for backup_file in sorted(backup_dir.glob("dispatcharr-backup-*.zip"), reverse=True):
# Use UTC timezone so frontend can convert to user's local time
created_time = datetime.datetime.fromtimestamp(backup_file.stat().st_mtime, datetime.UTC)
backups.append({
"name": backup_file.name,
"size": backup_file.stat().st_size,
"created": created_time.isoformat(),
})
return backups
def delete_backup(filename: str) -> None:
"""Delete a backup file."""
backup_dir = get_backup_dir()
backup_file = backup_dir / filename
if not backup_file.exists():
raise FileNotFoundError(f"Backup file not found: {filename}")
if not backup_file.is_file():
raise ValueError(f"Invalid backup file: {filename}")
backup_file.unlink()
logger.info(f"Deleted backup: {filename}")

apps/backups/tasks.py (new file, 106 lines)

@@ -0,0 +1,106 @@
import logging
import traceback
from celery import shared_task
from . import services
logger = logging.getLogger(__name__)
def _cleanup_old_backups(retention_count: int) -> int:
"""Delete old backups, keeping only the most recent N. Returns count deleted."""
if retention_count <= 0:
return 0
backups = services.list_backups()
if len(backups) <= retention_count:
return 0
# Backups are sorted newest first, so delete from the end
to_delete = backups[retention_count:]
deleted = 0
for backup in to_delete:
try:
services.delete_backup(backup["name"])
deleted += 1
logger.info(f"[CLEANUP] Deleted old backup: {backup['name']}")
except Exception as e:
logger.error(f"[CLEANUP] Failed to delete {backup['name']}: {e}")
return deleted
@shared_task(bind=True)
def create_backup_task(self):
"""Celery task to create a backup asynchronously."""
try:
logger.info(f"[BACKUP] Starting backup task {self.request.id}")
backup_file = services.create_backup()
logger.info(f"[BACKUP] Task {self.request.id} completed: {backup_file.name}")
return {
"status": "completed",
"filename": backup_file.name,
"size": backup_file.stat().st_size,
}
except Exception as e:
logger.error(f"[BACKUP] Task {self.request.id} failed: {str(e)}")
logger.error(f"[BACKUP] Traceback: {traceback.format_exc()}")
return {
"status": "failed",
"error": str(e),
}
@shared_task(bind=True)
def restore_backup_task(self, filename: str):
"""Celery task to restore a backup asynchronously."""
try:
logger.info(f"[RESTORE] Starting restore task {self.request.id} for {filename}")
backup_dir = services.get_backup_dir()
backup_file = backup_dir / filename
logger.info(f"[RESTORE] Backup file path: {backup_file}")
services.restore_backup(backup_file)
logger.info(f"[RESTORE] Task {self.request.id} completed successfully")
return {
"status": "completed",
"filename": filename,
}
except Exception as e:
logger.error(f"[RESTORE] Task {self.request.id} failed: {str(e)}")
logger.error(f"[RESTORE] Traceback: {traceback.format_exc()}")
return {
"status": "failed",
"error": str(e),
}
@shared_task(bind=True)
def scheduled_backup_task(self, retention_count: int = 0):
"""Celery task for scheduled backups with optional retention cleanup."""
try:
logger.info(f"[SCHEDULED] Starting scheduled backup task {self.request.id}")
# Create backup
backup_file = services.create_backup()
logger.info(f"[SCHEDULED] Backup created: {backup_file.name}")
# Cleanup old backups if retention is set
deleted = 0
if retention_count > 0:
deleted = _cleanup_old_backups(retention_count)
logger.info(f"[SCHEDULED] Cleanup complete, deleted {deleted} old backup(s)")
return {
"status": "completed",
"filename": backup_file.name,
"size": backup_file.stat().st_size,
"deleted_count": deleted,
}
except Exception as e:
logger.error(f"[SCHEDULED] Task {self.request.id} failed: {str(e)}")
logger.error(f"[SCHEDULED] Traceback: {traceback.format_exc()}")
return {
"status": "failed",
"error": str(e),
}
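Because these tasks report failure through the returned dict rather than by raising, they can be exercised eagerly without a worker. A sketch assuming a configured Django/Celery environment:

from apps.backups.tasks import scheduled_backup_task

# .apply() runs the task in-process; .get() returns the dict that
# backup_status would later surface to the client.
result = scheduled_backup_task.apply(kwargs={"retention_count": 5})
payload = result.get()
if payload["status"] == "failed":
    print("backup failed:", payload["error"])
else:
    print(f"created {payload['filename']} ({payload['size']} bytes), pruned {payload['deleted_count']}")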

apps/backups/tests.py (new file, 1163 lines; diff suppressed because it is too large)


@@ -8,6 +8,7 @@ from drf_yasg.utils import swagger_auto_schema
 from drf_yasg import openapi
 from django.shortcuts import get_object_or_404, get_list_or_404
 from django.db import transaction
+from django.db.models import Q
 import os, json, requests, logging
 from django.conf import settings
 from apps.accounts.permissions import (
@@ -420,10 +421,36 @@ class ChannelViewSet(viewsets.ModelViewSet):
             group_names = channel_group.split(",")
             qs = qs.filter(channel_group__name__in=group_names)
-        if self.request.user.user_level < 10:
-            qs = qs.filter(user_level__lte=self.request.user.user_level)
+        filters = {}
+        q_filters = Q()
-        return qs
+        channel_profile_id = self.request.query_params.get("channel_profile_id")
+        show_disabled_param = self.request.query_params.get("show_disabled", None)
+        only_streamless = self.request.query_params.get("only_streamless", None)
+        if channel_profile_id:
+            try:
+                profile_id_int = int(channel_profile_id)
+                filters["channelprofilemembership__channel_profile_id"] = profile_id_int
+                if show_disabled_param is None:
+                    filters["channelprofilemembership__enabled"] = True
+            except (ValueError, TypeError):
+                # Ignore invalid profile id values
+                pass
+        if only_streamless:
+            q_filters &= Q(streams__isnull=True)
+        if self.request.user.user_level < 10:
+            filters["user_level__lte"] = self.request.user.user_level
+        if filters:
+            qs = qs.filter(**filters)
+        if q_filters:
+            qs = qs.filter(q_filters)
+        return qs.distinct()

     def get_serializer_context(self):
         context = super().get_serializer_context()
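The rewritten get_queryset keeps plain keyword filters in a dict and join-sensitive conditions in a Q object, then applies both in one pass and deduplicates the membership join. The same shape as a standalone helper (a sketch mirroring the diff, not the project's actual API):

from django.db.models import Q

def filter_channels(qs, profile_id=None, show_disabled=None,
                    only_streamless=False, user_level=0):
    filters = {}        # plain keyword filters
    q_filters = Q()     # conditions built by Q composition
    if profile_id is not None:
        filters["channelprofilemembership__channel_profile_id"] = profile_id
        if show_disabled is None:
            filters["channelprofilemembership__enabled"] = True
    if only_streamless:
        q_filters &= Q(streams__isnull=True)
    if user_level < 10:
        filters["user_level__lte"] = user_level
    if filters:
        qs = qs.filter(**filters)
    if q_filters:       # an empty Q() is falsy, so this branch is skipped when unused
        qs = qs.filter(q_filters)
    return qs.distinct()  # the membership join can yield duplicate rows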


@@ -161,18 +161,7 @@ def generate_m3u(request, profile_name=None, user=None):
             channelprofilemembership__enabled=True
         ).order_by('channel_number')
     else:
-        if profile_name is not None:
-            try:
-                channel_profile = ChannelProfile.objects.get(name=profile_name)
-            except ChannelProfile.DoesNotExist:
-                logger.warning("Requested channel profile (%s) during m3u generation does not exist", profile_name)
-                raise Http404(f"Channel profile '{profile_name}' not found")
-            channels = Channel.objects.filter(
-                channelprofilemembership__channel_profile=channel_profile,
-                channelprofilemembership__enabled=True,
-            ).order_by("channel_number")
-        else:
-            channels = Channel.objects.order_by("channel_number")
+        channels = Channel.objects.order_by("channel_number")

     # Check if the request wants to use direct logo URLs instead of cache
     use_cached_logos = request.GET.get('cachedlogos', 'true').lower() != 'false'
@@ -2314,11 +2303,11 @@ def xc_get_epg(request, user, short=False):
         program_output = {
             "id": f"{id}",
             "epg_id": f"{epg_id}",
-            "title": base64.b64encode(title.encode()).decode(),
+            "title": base64.b64encode((title or "").encode()).decode(),
             "lang": "",
             "start": start.strftime("%Y-%m-%d %H:%M:%S"),
             "end": end.strftime("%Y-%m-%d %H:%M:%S"),
-            "description": base64.b64encode(description.encode()).decode(),
+            "description": base64.b64encode((description or "").encode()).decode(),
             "channel_id": str(channel_num_int),
             "start_timestamp": str(int(start.timestamp())),
             "stop_timestamp": str(int(end.timestamp())),
@@ -2532,34 +2521,45 @@ def xc_get_series_info(request, user, series_id):
         except Exception as e:
             logger.error(f"Error refreshing series data for relation {series_relation.id}: {str(e)}")

-    # Get episodes for this series from the same M3U account
-    episode_relations = M3UEpisodeRelation.objects.filter(
-        episode__series=series,
-        m3u_account=series_relation.m3u_account
-    ).select_related('episode').order_by('episode__season_number', 'episode__episode_number')
+    # Get unique episodes for this series that have relations from any active M3U account
+    # We query episodes directly to avoid duplicates when multiple relations exist
+    # (e.g., same episode in different languages/qualities)
+    from apps.vod.models import Episode
+    episodes = Episode.objects.filter(
+        series=series,
+        m3u_relations__m3u_account__is_active=True
+    ).distinct().order_by('season_number', 'episode_number')

     # Group episodes by season
     seasons = {}
-    for relation in episode_relations:
-        episode = relation.episode
+    for episode in episodes:
         season_num = episode.season_number or 1
         if season_num not in seasons:
             seasons[season_num] = []

-        # Try to get the highest priority related M3UEpisodeRelation for this episode (for video/audio/bitrate)
+        # Get the highest priority relation for this episode (for container_extension, video/audio/bitrate)
         from apps.vod.models import M3UEpisodeRelation
-        first_relation = M3UEpisodeRelation.objects.filter(
-            episode=episode
+        best_relation = M3UEpisodeRelation.objects.filter(
+            episode=episode,
+            m3u_account__is_active=True
         ).select_related('m3u_account').order_by('-m3u_account__priority', 'id').first()
         video = audio = bitrate = None
-        if first_relation and first_relation.custom_properties:
-            info = first_relation.custom_properties.get('info')
-            if info and isinstance(info, dict):
-                info_info = info.get('info')
-                if info_info and isinstance(info_info, dict):
-                    video = info_info.get('video', {})
-                    audio = info_info.get('audio', {})
-                    bitrate = info_info.get('bitrate', 0)
+        container_extension = "mp4"
+        added_timestamp = str(int(episode.created_at.timestamp()))
+        if best_relation:
+            container_extension = best_relation.container_extension or "mp4"
+            added_timestamp = str(int(best_relation.created_at.timestamp()))
+            if best_relation.custom_properties:
+                info = best_relation.custom_properties.get('info')
+                if info and isinstance(info, dict):
+                    info_info = info.get('info')
+                    if info_info and isinstance(info_info, dict):
+                        video = info_info.get('video', {})
+                        audio = info_info.get('audio', {})
+                        bitrate = info_info.get('bitrate', 0)
         if video is None:
             video = episode.custom_properties.get('video', {}) if episode.custom_properties else {}
         if audio is None:
@@ -2572,8 +2572,8 @@ def xc_get_series_info(request, user, series_id):
                 "season": season_num,
                 "episode_num": episode.episode_number or 0,
                 "title": episode.name,
-                "container_extension": relation.container_extension or "mp4",
-                "added": str(int(relation.created_at.timestamp())),
+                "container_extension": container_extension,
+                "added": added_timestamp,
                 "custom_sid": None,
                 "direct_source": "",
                 "info": {
@@ -2889,7 +2889,7 @@ def xc_series_stream(request, username, password, stream_id, extension):
     filters = {"episode_id": stream_id, "m3u_account__is_active": True}

     try:
-        episode_relation = M3UEpisodeRelation.objects.select_related('episode').get(**filters)
+        episode_relation = M3UEpisodeRelation.objects.select_related('episode').filter(**filters).order_by('-m3u_account__priority', 'id').first()
     except M3UEpisodeRelation.DoesNotExist:
         return JsonResponse({"error": "Episode not found"}, status=404)
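Both the EPG fix and the series fixes share one defensive shape: guard None before encoding, stringify epoch fields, and pick one stream per episode by account priority. A condensed sketch of the EPG entry construction (field names copied from the diff, the helper itself is illustrative):

import base64
from datetime import datetime, timezone

def epg_entry(title, description, start: datetime, end: datetime, channel_num: int) -> dict:
    # (value or "") guards against None before .encode(), matching the fix above
    return {
        "title": base64.b64encode((title or "").encode()).decode(),
        "description": base64.b64encode((description or "").encode()).decode(),
        # XtreamCodes clients expect formatted datetimes plus string epoch fields
        "start": start.strftime("%Y-%m-%d %H:%M:%S"),
        "end": end.strftime("%Y-%m-%d %H:%M:%S"),
        "start_timestamp": str(int(start.timestamp())),
        "stop_timestamp": str(int(end.timestamp())),
        "channel_id": str(channel_num),
    }

now = datetime.now(timezone.utc)
entry = epg_entry(None, "No title set", now, now, 101)  # a None title is safe
assert entry["title"] == ""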


@@ -1232,7 +1232,13 @@ def refresh_series_episodes(account, series, external_series_id, episodes_data=N
 def batch_process_episodes(account, series, episodes_data, scan_start_time=None):
-    """Process episodes in batches for better performance"""
+    """Process episodes in batches for better performance.
+
+    Note: Multiple streams can represent the same episode (e.g., different languages
+    or qualities). Each stream has a unique stream_id, but they share the same
+    season/episode number. We create one Episode record per (series, season, episode)
+    and multiple M3UEpisodeRelation records pointing to it.
+    """
     if not episodes_data:
         return
@@ -1249,12 +1255,13 @@ def batch_process_episodes(account, series, episodes_data, scan_start_time=None)
     logger.info(f"Batch processing {len(all_episodes_data)} episodes for series {series.name}")

     # Extract episode identifiers
-    episode_keys = []
+    # Note: episode_keys may have duplicates when multiple streams represent same episode
+    episode_keys = set()  # Use set to track unique episode keys
     episode_ids = []
     for episode_data in all_episodes_data:
         season_num = episode_data['_season_number']
         episode_num = episode_data.get('episode_num', 0)
-        episode_keys.append((series.id, season_num, episode_num))
+        episode_keys.add((series.id, season_num, episode_num))
         episode_ids.append(str(episode_data.get('id')))

     # Pre-fetch existing episodes
@@ -1277,6 +1284,10 @@ def batch_process_episodes(account, series, episodes_data, scan_start_time=None)
     relations_to_create = []
     relations_to_update = []

+    # Track episodes we're creating in this batch to avoid duplicates
+    # Key: (series_id, season_number, episode_number) -> Episode object
+    episodes_pending_creation = {}
+
     for episode_data in all_episodes_data:
         try:
             episode_id = str(episode_data.get('id'))
@@ -1306,10 +1317,15 @@ def batch_process_episodes(account, series, episodes_data, scan_start_time=None)
             if backdrop:
                 custom_props['backdrop_path'] = [backdrop]

-            # Find existing episode
+            # Find existing episode - check DB first, then pending creations
             episode_key = (series.id, season_number, episode_number)
             episode = existing_episodes.get(episode_key)

+            # Check if we already have this episode pending creation (multiple streams for same episode)
+            if not episode and episode_key in episodes_pending_creation:
+                episode = episodes_pending_creation[episode_key]
+                logger.debug(f"Reusing pending episode for S{season_number:02d}E{episode_number:02d} (stream_id: {episode_id})")
+
             if episode:
                 # Update existing episode
                 updated = False
@@ -1338,7 +1354,9 @@ def batch_process_episodes(account, series, episodes_data, scan_start_time=None)
                     episode.custom_properties = custom_props if custom_props else None
                     updated = True

-                if updated:
+                # Only add to update list if episode has a PK (exists in DB) and isn't already in list
+                # Episodes pending creation don't have PKs yet and will be created via bulk_create
+                if updated and episode.pk and episode not in episodes_to_update:
                     episodes_to_update.append(episode)
             else:
                 # Create new episode
@@ -1356,6 +1374,8 @@ def batch_process_episodes(account, series, episodes_data, scan_start_time=None)
                     custom_properties=custom_props if custom_props else None
                 )
                 episodes_to_create.append(episode)
+                # Track this episode so subsequent streams with same season/episode can reuse it
+                episodes_pending_creation[episode_key] = episode

             # Handle episode relation
             if episode_id in existing_relations:
@@ -1389,9 +1409,28 @@ def batch_process_episodes(account, series, episodes_data, scan_start_time=None)
     # Execute batch operations
     with transaction.atomic():
-        # Create new episodes
+        # Create new episodes - use ignore_conflicts in case of race conditions
         if episodes_to_create:
-            Episode.objects.bulk_create(episodes_to_create)
+            Episode.objects.bulk_create(episodes_to_create, ignore_conflicts=True)
+
+            # Re-fetch the created episodes to get their PKs
+            # We need to do this because bulk_create with ignore_conflicts doesn't set PKs
+            created_episode_keys = [
+                (ep.series_id, ep.season_number, ep.episode_number)
+                for ep in episodes_to_create
+            ]
+            db_episodes = Episode.objects.filter(series=series)
+            episode_pk_map = {
+                (ep.series_id, ep.season_number, ep.episode_number): ep
+                for ep in db_episodes
+            }
+            # Update relations to point to the actual DB episodes with PKs
+            for relation in relations_to_create:
+                ep = relation.episode
+                key = (ep.series_id, ep.season_number, ep.episode_number)
+                if key in episode_pk_map:
+                    relation.episode = episode_pk_map[key]

         # Update existing episodes
         if episodes_to_update:
@@ -1400,9 +1439,9 @@ def batch_process_episodes(account, series, episodes_data, scan_start_time=None)
                 'tmdb_id', 'imdb_id', 'custom_properties'
             ])

-        # Create new episode relations
+        # Create new episode relations - use ignore_conflicts for stream_id duplicates
         if relations_to_create:
-            M3UEpisodeRelation.objects.bulk_create(relations_to_create)
+            M3UEpisodeRelation.objects.bulk_create(relations_to_create, ignore_conflicts=True)

         # Update existing episode relations
         if relations_to_update:
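The batch rewrite leans on a standard Django workaround: bulk_create(ignore_conflicts=True) leaves pk unset on most backends, so rows are re-queried by their natural key and dependent relations are re-pointed before their own bulk_create. The pattern in isolation (Item is a hypothetical model with a unique (group_id, position) constraint):

# Generic sketch of the create-then-remap pattern used above.
def bulk_create_and_remap(Item, pending, relations):
    # 1) Insert; duplicate rows from races are silently skipped, but the
    #    in-memory objects keep pk=None on most backends.
    Item.objects.bulk_create(pending, ignore_conflicts=True)
    # 2) Re-query by natural key to recover the persisted rows (with PKs).
    pk_map = {
        (obj.group_id, obj.position): obj
        for obj in Item.objects.filter(group_id__in={p.group_id for p in pending})
    }
    # 3) Point dependent rows at the persisted objects before saving them.
    for rel in relations:
        key = (rel.item.group_id, rel.item.position)
        if key in pk_map:
            rel.item = pk_map[key]
    return pk_map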


@@ -227,6 +227,13 @@ CELERY_BEAT_SCHEDULE = {
 MEDIA_ROOT = BASE_DIR / "media"
 MEDIA_URL = "/media/"

+# Backup settings
+BACKUP_ROOT = os.environ.get("BACKUP_ROOT", "/data/backups")
+BACKUP_DATA_DIRS = [
+    os.environ.get("LOGOS_DIR", "/data/logos"),
+    os.environ.get("UPLOADS_DIR", "/data/uploads"),
+    os.environ.get("PLUGINS_DIR", "/data/plugins"),
+]
+
 SERVER_IP = "127.0.0.1"


@@ -35,6 +35,13 @@ server {
         root /data;
     }

+    # Internal location for X-Accel-Redirect backup downloads
+    # Django handles auth, nginx serves the file directly
+    location /protected-backups/ {
+        internal;
+        alias /data/backups/;
+    }
+
     location /api/logos/(?<logo_id>\d+)/cache/ {
         proxy_pass http://127.0.0.1:5656;
         proxy_cache logo_cache;


@@ -21,6 +21,7 @@ module = dispatcharr.wsgi:application
 virtualenv = /dispatcharrpy
 master = true
 env = DJANGO_SETTINGS_MODULE=dispatcharr.settings
+env = USE_NGINX_ACCEL=true
 socket = /app/uwsgi.sock
 chmod-socket = 777
 vacuum = true


@@ -1349,6 +1349,183 @@ export default class API {
}
}
// Backup API (async with Celery task polling)
static async listBackups() {
try {
const response = await request(`${host}/api/backups/`);
return response || [];
} catch (e) {
errorNotification('Failed to load backups', e);
throw e;
}
}
static async getBackupStatus(taskId, token = null) {
try {
let url = `${host}/api/backups/status/${taskId}/`;
if (token) {
url += `?token=${encodeURIComponent(token)}`;
}
const response = await request(url, { auth: !token });
return response;
} catch (e) {
throw e;
}
}
static async waitForBackupTask(taskId, onProgress, token = null) {
const pollInterval = 2000; // Poll every 2 seconds
const maxAttempts = 300; // Max 10 minutes (300 * 2s)
for (let attempt = 0; attempt < maxAttempts; attempt++) {
try {
const status = await API.getBackupStatus(taskId, token);
if (onProgress) {
onProgress(status);
}
if (status.state === 'completed') {
return status.result;
} else if (status.state === 'failed') {
throw new Error(status.error || 'Task failed');
}
} catch (e) {
throw e;
}
// Wait before next poll
await new Promise((resolve) => setTimeout(resolve, pollInterval));
}
throw new Error('Task timed out');
}
static async createBackup(onProgress) {
try {
// Start the backup task
const response = await request(`${host}/api/backups/create/`, {
method: 'POST',
});
// Wait for the task to complete using token for auth
const result = await API.waitForBackupTask(response.task_id, onProgress, response.task_token);
return result;
} catch (e) {
errorNotification('Failed to create backup', e);
throw e;
}
}
static async uploadBackup(file) {
try {
const formData = new FormData();
formData.append('file', file);
const response = await request(
`${host}/api/backups/upload/`,
{
method: 'POST',
body: formData,
}
);
return response;
} catch (e) {
errorNotification('Failed to upload backup', e);
throw e;
}
}
static async deleteBackup(filename) {
try {
const encodedFilename = encodeURIComponent(filename);
await request(`${host}/api/backups/${encodedFilename}/delete/`, {
method: 'DELETE',
});
} catch (e) {
errorNotification('Failed to delete backup', e);
throw e;
}
}
static async getDownloadToken(filename) {
// Get a download token from the server
try {
const response = await request(`${host}/api/backups/${encodeURIComponent(filename)}/download-token/`);
return response.token;
} catch (e) {
throw e;
}
}
static async downloadBackup(filename) {
try {
// Get a download token first (requires auth)
const token = await API.getDownloadToken(filename);
const encodedFilename = encodeURIComponent(filename);
// Build the download URL with token
const downloadUrl = `${host}/api/backups/${encodedFilename}/download/?token=${encodeURIComponent(token)}`;
// Use direct browser navigation instead of fetch to avoid CORS issues
const link = document.createElement('a');
link.href = downloadUrl;
link.download = filename;
document.body.appendChild(link);
link.click();
document.body.removeChild(link);
return { filename };
} catch (e) {
errorNotification('Failed to download backup', e);
throw e;
}
}
static async restoreBackup(filename, onProgress) {
try {
// Start the restore task
const encodedFilename = encodeURIComponent(filename);
const response = await request(
`${host}/api/backups/${encodedFilename}/restore/`,
{
method: 'POST',
}
);
// Wait for the task to complete using token for auth
// Token-based auth allows status polling even after DB restore invalidates user sessions
const result = await API.waitForBackupTask(response.task_id, onProgress, response.task_token);
return result;
} catch (e) {
errorNotification('Failed to restore backup', e);
throw e;
}
}
static async getBackupSchedule() {
try {
const response = await request(`${host}/api/backups/schedule/`);
return response;
} catch (e) {
errorNotification('Failed to get backup schedule', e);
throw e;
}
}
static async updateBackupSchedule(settings) {
try {
const response = await request(`${host}/api/backups/schedule/update/`, {
method: 'PUT',
body: settings,
});
return response;
} catch (e) {
errorNotification('Failed to update backup schedule', e);
throw e;
}
}
static async getVersion() {
try {
const response = await request(`${host}/api/core/version/`, {


@@ -0,0 +1,902 @@
import { useEffect, useMemo, useState } from 'react';
import {
ActionIcon,
Box,
Button,
FileInput,
Flex,
Group,
Loader,
Modal,
NumberInput,
Paper,
Select,
Stack,
Switch,
Text,
TextInput,
Tooltip,
} from '@mantine/core';
import {
Download,
RefreshCcw,
RotateCcw,
SquareMinus,
SquarePlus,
UploadCloud,
} from 'lucide-react';
import { notifications } from '@mantine/notifications';
import dayjs from 'dayjs';
import API from '../../api';
import ConfirmationDialog from '../ConfirmationDialog';
import useLocalStorage from '../../hooks/useLocalStorage';
import useWarningsStore from '../../store/warnings';
import { CustomTable, useTable } from '../tables/CustomTable';
const RowActions = ({ row, handleDownload, handleRestoreClick, handleDeleteClick, downloading }) => {
return (
<Flex gap={4} wrap="nowrap">
<Tooltip label="Download">
<ActionIcon
variant="transparent"
size="sm"
color="blue.5"
onClick={() => handleDownload(row.original.name)}
loading={downloading === row.original.name}
disabled={downloading !== null}
>
<Download size={18} />
</ActionIcon>
</Tooltip>
<Tooltip label="Restore">
<ActionIcon
variant="transparent"
size="sm"
color="yellow.5"
onClick={() => handleRestoreClick(row.original)}
>
<RotateCcw size={18} />
</ActionIcon>
</Tooltip>
<Tooltip label="Delete">
<ActionIcon
variant="transparent"
size="sm"
color="red.9"
onClick={() => handleDeleteClick(row.original)}
>
<SquareMinus size={18} />
</ActionIcon>
</Tooltip>
</Flex>
);
};
// Convert 24h time string to 12h format with period
function to12Hour(time24) {
if (!time24) return { time: '12:00', period: 'AM' };
const [hours, minutes] = time24.split(':').map(Number);
const period = hours >= 12 ? 'PM' : 'AM';
const hours12 = hours % 12 || 12;
return {
time: `${hours12}:${String(minutes).padStart(2, '0')}`,
period,
};
}
// Convert 12h time + period to 24h format
function to24Hour(time12, period) {
if (!time12) return '00:00';
const [hours, minutes] = time12.split(':').map(Number);
let hours24 = hours;
if (period === 'PM' && hours !== 12) {
hours24 = hours + 12;
} else if (period === 'AM' && hours === 12) {
hours24 = 0;
}
return `${String(hours24).padStart(2, '0')}:${String(minutes).padStart(2, '0')}`;
}
// Get default timezone (same as Settings page)
function getDefaultTimeZone() {
try {
return Intl.DateTimeFormat().resolvedOptions().timeZone || 'UTC';
} catch {
return 'UTC';
}
}
// Validate cron expression
function validateCronExpression(expression) {
if (!expression || expression.trim() === '') {
return { valid: false, error: 'Cron expression is required' };
}
const parts = expression.trim().split(/\s+/);
if (parts.length !== 5) {
return { valid: false, error: 'Cron expression must have exactly 5 parts: minute hour day month weekday' };
}
const [minute, hour, dayOfMonth, month, dayOfWeek] = parts;
// Validate each part (allowing *, */N steps, ranges, lists, steps)
// Supports: *, */2, 5, 1-5, 1-5/2, 1,3,5, etc.
const cronPartRegex = /^(\*\/\d+|\*|\d+(-\d+)?(\/\d+)?(,\d+(-\d+)?(\/\d+)?)*)$/;
if (!cronPartRegex.test(minute)) {
return { valid: false, error: 'Invalid minute field (0-59, *, or cron syntax)' };
}
if (!cronPartRegex.test(hour)) {
return { valid: false, error: 'Invalid hour field (0-23, *, or cron syntax)' };
}
if (!cronPartRegex.test(dayOfMonth)) {
return { valid: false, error: 'Invalid day field (1-31, *, or cron syntax)' };
}
if (!cronPartRegex.test(month)) {
return { valid: false, error: 'Invalid month field (1-12, *, or cron syntax)' };
}
if (!cronPartRegex.test(dayOfWeek)) {
return { valid: false, error: 'Invalid weekday field (0-6, *, or cron syntax)' };
}
// Additional range validation for numeric values
const validateRange = (value, min, max, name) => {
// Skip if it's * or contains special characters
if (value === '*' || value.includes('/') || value.includes('-') || value.includes(',')) {
return null;
}
const num = parseInt(value, 10);
if (isNaN(num) || num < min || num > max) {
return `${name} must be between ${min} and ${max}`;
}
return null;
};
const minuteError = validateRange(minute, 0, 59, 'Minute');
if (minuteError) return { valid: false, error: minuteError };
const hourError = validateRange(hour, 0, 23, 'Hour');
if (hourError) return { valid: false, error: hourError };
const dayError = validateRange(dayOfMonth, 1, 31, 'Day');
if (dayError) return { valid: false, error: dayError };
const monthError = validateRange(month, 1, 12, 'Month');
if (monthError) return { valid: false, error: monthError };
const weekdayError = validateRange(dayOfWeek, 0, 6, 'Weekday');
if (weekdayError) return { valid: false, error: weekdayError };
return { valid: true, error: null };
}
const DAYS_OF_WEEK = [
{ value: '0', label: 'Sunday' },
{ value: '1', label: 'Monday' },
{ value: '2', label: 'Tuesday' },
{ value: '3', label: 'Wednesday' },
{ value: '4', label: 'Thursday' },
{ value: '5', label: 'Friday' },
{ value: '6', label: 'Saturday' },
];
function formatBytes(bytes) {
if (bytes === 0) return '0 B';
const k = 1024;
const sizes = ['B', 'KB', 'MB', 'GB'];
const i = Math.floor(Math.log(bytes) / Math.log(k));
return `${(bytes / Math.pow(k, i)).toFixed(2)} ${sizes[i]}`;
}
export default function BackupManager() {
const [backups, setBackups] = useState([]);
const [loading, setLoading] = useState(false);
const [creating, setCreating] = useState(false);
const [downloading, setDownloading] = useState(null);
const [uploadFile, setUploadFile] = useState(null);
const [uploadModalOpen, setUploadModalOpen] = useState(false);
const [restoreConfirmOpen, setRestoreConfirmOpen] = useState(false);
const [deleteConfirmOpen, setDeleteConfirmOpen] = useState(false);
const [selectedBackup, setSelectedBackup] = useState(null);
// Read user's preferences from settings
const [timeFormat] = useLocalStorage('time-format', '12h');
const [dateFormatSetting] = useLocalStorage('date-format', 'mdy');
const [tableSize] = useLocalStorage('table-size', 'default');
const [userTimezone] = useLocalStorage('time-zone', getDefaultTimeZone());
const is12Hour = timeFormat === '12h';
// Format date according to user preferences
const formatDate = (dateString) => {
const date = dayjs(dateString);
const datePart = dateFormatSetting === 'mdy' ? 'MM/DD/YYYY' : 'DD/MM/YYYY';
const timePart = is12Hour ? 'h:mm:ss A' : 'HH:mm:ss';
return date.format(`${datePart}, ${timePart}`);
};
// Warning suppression for confirmation dialogs
const suppressWarning = useWarningsStore((s) => s.suppressWarning);
// Schedule state
const [schedule, setSchedule] = useState({
enabled: false,
frequency: 'daily',
time: '03:00',
day_of_week: 0,
retention_count: 0,
cron_expression: '',
});
const [scheduleLoading, setScheduleLoading] = useState(false);
const [scheduleSaving, setScheduleSaving] = useState(false);
const [scheduleChanged, setScheduleChanged] = useState(false);
const [advancedMode, setAdvancedMode] = useState(false);
const [cronError, setCronError] = useState(null);
// For 12-hour display mode
const [displayTime, setDisplayTime] = useState('3:00');
const [timePeriod, setTimePeriod] = useState('AM');
const columns = useMemo(
() => [
{
header: 'Filename',
accessorKey: 'name',
grow: true,
cell: ({ cell }) => (
<div
style={{
whiteSpace: 'nowrap',
overflow: 'hidden',
textOverflow: 'ellipsis',
}}
>
{cell.getValue()}
</div>
),
},
{
header: 'Size',
accessorKey: 'size',
size: 80,
cell: ({ cell }) => (
<Text size="sm">{formatBytes(cell.getValue())}</Text>
),
},
{
header: 'Created',
accessorKey: 'created',
minSize: 180,
cell: ({ cell }) => (
<Text size="sm" style={{ whiteSpace: 'nowrap' }}>
{formatDate(cell.getValue())}
</Text>
),
},
{
id: 'actions',
header: 'Actions',
size: tableSize === 'compact' ? 75 : 100,
},
],
[tableSize]
);
const renderHeaderCell = (header) => {
return (
<Text size="sm" name={header.id}>
{header.column.columnDef.header}
</Text>
);
};
const renderBodyCell = ({ cell, row }) => {
switch (cell.column.id) {
case 'actions':
return (
<RowActions
row={row}
handleDownload={handleDownload}
handleRestoreClick={handleRestoreClick}
handleDeleteClick={handleDeleteClick}
downloading={downloading}
/>
);
}
};
const table = useTable({
columns,
data: backups,
allRowIds: backups.map((b) => b.name),
bodyCellRenderFns: {
actions: renderBodyCell,
},
headerCellRenderFns: {
name: renderHeaderCell,
size: renderHeaderCell,
created: renderHeaderCell,
actions: renderHeaderCell,
},
});
const loadBackups = async () => {
setLoading(true);
try {
const backupList = await API.listBackups();
setBackups(backupList);
} catch (error) {
notifications.show({
title: 'Error',
message: error?.message || 'Failed to load backups',
color: 'red',
});
} finally {
setLoading(false);
}
};
const loadSchedule = async () => {
setScheduleLoading(true);
try {
const settings = await API.getBackupSchedule();
// Check if using cron expression (advanced mode)
if (settings.cron_expression) {
setAdvancedMode(true);
}
setSchedule(settings);
// Initialize 12-hour display values
const { time, period } = to12Hour(settings.time);
setDisplayTime(time);
setTimePeriod(period);
setScheduleChanged(false);
} catch (error) {
// Ignore errors on initial load - settings may not exist yet
} finally {
setScheduleLoading(false);
}
};
useEffect(() => {
loadBackups();
loadSchedule();
}, []);
// Validate cron expression when switching to advanced mode
useEffect(() => {
if (advancedMode && schedule.cron_expression) {
const validation = validateCronExpression(schedule.cron_expression);
setCronError(validation.valid ? null : validation.error);
} else {
setCronError(null);
}
}, [advancedMode, schedule.cron_expression]);
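  // validateCronExpression (defined or imported earlier in this file) is
  // assumed to return a { valid, error } shape, roughly:
  //   validateCronExpression('0 3 * * *')  -> { valid: true }
  //   validateCronExpression('61 3 * * *') -> { valid: false, error: '...' }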
const handleScheduleChange = (field, value) => {
setSchedule((prev) => ({ ...prev, [field]: value }));
setScheduleChanged(true);
// Validate cron expression if in advanced mode
if (field === 'cron_expression' && advancedMode) {
const validation = validateCronExpression(value);
setCronError(validation.valid ? null : validation.error);
}
};
// Handle time changes in 12-hour mode
const handleTimeChange12h = (newTime, newPeriod) => {
const time = newTime ?? displayTime;
const period = newPeriod ?? timePeriod;
setDisplayTime(time);
setTimePeriod(period);
// Convert to 24h and update schedule
const time24 = to24Hour(time, period);
handleScheduleChange('time', time24);
};
// Handle time changes in 24-hour mode
const handleTimeChange24h = (value) => {
handleScheduleChange('time', value);
// Also update 12h display state in case user switches formats
const { time, period } = to12Hour(value);
setDisplayTime(time);
setTimePeriod(period);
};
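  // The to12Hour/to24Hour helpers (defined or imported earlier in this file)
  // are assumed to round-trip 'H:mm'/'HH:mm' strings, e.g.:
  //   to24Hour('3:05', 'PM') -> '15:05'   to24Hour('12:30', 'AM') -> '00:30'
  //   to12Hour('15:05')      -> { time: '3:05', period: 'PM' }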
const handleSaveSchedule = async () => {
setScheduleSaving(true);
try {
const scheduleToSave = advancedMode
? schedule
: { ...schedule, cron_expression: '' };
const updated = await API.updateBackupSchedule(scheduleToSave);
setSchedule(updated);
setScheduleChanged(false);
notifications.show({
title: 'Success',
message: 'Backup schedule saved',
color: 'green',
});
} catch (error) {
notifications.show({
title: 'Error',
message: error?.message || 'Failed to save schedule',
color: 'red',
});
} finally {
setScheduleSaving(false);
}
};
const handleCreateBackup = async () => {
setCreating(true);
try {
await API.createBackup();
notifications.show({
title: 'Success',
message: 'Backup created successfully',
color: 'green',
});
await loadBackups();
} catch (error) {
notifications.show({
title: 'Error',
message: error?.message || 'Failed to create backup',
color: 'red',
});
} finally {
setCreating(false);
}
};
const handleDownload = async (filename) => {
setDownloading(filename);
try {
await API.downloadBackup(filename);
notifications.show({
title: 'Download Started',
message: `Downloading ${filename}...`,
color: 'blue',
});
} catch (error) {
notifications.show({
title: 'Error',
message: error?.message || 'Failed to download backup',
color: 'red',
});
} finally {
setDownloading(null);
}
};
const handleDeleteClick = (backup) => {
setSelectedBackup(backup);
setDeleteConfirmOpen(true);
};
const handleDeleteConfirm = async () => {
try {
await API.deleteBackup(selectedBackup.name);
notifications.show({
title: 'Success',
message: 'Backup deleted successfully',
color: 'green',
});
await loadBackups();
} catch (error) {
notifications.show({
title: 'Error',
message: error?.message || 'Failed to delete backup',
color: 'red',
});
} finally {
setDeleteConfirmOpen(false);
setSelectedBackup(null);
}
};
const handleRestoreClick = (backup) => {
setSelectedBackup(backup);
setRestoreConfirmOpen(true);
};
const handleRestoreConfirm = async () => {
try {
await API.restoreBackup(selectedBackup.name);
notifications.show({
title: 'Success',
          message: 'Backup restored successfully. The page will reload shortly.',
color: 'green',
});
setTimeout(() => window.location.reload(), 2000);
} catch (error) {
notifications.show({
title: 'Error',
message: error?.message || 'Failed to restore backup',
color: 'red',
});
} finally {
setRestoreConfirmOpen(false);
setSelectedBackup(null);
}
};
const handleUploadSubmit = async () => {
if (!uploadFile) return;
try {
await API.uploadBackup(uploadFile);
notifications.show({
title: 'Success',
message: 'Backup uploaded successfully',
color: 'green',
});
setUploadModalOpen(false);
setUploadFile(null);
await loadBackups();
} catch (error) {
notifications.show({
title: 'Error',
message: error?.message || 'Failed to upload backup',
color: 'red',
});
}
};
return (
<Stack gap="md">
{/* Schedule Settings */}
<Stack gap="sm">
<Group justify="space-between">
<Text size="sm" fw={500}>Scheduled Backups</Text>
<Switch
checked={schedule.enabled}
onChange={(e) => handleScheduleChange('enabled', e.currentTarget.checked)}
label={schedule.enabled ? 'Enabled' : 'Disabled'}
/>
</Group>
<Group justify="space-between">
<Text size="sm" fw={500}>Advanced (Cron Expression)</Text>
<Switch
checked={advancedMode}
onChange={(e) => setAdvancedMode(e.currentTarget.checked)}
label={advancedMode ? 'Enabled' : 'Disabled'}
disabled={!schedule.enabled}
size="sm"
/>
</Group>
{scheduleLoading ? (
<Loader size="sm" />
) : (
<>
{advancedMode ? (
<>
<Stack gap="sm">
<TextInput
label="Cron Expression"
value={schedule.cron_expression}
onChange={(e) => handleScheduleChange('cron_expression', e.currentTarget.value)}
placeholder="0 3 * * *"
description="Format: minute hour day month weekday (e.g., '0 3 * * *' = 3:00 AM daily)"
disabled={!schedule.enabled}
error={cronError}
/>
<Text size="xs" c="dimmed">
Examples: <br />
<code>0 3 * * *</code> - Every day at 3:00 AM<br />
<code>0 2 * * 0</code> - Every Sunday at 2:00 AM<br />
<code>0 */6 * * *</code> - Every 6 hours<br />
<code>30 14 1 * *</code> - 1st of every month at 2:30 PM
</Text>
</Stack>
<Group grow align="flex-end">
<NumberInput
label="Retention"
description="0 = keep all"
value={schedule.retention_count}
onChange={(value) => handleScheduleChange('retention_count', value || 0)}
min={0}
disabled={!schedule.enabled}
/>
<Button
onClick={handleSaveSchedule}
loading={scheduleSaving}
                  disabled={!scheduleChanged || (advancedMode && !!cronError)}
variant="default"
>
Save
</Button>
</Group>
</>
) : (
<Stack gap="sm">
<Group align="flex-end" gap="xs" wrap="nowrap">
<Select
label="Frequency"
value={schedule.frequency}
onChange={(value) => handleScheduleChange('frequency', value)}
data={[
{ value: 'daily', label: 'Daily' },
{ value: 'weekly', label: 'Weekly' },
]}
disabled={!schedule.enabled}
/>
{schedule.frequency === 'weekly' && (
<Select
label="Day"
value={String(schedule.day_of_week)}
onChange={(value) => handleScheduleChange('day_of_week', parseInt(value, 10))}
data={DAYS_OF_WEEK}
disabled={!schedule.enabled}
/>
)}
{is12Hour ? (
<>
<Select
label="Hour"
value={displayTime ? displayTime.split(':')[0] : '12'}
onChange={(value) => {
const minute = displayTime ? displayTime.split(':')[1] : '00';
handleTimeChange12h(`${value}:${minute}`, null);
}}
data={Array.from({ length: 12 }, (_, i) => ({
value: String(i + 1),
label: String(i + 1),
}))}
disabled={!schedule.enabled}
searchable
/>
<Select
label="Minute"
value={displayTime ? displayTime.split(':')[1] : '00'}
onChange={(value) => {
const hour = displayTime ? displayTime.split(':')[0] : '12';
handleTimeChange12h(`${hour}:${value}`, null);
}}
data={Array.from({ length: 60 }, (_, i) => ({
value: String(i).padStart(2, '0'),
label: String(i).padStart(2, '0'),
}))}
disabled={!schedule.enabled}
searchable
/>
<Select
label="Period"
value={timePeriod}
onChange={(value) => handleTimeChange12h(null, value)}
data={[
{ value: 'AM', label: 'AM' },
{ value: 'PM', label: 'PM' },
]}
disabled={!schedule.enabled}
/>
</>
) : (
<>
<Select
label="Hour"
value={schedule.time ? schedule.time.split(':')[0] : '00'}
onChange={(value) => {
const minute = schedule.time ? schedule.time.split(':')[1] : '00';
handleTimeChange24h(`${value}:${minute}`);
}}
data={Array.from({ length: 24 }, (_, i) => ({
value: String(i).padStart(2, '0'),
label: String(i).padStart(2, '0'),
}))}
disabled={!schedule.enabled}
searchable
/>
<Select
label="Minute"
value={schedule.time ? schedule.time.split(':')[1] : '00'}
onChange={(value) => {
const hour = schedule.time ? schedule.time.split(':')[0] : '00';
handleTimeChange24h(`${hour}:${value}`);
}}
data={Array.from({ length: 60 }, (_, i) => ({
value: String(i).padStart(2, '0'),
label: String(i).padStart(2, '0'),
}))}
disabled={!schedule.enabled}
searchable
/>
</>
)}
</Group>
<Group grow align="flex-end" gap="xs">
<NumberInput
label="Retention"
description="0 = keep all"
value={schedule.retention_count}
onChange={(value) => handleScheduleChange('retention_count', value || 0)}
min={0}
disabled={!schedule.enabled}
/>
<Button
onClick={handleSaveSchedule}
loading={scheduleSaving}
disabled={!scheduleChanged}
variant="default"
>
Save
</Button>
</Group>
</Stack>
)}
{/* Timezone info - only show in simple mode */}
{!advancedMode && schedule.enabled && schedule.time && (
<Text size="xs" c="dimmed" mt="xs">
              Backups will run at {schedule.time} in the system timezone ({userTimezone}).
</Text>
)}
</>
)}
</Stack>
{/* Backups List */}
<Stack gap={0}>
<Paper>
<Box
style={{
display: 'flex',
justifyContent: 'flex-end',
padding: 10,
}}
>
<Flex gap={6}>
<Tooltip label="Upload existing backup">
<Button
leftSection={<UploadCloud size={18} />}
variant="light"
size="xs"
onClick={() => setUploadModalOpen(true)}
p={5}
>
Upload
</Button>
</Tooltip>
<Tooltip label="Refresh list">
<Button
leftSection={<RefreshCcw size={18} />}
variant="light"
size="xs"
onClick={loadBackups}
loading={loading}
p={5}
>
Refresh
</Button>
</Tooltip>
<Tooltip label="Create new backup">
<Button
leftSection={<SquarePlus size={18} />}
variant="light"
size="xs"
onClick={handleCreateBackup}
loading={creating}
p={5}
color="green"
style={{
borderWidth: '1px',
borderColor: 'green',
color: 'white',
}}
>
Create Backup
</Button>
</Tooltip>
</Flex>
</Box>
</Paper>
<Box
style={{
display: 'flex',
flexDirection: 'column',
maxHeight: 300,
width: '100%',
overflow: 'hidden',
}}
>
<Box
style={{
flex: 1,
overflowY: 'auto',
overflowX: 'auto',
border: 'solid 1px rgb(68,68,68)',
borderRadius: 'var(--mantine-radius-default)',
}}
>
{loading ? (
<Box p="xl" style={{ display: 'flex', justifyContent: 'center' }}>
<Loader />
</Box>
) : backups.length === 0 ? (
<Text size="sm" c="dimmed" p="md" ta="center">
No backups found. Create one to get started.
</Text>
) : (
<div style={{ minWidth: 500 }}>
<CustomTable table={table} />
</div>
)}
</Box>
</Box>
</Stack>
<Modal
opened={uploadModalOpen}
onClose={() => {
setUploadModalOpen(false);
setUploadFile(null);
}}
title="Upload Backup"
>
<Stack>
<FileInput
label="Select backup file"
placeholder="Choose a .zip file"
accept=".zip,application/zip,application/x-zip-compressed"
value={uploadFile}
onChange={setUploadFile}
/>
<Group justify="flex-end">
<Button
variant="outline"
onClick={() => {
setUploadModalOpen(false);
setUploadFile(null);
}}
>
Cancel
</Button>
<Button onClick={handleUploadSubmit} disabled={!uploadFile} variant="default">
Upload
</Button>
</Group>
</Stack>
</Modal>
<ConfirmationDialog
opened={restoreConfirmOpen}
onClose={() => {
setRestoreConfirmOpen(false);
setSelectedBackup(null);
}}
onConfirm={handleRestoreConfirm}
title="Restore Backup"
message={`Are you sure you want to restore from "${selectedBackup?.name}"? This will replace all current data with the backup data. This action cannot be undone.`}
confirmLabel="Restore"
cancelLabel="Cancel"
actionKey="restore-backup"
onSuppressChange={suppressWarning}
/>
<ConfirmationDialog
opened={deleteConfirmOpen}
onClose={() => {
setDeleteConfirmOpen(false);
setSelectedBackup(null);
}}
onConfirm={handleDeleteConfirm}
title="Delete Backup"
message={`Are you sure you want to delete "${selectedBackup?.name}"? This action cannot be undone.`}
confirmLabel="Delete"
cancelLabel="Cancel"
actionKey="delete-backup"
onSuppressChange={suppressWarning}
/>
</Stack>
);
}

View file

@@ -68,7 +68,7 @@ const epgUrlBase = `${window.location.protocol}//${window.location.host}/output/
const hdhrUrlBase = `${window.location.protocol}//${window.location.host}/hdhr`;
const ChannelEnabledSwitch = React.memo(
({ rowId, selectedProfileId, selectedTableIds }) => {
({ rowId, selectedProfileId, selectedTableIds, setSelectedTableIds }) => {
// Directly extract the channels set once to avoid re-renders on every change.
const isEnabled = useChannelsStore(
useCallback(
@@ -79,16 +79,20 @@ const ChannelEnabledSwitch = React.memo(
)
);
const handleToggle = () => {
const handleToggle = async () => {
if (selectedTableIds.length > 1) {
API.updateProfileChannels(
await API.updateProfileChannels(
selectedTableIds,
selectedProfileId,
!isEnabled
);
} else {
API.updateProfileChannel(rowId, selectedProfileId, !isEnabled);
await API.updateProfileChannel(rowId, selectedProfileId, !isEnabled);
}
setSelectedTableIds([]);
return API.requeryChannels();
};
return (
@@ -289,6 +293,9 @@ const ChannelsTable = ({}) => {
const [selectedProfile, setSelectedProfile] = useState(
profiles[selectedProfileId]
);
const [showDisabled, setShowDisabled] = useState(true);
const [showOnlyStreamlessChannels, setShowOnlyStreamlessChannels] =
useState(false);
const [paginationString, setPaginationString] = useState('');
const [filters, setFilters] = useState({
@@ -369,6 +376,15 @@ const ChannelsTable = ({}) => {
params.append('page', pagination.pageIndex + 1);
params.append('page_size', pagination.pageSize);
params.append('include_streams', 'true');
if (selectedProfileId !== '0') {
params.append('channel_profile_id', selectedProfileId);
}
if (showDisabled === true) {
params.append('show_disabled', true);
}
if (showOnlyStreamlessChannels === true) {
params.append('only_streamless', true);
}
// Apply sorting
if (sorting.length > 0) {
@@ -401,7 +417,14 @@ const ChannelsTable = ({}) => {
pageSize: pagination.pageSize,
});
setAllRowIds(ids);
}, [pagination, sorting, debouncedFilters]);
}, [
pagination,
sorting,
debouncedFilters,
showDisabled,
selectedProfileId,
showOnlyStreamlessChannels,
]);
const stopPropagation = useCallback((e) => {
e.stopPropagation();
@@ -728,6 +751,7 @@ const ChannelsTable = ({}) => {
rowId={row.original.id}
selectedProfileId={selectedProfileId}
selectedTableIds={table.getState().selectedTableIds}
setSelectedTableIds={table.setSelectedTableIds}
/>
);
},
@@ -1326,6 +1350,10 @@ const ChannelsTable = ({}) => {
deleteChannels={deleteChannels}
selectedTableIds={table.selectedTableIds}
table={table}
showDisabled={showDisabled}
setShowDisabled={setShowDisabled}
showOnlyStreamlessChannels={showOnlyStreamlessChannels}
setShowOnlyStreamlessChannels={setShowOnlyStreamlessChannels}
/>
{/* Table or ghost empty state inside Paper */}

View file

@@ -12,20 +12,22 @@ import {
Text,
TextInput,
Tooltip,
UnstyledButton,
useMantineTheme,
} from '@mantine/core';
import {
ArrowDown01,
Binary,
Check,
CircleCheck,
Ellipsis,
EllipsisVertical,
SquareMinus,
SquarePen,
SquarePlus,
Settings,
Eye,
EyeOff,
Filter,
Square,
SquareCheck,
} from 'lucide-react';
import API from '../../../api';
import { notifications } from '@mantine/notifications';
@@ -102,6 +104,10 @@ const ChannelTableHeader = ({
editChannel,
deleteChannels,
selectedTableIds,
showDisabled,
setShowDisabled,
showOnlyStreamlessChannels,
setShowOnlyStreamlessChannels,
}) => {
const theme = useMantineTheme();
@@ -208,6 +214,14 @@ const ChannelTableHeader = ({
);
};
const toggleShowDisabled = () => {
setShowDisabled(!showDisabled);
};
const toggleShowOnlyStreamlessChannels = () => {
setShowOnlyStreamlessChannels(!showOnlyStreamlessChannels);
};
return (
<Group justify="space-between">
<Group gap={5} style={{ paddingLeft: 10 }}>
@@ -236,6 +250,41 @@ }}
}}
>
<Flex gap={6}>
<Menu shadow="md" width={200}>
<Menu.Target>
<Button size="xs" variant="default" onClick={() => {}}>
<Filter size={18} />
</Button>
</Menu.Target>
<Menu.Dropdown>
<Menu.Item
onClick={toggleShowDisabled}
leftSection={
showDisabled ? <Eye size={18} /> : <EyeOff size={18} />
}
disabled={selectedProfileId === '0'}
>
<Text size="xs">
{showDisabled ? 'Hide Disabled' : 'Show Disabled'}
</Text>
</Menu.Item>
<Menu.Item
onClick={toggleShowOnlyStreamlessChannels}
leftSection={
showOnlyStreamlessChannels ? (
<SquareCheck size={18} />
) : (
<Square size={18} />
)
}
>
<Text size="xs">Only Empty Channels</Text>
</Menu.Item>
</Menu.Dropdown>
</Menu>
<Button
leftSection={<SquarePen size={18} />}
variant="default"

View file

@@ -38,6 +38,7 @@ import { isNotEmpty, useForm } from '@mantine/form';
import { notifications } from '@mantine/notifications';
import UserAgentsTable from '../components/tables/UserAgentsTable';
import StreamProfilesTable from '../components/tables/StreamProfilesTable';
import BackupManager from '../components/backups/BackupManager';
import useLocalStorage from '../hooks/useLocalStorage';
import useAuthStore from '../store/auth';
import {
@@ -1909,6 +1910,13 @@ const SettingsPage = () => {
</form>
</Accordion.Panel>
</Accordion.Item>
<Accordion.Item value="backups">
<Accordion.Control>Backup & Restore</Accordion.Control>
<Accordion.Panel>
<BackupManager />
</Accordion.Panel>
</Accordion.Item>
</>
)}
</Accordion>