diff --git a/apps/channels/api_urls.py b/apps/channels/api_urls.py
index 7cfdc1b1..7999abd9 100644
--- a/apps/channels/api_urls.py
+++ b/apps/channels/api_urls.py
@@ -13,12 +13,14 @@ from .api_views import (
     UpdateChannelMembershipAPIView,
     BulkUpdateChannelMembershipAPIView,
     RecordingViewSet,
+    RecurringRecordingRuleViewSet,
     GetChannelStreamsAPIView,
     SeriesRulesAPIView,
     DeleteSeriesRuleAPIView,
     EvaluateSeriesRulesAPIView,
     BulkRemoveSeriesRecordingsAPIView,
     BulkDeleteUpcomingRecordingsAPIView,
+    ComskipConfigAPIView,
 )

 app_name = 'channels' # for DRF routing
@@ -30,6 +32,7 @@ router.register(r'channels', ChannelViewSet, basename='channel')
 router.register(r'logos', LogoViewSet, basename='logo')
 router.register(r'profiles', ChannelProfileViewSet, basename='profile')
 router.register(r'recordings', RecordingViewSet, basename='recording')
+router.register(r'recurring-rules', RecurringRecordingRuleViewSet, basename='recurring-rule')

 urlpatterns = [
     # Bulk delete is a single APIView, not a ViewSet
@@ -46,6 +49,7 @@ urlpatterns = [
     path('series-rules/bulk-remove/', BulkRemoveSeriesRecordingsAPIView.as_view(), name='bulk_remove_series_recordings'),
     path('series-rules//', DeleteSeriesRuleAPIView.as_view(), name='delete_series_rule'),
     path('recordings/bulk-delete-upcoming/', BulkDeleteUpcomingRecordingsAPIView.as_view(), name='bulk_delete_upcoming_recordings'),
+    path('dvr/comskip-config/', ComskipConfigAPIView.as_view(), name='comskip_config'),
 ]

 urlpatterns += router.urls
diff --git a/apps/channels/api_views.py b/apps/channels/api_views.py
index f5395c4f..0c562974 100644
--- a/apps/channels/api_views.py
+++ b/apps/channels/api_views.py
@@ -28,6 +28,7 @@ from .models import (
     ChannelProfile,
     ChannelProfileMembership,
     Recording,
+    RecurringRecordingRule,
 )
 from .serializers import (
     StreamSerializer,
@@ -38,8 +39,17 @@ from .serializers import (
     BulkChannelProfileMembershipSerializer,
     ChannelProfileSerializer,
     RecordingSerializer,
+    RecurringRecordingRuleSerializer,
+)
+from .tasks import (
+    match_epg_channels,
+    evaluate_series_rules,
+    evaluate_series_rules_impl,
+    match_single_channel_epg,
+    match_selected_channels_epg,
+    sync_recurring_rule_impl,
+    purge_recurring_rule_impl,
 )
-from .tasks import match_epg_channels, evaluate_series_rules, evaluate_series_rules_impl, match_single_channel_epg, match_selected_channels_epg
 import django_filters
 from django_filters.rest_framework import DjangoFilterBackend
 from rest_framework.filters import SearchFilter, OrderingFilter
@@ -49,10 +59,12 @@ from django.db.models import Q
 from django.http import StreamingHttpResponse, FileResponse, Http404
 from django.utils import timezone
 import mimetypes
+from django.conf import settings

 from rest_framework.pagination import PageNumberPagination
+

 logger = logging.getLogger(__name__)
@@ -1684,6 +1696,41 @@ class BulkUpdateChannelMembershipAPIView(APIView):
         return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)


+class RecurringRecordingRuleViewSet(viewsets.ModelViewSet):
+    queryset = RecurringRecordingRule.objects.all().select_related("channel")
+    serializer_class = RecurringRecordingRuleSerializer
+
+    def get_permissions(self):
+        return [IsAdmin()]
+
+    def perform_create(self, serializer):
+        rule = serializer.save()
+        try:
+            sync_recurring_rule_impl(rule.id, drop_existing=True)
+        except Exception as err:
+            logger.warning(f"Failed to initialize recurring rule {rule.id}: {err}")
+        return rule
+
+    def perform_update(self, serializer):
+        rule = serializer.save()
+        try:
+            if rule.enabled:
+
sync_recurring_rule_impl(rule.id, drop_existing=True) + else: + purge_recurring_rule_impl(rule.id) + except Exception as err: + logger.warning(f"Failed to resync recurring rule {rule.id}: {err}") + return rule + + def perform_destroy(self, instance): + rule_id = instance.id + super().perform_destroy(instance) + try: + purge_recurring_rule_impl(rule_id) + except Exception as err: + logger.warning(f"Failed to purge recordings for rule {rule_id}: {err}") + + class RecordingViewSet(viewsets.ModelViewSet): queryset = Recording.objects.all() serializer_class = RecordingSerializer @@ -1863,6 +1910,49 @@ class RecordingViewSet(viewsets.ModelViewSet): return response +class ComskipConfigAPIView(APIView): + """Upload or inspect the custom comskip.ini used by DVR processing.""" + + parser_classes = [MultiPartParser, FormParser] + + def get_permissions(self): + return [IsAdmin()] + + def get(self, request): + path = CoreSettings.get_dvr_comskip_custom_path() + exists = bool(path and os.path.exists(path)) + return Response({"path": path, "exists": exists}) + + def post(self, request): + uploaded = request.FILES.get("file") or request.FILES.get("comskip_ini") + if not uploaded: + return Response({"error": "No file provided"}, status=status.HTTP_400_BAD_REQUEST) + + name = (uploaded.name or "").lower() + if not name.endswith(".ini"): + return Response({"error": "Only .ini files are allowed"}, status=status.HTTP_400_BAD_REQUEST) + + if uploaded.size and uploaded.size > 1024 * 1024: + return Response({"error": "File too large (limit 1MB)"}, status=status.HTTP_400_BAD_REQUEST) + + dest_dir = os.path.join(settings.MEDIA_ROOT, "comskip") + os.makedirs(dest_dir, exist_ok=True) + dest_path = os.path.join(dest_dir, "comskip.ini") + + try: + with open(dest_path, "wb") as dest: + for chunk in uploaded.chunks(): + dest.write(chunk) + except Exception as e: + logger.error(f"Failed to save uploaded comskip.ini: {e}") + return Response({"error": "Unable to save file"}, status=status.HTTP_500_INTERNAL_SERVER_ERROR) + + # Persist path setting so DVR processing picks it up immediately + CoreSettings.set_dvr_comskip_custom_path(dest_path) + + return Response({"success": True, "path": dest_path, "exists": os.path.exists(dest_path)}) + + class BulkDeleteUpcomingRecordingsAPIView(APIView): """Delete all upcoming (future) recordings.""" def get_permissions(self): diff --git a/apps/channels/migrations/0026_recurringrecordingrule.py b/apps/channels/migrations/0026_recurringrecordingrule.py new file mode 100644 index 00000000..1b8cfdb8 --- /dev/null +++ b/apps/channels/migrations/0026_recurringrecordingrule.py @@ -0,0 +1,31 @@ +# Generated by Django 5.0.14 on 2025-09-18 14:56 + +import django.db.models.deletion +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('dispatcharr_channels', '0025_alter_channelgroupm3uaccount_custom_properties_and_more'), + ] + + operations = [ + migrations.CreateModel( + name='RecurringRecordingRule', + fields=[ + ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('days_of_week', models.JSONField(default=list)), + ('start_time', models.TimeField()), + ('end_time', models.TimeField()), + ('enabled', models.BooleanField(default=True)), + ('name', models.CharField(blank=True, max_length=255)), + ('created_at', models.DateTimeField(auto_now_add=True)), + ('updated_at', models.DateTimeField(auto_now=True)), + ('channel', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, 
related_name='recurring_rules', to='dispatcharr_channels.channel')), + ], + options={ + 'ordering': ['channel', 'start_time'], + }, + ), + ] diff --git a/apps/channels/migrations/0027_recurringrecordingrule_end_date_and_more.py b/apps/channels/migrations/0027_recurringrecordingrule_end_date_and_more.py new file mode 100644 index 00000000..8cdb9868 --- /dev/null +++ b/apps/channels/migrations/0027_recurringrecordingrule_end_date_and_more.py @@ -0,0 +1,23 @@ +# Generated by Django 5.2.4 on 2025-10-05 20:50 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('dispatcharr_channels', '0026_recurringrecordingrule'), + ] + + operations = [ + migrations.AddField( + model_name='recurringrecordingrule', + name='end_date', + field=models.DateField(blank=True, null=True), + ), + migrations.AddField( + model_name='recurringrecordingrule', + name='start_date', + field=models.DateField(blank=True, null=True), + ), + ] diff --git a/apps/channels/models.py b/apps/channels/models.py index 20c9ac42..6609f61d 100644 --- a/apps/channels/models.py +++ b/apps/channels/models.py @@ -601,3 +601,35 @@ class Recording(models.Model): def __str__(self): return f"{self.channel.name} - {self.start_time} to {self.end_time}" + + +class RecurringRecordingRule(models.Model): + """Rule describing a recurring manual DVR schedule.""" + + channel = models.ForeignKey( + "Channel", + on_delete=models.CASCADE, + related_name="recurring_rules", + ) + days_of_week = models.JSONField(default=list) + start_time = models.TimeField() + end_time = models.TimeField() + enabled = models.BooleanField(default=True) + name = models.CharField(max_length=255, blank=True) + start_date = models.DateField(null=True, blank=True) + end_date = models.DateField(null=True, blank=True) + created_at = models.DateTimeField(auto_now_add=True) + updated_at = models.DateTimeField(auto_now=True) + + class Meta: + ordering = ["channel", "start_time"] + + def __str__(self): + channel_name = getattr(self.channel, "name", str(self.channel_id)) + return f"Recurring rule for {channel_name}" + + def cleaned_days(self): + try: + return sorted({int(d) for d in (self.days_of_week or []) if 0 <= int(d) <= 6}) + except Exception: + return [] diff --git a/apps/channels/serializers.py b/apps/channels/serializers.py index 51bfe0a0..7058ced2 100644 --- a/apps/channels/serializers.py +++ b/apps/channels/serializers.py @@ -1,4 +1,6 @@ import json +from datetime import datetime + from rest_framework import serializers from .models import ( Stream, @@ -10,6 +12,7 @@ from .models import ( ChannelProfile, ChannelProfileMembership, Recording, + RecurringRecordingRule, ) from apps.epg.serializers import EPGDataSerializer from core.models import StreamProfile @@ -454,6 +457,13 @@ class RecordingSerializer(serializers.ModelSerializer): start_time = data.get("start_time") end_time = data.get("end_time") + if start_time and timezone.is_naive(start_time): + start_time = timezone.make_aware(start_time, timezone.get_current_timezone()) + data["start_time"] = start_time + if end_time and timezone.is_naive(end_time): + end_time = timezone.make_aware(end_time, timezone.get_current_timezone()) + data["end_time"] = end_time + # If this is an EPG-based recording (program provided), apply global pre/post offsets try: cp = data.get("custom_properties") or {} @@ -497,3 +507,56 @@ class RecordingSerializer(serializers.ModelSerializer): raise serializers.ValidationError("End time must be after start time.") return data + + +class 
RecurringRecordingRuleSerializer(serializers.ModelSerializer): + class Meta: + model = RecurringRecordingRule + fields = "__all__" + read_only_fields = ["created_at", "updated_at"] + + def validate_days_of_week(self, value): + if not value: + raise serializers.ValidationError("Select at least one day of the week") + cleaned = [] + for entry in value: + try: + iv = int(entry) + except (TypeError, ValueError): + raise serializers.ValidationError("Days of week must be integers 0-6") + if iv < 0 or iv > 6: + raise serializers.ValidationError("Days of week must be between 0 (Monday) and 6 (Sunday)") + cleaned.append(iv) + return sorted(set(cleaned)) + + def validate(self, attrs): + start = attrs.get("start_time") or getattr(self.instance, "start_time", None) + end = attrs.get("end_time") or getattr(self.instance, "end_time", None) + start_date = attrs.get("start_date") if "start_date" in attrs else getattr(self.instance, "start_date", None) + end_date = attrs.get("end_date") if "end_date" in attrs else getattr(self.instance, "end_date", None) + if start_date is None: + existing_start = getattr(self.instance, "start_date", None) + if existing_start is None: + raise serializers.ValidationError("Start date is required") + if start_date and end_date and end_date < start_date: + raise serializers.ValidationError("End date must be on or after start date") + if end_date is None: + existing_end = getattr(self.instance, "end_date", None) + if existing_end is None: + raise serializers.ValidationError("End date is required") + if start and end and start_date and end_date: + start_dt = datetime.combine(start_date, start) + end_dt = datetime.combine(end_date, end) + if end_dt <= start_dt: + raise serializers.ValidationError("End datetime must be after start datetime") + elif start and end and end == start: + raise serializers.ValidationError("End time must be different from start time") + # Normalize empty strings to None for dates + if attrs.get("end_date") == "": + attrs["end_date"] = None + if attrs.get("start_date") == "": + attrs["start_date"] = None + return super().validate(attrs) + + def create(self, validated_data): + return super().create(validated_data) diff --git a/apps/channels/tasks.py b/apps/channels/tasks.py index 51cb884c..40758d47 100755 --- a/apps/channels/tasks.py +++ b/apps/channels/tasks.py @@ -7,6 +7,8 @@ import requests import time import json import subprocess +import signal +from zoneinfo import ZoneInfo from datetime import datetime, timedelta import gc @@ -1115,6 +1117,148 @@ def reschedule_upcoming_recordings_for_offset_change(): return reschedule_upcoming_recordings_for_offset_change_impl() +def _notify_recordings_refresh(): + try: + from core.utils import send_websocket_update + send_websocket_update('updates', 'update', {"success": True, "type": "recordings_refreshed"}) + except Exception: + pass + + +def purge_recurring_rule_impl(rule_id: int) -> int: + """Remove all future recordings created by a recurring rule.""" + from django.utils import timezone + from .models import Recording + + now = timezone.now() + try: + removed, _ = Recording.objects.filter( + start_time__gte=now, + custom_properties__rule__id=rule_id, + ).delete() + except Exception: + removed = 0 + if removed: + _notify_recordings_refresh() + return removed + + +def sync_recurring_rule_impl(rule_id: int, drop_existing: bool = True, horizon_days: int = 14) -> int: + """Ensure recordings exist for a recurring rule within the scheduling horizon.""" + from django.utils import timezone + from .models import 
RecurringRecordingRule, Recording + + rule = RecurringRecordingRule.objects.filter(pk=rule_id).select_related("channel").first() + now = timezone.now() + removed = 0 + if drop_existing: + removed = purge_recurring_rule_impl(rule_id) + + if not rule or not rule.enabled: + return 0 + + days = rule.cleaned_days() + if not days: + return 0 + + tz_name = CoreSettings.get_system_time_zone() + try: + tz = ZoneInfo(tz_name) + except Exception: + logger.warning("Invalid or unsupported time zone '%s'; falling back to Server default", tz_name) + tz = timezone.get_current_timezone() + start_limit = rule.start_date or now.date() + end_limit = rule.end_date + horizon = now + timedelta(days=horizon_days) + start_window = max(start_limit, now.date()) + if drop_existing and end_limit: + end_window = end_limit + else: + end_window = horizon.date() + if end_limit and end_limit < end_window: + end_window = end_limit + if end_window < start_window: + return 0 + total_created = 0 + + for offset in range((end_window - start_window).days + 1): + target_date = start_window + timedelta(days=offset) + if target_date.weekday() not in days: + continue + if end_limit and target_date > end_limit: + continue + try: + start_dt = timezone.make_aware(datetime.combine(target_date, rule.start_time), tz) + end_dt = timezone.make_aware(datetime.combine(target_date, rule.end_time), tz) + except Exception: + continue + if end_dt <= start_dt: + end_dt = end_dt + timedelta(days=1) + if start_dt <= now: + continue + exists = Recording.objects.filter( + channel=rule.channel, + start_time=start_dt, + custom_properties__rule__id=rule.id, + ).exists() + if exists: + continue + description = rule.name or f"Recurring recording for {rule.channel.name}" + cp = { + "rule": { + "type": "recurring", + "id": rule.id, + "days_of_week": days, + "name": rule.name or "", + }, + "status": "scheduled", + "description": description, + "program": { + "title": rule.name or rule.channel.name, + "description": description, + "start_time": start_dt.isoformat(), + "end_time": end_dt.isoformat(), + }, + } + try: + Recording.objects.create( + channel=rule.channel, + start_time=start_dt, + end_time=end_dt, + custom_properties=cp, + ) + total_created += 1 + except Exception as err: + logger.warning(f"Failed to create recurring recording for rule {rule.id}: {err}") + + if removed or total_created: + _notify_recordings_refresh() + + return total_created + + +@shared_task +def rebuild_recurring_rule(rule_id: int, horizon_days: int = 14): + return sync_recurring_rule_impl(rule_id, drop_existing=True, horizon_days=horizon_days) + + +@shared_task +def maintain_recurring_recordings(): + from .models import RecurringRecordingRule + + total = 0 + for rule_id in RecurringRecordingRule.objects.filter(enabled=True).values_list("id", flat=True): + try: + total += sync_recurring_rule_impl(rule_id, drop_existing=False) + except Exception as err: + logger.warning(f"Recurring rule maintenance failed for {rule_id}: {err}") + return total + + +@shared_task +def purge_recurring_rule(rule_id: int): + return purge_recurring_rule_impl(rule_id) + @shared_task def _safe_name(s): try: @@ -1837,6 +1981,7 @@ def comskip_process_recording(recording_id: int): Safe to call even if comskip is not installed; stores status in custom_properties.comskip. 
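
For orientation, the expansion performed by sync_recurring_rule_impl boils down to: clamp the rule's date range to "today through now + horizon", keep only dates whose weekday is selected, build timezone-aware start/end datetimes from the rule's times, and roll overnight slots into the next day. The following is an illustrative, standalone sketch of that idea only; the RuleSketch container and function name are invented here, and the real task additionally de-duplicates against existing Recording rows and resolves the zone from CoreSettings.

    # Illustrative sketch -- not part of the patch.
    from dataclasses import dataclass
    from datetime import date, datetime, time, timedelta
    from zoneinfo import ZoneInfo


    @dataclass
    class RuleSketch:
        days_of_week: list[int]          # 0 = Monday ... 6 = Sunday, as in the serializer
        start_time: time
        end_time: time
        start_date: date | None = None
        end_date: date | None = None


    def expand_occurrences(rule, now, tz, horizon_days=14):
        """Yield (start, end) aware datetimes for every future slot inside the horizon."""
        first_day = max(rule.start_date or now.date(), now.date())
        last_day = (now + timedelta(days=horizon_days)).date()
        if rule.end_date and rule.end_date < last_day:
            last_day = rule.end_date
        for offset in range((last_day - first_day).days + 1):
            day = first_day + timedelta(days=offset)
            if day.weekday() not in rule.days_of_week:
                continue
            start = datetime.combine(day, rule.start_time, tzinfo=tz)
            end = datetime.combine(day, rule.end_time, tzinfo=tz)
            if end <= start:                      # slot crosses midnight
                end += timedelta(days=1)
            if start > now:                       # only future occurrences
                yield start, end


    tz = ZoneInfo("America/New_York")              # placeholder zone
    rule = RuleSketch(days_of_week=[0, 2], start_time=time(20, 0), end_time=time(21, 0))
    for start, end in expand_occurrences(rule, datetime.now(tz), tz):
        print(start.isoformat(), "->", end.isoformat())
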
""" import shutil + from django.db import DatabaseError from .models import Recording # Helper to broadcast status over websocket def _ws(status: str, extra: dict | None = None): @@ -1854,7 +1999,33 @@ def comskip_process_recording(recording_id: int): except Recording.DoesNotExist: return "not_found" - cp = rec.custom_properties or {} + cp = rec.custom_properties.copy() if isinstance(rec.custom_properties, dict) else {} + + def _persist_custom_properties(): + """Persist updated custom_properties without raising if the row disappeared.""" + try: + updated = Recording.objects.filter(pk=recording_id).update(custom_properties=cp) + if not updated: + logger.warning( + "Recording %s vanished before comskip status could be saved", + recording_id, + ) + return False + except DatabaseError as db_err: + logger.warning( + "Failed to persist comskip status for recording %s: %s", + recording_id, + db_err, + ) + return False + except Exception as unexpected: + logger.warning( + "Unexpected error while saving comskip status for recording %s: %s", + recording_id, + unexpected, + ) + return False + return True file_path = (cp or {}).get("file_path") if not file_path or not os.path.exists(file_path): return "no_file" @@ -1865,8 +2036,7 @@ def comskip_process_recording(recording_id: int): comskip_bin = shutil.which("comskip") if not comskip_bin: cp["comskip"] = {"status": "skipped", "reason": "comskip_not_installed"} - rec.custom_properties = cp - rec.save(update_fields=["custom_properties"]) + _persist_custom_properties() _ws('skipped', {"reason": "comskip_not_installed"}) return "comskip_missing" @@ -1878,24 +2048,59 @@ def comskip_process_recording(recording_id: int): try: cmd = [comskip_bin, "--output", os.path.dirname(file_path)] - # Prefer system ini if present to squelch warning and get sane defaults - for ini_path in ("/etc/comskip/comskip.ini", "/app/docker/comskip.ini"): - if os.path.exists(ini_path): + # Prefer user-specified INI, fall back to known defaults + ini_candidates = [] + try: + custom_ini = CoreSettings.get_dvr_comskip_custom_path() + if custom_ini: + ini_candidates.append(custom_ini) + except Exception as ini_err: + logger.debug(f"Unable to load custom comskip.ini path: {ini_err}") + ini_candidates.extend(["/etc/comskip/comskip.ini", "/app/docker/comskip.ini"]) + selected_ini = None + for ini_path in ini_candidates: + if ini_path and os.path.exists(ini_path): + selected_ini = ini_path cmd.extend([f"--ini={ini_path}"]) break cmd.append(file_path) - subprocess.run(cmd, check=True) + subprocess.run( + cmd, + check=True, + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + text=True, + ) + except subprocess.CalledProcessError as e: + stderr_tail = (e.stderr or "").strip().splitlines() + stderr_tail = stderr_tail[-5:] if stderr_tail else [] + detail = { + "status": "error", + "reason": "comskip_failed", + "returncode": e.returncode, + } + if e.returncode and e.returncode < 0: + try: + detail["signal"] = signal.Signals(-e.returncode).name + except Exception: + detail["signal"] = f"signal_{-e.returncode}" + if stderr_tail: + detail["stderr"] = "\n".join(stderr_tail) + if selected_ini: + detail["ini_path"] = selected_ini + cp["comskip"] = detail + _persist_custom_properties() + _ws('error', {"reason": "comskip_failed", "returncode": e.returncode}) + return "comskip_failed" except Exception as e: cp["comskip"] = {"status": "error", "reason": f"comskip_failed: {e}"} - rec.custom_properties = cp - rec.save(update_fields=["custom_properties"]) + _persist_custom_properties() _ws('error', 
{"reason": str(e)}) return "comskip_failed" if not os.path.exists(edl_path): cp["comskip"] = {"status": "error", "reason": "edl_not_found"} - rec.custom_properties = cp - rec.save(update_fields=["custom_properties"]) + _persist_custom_properties() _ws('error', {"reason": "edl_not_found"}) return "no_edl" @@ -1913,8 +2118,7 @@ def comskip_process_recording(recording_id: int): duration = _ffprobe_duration(file_path) if duration is None: cp["comskip"] = {"status": "error", "reason": "duration_unknown"} - rec.custom_properties = cp - rec.save(update_fields=["custom_properties"]) + _persist_custom_properties() _ws('error', {"reason": "duration_unknown"}) return "no_duration" @@ -1943,9 +2147,14 @@ def comskip_process_recording(recording_id: int): keep.append((cur, duration)) if not commercials or sum((e - s) for s, e in commercials) <= 0.5: - cp["comskip"] = {"status": "completed", "skipped": True, "edl": os.path.basename(edl_path)} - rec.custom_properties = cp - rec.save(update_fields=["custom_properties"]) + cp["comskip"] = { + "status": "completed", + "skipped": True, + "edl": os.path.basename(edl_path), + } + if selected_ini: + cp["comskip"]["ini_path"] = selected_ini + _persist_custom_properties() _ws('skipped', {"reason": "no_commercials", "commercials": 0}) return "no_commercials" @@ -1969,7 +2178,8 @@ def comskip_process_recording(recording_id: int): list_path = os.path.join(workdir, "concat_list.txt") with open(list_path, "w") as lf: for pth in parts: - lf.write(f"file '{pth}'\n") + escaped = pth.replace("'", "'\\''") + lf.write(f"file '{escaped}'\n") output_path = os.path.join(workdir, f"{os.path.splitext(os.path.basename(file_path))[0]}.cut.mkv") subprocess.run([ @@ -1995,14 +2205,14 @@ def comskip_process_recording(recording_id: int): "segments_kept": len(parts), "commercials": len(commercials), } - rec.custom_properties = cp - rec.save(update_fields=["custom_properties"]) + if selected_ini: + cp["comskip"]["ini_path"] = selected_ini + _persist_custom_properties() _ws('completed', {"commercials": len(commercials), "segments_kept": len(parts)}) return "ok" except Exception as e: cp["comskip"] = {"status": "error", "reason": str(e)} - rec.custom_properties = cp - rec.save(update_fields=["custom_properties"]) + _persist_custom_properties() _ws('error', {"reason": str(e)}) return f"error:{e}" def _resolve_poster_for_program(channel_name, program): diff --git a/apps/channels/tests/__init__.py b/apps/channels/tests/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/apps/channels/tests/test_recurring_rules.py b/apps/channels/tests/test_recurring_rules.py new file mode 100644 index 00000000..982ecb93 --- /dev/null +++ b/apps/channels/tests/test_recurring_rules.py @@ -0,0 +1,40 @@ +from datetime import datetime, timedelta +from django.test import TestCase +from django.utils import timezone + +from apps.channels.models import Channel, RecurringRecordingRule, Recording +from apps.channels.tasks import sync_recurring_rule_impl, purge_recurring_rule_impl + + +class RecurringRecordingRuleTasksTests(TestCase): + def test_sync_recurring_rule_creates_and_purges_recordings(self): + now = timezone.now() + channel = Channel.objects.create(channel_number=1, name='Test Channel') + + start_time = (now + timedelta(minutes=15)).time().replace(second=0, microsecond=0) + end_time = (now + timedelta(minutes=75)).time().replace(second=0, microsecond=0) + + rule = RecurringRecordingRule.objects.create( + channel=channel, + days_of_week=[now.weekday()], + start_time=start_time, + 
end_time=end_time, + ) + + created = sync_recurring_rule_impl(rule.id, drop_existing=True, horizon_days=1) + self.assertEqual(created, 1) + + recording = Recording.objects.filter(custom_properties__rule__id=rule.id).first() + self.assertIsNotNone(recording) + self.assertEqual(recording.channel, channel) + self.assertEqual(recording.custom_properties.get('rule', {}).get('id'), rule.id) + + expected_start = timezone.make_aware( + datetime.combine(recording.start_time.date(), start_time), + timezone.get_current_timezone(), + ) + self.assertLess(abs((recording.start_time - expected_start).total_seconds()), 60) + + removed = purge_recurring_rule_impl(rule.id) + self.assertEqual(removed, 1) + self.assertFalse(Recording.objects.filter(custom_properties__rule__id=rule.id).exists()) diff --git a/core/models.py b/core/models.py index ba040666..3a5895ba 100644 --- a/core/models.py +++ b/core/models.py @@ -1,4 +1,5 @@ # core/models.py +from django.conf import settings from django.db import models from django.utils.text import slugify from django.core.exceptions import ValidationError @@ -158,8 +159,10 @@ DVR_TV_FALLBACK_DIR_KEY = slugify("DVR TV Fallback Dir") DVR_TV_FALLBACK_TEMPLATE_KEY = slugify("DVR TV Fallback Template") DVR_MOVIE_FALLBACK_TEMPLATE_KEY = slugify("DVR Movie Fallback Template") DVR_COMSKIP_ENABLED_KEY = slugify("DVR Comskip Enabled") +DVR_COMSKIP_CUSTOM_PATH_KEY = slugify("DVR Comskip Custom Path") DVR_PRE_OFFSET_MINUTES_KEY = slugify("DVR Pre-Offset Minutes") DVR_POST_OFFSET_MINUTES_KEY = slugify("DVR Post-Offset Minutes") +SYSTEM_TIME_ZONE_KEY = slugify("System Time Zone") class CoreSettings(models.Model): @@ -274,6 +277,27 @@ class CoreSettings(models.Model): except cls.DoesNotExist: return False + @classmethod + def get_dvr_comskip_custom_path(cls): + """Return configured comskip.ini path or empty string if unset.""" + try: + return cls.objects.get(key=DVR_COMSKIP_CUSTOM_PATH_KEY).value + except cls.DoesNotExist: + return "" + + @classmethod + def set_dvr_comskip_custom_path(cls, path: str | None): + """Persist the comskip.ini path setting, normalizing nulls to empty string.""" + value = (path or "").strip() + obj, _ = cls.objects.get_or_create( + key=DVR_COMSKIP_CUSTOM_PATH_KEY, + defaults={"name": "DVR Comskip Custom Path", "value": value}, + ) + if obj.value != value: + obj.value = value + obj.save(update_fields=["value"]) + return value + @classmethod def get_dvr_pre_offset_minutes(cls): """Minutes to start recording before scheduled start (default 0).""" @@ -302,6 +326,30 @@ class CoreSettings(models.Model): except Exception: return 0 + @classmethod + def get_system_time_zone(cls): + """Return configured system time zone or fall back to Django settings.""" + try: + value = cls.objects.get(key=SYSTEM_TIME_ZONE_KEY).value + if value: + return value + except cls.DoesNotExist: + pass + return getattr(settings, "TIME_ZONE", "UTC") or "UTC" + + @classmethod + def set_system_time_zone(cls, tz_name: str | None): + """Persist the desired system time zone identifier.""" + value = (tz_name or "").strip() or getattr(settings, "TIME_ZONE", "UTC") or "UTC" + obj, _ = cls.objects.get_or_create( + key=SYSTEM_TIME_ZONE_KEY, + defaults={"name": "System Time Zone", "value": value}, + ) + if obj.value != value: + obj.value = value + obj.save(update_fields=["value"]) + return value + @classmethod def get_dvr_series_rules(cls): """Return list of series recording rules. 
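
Taken together with the router registration and ComskipConfigAPIView earlier in the patch, the new endpoints can be exercised directly over HTTP. The sketch below uses requests; the host, port, token, channel id, and dates are placeholders, while the URL paths, payload fields, and the multipart field name come from the diff itself.

    # Illustrative sketch -- not part of the patch.
    import requests

    BASE = "http://localhost:9191"                      # hypothetical Dispatcharr host
    HEADERS = {"Authorization": "Bearer <token>"}       # admin credentials, per IsAdmin()

    # Create a recurring rule: Mon/Wed/Fri, 20:00-21:00, for two weeks.
    rule = requests.post(
        f"{BASE}/api/channels/recurring-rules/",
        headers=HEADERS,
        json={
            "channel": 1,
            "days_of_week": [0, 2, 4],
            "start_time": "20:00",
            "end_time": "21:00",
            "start_date": "2025-10-06",
            "end_date": "2025-10-20",
            "name": "Evening news",
        },
    ).json()

    # Upload a custom comskip.ini (field name "file" or "comskip_ini", .ini only, <= 1MB).
    with open("comskip.ini", "rb") as fh:
        requests.post(
            f"{BASE}/api/channels/dvr/comskip-config/",
            headers=HEADERS,
            files={"file": fh},
        )

    # Disabling a rule purges its future recordings (see perform_update above).
    requests.patch(
        f"{BASE}/api/channels/recurring-rules/{rule['id']}/",
        headers=HEADERS,
        json={"enabled": False},
    )
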
Each: {tvg_id, title, mode: 'all'|'new'}""" diff --git a/dispatcharr/settings.py b/dispatcharr/settings.py index 289c6794..057780de 100644 --- a/dispatcharr/settings.py +++ b/dispatcharr/settings.py @@ -211,6 +211,10 @@ CELERY_BEAT_SCHEDULE = { "task": "core.tasks.scan_and_process_files", # Direct task call "schedule": 20.0, # Every 20 seconds }, + "maintain-recurring-recordings": { + "task": "apps.channels.tasks.maintain_recurring_recordings", + "schedule": 3600.0, # Once an hour ensure recurring schedules stay ahead + }, } MEDIA_ROOT = BASE_DIR / "media" diff --git a/frontend/src/api.js b/frontend/src/api.js index fcd2b6f4..4ef5f97e 100644 --- a/frontend/src/api.js +++ b/frontend/src/api.js @@ -1896,6 +1896,83 @@ export default class API { } } + static async updateRecording(id, values) { + try { + const response = await request(`${host}/api/channels/recordings/${id}/`, { + method: 'PATCH', + body: values, + }); + useChannelsStore.getState().fetchRecordings(); + return response; + } catch (e) { + errorNotification(`Failed to update recording ${id}`, e); + } + } + + static async getComskipConfig() { + try { + return await request(`${host}/api/channels/dvr/comskip-config/`); + } catch (e) { + errorNotification('Failed to retrieve comskip configuration', e); + } + } + + static async uploadComskipIni(file) { + try { + const formData = new FormData(); + formData.append('file', file); + return await request(`${host}/api/channels/dvr/comskip-config/`, { + method: 'POST', + body: formData, + }); + } catch (e) { + errorNotification('Failed to upload comskip.ini', e); + } + } + + static async listRecurringRules() { + try { + const response = await request(`${host}/api/channels/recurring-rules/`); + return response; + } catch (e) { + errorNotification('Failed to retrieve recurring DVR rules', e); + } + } + + static async createRecurringRule(payload) { + try { + const response = await request(`${host}/api/channels/recurring-rules/`, { + method: 'POST', + body: payload, + }); + return response; + } catch (e) { + errorNotification('Failed to create recurring DVR rule', e); + } + } + + static async updateRecurringRule(ruleId, payload) { + try { + const response = await request(`${host}/api/channels/recurring-rules/${ruleId}/`, { + method: 'PATCH', + body: payload, + }); + return response; + } catch (e) { + errorNotification(`Failed to update recurring rule ${ruleId}`, e); + } + } + + static async deleteRecurringRule(ruleId) { + try { + await request(`${host}/api/channels/recurring-rules/${ruleId}/`, { + method: 'DELETE', + }); + } catch (e) { + errorNotification(`Failed to delete recurring rule ${ruleId}`, e); + } + } + static async deleteRecording(id) { try { await request(`${host}/api/channels/recordings/${id}/`, { method: 'DELETE' }); diff --git a/frontend/src/components/forms/Recording.jsx b/frontend/src/components/forms/Recording.jsx index 7ac36a0f..90080676 100644 --- a/frontend/src/components/forms/Recording.jsx +++ b/frontend/src/components/forms/Recording.jsx @@ -1,117 +1,424 @@ -// Modal.js -import React from 'react'; +import React, { useEffect, useMemo, useState } from 'react'; +import dayjs from 'dayjs'; import API from '../../api'; -import { Button, Modal, Flex, Select, Alert } from '@mantine/core'; -import useChannelsStore from '../../store/channels'; -import { DateTimePicker } from '@mantine/dates'; +import { + Alert, + Button, + Modal, + Select, + Stack, + SegmentedControl, + MultiSelect, + Group, + TextInput, +} from '@mantine/core'; +import { DateTimePicker, TimeInput, DatePickerInput 
} from '@mantine/dates'; import { CircleAlert } from 'lucide-react'; import { isNotEmpty, useForm } from '@mantine/form'; +import useChannelsStore from '../../store/channels'; +import { notifications } from '@mantine/notifications'; -const DVR = ({ recording = null, channel = null, isOpen, onClose }) => { +const DAY_OPTIONS = [ + { value: '6', label: 'Sun' }, + { value: '0', label: 'Mon' }, + { value: '1', label: 'Tue' }, + { value: '2', label: 'Wed' }, + { value: '3', label: 'Thu' }, + { value: '4', label: 'Fri' }, + { value: '5', label: 'Sat' }, +]; + +const asDate = (value) => { + if (!value) return null; + if (value instanceof Date) return value; + const parsed = new Date(value); + return Number.isNaN(parsed.getTime()) ? null : parsed; +}; + +const toIsoIfDate = (value) => { + const dt = asDate(value); + return dt ? dt.toISOString() : value; +}; + +// Accepts "h:mm A"/"hh:mm A"/"HH:mm"/Date, returns "HH:mm" +const toTimeString = (value) => { + if (!value) return '00:00'; + if (typeof value === 'string') { + const parsed = dayjs(value, ['HH:mm', 'hh:mm A', 'h:mm A', 'HH:mm:ss'], true); + if (parsed.isValid()) return parsed.format('HH:mm'); + return value; + } + const dt = asDate(value); + if (!dt) return '00:00'; + return dayjs(dt).format('HH:mm'); +}; + +const toDateString = (value) => { + const dt = asDate(value); + if (!dt) return null; + const year = dt.getFullYear(); + const month = String(dt.getMonth() + 1).padStart(2, '0'); + const day = String(dt.getDate()).padStart(2, '0'); + return `${year}-${month}-${day}`; +}; + +const createRoundedDate = (minutesAhead = 0) => { + const dt = new Date(); + dt.setSeconds(0); + dt.setMilliseconds(0); + dt.setMinutes(Math.ceil(dt.getMinutes() / 30) * 30); + if (minutesAhead) dt.setMinutes(dt.getMinutes() + minutesAhead); + return dt; +}; + +// robust onChange for TimeInput (string or event) +const timeChange = (setter) => (valOrEvent) => { + if (typeof valOrEvent === 'string') setter(valOrEvent); + else if (valOrEvent?.currentTarget) setter(valOrEvent.currentTarget.value); +}; + +const RecordingModal = ({ recording = null, channel = null, isOpen, onClose }) => { const channels = useChannelsStore((s) => s.channels); + const fetchRecordings = useChannelsStore((s) => s.fetchRecordings); + const fetchRecurringRules = useChannelsStore((s) => s.fetchRecurringRules); - let startTime = new Date(); - startTime.setMinutes(Math.ceil(startTime.getMinutes() / 30) * 30); - startTime.setSeconds(0); - startTime.setMilliseconds(0); + const [mode, setMode] = useState('single'); + const [submitting, setSubmitting] = useState(false); - let endTime = new Date(); - endTime.setMinutes(Math.ceil(endTime.getMinutes() / 30) * 30); - endTime.setSeconds(0); - endTime.setMilliseconds(0); - endTime.setHours(endTime.getHours() + 1); + const defaultStart = createRoundedDate(); + const defaultEnd = createRoundedDate(60); + const defaultDate = new Date(); - const form = useForm({ - mode: 'uncontrolled', + // One-time form + const singleForm = useForm({ + mode: 'controlled', initialValues: { - channel_id: recording - ? recording.channel_id - : channel - ? `${channel.id}` - : '', - start_time: recording ? recording.start_time : startTime, - end_time: recording ? recording.end_time : endTime, + channel_id: recording ? `${recording.channel}` : channel ? `${channel.id}` : '', + start_time: recording ? asDate(recording.start_time) || defaultStart : defaultStart, + end_time: recording ? 
asDate(recording.end_time) || defaultEnd : defaultEnd, }, - validate: { channel_id: isNotEmpty('Select a channel'), start_time: isNotEmpty('Select a start time'), - end_time: isNotEmpty('Select an end time'), + end_time: (value, values) => { + const start = asDate(values.start_time); + const end = asDate(value); + if (!end) return 'Select an end time'; + if (start && end <= start) return 'End time must be after start time'; + return null; + }, }, }); - const onSubmit = async () => { - const { channel_id, ...values } = form.getValues(); + // Recurring form stores times as "HH:mm" strings for stable editing + const recurringForm = useForm({ + mode: 'controlled', + validateInputOnChange: false, + validateInputOnBlur: true, + initialValues: { + channel_id: channel ? `${channel.id}` : '', + days_of_week: [], + start_time: dayjs(defaultStart).format('HH:mm'), + end_time: dayjs(defaultEnd).format('HH:mm'), + rule_name: '', + start_date: defaultDate, + end_date: defaultDate, + }, + validate: { + channel_id: isNotEmpty('Select a channel'), + days_of_week: (value) => (value && value.length ? null : 'Pick at least one day'), + start_time: (value) => (value ? null : 'Select a start time'), + end_time: (value, values) => { + if (!value) return 'Select an end time'; + const start = dayjs(values.start_time, ['HH:mm', 'hh:mm A', 'h:mm A'], true); + const end = dayjs(value, ['HH:mm', 'hh:mm A', 'h:mm A'], true); + if (start.isValid() && end.isValid() && end.diff(start, 'minute') === 0) { + return 'End time must differ from start time'; + } + return null; + }, + end_date: (value, values) => { + const end = asDate(value); + const start = asDate(values.start_date); + if (!end) return 'Select an end date'; + if (start && end < start) return 'End date cannot be before start date'; + return null; + }, + }, + }); - console.log(values); + useEffect(() => { + if (!isOpen) return; - await API.createRecording({ - ...values, - channel: channel_id, + const freshStart = createRoundedDate(); + const freshEnd = createRoundedDate(60); + const freshDate = new Date(); + + if (recording && recording.id) { + setMode('single'); + singleForm.setValues({ + channel_id: `${recording.channel}`, + start_time: asDate(recording.start_time) || defaultStart, + end_time: asDate(recording.end_time) || defaultEnd, + }); + } else { + // Reset forms for fresh open + singleForm.setValues({ + channel_id: channel ? `${channel.id}` : '', + start_time: freshStart, + end_time: freshEnd, + }); + + const startStr = dayjs(freshStart).format('HH:mm'); + recurringForm.setValues({ + channel_id: channel ? 
`${channel.id}` : '', + days_of_week: [], + start_time: startStr, + end_time: dayjs(freshEnd).format('HH:mm'), + rule_name: channel?.name || '', + start_date: freshDate, + end_date: freshDate, + }); + setMode('single'); + } + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [isOpen, recording, channel]); + + const channelOptions = useMemo(() => { + const list = Object.values(channels || {}); + list.sort((a, b) => { + const aNum = Number(a.channel_number) || 0; + const bNum = Number(b.channel_number) || 0; + if (aNum === bNum) return (a.name || '').localeCompare(b.name || ''); + return aNum - bNum; }); + return list.map((item) => ({ value: `${item.id}`, label: item.name || `Channel ${item.id}` })); + }, [channels]); - form.reset(); - onClose(); + const resetForms = () => { + singleForm.reset(); + recurringForm.reset(); + setMode('single'); }; - if (!isOpen) { - return <>; - } + const handleClose = () => { + resetForms(); + onClose?.(); + }; + + const handleSingleSubmit = async (values) => { + try { + setSubmitting(true); + if (recording && recording.id) { + await API.updateRecording(recording.id, { + channel: values.channel_id, + start_time: toIsoIfDate(values.start_time), + end_time: toIsoIfDate(values.end_time), + }); + notifications.show({ + title: 'Recording updated', + message: 'Recording schedule updated successfully', + color: 'green', + autoClose: 2500, + }); + } else { + await API.createRecording({ + channel: values.channel_id, + start_time: toIsoIfDate(values.start_time), + end_time: toIsoIfDate(values.end_time), + }); + notifications.show({ + title: 'Recording scheduled', + message: 'One-time recording added to DVR queue', + color: 'green', + autoClose: 2500, + }); + } + await fetchRecordings(); + handleClose(); + } catch (error) { + console.error('Failed to create recording', error); + } finally { + setSubmitting(false); + } + }; + + const handleRecurringSubmit = async (values) => { + try { + setSubmitting(true); + await API.createRecurringRule({ + channel: values.channel_id, + days_of_week: (values.days_of_week || []).map((d) => Number(d)), + start_time: toTimeString(values.start_time), + end_time: toTimeString(values.end_time), + start_date: toDateString(values.start_date), + end_date: toDateString(values.end_date), + name: values.rule_name?.trim() || '', + }); + + await Promise.all([fetchRecurringRules(), fetchRecordings()]); + notifications.show({ + title: 'Recurring rule saved', + message: 'Future slots will be scheduled automatically', + color: 'green', + autoClose: 2500, + }); + handleClose(); + } catch (error) { + console.error('Failed to create recurring rule', error); + } finally { + setSubmitting(false); + } + }; + + const onSubmit = + mode === 'single' + ? singleForm.onSubmit(handleSingleSubmit) + : recurringForm.onSubmit(handleRecurringSubmit); + + if (!isOpen) return null; return ( - + } - style={{ paddingBottom: 5 }} + style={{ paddingBottom: 5, marginBottom: 12 }} > - Recordings may fail if active streams or overlapping recordings use up - all available streams + Recordings may fail if active streams or overlapping recordings use up all available tuners. -
- + ) : ( + + + ({ + value: String(opt.value), + label: opt.label, + }))} + searchable + clearable + /> + + + form.setFieldValue('start_date', value || dayjs().toDate()) + } + valueFormat="MMM D, YYYY" + /> + form.setFieldValue('end_date', value)} + valueFormat="MMM D, YYYY" + minDate={form.values.start_date || undefined} + /> + + + + form.setFieldValue('start_time', toTimeString(value)) + } + withSeconds={false} + format="12" + amLabel="AM" + pmLabel="PM" + /> + + form.setFieldValue('end_time', toTimeString(value)) + } + withSeconds={false} + format="12" + amLabel="AM" + pmLabel="PM" + /> + + + + + + + + + + + Upcoming occurrences + + {upcomingOccurrences.length} + + {upcomingOccurrences.length === 0 ? ( + + No future airings currently scheduled. + + ) : ( + + {upcomingOccurrences.map((occ) => { + const occStart = toUserTime(occ.start_time); + const occEnd = toUserTime(occ.end_time); + return ( + + + + + {occStart.format('MMM D, YYYY')} + + + {occStart.format('h:mma')} – {occEnd.format('h:mma')} + + + + + + + + + ); + })} + + )} + + +
+ ); +}; + +const RecordingCard = ({ recording, onOpenDetails, onOpenRecurring }) => { const channels = useChannelsStore((s) => s.channels); const env_mode = useSettingsStore((s) => s.environment.env_mode); const showVideo = useVideoStore((s) => s.showVideo); const fetchRecordings = useChannelsStore((s) => s.fetchRecordings); + const { toUserTime, userNow } = useTimeHelpers(); const channel = channels?.[recording.channel]; const deleteRecording = (id) => { // Optimistically remove immediately from UI - try { useChannelsStore.getState().removeRecording(id); } catch {} + try { + useChannelsStore.getState().removeRecording(id); + } catch (error) { + console.error('Failed to optimistically remove recording', error); + } // Fire-and-forget server delete; websocket will keep others in sync API.deleteRecording(id).catch(() => { // On failure, fallback to refetch to restore state - try { useChannelsStore.getState().fetchRecordings(); } catch {} + try { + useChannelsStore.getState().fetchRecordings(); + } catch (error) { + console.error('Failed to refresh recordings after delete', error); + } }); }; @@ -312,6 +963,7 @@ const RecordingCard = ({ recording, category, onOpenDetails }) => { const recordingName = program.title || 'Custom Recording'; const subTitle = program.sub_title || ''; const description = program.description || customProps.description || ''; + const isRecurringRule = customProps?.rule?.type === 'recurring'; // Poster or channel logo const posterLogoId = customProps.poster_logo_id; @@ -323,20 +975,27 @@ const RecordingCard = ({ recording, category, onOpenDetails }) => { posterUrl = `${window.location.protocol}//${window.location.hostname}:5656${posterUrl}`; } - const start = dayjs(recording.start_time); - const end = dayjs(recording.end_time); - const now = dayjs(); + const start = toUserTime(recording.start_time); + const end = toUserTime(recording.end_time); + const now = userNow(); const status = customProps.status; const isTimeActive = now.isAfter(start) && now.isBefore(end); const isInterrupted = status === 'interrupted'; const isInProgress = isTimeActive; // Show as recording by time, regardless of status glitches const isUpcoming = now.isBefore(start); - const isSeriesGroup = Boolean(recording._group_count && recording._group_count > 1); + const isSeriesGroup = Boolean( + recording._group_count && recording._group_count > 1 + ); // Season/Episode display if present const season = customProps.season ?? program?.custom_properties?.season; const episode = customProps.episode ?? program?.custom_properties?.episode; - const onscreen = customProps.onscreen_episode ?? program?.custom_properties?.onscreen_episode; - const seLabel = season && episode ? `S${String(season).padStart(2,'0')}E${String(episode).padStart(2,'0')}` : (onscreen || null); + const onscreen = + customProps.onscreen_episode ?? + program?.custom_properties?.onscreen_episode; + const seLabel = + season && episode + ? 
`S${String(season).padStart(2, '0')}E${String(episode).padStart(2, '0')}` + : onscreen || null; const handleWatchLive = () => { if (!channel) return; @@ -354,15 +1013,25 @@ const RecordingCard = ({ recording, category, onOpenDetails }) => { if (env_mode === 'dev' && fileUrl.startsWith('/')) { fileUrl = `${window.location.protocol}//${window.location.hostname}:5656${fileUrl}`; } - showVideo(fileUrl, 'vod', { name: recordingName, logo: { url: posterUrl } }); + showVideo(fileUrl, 'vod', { + name: recordingName, + logo: { url: posterUrl }, + }); }; const handleRunComskip = async (e) => { e?.stopPropagation?.(); try { await API.runComskip(recording.id); - notifications.show({ title: 'Removing commercials', message: 'Queued comskip for this recording', color: 'blue.5', autoClose: 2000 }); - } catch {} + notifications.show({ + title: 'Removing commercials', + message: 'Queued comskip for this recording', + color: 'blue.5', + autoClose: 2000, + }); + } catch (error) { + console.error('Failed to queue comskip for recording', error); + } }; // Cancel handling for series groups @@ -370,6 +1039,10 @@ const RecordingCard = ({ recording, category, onOpenDetails }) => { const [busy, setBusy] = React.useState(false); const handleCancelClick = (e) => { e.stopPropagation(); + if (isRecurringRule) { + onOpenRecurring?.(recording, true); + return; + } if (isSeriesGroup) { setCancelOpen(true); } else { @@ -377,11 +1050,11 @@ const RecordingCard = ({ recording, category, onOpenDetails }) => { } }; - const seriesInfo = React.useMemo(() => { + const seriesInfo = (() => { const cp = customProps || {}; const pr = cp.program || {}; return { tvg_id: pr.tvg_id, title: pr.title }; - }, [customProps]); + })(); const removeUpcomingOnly = async () => { try { @@ -390,7 +1063,11 @@ const RecordingCard = ({ recording, category, onOpenDetails }) => { } finally { setBusy(false); setCancelOpen(false); - try { await fetchRecordings(); } catch {} + try { + await fetchRecordings(); + } catch (error) { + console.error('Failed to refresh recordings', error); + } } }; @@ -399,13 +1076,32 @@ const RecordingCard = ({ recording, category, onOpenDetails }) => { setBusy(true); const { tvg_id, title } = seriesInfo; if (tvg_id) { - try { await API.bulkRemoveSeriesRecordings({ tvg_id, title, scope: 'title' }); } catch {} - try { await API.deleteSeriesRule(tvg_id); } catch {} + try { + await API.bulkRemoveSeriesRecordings({ + tvg_id, + title, + scope: 'title', + }); + } catch (error) { + console.error('Failed to remove series recordings', error); + } + try { + await API.deleteSeriesRule(tvg_id); + } catch (error) { + console.error('Failed to delete series rule', error); + } } } finally { setBusy(false); setCancelOpen(false); - try { await fetchRecordings(); } catch {} + try { + await fetchRecordings(); + } catch (error) { + console.error( + 'Failed to refresh recordings after series removal', + error + ); + } } }; @@ -422,12 +1118,34 @@ const RecordingCard = ({ recording, category, onOpenDetails }) => { height: '100%', cursor: 'pointer', }} - onClick={() => onOpenDetails?.(recording)} + onClick={() => { + if (isRecurringRule) { + onOpenRecurring?.(recording, false); + } else { + onOpenDetails?.(recording); + } + }} > - - {isInterrupted ? 'Interrupted' : isInProgress ? 'Recording' : isUpcoming ? 'Scheduled' : 'Completed'} + + {isInterrupted + ? 'Interrupted' + : isInProgress + ? 'Recording' + : isUpcoming + ? 
'Scheduled' + : 'Completed'} {isInterrupted && } @@ -436,10 +1154,19 @@ const RecordingCard = ({ recording, category, onOpenDetails }) => { {recordingName} {isSeriesGroup && ( - Series + + Series + + )} + {isRecurringRule && ( + + Recurring + )} {seLabel && !isSeriesGroup && ( - {seLabel} + + {seLabel} + )} @@ -472,8 +1199,12 @@ const RecordingCard = ({ recording, category, onOpenDetails }) => { {!isSeriesGroup && subTitle && ( - Episode - {subTitle} + + Episode + + + {subTitle} + )} @@ -489,47 +1220,85 @@ const RecordingCard = ({ recording, category, onOpenDetails }) => { {isSeriesGroup ? 'Next recording' : 'Time'} - {start.format('MMM D, YYYY h:mma')} – {end.format('h:mma')} + + {start.format('MMM D, YYYY h:mma')} – {end.format('h:mma')} + {!isSeriesGroup && description && ( - onOpenDetails?.(recording)} /> + onOpenDetails?.(recording)} + /> )} {isInterrupted && customProps.interrupted_reason && ( - {customProps.interrupted_reason} + + {customProps.interrupted_reason} + )} {isInProgress && ( - )} {!isUpcoming && ( - + )} - {!isUpcoming && customProps?.status === 'completed' && (!customProps?.comskip || customProps?.comskip?.status !== 'completed') && ( - - )} + {!isUpcoming && + customProps?.status === 'completed' && + (!customProps?.comskip || + customProps?.comskip?.status !== 'completed') && ( + + )} {/* If this card is a grouped upcoming series, show count */} {recording._group_count > 1 && ( - + Next of {recording._group_count} )} @@ -540,12 +1309,27 @@ const RecordingCard = ({ recording, category, onOpenDetails }) => { // Stacked look for series groups: render two shadow layers behind the main card return ( - setCancelOpen(false)} title="Cancel Series" centered size="md" zIndex={9999}> + setCancelOpen(false)} + title="Cancel Series" + centered + size="md" + zIndex={9999} + > This is a series rule. What would you like to cancel? 
- - + + @@ -586,10 +1370,14 @@ const DVRPage = () => { const fetchRecordings = useChannelsStore((s) => s.fetchRecordings); const channels = useChannelsStore((s) => s.channels); const fetchChannels = useChannelsStore((s) => s.fetchChannels); + const fetchRecurringRules = useChannelsStore((s) => s.fetchRecurringRules); + const { toUserTime, userNow } = useTimeHelpers(); const [recordingModalOpen, setRecordingModalOpen] = useState(false); const [detailsOpen, setDetailsOpen] = useState(false); const [detailsRecording, setDetailsRecording] = useState(null); + const [ruleModal, setRuleModal] = useState({ open: false, ruleId: null }); + const [editRecording, setEditRecording] = useState(null); const openRecordingModal = () => { setRecordingModalOpen(true); @@ -605,27 +1393,47 @@ const DVRPage = () => { }; const closeDetails = () => setDetailsOpen(false); + const openRuleModal = (recording) => { + const ruleId = recording?.custom_properties?.rule?.id; + if (!ruleId) { + openDetails(recording); + return; + } + setDetailsOpen(false); + setDetailsRecording(null); + setEditRecording(null); + setRuleModal({ open: true, ruleId }); + }; + + const closeRuleModal = () => setRuleModal({ open: false, ruleId: null }); + useEffect(() => { - // Ensure channels and recordings are loaded for this view if (!channels || Object.keys(channels).length === 0) { fetchChannels(); } fetchRecordings(); - }, []); + fetchRecurringRules(); + }, [channels, fetchChannels, fetchRecordings, fetchRecurringRules]); // Re-render every second so time-based bucketing updates without a refresh - const [now, setNow] = useState(dayjs()); + const [now, setNow] = useState(userNow()); useEffect(() => { - const interval = setInterval(() => setNow(dayjs()), 1000); + const interval = setInterval(() => setNow(userNow()), 1000); return () => clearInterval(interval); - }, []); + }, [userNow]); + + useEffect(() => { + setNow(userNow()); + }, [userNow]); // Categorize recordings const { inProgress, upcoming, completed } = useMemo(() => { const inProgress = []; const upcoming = []; const completed = []; - const list = Array.isArray(recordings) ? recordings : Object.values(recordings || {}); + const list = Array.isArray(recordings) + ? recordings + : Object.values(recordings || {}); // ID-based dedupe guard in case store returns duplicates const seenIds = new Set(); @@ -635,8 +1443,8 @@ const DVRPage = () => { if (seenIds.has(k)) continue; seenIds.add(k); } - const s = dayjs(rec.start_time); - const e = dayjs(rec.end_time); + const s = toUserTime(rec.start_time); + const e = toUserTime(rec.end_time); const status = rec.custom_properties?.status; if (status === 'interrupted' || status === 'completed') { completed.push(rec); @@ -654,7 +1462,10 @@ const DVRPage = () => { for (const r of arr) { const cp = r.custom_properties || {}; const pr = cp.program || {}; - const sig = pr?.id != null ? `id:${pr.id}` : `slot:${r.channel}|${r.start_time}|${r.end_time}|${(pr.title||'')}`; + const sig = + pr?.id != null + ? 
`id:${pr.id}` + : `slot:${r.channel}|${r.start_time}|${r.end_time}|${pr.title || ''}`; if (sigs.has(sig)) continue; sigs.add(sig); out.push(r); @@ -662,11 +1473,15 @@ const DVRPage = () => { return out; }; - const inProgressDedup = dedupeByProgramOrSlot(inProgress).sort((a, b) => dayjs(b.start_time) - dayjs(a.start_time)); + const inProgressDedup = dedupeByProgramOrSlot(inProgress).sort( + (a, b) => toUserTime(b.start_time) - toUserTime(a.start_time) + ); // Group upcoming by series title+tvg_id (keep only next episode) const grouped = new Map(); - const upcomingDedup = dedupeByProgramOrSlot(upcoming).sort((a, b) => dayjs(a.start_time) - dayjs(b.start_time)); + const upcomingDedup = dedupeByProgramOrSlot(upcoming).sort( + (a, b) => toUserTime(a.start_time) - toUserTime(b.start_time) + ); for (const rec of upcomingDedup) { const cp = rec.custom_properties || {}; const prog = cp.program || {}; @@ -683,9 +1498,13 @@ const DVRPage = () => { item._group_count = e.count; return item; }); - completed.sort((a, b) => dayjs(b.end_time) - dayjs(a.end_time)); - return { inProgress: inProgressDedup, upcoming: upcomingGrouped, completed }; - }, [recordings]); + completed.sort((a, b) => toUserTime(b.end_time) - toUserTime(a.end_time)); + return { + inProgress: inProgressDedup, + upcoming: upcomingGrouped, + completed, + }; + }, [recordings, now, toUserTime]); return ( @@ -710,9 +1529,21 @@ const DVRPage = () => { Currently Recording {inProgress.length} - + {inProgress.map((rec) => ( - + ))} {inProgress.length === 0 && ( @@ -727,9 +1558,21 @@ const DVRPage = () => { Upcoming Recordings {upcoming.length} - + {upcoming.map((rec) => ( - + ))} {upcoming.length === 0 && ( @@ -744,9 +1587,21 @@ const DVRPage = () => { Previously Recorded {completed.length} - + {completed.map((rec) => ( - + ))} {completed.length === 0 && ( @@ -762,6 +1617,22 @@ const DVRPage = () => { onClose={closeRecordingModal} /> + setEditRecording(null)} + /> + + { + setRuleModal({ open: false, ruleId: null }); + setEditRecording(occ); + }} + /> + {/* Details Modal */} {detailsRecording && ( { onClose={closeDetails} recording={detailsRecording} channel={channels[detailsRecording.channel]} - posterUrl={( - detailsRecording.custom_properties?.poster_logo_id + posterUrl={ + (detailsRecording.custom_properties?.poster_logo_id ? 
`/api/channels/logos/${detailsRecording.custom_properties.poster_logo_id}/cache/` - : detailsRecording.custom_properties?.poster_url || channels[detailsRecording.channel]?.logo?.cache_url - ) || '/logo.png'} + : detailsRecording.custom_properties?.poster_url || + channels[detailsRecording.channel]?.logo?.cache_url) || + '/logo.png' + } env_mode={useSettingsStore.getState().environment.env_mode} onWatchLive={() => { const rec = detailsRecording; - const now = dayjs(); - const s = dayjs(rec.start_time); - const e = dayjs(rec.end_time); + const now = userNow(); + const s = toUserTime(rec.start_time); + const e = toUserTime(rec.end_time); if (now.isAfter(s) && now.isBefore(e)) { // call into child RecordingCard behavior by constructing a URL like there const channel = channels[rec.channel]; @@ -792,12 +1665,32 @@ const DVRPage = () => { } }} onWatchRecording={() => { - let fileUrl = detailsRecording.custom_properties?.file_url || detailsRecording.custom_properties?.output_file_url; + let fileUrl = + detailsRecording.custom_properties?.file_url || + detailsRecording.custom_properties?.output_file_url; if (!fileUrl) return; - if (useSettingsStore.getState().environment.env_mode === 'dev' && fileUrl.startsWith('/')) { + if ( + useSettingsStore.getState().environment.env_mode === 'dev' && + fileUrl.startsWith('/') + ) { fileUrl = `${window.location.protocol}//${window.location.hostname}:5656${fileUrl}`; } - useVideoStore.getState().showVideo(fileUrl, 'vod', { name: detailsRecording.custom_properties?.program?.title || 'Recording', logo: { url: (detailsRecording.custom_properties?.poster_logo_id ? `/api/channels/logos/${detailsRecording.custom_properties.poster_logo_id}/cache/` : channels[detailsRecording.channel]?.logo?.cache_url) || '/logo.png' } }); + useVideoStore.getState().showVideo(fileUrl, 'vod', { + name: + detailsRecording.custom_properties?.program?.title || + 'Recording', + logo: { + url: + (detailsRecording.custom_properties?.poster_logo_id + ? 
diff --git a/frontend/src/pages/Settings.jsx b/frontend/src/pages/Settings.jsx
index fb1f9184..d61b3b80 100644
--- a/frontend/src/pages/Settings.jsx
+++ b/frontend/src/pages/Settings.jsx
@@ -1,4 +1,10 @@
-import React, { useEffect, useState } from 'react';
+import React, {
+  useCallback,
+  useEffect,
+  useMemo,
+  useRef,
+  useState,
+} from 'react';
 import API from '../api';
 import useSettingsStore from '../store/settings';
 import useUserAgentsStore from '../store/userAgents';
@@ -11,6 +17,7 @@ import {
   Center,
   Flex,
   Group,
+  FileInput,
   MultiSelect,
   Select,
   Stack,
@@ -20,6 +27,7 @@ import {
   NumberInput,
 } from '@mantine/core';
 import { isNotEmpty, useForm } from '@mantine/form';
+import { notifications } from '@mantine/notifications';
 import UserAgentsTable from '../components/tables/UserAgentsTable';
 import StreamProfilesTable from '../components/tables/StreamProfilesTable';
 import useLocalStorage from '../hooks/useLocalStorage';
@@ -33,6 +41,140 @@ import {
 import ConfirmationDialog from '../components/ConfirmationDialog';
 import useWarningsStore from '../store/warnings';

+const TIMEZONE_FALLBACKS = [
+  'UTC',
+  'America/New_York',
+  'America/Chicago',
+  'America/Denver',
+  'America/Los_Angeles',
+  'America/Phoenix',
+  'America/Anchorage',
+  'Pacific/Honolulu',
+  'Europe/London',
+  'Europe/Paris',
+  'Europe/Berlin',
+  'Europe/Madrid',
+  'Europe/Warsaw',
+  'Europe/Moscow',
+  'Asia/Dubai',
+  'Asia/Kolkata',
+  'Asia/Shanghai',
+  'Asia/Tokyo',
+  'Asia/Seoul',
+  'Australia/Sydney',
+];
+
+const getSupportedTimeZones = () => {
+  try {
+    if (typeof Intl.supportedValuesOf === 'function') {
+      return Intl.supportedValuesOf('timeZone');
+    }
+  } catch (error) {
+    console.warn('Unable to enumerate supported time zones:', error);
+  }
+  return TIMEZONE_FALLBACKS;
+};
+
+const getTimeZoneOffsetMinutes = (date, timeZone) => {
+  try {
+    const dtf = new Intl.DateTimeFormat('en-US', {
+      timeZone,
+      year: 'numeric',
+      month: '2-digit',
+      day: '2-digit',
+      hour: '2-digit',
+      minute: '2-digit',
+      second: '2-digit',
+      hourCycle: 'h23',
+    });
+    const parts = dtf.formatToParts(date).reduce((acc, part) => {
+      if (part.type !== 'literal') acc[part.type] = part.value;
+      return acc;
+    }, {});
+    const asUTC = Date.UTC(
+      Number(parts.year),
+      Number(parts.month) - 1,
+      Number(parts.day),
+      Number(parts.hour),
+      Number(parts.minute),
+      Number(parts.second)
+    );
+    return (asUTC - date.getTime()) / 60000;
+  } catch (error) {
+    console.warn(`Failed to compute offset for ${timeZone}:`, error);
+    return 0;
+  }
+};
+
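// NOTE (illustrative sketch, not part of the patch): sanity-checking the helper above.
// formatToParts() renders the instant as a wall-clock time in the target zone,
// Date.UTC() re-reads that wall clock as if it were UTC, and the difference is the
// zone's UTC offset in minutes at that instant:
const jan1 = new Date(Date.UTC(2025, 0, 1, 12, 0, 0));
getTimeZoneOffsetMinutes(jan1, 'America/New_York'); // -300 -> UTC-05:00 (winter)
getTimeZoneOffsetMinutes(jan1, 'Asia/Kolkata'); // 330 -> UTC+05:30 (no DST)
getTimeZoneOffsetMinutes(jan1, 'UTC'); // 0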
+const formatOffset = (minutes) => {
+  const rounded = Math.round(minutes);
+  const sign = rounded < 0 ? '-' : '+';
+  const absolute = Math.abs(rounded);
+  const hours = String(Math.floor(absolute / 60)).padStart(2, '0');
+  const mins = String(absolute % 60).padStart(2, '0');
+  return `UTC${sign}${hours}:${mins}`;
+};
+
+const buildTimeZoneOptions = (preferredZone) => {
+  const zones = getSupportedTimeZones();
+  const referenceYear = new Date().getUTCFullYear();
+  const janDate = new Date(Date.UTC(referenceYear, 0, 1, 12, 0, 0));
+  const julDate = new Date(Date.UTC(referenceYear, 6, 1, 12, 0, 0));
+
+  const options = zones
+    .map((zone) => {
+      const janOffset = getTimeZoneOffsetMinutes(janDate, zone);
+      const julOffset = getTimeZoneOffsetMinutes(julDate, zone);
+      const currentOffset = getTimeZoneOffsetMinutes(new Date(), zone);
+      const minOffset = Math.min(janOffset, julOffset);
+      const maxOffset = Math.max(janOffset, julOffset);
+      const usesDst = minOffset !== maxOffset;
+      const labelParts = [`now ${formatOffset(currentOffset)}`];
+      if (usesDst) {
+        labelParts.push(
+          `DST range ${formatOffset(minOffset)} to ${formatOffset(maxOffset)}`
+        );
+      }
+      return {
+        value: zone,
+        label: `${zone} (${labelParts.join(' | ')})`,
+        numericOffset: minOffset,
+      };
+    })
+    .sort((a, b) => {
+      if (a.numericOffset !== b.numericOffset) {
+        return a.numericOffset - b.numericOffset;
+      }
+      return a.value.localeCompare(b.value);
+    });
+  if (
+    preferredZone &&
+    !options.some((option) => option.value === preferredZone)
+  ) {
+    const currentOffset = getTimeZoneOffsetMinutes(new Date(), preferredZone);
+    options.push({
+      value: preferredZone,
+      label: `${preferredZone} (now ${formatOffset(currentOffset)})`,
+      numericOffset: currentOffset,
+    });
+    options.sort((a, b) => {
+      if (a.numericOffset !== b.numericOffset) {
+        return a.numericOffset - b.numericOffset;
+      }
+      return a.value.localeCompare(b.value);
+    });
+  }
+  return options;
+};
+
+const getDefaultTimeZone = () => {
+  try {
+    return Intl.DateTimeFormat().resolvedOptions().timeZone || 'UTC';
+  } catch (error) {
+    return 'UTC';
+  }
+};
+
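// NOTE (illustrative sketch, not part of the patch): with the helpers above,
// buildTimeZoneOptions() produces option entries shaped roughly like:
//   { value: 'Europe/Berlin',
//     label: 'Europe/Berlin (now UTC+01:00 | DST range UTC+01:00 to UTC+02:00)',
//     numericOffset: 60 }
// Zones without DST get only the "now" part, e.g. 'Asia/Kolkata (now UTC+05:30)'.
// The list is sorted by offset, then by name, and the user's saved zone is appended
// if the browser does not report it.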
 const SettingsPage = () => {
   const settings = useSettingsStore((s) => s.settings);
   const userAgents = useUserAgentsStore((s) => s.userAgents);
@@ -59,11 +201,51 @@ const SettingsPage = () => {
   // Store pending changed settings when showing the dialog
   const [pendingChangedSettings, setPendingChangedSettings] = useState(null);

+  const [comskipFile, setComskipFile] = useState(null);
+  const [comskipUploadLoading, setComskipUploadLoading] = useState(false);
+  const [comskipConfig, setComskipConfig] = useState({
+    path: '',
+    exists: false,
+  });

   // UI / local storage settings
   const [tableSize, setTableSize] = useLocalStorage('table-size', 'default');
   const [timeFormat, setTimeFormat] = useLocalStorage('time-format', '12h');
   const [dateFormat, setDateFormat] = useLocalStorage('date-format', 'mdy');
+  const [timeZone, setTimeZone] = useLocalStorage(
+    'time-zone',
+    getDefaultTimeZone()
+  );
+  const timeZoneOptions = useMemo(
+    () => buildTimeZoneOptions(timeZone),
+    [timeZone]
+  );
+  const timeZoneSyncedRef = useRef(false);
+
+  const persistTimeZoneSetting = useCallback(
+    async (tzValue) => {
+      try {
+        const existing = settings['system-time-zone'];
+        if (existing && existing.id) {
+          await API.updateSetting({ ...existing, value: tzValue });
+        } else {
+          await API.createSetting({
+            key: 'system-time-zone',
+            name: 'System Time Zone',
+            value: tzValue,
+          });
+        }
+      } catch (error) {
+        console.error('Failed to persist time zone setting', error);
+        notifications.show({
+          title: 'Failed to update time zone',
+          message: 'Could not save the selected time zone. Please try again.',
+          color: 'red',
+        });
+      }
+    },
+    [settings]
+  );

   const regionChoices = REGION_CHOICES;
@@ -80,6 +262,7 @@ const SettingsPage = () => {
       'dvr-tv-fallback-template': '',
       'dvr-movie-fallback-template': '',
       'dvr-comskip-enabled': false,
+      'dvr-comskip-custom-path': '',
       'dvr-pre-offset-minutes': 0,
       'dvr-post-offset-minutes': 0,
     },
@@ -158,6 +341,12 @@ const SettingsPage = () => {
       );
       form.setValues(formValues);
+      if (formValues['dvr-comskip-custom-path']) {
+        setComskipConfig((prev) => ({
+          path: formValues['dvr-comskip-custom-path'],
+          exists: prev.exists,
+        }));
+      }

       const networkAccessSettings = JSON.parse(
         settings['network-access'].value || '{}'
@@ -177,8 +366,39 @@ const SettingsPage = () => {
           console.error('Error parsing proxy settings:', error);
         }
       }
+
+      const tzSetting = settings['system-time-zone'];
+      if (tzSetting?.value) {
+        timeZoneSyncedRef.current = true;
+        setTimeZone((prev) =>
+          prev === tzSetting.value ? prev : tzSetting.value
+        );
+      } else if (!timeZoneSyncedRef.current && timeZone) {
+        timeZoneSyncedRef.current = true;
+        persistTimeZoneSetting(timeZone);
+      }
     }
-  }, [settings]);
+  }, [settings, timeZone, setTimeZone, persistTimeZoneSetting]);
+
+  useEffect(() => {
+    const loadComskipConfig = async () => {
+      try {
+        const response = await API.getComskipConfig();
+        if (response) {
+          setComskipConfig({
+            path: response.path || '',
+            exists: Boolean(response.exists),
+          });
+          if (response.path) {
+            form.setFieldValue('dvr-comskip-custom-path', response.path);
+          }
+        }
+      } catch (error) {
+        console.error('Failed to load comskip config', error);
+      }
+    };
+    loadComskipConfig();
+  }, []);

   const onSubmit = async () => {
     const values = form.getValues();
@@ -263,6 +483,39 @@ const SettingsPage = () => {
     setProxySettingsSaved(true);
   };

+  const onComskipUpload = async () => {
+    if (!comskipFile) {
+      return;
+    }
+
+    setComskipUploadLoading(true);
+    try {
+      const response = await API.uploadComskipIni(comskipFile);
+      if (response?.path) {
+        notifications.show({
+          title: 'comskip.ini uploaded',
+          message: response.path,
+          autoClose: 3000,
+          color: 'green',
+        });
+        form.setFieldValue('dvr-comskip-custom-path', response.path);
+        useSettingsStore.getState().updateSetting({
+          ...(settings['dvr-comskip-custom-path'] || {
+            key: 'dvr-comskip-custom-path',
+            name: 'DVR Comskip Custom Path',
+          }),
+          value: response.path,
+        });
+        setComskipConfig({ path: response.path, exists: true });
+      }
+    } catch (error) {
+      console.error('Failed to upload comskip.ini', error);
+    } finally {
+      setComskipUploadLoading(false);
+      setComskipFile(null);
+    }
+  };
+
   const resetProxySettingsToDefaults = () => {
     const defaultValues = {
       buffering_timeout: 15,
@@ -296,13 +549,19 @@ const SettingsPage = () => {
   const onUISettingsChange = (name, value) => {
     switch (name) {
       case 'table-size':
-        setTableSize(value);
+        if (value) setTableSize(value);
         break;
       case 'time-format':
-        setTimeFormat(value);
+        if (value) setTimeFormat(value);
         break;
       case 'date-format':
-        setDateFormat(value);
+        if (value) setDateFormat(value);
+        break;
+      case 'time-zone':
+        if (value) {
+          setTimeZone(value);
+          persistTimeZoneSetting(value);
+        }
         break;
     }
   };
@@ -434,6 +693,14 @@ const SettingsPage = () => {
           },
         ]}
       />
+