Mirror of https://github.com/Dispatcharr/Dispatcharr.git (synced 2026-01-23 02:35:14 +00:00)

Commit 8db9689999: Merge branch 'DVR-Update' into dev
16 changed files with 2420 additions and 273 deletions
@@ -13,12 +13,14 @@ from .api_views import (
    UpdateChannelMembershipAPIView,
    BulkUpdateChannelMembershipAPIView,
    RecordingViewSet,
    RecurringRecordingRuleViewSet,
    GetChannelStreamsAPIView,
    SeriesRulesAPIView,
    DeleteSeriesRuleAPIView,
    EvaluateSeriesRulesAPIView,
    BulkRemoveSeriesRecordingsAPIView,
    BulkDeleteUpcomingRecordingsAPIView,
    ComskipConfigAPIView,
)

app_name = 'channels' # for DRF routing

@@ -30,6 +32,7 @@ router.register(r'channels', ChannelViewSet, basename='channel')
router.register(r'logos', LogoViewSet, basename='logo')
router.register(r'profiles', ChannelProfileViewSet, basename='profile')
router.register(r'recordings', RecordingViewSet, basename='recording')
router.register(r'recurring-rules', RecurringRecordingRuleViewSet, basename='recurring-rule')

urlpatterns = [
    # Bulk delete is a single APIView, not a ViewSet

@@ -46,6 +49,7 @@ urlpatterns = [
    path('series-rules/bulk-remove/', BulkRemoveSeriesRecordingsAPIView.as_view(), name='bulk_remove_series_recordings'),
    path('series-rules/<str:tvg_id>/', DeleteSeriesRuleAPIView.as_view(), name='delete_series_rule'),
    path('recordings/bulk-delete-upcoming/', BulkDeleteUpcomingRecordingsAPIView.as_view(), name='bulk_delete_upcoming_recordings'),
    path('dvr/comskip-config/', ComskipConfigAPIView.as_view(), name='comskip_config'),
]

urlpatterns += router.urls
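For orientation, the router line generates the usual DRF list/detail routes for recurring rules, while the explicit path() entries remain single APIViews. A minimal sketch of resolving the new names, assuming the app's URLs are namespaced as 'channels' and mounted under /api/channels/ (the prefix is an assumption based on the frontend calls later in this diff):

# Sketch only; namespace and mount prefix depend on the project-level urls.py.
from django.urls import reverse

reverse('channels:comskip_config')                  # -> .../dvr/comskip-config/
reverse('channels:recurring-rule-list')             # DRF router-generated name
reverse('channels:recurring-rule-detail', args=[1])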
@ -28,6 +28,7 @@ from .models import (
|
|||
ChannelProfile,
|
||||
ChannelProfileMembership,
|
||||
Recording,
|
||||
RecurringRecordingRule,
|
||||
)
|
||||
from .serializers import (
|
||||
StreamSerializer,
@ -38,8 +39,17 @@ from .serializers import (
|
|||
BulkChannelProfileMembershipSerializer,
|
||||
ChannelProfileSerializer,
|
||||
RecordingSerializer,
|
||||
RecurringRecordingRuleSerializer,
|
||||
)
|
||||
from .tasks import (
|
||||
match_epg_channels,
|
||||
evaluate_series_rules,
|
||||
evaluate_series_rules_impl,
|
||||
match_single_channel_epg,
|
||||
match_selected_channels_epg,
|
||||
sync_recurring_rule_impl,
|
||||
purge_recurring_rule_impl,
|
||||
)
|
||||
from .tasks import match_epg_channels, evaluate_series_rules, evaluate_series_rules_impl, match_single_channel_epg, match_selected_channels_epg
|
||||
import django_filters
|
||||
from django_filters.rest_framework import DjangoFilterBackend
|
||||
from rest_framework.filters import SearchFilter, OrderingFilter
@ -49,10 +59,12 @@ from django.db.models import Q
|
|||
from django.http import StreamingHttpResponse, FileResponse, Http404
|
||||
from django.utils import timezone
|
||||
import mimetypes
|
||||
from django.conf import settings
|
||||
|
||||
from rest_framework.pagination import PageNumberPagination
|
||||
|
||||
|
||||
|
||||
logger = logging.getLogger(__name__)
@ -1684,6 +1696,41 @@ class BulkUpdateChannelMembershipAPIView(APIView):
|
|||
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
|
||||
|
||||
|
||||
class RecurringRecordingRuleViewSet(viewsets.ModelViewSet):
|
||||
queryset = RecurringRecordingRule.objects.all().select_related("channel")
|
||||
serializer_class = RecurringRecordingRuleSerializer
|
||||
|
||||
def get_permissions(self):
|
||||
return [IsAdmin()]
|
||||
|
||||
def perform_create(self, serializer):
|
||||
rule = serializer.save()
|
||||
try:
|
||||
sync_recurring_rule_impl(rule.id, drop_existing=True)
|
||||
except Exception as err:
|
||||
logger.warning(f"Failed to initialize recurring rule {rule.id}: {err}")
|
||||
return rule
|
||||
|
||||
def perform_update(self, serializer):
|
||||
rule = serializer.save()
|
||||
try:
|
||||
if rule.enabled:
|
||||
sync_recurring_rule_impl(rule.id, drop_existing=True)
|
||||
else:
|
||||
purge_recurring_rule_impl(rule.id)
|
||||
except Exception as err:
|
||||
logger.warning(f"Failed to resync recurring rule {rule.id}: {err}")
|
||||
return rule
|
||||
|
||||
def perform_destroy(self, instance):
|
||||
rule_id = instance.id
|
||||
super().perform_destroy(instance)
|
||||
try:
|
||||
purge_recurring_rule_impl(rule_id)
|
||||
except Exception as err:
|
||||
logger.warning(f"Failed to purge recordings for rule {rule_id}: {err}")
class RecordingViewSet(viewsets.ModelViewSet):
|
||||
queryset = Recording.objects.all()
|
||||
serializer_class = RecordingSerializer
@ -1863,6 +1910,49 @@ class RecordingViewSet(viewsets.ModelViewSet):
|
|||
return response
|
||||
|
||||
|
||||
class ComskipConfigAPIView(APIView):
|
||||
"""Upload or inspect the custom comskip.ini used by DVR processing."""
|
||||
|
||||
parser_classes = [MultiPartParser, FormParser]
|
||||
|
||||
def get_permissions(self):
|
||||
return [IsAdmin()]
|
||||
|
||||
def get(self, request):
|
||||
path = CoreSettings.get_dvr_comskip_custom_path()
|
||||
exists = bool(path and os.path.exists(path))
|
||||
return Response({"path": path, "exists": exists})
|
||||
|
||||
def post(self, request):
|
||||
uploaded = request.FILES.get("file") or request.FILES.get("comskip_ini")
|
||||
if not uploaded:
|
||||
return Response({"error": "No file provided"}, status=status.HTTP_400_BAD_REQUEST)
|
||||
|
||||
name = (uploaded.name or "").lower()
|
||||
if not name.endswith(".ini"):
|
||||
return Response({"error": "Only .ini files are allowed"}, status=status.HTTP_400_BAD_REQUEST)
|
||||
|
||||
if uploaded.size and uploaded.size > 1024 * 1024:
|
||||
return Response({"error": "File too large (limit 1MB)"}, status=status.HTTP_400_BAD_REQUEST)
|
||||
|
||||
dest_dir = os.path.join(settings.MEDIA_ROOT, "comskip")
|
||||
os.makedirs(dest_dir, exist_ok=True)
|
||||
dest_path = os.path.join(dest_dir, "comskip.ini")
|
||||
|
||||
try:
|
||||
with open(dest_path, "wb") as dest:
|
||||
for chunk in uploaded.chunks():
|
||||
dest.write(chunk)
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to save uploaded comskip.ini: {e}")
|
||||
return Response({"error": "Unable to save file"}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
|
||||
|
||||
# Persist path setting so DVR processing picks it up immediately
|
||||
CoreSettings.set_dvr_comskip_custom_path(dest_path)
|
||||
|
||||
return Response({"success": True, "path": dest_path, "exists": os.path.exists(dest_path)})
class BulkDeleteUpcomingRecordingsAPIView(APIView):
|
||||
"""Delete all upcoming (future) recordings."""
|
||||
def get_permissions(self):
|
||||
apps/channels/migrations/0026_recurringrecordingrule.py (new file, 31 lines)

@@ -0,0 +1,31 @@
# Generated by Django 5.0.14 on 2025-09-18 14:56

import django.db.models.deletion
from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('dispatcharr_channels', '0025_alter_channelgroupm3uaccount_custom_properties_and_more'),
    ]

    operations = [
        migrations.CreateModel(
            name='RecurringRecordingRule',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('days_of_week', models.JSONField(default=list)),
                ('start_time', models.TimeField()),
                ('end_time', models.TimeField()),
                ('enabled', models.BooleanField(default=True)),
                ('name', models.CharField(blank=True, max_length=255)),
                ('created_at', models.DateTimeField(auto_now_add=True)),
                ('updated_at', models.DateTimeField(auto_now=True)),
                ('channel', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='recurring_rules', to='dispatcharr_channels.channel')),
            ],
            options={
                'ordering': ['channel', 'start_time'],
            },
        ),
    ]
@@ -0,0 +1,23 @@
# Generated by Django 5.2.4 on 2025-10-05 20:50

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('dispatcharr_channels', '0026_recurringrecordingrule'),
    ]

    operations = [
        migrations.AddField(
            model_name='recurringrecordingrule',
            name='end_date',
            field=models.DateField(blank=True, null=True),
        ),
        migrations.AddField(
            model_name='recurringrecordingrule',
            name='start_date',
            field=models.DateField(blank=True, null=True),
        ),
    ]
@@ -601,3 +601,35 @@ class Recording(models.Model):

    def __str__(self):
        return f"{self.channel.name} - {self.start_time} to {self.end_time}"


class RecurringRecordingRule(models.Model):
    """Rule describing a recurring manual DVR schedule."""

    channel = models.ForeignKey(
        "Channel",
        on_delete=models.CASCADE,
        related_name="recurring_rules",
    )
    days_of_week = models.JSONField(default=list)
    start_time = models.TimeField()
    end_time = models.TimeField()
    enabled = models.BooleanField(default=True)
    name = models.CharField(max_length=255, blank=True)
    start_date = models.DateField(null=True, blank=True)
    end_date = models.DateField(null=True, blank=True)
    created_at = models.DateTimeField(auto_now_add=True)
    updated_at = models.DateTimeField(auto_now=True)

    class Meta:
        ordering = ["channel", "start_time"]

    def __str__(self):
        channel_name = getattr(self.channel, "name", str(self.channel_id))
        return f"Recurring rule for {channel_name}"

    def cleaned_days(self):
        try:
            return sorted({int(d) for d in (self.days_of_week or []) if 0 <= int(d) <= 6})
        except Exception:
            return []
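cleaned_days() deliberately never raises: values are coerced to int, deduplicated, and restricted to 0-6, and any conversion error collapses the whole result to an empty list. A couple of illustrative values (chosen for the example, not taken from the diff):

RecurringRecordingRule(days_of_week=["5", 5, 1, 9]).cleaned_days()   # -> [1, 5]  (9 is out of range, "5" coerces)
RecurringRecordingRule(days_of_week="garbage").cleaned_days()        # -> []      (conversion error falls back to empty)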
@ -1,4 +1,6 @@
|
|||
import json
|
||||
from datetime import datetime
|
||||
|
||||
from rest_framework import serializers
|
||||
from .models import (
|
||||
Stream,
@ -10,6 +12,7 @@ from .models import (
|
|||
ChannelProfile,
|
||||
ChannelProfileMembership,
|
||||
Recording,
|
||||
RecurringRecordingRule,
|
||||
)
|
||||
from apps.epg.serializers import EPGDataSerializer
|
||||
from core.models import StreamProfile
@ -454,6 +457,13 @@ class RecordingSerializer(serializers.ModelSerializer):
|
|||
start_time = data.get("start_time")
|
||||
end_time = data.get("end_time")
|
||||
|
||||
if start_time and timezone.is_naive(start_time):
|
||||
start_time = timezone.make_aware(start_time, timezone.get_current_timezone())
|
||||
data["start_time"] = start_time
|
||||
if end_time and timezone.is_naive(end_time):
|
||||
end_time = timezone.make_aware(end_time, timezone.get_current_timezone())
|
||||
data["end_time"] = end_time
|
||||
|
||||
# If this is an EPG-based recording (program provided), apply global pre/post offsets
|
||||
try:
|
||||
cp = data.get("custom_properties") or {}
@ -497,3 +507,56 @@ class RecordingSerializer(serializers.ModelSerializer):
|
|||
raise serializers.ValidationError("End time must be after start time.")
|
||||
|
||||
return data
|
||||
|
||||
|
||||
class RecurringRecordingRuleSerializer(serializers.ModelSerializer):
|
||||
class Meta:
|
||||
model = RecurringRecordingRule
|
||||
fields = "__all__"
|
||||
read_only_fields = ["created_at", "updated_at"]
|
||||
|
||||
def validate_days_of_week(self, value):
|
||||
if not value:
|
||||
raise serializers.ValidationError("Select at least one day of the week")
|
||||
cleaned = []
|
||||
for entry in value:
|
||||
try:
|
||||
iv = int(entry)
|
||||
except (TypeError, ValueError):
|
||||
raise serializers.ValidationError("Days of week must be integers 0-6")
|
||||
if iv < 0 or iv > 6:
|
||||
raise serializers.ValidationError("Days of week must be between 0 (Monday) and 6 (Sunday)")
|
||||
cleaned.append(iv)
|
||||
return sorted(set(cleaned))
|
||||
|
||||
def validate(self, attrs):
|
||||
start = attrs.get("start_time") or getattr(self.instance, "start_time", None)
|
||||
end = attrs.get("end_time") or getattr(self.instance, "end_time", None)
|
||||
start_date = attrs.get("start_date") if "start_date" in attrs else getattr(self.instance, "start_date", None)
|
||||
end_date = attrs.get("end_date") if "end_date" in attrs else getattr(self.instance, "end_date", None)
|
||||
if start_date is None:
|
||||
existing_start = getattr(self.instance, "start_date", None)
|
||||
if existing_start is None:
|
||||
raise serializers.ValidationError("Start date is required")
|
||||
if start_date and end_date and end_date < start_date:
|
||||
raise serializers.ValidationError("End date must be on or after start date")
|
||||
if end_date is None:
|
||||
existing_end = getattr(self.instance, "end_date", None)
|
||||
if existing_end is None:
|
||||
raise serializers.ValidationError("End date is required")
|
||||
if start and end and start_date and end_date:
|
||||
start_dt = datetime.combine(start_date, start)
|
||||
end_dt = datetime.combine(end_date, end)
|
||||
if end_dt <= start_dt:
|
||||
raise serializers.ValidationError("End datetime must be after start datetime")
|
||||
elif start and end and end == start:
|
||||
raise serializers.ValidationError("End time must be different from start time")
|
||||
# Normalize empty strings to None for dates
|
||||
if attrs.get("end_date") == "":
|
||||
attrs["end_date"] = None
|
||||
if attrs.get("start_date") == "":
|
||||
attrs["start_date"] = None
|
||||
return super().validate(attrs)
|
||||
|
||||
def create(self, validated_data):
|
||||
return super().create(validated_data)
|
@ -7,6 +7,8 @@ import requests
|
|||
import time
|
||||
import json
|
||||
import subprocess
|
||||
import signal
|
||||
from zoneinfo import ZoneInfo
|
||||
from datetime import datetime, timedelta
|
||||
import gc
|
@ -1115,6 +1117,148 @@ def reschedule_upcoming_recordings_for_offset_change():
|
|||
return reschedule_upcoming_recordings_for_offset_change_impl()
|
||||
|
||||
|
||||
def _notify_recordings_refresh():
|
||||
try:
|
||||
from core.utils import send_websocket_update
|
||||
send_websocket_update('updates', 'update', {"success": True, "type": "recordings_refreshed"})
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
|
||||
def purge_recurring_rule_impl(rule_id: int) -> int:
|
||||
"""Remove all future recordings created by a recurring rule."""
|
||||
from django.utils import timezone
|
||||
from .models import Recording
|
||||
|
||||
now = timezone.now()
|
||||
try:
|
||||
removed, _ = Recording.objects.filter(
|
||||
start_time__gte=now,
|
||||
custom_properties__rule__id=rule_id,
|
||||
).delete()
|
||||
except Exception:
|
||||
removed = 0
|
||||
if removed:
|
||||
_notify_recordings_refresh()
|
||||
return removed
|
||||
|
||||
|
||||
def sync_recurring_rule_impl(rule_id: int, drop_existing: bool = True, horizon_days: int = 14) -> int:
|
||||
"""Ensure recordings exist for a recurring rule within the scheduling horizon."""
|
||||
from django.utils import timezone
|
||||
from .models import RecurringRecordingRule, Recording
|
||||
|
||||
rule = RecurringRecordingRule.objects.filter(pk=rule_id).select_related("channel").first()
|
||||
now = timezone.now()
|
||||
removed = 0
|
||||
if drop_existing:
|
||||
removed = purge_recurring_rule_impl(rule_id)
|
||||
|
||||
if not rule or not rule.enabled:
|
||||
return 0
|
||||
|
||||
days = rule.cleaned_days()
|
||||
if not days:
|
||||
return 0
|
||||
|
||||
tz_name = CoreSettings.get_system_time_zone()
|
||||
try:
|
||||
tz = ZoneInfo(tz_name)
|
||||
except Exception:
|
||||
logger.warning("Invalid or unsupported time zone '%s'; falling back to Server default", tz_name)
|
||||
tz = timezone.get_current_timezone()
|
||||
start_limit = rule.start_date or now.date()
|
||||
end_limit = rule.end_date
|
||||
horizon = now + timedelta(days=horizon_days)
|
||||
start_window = max(start_limit, now.date())
|
||||
if drop_existing and end_limit:
|
||||
end_window = end_limit
|
||||
else:
|
||||
end_window = horizon.date()
|
||||
if end_limit and end_limit < end_window:
|
||||
end_window = end_limit
|
||||
if end_window < start_window:
|
||||
return 0
|
||||
total_created = 0
|
||||
|
||||
for offset in range((end_window - start_window).days + 1):
|
||||
target_date = start_window + timedelta(days=offset)
|
||||
if target_date.weekday() not in days:
|
||||
continue
|
||||
if end_limit and target_date > end_limit:
|
||||
continue
|
||||
try:
|
||||
start_dt = timezone.make_aware(datetime.combine(target_date, rule.start_time), tz)
|
||||
end_dt = timezone.make_aware(datetime.combine(target_date, rule.end_time), tz)
|
||||
except Exception:
|
||||
continue
|
||||
if end_dt <= start_dt:
|
||||
end_dt = end_dt + timedelta(days=1)
|
||||
if start_dt <= now:
|
||||
continue
|
||||
exists = Recording.objects.filter(
|
||||
channel=rule.channel,
|
||||
start_time=start_dt,
|
||||
custom_properties__rule__id=rule.id,
|
||||
).exists()
|
||||
if exists:
|
||||
continue
|
||||
description = rule.name or f"Recurring recording for {rule.channel.name}"
|
||||
cp = {
|
||||
"rule": {
|
||||
"type": "recurring",
|
||||
"id": rule.id,
|
||||
"days_of_week": days,
|
||||
"name": rule.name or "",
|
||||
},
|
||||
"status": "scheduled",
|
||||
"description": description,
|
||||
"program": {
|
||||
"title": rule.name or rule.channel.name,
|
||||
"description": description,
|
||||
"start_time": start_dt.isoformat(),
|
||||
"end_time": end_dt.isoformat(),
|
||||
},
|
||||
}
|
||||
try:
|
||||
Recording.objects.create(
|
||||
channel=rule.channel,
|
||||
start_time=start_dt,
|
||||
end_time=end_dt,
|
||||
custom_properties=cp,
|
||||
)
|
||||
total_created += 1
|
||||
except Exception as err:
|
||||
logger.warning(f"Failed to create recurring recording for rule {rule.id}: {err}")
|
||||
|
||||
if removed or total_created:
|
||||
_notify_recordings_refresh()
|
||||
|
||||
return total_created
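The windowing above is the heart of the scheduler: it walks day by day from max(rule.start_date, today) to the earlier of rule.end_date and today + horizon_days (when drop_existing rebuilds a bounded rule, it runs all the way to end_date), keeping only weekdays in the rule and slots that have not already started. A compressed sketch of just that arithmetic with plain dates (illustrative values):

from datetime import date, timedelta

today = date(2025, 10, 6)                                          # a Monday, picked for the example
rule_days = {0, 2}                                                 # Mon + Wed
start_window = max(date(2025, 10, 1), today)                       # rule.start_date vs today
end_window = min(date(2025, 12, 31), today + timedelta(days=14))   # rule.end_date vs horizon

targets = [
    start_window + timedelta(days=offset)
    for offset in range((end_window - start_window).days + 1)
    if (start_window + timedelta(days=offset)).weekday() in rule_days
]
# -> every Monday and Wednesday from 2025-10-06 through 2025-10-20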
@shared_task
|
||||
def rebuild_recurring_rule(rule_id: int, horizon_days: int = 14):
|
||||
return sync_recurring_rule_impl(rule_id, drop_existing=True, horizon_days=horizon_days)
|
||||
|
||||
|
||||
@shared_task
|
||||
def maintain_recurring_recordings():
|
||||
from .models import RecurringRecordingRule
|
||||
|
||||
total = 0
|
||||
for rule_id in RecurringRecordingRule.objects.filter(enabled=True).values_list("id", flat=True):
|
||||
try:
|
||||
total += sync_recurring_rule_impl(rule_id, drop_existing=False)
|
||||
except Exception as err:
|
||||
logger.warning(f"Recurring rule maintenance failed for {rule_id}: {err}")
|
||||
return total
|
||||
|
||||
|
||||
@shared_task
|
||||
def purge_recurring_rule(rule_id: int):
|
||||
return purge_recurring_rule_impl(rule_id)
|
||||
|
||||
@shared_task
|
||||
def _safe_name(s):
|
||||
try:
@ -1837,6 +1981,7 @@ def comskip_process_recording(recording_id: int):
|
|||
Safe to call even if comskip is not installed; stores status in custom_properties.comskip.
|
||||
"""
|
||||
import shutil
|
||||
from django.db import DatabaseError
|
||||
from .models import Recording
|
||||
# Helper to broadcast status over websocket
|
||||
def _ws(status: str, extra: dict | None = None):
@ -1854,7 +1999,33 @@ def comskip_process_recording(recording_id: int):
|
|||
except Recording.DoesNotExist:
|
||||
return "not_found"
|
||||
|
||||
cp = rec.custom_properties or {}
|
||||
cp = rec.custom_properties.copy() if isinstance(rec.custom_properties, dict) else {}
|
||||
|
||||
def _persist_custom_properties():
|
||||
"""Persist updated custom_properties without raising if the row disappeared."""
|
||||
try:
|
||||
updated = Recording.objects.filter(pk=recording_id).update(custom_properties=cp)
|
||||
if not updated:
|
||||
logger.warning(
|
||||
"Recording %s vanished before comskip status could be saved",
|
||||
recording_id,
|
||||
)
|
||||
return False
|
||||
except DatabaseError as db_err:
|
||||
logger.warning(
|
||||
"Failed to persist comskip status for recording %s: %s",
|
||||
recording_id,
|
||||
db_err,
|
||||
)
|
||||
return False
|
||||
except Exception as unexpected:
|
||||
logger.warning(
|
||||
"Unexpected error while saving comskip status for recording %s: %s",
|
||||
recording_id,
|
||||
unexpected,
|
||||
)
|
||||
return False
|
||||
return True
|
||||
file_path = (cp or {}).get("file_path")
|
||||
if not file_path or not os.path.exists(file_path):
|
||||
return "no_file"
@ -1865,8 +2036,7 @@ def comskip_process_recording(recording_id: int):
|
|||
comskip_bin = shutil.which("comskip")
|
||||
if not comskip_bin:
|
||||
cp["comskip"] = {"status": "skipped", "reason": "comskip_not_installed"}
|
||||
rec.custom_properties = cp
|
||||
rec.save(update_fields=["custom_properties"])
|
||||
_persist_custom_properties()
|
||||
_ws('skipped', {"reason": "comskip_not_installed"})
|
||||
return "comskip_missing"
@ -1878,24 +2048,59 @@ def comskip_process_recording(recording_id: int):
|
|||
|
||||
try:
|
||||
cmd = [comskip_bin, "--output", os.path.dirname(file_path)]
|
||||
# Prefer system ini if present to squelch warning and get sane defaults
|
||||
for ini_path in ("/etc/comskip/comskip.ini", "/app/docker/comskip.ini"):
|
||||
if os.path.exists(ini_path):
|
||||
# Prefer user-specified INI, fall back to known defaults
|
||||
ini_candidates = []
|
||||
try:
|
||||
custom_ini = CoreSettings.get_dvr_comskip_custom_path()
|
||||
if custom_ini:
|
||||
ini_candidates.append(custom_ini)
|
||||
except Exception as ini_err:
|
||||
logger.debug(f"Unable to load custom comskip.ini path: {ini_err}")
|
||||
ini_candidates.extend(["/etc/comskip/comskip.ini", "/app/docker/comskip.ini"])
|
||||
selected_ini = None
|
||||
for ini_path in ini_candidates:
|
||||
if ini_path and os.path.exists(ini_path):
|
||||
selected_ini = ini_path
|
||||
cmd.extend([f"--ini={ini_path}"])
|
||||
break
|
||||
cmd.append(file_path)
|
||||
subprocess.run(cmd, check=True)
|
||||
subprocess.run(
|
||||
cmd,
|
||||
check=True,
|
||||
stdout=subprocess.PIPE,
|
||||
stderr=subprocess.PIPE,
|
||||
text=True,
|
||||
)
|
||||
except subprocess.CalledProcessError as e:
|
||||
stderr_tail = (e.stderr or "").strip().splitlines()
|
||||
stderr_tail = stderr_tail[-5:] if stderr_tail else []
|
||||
detail = {
|
||||
"status": "error",
|
||||
"reason": "comskip_failed",
|
||||
"returncode": e.returncode,
|
||||
}
|
||||
if e.returncode and e.returncode < 0:
|
||||
try:
|
||||
detail["signal"] = signal.Signals(-e.returncode).name
|
||||
except Exception:
|
||||
detail["signal"] = f"signal_{-e.returncode}"
|
||||
if stderr_tail:
|
||||
detail["stderr"] = "\n".join(stderr_tail)
|
||||
if selected_ini:
|
||||
detail["ini_path"] = selected_ini
|
||||
cp["comskip"] = detail
|
||||
_persist_custom_properties()
|
||||
_ws('error', {"reason": "comskip_failed", "returncode": e.returncode})
|
||||
return "comskip_failed"
|
||||
except Exception as e:
|
||||
cp["comskip"] = {"status": "error", "reason": f"comskip_failed: {e}"}
|
||||
rec.custom_properties = cp
|
||||
rec.save(update_fields=["custom_properties"])
|
||||
_persist_custom_properties()
|
||||
_ws('error', {"reason": str(e)})
|
||||
return "comskip_failed"
|
||||
|
||||
if not os.path.exists(edl_path):
|
||||
cp["comskip"] = {"status": "error", "reason": "edl_not_found"}
|
||||
rec.custom_properties = cp
|
||||
rec.save(update_fields=["custom_properties"])
|
||||
_persist_custom_properties()
|
||||
_ws('error', {"reason": "edl_not_found"})
|
||||
return "no_edl"
@ -1913,8 +2118,7 @@ def comskip_process_recording(recording_id: int):
|
|||
duration = _ffprobe_duration(file_path)
|
||||
if duration is None:
|
||||
cp["comskip"] = {"status": "error", "reason": "duration_unknown"}
|
||||
rec.custom_properties = cp
|
||||
rec.save(update_fields=["custom_properties"])
|
||||
_persist_custom_properties()
|
||||
_ws('error', {"reason": "duration_unknown"})
|
||||
return "no_duration"
@ -1943,9 +2147,14 @@ def comskip_process_recording(recording_id: int):
|
|||
keep.append((cur, duration))
|
||||
|
||||
if not commercials or sum((e - s) for s, e in commercials) <= 0.5:
|
||||
cp["comskip"] = {"status": "completed", "skipped": True, "edl": os.path.basename(edl_path)}
|
||||
rec.custom_properties = cp
|
||||
rec.save(update_fields=["custom_properties"])
|
||||
cp["comskip"] = {
|
||||
"status": "completed",
|
||||
"skipped": True,
|
||||
"edl": os.path.basename(edl_path),
|
||||
}
|
||||
if selected_ini:
|
||||
cp["comskip"]["ini_path"] = selected_ini
|
||||
_persist_custom_properties()
|
||||
_ws('skipped', {"reason": "no_commercials", "commercials": 0})
|
||||
return "no_commercials"
@ -1969,7 +2178,8 @@ def comskip_process_recording(recording_id: int):
|
|||
list_path = os.path.join(workdir, "concat_list.txt")
|
||||
with open(list_path, "w") as lf:
|
||||
for pth in parts:
|
||||
lf.write(f"file '{pth}'\n")
|
||||
escaped = pth.replace("'", "'\\''")
|
||||
lf.write(f"file '{escaped}'\n")
|
||||
|
||||
output_path = os.path.join(workdir, f"{os.path.splitext(os.path.basename(file_path))[0]}.cut.mkv")
|
||||
subprocess.run([
@ -1995,14 +2205,14 @@ def comskip_process_recording(recording_id: int):
|
|||
"segments_kept": len(parts),
|
||||
"commercials": len(commercials),
|
||||
}
|
||||
rec.custom_properties = cp
|
||||
rec.save(update_fields=["custom_properties"])
|
||||
if selected_ini:
|
||||
cp["comskip"]["ini_path"] = selected_ini
|
||||
_persist_custom_properties()
|
||||
_ws('completed', {"commercials": len(commercials), "segments_kept": len(parts)})
|
||||
return "ok"
|
||||
except Exception as e:
|
||||
cp["comskip"] = {"status": "error", "reason": str(e)}
|
||||
rec.custom_properties = cp
|
||||
rec.save(update_fields=["custom_properties"])
|
||||
_persist_custom_properties()
|
||||
_ws('error', {"reason": str(e)})
|
||||
return f"error:{e}"
|
||||
def _resolve_poster_for_program(channel_name, program):
apps/channels/tests/__init__.py (new file, 0 lines)
apps/channels/tests/test_recurring_rules.py (new file, 40 lines)
@ -0,0 +1,40 @@
|
|||
from datetime import datetime, timedelta
|
||||
from django.test import TestCase
|
||||
from django.utils import timezone
|
||||
|
||||
from apps.channels.models import Channel, RecurringRecordingRule, Recording
|
||||
from apps.channels.tasks import sync_recurring_rule_impl, purge_recurring_rule_impl
|
||||
|
||||
|
||||
class RecurringRecordingRuleTasksTests(TestCase):
|
||||
def test_sync_recurring_rule_creates_and_purges_recordings(self):
|
||||
now = timezone.now()
|
||||
channel = Channel.objects.create(channel_number=1, name='Test Channel')
|
||||
|
||||
start_time = (now + timedelta(minutes=15)).time().replace(second=0, microsecond=0)
|
||||
end_time = (now + timedelta(minutes=75)).time().replace(second=0, microsecond=0)
|
||||
|
||||
rule = RecurringRecordingRule.objects.create(
|
||||
channel=channel,
|
||||
days_of_week=[now.weekday()],
|
||||
start_time=start_time,
|
||||
end_time=end_time,
|
||||
)
|
||||
|
||||
created = sync_recurring_rule_impl(rule.id, drop_existing=True, horizon_days=1)
|
||||
self.assertEqual(created, 1)
|
||||
|
||||
recording = Recording.objects.filter(custom_properties__rule__id=rule.id).first()
|
||||
self.assertIsNotNone(recording)
|
||||
self.assertEqual(recording.channel, channel)
|
||||
self.assertEqual(recording.custom_properties.get('rule', {}).get('id'), rule.id)
|
||||
|
||||
expected_start = timezone.make_aware(
|
||||
datetime.combine(recording.start_time.date(), start_time),
|
||||
timezone.get_current_timezone(),
|
||||
)
|
||||
self.assertLess(abs((recording.start_time - expected_start).total_seconds()), 60)
|
||||
|
||||
removed = purge_recurring_rule_impl(rule.id)
|
||||
self.assertEqual(removed, 1)
|
||||
self.assertFalse(Recording.objects.filter(custom_properties__rule__id=rule.id).exists())
@ -1,4 +1,5 @@
|
|||
# core/models.py
|
||||
from django.conf import settings
|
||||
from django.db import models
|
||||
from django.utils.text import slugify
|
||||
from django.core.exceptions import ValidationError
@ -158,8 +159,10 @@ DVR_TV_FALLBACK_DIR_KEY = slugify("DVR TV Fallback Dir")
|
|||
DVR_TV_FALLBACK_TEMPLATE_KEY = slugify("DVR TV Fallback Template")
|
||||
DVR_MOVIE_FALLBACK_TEMPLATE_KEY = slugify("DVR Movie Fallback Template")
|
||||
DVR_COMSKIP_ENABLED_KEY = slugify("DVR Comskip Enabled")
|
||||
DVR_COMSKIP_CUSTOM_PATH_KEY = slugify("DVR Comskip Custom Path")
|
||||
DVR_PRE_OFFSET_MINUTES_KEY = slugify("DVR Pre-Offset Minutes")
|
||||
DVR_POST_OFFSET_MINUTES_KEY = slugify("DVR Post-Offset Minutes")
|
||||
SYSTEM_TIME_ZONE_KEY = slugify("System Time Zone")
|
||||
|
||||
|
||||
class CoreSettings(models.Model):
@ -274,6 +277,27 @@ class CoreSettings(models.Model):
|
|||
except cls.DoesNotExist:
|
||||
return False
|
||||
|
||||
@classmethod
|
||||
def get_dvr_comskip_custom_path(cls):
|
||||
"""Return configured comskip.ini path or empty string if unset."""
|
||||
try:
|
||||
return cls.objects.get(key=DVR_COMSKIP_CUSTOM_PATH_KEY).value
|
||||
except cls.DoesNotExist:
|
||||
return ""
|
||||
|
||||
@classmethod
|
||||
def set_dvr_comskip_custom_path(cls, path: str | None):
|
||||
"""Persist the comskip.ini path setting, normalizing nulls to empty string."""
|
||||
value = (path or "").strip()
|
||||
obj, _ = cls.objects.get_or_create(
|
||||
key=DVR_COMSKIP_CUSTOM_PATH_KEY,
|
||||
defaults={"name": "DVR Comskip Custom Path", "value": value},
|
||||
)
|
||||
if obj.value != value:
|
||||
obj.value = value
|
||||
obj.save(update_fields=["value"])
|
||||
return value
|
||||
|
||||
@classmethod
|
||||
def get_dvr_pre_offset_minutes(cls):
|
||||
"""Minutes to start recording before scheduled start (default 0)."""
@ -302,6 +326,30 @@ class CoreSettings(models.Model):
|
|||
except Exception:
|
||||
return 0
|
||||
|
||||
@classmethod
|
||||
def get_system_time_zone(cls):
|
||||
"""Return configured system time zone or fall back to Django settings."""
|
||||
try:
|
||||
value = cls.objects.get(key=SYSTEM_TIME_ZONE_KEY).value
|
||||
if value:
|
||||
return value
|
||||
except cls.DoesNotExist:
|
||||
pass
|
||||
return getattr(settings, "TIME_ZONE", "UTC") or "UTC"
|
||||
|
||||
@classmethod
|
||||
def set_system_time_zone(cls, tz_name: str | None):
|
||||
"""Persist the desired system time zone identifier."""
|
||||
value = (tz_name or "").strip() or getattr(settings, "TIME_ZONE", "UTC") or "UTC"
|
||||
obj, _ = cls.objects.get_or_create(
|
||||
key=SYSTEM_TIME_ZONE_KEY,
|
||||
defaults={"name": "System Time Zone", "value": value},
|
||||
)
|
||||
if obj.value != value:
|
||||
obj.value = value
|
||||
obj.save(update_fields=["value"])
|
||||
return value
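Both setters share the same get_or_create pattern keyed on a slugified name, so they are idempotent and only write when the value actually changes. A small usage sketch (import path as used elsewhere in this diff):

from core.models import CoreSettings

CoreSettings.set_system_time_zone("Europe/Berlin")
CoreSettings.get_system_time_zone()             # -> "Europe/Berlin"

CoreSettings.set_dvr_comskip_custom_path(None)  # normalizes None to "" (unset)
CoreSettings.get_dvr_comskip_custom_path()      # -> ""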
@classmethod
|
||||
def get_dvr_series_rules(cls):
|
||||
"""Return list of series recording rules. Each: {tvg_id, title, mode: 'all'|'new'}"""
@@ -211,6 +211,10 @@ CELERY_BEAT_SCHEDULE = {
        "task": "core.tasks.scan_and_process_files", # Direct task call
        "schedule": 20.0, # Every 20 seconds
    },
    "maintain-recurring-recordings": {
        "task": "apps.channels.tasks.maintain_recurring_recordings",
        "schedule": 3600.0, # Once an hour ensure recurring schedules stay ahead
    },
}

MEDIA_ROOT = BASE_DIR / "media"
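The beat entry just queues the maintenance task hourly; since it is a plain @shared_task it can also be exercised by hand, which is a quick way to verify the scheduling horizon stays filled (sketch; .delay() assumes a running Celery worker):

from apps.channels.tasks import maintain_recurring_recordings

maintain_recurring_recordings()         # run inline; returns the number of recordings created
maintain_recurring_recordings.delay()   # or queue it through Celery, exactly as beat does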
@ -1896,6 +1896,83 @@ export default class API {
|
|||
}
|
||||
}
|
||||
|
||||
static async updateRecording(id, values) {
|
||||
try {
|
||||
const response = await request(`${host}/api/channels/recordings/${id}/`, {
|
||||
method: 'PATCH',
|
||||
body: values,
|
||||
});
|
||||
useChannelsStore.getState().fetchRecordings();
|
||||
return response;
|
||||
} catch (e) {
|
||||
errorNotification(`Failed to update recording ${id}`, e);
|
||||
}
|
||||
}
|
||||
|
||||
static async getComskipConfig() {
|
||||
try {
|
||||
return await request(`${host}/api/channels/dvr/comskip-config/`);
|
||||
} catch (e) {
|
||||
errorNotification('Failed to retrieve comskip configuration', e);
|
||||
}
|
||||
}
|
||||
|
||||
static async uploadComskipIni(file) {
|
||||
try {
|
||||
const formData = new FormData();
|
||||
formData.append('file', file);
|
||||
return await request(`${host}/api/channels/dvr/comskip-config/`, {
|
||||
method: 'POST',
|
||||
body: formData,
|
||||
});
|
||||
} catch (e) {
|
||||
errorNotification('Failed to upload comskip.ini', e);
|
||||
}
|
||||
}
|
||||
|
||||
static async listRecurringRules() {
|
||||
try {
|
||||
const response = await request(`${host}/api/channels/recurring-rules/`);
|
||||
return response;
|
||||
} catch (e) {
|
||||
errorNotification('Failed to retrieve recurring DVR rules', e);
|
||||
}
|
||||
}
|
||||
|
||||
static async createRecurringRule(payload) {
|
||||
try {
|
||||
const response = await request(`${host}/api/channels/recurring-rules/`, {
|
||||
method: 'POST',
|
||||
body: payload,
|
||||
});
|
||||
return response;
|
||||
} catch (e) {
|
||||
errorNotification('Failed to create recurring DVR rule', e);
|
||||
}
|
||||
}
|
||||
|
||||
static async updateRecurringRule(ruleId, payload) {
|
||||
try {
|
||||
const response = await request(`${host}/api/channels/recurring-rules/${ruleId}/`, {
|
||||
method: 'PATCH',
|
||||
body: payload,
|
||||
});
|
||||
return response;
|
||||
} catch (e) {
|
||||
errorNotification(`Failed to update recurring rule ${ruleId}`, e);
|
||||
}
|
||||
}
|
||||
|
||||
static async deleteRecurringRule(ruleId) {
|
||||
try {
|
||||
await request(`${host}/api/channels/recurring-rules/${ruleId}/`, {
|
||||
method: 'DELETE',
|
||||
});
|
||||
} catch (e) {
|
||||
errorNotification(`Failed to delete recurring rule ${ruleId}`, e);
|
||||
}
|
||||
}
|
||||
|
||||
static async deleteRecording(id) {
|
||||
try {
|
||||
await request(`${host}/api/channels/recordings/${id}/`, { method: 'DELETE' });
@ -1,117 +1,424 @@
|
|||
// Modal.js
|
||||
import React from 'react';
|
||||
import React, { useEffect, useMemo, useState } from 'react';
|
||||
import dayjs from 'dayjs';
|
||||
import API from '../../api';
|
||||
import { Button, Modal, Flex, Select, Alert } from '@mantine/core';
|
||||
import useChannelsStore from '../../store/channels';
|
||||
import { DateTimePicker } from '@mantine/dates';
|
||||
import {
|
||||
Alert,
|
||||
Button,
|
||||
Modal,
|
||||
Select,
|
||||
Stack,
|
||||
SegmentedControl,
|
||||
MultiSelect,
|
||||
Group,
|
||||
TextInput,
|
||||
} from '@mantine/core';
|
||||
import { DateTimePicker, TimeInput, DatePickerInput } from '@mantine/dates';
|
||||
import { CircleAlert } from 'lucide-react';
|
||||
import { isNotEmpty, useForm } from '@mantine/form';
|
||||
import useChannelsStore from '../../store/channels';
|
||||
import { notifications } from '@mantine/notifications';
|
||||
|
||||
const DVR = ({ recording = null, channel = null, isOpen, onClose }) => {
|
||||
const DAY_OPTIONS = [
|
||||
{ value: '6', label: 'Sun' },
|
||||
{ value: '0', label: 'Mon' },
|
||||
{ value: '1', label: 'Tue' },
|
||||
{ value: '2', label: 'Wed' },
|
||||
{ value: '3', label: 'Thu' },
|
||||
{ value: '4', label: 'Fri' },
|
||||
{ value: '5', label: 'Sat' },
|
||||
];
|
||||
|
||||
const asDate = (value) => {
|
||||
if (!value) return null;
|
||||
if (value instanceof Date) return value;
|
||||
const parsed = new Date(value);
|
||||
return Number.isNaN(parsed.getTime()) ? null : parsed;
|
||||
};
|
||||
|
||||
const toIsoIfDate = (value) => {
|
||||
const dt = asDate(value);
|
||||
return dt ? dt.toISOString() : value;
|
||||
};
|
||||
|
||||
// Accepts "h:mm A"/"hh:mm A"/"HH:mm"/Date, returns "HH:mm"
|
||||
const toTimeString = (value) => {
|
||||
if (!value) return '00:00';
|
||||
if (typeof value === 'string') {
|
||||
const parsed = dayjs(value, ['HH:mm', 'hh:mm A', 'h:mm A', 'HH:mm:ss'], true);
|
||||
if (parsed.isValid()) return parsed.format('HH:mm');
|
||||
return value;
|
||||
}
|
||||
const dt = asDate(value);
|
||||
if (!dt) return '00:00';
|
||||
return dayjs(dt).format('HH:mm');
|
||||
};
|
||||
|
||||
const toDateString = (value) => {
|
||||
const dt = asDate(value);
|
||||
if (!dt) return null;
|
||||
const year = dt.getFullYear();
|
||||
const month = String(dt.getMonth() + 1).padStart(2, '0');
|
||||
const day = String(dt.getDate()).padStart(2, '0');
|
||||
return `${year}-${month}-${day}`;
|
||||
};
|
||||
|
||||
const createRoundedDate = (minutesAhead = 0) => {
|
||||
const dt = new Date();
|
||||
dt.setSeconds(0);
|
||||
dt.setMilliseconds(0);
|
||||
dt.setMinutes(Math.ceil(dt.getMinutes() / 30) * 30);
|
||||
if (minutesAhead) dt.setMinutes(dt.getMinutes() + minutesAhead);
|
||||
return dt;
|
||||
};
|
||||
|
||||
// robust onChange for TimeInput (string or event)
|
||||
const timeChange = (setter) => (valOrEvent) => {
|
||||
if (typeof valOrEvent === 'string') setter(valOrEvent);
|
||||
else if (valOrEvent?.currentTarget) setter(valOrEvent.currentTarget.value);
|
||||
};
|
||||
|
||||
const RecordingModal = ({ recording = null, channel = null, isOpen, onClose }) => {
|
||||
const channels = useChannelsStore((s) => s.channels);
|
||||
const fetchRecordings = useChannelsStore((s) => s.fetchRecordings);
|
||||
const fetchRecurringRules = useChannelsStore((s) => s.fetchRecurringRules);
|
||||
|
||||
let startTime = new Date();
|
||||
startTime.setMinutes(Math.ceil(startTime.getMinutes() / 30) * 30);
|
||||
startTime.setSeconds(0);
|
||||
startTime.setMilliseconds(0);
|
||||
const [mode, setMode] = useState('single');
|
||||
const [submitting, setSubmitting] = useState(false);
|
||||
|
||||
let endTime = new Date();
|
||||
endTime.setMinutes(Math.ceil(endTime.getMinutes() / 30) * 30);
|
||||
endTime.setSeconds(0);
|
||||
endTime.setMilliseconds(0);
|
||||
endTime.setHours(endTime.getHours() + 1);
|
||||
const defaultStart = createRoundedDate();
|
||||
const defaultEnd = createRoundedDate(60);
|
||||
const defaultDate = new Date();
|
||||
|
||||
const form = useForm({
|
||||
mode: 'uncontrolled',
|
||||
// One-time form
|
||||
const singleForm = useForm({
|
||||
mode: 'controlled',
|
||||
initialValues: {
|
||||
channel_id: recording
|
||||
? recording.channel_id
|
||||
: channel
|
||||
? `${channel.id}`
|
||||
: '',
|
||||
start_time: recording ? recording.start_time : startTime,
|
||||
end_time: recording ? recording.end_time : endTime,
|
||||
channel_id: recording ? `${recording.channel}` : channel ? `${channel.id}` : '',
|
||||
start_time: recording ? asDate(recording.start_time) || defaultStart : defaultStart,
|
||||
end_time: recording ? asDate(recording.end_time) || defaultEnd : defaultEnd,
|
||||
},
|
||||
|
||||
validate: {
|
||||
channel_id: isNotEmpty('Select a channel'),
|
||||
start_time: isNotEmpty('Select a start time'),
|
||||
end_time: isNotEmpty('Select an end time'),
|
||||
end_time: (value, values) => {
|
||||
const start = asDate(values.start_time);
|
||||
const end = asDate(value);
|
||||
if (!end) return 'Select an end time';
|
||||
if (start && end <= start) return 'End time must be after start time';
|
||||
return null;
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
const onSubmit = async () => {
|
||||
const { channel_id, ...values } = form.getValues();
|
||||
// Recurring form stores times as "HH:mm" strings for stable editing
|
||||
const recurringForm = useForm({
|
||||
mode: 'controlled',
|
||||
validateInputOnChange: false,
|
||||
validateInputOnBlur: true,
|
||||
initialValues: {
|
||||
channel_id: channel ? `${channel.id}` : '',
|
||||
days_of_week: [],
|
||||
start_time: dayjs(defaultStart).format('HH:mm'),
|
||||
end_time: dayjs(defaultEnd).format('HH:mm'),
|
||||
rule_name: '',
|
||||
start_date: defaultDate,
|
||||
end_date: defaultDate,
|
||||
},
|
||||
validate: {
|
||||
channel_id: isNotEmpty('Select a channel'),
|
||||
days_of_week: (value) => (value && value.length ? null : 'Pick at least one day'),
|
||||
start_time: (value) => (value ? null : 'Select a start time'),
|
||||
end_time: (value, values) => {
|
||||
if (!value) return 'Select an end time';
|
||||
const start = dayjs(values.start_time, ['HH:mm', 'hh:mm A', 'h:mm A'], true);
|
||||
const end = dayjs(value, ['HH:mm', 'hh:mm A', 'h:mm A'], true);
|
||||
if (start.isValid() && end.isValid() && end.diff(start, 'minute') === 0) {
|
||||
return 'End time must differ from start time';
|
||||
}
|
||||
return null;
|
||||
},
|
||||
end_date: (value, values) => {
|
||||
const end = asDate(value);
|
||||
const start = asDate(values.start_date);
|
||||
if (!end) return 'Select an end date';
|
||||
if (start && end < start) return 'End date cannot be before start date';
|
||||
return null;
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
console.log(values);
|
||||
useEffect(() => {
|
||||
if (!isOpen) return;
|
||||
|
||||
await API.createRecording({
|
||||
...values,
|
||||
channel: channel_id,
|
||||
const freshStart = createRoundedDate();
|
||||
const freshEnd = createRoundedDate(60);
|
||||
const freshDate = new Date();
|
||||
|
||||
if (recording && recording.id) {
|
||||
setMode('single');
|
||||
singleForm.setValues({
|
||||
channel_id: `${recording.channel}`,
|
||||
start_time: asDate(recording.start_time) || defaultStart,
|
||||
end_time: asDate(recording.end_time) || defaultEnd,
|
||||
});
|
||||
} else {
|
||||
// Reset forms for fresh open
|
||||
singleForm.setValues({
|
||||
channel_id: channel ? `${channel.id}` : '',
|
||||
start_time: freshStart,
|
||||
end_time: freshEnd,
|
||||
});
|
||||
|
||||
const startStr = dayjs(freshStart).format('HH:mm');
|
||||
recurringForm.setValues({
|
||||
channel_id: channel ? `${channel.id}` : '',
|
||||
days_of_week: [],
|
||||
start_time: startStr,
|
||||
end_time: dayjs(freshEnd).format('HH:mm'),
|
||||
rule_name: channel?.name || '',
|
||||
start_date: freshDate,
|
||||
end_date: freshDate,
|
||||
});
|
||||
setMode('single');
|
||||
}
|
||||
// eslint-disable-next-line react-hooks/exhaustive-deps
|
||||
}, [isOpen, recording, channel]);
|
||||
|
||||
const channelOptions = useMemo(() => {
|
||||
const list = Object.values(channels || {});
|
||||
list.sort((a, b) => {
|
||||
const aNum = Number(a.channel_number) || 0;
|
||||
const bNum = Number(b.channel_number) || 0;
|
||||
if (aNum === bNum) return (a.name || '').localeCompare(b.name || '');
|
||||
return aNum - bNum;
|
||||
});
|
||||
return list.map((item) => ({ value: `${item.id}`, label: item.name || `Channel ${item.id}` }));
|
||||
}, [channels]);
|
||||
|
||||
form.reset();
|
||||
onClose();
|
||||
const resetForms = () => {
|
||||
singleForm.reset();
|
||||
recurringForm.reset();
|
||||
setMode('single');
|
||||
};
|
||||
|
||||
if (!isOpen) {
|
||||
return <></>;
|
||||
}
|
||||
const handleClose = () => {
|
||||
resetForms();
|
||||
onClose?.();
|
||||
};
|
||||
|
||||
const handleSingleSubmit = async (values) => {
|
||||
try {
|
||||
setSubmitting(true);
|
||||
if (recording && recording.id) {
|
||||
await API.updateRecording(recording.id, {
|
||||
channel: values.channel_id,
|
||||
start_time: toIsoIfDate(values.start_time),
|
||||
end_time: toIsoIfDate(values.end_time),
|
||||
});
|
||||
notifications.show({
|
||||
title: 'Recording updated',
|
||||
message: 'Recording schedule updated successfully',
|
||||
color: 'green',
|
||||
autoClose: 2500,
|
||||
});
|
||||
} else {
|
||||
await API.createRecording({
|
||||
channel: values.channel_id,
|
||||
start_time: toIsoIfDate(values.start_time),
|
||||
end_time: toIsoIfDate(values.end_time),
|
||||
});
|
||||
notifications.show({
|
||||
title: 'Recording scheduled',
|
||||
message: 'One-time recording added to DVR queue',
|
||||
color: 'green',
|
||||
autoClose: 2500,
|
||||
});
|
||||
}
|
||||
await fetchRecordings();
|
||||
handleClose();
|
||||
} catch (error) {
|
||||
console.error('Failed to create recording', error);
|
||||
} finally {
|
||||
setSubmitting(false);
|
||||
}
|
||||
};
|
||||
|
||||
const handleRecurringSubmit = async (values) => {
|
||||
try {
|
||||
setSubmitting(true);
|
||||
await API.createRecurringRule({
|
||||
channel: values.channel_id,
|
||||
days_of_week: (values.days_of_week || []).map((d) => Number(d)),
|
||||
start_time: toTimeString(values.start_time),
|
||||
end_time: toTimeString(values.end_time),
|
||||
start_date: toDateString(values.start_date),
|
||||
end_date: toDateString(values.end_date),
|
||||
name: values.rule_name?.trim() || '',
|
||||
});
|
||||
|
||||
await Promise.all([fetchRecurringRules(), fetchRecordings()]);
|
||||
notifications.show({
|
||||
title: 'Recurring rule saved',
|
||||
message: 'Future slots will be scheduled automatically',
|
||||
color: 'green',
|
||||
autoClose: 2500,
|
||||
});
|
||||
handleClose();
|
||||
} catch (error) {
|
||||
console.error('Failed to create recurring rule', error);
|
||||
} finally {
|
||||
setSubmitting(false);
|
||||
}
|
||||
};
|
||||
|
||||
const onSubmit =
|
||||
mode === 'single'
|
||||
? singleForm.onSubmit(handleSingleSubmit)
|
||||
: recurringForm.onSubmit(handleRecurringSubmit);
|
||||
|
||||
if (!isOpen) return null;
|
||||
|
||||
return (
|
||||
<Modal opened={isOpen} onClose={onClose} title="Channel Recording">
|
||||
<Modal opened={isOpen} onClose={handleClose} title="Channel Recording">
|
||||
<Alert
|
||||
variant="light"
|
||||
color="yellow"
|
||||
title="Scheduling Conflicts"
|
||||
icon={<CircleAlert />}
|
||||
style={{ paddingBottom: 5 }}
|
||||
style={{ paddingBottom: 5, marginBottom: 12 }}
|
||||
>
|
||||
Recordings may fail if active streams or overlapping recordings use up
|
||||
all available streams
|
||||
Recordings may fail if active streams or overlapping recordings use up all available tuners.
|
||||
</Alert>
|
||||
|
||||
<form onSubmit={form.onSubmit(onSubmit)}>
|
||||
<Select
|
||||
{...form.getInputProps('channel_id')}
|
||||
label="Channel"
|
||||
key={form.key('channel_id')}
|
||||
searchable
|
||||
data={Object.values(channels).map((channel) => ({
|
||||
value: `${channel.id}`,
|
||||
label: channel.name,
|
||||
}))}
|
||||
<Stack gap="md">
|
||||
<SegmentedControl
|
||||
value={mode}
|
||||
onChange={setMode}
|
||||
disabled={Boolean(recording && recording.id)}
|
||||
data={[
|
||||
{ value: 'single', label: 'One-time' },
|
||||
{ value: 'recurring', label: 'Recurring' },
|
||||
]}
|
||||
/>
|
||||
|
||||
<DateTimePicker
|
||||
{...form.getInputProps('start_time')}
|
||||
key={form.key('start_time')}
|
||||
id="start_time"
|
||||
label="Start Time"
|
||||
valueFormat="M/DD/YYYY hh:mm A"
|
||||
/>
|
||||
<form onSubmit={onSubmit}>
|
||||
<Stack gap="md">
|
||||
{mode === 'single' ? (
|
||||
<Select
|
||||
{...singleForm.getInputProps('channel_id')}
|
||||
key={singleForm.key('channel_id')}
|
||||
label="Channel"
|
||||
placeholder="Select channel"
|
||||
searchable
|
||||
data={channelOptions}
|
||||
/>
|
||||
) : (
|
||||
<Select
|
||||
{...recurringForm.getInputProps('channel_id')}
|
||||
key={recurringForm.key('channel_id')}
|
||||
label="Channel"
|
||||
placeholder="Select channel"
|
||||
searchable
|
||||
data={channelOptions}
|
||||
/>
|
||||
)}
|
||||
|
||||
<DateTimePicker
|
||||
{...form.getInputProps('end_time')}
|
||||
key={form.key('end_time')}
|
||||
id="end_time"
|
||||
label="End Time"
|
||||
valueFormat="M/DD/YYYY hh:mm A"
|
||||
/>
|
||||
{mode === 'single' ? (
|
||||
<>
|
||||
<DateTimePicker
|
||||
{...singleForm.getInputProps('start_time')}
|
||||
key={singleForm.key('start_time')}
|
||||
label="Start"
|
||||
valueFormat="MMM D, YYYY h:mm A"
|
||||
timeInputProps={{ format: '12', withSeconds: false, amLabel: 'AM', pmLabel: 'PM' }}
|
||||
/>
|
||||
<DateTimePicker
|
||||
{...singleForm.getInputProps('end_time')}
|
||||
key={singleForm.key('end_time')}
|
||||
label="End"
|
||||
valueFormat="MMM D, YYYY h:mm A"
|
||||
timeInputProps={{ format: '12', withSeconds: false, amLabel: 'AM', pmLabel: 'PM' }}
|
||||
/>
|
||||
</>
|
||||
) : (
|
||||
<>
|
||||
<TextInput
|
||||
{...recurringForm.getInputProps('rule_name')}
|
||||
key={recurringForm.key('rule_name')}
|
||||
label="Rule name"
|
||||
placeholder="Morning News, Football Sundays, ..."
|
||||
/>
|
||||
<MultiSelect
|
||||
{...recurringForm.getInputProps('days_of_week')}
|
||||
key={recurringForm.key('days_of_week')}
|
||||
label="Every"
|
||||
placeholder="Select days"
|
||||
data={DAY_OPTIONS}
|
||||
searchable
|
||||
clearable
|
||||
nothingFoundMessage="No match"
|
||||
/>
|
||||
|
||||
<Flex mih={50} gap="xs" justify="flex-end" align="flex-end">
|
||||
<Button
|
||||
type="submit"
|
||||
variant="contained"
|
||||
size="small"
|
||||
disabled={form.submitting}
|
||||
>
|
||||
Submit
|
||||
</Button>
|
||||
</Flex>
|
||||
</form>
|
||||
<Group grow>
|
||||
<DatePickerInput
|
||||
label="Start date"
|
||||
value={recurringForm.values.start_date}
|
||||
onChange={(value) =>
|
||||
recurringForm.setFieldValue('start_date', value || new Date())
|
||||
}
|
||||
valueFormat="MMM D, YYYY"
|
||||
/>
|
||||
<DatePickerInput
|
||||
label="End date"
|
||||
value={recurringForm.values.end_date}
|
||||
onChange={(value) => recurringForm.setFieldValue('end_date', value)}
|
||||
valueFormat="MMM D, YYYY"
|
||||
minDate={recurringForm.values.start_date || undefined}
|
||||
/>
|
||||
</Group>
|
||||
|
||||
<Group grow>
|
||||
<TimeInput
|
||||
label="Start time"
|
||||
value={recurringForm.values.start_time}
|
||||
onChange={timeChange((val) =>
|
||||
recurringForm.setFieldValue('start_time', toTimeString(val))
|
||||
)}
|
||||
onBlur={() => recurringForm.validateField('start_time')}
|
||||
withSeconds={false}
|
||||
format="12" // shows 12-hour (so "00:00" renders "12:00 AM")
|
||||
inputMode="numeric"
|
||||
amLabel="AM"
|
||||
pmLabel="PM"
|
||||
/>
|
||||
|
||||
<TimeInput
|
||||
label="End time"
|
||||
value={recurringForm.values.end_time}
|
||||
onChange={timeChange((val) =>
|
||||
recurringForm.setFieldValue('end_time', toTimeString(val))
|
||||
)}
|
||||
onBlur={() => recurringForm.validateField('end_time')}
|
||||
withSeconds={false}
|
||||
format="12"
|
||||
inputMode="numeric"
|
||||
amLabel="AM"
|
||||
pmLabel="PM"
|
||||
/>
|
||||
</Group>
|
||||
</>
|
||||
)}
|
||||
|
||||
<Group justify="flex-end">
|
||||
<Button type="submit" loading={submitting}>
|
||||
{mode === 'single' ? 'Schedule Recording' : 'Save Rule'}
|
||||
</Button>
|
||||
</Group>
|
||||
</Stack>
|
||||
</form>
|
||||
</Stack>
|
||||
</Modal>
|
||||
);
|
||||
};
|
||||
|
||||
export default DVR;
|
||||
export default RecordingModal;
File diff suppressed because it is too large
@ -1,4 +1,10 @@
|
|||
import React, { useEffect, useState } from 'react';
|
||||
import React, {
|
||||
useCallback,
|
||||
useEffect,
|
||||
useMemo,
|
||||
useRef,
|
||||
useState,
|
||||
} from 'react';
|
||||
import API from '../api';
|
||||
import useSettingsStore from '../store/settings';
|
||||
import useUserAgentsStore from '../store/userAgents';
@ -11,6 +17,7 @@ import {
|
|||
Center,
|
||||
Flex,
|
||||
Group,
|
||||
FileInput,
|
||||
MultiSelect,
|
||||
Select,
|
||||
Stack,
|
|
|||
NumberInput,
|
||||
} from '@mantine/core';
|
||||
import { isNotEmpty, useForm } from '@mantine/form';
|
||||
import { notifications } from '@mantine/notifications';
|
||||
import UserAgentsTable from '../components/tables/UserAgentsTable';
|
||||
import StreamProfilesTable from '../components/tables/StreamProfilesTable';
|
||||
import useLocalStorage from '../hooks/useLocalStorage';
@ -33,6 +41,140 @@ import {
|
|||
import ConfirmationDialog from '../components/ConfirmationDialog';
|
||||
import useWarningsStore from '../store/warnings';
|
||||
|
||||
const TIMEZONE_FALLBACKS = [
|
||||
'UTC',
|
||||
'America/New_York',
|
||||
'America/Chicago',
|
||||
'America/Denver',
|
||||
'America/Los_Angeles',
|
||||
'America/Phoenix',
|
||||
'America/Anchorage',
|
||||
'Pacific/Honolulu',
|
||||
'Europe/London',
|
||||
'Europe/Paris',
|
||||
'Europe/Berlin',
|
||||
'Europe/Madrid',
|
||||
'Europe/Warsaw',
|
||||
'Europe/Moscow',
|
||||
'Asia/Dubai',
|
||||
'Asia/Kolkata',
|
||||
'Asia/Shanghai',
|
||||
'Asia/Tokyo',
|
||||
'Asia/Seoul',
|
||||
'Australia/Sydney',
|
||||
];
|
||||
|
||||
const getSupportedTimeZones = () => {
|
||||
try {
|
||||
if (typeof Intl.supportedValuesOf === 'function') {
|
||||
return Intl.supportedValuesOf('timeZone');
|
||||
}
|
||||
} catch (error) {
|
||||
console.warn('Unable to enumerate supported time zones:', error);
|
||||
}
|
||||
return TIMEZONE_FALLBACKS;
|
||||
};
|
||||
|
||||
const getTimeZoneOffsetMinutes = (date, timeZone) => {
|
||||
try {
|
||||
const dtf = new Intl.DateTimeFormat('en-US', {
|
||||
timeZone,
|
||||
year: 'numeric',
|
||||
month: '2-digit',
|
||||
day: '2-digit',
|
||||
hour: '2-digit',
|
||||
minute: '2-digit',
|
||||
second: '2-digit',
|
||||
hourCycle: 'h23',
|
||||
});
|
||||
const parts = dtf.formatToParts(date).reduce((acc, part) => {
|
||||
if (part.type !== 'literal') acc[part.type] = part.value;
|
||||
return acc;
|
||||
}, {});
|
||||
const asUTC = Date.UTC(
|
||||
Number(parts.year),
|
||||
Number(parts.month) - 1,
|
||||
Number(parts.day),
|
||||
Number(parts.hour),
|
||||
Number(parts.minute),
|
||||
Number(parts.second)
|
||||
);
|
||||
return (asUTC - date.getTime()) / 60000;
|
||||
} catch (error) {
|
||||
console.warn(`Failed to compute offset for ${timeZone}:`, error);
|
||||
return 0;
|
||||
}
|
||||
};
|
||||
|
||||
const formatOffset = (minutes) => {
|
||||
const rounded = Math.round(minutes);
|
||||
const sign = rounded < 0 ? '-' : '+';
|
||||
const absolute = Math.abs(rounded);
|
||||
const hours = String(Math.floor(absolute / 60)).padStart(2, '0');
|
||||
const mins = String(absolute % 60).padStart(2, '0');
|
||||
return `UTC${sign}${hours}:${mins}`;
|
||||
};
|
||||
|
||||
const buildTimeZoneOptions = (preferredZone) => {
|
||||
const zones = getSupportedTimeZones();
|
||||
const referenceYear = new Date().getUTCFullYear();
|
||||
const janDate = new Date(Date.UTC(referenceYear, 0, 1, 12, 0, 0));
|
||||
const julDate = new Date(Date.UTC(referenceYear, 6, 1, 12, 0, 0));
|
||||
|
||||
const options = zones
|
||||
.map((zone) => {
|
||||
const janOffset = getTimeZoneOffsetMinutes(janDate, zone);
|
||||
const julOffset = getTimeZoneOffsetMinutes(julDate, zone);
|
||||
const currentOffset = getTimeZoneOffsetMinutes(new Date(), zone);
|
||||
const minOffset = Math.min(janOffset, julOffset);
|
||||
const maxOffset = Math.max(janOffset, julOffset);
|
||||
const usesDst = minOffset !== maxOffset;
|
||||
const labelParts = [`now ${formatOffset(currentOffset)}`];
|
||||
if (usesDst) {
|
||||
labelParts.push(
|
||||
`DST range ${formatOffset(minOffset)} to ${formatOffset(maxOffset)}`
|
||||
);
|
||||
}
|
||||
return {
|
||||
value: zone,
|
||||
label: `${zone} (${labelParts.join(' | ')})`,
|
||||
numericOffset: minOffset,
|
||||
};
|
||||
})
|
||||
.sort((a, b) => {
|
||||
if (a.numericOffset !== b.numericOffset) {
|
||||
return a.numericOffset - b.numericOffset;
|
||||
}
|
||||
return a.value.localeCompare(b.value);
|
||||
});
|
||||
if (
|
||||
preferredZone &&
|
||||
!options.some((option) => option.value === preferredZone)
|
||||
) {
|
||||
const currentOffset = getTimeZoneOffsetMinutes(new Date(), preferredZone);
|
||||
options.push({
|
||||
value: preferredZone,
|
||||
label: `${preferredZone} (now ${formatOffset(currentOffset)})`,
|
||||
numericOffset: currentOffset,
|
||||
});
|
||||
options.sort((a, b) => {
|
||||
if (a.numericOffset !== b.numericOffset) {
|
||||
return a.numericOffset - b.numericOffset;
|
||||
}
|
||||
return a.value.localeCompare(b.value);
|
||||
});
|
||||
}
|
||||
return options;
|
||||
};
|
||||
|
||||
const getDefaultTimeZone = () => {
|
||||
try {
|
||||
return Intl.DateTimeFormat().resolvedOptions().timeZone || 'UTC';
|
||||
} catch (error) {
|
||||
return 'UTC';
|
||||
}
|
||||
};
|
||||
|
||||
const SettingsPage = () => {
|
||||
const settings = useSettingsStore((s) => s.settings);
|
||||
const userAgents = useUserAgentsStore((s) => s.userAgents);

@@ -59,11 +201,51 @@ const SettingsPage = () => {
  // Store pending changed settings when showing the dialog
  const [pendingChangedSettings, setPendingChangedSettings] = useState(null);
  const [comskipFile, setComskipFile] = useState(null);
  const [comskipUploadLoading, setComskipUploadLoading] = useState(false);
  const [comskipConfig, setComskipConfig] = useState({
    path: '',
    exists: false,
  });

  // UI / local storage settings
  const [tableSize, setTableSize] = useLocalStorage('table-size', 'default');
  const [timeFormat, setTimeFormat] = useLocalStorage('time-format', '12h');
  const [dateFormat, setDateFormat] = useLocalStorage('date-format', 'mdy');
  const [timeZone, setTimeZone] = useLocalStorage(
    'time-zone',
    getDefaultTimeZone()
  );
  const timeZoneOptions = useMemo(
    () => buildTimeZoneOptions(timeZone),
    [timeZone]
  );
  const timeZoneSyncedRef = useRef(false);

  const persistTimeZoneSetting = useCallback(
    async (tzValue) => {
      try {
        const existing = settings['system-time-zone'];
        if (existing && existing.id) {
          await API.updateSetting({ ...existing, value: tzValue });
        } else {
          await API.createSetting({
            key: 'system-time-zone',
            name: 'System Time Zone',
            value: tzValue,
          });
        }
      } catch (error) {
        console.error('Failed to persist time zone setting', error);
        notifications.show({
          title: 'Failed to update time zone',
          message: 'Could not save the selected time zone. Please try again.',
          color: 'red',
        });
      }
    },
    [settings]
  );

  const regionChoices = REGION_CHOICES;

@@ -80,6 +262,7 @@ const SettingsPage = () => {
      'dvr-tv-fallback-template': '',
      'dvr-movie-fallback-template': '',
      'dvr-comskip-enabled': false,
      'dvr-comskip-custom-path': '',
      'dvr-pre-offset-minutes': 0,
      'dvr-post-offset-minutes': 0,
    },

@@ -158,6 +341,12 @@ const SettingsPage = () => {
      );

      form.setValues(formValues);
      if (formValues['dvr-comskip-custom-path']) {
        setComskipConfig((prev) => ({
          path: formValues['dvr-comskip-custom-path'],
          exists: prev.exists,
        }));
      }

      const networkAccessSettings = JSON.parse(
        settings['network-access'].value || '{}'

@@ -177,8 +366,39 @@ const SettingsPage = () => {
        console.error('Error parsing proxy settings:', error);
      }
    }

    const tzSetting = settings['system-time-zone'];
    if (tzSetting?.value) {
      timeZoneSyncedRef.current = true;
      setTimeZone((prev) =>
        prev === tzSetting.value ? prev : tzSetting.value
      );
    } else if (!timeZoneSyncedRef.current && timeZone) {
      timeZoneSyncedRef.current = true;
      persistTimeZoneSetting(timeZone);
    }
    }
  }, [settings]);
  }, [settings, timeZone, setTimeZone, persistTimeZoneSetting]);

  useEffect(() => {
    const loadComskipConfig = async () => {
      try {
        const response = await API.getComskipConfig();
        if (response) {
          setComskipConfig({
            path: response.path || '',
            exists: Boolean(response.exists),
          });
          if (response.path) {
            form.setFieldValue('dvr-comskip-custom-path', response.path);
          }
        }
      } catch (error) {
        console.error('Failed to load comskip config', error);
      }
    };
    loadComskipConfig();
  }, []);
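The effect above expects `API.getComskipConfig()` to resolve to an object shaped like `{ path, exists }`. The API module is outside this diff; a minimal fetch-based sketch under that assumption, with an illustrative endpoint URL:

```js
// Sketch only: the URL and credentials handling are assumptions; the
// response shape { path, exists } matches what the effect above consumes.
const getComskipConfig = async () => {
  const response = await fetch('/api/channels/dvr/comskip-config/', {
    credentials: 'include',
  });
  if (!response.ok) {
    throw new Error(`comskip config request failed: ${response.status}`);
  }
  return response.json(); // => { path: string, exists: boolean }
};
```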

  const onSubmit = async () => {
    const values = form.getValues();

@@ -263,6 +483,39 @@ const SettingsPage = () => {
    setProxySettingsSaved(true);
  };

  const onComskipUpload = async () => {
    if (!comskipFile) {
      return;
    }

    setComskipUploadLoading(true);
    try {
      const response = await API.uploadComskipIni(comskipFile);
      if (response?.path) {
        notifications.show({
          title: 'comskip.ini uploaded',
          message: response.path,
          autoClose: 3000,
          color: 'green',
        });
        form.setFieldValue('dvr-comskip-custom-path', response.path);
        useSettingsStore.getState().updateSetting({
          ...(settings['dvr-comskip-custom-path'] || {
            key: 'dvr-comskip-custom-path',
            name: 'DVR Comskip Custom Path',
          }),
          value: response.path,
        });
        setComskipConfig({ path: response.path, exists: true });
      }
    } catch (error) {
      console.error('Failed to upload comskip.ini', error);
    } finally {
      setComskipUploadLoading(false);
      setComskipFile(null);
    }
  };
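`API.uploadComskipIni(comskipFile)` is likewise defined outside this diff. A hedged sketch of what a multipart upload helper could look like, assuming the server stores the file and responds with its `path` as the handler above expects:

```js
// Sketch only: the endpoint, form field name, and error handling are assumptions.
const uploadComskipIni = async (file) => {
  const body = new FormData();
  body.append('file', file);
  const response = await fetch('/api/channels/dvr/comskip-config/', {
    method: 'POST',
    credentials: 'include',
    body,
  });
  if (!response.ok) {
    throw new Error(`comskip.ini upload failed: ${response.status}`);
  }
  return response.json(); // => { path: string }
};
```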

  const resetProxySettingsToDefaults = () => {
    const defaultValues = {
      buffering_timeout: 15,

@@ -296,13 +549,19 @@ const SettingsPage = () => {
  const onUISettingsChange = (name, value) => {
    switch (name) {
      case 'table-size':
        setTableSize(value);
        if (value) setTableSize(value);
        break;
      case 'time-format':
        setTimeFormat(value);
        if (value) setTimeFormat(value);
        break;
      case 'date-format':
        setDateFormat(value);
        if (value) setDateFormat(value);
        break;
      case 'time-zone':
        if (value) {
          setTimeZone(value);
          persistTimeZoneSetting(value);
        }
        break;
    }
  };

@@ -434,6 +693,14 @@ const SettingsPage = () => {
              },
            ]}
          />
          <Select
            label="Time zone"
            searchable
            nothingFoundMessage="No matches"
            value={timeZone}
            onChange={(val) => onUISettingsChange('time-zone', val)}
            data={timeZoneOptions}
          />
        </Accordion.Panel>
      </Accordion.Item>

@@ -459,6 +726,46 @@ const SettingsPage = () => {
                'dvr-comskip-enabled'
              }
            />
            <TextInput
              label="Custom comskip.ini path"
              description="Leave blank to use the built-in defaults."
              placeholder="/app/docker/comskip.ini"
              {...form.getInputProps('dvr-comskip-custom-path')}
              key={form.key('dvr-comskip-custom-path')}
              id={
                settings['dvr-comskip-custom-path']?.id ||
                'dvr-comskip-custom-path'
              }
              name={
                settings['dvr-comskip-custom-path']?.key ||
                'dvr-comskip-custom-path'
              }
            />
            <Group align="flex-end" gap="sm">
              <FileInput
                placeholder="Select comskip.ini"
                accept=".ini"
                value={comskipFile}
                onChange={setComskipFile}
                clearable
                disabled={comskipUploadLoading}
                style={{ flex: 1 }}
              />
              <Button
                variant="light"
                onClick={onComskipUpload}
                disabled={!comskipFile || comskipUploadLoading}
              >
                {comskipUploadLoading
                  ? 'Uploading...'
                  : 'Upload comskip.ini'}
              </Button>
            </Group>
            <Text size="xs" c="dimmed">
              {comskipConfig.exists && comskipConfig.path
                ? `Using ${comskipConfig.path}`
                : 'No custom comskip.ini uploaded.'}
            </Text>
            <NumberInput
              label="Start early (minutes)"
              description="Begin recording this many minutes before the scheduled start."

@@ -15,6 +15,7 @@ const useChannelsStore = create((set, get) => ({
  activeChannels: {},
  activeClients: {},
  recordings: [],
  recurringRules: [],
  isLoading: false,
  error: null,
  forceUpdate: 0,

@@ -408,6 +409,23 @@ const useChannelsStore = create((set, get) => ({
    }
  },

  fetchRecurringRules: async () => {
    try {
      const rules = await api.listRecurringRules();
      set({ recurringRules: Array.isArray(rules) ? rules : [] });
    } catch (error) {
      console.error('Failed to fetch recurring DVR rules:', error);
      set({ error: 'Failed to load recurring DVR rules.' });
    }
  },

  removeRecurringRule: (id) =>
    set((state) => ({
      recurringRules: Array.isArray(state.recurringRules)
        ? state.recurringRules.filter((rule) => String(rule?.id) !== String(id))
        : [],
    })),
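These store additions are plain Zustand actions: `fetchRecurringRules` loads rules from the API and `removeRecurringRule` drops one locally. A usage sketch from a hypothetical consumer (the delete helper `api.deleteRecurringRule` is an assumption; imports are omitted):

```jsx
// Hypothetical component: load recurring rules on mount, delete one on click.
const RecurringRulesList = () => {
  const rules = useChannelsStore((s) => s.recurringRules);
  const fetchRecurringRules = useChannelsStore((s) => s.fetchRecurringRules);
  const removeRecurringRule = useChannelsStore((s) => s.removeRecurringRule);

  useEffect(() => {
    fetchRecurringRules();
  }, [fetchRecurringRules]);

  const onDelete = async (id) => {
    await api.deleteRecurringRule(id); // assumed API helper
    removeRecurringRule(id); // optimistic local removal
  };

  return rules.map((rule) => (
    <button key={rule.id} onClick={() => onDelete(rule.id)}>
      Delete rule {rule.id}
    </button>
  ));
};
```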

  // Optimistically remove a single recording from the local store
  removeRecording: (id) =>
    set((state) => {