diff --git a/.github/workflows/frontend-tests.yml b/.github/workflows/frontend-tests.yml new file mode 100644 index 00000000..4e9e2505 --- /dev/null +++ b/.github/workflows/frontend-tests.yml @@ -0,0 +1,41 @@ +name: Frontend Tests + +on: + push: + branches: [main, dev] + paths: + - 'frontend/**' + - '.github/workflows/frontend-tests.yml' + pull_request: + branches: [main, dev] + paths: + - 'frontend/**' + - '.github/workflows/frontend-tests.yml' + +jobs: + test: + runs-on: ubuntu-latest + + defaults: + run: + working-directory: ./frontend + + steps: + - name: Checkout code + uses: actions/checkout@v4 + + - name: Setup Node.js + uses: actions/setup-node@v4 + with: + node-version: '24' + cache: 'npm' + cache-dependency-path: './frontend/package-lock.json' + + - name: Install dependencies + run: npm ci + + # - name: Run linter + # run: npm run lint + + - name: Run tests + run: npm test diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index a1cb27bb..9186541d 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -184,13 +184,13 @@ jobs: echo "Creating multi-arch manifest for ${OWNER}/${REPO}" # GitHub Container Registry manifests - # latest tag + # Create one manifest with both latest and version tags docker buildx imagetools create \ --annotation "index:org.opencontainers.image.title=${{ needs.prepare.outputs.repo_name }}" \ --annotation "index:org.opencontainers.image.description=Your ultimate IPTV & stream Management companion." \ --annotation "index:org.opencontainers.image.url=https://github.com/${{ github.repository }}" \ --annotation "index:org.opencontainers.image.source=https://github.com/${{ github.repository }}" \ - --annotation "index:org.opencontainers.image.version=latest" \ + --annotation "index:org.opencontainers.image.version=${VERSION}" \ --annotation "index:org.opencontainers.image.created=${TIMESTAMP}" \ --annotation "index:org.opencontainers.image.revision=${{ github.sha }}" \ --annotation "index:org.opencontainers.image.licenses=See repository" \ @@ -200,9 +200,11 @@ jobs: --annotation "index:maintainer=${{ github.actor }}" \ --annotation "index:build_version=Dispatcharr version: ${VERSION} Build date: ${TIMESTAMP}" \ --tag ghcr.io/${OWNER}/${REPO}:latest \ - ghcr.io/${OWNER}/${REPO}:latest-amd64 ghcr.io/${OWNER}/${REPO}:latest-arm64 + --tag ghcr.io/${OWNER}/${REPO}:${VERSION} \ + ghcr.io/${OWNER}/${REPO}:${VERSION}-amd64 ghcr.io/${OWNER}/${REPO}:${VERSION}-arm64 - # version tag + # Docker Hub manifests + # Create one manifest with both latest and version tags docker buildx imagetools create \ --annotation "index:org.opencontainers.image.title=${{ needs.prepare.outputs.repo_name }}" \ --annotation "index:org.opencontainers.image.description=Your ultimate IPTV & stream Management companion." \ @@ -217,43 +219,7 @@ jobs: --annotation "index:org.opencontainers.image.authors=${{ github.actor }}" \ --annotation "index:maintainer=${{ github.actor }}" \ --annotation "index:build_version=Dispatcharr version: ${VERSION} Build date: ${TIMESTAMP}" \ - --tag ghcr.io/${OWNER}/${REPO}:${VERSION} \ - ghcr.io/${OWNER}/${REPO}:${VERSION}-amd64 ghcr.io/${OWNER}/${REPO}:${VERSION}-arm64 - - # Docker Hub manifests - # latest tag - docker buildx imagetools create \ - --annotation "index:org.opencontainers.image.title=${{ needs.prepare.outputs.repo_name }}" \ - --annotation "index:org.opencontainers.image.description=Your ultimate IPTV & stream Management companion." 
\ - --annotation "index:org.opencontainers.image.url=https://github.com/${{ github.repository }}" \ - --annotation "index:org.opencontainers.image.source=https://github.com/${{ github.repository }}" \ - --annotation "index:org.opencontainers.image.version=latest" \ - --annotation "index:org.opencontainers.image.created=${TIMESTAMP}" \ - --annotation "index:org.opencontainers.image.revision=${{ github.sha }}" \ - --annotation "index:org.opencontainers.image.licenses=See repository" \ - --annotation "index:org.opencontainers.image.documentation=https://dispatcharr.github.io/Dispatcharr-Docs/" \ - --annotation "index:org.opencontainers.image.vendor=${OWNER}" \ - --annotation "index:org.opencontainers.image.authors=${{ github.actor }}" \ - --annotation "index:maintainer=${{ github.actor }}" \ - --annotation "index:build_version=Dispatcharr version: ${VERSION} Build date: ${TIMESTAMP}" \ --tag docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${REPO}:latest \ - docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${REPO}:latest-amd64 docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${REPO}:latest-arm64 - - # version tag - docker buildx imagetools create \ - --annotation "index:org.opencontainers.image.title=${{ needs.prepare.outputs.repo_name }}" \ - --annotation "index:org.opencontainers.image.description=Your ultimate IPTV & stream Management companion." \ - --annotation "index:org.opencontainers.image.url=https://github.com/${{ github.repository }}" \ - --annotation "index:org.opencontainers.image.source=https://github.com/${{ github.repository }}" \ - --annotation "index:org.opencontainers.image.version=${VERSION}" \ - --annotation "index:org.opencontainers.image.created=${TIMESTAMP}" \ - --annotation "index:org.opencontainers.image.revision=${{ github.sha }}" \ - --annotation "index:org.opencontainers.image.licenses=See repository" \ - --annotation "index:org.opencontainers.image.documentation=https://dispatcharr.github.io/Dispatcharr-Docs/" \ - --annotation "index:org.opencontainers.image.vendor=${OWNER}" \ - --annotation "index:org.opencontainers.image.authors=${{ github.actor }}" \ - --annotation "index:maintainer=${{ github.actor }}" \ - --annotation "index:build_version=Dispatcharr version: ${VERSION} Build date: ${TIMESTAMP}" \ --tag docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${REPO}:${VERSION} \ docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${REPO}:${VERSION}-amd64 docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${REPO}:${VERSION}-arm64 diff --git a/.gitignore b/.gitignore index a9d76412..20968f46 100755 --- a/.gitignore +++ b/.gitignore @@ -18,4 +18,5 @@ dump.rdb debugpy* uwsgi.sock package-lock.json -models \ No newline at end of file +models +.idea \ No newline at end of file diff --git a/CHANGELOG.md b/CHANGELOG.md index 114d42ce..610c2ee5 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -7,6 +7,61 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 ## [Unreleased] +### Changed + +- Frontend tests GitHub workflow now uses Node.js 24 (matching Dockerfile) and runs on both `main` and `dev` branch pushes and pull requests for comprehensive CI coverage. + +### Fixed + +- Fixed NumPy baseline detection in Docker entrypoint. Now calls `numpy.show_config()` directly with case-insensitive grep instead of incorrectly wrapping the output. +- Fixed SettingsUtils frontend tests for new grouped settings architecture. Updated test suite to properly verify grouped JSON settings (stream_settings, dvr_settings, etc.) 
instead of individual CharField settings, including tests for type conversions, array-to-CSV transformations, and special handling of proxy_settings and network_access.
+
+## [0.17.0] - 2026-01-13
+
+### Added
+
+- Loading feedback for all confirmation dialogs: Extended visual loading indicators across all confirmation dialogs throughout the application. Delete, cleanup, and bulk operation dialogs now show an animated dots loader and disabled state during async operations, providing consistent user feedback for backups (restore/delete), channels, EPGs, logos, VOD logos, M3U accounts, streams, users, groups, filters, profiles, batch operations, and network access changes.
+- Channel profile edit and duplicate functionality: Users can now rename existing channel profiles and create duplicates with automatic channel membership cloning. Each profile action (edit, duplicate, delete) is available in the profile dropdown for quick access.
+- ProfileModal component extracted for improved code organization and maintainability of channel profile management operations.
+- Frontend unit tests for pages and utilities: Added comprehensive unit test coverage for frontend components within pages/ and JS files within utils/, along with a GitHub Actions workflow (`frontend-tests.yml`) to automatically run tests on commits and pull requests - Thanks [@nick4810](https://github.com/nick4810)
+- Channel Profile membership control for manual channel creation and bulk operations: Extended the existing `channel_profile_ids` parameter from `POST /api/channels/from-stream/` to also support `POST /api/channels/` (manual creation) and bulk creation tasks with the same flexible semantics:
+  - Omitted parameter (default): Channels are added to ALL profiles (preserves backward compatibility)
+  - Empty array `[]`: Channels are added to NO profiles
+  - Sentinel value `[0]`: Channels are added to ALL profiles (explicit)
+  - Specific IDs `[1, 2, ...]`: Channels are added only to the specified profiles
+  This allows API consumers to control profile membership across all channel creation methods without requiring all channels to be added to every profile by default.
+- Channel profile selection in creation modal: Users can now choose which profiles to add channels to when creating channels from streams (both single and bulk operations). Options include adding to all profiles, no profiles, or specific profiles with mutual exclusivity between special options ("All Profiles", "None") and specific profile selections. Profile selection defaults to the current table filter for an intuitive workflow.
+- Group retention policy for M3U accounts: Groups now follow the same stale retention logic as streams, using the account's `stale_stream_days` setting. Groups that temporarily disappear from an M3U source are retained for the configured retention period instead of being immediately deleted, preserving user settings and preventing data loss when providers temporarily remove/re-add groups. (Closes #809)
+- Visual stale indicators for streams and groups: Added `is_stale` field to Stream and both `is_stale` and `last_seen` fields to ChannelGroupM3UAccount models to track items in their retention grace period. Stale groups display with orange buttons and a warning tooltip, while stale streams show with a red background color matching the visual treatment of empty channels.
+
+### Changed
+
+- Settings architecture refactored to use grouped JSON storage: Migrated from individual CharField settings to grouped JSONField settings for improved performance, maintainability, and type safety. Settings are now organized into logical groups (stream_settings, dvr_settings, backup_settings, system_settings, proxy_settings, network_access) with automatic migration handling. Backend provides helper methods (`get_stream_settings()`, `get_default_user_agent_id()`, etc.) for easy access. Frontend simplified by removing complex key mapping logic and standardizing on underscore-based field names throughout.
+- Docker setup enhanced for legacy CPU support: Added `USE_LEGACY_NUMPY` environment variable to enable custom-built NumPy with no CPU baseline, allowing Dispatcharr to run on older CPUs (circa 2009) that lack support for newer baseline CPU features. When set to `true`, the entrypoint script will install the legacy NumPy build instead of the standard distribution. (Fixes #805)
+- VOD upstream read timeout reduced from 30 seconds to 10 seconds to minimize lock hold time when clients disconnect during the connection phase.
+- Form management refactored across the application: Migrated Channel, Stream, M3U Profile, Stream Profile, Logo, and User Agent forms from Formik to React Hook Form (RHF) with Yup validation for improved form handling, better validation feedback, and enhanced code maintainability.
+- Stats and VOD pages refactored for clearer separation of concerns: extracted Stream/VOD connection cards (StreamConnectionCard, VodConnectionCard, VODCard, SeriesCard), moved page logic into dedicated utils, and lazy-loaded heavy components with ErrorBoundary fallbacks to improve readability and maintainability - Thanks [@nick4810](https://github.com/nick4810)
+- Channel creation modal refactored: Extracted and unified channel numbering dialogs from StreamsTable into a dedicated CreateChannelModal component that handles both single and bulk channel creation with cleaner, more maintainable implementation and integrated profile selection controls.
+
+### Fixed
+
+- Fixed bulk channel profile membership update endpoint silently ignoring channels without existing membership records. The endpoint now creates missing memberships automatically (matching single-channel endpoint behavior), validates that all channel IDs exist before processing, and provides detailed response feedback including counts of updated vs. created memberships. Added comprehensive Swagger documentation with request/response schemas.
+- Fixed bulk channel edit endpoint crashing with `ValueError: Field names must be given to bulk_update()` when the first channel in the update list had no actual field changes. The endpoint now collects all unique field names from all channels being updated instead of only looking at the first channel, properly handling cases where different channels update different fields or when some channels have no changes - Thanks [@mdellavo](https://github.com/mdellavo) (Fixes #804)
+- Fixed PostgreSQL backup restore not completely cleaning the database before restoration. The restore process now drops and recreates the entire `public` schema before running `pg_restore`, ensuring a truly clean restore that removes all tables, functions, and other objects not present in the backup file. This prevents leftover database objects from persisting when restoring backups from older branches or versions. Added `--no-owner` flag to `pg_restore` to avoid role permission errors when the backup was created by a different PostgreSQL user.
+- Fixed TV Guide loading overlay not disappearing after navigating from the DVR page. The `fetchRecordings()` function in the channels store was setting `isLoading: true` on start but never resetting it to `false` on successful completion, causing the Guide page's loading overlay to remain visible indefinitely when accessed after the DVR page.
+- Fixed stream profile parameters not properly handling quoted arguments. Switched from basic `.split()` to `shlex.split()` for parsing command-line parameters, allowing proper handling of multi-word arguments in quotes (e.g., OAuth tokens in HTTP headers like `"--twitch-api-header=Authorization=OAuth token123"`). This ensures external streaming tools like Streamlink and FFmpeg receive correctly formatted arguments when using stream profiles with complex parameters - Thanks [@justinforlenza](https://github.com/justinforlenza) (Fixes #833)
+- Fixed bulk and manual channel creation not refreshing channel profile memberships in the UI for all connected clients. WebSocket `channels_created` event now calls `fetchChannelProfiles()` to ensure profile membership updates are reflected in real-time for all users without requiring a page refresh.
+- Fixed Channel Profile filter incorrectly applying profile membership filtering even when "Show Disabled" was enabled, preventing all channels from being displayed. Profile filter now only applies when hiding disabled channels. (Fixes #825)
+- Fixed manual channel creation not adding channels to channel profiles. Manually created channels are now added to the selected profile if one is active, or to all profiles if "All" is selected, matching the behavior of channels created from streams.
+- Fixed VOD streams disappearing from the stats page during playback by adding `socket-timeout = 600` to production uWSGI config. The missing directive caused uWSGI to use its default 4-second timeout, triggering premature cleanup when clients buffered content. Now matches the existing `http-timeout = 600` value and prevents timeout errors during normal client buffering - Thanks [@patchy8736](https://github.com/patchy8736)
+- Fixed Channels table EPG column showing "Not Assigned" on initial load for users with large EPG datasets. Added `tvgsLoaded` flag to EPG store to track when EPG data has finished loading, ensuring the table waits for EPG data before displaying. EPG cells now show animated skeleton placeholders while loading instead of incorrectly showing "Not Assigned". (Fixes #810)
+- Fixed VOD profile connection count not being decremented when a stream connection fails (timeout, 404, etc.), which could leave profiles stuck at their capacity limits and cause valid stream requests to be rejected.
+- Fixed React warning in Channel form by removing invalid `removeTrailingZeros` prop from the NumberInput component.
+- Release workflow Docker tagging: Fixed issue where `latest` and version tags (e.g., `0.16.0`) were creating separate manifests instead of pointing to the same image digest, which caused old `latest` tags to become orphaned/untagged after new releases. Now creates a single multi-arch manifest with both tags, maintaining proper tag relationships and download statistics visibility on GitHub.
+- Fixed onboarding message appearing in the Channels Table when filtered results are empty.
The onboarding message now only displays when there are no channels created at all, not when channels exist but are filtered out by current filters. +- Fixed `M3UMovieRelation.get_stream_url()` and `M3UEpisodeRelation.get_stream_url()` to use XC client's `_normalize_url()` method instead of simple `rstrip('/')`. This properly handles malformed M3U account URLs (e.g., containing `/player_api.php` or query parameters) before constructing VOD stream endpoints, matching behavior of live channel URL building. (Closes #722) +- Fixed bulk_create and bulk_update errors during VOD content refresh by pre-checking object existence with optimized bulk queries (3 queries total instead of N per batch) before creating new objects. This ensures all movie/series objects have primary keys before relation operations, preventing "prohibited to prevent data loss due to unsaved related object" errors. Additionally fixed duplicate key constraint violations by treating TMDB/IMDB ID values of `0` or `'0'` as invalid (some providers use this to indicate "no ID"), converting them to NULL to prevent multiple items from incorrectly sharing the same ID. (Fixes #813) + ## [0.16.0] - 2026-01-04 ### Added diff --git a/apps/backups/scheduler.py b/apps/backups/scheduler.py index b5f99fe5..aa7e9bcd 100644 --- a/apps/backups/scheduler.py +++ b/apps/backups/scheduler.py @@ -9,60 +9,47 @@ logger = logging.getLogger(__name__) BACKUP_SCHEDULE_TASK_NAME = "backup-scheduled-task" -SETTING_KEYS = { - "enabled": "backup_schedule_enabled", - "frequency": "backup_schedule_frequency", - "time": "backup_schedule_time", - "day_of_week": "backup_schedule_day_of_week", - "retention_count": "backup_retention_count", - "cron_expression": "backup_schedule_cron_expression", -} - DEFAULTS = { - "enabled": True, - "frequency": "daily", - "time": "03:00", - "day_of_week": 0, # Sunday + "schedule_enabled": True, + "schedule_frequency": "daily", + "schedule_time": "03:00", + "schedule_day_of_week": 0, # Sunday "retention_count": 3, - "cron_expression": "", + "schedule_cron_expression": "", } -def _get_setting(key: str, default=None): - """Get a backup setting from CoreSettings.""" +def _get_backup_settings(): + """Get all backup settings from CoreSettings grouped JSON.""" try: - setting = CoreSettings.objects.get(key=SETTING_KEYS[key]) - value = setting.value - if key == "enabled": - return value.lower() == "true" - elif key in ("day_of_week", "retention_count"): - return int(value) - return value + settings_obj = CoreSettings.objects.get(key="backup_settings") + return settings_obj.value if isinstance(settings_obj.value, dict) else DEFAULTS.copy() except CoreSettings.DoesNotExist: - return default if default is not None else DEFAULTS.get(key) + return DEFAULTS.copy() -def _set_setting(key: str, value) -> None: - """Set a backup setting in CoreSettings.""" - str_value = str(value).lower() if isinstance(value, bool) else str(value) - CoreSettings.objects.update_or_create( - key=SETTING_KEYS[key], - defaults={ - "name": f"Backup {key.replace('_', ' ').title()}", - "value": str_value, - }, +def _update_backup_settings(updates: dict) -> None: + """Update backup settings in the grouped JSON.""" + obj, created = CoreSettings.objects.get_or_create( + key="backup_settings", + defaults={"name": "Backup Settings", "value": DEFAULTS.copy()} ) + current = obj.value if isinstance(obj.value, dict) else {} + current.update(updates) + obj.value = current + obj.save() def get_schedule_settings() -> dict: """Get all backup schedule settings.""" + settings = 
_get_backup_settings() return { - "enabled": _get_setting("enabled"), - "frequency": _get_setting("frequency"), - "time": _get_setting("time"), - "day_of_week": _get_setting("day_of_week"), - "retention_count": _get_setting("retention_count"), - "cron_expression": _get_setting("cron_expression"), + "enabled": bool(settings.get("schedule_enabled", DEFAULTS["schedule_enabled"])), + "frequency": str(settings.get("schedule_frequency", DEFAULTS["schedule_frequency"])), + "time": str(settings.get("schedule_time", DEFAULTS["schedule_time"])), + "day_of_week": int(settings.get("schedule_day_of_week", DEFAULTS["schedule_day_of_week"])), + "retention_count": int(settings.get("retention_count", DEFAULTS["retention_count"])), + "cron_expression": str(settings.get("schedule_cron_expression", DEFAULTS["schedule_cron_expression"])), } @@ -90,10 +77,22 @@ def update_schedule_settings(data: dict) -> dict: if count < 0: raise ValueError("retention_count must be >= 0") - # Update settings - for key in ("enabled", "frequency", "time", "day_of_week", "retention_count", "cron_expression"): - if key in data: - _set_setting(key, data[key]) + # Update settings with proper key names + updates = {} + if "enabled" in data: + updates["schedule_enabled"] = bool(data["enabled"]) + if "frequency" in data: + updates["schedule_frequency"] = str(data["frequency"]) + if "time" in data: + updates["schedule_time"] = str(data["time"]) + if "day_of_week" in data: + updates["schedule_day_of_week"] = int(data["day_of_week"]) + if "retention_count" in data: + updates["retention_count"] = int(data["retention_count"]) + if "cron_expression" in data: + updates["schedule_cron_expression"] = str(data["cron_expression"]) + + _update_backup_settings(updates) # Sync the periodic task _sync_periodic_task() diff --git a/apps/backups/services.py b/apps/backups/services.py index b99fab6d..b638e701 100644 --- a/apps/backups/services.py +++ b/apps/backups/services.py @@ -72,17 +72,47 @@ def _dump_postgresql(output_file: Path) -> None: logger.debug(f"pg_dump output: {result.stderr}") +def _clean_postgresql_schema() -> None: + """Drop and recreate the public schema to ensure a completely clean restore.""" + logger.info("[PG_CLEAN] Dropping and recreating public schema...") + + # Commands to drop and recreate schema + sql_commands = "DROP SCHEMA IF EXISTS public CASCADE; CREATE SCHEMA public; GRANT ALL ON SCHEMA public TO public;" + + cmd = [ + "psql", + *_get_pg_args(), + "-c", sql_commands, + ] + + result = subprocess.run( + cmd, + env=_get_pg_env(), + capture_output=True, + text=True, + ) + + if result.returncode != 0: + logger.error(f"[PG_CLEAN] Failed to clean schema: {result.stderr}") + raise RuntimeError(f"Failed to clean PostgreSQL schema: {result.stderr}") + + logger.info("[PG_CLEAN] Schema cleaned successfully") + + def _restore_postgresql(dump_file: Path) -> None: """Restore PostgreSQL database using pg_restore.""" logger.info("[PG_RESTORE] Starting pg_restore...") logger.info(f"[PG_RESTORE] Dump file: {dump_file}") + # Drop and recreate schema to ensure a completely clean restore + _clean_postgresql_schema() + pg_args = _get_pg_args() logger.info(f"[PG_RESTORE] Connection args: {pg_args}") cmd = [ "pg_restore", - "--clean", # Clean (drop) database objects before recreating + "--no-owner", # Skip ownership commands (we already created schema) *pg_args, "-v", # Verbose str(dump_file), diff --git a/apps/channels/api_views.py b/apps/channels/api_views.py index aebb74a3..c2ba7a06 100644 --- a/apps/channels/api_views.py +++ 
b/apps/channels/api_views.py @@ -9,7 +9,8 @@ from drf_yasg import openapi from django.shortcuts import get_object_or_404, get_list_or_404 from django.db import transaction from django.db.models import Q -import os, json, requests, logging +import os, json, requests, logging, mimetypes +from django.utils.http import http_date from urllib.parse import unquote from apps.accounts.permissions import ( Authenticated, @@ -130,6 +131,8 @@ class StreamViewSet(viewsets.ModelViewSet): ordering = ["-name"] def get_permissions(self): + if self.action == "duplicate": + return [IsAdmin()] try: return [perm() for perm in permission_classes_by_action[self.action]] except KeyError: @@ -236,12 +239,8 @@ class ChannelGroupViewSet(viewsets.ModelViewSet): return [Authenticated()] def get_queryset(self): - """Add annotation for association counts""" - from django.db.models import Count - return ChannelGroup.objects.annotate( - channel_count=Count('channels', distinct=True), - m3u_account_count=Count('m3u_accounts', distinct=True) - ) + """Return channel groups with prefetched relations for efficient counting""" + return ChannelGroup.objects.prefetch_related('channels', 'm3u_accounts').all() def update(self, request, *args, **kwargs): """Override update to check M3U associations""" @@ -277,15 +276,20 @@ class ChannelGroupViewSet(viewsets.ModelViewSet): @action(detail=False, methods=["post"], url_path="cleanup") def cleanup_unused_groups(self, request): """Delete all channel groups with no channels or M3U account associations""" - from django.db.models import Count + from django.db.models import Q, Exists, OuterRef + + # Find groups with no channels and no M3U account associations using Exists subqueries + from .models import Channel, ChannelGroupM3UAccount + + has_channels = Channel.objects.filter(channel_group_id=OuterRef('pk')) + has_accounts = ChannelGroupM3UAccount.objects.filter(channel_group_id=OuterRef('pk')) - # Find groups with no channels and no M3U account associations unused_groups = ChannelGroup.objects.annotate( - channel_count=Count('channels', distinct=True), - m3u_account_count=Count('m3u_accounts', distinct=True) + has_channels=Exists(has_channels), + has_accounts=Exists(has_accounts) ).filter( - channel_count=0, - m3u_account_count=0 + has_channels=False, + has_accounts=False ) deleted_count = unused_groups.count() @@ -386,6 +390,72 @@ class ChannelViewSet(viewsets.ModelViewSet): ordering_fields = ["channel_number", "name", "channel_group__name"] ordering = ["-channel_number"] + def create(self, request, *args, **kwargs): + """Override create to handle channel profile membership""" + serializer = self.get_serializer(data=request.data) + serializer.is_valid(raise_exception=True) + + with transaction.atomic(): + channel = serializer.save() + + # Handle channel profile membership + # Semantics: + # - Omitted (None): add to ALL profiles (backward compatible default) + # - Empty array []: add to NO profiles + # - Sentinel [0] or 0: add to ALL profiles (explicit) + # - [1,2,...]: add to specified profile IDs only + channel_profile_ids = request.data.get("channel_profile_ids") + if channel_profile_ids is not None: + # Normalize single ID to array + if not isinstance(channel_profile_ids, list): + channel_profile_ids = [channel_profile_ids] + + # Determine action based on semantics + if channel_profile_ids is None: + # Omitted -> add to all profiles (backward compatible) + profiles = ChannelProfile.objects.all() + ChannelProfileMembership.objects.bulk_create([ + 
ChannelProfileMembership(channel_profile=profile, channel=channel, enabled=True) + for profile in profiles + ]) + elif isinstance(channel_profile_ids, list) and len(channel_profile_ids) == 0: + # Empty array -> add to no profiles + pass + elif isinstance(channel_profile_ids, list) and 0 in channel_profile_ids: + # Sentinel 0 -> add to all profiles (explicit) + profiles = ChannelProfile.objects.all() + ChannelProfileMembership.objects.bulk_create([ + ChannelProfileMembership(channel_profile=profile, channel=channel, enabled=True) + for profile in profiles + ]) + else: + # Specific profile IDs + try: + channel_profiles = ChannelProfile.objects.filter(id__in=channel_profile_ids) + if len(channel_profiles) != len(channel_profile_ids): + missing_ids = set(channel_profile_ids) - set(channel_profiles.values_list('id', flat=True)) + return Response( + {"error": f"Channel profiles with IDs {list(missing_ids)} not found"}, + status=status.HTTP_400_BAD_REQUEST, + ) + + ChannelProfileMembership.objects.bulk_create([ + ChannelProfileMembership( + channel_profile=profile, + channel=channel, + enabled=True + ) + for profile in channel_profiles + ]) + except Exception as e: + return Response( + {"error": f"Error creating profile memberships: {str(e)}"}, + status=status.HTTP_400_BAD_REQUEST, + ) + + headers = self.get_success_headers(serializer.data) + return Response(serializer.data, status=status.HTTP_201_CREATED, headers=headers) + def get_permissions(self): if self.action in [ "edit_bulk", @@ -431,10 +501,15 @@ class ChannelViewSet(viewsets.ModelViewSet): if channel_profile_id: try: profile_id_int = int(channel_profile_id) - filters["channelprofilemembership__channel_profile_id"] = profile_id_int if show_disabled_param is None: + # Show only enabled channels: channels that have a membership + # record for this profile with enabled=True + # Default is DISABLED (channels without membership are hidden) + filters["channelprofilemembership__channel_profile_id"] = profile_id_int filters["channelprofilemembership__enabled"] = True + # If show_disabled is True, show all channels (no filtering needed) + except (ValueError, TypeError): # Ignore invalid profile id values pass @@ -546,11 +621,18 @@ class ChannelViewSet(viewsets.ModelViewSet): # Single bulk_update query instead of individual saves channels_to_update = [channel for channel, _ in validated_updates] if channels_to_update: - Channel.objects.bulk_update( - channels_to_update, - fields=list(validated_updates[0][1].keys()), - batch_size=100 - ) + # Collect all unique field names from all updates + all_fields = set() + for _, validated_data in validated_updates: + all_fields.update(validated_data.keys()) + + # Only call bulk_update if there are fields to update + if all_fields: + Channel.objects.bulk_update( + channels_to_update, + fields=list(all_fields), + batch_size=100 + ) # Return the updated objects (already in memory) serialized_channels = ChannelSerializer( @@ -735,7 +817,7 @@ class ChannelViewSet(viewsets.ModelViewSet): "channel_profile_ids": openapi.Schema( type=openapi.TYPE_ARRAY, items=openapi.Items(type=openapi.TYPE_INTEGER), - description="(Optional) Channel profile ID(s) to add the channel to. Can be a single ID or array of IDs. If not provided, channel is added to all profiles." + description="(Optional) Channel profile ID(s). Behavior: omitted = add to ALL profiles (default); empty array [] = add to NO profiles; [0] = add to ALL profiles (explicit); [1,2,...] = add only to specified profiles." 
), }, ), @@ -828,14 +910,37 @@ class ChannelViewSet(viewsets.ModelViewSet): channel.streams.add(stream) # Handle channel profile membership + # Semantics: + # - Omitted (None): add to ALL profiles (backward compatible default) + # - Empty array []: add to NO profiles + # - Sentinel [0] or 0: add to ALL profiles (explicit) + # - [1,2,...]: add to specified profile IDs only channel_profile_ids = request.data.get("channel_profile_ids") if channel_profile_ids is not None: # Normalize single ID to array if not isinstance(channel_profile_ids, list): channel_profile_ids = [channel_profile_ids] - if channel_profile_ids: - # Add channel only to the specified profiles + # Determine action based on semantics + if channel_profile_ids is None: + # Omitted -> add to all profiles (backward compatible) + profiles = ChannelProfile.objects.all() + ChannelProfileMembership.objects.bulk_create([ + ChannelProfileMembership(channel_profile=profile, channel=channel, enabled=True) + for profile in profiles + ]) + elif isinstance(channel_profile_ids, list) and len(channel_profile_ids) == 0: + # Empty array -> add to no profiles + pass + elif isinstance(channel_profile_ids, list) and 0 in channel_profile_ids: + # Sentinel 0 -> add to all profiles (explicit) + profiles = ChannelProfile.objects.all() + ChannelProfileMembership.objects.bulk_create([ + ChannelProfileMembership(channel_profile=profile, channel=channel, enabled=True) + for profile in profiles + ]) + else: + # Specific profile IDs try: channel_profiles = ChannelProfile.objects.filter(id__in=channel_profile_ids) if len(channel_profiles) != len(channel_profile_ids): @@ -858,13 +963,6 @@ class ChannelViewSet(viewsets.ModelViewSet): {"error": f"Error creating profile memberships: {str(e)}"}, status=status.HTTP_400_BAD_REQUEST, ) - else: - # Default behavior: add to all profiles - profiles = ChannelProfile.objects.all() - ChannelProfileMembership.objects.bulk_create([ - ChannelProfileMembership(channel_profile=profile, channel=channel, enabled=True) - for profile in profiles - ]) # Send WebSocket notification for single channel creation from core.utils import send_websocket_update @@ -897,7 +995,7 @@ class ChannelViewSet(viewsets.ModelViewSet): "channel_profile_ids": openapi.Schema( type=openapi.TYPE_ARRAY, items=openapi.Items(type=openapi.TYPE_INTEGER), - description="(Optional) Channel profile ID(s) to add the channels to. If not provided, channels are added to all profiles." + description="(Optional) Channel profile ID(s). Behavior: omitted = add to ALL profiles (default); empty array [] = add to NO profiles; [0] = add to ALL profiles (explicit); [1,2,...] = add only to specified profiles." 
), "starting_channel_number": openapi.Schema( type=openapi.TYPE_INTEGER, @@ -1556,11 +1654,10 @@ class LogoViewSet(viewsets.ModelViewSet): """Streams the logo file, whether it's local or remote.""" logo = self.get_object() logo_url = logo.url - if logo_url.startswith("/data"): # Local file if not os.path.exists(logo_url): raise Http404("Image not found") - + stat = os.stat(logo_url) # Get proper mime type (first item of the tuple) content_type, _ = mimetypes.guess_type(logo_url) if not content_type: @@ -1570,6 +1667,8 @@ class LogoViewSet(viewsets.ModelViewSet): response = StreamingHttpResponse( open(logo_url, "rb"), content_type=content_type ) + response["Cache-Control"] = "public, max-age=14400" # Cache in browser for 4 hours + response["Last-Modified"] = http_date(stat.st_mtime) response["Content-Disposition"] = 'inline; filename="{}"'.format( os.path.basename(logo_url) ) @@ -1609,6 +1708,10 @@ class LogoViewSet(viewsets.ModelViewSet): remote_response.iter_content(chunk_size=8192), content_type=content_type, ) + if(remote_response.headers.get("Cache-Control")): + response["Cache-Control"] = remote_response.headers.get("Cache-Control") + if(remote_response.headers.get("Last-Modified")): + response["Last-Modified"] = remote_response.headers.get("Last-Modified") response["Content-Disposition"] = 'inline; filename="{}"'.format( os.path.basename(logo_url) ) @@ -1640,11 +1743,58 @@ class ChannelProfileViewSet(viewsets.ModelViewSet): return self.request.user.channel_profiles.all() def get_permissions(self): + if self.action == "duplicate": + return [IsAdmin()] try: return [perm() for perm in permission_classes_by_action[self.action]] except KeyError: return [Authenticated()] + @action(detail=True, methods=["post"], url_path="duplicate", permission_classes=[IsAdmin]) + def duplicate(self, request, pk=None): + requested_name = str(request.data.get("name", "")).strip() + + if not requested_name: + return Response( + {"detail": "Name is required to duplicate a profile."}, + status=status.HTTP_400_BAD_REQUEST, + ) + + if ChannelProfile.objects.filter(name=requested_name).exists(): + return Response( + {"detail": "A channel profile with this name already exists."}, + status=status.HTTP_400_BAD_REQUEST, + ) + + source_profile = self.get_object() + + with transaction.atomic(): + new_profile = ChannelProfile.objects.create(name=requested_name) + + source_memberships = ChannelProfileMembership.objects.filter( + channel_profile=source_profile + ) + source_enabled_map = { + membership.channel_id: membership.enabled + for membership in source_memberships + } + + new_memberships = list( + ChannelProfileMembership.objects.filter(channel_profile=new_profile) + ) + for membership in new_memberships: + membership.enabled = source_enabled_map.get( + membership.channel_id, False + ) + + if new_memberships: + ChannelProfileMembership.objects.bulk_update( + new_memberships, ["enabled"] + ) + + serializer = self.get_serializer(new_profile) + return Response(serializer.data, status=status.HTTP_201_CREATED) + class GetChannelStreamsAPIView(APIView): def get_permissions(self): @@ -1701,6 +1851,30 @@ class BulkUpdateChannelMembershipAPIView(APIView): except KeyError: return [Authenticated()] + @swagger_auto_schema( + operation_description="Bulk enable or disable channels for a specific profile. 
Creates membership records if they don't exist.", + request_body=BulkChannelProfileMembershipSerializer, + responses={ + 200: openapi.Response( + description="Channels updated successfully", + schema=openapi.Schema( + type=openapi.TYPE_OBJECT, + properties={ + "status": openapi.Schema(type=openapi.TYPE_STRING, example="success"), + "updated": openapi.Schema(type=openapi.TYPE_INTEGER, description="Number of channels updated"), + "created": openapi.Schema(type=openapi.TYPE_INTEGER, description="Number of new memberships created"), + "invalid_channels": openapi.Schema( + type=openapi.TYPE_ARRAY, + items=openapi.Schema(type=openapi.TYPE_INTEGER), + description="List of channel IDs that don't exist" + ), + }, + ), + ), + 400: "Invalid request data", + 404: "Profile not found", + }, + ) def patch(self, request, profile_id): """Bulk enable or disable channels for a specific profile""" # Get the channel profile @@ -1713,21 +1887,67 @@ class BulkUpdateChannelMembershipAPIView(APIView): updates = serializer.validated_data["channels"] channel_ids = [entry["channel_id"] for entry in updates] - memberships = ChannelProfileMembership.objects.filter( + # Validate that all channels exist + existing_channels = set( + Channel.objects.filter(id__in=channel_ids).values_list("id", flat=True) + ) + invalid_channels = [cid for cid in channel_ids if cid not in existing_channels] + + if invalid_channels: + return Response( + { + "error": "Some channels do not exist", + "invalid_channels": invalid_channels, + }, + status=status.HTTP_400_BAD_REQUEST, + ) + + # Get existing memberships + existing_memberships = ChannelProfileMembership.objects.filter( channel_profile=channel_profile, channel_id__in=channel_ids ) + membership_dict = {m.channel_id: m for m in existing_memberships} - membership_dict = {m.channel.id: m for m in memberships} + # Prepare lists for bulk operations + memberships_to_update = [] + memberships_to_create = [] for entry in updates: channel_id = entry["channel_id"] enabled_status = entry["enabled"] + if channel_id in membership_dict: + # Update existing membership membership_dict[channel_id].enabled = enabled_status + memberships_to_update.append(membership_dict[channel_id]) + else: + # Create new membership + memberships_to_create.append( + ChannelProfileMembership( + channel_profile=channel_profile, + channel_id=channel_id, + enabled=enabled_status, + ) + ) - ChannelProfileMembership.objects.bulk_update(memberships, ["enabled"]) + # Perform bulk operations + with transaction.atomic(): + if memberships_to_update: + ChannelProfileMembership.objects.bulk_update( + memberships_to_update, ["enabled"] + ) + if memberships_to_create: + ChannelProfileMembership.objects.bulk_create(memberships_to_create) - return Response({"status": "success"}, status=status.HTTP_200_OK) + return Response( + { + "status": "success", + "updated": len(memberships_to_update), + "created": len(memberships_to_create), + "invalid_channels": [], + }, + status=status.HTTP_200_OK, + ) return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) @@ -1773,7 +1993,7 @@ class RecordingViewSet(viewsets.ModelViewSet): def get_permissions(self): # Allow unauthenticated playback of recording files (like other streaming endpoints) - if getattr(self, 'action', None) == 'file': + if self.action == 'file': return [AllowAny()] try: return [perm() for perm in permission_classes_by_action[self.action]] diff --git a/apps/channels/migrations/0031_channelgroupm3uaccount_is_stale_and_more.py 
b/apps/channels/migrations/0031_channelgroupm3uaccount_is_stale_and_more.py new file mode 100644 index 00000000..2428a97b --- /dev/null +++ b/apps/channels/migrations/0031_channelgroupm3uaccount_is_stale_and_more.py @@ -0,0 +1,29 @@ +# Generated by Django 5.2.9 on 2026-01-09 18:19 + +import datetime +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('dispatcharr_channels', '0030_alter_stream_url'), + ] + + operations = [ + migrations.AddField( + model_name='channelgroupm3uaccount', + name='is_stale', + field=models.BooleanField(db_index=True, default=False, help_text='Whether this group relationship is stale (not seen in recent refresh, pending deletion)'), + ), + migrations.AddField( + model_name='channelgroupm3uaccount', + name='last_seen', + field=models.DateTimeField(db_index=True, default=datetime.datetime.now, help_text='Last time this group was seen in the M3U source during a refresh'), + ), + migrations.AddField( + model_name='stream', + name='is_stale', + field=models.BooleanField(db_index=True, default=False, help_text='Whether this stream is stale (not seen in recent refresh, pending deletion)'), + ), + ] diff --git a/apps/channels/models.py b/apps/channels/models.py index 88df3661..6d199520 100644 --- a/apps/channels/models.py +++ b/apps/channels/models.py @@ -94,6 +94,11 @@ class Stream(models.Model): db_index=True, ) last_seen = models.DateTimeField(db_index=True, default=datetime.now) + is_stale = models.BooleanField( + default=False, + db_index=True, + help_text="Whether this stream is stale (not seen in recent refresh, pending deletion)" + ) custom_properties = models.JSONField(default=dict, blank=True, null=True) # Stream statistics fields @@ -589,6 +594,16 @@ class ChannelGroupM3UAccount(models.Model): blank=True, help_text='Starting channel number for auto-created channels in this group' ) + last_seen = models.DateTimeField( + default=datetime.now, + db_index=True, + help_text='Last time this group was seen in the M3U source during a refresh' + ) + is_stale = models.BooleanField( + default=False, + db_index=True, + help_text='Whether this group relationship is stale (not seen in recent refresh, pending deletion)' + ) class Meta: unique_together = ("channel_group", "m3u_account") diff --git a/apps/channels/serializers.py b/apps/channels/serializers.py index 635281d5..c1919e24 100644 --- a/apps/channels/serializers.py +++ b/apps/channels/serializers.py @@ -119,6 +119,7 @@ class StreamSerializer(serializers.ModelSerializer): "current_viewers", "updated_at", "last_seen", + "is_stale", "stream_profile_id", "is_custom", "channel_group", @@ -155,7 +156,7 @@ class ChannelGroupM3UAccountSerializer(serializers.ModelSerializer): class Meta: model = ChannelGroupM3UAccount - fields = ["m3u_accounts", "channel_group", "enabled", "auto_channel_sync", "auto_sync_channel_start", "custom_properties"] + fields = ["m3u_accounts", "channel_group", "enabled", "auto_channel_sync", "auto_sync_channel_start", "custom_properties", "is_stale", "last_seen"] def to_representation(self, instance): data = super().to_representation(instance) @@ -179,8 +180,8 @@ class ChannelGroupM3UAccountSerializer(serializers.ModelSerializer): # Channel Group # class ChannelGroupSerializer(serializers.ModelSerializer): - channel_count = serializers.IntegerField(read_only=True) - m3u_account_count = serializers.IntegerField(read_only=True) + channel_count = serializers.SerializerMethodField() + m3u_account_count = serializers.SerializerMethodField() 
m3u_accounts = ChannelGroupM3UAccountSerializer( many=True, read_only=True @@ -190,6 +191,14 @@ class ChannelGroupSerializer(serializers.ModelSerializer): model = ChannelGroup fields = ["id", "name", "channel_count", "m3u_account_count", "m3u_accounts"] + def get_channel_count(self, obj): + """Get count of channels in this group""" + return obj.channels.count() + + def get_m3u_account_count(self, obj): + """Get count of M3U accounts associated with this group""" + return obj.m3u_accounts.count() + class ChannelProfileSerializer(serializers.ModelSerializer): channels = serializers.SerializerMethodField() diff --git a/apps/channels/tasks.py b/apps/channels/tasks.py index 7ca73ac2..b3e11251 100755 --- a/apps/channels/tasks.py +++ b/apps/channels/tasks.py @@ -2679,7 +2679,38 @@ def bulk_create_channels_from_streams(self, stream_ids, channel_profile_ids=None ) # Handle channel profile membership - if profile_ids: + # Semantics: + # - None: add to ALL profiles (backward compatible default) + # - Empty array []: add to NO profiles + # - Sentinel [0] or 0 in array: add to ALL profiles (explicit) + # - [1,2,...]: add to specified profile IDs only + if profile_ids is None: + # Omitted -> add to all profiles (backward compatible) + all_profiles = ChannelProfile.objects.all() + channel_profile_memberships.extend([ + ChannelProfileMembership( + channel_profile=profile, + channel=channel, + enabled=True + ) + for profile in all_profiles + ]) + elif isinstance(profile_ids, list) and len(profile_ids) == 0: + # Empty array -> add to no profiles + pass + elif isinstance(profile_ids, list) and 0 in profile_ids: + # Sentinel 0 -> add to all profiles (explicit) + all_profiles = ChannelProfile.objects.all() + channel_profile_memberships.extend([ + ChannelProfileMembership( + channel_profile=profile, + channel=channel, + enabled=True + ) + for profile in all_profiles + ]) + else: + # Specific profile IDs try: specific_profiles = ChannelProfile.objects.filter(id__in=profile_ids) channel_profile_memberships.extend([ @@ -2695,17 +2726,6 @@ def bulk_create_channels_from_streams(self, stream_ids, channel_profile_ids=None 'channel_id': channel.id, 'error': f'Failed to add to profiles: {str(e)}' }) - else: - # Add to all profiles by default - all_profiles = ChannelProfile.objects.all() - channel_profile_memberships.extend([ - ChannelProfileMembership( - channel_profile=profile, - channel=channel, - enabled=True - ) - for profile in all_profiles - ]) # Bulk update channels with logos if update: diff --git a/apps/channels/tests/test_channel_api.py b/apps/channels/tests/test_channel_api.py new file mode 100644 index 00000000..bb245da1 --- /dev/null +++ b/apps/channels/tests/test_channel_api.py @@ -0,0 +1,211 @@ +from django.test import TestCase +from django.contrib.auth import get_user_model +from rest_framework.test import APIClient +from rest_framework import status + +from apps.channels.models import Channel, ChannelGroup + +User = get_user_model() + + +class ChannelBulkEditAPITests(TestCase): + def setUp(self): + # Create a test admin user (user_level >= 10) and authenticate + self.user = User.objects.create_user(username="testuser", password="testpass123") + self.user.user_level = 10 # Set admin level + self.user.save() + self.client = APIClient() + self.client.force_authenticate(user=self.user) + self.bulk_edit_url = "/api/channels/channels/edit/bulk/" + + # Create test channel group + self.group1 = ChannelGroup.objects.create(name="Test Group 1") + self.group2 = ChannelGroup.objects.create(name="Test Group 2") + + 
# Create test channels + self.channel1 = Channel.objects.create( + channel_number=1.0, + name="Channel 1", + tvg_id="channel1", + channel_group=self.group1 + ) + self.channel2 = Channel.objects.create( + channel_number=2.0, + name="Channel 2", + tvg_id="channel2", + channel_group=self.group1 + ) + self.channel3 = Channel.objects.create( + channel_number=3.0, + name="Channel 3", + tvg_id="channel3" + ) + + def test_bulk_edit_success(self): + """Test successful bulk update of multiple channels""" + data = [ + {"id": self.channel1.id, "name": "Updated Channel 1"}, + {"id": self.channel2.id, "name": "Updated Channel 2", "channel_number": 22.0}, + ] + + response = self.client.patch(self.bulk_edit_url, data, format="json") + + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertEqual(response.data["message"], "Successfully updated 2 channels") + self.assertEqual(len(response.data["channels"]), 2) + + # Verify database changes + self.channel1.refresh_from_db() + self.channel2.refresh_from_db() + self.assertEqual(self.channel1.name, "Updated Channel 1") + self.assertEqual(self.channel2.name, "Updated Channel 2") + self.assertEqual(self.channel2.channel_number, 22.0) + + def test_bulk_edit_with_empty_validated_data_first(self): + """ + Test the bug fix: when first channel has empty validated_data. + This was causing: ValueError: Field names must be given to bulk_update() + """ + # Create a channel with data that will be "unchanged" (empty validated_data) + # We'll send the same data it already has + data = [ + # First channel: no actual changes (this would create empty validated_data) + {"id": self.channel1.id}, + # Second channel: has changes + {"id": self.channel2.id, "name": "Updated Channel 2"}, + ] + + response = self.client.patch(self.bulk_edit_url, data, format="json") + + # Should not crash with ValueError + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertEqual(response.data["message"], "Successfully updated 2 channels") + + # Verify the channel with changes was updated + self.channel2.refresh_from_db() + self.assertEqual(self.channel2.name, "Updated Channel 2") + + def test_bulk_edit_all_empty_updates(self): + """Test when all channels have empty updates (no actual changes)""" + data = [ + {"id": self.channel1.id}, + {"id": self.channel2.id}, + ] + + response = self.client.patch(self.bulk_edit_url, data, format="json") + + # Should succeed without calling bulk_update + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertEqual(response.data["message"], "Successfully updated 2 channels") + + def test_bulk_edit_mixed_fields(self): + """Test bulk update where different channels update different fields""" + data = [ + {"id": self.channel1.id, "name": "New Name 1"}, + {"id": self.channel2.id, "channel_number": 99.0}, + {"id": self.channel3.id, "tvg_id": "new_tvg_id", "name": "New Name 3"}, + ] + + response = self.client.patch(self.bulk_edit_url, data, format="json") + + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertEqual(response.data["message"], "Successfully updated 3 channels") + + # Verify all updates + self.channel1.refresh_from_db() + self.channel2.refresh_from_db() + self.channel3.refresh_from_db() + + self.assertEqual(self.channel1.name, "New Name 1") + self.assertEqual(self.channel2.channel_number, 99.0) + self.assertEqual(self.channel3.tvg_id, "new_tvg_id") + self.assertEqual(self.channel3.name, "New Name 3") + + def test_bulk_edit_with_channel_group(self): + """Test bulk update with 
channel_group_id changes""" + data = [ + {"id": self.channel1.id, "channel_group_id": self.group2.id}, + {"id": self.channel3.id, "channel_group_id": self.group1.id}, + ] + + response = self.client.patch(self.bulk_edit_url, data, format="json") + + self.assertEqual(response.status_code, status.HTTP_200_OK) + + # Verify group changes + self.channel1.refresh_from_db() + self.channel3.refresh_from_db() + self.assertEqual(self.channel1.channel_group, self.group2) + self.assertEqual(self.channel3.channel_group, self.group1) + + def test_bulk_edit_nonexistent_channel(self): + """Test bulk update with a channel that doesn't exist""" + nonexistent_id = 99999 + data = [ + {"id": nonexistent_id, "name": "Should Fail"}, + {"id": self.channel1.id, "name": "Should Still Update"}, + ] + + response = self.client.patch(self.bulk_edit_url, data, format="json") + + # Should return 400 with errors + self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) + self.assertIn("errors", response.data) + self.assertEqual(len(response.data["errors"]), 1) + self.assertEqual(response.data["errors"][0]["channel_id"], nonexistent_id) + self.assertEqual(response.data["errors"][0]["error"], "Channel not found") + + # The valid channel should still be updated + self.assertEqual(response.data["updated_count"], 1) + + def test_bulk_edit_validation_error(self): + """Test bulk update with invalid data (validation error)""" + data = [ + {"id": self.channel1.id, "channel_number": "invalid_number"}, + ] + + response = self.client.patch(self.bulk_edit_url, data, format="json") + + # Should return 400 with validation errors + self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) + self.assertIn("errors", response.data) + self.assertEqual(len(response.data["errors"]), 1) + self.assertIn("channel_number", response.data["errors"][0]["errors"]) + + def test_bulk_edit_empty_channel_updates(self): + """Test bulk update with empty list""" + data = [] + + response = self.client.patch(self.bulk_edit_url, data, format="json") + + # Empty list is accepted and returns success with 0 updates + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertEqual(response.data["message"], "Successfully updated 0 channels") + + def test_bulk_edit_missing_channel_updates(self): + """Test bulk update without proper format (dict instead of list)""" + data = {"channel_updates": {}} + + response = self.client.patch(self.bulk_edit_url, data, format="json") + + self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) + self.assertEqual(response.data["error"], "Expected a list of channel updates") + + def test_bulk_edit_preserves_other_fields(self): + """Test that bulk update only changes specified fields""" + original_channel_number = self.channel1.channel_number + original_tvg_id = self.channel1.tvg_id + + data = [ + {"id": self.channel1.id, "name": "Only Name Changed"}, + ] + + response = self.client.patch(self.bulk_edit_url, data, format="json") + + self.assertEqual(response.status_code, status.HTTP_200_OK) + + # Verify only name changed, other fields preserved + self.channel1.refresh_from_db() + self.assertEqual(self.channel1.name, "Only Name Changed") + self.assertEqual(self.channel1.channel_number, original_channel_number) + self.assertEqual(self.channel1.tvg_id, original_tvg_id) diff --git a/apps/epg/tasks.py b/apps/epg/tasks.py index bd78c6a3..97552171 100644 --- a/apps/epg/tasks.py +++ b/apps/epg/tasks.py @@ -286,11 +286,12 @@ def fetch_xmltv(source): logger.info(f"Fetching XMLTV data from source: 
{source.name}") try: # Get default user agent from settings - default_user_agent_setting = CoreSettings.objects.filter(key='default-user-agent').first() + stream_settings = CoreSettings.get_stream_settings() user_agent = "Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:138.0) Gecko/20100101 Firefox/138.0" # Fallback default - if default_user_agent_setting and default_user_agent_setting.value: + default_user_agent_id = stream_settings.get('default_user_agent') + if default_user_agent_id: try: - user_agent_obj = UserAgent.objects.filter(id=int(default_user_agent_setting.value)).first() + user_agent_obj = UserAgent.objects.filter(id=int(default_user_agent_id)).first() if user_agent_obj and user_agent_obj.user_agent: user_agent = user_agent_obj.user_agent logger.debug(f"Using default user agent: {user_agent}") @@ -1714,12 +1715,13 @@ def fetch_schedules_direct(source): logger.info(f"Fetching Schedules Direct data from source: {source.name}") try: # Get default user agent from settings - default_user_agent_setting = CoreSettings.objects.filter(key='default-user-agent').first() + stream_settings = CoreSettings.get_stream_settings() user_agent = "Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:138.0) Gecko/20100101 Firefox/138.0" # Fallback default + default_user_agent_id = stream_settings.get('default_user_agent') - if default_user_agent_setting and default_user_agent_setting.value: + if default_user_agent_id: try: - user_agent_obj = UserAgent.objects.filter(id=int(default_user_agent_setting.value)).first() + user_agent_obj = UserAgent.objects.filter(id=int(default_user_agent_id)).first() if user_agent_obj and user_agent_obj.user_agent: user_agent = user_agent_obj.user_agent logger.debug(f"Using default user agent: {user_agent}") diff --git a/apps/m3u/tasks.py b/apps/m3u/tasks.py index 87759ab9..ed9eb465 100644 --- a/apps/m3u/tasks.py +++ b/apps/m3u/tasks.py @@ -513,7 +513,19 @@ def check_field_lengths(streams_to_create): @shared_task -def process_groups(account, groups): +def process_groups(account, groups, scan_start_time=None): + """Process groups and update their relationships with the M3U account. 
+ + Args: + account: M3UAccount instance + groups: Dict of group names to custom properties + scan_start_time: Timestamp when the scan started (for consistent last_seen marking) + """ + # Use scan_start_time if provided, otherwise current time + # This ensures consistency with stream processing and cleanup logic + if scan_start_time is None: + scan_start_time = timezone.now() + existing_groups = { group.name: group for group in ChannelGroup.objects.filter(name__in=groups.keys()) @@ -553,24 +565,8 @@ def process_groups(account, groups): ).select_related('channel_group') } - # Get ALL existing relationships for this account to identify orphaned ones - all_existing_relationships = { - rel.channel_group.name: rel - for rel in ChannelGroupM3UAccount.objects.filter( - m3u_account=account - ).select_related('channel_group') - } - relations_to_create = [] relations_to_update = [] - relations_to_delete = [] - - # Find orphaned relationships (groups that no longer exist in the source) - current_group_names = set(groups.keys()) - for group_name, rel in all_existing_relationships.items(): - if group_name not in current_group_names: - relations_to_delete.append(rel) - logger.debug(f"Marking relationship for deletion: group '{group_name}' no longer exists in source for account {account.id}") for group in all_group_objs: custom_props = groups.get(group.name, {}) @@ -597,9 +593,15 @@ def process_groups(account, groups): del updated_custom_props["xc_id"] existing_rel.custom_properties = updated_custom_props + existing_rel.last_seen = scan_start_time + existing_rel.is_stale = False relations_to_update.append(existing_rel) logger.debug(f"Updated xc_id for group '{group.name}' from '{existing_xc_id}' to '{new_xc_id}' - account {account.id}") else: + # Update last_seen even if xc_id hasn't changed + existing_rel.last_seen = scan_start_time + existing_rel.is_stale = False + relations_to_update.append(existing_rel) logger.debug(f"xc_id unchanged for group '{group.name}' - account {account.id}") else: # Create new relationship - this group is new to this M3U account @@ -613,6 +615,8 @@ def process_groups(account, groups): m3u_account=account, custom_properties=custom_props, enabled=auto_enable_new_groups_live, + last_seen=scan_start_time, + is_stale=False, ) ) @@ -623,15 +627,38 @@ def process_groups(account, groups): # Bulk update existing relationships if relations_to_update: - ChannelGroupM3UAccount.objects.bulk_update(relations_to_update, ['custom_properties']) - logger.info(f"Updated {len(relations_to_update)} existing group relationships with new xc_id values for account {account.id}") + ChannelGroupM3UAccount.objects.bulk_update(relations_to_update, ['custom_properties', 'last_seen', 'is_stale']) + logger.info(f"Updated {len(relations_to_update)} existing group relationships for account {account.id}") - # Delete orphaned relationships - if relations_to_delete: - ChannelGroupM3UAccount.objects.filter( - id__in=[rel.id for rel in relations_to_delete] - ).delete() - logger.info(f"Deleted {len(relations_to_delete)} orphaned group relationships for account {account.id}: {[rel.channel_group.name for rel in relations_to_delete]}") + +def cleanup_stale_group_relationships(account, scan_start_time): + """ + Remove group relationships that haven't been seen since the stale retention period. + This follows the same logic as stream cleanup for consistency. 
+ """ + # Calculate cutoff date for stale group relationships + stale_cutoff = scan_start_time - timezone.timedelta(days=account.stale_stream_days) + logger.info( + f"Removing group relationships not seen since {stale_cutoff} for M3U account {account.id}" + ) + + # Find stale relationships + stale_relationships = ChannelGroupM3UAccount.objects.filter( + m3u_account=account, + last_seen__lt=stale_cutoff + ).select_related('channel_group') + + relations_to_delete = list(stale_relationships) + deleted_count = len(relations_to_delete) + + if deleted_count > 0: + logger.info( + f"Found {deleted_count} stale group relationships for account {account.id}: " + f"{[rel.channel_group.name for rel in relations_to_delete]}" + ) + + # Delete the stale relationships + stale_relationships.delete() # Check if any of the deleted relationships left groups with no remaining associations orphaned_group_ids = [] @@ -656,6 +683,10 @@ def process_groups(account, groups): deleted_groups = list(ChannelGroup.objects.filter(id__in=orphaned_group_ids).values_list('name', flat=True)) ChannelGroup.objects.filter(id__in=orphaned_group_ids).delete() logger.info(f"Deleted {len(orphaned_group_ids)} orphaned groups that had no remaining associations: {deleted_groups}") + else: + logger.debug(f"No stale group relationships found for account {account.id}") + + return deleted_count def collect_xc_streams(account_id, enabled_groups): @@ -803,6 +834,7 @@ def process_xc_category_direct(account_id, batch, groups, hash_keys): "channel_group_id": int(group_id), "stream_hash": stream_hash, "custom_properties": stream, + "is_stale": False, } if stream_hash not in stream_hashes: @@ -838,10 +870,12 @@ def process_xc_category_direct(account_id, batch, groups, hash_keys): setattr(obj, key, value) obj.last_seen = timezone.now() obj.updated_at = timezone.now() # Update timestamp only for changed streams + obj.is_stale = False streams_to_update.append(obj) else: # Always update last_seen, even if nothing else changed obj.last_seen = timezone.now() + obj.is_stale = False # Don't update updated_at for unchanged streams streams_to_update.append(obj) @@ -852,6 +886,7 @@ def process_xc_category_direct(account_id, batch, groups, hash_keys): stream_props["updated_at"] = ( timezone.now() ) # Set initial updated_at for new streams + stream_props["is_stale"] = False streams_to_create.append(Stream(**stream_props)) try: @@ -863,7 +898,7 @@ def process_xc_category_direct(account_id, batch, groups, hash_keys): # Simplified bulk update for better performance Stream.objects.bulk_update( streams_to_update, - ['name', 'url', 'logo_url', 'tvg_id', 'custom_properties', 'last_seen', 'updated_at'], + ['name', 'url', 'logo_url', 'tvg_id', 'custom_properties', 'last_seen', 'updated_at', 'is_stale'], batch_size=150 # Smaller batch size for XC processing ) @@ -976,6 +1011,7 @@ def process_m3u_batch_direct(account_id, batch, groups, hash_keys): "channel_group_id": int(groups.get(group_title)), "stream_hash": stream_hash, "custom_properties": stream_info["attributes"], + "is_stale": False, } if stream_hash not in stream_hashes: @@ -1015,11 +1051,15 @@ def process_m3u_batch_direct(account_id, batch, groups, hash_keys): obj.custom_properties = stream_props["custom_properties"] obj.updated_at = timezone.now() + # Always mark as not stale since we saw it in this refresh + obj.is_stale = False + streams_to_update.append(obj) else: # New stream stream_props["last_seen"] = timezone.now() stream_props["updated_at"] = timezone.now() + stream_props["is_stale"] = False 
streams_to_create.append(Stream(**stream_props)) try: @@ -1031,7 +1071,7 @@ def process_m3u_batch_direct(account_id, batch, groups, hash_keys): # Update all streams in a single bulk operation Stream.objects.bulk_update( streams_to_update, - ['name', 'url', 'logo_url', 'tvg_id', 'custom_properties', 'last_seen', 'updated_at'], + ['name', 'url', 'logo_url', 'tvg_id', 'custom_properties', 'last_seen', 'updated_at', 'is_stale'], batch_size=200 ) except Exception as e: @@ -1092,7 +1132,15 @@ def cleanup_streams(account_id, scan_start_time=timezone.now): @shared_task -def refresh_m3u_groups(account_id, use_cache=False, full_refresh=False): +def refresh_m3u_groups(account_id, use_cache=False, full_refresh=False, scan_start_time=None): + """Refresh M3U groups for an account. + + Args: + account_id: ID of the M3U account + use_cache: Whether to use cached M3U file + full_refresh: Whether this is part of a full refresh + scan_start_time: Timestamp when the scan started (for consistent last_seen marking) + """ if not acquire_task_lock("refresh_m3u_account_groups", account_id): return f"Task already running for account_id={account_id}.", None @@ -1419,7 +1467,7 @@ def refresh_m3u_groups(account_id, use_cache=False, full_refresh=False): send_m3u_update(account_id, "processing_groups", 0) - process_groups(account, groups) + process_groups(account, groups, scan_start_time) release_task_lock("refresh_m3u_account_groups", account_id) @@ -2526,7 +2574,7 @@ def refresh_single_m3u_account(account_id): if not extinf_data: try: logger.info(f"Calling refresh_m3u_groups for account {account_id}") - result = refresh_m3u_groups(account_id, full_refresh=True) + result = refresh_m3u_groups(account_id, full_refresh=True, scan_start_time=refresh_start_timestamp) logger.trace(f"refresh_m3u_groups result: {result}") # Check for completely empty result or missing groups @@ -2806,9 +2854,26 @@ def refresh_single_m3u_account(account_id): id=-1 ).exists() # This will never find anything but ensures DB sync + # Mark streams that weren't seen in this refresh as stale (pending deletion) + stale_stream_count = Stream.objects.filter( + m3u_account=account, + last_seen__lt=refresh_start_timestamp + ).update(is_stale=True) + logger.info(f"Marked {stale_stream_count} streams as stale for account {account_id}") + + # Mark group relationships that weren't seen in this refresh as stale (pending deletion) + stale_group_count = ChannelGroupM3UAccount.objects.filter( + m3u_account=account, + last_seen__lt=refresh_start_timestamp + ).update(is_stale=True) + logger.info(f"Marked {stale_group_count} group relationships as stale for account {account_id}") + # Now run cleanup streams_deleted = cleanup_streams(account_id, refresh_start_timestamp) + # Cleanup stale group relationships (follows same retention policy as streams) + cleanup_stale_group_relationships(account, refresh_start_timestamp) + # Run auto channel sync after successful refresh auto_sync_message = "" try: diff --git a/apps/output/views.py b/apps/output/views.py index aa7fd1bb..47798ee2 100644 --- a/apps/output/views.py +++ b/apps/output/views.py @@ -7,7 +7,6 @@ from django.views.decorators.csrf import csrf_exempt from django.views.decorators.http import require_http_methods from apps.epg.models import ProgramData from apps.accounts.models import User -from core.models import CoreSettings, NETWORK_ACCESS from dispatcharr.utils import network_access_allowed from django.utils import timezone as django_timezone from django.shortcuts import get_object_or_404 diff --git 
a/apps/proxy/vod_proxy/multi_worker_connection_manager.py b/apps/proxy/vod_proxy/multi_worker_connection_manager.py index 251721c5..1534f761 100644 --- a/apps/proxy/vod_proxy/multi_worker_connection_manager.py +++ b/apps/proxy/vod_proxy/multi_worker_connection_manager.py @@ -357,12 +357,12 @@ class RedisBackedVODConnection: logger.info(f"[{self.session_id}] Making request #{state.request_count} to {'final' if state.final_url else 'original'} URL") - # Make request + # Make request (10s connect, 10s read timeout - keeps lock time reasonable if client disconnects) response = self.local_session.get( target_url, headers=headers, stream=True, - timeout=(10, 30), + timeout=(10, 10), allow_redirects=allow_redirects ) response.raise_for_status() @@ -712,6 +712,10 @@ class MultiWorkerVODConnectionManager: content_name = content_obj.name if hasattr(content_obj, 'name') else str(content_obj) client_id = session_id + # Track whether we incremented profile connections (for cleanup on error) + profile_connections_incremented = False + redis_connection = None + logger.info(f"[{client_id}] Worker {self.worker_id} - Redis-backed streaming request for {content_type} {content_name}") try: @@ -802,6 +806,7 @@ class MultiWorkerVODConnectionManager: # Increment profile connections after successful connection creation self._increment_profile_connections(m3u_profile) + profile_connections_incremented = True logger.info(f"[{client_id}] Worker {self.worker_id} - Created consolidated connection with session metadata") else: @@ -1024,6 +1029,19 @@ class MultiWorkerVODConnectionManager: except Exception as e: logger.error(f"[{client_id}] Worker {self.worker_id} - Error in Redis-backed stream_content_with_session: {e}", exc_info=True) + + # Decrement profile connections if we incremented them but failed before streaming started + if profile_connections_incremented: + logger.info(f"[{client_id}] Connection error occurred after profile increment - decrementing profile connections") + self._decrement_profile_connections(m3u_profile.id) + + # Also clean up the Redis connection state since we won't be using it + if redis_connection: + try: + redis_connection.cleanup(connection_manager=self, current_worker_id=self.worker_id) + except Exception as cleanup_error: + logger.error(f"[{client_id}] Error during cleanup after connection failure: {cleanup_error}") + return HttpResponse(f"Streaming error: {str(e)}", status=500) def _apply_timeshift_parameters(self, original_url, utc_start=None, utc_end=None, offset=None): diff --git a/apps/vod/models.py b/apps/vod/models.py index 69aed808..7067856e 100644 --- a/apps/vod/models.py +++ b/apps/vod/models.py @@ -245,10 +245,13 @@ class M3UMovieRelation(models.Model): """Get the full stream URL for this movie from this provider""" # Build URL dynamically for XtreamCodes accounts if self.m3u_account.account_type == 'XC': - server_url = self.m3u_account.server_url.rstrip('/') + from core.xtream_codes import Client as XCClient + # Use XC client's URL normalization to handle malformed URLs + # (e.g., URLs with /player_api.php or query parameters) + normalized_url = XCClient(self.m3u_account.server_url, '', '')._normalize_url(self.m3u_account.server_url) username = self.m3u_account.username password = self.m3u_account.password - return f"{server_url}/movie/{username}/{password}/{self.stream_id}.{self.container_extension or 'mp4'}" + return f"{normalized_url}/movie/{username}/{password}/{self.stream_id}.{self.container_extension or 'mp4'}" else: # For other account types, we would need another 
way to build URLs return None @@ -285,10 +288,12 @@ class M3UEpisodeRelation(models.Model): if self.m3u_account.account_type == 'XC': # For XtreamCodes accounts, build the URL dynamically - server_url = self.m3u_account.server_url.rstrip('/') + # Use XC client's URL normalization to handle malformed URLs + # (e.g., URLs with /player_api.php or query parameters) + normalized_url = XtreamCodesClient(self.m3u_account.server_url, '', '')._normalize_url(self.m3u_account.server_url) username = self.m3u_account.username password = self.m3u_account.password - return f"{server_url}/series/{username}/{password}/{self.stream_id}.{self.container_extension or 'mp4'}" + return f"{normalized_url}/series/{username}/{password}/{self.stream_id}.{self.container_extension or 'mp4'}" else: # We might support non XC accounts in the future # For now, return None diff --git a/apps/vod/tasks.py b/apps/vod/tasks.py index 4eb9fadc..0dcd9cfd 100644 --- a/apps/vod/tasks.py +++ b/apps/vod/tasks.py @@ -410,10 +410,10 @@ def process_movie_batch(account, batch, categories, relations, scan_start_time=N tmdb_id = movie_data.get('tmdb_id') or movie_data.get('tmdb') imdb_id = movie_data.get('imdb_id') or movie_data.get('imdb') - # Clean empty string IDs - if tmdb_id == '': + # Clean empty string IDs and zero values (some providers use 0 to indicate no ID) + if tmdb_id == '' or tmdb_id == 0 or tmdb_id == '0': tmdb_id = None - if imdb_id == '': + if imdb_id == '' or imdb_id == 0 or imdb_id == '0': imdb_id = None # Create a unique key for this movie (priority: TMDB > IMDB > name+year) @@ -614,26 +614,41 @@ def process_movie_batch(account, batch, categories, relations, scan_start_time=N # First, create new movies and get their IDs created_movies = {} if movies_to_create: - Movie.objects.bulk_create(movies_to_create, ignore_conflicts=True) + # Bulk query to check which movies already exist + tmdb_ids = [m.tmdb_id for m in movies_to_create if m.tmdb_id] + imdb_ids = [m.imdb_id for m in movies_to_create if m.imdb_id] + name_year_pairs = [(m.name, m.year) for m in movies_to_create if not m.tmdb_id and not m.imdb_id] - # Get the newly created movies with their IDs - # We need to re-fetch them to get the primary keys + existing_by_tmdb = {m.tmdb_id: m for m in Movie.objects.filter(tmdb_id__in=tmdb_ids)} if tmdb_ids else {} + existing_by_imdb = {m.imdb_id: m for m in Movie.objects.filter(imdb_id__in=imdb_ids)} if imdb_ids else {} + + existing_by_name_year = {} + if name_year_pairs: + for movie in Movie.objects.filter(tmdb_id__isnull=True, imdb_id__isnull=True): + key = (movie.name, movie.year) + if key in name_year_pairs: + existing_by_name_year[key] = movie + + # Check each movie against the bulk query results + movies_actually_created = [] for movie in movies_to_create: - # Find the movie by its unique identifiers - if movie.tmdb_id: - db_movie = Movie.objects.filter(tmdb_id=movie.tmdb_id).first() - elif movie.imdb_id: - db_movie = Movie.objects.filter(imdb_id=movie.imdb_id).first() - else: - db_movie = Movie.objects.filter( - name=movie.name, - year=movie.year, - tmdb_id__isnull=True, - imdb_id__isnull=True - ).first() + existing = None + if movie.tmdb_id and movie.tmdb_id in existing_by_tmdb: + existing = existing_by_tmdb[movie.tmdb_id] + elif movie.imdb_id and movie.imdb_id in existing_by_imdb: + existing = existing_by_imdb[movie.imdb_id] + elif not movie.tmdb_id and not movie.imdb_id: + existing = existing_by_name_year.get((movie.name, movie.year)) - if db_movie: - created_movies[id(movie)] = db_movie + if existing: + 
created_movies[id(movie)] = existing + else: + movies_actually_created.append(movie) + created_movies[id(movie)] = movie + + # Bulk create only movies that don't exist + if movies_actually_created: + Movie.objects.bulk_create(movies_actually_created) # Update existing movies if movies_to_update: @@ -649,12 +664,16 @@ def process_movie_batch(account, batch, categories, relations, scan_start_time=N movie.logo = movie._logo_to_update movie.save(update_fields=['logo']) - # Update relations to reference the correct movie objects + # Update relations to reference the correct movie objects (with PKs) for relation in relations_to_create: if id(relation.movie) in created_movies: relation.movie = created_movies[id(relation.movie)] - # Handle relations + for relation in relations_to_update: + if id(relation.movie) in created_movies: + relation.movie = created_movies[id(relation.movie)] + + # All movies now have PKs, safe to bulk create/update relations if relations_to_create: M3UMovieRelation.objects.bulk_create(relations_to_create, ignore_conflicts=True) @@ -724,10 +743,10 @@ def process_series_batch(account, batch, categories, relations, scan_start_time= tmdb_id = series_data.get('tmdb') or series_data.get('tmdb_id') imdb_id = series_data.get('imdb') or series_data.get('imdb_id') - # Clean empty string IDs - if tmdb_id == '': + # Clean empty string IDs and zero values (some providers use 0 to indicate no ID) + if tmdb_id == '' or tmdb_id == 0 or tmdb_id == '0': tmdb_id = None - if imdb_id == '': + if imdb_id == '' or imdb_id == 0 or imdb_id == '0': imdb_id = None # Create a unique key for this series (priority: TMDB > IMDB > name+year) @@ -945,26 +964,41 @@ def process_series_batch(account, batch, categories, relations, scan_start_time= # First, create new series and get their IDs created_series = {} if series_to_create: - Series.objects.bulk_create(series_to_create, ignore_conflicts=True) + # Bulk query to check which series already exist + tmdb_ids = [s.tmdb_id for s in series_to_create if s.tmdb_id] + imdb_ids = [s.imdb_id for s in series_to_create if s.imdb_id] + name_year_pairs = [(s.name, s.year) for s in series_to_create if not s.tmdb_id and not s.imdb_id] - # Get the newly created series with their IDs - # We need to re-fetch them to get the primary keys + existing_by_tmdb = {s.tmdb_id: s for s in Series.objects.filter(tmdb_id__in=tmdb_ids)} if tmdb_ids else {} + existing_by_imdb = {s.imdb_id: s for s in Series.objects.filter(imdb_id__in=imdb_ids)} if imdb_ids else {} + + existing_by_name_year = {} + if name_year_pairs: + for series in Series.objects.filter(tmdb_id__isnull=True, imdb_id__isnull=True): + key = (series.name, series.year) + if key in name_year_pairs: + existing_by_name_year[key] = series + + # Check each series against the bulk query results + series_actually_created = [] for series in series_to_create: - # Find the series by its unique identifiers - if series.tmdb_id: - db_series = Series.objects.filter(tmdb_id=series.tmdb_id).first() - elif series.imdb_id: - db_series = Series.objects.filter(imdb_id=series.imdb_id).first() - else: - db_series = Series.objects.filter( - name=series.name, - year=series.year, - tmdb_id__isnull=True, - imdb_id__isnull=True - ).first() + existing = None + if series.tmdb_id and series.tmdb_id in existing_by_tmdb: + existing = existing_by_tmdb[series.tmdb_id] + elif series.imdb_id and series.imdb_id in existing_by_imdb: + existing = existing_by_imdb[series.imdb_id] + elif not series.tmdb_id and not series.imdb_id: + existing = 
existing_by_name_year.get((series.name, series.year)) - if db_series: - created_series[id(series)] = db_series + if existing: + created_series[id(series)] = existing + else: + series_actually_created.append(series) + created_series[id(series)] = series + + # Bulk create only series that don't exist + if series_actually_created: + Series.objects.bulk_create(series_actually_created) # Update existing series if series_to_update: @@ -980,12 +1014,16 @@ def process_series_batch(account, batch, categories, relations, scan_start_time= series.logo = series._logo_to_update series.save(update_fields=['logo']) - # Update relations to reference the correct series objects + # Update relations to reference the correct series objects (with PKs) for relation in relations_to_create: if id(relation.series) in created_series: relation.series = created_series[id(relation.series)] - # Handle relations + for relation in relations_to_update: + if id(relation.series) in created_series: + relation.series = created_series[id(relation.series)] + + # All series now have PKs, safe to bulk create/update relations if relations_to_create: M3USeriesRelation.objects.bulk_create(relations_to_create, ignore_conflicts=True) diff --git a/core/api_views.py b/core/api_views.py index e3459a38..30829174 100644 --- a/core/api_views.py +++ b/core/api_views.py @@ -15,8 +15,9 @@ from .models import ( UserAgent, StreamProfile, CoreSettings, - STREAM_HASH_KEY, - NETWORK_ACCESS, + STREAM_SETTINGS_KEY, + DVR_SETTINGS_KEY, + NETWORK_ACCESS_KEY, PROXY_SETTINGS_KEY, ) from .serializers import ( @@ -68,16 +69,28 @@ class CoreSettingsViewSet(viewsets.ModelViewSet): def update(self, request, *args, **kwargs): instance = self.get_object() + old_value = instance.value response = super().update(request, *args, **kwargs) - if instance.key == STREAM_HASH_KEY: - if instance.value != request.data["value"]: - rehash_streams.delay(request.data["value"].split(",")) - # If DVR pre/post offsets changed, reschedule upcoming recordings - try: - from core.models import DVR_PRE_OFFSET_MINUTES_KEY, DVR_POST_OFFSET_MINUTES_KEY - if instance.key in (DVR_PRE_OFFSET_MINUTES_KEY, DVR_POST_OFFSET_MINUTES_KEY): - if instance.value != request.data.get("value"): + # If stream settings changed and m3u_hash_key is different, rehash streams + if instance.key == STREAM_SETTINGS_KEY: + new_value = request.data.get("value", {}) + if isinstance(new_value, dict) and isinstance(old_value, dict): + old_hash = old_value.get("m3u_hash_key", "") + new_hash = new_value.get("m3u_hash_key", "") + if old_hash != new_hash: + hash_keys = new_hash.split(",") if isinstance(new_hash, str) else new_hash + rehash_streams.delay(hash_keys) + + # If DVR settings changed and pre/post offsets are different, reschedule upcoming recordings + if instance.key == DVR_SETTINGS_KEY: + new_value = request.data.get("value", {}) + if isinstance(new_value, dict) and isinstance(old_value, dict): + old_pre = old_value.get("pre_offset_minutes") + new_pre = new_value.get("pre_offset_minutes") + old_post = old_value.get("post_offset_minutes") + new_post = new_value.get("post_offset_minutes") + if old_pre != new_pre or old_post != new_post: try: # Prefer async task if Celery is available from apps.channels.tasks import reschedule_upcoming_recordings_for_offset_change @@ -86,24 +99,23 @@ class CoreSettingsViewSet(viewsets.ModelViewSet): # Fallback to synchronous implementation from apps.channels.tasks import reschedule_upcoming_recordings_for_offset_change_impl reschedule_upcoming_recordings_for_offset_change_impl() 
- except Exception: - pass return response def create(self, request, *args, **kwargs): response = super().create(request, *args, **kwargs) - # If creating DVR pre/post offset settings, also reschedule upcoming recordings + # If creating DVR settings with offset values, reschedule upcoming recordings try: key = request.data.get("key") - from core.models import DVR_PRE_OFFSET_MINUTES_KEY, DVR_POST_OFFSET_MINUTES_KEY - if key in (DVR_PRE_OFFSET_MINUTES_KEY, DVR_POST_OFFSET_MINUTES_KEY): - try: - from apps.channels.tasks import reschedule_upcoming_recordings_for_offset_change - reschedule_upcoming_recordings_for_offset_change.delay() - except Exception: - from apps.channels.tasks import reschedule_upcoming_recordings_for_offset_change_impl - reschedule_upcoming_recordings_for_offset_change_impl() + if key == DVR_SETTINGS_KEY: + value = request.data.get("value", {}) + if isinstance(value, dict) and ("pre_offset_minutes" in value or "post_offset_minutes" in value): + try: + from apps.channels.tasks import reschedule_upcoming_recordings_for_offset_change + reschedule_upcoming_recordings_for_offset_change.delay() + except Exception: + from apps.channels.tasks import reschedule_upcoming_recordings_for_offset_change_impl + reschedule_upcoming_recordings_for_offset_change_impl() except Exception: pass return response @@ -111,13 +123,13 @@ class CoreSettingsViewSet(viewsets.ModelViewSet): def check(self, request, *args, **kwargs): data = request.data - if data.get("key") == NETWORK_ACCESS: + if data.get("key") == NETWORK_ACCESS_KEY: client_ip = ipaddress.ip_address(get_client_ip(request)) in_network = {} invalid = [] - value = json.loads(data.get("value", "{}")) + value = data.get("value", {}) for key, val in value.items(): in_network[key] = [] cidrs = val.split(",") @@ -142,7 +154,7 @@ class CoreSettingsViewSet(viewsets.ModelViewSet): }, status=status.HTTP_200_OK, ) - + response_data = { **in_network, "client_ip": str(client_ip) @@ -161,8 +173,8 @@ class ProxySettingsViewSet(viewsets.ViewSet): """Get or create the proxy settings CoreSettings entry""" try: settings_obj = CoreSettings.objects.get(key=PROXY_SETTINGS_KEY) - settings_data = json.loads(settings_obj.value) - except (CoreSettings.DoesNotExist, json.JSONDecodeError): + settings_data = settings_obj.value + except CoreSettings.DoesNotExist: # Create default settings settings_data = { "buffering_timeout": 15, @@ -175,7 +187,7 @@ class ProxySettingsViewSet(viewsets.ViewSet): key=PROXY_SETTINGS_KEY, defaults={ "name": "Proxy Settings", - "value": json.dumps(settings_data) + "value": settings_data } ) return settings_obj, settings_data @@ -197,8 +209,8 @@ class ProxySettingsViewSet(viewsets.ViewSet): serializer = ProxySettingsSerializer(data=request.data) serializer.is_valid(raise_exception=True) - # Update the JSON data - settings_obj.value = json.dumps(serializer.validated_data) + # Update the JSON data - store as dict directly + settings_obj.value = serializer.validated_data settings_obj.save() return Response(serializer.validated_data) @@ -213,8 +225,8 @@ class ProxySettingsViewSet(viewsets.ViewSet): serializer = ProxySettingsSerializer(data=updated_data) serializer.is_valid(raise_exception=True) - # Update the JSON data - settings_obj.value = json.dumps(serializer.validated_data) + # Update the JSON data - store as dict directly + settings_obj.value = serializer.validated_data settings_obj.save() return Response(serializer.validated_data) @@ -332,8 +344,8 @@ def rehash_streams_endpoint(request): """Trigger the rehash streams task""" try: # Get 
the current hash keys from settings - hash_key_setting = CoreSettings.objects.get(key=STREAM_HASH_KEY) - hash_keys = hash_key_setting.value.split(",") + hash_key = CoreSettings.get_m3u_hash_key() + hash_keys = hash_key.split(",") if isinstance(hash_key, str) else hash_key # Queue the rehash task task = rehash_streams.delay(hash_keys) @@ -344,10 +356,10 @@ def rehash_streams_endpoint(request): "task_id": task.id }, status=status.HTTP_200_OK) - except CoreSettings.DoesNotExist: + except Exception as e: return Response({ "success": False, - "message": "Hash key settings not found" + "message": f"Error triggering rehash: {str(e)}" }, status=status.HTTP_400_BAD_REQUEST) except Exception as e: diff --git a/core/management/commands/reset_network_access.py b/core/management/commands/reset_network_access.py index 3b0e5a55..a31d247c 100644 --- a/core/management/commands/reset_network_access.py +++ b/core/management/commands/reset_network_access.py @@ -1,13 +1,13 @@ # your_app/management/commands/update_column.py from django.core.management.base import BaseCommand -from core.models import CoreSettings, NETWORK_ACCESS +from core.models import CoreSettings, NETWORK_ACCESS_KEY class Command(BaseCommand): help = "Reset network access settings" def handle(self, *args, **options): - setting = CoreSettings.objects.get(key=NETWORK_ACCESS) - setting.value = "{}" + setting = CoreSettings.objects.get(key=NETWORK_ACCESS_KEY) + setting.value = {} setting.save() diff --git a/core/migrations/0020_change_coresettings_value_to_jsonfield.py b/core/migrations/0020_change_coresettings_value_to_jsonfield.py new file mode 100644 index 00000000..ac6ad089 --- /dev/null +++ b/core/migrations/0020_change_coresettings_value_to_jsonfield.py @@ -0,0 +1,267 @@ +# Generated migration to change CoreSettings value field to JSONField and consolidate settings + +import json +from django.db import migrations, models + + +def convert_string_to_json(apps, schema_editor): + """Convert existing string values to appropriate JSON types before changing column type""" + CoreSettings = apps.get_model("core", "CoreSettings") + + for setting in CoreSettings.objects.all(): + value = setting.value + + if not value: + # Empty strings become empty string in JSON + setting.value = json.dumps("") + setting.save(update_fields=['value']) + continue + + # Try to parse as JSON if it looks like JSON (objects/arrays) + if value.startswith('{') or value.startswith('['): + try: + parsed = json.loads(value) + # Store as JSON string temporarily (column is still CharField) + setting.value = json.dumps(parsed) + setting.save(update_fields=['value']) + continue + except (json.JSONDecodeError, ValueError): + pass + + # Try to parse as number + try: + # Check if it's an integer + if '.' 
not in value and value.lstrip('-').isdigit(): + setting.value = json.dumps(int(value)) + setting.save(update_fields=['value']) + continue + # Check if it's a float + float_val = float(value) + setting.value = json.dumps(float_val) + setting.save(update_fields=['value']) + continue + except (ValueError, AttributeError): + pass + + # Check for booleans + if value.lower() in ('true', 'false', '1', '0', 'yes', 'no', 'on', 'off'): + bool_val = value.lower() in ('true', '1', 'yes', 'on') + setting.value = json.dumps(bool_val) + setting.save(update_fields=['value']) + continue + + # Default: store as JSON string + setting.value = json.dumps(value) + setting.save(update_fields=['value']) + + +def consolidate_settings(apps, schema_editor): + """Consolidate individual setting rows into grouped JSON objects.""" + CoreSettings = apps.get_model("core", "CoreSettings") + + # Helper to get setting value + def get_value(key, default=None): + try: + obj = CoreSettings.objects.get(key=key) + return obj.value if obj.value is not None else default + except CoreSettings.DoesNotExist: + return default + + # STREAM SETTINGS + stream_settings = { + "default_user_agent": get_value("default-user-agent"), + "default_stream_profile": get_value("default-stream-profile"), + "m3u_hash_key": get_value("m3u-hash-key", ""), + "preferred_region": get_value("preferred-region"), + "auto_import_mapped_files": get_value("auto-import-mapped-files"), + } + CoreSettings.objects.update_or_create( + key="stream_settings", + defaults={"name": "Stream Settings", "value": stream_settings} + ) + + # DVR SETTINGS + dvr_settings = { + "tv_template": get_value("dvr-tv-template", "TV_Shows/{show}/S{season:02d}E{episode:02d}.mkv"), + "movie_template": get_value("dvr-movie-template", "Movies/{title} ({year}).mkv"), + "tv_fallback_dir": get_value("dvr-tv-fallback-dir", "TV_Shows"), + "tv_fallback_template": get_value("dvr-tv-fallback-template", "TV_Shows/{show}/{start}.mkv"), + "movie_fallback_template": get_value("dvr-movie-fallback-template", "Movies/{start}.mkv"), + "comskip_enabled": bool(get_value("dvr-comskip-enabled", False)), + "comskip_custom_path": get_value("dvr-comskip-custom-path", ""), + "pre_offset_minutes": int(get_value("dvr-pre-offset-minutes", 0) or 0), + "post_offset_minutes": int(get_value("dvr-post-offset-minutes", 0) or 0), + "series_rules": get_value("dvr-series-rules", []), + } + CoreSettings.objects.update_or_create( + key="dvr_settings", + defaults={"name": "DVR Settings", "value": dvr_settings} + ) + + # BACKUP SETTINGS - using underscore keys (not dashes) + backup_settings = { + "schedule_enabled": get_value("backup_schedule_enabled") if get_value("backup_schedule_enabled") is not None else True, + "schedule_frequency": get_value("backup_schedule_frequency") or "daily", + "schedule_time": get_value("backup_schedule_time") or "03:00", + "schedule_day_of_week": get_value("backup_schedule_day_of_week") if get_value("backup_schedule_day_of_week") is not None else 0, + "retention_count": get_value("backup_retention_count") if get_value("backup_retention_count") is not None else 3, + "schedule_cron_expression": get_value("backup_schedule_cron_expression") or "", + } + CoreSettings.objects.update_or_create( + key="backup_settings", + defaults={"name": "Backup Settings", "value": backup_settings} + ) + + # SYSTEM SETTINGS + system_settings = { + "time_zone": get_value("system-time-zone", "UTC"), + "max_system_events": int(get_value("max-system-events", 100) or 100), + } + CoreSettings.objects.update_or_create( + 
key="system_settings", + defaults={"name": "System Settings", "value": system_settings} + ) + + # Rename proxy-settings to proxy_settings (if it exists with old name) + try: + old_proxy = CoreSettings.objects.get(key="proxy-settings") + old_proxy.key = "proxy_settings" + old_proxy.save() + except CoreSettings.DoesNotExist: + pass + + # Ensure proxy_settings exists with defaults if not present + proxy_obj, proxy_created = CoreSettings.objects.get_or_create( + key="proxy_settings", + defaults={ + "name": "Proxy Settings", + "value": { + "buffering_timeout": 15, + "buffering_speed": 1.0, + "redis_chunk_ttl": 60, + "channel_shutdown_delay": 0, + "channel_init_grace_period": 5, + } + } + ) + + # Rename network-access to network_access (if it exists with old name) + try: + old_network = CoreSettings.objects.get(key="network-access") + old_network.key = "network_access" + old_network.save() + except CoreSettings.DoesNotExist: + pass + + # Ensure network_access exists with defaults if not present + network_obj, network_created = CoreSettings.objects.get_or_create( + key="network_access", + defaults={ + "name": "Network Access", + "value": {} + } + ) + # Delete old individual setting rows (keep only the new grouped settings) + grouped_keys = ["stream_settings", "dvr_settings", "backup_settings", "system_settings", "proxy_settings", "network_access"] + CoreSettings.objects.exclude(key__in=grouped_keys).delete() + + +def reverse_migration(apps, schema_editor): + """Reverse migration: split grouped settings and convert JSON back to strings""" + CoreSettings = apps.get_model("core", "CoreSettings") + + # Helper to create individual setting + def create_setting(key, name, value): + # Convert value back to string representation for CharField + if isinstance(value, str): + str_value = value + elif isinstance(value, bool): + str_value = "true" if value else "false" + elif isinstance(value, (int, float)): + str_value = str(value) + elif isinstance(value, (dict, list)): + str_value = json.dumps(value) + elif value is None: + str_value = "" + else: + str_value = str(value) + + CoreSettings.objects.update_or_create( + key=key, + defaults={"name": name, "value": str_value} + ) + + # Split stream_settings + try: + stream = CoreSettings.objects.get(key="stream_settings") + if isinstance(stream.value, dict): + create_setting("default_user_agent", "Default User Agent", stream.value.get("default_user_agent")) + create_setting("default_stream_profile", "Default Stream Profile", stream.value.get("default_stream_profile")) + create_setting("stream_hash_key", "Stream Hash Key", stream.value.get("m3u_hash_key", "")) + create_setting("preferred_region", "Preferred Region", stream.value.get("preferred_region")) + create_setting("auto_import_mapped_files", "Auto Import Mapped Files", stream.value.get("auto_import_mapped_files")) + stream.delete() + except CoreSettings.DoesNotExist: + pass + + # Split dvr_settings + try: + dvr = CoreSettings.objects.get(key="dvr_settings") + if isinstance(dvr.value, dict): + create_setting("dvr_tv_template", "DVR TV Template", dvr.value.get("tv_template", "TV_Shows/{show}/S{season:02d}E{episode:02d}.mkv")) + create_setting("dvr_movie_template", "DVR Movie Template", dvr.value.get("movie_template", "Movies/{title} ({year}).mkv")) + create_setting("dvr_tv_fallback_dir", "DVR TV Fallback Dir", dvr.value.get("tv_fallback_dir", "TV_Shows")) + create_setting("dvr_tv_fallback_template", "DVR TV Fallback Template", dvr.value.get("tv_fallback_template", "TV_Shows/{show}/{start}.mkv")) + 
create_setting("dvr_movie_fallback_template", "DVR Movie Fallback Template", dvr.value.get("movie_fallback_template", "Movies/{start}.mkv")) + create_setting("dvr_comskip_enabled", "DVR Comskip Enabled", dvr.value.get("comskip_enabled", False)) + create_setting("dvr_comskip_custom_path", "DVR Comskip Custom Path", dvr.value.get("comskip_custom_path", "")) + create_setting("dvr_pre_offset_minutes", "DVR Pre Offset Minutes", dvr.value.get("pre_offset_minutes", 0)) + create_setting("dvr_post_offset_minutes", "DVR Post Offset Minutes", dvr.value.get("post_offset_minutes", 0)) + create_setting("dvr_series_rules", "DVR Series Rules", dvr.value.get("series_rules", [])) + dvr.delete() + except CoreSettings.DoesNotExist: + pass + + # Split backup_settings + try: + backup = CoreSettings.objects.get(key="backup_settings") + if isinstance(backup.value, dict): + create_setting("backup_schedule_enabled", "Backup Schedule Enabled", backup.value.get("schedule_enabled", False)) + create_setting("backup_schedule_frequency", "Backup Schedule Frequency", backup.value.get("schedule_frequency", "weekly")) + create_setting("backup_schedule_time", "Backup Schedule Time", backup.value.get("schedule_time", "02:00")) + create_setting("backup_schedule_day_of_week", "Backup Schedule Day of Week", backup.value.get("schedule_day_of_week", 0)) + create_setting("backup_retention_count", "Backup Retention Count", backup.value.get("retention_count", 7)) + create_setting("backup_schedule_cron_expression", "Backup Schedule Cron Expression", backup.value.get("schedule_cron_expression", "")) + backup.delete() + except CoreSettings.DoesNotExist: + pass + + # Split system_settings + try: + system = CoreSettings.objects.get(key="system_settings") + if isinstance(system.value, dict): + create_setting("system_time_zone", "System Time Zone", system.value.get("time_zone", "UTC")) + create_setting("max_system_events", "Max System Events", system.value.get("max_system_events", 100)) + system.delete() + except CoreSettings.DoesNotExist: + pass + + +class Migration(migrations.Migration): + + dependencies = [ + ('core', '0019_add_vlc_stream_profile'), + ] + + operations = [ + # First, convert all data to valid JSON strings while column is still CharField + migrations.RunPython(convert_string_to_json, migrations.RunPython.noop), + # Then change the field type to JSONField + migrations.AlterField( + model_name='coresettings', + name='value', + field=models.JSONField(blank=True, default=dict), + ), + # Finally, consolidate individual settings into grouped JSON objects + migrations.RunPython(consolidate_settings, reverse_migration), + ] diff --git a/core/models.py b/core/models.py index b9166f66..683acb0d 100644 --- a/core/models.py +++ b/core/models.py @@ -1,4 +1,7 @@ # core/models.py + +from shlex import split as shlex_split + from django.conf import settings from django.db import models from django.utils.text import slugify @@ -133,7 +136,7 @@ class StreamProfile(models.Model): # Split the command and iterate through each part to apply replacements cmd = [self.command] + [ self._replace_in_part(part, replacements) - for part in self.parameters.split() + for part in shlex_split(self.parameters) # use shlex to handle quoted strings ] return cmd @@ -145,24 +148,13 @@ class StreamProfile(models.Model): return part -DEFAULT_USER_AGENT_KEY = slugify("Default User-Agent") -DEFAULT_STREAM_PROFILE_KEY = slugify("Default Stream Profile") -STREAM_HASH_KEY = slugify("M3U Hash Key") -PREFERRED_REGION_KEY = slugify("Preferred Region") 
-AUTO_IMPORT_MAPPED_FILES = slugify("Auto-Import Mapped Files") -NETWORK_ACCESS = slugify("Network Access") -PROXY_SETTINGS_KEY = slugify("Proxy Settings") -DVR_TV_TEMPLATE_KEY = slugify("DVR TV Template") -DVR_MOVIE_TEMPLATE_KEY = slugify("DVR Movie Template") -DVR_SERIES_RULES_KEY = slugify("DVR Series Rules") -DVR_TV_FALLBACK_DIR_KEY = slugify("DVR TV Fallback Dir") -DVR_TV_FALLBACK_TEMPLATE_KEY = slugify("DVR TV Fallback Template") -DVR_MOVIE_FALLBACK_TEMPLATE_KEY = slugify("DVR Movie Fallback Template") -DVR_COMSKIP_ENABLED_KEY = slugify("DVR Comskip Enabled") -DVR_COMSKIP_CUSTOM_PATH_KEY = slugify("DVR Comskip Custom Path") -DVR_PRE_OFFSET_MINUTES_KEY = slugify("DVR Pre-Offset Minutes") -DVR_POST_OFFSET_MINUTES_KEY = slugify("DVR Post-Offset Minutes") -SYSTEM_TIME_ZONE_KEY = slugify("System Time Zone") +# Setting group keys +STREAM_SETTINGS_KEY = "stream_settings" +DVR_SETTINGS_KEY = "dvr_settings" +BACKUP_SETTINGS_KEY = "backup_settings" +PROXY_SETTINGS_KEY = "proxy_settings" +NETWORK_ACCESS_KEY = "network_access" +SYSTEM_SETTINGS_KEY = "system_settings" class CoreSettings(models.Model): @@ -173,208 +165,166 @@ class CoreSettings(models.Model): name = models.CharField( max_length=255, ) - value = models.CharField( - max_length=255, + value = models.JSONField( + default=dict, + blank=True, ) def __str__(self): return "Core Settings" + # Helper methods to get/set grouped settings + @classmethod + def _get_group(cls, key, defaults=None): + """Get a settings group, returning defaults if not found.""" + try: + return cls.objects.get(key=key).value or (defaults or {}) + except cls.DoesNotExist: + return defaults or {} + + @classmethod + def _update_group(cls, key, name, updates): + """Update specific fields in a settings group.""" + obj, created = cls.objects.get_or_create( + key=key, + defaults={"name": name, "value": {}} + ) + current = obj.value if isinstance(obj.value, dict) else {} + current.update(updates) + obj.value = current + obj.save() + return current + + # Stream Settings + @classmethod + def get_stream_settings(cls): + """Get all stream-related settings.""" + return cls._get_group(STREAM_SETTINGS_KEY, { + "default_user_agent": None, + "default_stream_profile": None, + "m3u_hash_key": "", + "preferred_region": None, + "auto_import_mapped_files": None, + }) + @classmethod def get_default_user_agent_id(cls): - """Retrieve a system profile by name (or return None if not found).""" - return cls.objects.get(key=DEFAULT_USER_AGENT_KEY).value + return cls.get_stream_settings().get("default_user_agent") @classmethod def get_default_stream_profile_id(cls): - return cls.objects.get(key=DEFAULT_STREAM_PROFILE_KEY).value + return cls.get_stream_settings().get("default_stream_profile") @classmethod def get_m3u_hash_key(cls): - return cls.objects.get(key=STREAM_HASH_KEY).value + return cls.get_stream_settings().get("m3u_hash_key", "") @classmethod def get_preferred_region(cls): - """Retrieve the preferred region setting (or return None if not found).""" - try: - return cls.objects.get(key=PREFERRED_REGION_KEY).value - except cls.DoesNotExist: - return None + return cls.get_stream_settings().get("preferred_region") @classmethod def get_auto_import_mapped_files(cls): - """Retrieve the preferred region setting (or return None if not found).""" - try: - return cls.objects.get(key=AUTO_IMPORT_MAPPED_FILES).value - except cls.DoesNotExist: - return None + return cls.get_stream_settings().get("auto_import_mapped_files") + # DVR Settings @classmethod - def get_proxy_settings(cls): - """Retrieve 
proxy settings as dict (or return defaults if not found).""" - try: - import json - settings_json = cls.objects.get(key=PROXY_SETTINGS_KEY).value - return json.loads(settings_json) - except (cls.DoesNotExist, json.JSONDecodeError): - # Return defaults if not found or invalid JSON - return { - "buffering_timeout": 15, - "buffering_speed": 1.0, - "redis_chunk_ttl": 60, - "channel_shutdown_delay": 0, - "channel_init_grace_period": 5, - } + def get_dvr_settings(cls): + """Get all DVR-related settings.""" + return cls._get_group(DVR_SETTINGS_KEY, { + "tv_template": "TV_Shows/{show}/S{season:02d}E{episode:02d}.mkv", + "movie_template": "Movies/{title} ({year}).mkv", + "tv_fallback_dir": "TV_Shows", + "tv_fallback_template": "TV_Shows/{show}/{start}.mkv", + "movie_fallback_template": "Movies/{start}.mkv", + "comskip_enabled": False, + "comskip_custom_path": "", + "pre_offset_minutes": 0, + "post_offset_minutes": 0, + "series_rules": [], + }) @classmethod def get_dvr_tv_template(cls): - try: - return cls.objects.get(key=DVR_TV_TEMPLATE_KEY).value - except cls.DoesNotExist: - # Default: relative to recordings root (/data/recordings) - return "TV_Shows/{show}/S{season:02d}E{episode:02d}.mkv" + return cls.get_dvr_settings().get("tv_template", "TV_Shows/{show}/S{season:02d}E{episode:02d}.mkv") @classmethod def get_dvr_movie_template(cls): - try: - return cls.objects.get(key=DVR_MOVIE_TEMPLATE_KEY).value - except cls.DoesNotExist: - return "Movies/{title} ({year}).mkv" + return cls.get_dvr_settings().get("movie_template", "Movies/{title} ({year}).mkv") @classmethod def get_dvr_tv_fallback_dir(cls): - """Folder name to use when a TV episode has no season/episode information. - Defaults to 'TV_Show' to match existing behavior but can be overridden in settings. - """ - try: - return cls.objects.get(key=DVR_TV_FALLBACK_DIR_KEY).value or "TV_Shows" - except cls.DoesNotExist: - return "TV_Shows" + return cls.get_dvr_settings().get("tv_fallback_dir", "TV_Shows") @classmethod def get_dvr_tv_fallback_template(cls): - """Full path template used when season/episode are missing for a TV airing.""" - try: - return cls.objects.get(key=DVR_TV_FALLBACK_TEMPLATE_KEY).value - except cls.DoesNotExist: - # default requested by user - return "TV_Shows/{show}/{start}.mkv" + return cls.get_dvr_settings().get("tv_fallback_template", "TV_Shows/{show}/{start}.mkv") @classmethod def get_dvr_movie_fallback_template(cls): - """Full path template used when movie metadata is incomplete.""" - try: - return cls.objects.get(key=DVR_MOVIE_FALLBACK_TEMPLATE_KEY).value - except cls.DoesNotExist: - return "Movies/{start}.mkv" + return cls.get_dvr_settings().get("movie_fallback_template", "Movies/{start}.mkv") @classmethod def get_dvr_comskip_enabled(cls): - """Return boolean-like string value ('true'/'false') for comskip enablement.""" - try: - val = cls.objects.get(key=DVR_COMSKIP_ENABLED_KEY).value - return str(val).lower() in ("1", "true", "yes", "on") - except cls.DoesNotExist: - return False + return bool(cls.get_dvr_settings().get("comskip_enabled", False)) @classmethod def get_dvr_comskip_custom_path(cls): - """Return configured comskip.ini path or empty string if unset.""" - try: - return cls.objects.get(key=DVR_COMSKIP_CUSTOM_PATH_KEY).value - except cls.DoesNotExist: - return "" + return cls.get_dvr_settings().get("comskip_custom_path", "") @classmethod def set_dvr_comskip_custom_path(cls, path: str | None): - """Persist the comskip.ini path setting, normalizing nulls to empty string.""" value = (path or "").strip() - obj, _ = 
cls.objects.get_or_create( - key=DVR_COMSKIP_CUSTOM_PATH_KEY, - defaults={"name": "DVR Comskip Custom Path", "value": value}, - ) - if obj.value != value: - obj.value = value - obj.save(update_fields=["value"]) + cls._update_group(DVR_SETTINGS_KEY, "DVR Settings", {"comskip_custom_path": value}) return value @classmethod def get_dvr_pre_offset_minutes(cls): - """Minutes to start recording before scheduled start (default 0).""" - try: - val = cls.objects.get(key=DVR_PRE_OFFSET_MINUTES_KEY).value - return int(val) - except cls.DoesNotExist: - return 0 - except Exception: - try: - return int(float(val)) - except Exception: - return 0 + return int(cls.get_dvr_settings().get("pre_offset_minutes", 0) or 0) @classmethod def get_dvr_post_offset_minutes(cls): - """Minutes to stop recording after scheduled end (default 0).""" - try: - val = cls.objects.get(key=DVR_POST_OFFSET_MINUTES_KEY).value - return int(val) - except cls.DoesNotExist: - return 0 - except Exception: - try: - return int(float(val)) - except Exception: - return 0 - - @classmethod - def get_system_time_zone(cls): - """Return configured system time zone or fall back to Django settings.""" - try: - value = cls.objects.get(key=SYSTEM_TIME_ZONE_KEY).value - if value: - return value - except cls.DoesNotExist: - pass - return getattr(settings, "TIME_ZONE", "UTC") or "UTC" - - @classmethod - def set_system_time_zone(cls, tz_name: str | None): - """Persist the desired system time zone identifier.""" - value = (tz_name or "").strip() or getattr(settings, "TIME_ZONE", "UTC") or "UTC" - obj, _ = cls.objects.get_or_create( - key=SYSTEM_TIME_ZONE_KEY, - defaults={"name": "System Time Zone", "value": value}, - ) - if obj.value != value: - obj.value = value - obj.save(update_fields=["value"]) - return value + return int(cls.get_dvr_settings().get("post_offset_minutes", 0) or 0) @classmethod def get_dvr_series_rules(cls): - """Return list of series recording rules. 
Each: {tvg_id, title, mode: 'all'|'new'}""" - import json - try: - raw = cls.objects.get(key=DVR_SERIES_RULES_KEY).value - rules = json.loads(raw) if raw else [] - if isinstance(rules, list): - return rules - return [] - except cls.DoesNotExist: - # Initialize empty if missing - cls.objects.create(key=DVR_SERIES_RULES_KEY, name="DVR Series Rules", value="[]") - return [] + return cls.get_dvr_settings().get("series_rules", []) @classmethod def set_dvr_series_rules(cls, rules): - import json - try: - obj, _ = cls.objects.get_or_create(key=DVR_SERIES_RULES_KEY, defaults={"name": "DVR Series Rules", "value": "[]"}) - obj.value = json.dumps(rules) - obj.save(update_fields=["value"]) - return rules - except Exception: - return rules + cls._update_group(DVR_SETTINGS_KEY, "DVR Settings", {"series_rules": rules}) + return rules + + # Proxy Settings + @classmethod + def get_proxy_settings(cls): + """Get proxy settings.""" + return cls._get_group(PROXY_SETTINGS_KEY, { + "buffering_timeout": 15, + "buffering_speed": 1.0, + "redis_chunk_ttl": 60, + "channel_shutdown_delay": 0, + "channel_init_grace_period": 5, + }) + + # System Settings + @classmethod + def get_system_settings(cls): + """Get all system-related settings.""" + return cls._get_group(SYSTEM_SETTINGS_KEY, { + "time_zone": getattr(settings, "TIME_ZONE", "UTC") or "UTC", + "max_system_events": 100, + }) + + @classmethod + def get_system_time_zone(cls): + return cls.get_system_settings().get("time_zone") or getattr(settings, "TIME_ZONE", "UTC") or "UTC" + + @classmethod + def set_system_time_zone(cls, tz_name: str | None): + value = (tz_name or "").strip() or getattr(settings, "TIME_ZONE", "UTC") or "UTC" + cls._update_group(SYSTEM_SETTINGS_KEY, "System Settings", {"time_zone": value}) + return value class SystemEvent(models.Model): diff --git a/core/serializers.py b/core/serializers.py index c6029bc4..b2bd8ecc 100644 --- a/core/serializers.py +++ b/core/serializers.py @@ -3,7 +3,7 @@ import json import ipaddress from rest_framework import serializers -from .models import CoreSettings, UserAgent, StreamProfile, NETWORK_ACCESS +from .models import CoreSettings, UserAgent, StreamProfile, NETWORK_ACCESS_KEY class UserAgentSerializer(serializers.ModelSerializer): @@ -40,10 +40,10 @@ class CoreSettingsSerializer(serializers.ModelSerializer): fields = "__all__" def update(self, instance, validated_data): - if instance.key == NETWORK_ACCESS: + if instance.key == NETWORK_ACCESS_KEY: errors = False invalid = {} - value = json.loads(validated_data.get("value")) + value = validated_data.get("value") for key, val in value.items(): cidrs = val.split(",") for cidr in cidrs: diff --git a/core/utils.py b/core/utils.py index 7b6dd9b0..e3d6c389 100644 --- a/core/utils.py +++ b/core/utils.py @@ -417,8 +417,12 @@ def log_system_event(event_type, channel_id=None, channel_name=None, **details): # Get max events from settings (default 100) try: - max_events_setting = CoreSettings.objects.filter(key='max-system-events').first() - max_events = int(max_events_setting.value) if max_events_setting else 100 + from .models import CoreSettings + system_settings = CoreSettings.objects.filter(key='system_settings').first() + if system_settings and isinstance(system_settings.value, dict): + max_events = int(system_settings.value.get('max_system_events', 100)) + else: + max_events = 100 except Exception: max_events = 100 diff --git a/core/views.py b/core/views.py index 312d8836..5806d63c 100644 --- a/core/views.py +++ b/core/views.py @@ -1,5 +1,6 @@ # core/views.py import os 
+from shlex import split as shlex_split import sys import subprocess import logging @@ -131,7 +132,7 @@ def stream_view(request, channel_uuid): stream_profile = channel.stream_profile if not stream_profile: logger.error("No stream profile set for channel ID=%s, using default", channel.id) - stream_profile = StreamProfile.objects.get(id=CoreSettings.objects.get(key="default-stream-profile").value) + stream_profile = StreamProfile.objects.get(id=CoreSettings.get_default_stream_profile_id()) logger.debug("Stream profile used: %s", stream_profile.name) @@ -144,7 +145,7 @@ def stream_view(request, channel_uuid): logger.debug("Formatted parameters: %s", parameters) # Build the final command. - cmd = [stream_profile.command] + parameters.split() + cmd = [stream_profile.command] + shlex_split(parameters) logger.debug("Executing command: %s", cmd) try: diff --git a/dispatcharr/utils.py b/dispatcharr/utils.py index 56243b7a..e588bcaa 100644 --- a/dispatcharr/utils.py +++ b/dispatcharr/utils.py @@ -3,7 +3,7 @@ import json import ipaddress from django.http import JsonResponse from django.core.exceptions import ValidationError -from core.models import CoreSettings, NETWORK_ACCESS +from core.models import CoreSettings, NETWORK_ACCESS_KEY def json_error_response(message, status=400): @@ -39,7 +39,10 @@ def get_client_ip(request): def network_access_allowed(request, settings_key): - network_access = json.loads(CoreSettings.objects.get(key=NETWORK_ACCESS).value) + try: + network_access = CoreSettings.objects.get(key=NETWORK_ACCESS_KEY).value + except CoreSettings.DoesNotExist: + network_access = {} cidrs = ( network_access[settings_key].split(",") diff --git a/docker/DispatcharrBase b/docker/DispatcharrBase index 8bda1ed9..149bfffb 100644 --- a/docker/DispatcharrBase +++ b/docker/DispatcharrBase @@ -4,27 +4,44 @@ ENV DEBIAN_FRONTEND=noninteractive ENV VIRTUAL_ENV=/dispatcharrpy ENV PATH="$VIRTUAL_ENV/bin:$PATH" -# --- Install Python 3.13 and system dependencies --- +# --- Install Python 3.13 and build dependencies --- # Note: Hardware acceleration (VA-API, VDPAU, NVENC) already included in base ffmpeg image RUN apt-get update && apt-get install --no-install-recommends -y \ ca-certificates software-properties-common gnupg2 curl wget \ && add-apt-repository ppa:deadsnakes/ppa \ && apt-get update \ && apt-get install --no-install-recommends -y \ - python3.13 python3.13-dev python3.13-venv \ + python3.13 python3.13-dev python3.13-venv libpython3.13 \ python-is-python3 python3-pip \ - libpcre3 libpcre3-dev libpq-dev procps \ - build-essential gcc pciutils \ + libpcre3 libpcre3-dev libpq-dev procps pciutils \ nginx streamlink comskip \ vlc-bin vlc-plugin-base \ - && apt-get clean && rm -rf /var/lib/apt/lists/* + build-essential gcc g++ gfortran libopenblas-dev libopenblas0 ninja-build # --- Create Python virtual environment --- RUN python3.13 -m venv $VIRTUAL_ENV && $VIRTUAL_ENV/bin/pip install --upgrade pip # --- Install Python dependencies --- COPY requirements.txt /tmp/requirements.txt -RUN $VIRTUAL_ENV/bin/pip install --no-cache-dir -r /tmp/requirements.txt && rm /tmp/requirements.txt +RUN $VIRTUAL_ENV/bin/pip install --no-cache-dir -r /tmp/requirements.txt && \ + rm /tmp/requirements.txt + +# --- Build legacy NumPy wheel for old hardware (store for runtime switching) --- +RUN $VIRTUAL_ENV/bin/pip install --no-cache-dir build && \ + cd /tmp && \ + $VIRTUAL_ENV/bin/pip download --no-binary numpy --no-deps numpy && \ + tar -xzf numpy-*.tar.gz && \ + cd numpy-*/ && \ + $VIRTUAL_ENV/bin/python -m build --wheel 
-Csetup-args=-Dcpu-baseline="none" -Csetup-args=-Dcpu-dispatch="none" && \ + mv dist/*.whl /opt/ && \ + cd / && rm -rf /tmp/numpy-* /tmp/*.tar.gz && \ + $VIRTUAL_ENV/bin/pip uninstall -y build + +# --- Clean up build dependencies to reduce image size --- +RUN apt-get remove -y build-essential gcc g++ gfortran libopenblas-dev libpcre3-dev python3.13-dev ninja-build && \ + apt-get autoremove -y --purge && \ + apt-get clean && \ + rm -rf /var/lib/apt/lists/* /root/.cache /tmp/* # --- Set up Redis 7.x --- RUN curl -fsSL https://packages.redis.io/gpg | gpg --dearmor -o /usr/share/keyrings/redis-archive-keyring.gpg && \ diff --git a/docker/docker-compose.aio.yml b/docker/docker-compose.aio.yml index fe5e1507..2b1fd2ae 100644 --- a/docker/docker-compose.aio.yml +++ b/docker/docker-compose.aio.yml @@ -14,6 +14,10 @@ services: - REDIS_HOST=localhost - CELERY_BROKER_URL=redis://localhost:6379/0 - DISPATCHARR_LOG_LEVEL=info + # Legacy CPU Support (Optional) + # Uncomment to enable legacy NumPy build for older CPUs (circa 2009) + # that lack support for newer baseline CPU features + #- USE_LEGACY_NUMPY=true # Process Priority Configuration (Optional) # Lower values = higher priority. Range: -20 (highest) to 19 (lowest) # Negative values require cap_add: SYS_NICE (uncomment below) diff --git a/docker/docker-compose.debug.yml b/docker/docker-compose.debug.yml index d9dbef0e..c576cfd1 100644 --- a/docker/docker-compose.debug.yml +++ b/docker/docker-compose.debug.yml @@ -18,6 +18,10 @@ services: - REDIS_HOST=localhost - CELERY_BROKER_URL=redis://localhost:6379/0 - DISPATCHARR_LOG_LEVEL=trace + # Legacy CPU Support (Optional) + # Uncomment to enable legacy NumPy build for older CPUs (circa 2009) + # that lack support for newer baseline CPU features + #- USE_LEGACY_NUMPY=true # Process Priority Configuration (Optional) # Lower values = higher priority. Range: -20 (highest) to 19 (lowest) # Negative values require cap_add: SYS_NICE (uncomment below) diff --git a/docker/docker-compose.dev.yml b/docker/docker-compose.dev.yml index d1bb3680..b20c3296 100644 --- a/docker/docker-compose.dev.yml +++ b/docker/docker-compose.dev.yml @@ -17,6 +17,10 @@ services: - REDIS_HOST=localhost - CELERY_BROKER_URL=redis://localhost:6379/0 - DISPATCHARR_LOG_LEVEL=debug + # Legacy CPU Support (Optional) + # Uncomment to enable legacy NumPy build for older CPUs (circa 2009) + # that lack support for newer baseline CPU features + #- USE_LEGACY_NUMPY=true # Process Priority Configuration (Optional) # Lower values = higher priority. Range: -20 (highest) to 19 (lowest) # Negative values require cap_add: SYS_NICE (uncomment below) diff --git a/docker/docker-compose.yml b/docker/docker-compose.yml index aaa63990..e4093e4b 100644 --- a/docker/docker-compose.yml +++ b/docker/docker-compose.yml @@ -17,6 +17,10 @@ services: - REDIS_HOST=redis - CELERY_BROKER_URL=redis://redis:6379/0 - DISPATCHARR_LOG_LEVEL=info + # Legacy CPU Support (Optional) + # Uncomment to enable legacy NumPy build for older CPUs (circa 2009) + # that lack support for newer baseline CPU features + #- USE_LEGACY_NUMPY=true # Process Priority Configuration (Optional) # Lower values = higher priority. 
Range: -20 (highest) to 19 (lowest) # Negative values require cap_add: SYS_NICE (uncomment below) diff --git a/docker/entrypoint.sh b/docker/entrypoint.sh index 5de9bf0a..a50f2f49 100755 --- a/docker/entrypoint.sh +++ b/docker/entrypoint.sh @@ -27,6 +27,18 @@ echo_with_timestamp() { echo "$(date '+%Y-%m-%d %H:%M:%S') - $1" } +# --- NumPy version switching for legacy hardware --- +if [ "$USE_LEGACY_NUMPY" = "true" ]; then + # Check if NumPy was compiled with baseline support + if /dispatcharrpy/bin/python -c "import numpy; numpy.show_config()" 2>&1 | grep -qi "baseline"; then + echo_with_timestamp "🔧 Switching to legacy NumPy (no CPU baseline)..." + /dispatcharrpy/bin/pip install --no-cache-dir --force-reinstall --no-deps /opt/numpy-*.whl + echo_with_timestamp "✅ Legacy NumPy installed" + else + echo_with_timestamp "✅ Legacy NumPy (no baseline) already installed, skipping reinstallation" + fi +fi + # Set PostgreSQL environment variables export POSTGRES_DB=${POSTGRES_DB:-dispatcharr} export POSTGRES_USER=${POSTGRES_USER:-dispatch} diff --git a/docker/uwsgi.ini b/docker/uwsgi.ini index d831adfc..920bac48 100644 --- a/docker/uwsgi.ini +++ b/docker/uwsgi.ini @@ -37,6 +37,7 @@ http-keepalive = 1 buffer-size = 65536 # Increase buffer for large payloads post-buffering = 4096 # Reduce buffering for real-time streaming http-timeout = 600 # Prevent disconnects from long streams +socket-timeout = 600 # Prevent write timeouts when client buffers lazy-apps = true # Improve memory efficiency # Async mode (use gevent for high concurrency) @@ -58,4 +59,4 @@ logformat-strftime = true log-date = %%Y-%%m-%%d %%H:%%M:%%S,000 # Use formatted time with environment variable for log level log-format = %(ftime) $(DISPATCHARR_LOG_LEVEL) uwsgi.requests Worker ID: %(wid) %(method) %(status) %(uri) %(msecs)ms -log-buffering = 1024 # Add buffer size limit for logging \ No newline at end of file +log-buffering = 1024 # Add buffer size limit for logging diff --git a/frontend/package-lock.json b/frontend/package-lock.json index 84d18989..ed9e6010 100644 --- a/frontend/package-lock.json +++ b/frontend/package-lock.json @@ -12,6 +12,7 @@ "@dnd-kit/modifiers": "^9.0.0", "@dnd-kit/sortable": "^10.0.0", "@dnd-kit/utilities": "^3.2.2", + "@hookform/resolvers": "^5.2.2", "@mantine/charts": "~8.0.1", "@mantine/core": "~8.0.1", "@mantine/dates": "~8.0.1", @@ -22,13 +23,13 @@ "@tanstack/react-table": "^8.21.2", "allotment": "^1.20.4", "dayjs": "^1.11.13", - "formik": "^2.4.6", "hls.js": "^1.5.20", "lucide-react": "^0.511.0", "mpegts.js": "^1.8.0", "react": "^19.1.0", "react-dom": "^19.1.0", "react-draggable": "^4.4.6", + "react-hook-form": "^7.70.0", "react-pro-sidebar": "^1.1.0", "react-router-dom": "^7.3.0", "react-virtualized": "^9.22.6", @@ -1248,6 +1249,18 @@ "integrity": "sha512-aGTxbpbg8/b5JfU1HXSrbH3wXZuLPJcNEcZQFMxLs3oSzgtVu6nFPkbbGGUvBcUjKV2YyB9Wxxabo+HEH9tcRQ==", "license": "MIT" }, + "node_modules/@hookform/resolvers": { + "version": "5.2.2", + "resolved": "https://registry.npmjs.org/@hookform/resolvers/-/resolvers-5.2.2.tgz", + "integrity": "sha512-A/IxlMLShx3KjV/HeTcTfaMxdwy690+L/ZADoeaTltLx+CVuzkeVIPuybK3jrRfw7YZnmdKsVVHAlEPIAEUNlA==", + "license": "MIT", + "dependencies": { + "@standard-schema/utils": "^0.3.0" + }, + "peerDependencies": { + "react-hook-form": "^7.55.0" + } + }, "node_modules/@humanfs/core": { "version": "0.19.1", "resolved": "https://registry.npmjs.org/@humanfs/core/-/core-0.19.1.tgz", @@ -1776,6 +1789,12 @@ "win32" ] }, + "node_modules/@standard-schema/utils": { + "version": "0.3.0", + "resolved": 
"https://registry.npmjs.org/@standard-schema/utils/-/utils-0.3.0.tgz", + "integrity": "sha512-e7Mew686owMaPJVNNLs55PUvgz371nKgwsc4vxE49zsODpJEnxgxRo2y/OKrqueavXgZNMDVj3DdHFlaSAeU8g==", + "license": "MIT" + }, "node_modules/@swc/core": { "name": "@swc/wasm", "version": "1.13.20", @@ -2008,18 +2027,6 @@ "dev": true, "license": "MIT" }, - "node_modules/@types/hoist-non-react-statics": { - "version": "3.3.7", - "resolved": "https://registry.npmjs.org/@types/hoist-non-react-statics/-/hoist-non-react-statics-3.3.7.tgz", - "integrity": "sha512-PQTyIulDkIDro8P+IHbKCsw7U2xxBYflVzW/FgWdCAePD9xGSidgA76/GeJ6lBKoblyhf9pBY763gbrN+1dI8g==", - "license": "MIT", - "dependencies": { - "hoist-non-react-statics": "^3.3.0" - }, - "peerDependencies": { - "@types/react": "*" - } - }, "node_modules/@types/json-schema": { "version": "7.0.15", "resolved": "https://registry.npmjs.org/@types/json-schema/-/json-schema-7.0.15.tgz", @@ -2037,6 +2044,7 @@ "version": "19.2.7", "resolved": "https://registry.npmjs.org/@types/react/-/react-19.2.7.tgz", "integrity": "sha512-MWtvHrGZLFttgeEj28VXHxpmwYbor/ATPYbBfSFZEIRK0ecCFLl2Qo55z52Hss+UV9CRN7trSeq1zbgx7YDWWg==", + "devOptional": true, "license": "MIT", "dependencies": { "csstype": "^3.2.2" @@ -2833,15 +2841,6 @@ "dev": true, "license": "MIT" }, - "node_modules/deepmerge": { - "version": "2.2.1", - "resolved": "https://registry.npmjs.org/deepmerge/-/deepmerge-2.2.1.tgz", - "integrity": "sha512-R9hc1Xa/NOBi9WRVUWg19rl1UB7Tt4kuPd+thNJgFZoxXsTz7ncaPaeIm+40oSGuP33DfMb4sZt1QIGiJzC4EA==", - "license": "MIT", - "engines": { - "node": ">=0.10.0" - } - }, "node_modules/dequal": { "version": "2.0.3", "resolved": "https://registry.npmjs.org/dequal/-/dequal-2.0.3.tgz", @@ -3288,31 +3287,6 @@ "dev": true, "license": "ISC" }, - "node_modules/formik": { - "version": "2.4.9", - "resolved": "https://registry.npmjs.org/formik/-/formik-2.4.9.tgz", - "integrity": "sha512-5nI94BMnlFDdQRBY4Sz39WkhxajZJ57Fzs8wVbtsQlm5ScKIR1QLYqv/ultBnobObtlUyxpxoLodpixrsf36Og==", - "funding": [ - { - "type": "individual", - "url": "https://opencollective.com/formik" - } - ], - "license": "Apache-2.0", - "dependencies": { - "@types/hoist-non-react-statics": "^3.3.1", - "deepmerge": "^2.1.1", - "hoist-non-react-statics": "^3.3.0", - "lodash": "^4.17.21", - "lodash-es": "^4.17.21", - "react-fast-compare": "^2.0.1", - "tiny-warning": "^1.0.2", - "tslib": "^2.0.0" - }, - "peerDependencies": { - "react": ">=16.8.0" - } - }, "node_modules/fsevents": { "version": "2.3.3", "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.3.tgz", @@ -3751,12 +3725,6 @@ "integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==", "license": "MIT" }, - "node_modules/lodash-es": { - "version": "4.17.22", - "resolved": "https://registry.npmjs.org/lodash-es/-/lodash-es-4.17.22.tgz", - "integrity": "sha512-XEawp1t0gxSi9x01glktRZ5HDy0HXqrM0x5pXQM98EaI0NxO6jVM7omDOxsuEo5UIASAnm2bRp1Jt/e0a2XU8Q==", - "license": "MIT" - }, "node_modules/lodash.clamp": { "version": "4.0.3", "resolved": "https://registry.npmjs.org/lodash.clamp/-/lodash.clamp-4.0.3.tgz", @@ -4334,11 +4302,21 @@ "react": ">= 16.8 || 18.0.0" } }, - "node_modules/react-fast-compare": { - "version": "2.0.4", - "resolved": "https://registry.npmjs.org/react-fast-compare/-/react-fast-compare-2.0.4.tgz", - "integrity": "sha512-suNP+J1VU1MWFKcyt7RtjiSWUjvidmQSlqu+eHslq+342xCbGTYmC0mEhPCOHxlW0CywylOC1u2DFAT+bv4dBw==", - "license": "MIT" + "node_modules/react-hook-form": { + "version": "7.70.0", + "resolved": 
"https://registry.npmjs.org/react-hook-form/-/react-hook-form-7.70.0.tgz", + "integrity": "sha512-COOMajS4FI3Wuwrs3GPpi/Jeef/5W1DRR84Yl5/ShlT3dKVFUfoGiEZ/QE6Uw8P4T2/CLJdcTVYKvWBMQTEpvw==", + "license": "MIT", + "engines": { + "node": ">=18.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/react-hook-form" + }, + "peerDependencies": { + "react": "^16.8.0 || ^17 || ^18 || ^19" + } }, "node_modules/react-is": { "version": "16.13.1", @@ -4923,12 +4901,6 @@ "integrity": "sha512-+FbBPE1o9QAYvviau/qC5SE3caw21q3xkvWKBtja5vgqOWIHHJ3ioaq1VPfn/Szqctz2bU/oYeKd9/z5BL+PVg==", "license": "MIT" }, - "node_modules/tiny-warning": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/tiny-warning/-/tiny-warning-1.0.3.tgz", - "integrity": "sha512-lBN9zLN/oAf68o3zNXYrdCt1kP8WsiGW8Oo2ka41b2IM5JL/S1CTyX1rW0mb/zSuJun0ZUrDxx4sqvYS2FWzPA==", - "license": "MIT" - }, "node_modules/tinybench": { "version": "2.9.0", "resolved": "https://registry.npmjs.org/tinybench/-/tinybench-2.9.0.tgz", diff --git a/frontend/package.json b/frontend/package.json index ff5be72d..7b2d5927 100644 --- a/frontend/package.json +++ b/frontend/package.json @@ -23,11 +23,12 @@ "@mantine/form": "~8.0.1", "@mantine/hooks": "~8.0.1", "@mantine/notifications": "~8.0.1", + "@hookform/resolvers": "^5.2.2", "@tanstack/react-table": "^8.21.2", "allotment": "^1.20.4", "dayjs": "^1.11.13", - "formik": "^2.4.6", "hls.js": "^1.5.20", + "react-hook-form": "^7.70.0", "lucide-react": "^0.511.0", "mpegts.js": "^1.8.0", "react": "^19.1.0", diff --git a/frontend/src/WebSocket.jsx b/frontend/src/WebSocket.jsx index 40035d33..87d80953 100644 --- a/frontend/src/WebSocket.jsx +++ b/frontend/src/WebSocket.jsx @@ -756,6 +756,7 @@ export const WebsocketProvider = ({ children }) => { try { await API.requeryChannels(); await useChannelsStore.getState().fetchChannels(); + await fetchChannelProfiles(); console.log('Channels refreshed after bulk creation'); } catch (error) { console.error( diff --git a/frontend/src/api.js b/frontend/src/api.js index 64ce4d77..c33ff1ee 100644 --- a/frontend/src/api.js +++ b/frontend/src/api.js @@ -336,6 +336,15 @@ export default class API { delete channelData.channel_number; } + // Add channel profile IDs based on current selection + const selectedProfileId = useChannelsStore.getState().selectedProfileId; + if (selectedProfileId && selectedProfileId !== '0') { + // Specific profile selected - add only to that profile + channelData.channel_profile_ids = [parseInt(selectedProfileId)]; + } + // If selectedProfileId is '0' or not set, don't include channel_profile_ids + // which will trigger the backend's default behavior of adding to all profiles + if (channel.logo_file) { // Must send FormData for file upload body = new FormData(); @@ -2112,6 +2121,24 @@ export default class API { } } + static async duplicateChannelProfile(id, name) { + try { + const response = await request( + `${host}/api/channels/profiles/${id}/duplicate/`, + { + method: 'POST', + body: { name }, + } + ); + + useChannelsStore.getState().addProfile(response); + + return response; + } catch (e) { + errorNotification(`Failed to duplicate channel profile ${id}`, e); + } + } + static async deleteChannelProfile(id) { try { await request(`${host}/api/channels/profiles/${id}/`, { diff --git a/frontend/src/components/ConfirmationDialog.jsx b/frontend/src/components/ConfirmationDialog.jsx index 73805513..94fb169c 100644 --- a/frontend/src/components/ConfirmationDialog.jsx +++ b/frontend/src/components/ConfirmationDialog.jsx @@ 
-16,6 +16,7 @@ import useWarningsStore from '../store/warnings'; * @param {string} props.actionKey - Unique key for this type of action (used for suppression) * @param {Function} props.onSuppressChange - Called when "don't show again" option changes * @param {string} [props.size='md'] - Size of the modal + * @param {boolean} [props.loading=false] - Whether the confirm button should show loading state */ const ConfirmationDialog = ({ opened, @@ -31,6 +32,7 @@ const ConfirmationDialog = ({ zIndex = 1000, showDeleteFileOption = false, deleteFileLabel = 'Also delete files from disk', + loading = false, }) => { const suppressWarning = useWarningsStore((s) => s.suppressWarning); const isWarningSuppressed = useWarningsStore((s) => s.isWarningSuppressed); @@ -93,10 +95,16 @@ const ConfirmationDialog = ({ )} - - diff --git a/frontend/src/components/backups/BackupManager.jsx b/frontend/src/components/backups/BackupManager.jsx index fed0dcfa..dc130254 100644 --- a/frontend/src/components/backups/BackupManager.jsx +++ b/frontend/src/components/backups/BackupManager.jsx @@ -34,7 +34,13 @@ import useLocalStorage from '../../hooks/useLocalStorage'; import useWarningsStore from '../../store/warnings'; import { CustomTable, useTable } from '../tables/CustomTable'; -const RowActions = ({ row, handleDownload, handleRestoreClick, handleDeleteClick, downloading }) => { +const RowActions = ({ + row, + handleDownload, + handleRestoreClick, + handleDeleteClick, + downloading, +}) => { return ( @@ -98,7 +104,6 @@ function to24Hour(time12, period) { return `${String(hours24).padStart(2, '0')}:${String(minutes).padStart(2, '0')}`; } - // Get default timezone (same as Settings page) function getDefaultTimeZone() { try { @@ -116,35 +121,60 @@ function validateCronExpression(expression) { const parts = expression.trim().split(/\s+/); if (parts.length !== 5) { - return { valid: false, error: 'Cron expression must have exactly 5 parts: minute hour day month weekday' }; + return { + valid: false, + error: + 'Cron expression must have exactly 5 parts: minute hour day month weekday', + }; } const [minute, hour, dayOfMonth, month, dayOfWeek] = parts; // Validate each part (allowing *, */N steps, ranges, lists, steps) // Supports: *, */2, 5, 1-5, 1-5/2, 1,3,5, etc. 
- const cronPartRegex = /^(\*\/\d+|\*|\d+(-\d+)?(\/\d+)?(,\d+(-\d+)?(\/\d+)?)*)$/; + const cronPartRegex = + /^(\*\/\d+|\*|\d+(-\d+)?(\/\d+)?(,\d+(-\d+)?(\/\d+)?)*)$/; if (!cronPartRegex.test(minute)) { - return { valid: false, error: 'Invalid minute field (0-59, *, or cron syntax)' }; + return { + valid: false, + error: 'Invalid minute field (0-59, *, or cron syntax)', + }; } if (!cronPartRegex.test(hour)) { - return { valid: false, error: 'Invalid hour field (0-23, *, or cron syntax)' }; + return { + valid: false, + error: 'Invalid hour field (0-23, *, or cron syntax)', + }; } if (!cronPartRegex.test(dayOfMonth)) { - return { valid: false, error: 'Invalid day field (1-31, *, or cron syntax)' }; + return { + valid: false, + error: 'Invalid day field (1-31, *, or cron syntax)', + }; } if (!cronPartRegex.test(month)) { - return { valid: false, error: 'Invalid month field (1-12, *, or cron syntax)' }; + return { + valid: false, + error: 'Invalid month field (1-12, *, or cron syntax)', + }; } if (!cronPartRegex.test(dayOfWeek)) { - return { valid: false, error: 'Invalid weekday field (0-6, *, or cron syntax)' }; + return { + valid: false, + error: 'Invalid weekday field (0-6, *, or cron syntax)', + }; } // Additional range validation for numeric values const validateRange = (value, min, max, name) => { // Skip if it's * or contains special characters - if (value === '*' || value.includes('/') || value.includes('-') || value.includes(',')) { + if ( + value === '*' || + value.includes('/') || + value.includes('-') || + value.includes(',') + ) { return null; } const num = parseInt(value, 10); @@ -200,6 +230,8 @@ export default function BackupManager() { const [restoreConfirmOpen, setRestoreConfirmOpen] = useState(false); const [deleteConfirmOpen, setDeleteConfirmOpen] = useState(false); const [selectedBackup, setSelectedBackup] = useState(null); + const [restoring, setRestoring] = useState(false); + const [deleting, setDeleting] = useState(false); // Read user's preferences from settings const [timeFormat] = useLocalStorage('time-format', '12h'); @@ -482,6 +514,7 @@ export default function BackupManager() { }; const handleDeleteConfirm = async () => { + setDeleting(true); try { await API.deleteBackup(selectedBackup.name); notifications.show({ @@ -497,6 +530,7 @@ export default function BackupManager() { color: 'red', }); } finally { + setDeleting(false); setDeleteConfirmOpen(false); setSelectedBackup(null); } @@ -508,11 +542,13 @@ export default function BackupManager() { }; const handleRestoreConfirm = async () => { + setRestoring(true); try { await API.restoreBackup(selectedBackup.name); notifications.show({ title: 'Success', - message: 'Backup restored successfully. You may need to refresh the page.', + message: + 'Backup restored successfully. You may need to refresh the page.', color: 'green', }); setTimeout(() => window.location.reload(), 2000); @@ -523,6 +559,7 @@ export default function BackupManager() { color: 'red', }); } finally { + setRestoring(false); setRestoreConfirmOpen(false); setSelectedBackup(null); } @@ -555,16 +592,22 @@ export default function BackupManager() { {/* Schedule Settings */} - Scheduled Backups + + Scheduled Backups + handleScheduleChange('enabled', e.currentTarget.checked)} + onChange={(e) => + handleScheduleChange('enabled', e.currentTarget.checked) + } label={schedule.enabled ? 
'Enabled' : 'Disabled'} /> - Advanced (Cron Expression) + + Advanced (Cron Expression) + setAdvancedMode(e.currentTarget.checked)} @@ -584,18 +627,24 @@ export default function BackupManager() { handleScheduleChange('cron_expression', e.currentTarget.value)} + onChange={(e) => + handleScheduleChange( + 'cron_expression', + e.currentTarget.value + ) + } placeholder="0 3 * * *" description="Format: minute hour day month weekday (e.g., '0 3 * * *' = 3:00 AM daily)" disabled={!schedule.enabled} error={cronError} /> - Examples:
- • 0 3 * * * - Every day at 3:00 AM
- • 0 2 * * 0 - Every Sunday at 2:00 AM
- • 0 */6 * * * - Every 6 hours
- • 30 14 1 * * - 1st of every month at 2:30 PM
+ Examples:
+ • 0 3 * * * - Every day at 3:00 AM
+ • 0 2 * * 0 - Every Sunday at 2:00 AM
+ • 0 */6 * * * - Every 6 hours
+ • 30 14 1 * * - 1st of every month at 2:30 PM
@@ -603,7 +652,9 @@ export default function BackupManager() { label="Retention" description="0 = keep all" value={schedule.retention_count} - onChange={(value) => handleScheduleChange('retention_count', value || 0)} + onChange={(value) => + handleScheduleChange('retention_count', value || 0) + } min={0} disabled={!schedule.enabled} /> @@ -623,7 +674,9 @@ export default function BackupManager() { handleScheduleChange('day_of_week', parseInt(value, 10))} + onChange={(value) => + handleScheduleChange('day_of_week', parseInt(value, 10)) + } data={DAYS_OF_WEEK} disabled={!schedule.enabled} /> @@ -645,7 +700,9 @@ export default function BackupManager() { label="Hour" value={displayTime ? displayTime.split(':')[0] : '12'} onChange={(value) => { - const minute = displayTime ? displayTime.split(':')[1] : '00'; + const minute = displayTime + ? displayTime.split(':')[1] + : '00'; handleTimeChange12h(`${value}:${minute}`, null); }} data={Array.from({ length: 12 }, (_, i) => ({ @@ -659,7 +716,9 @@ export default function BackupManager() { label="Minute" value={displayTime ? displayTime.split(':')[1] : '00'} onChange={(value) => { - const hour = displayTime ? displayTime.split(':')[0] : '12'; + const hour = displayTime + ? displayTime.split(':')[0] + : '12'; handleTimeChange12h(`${hour}:${value}`, null); }} data={Array.from({ length: 60 }, (_, i) => ({ @@ -684,9 +743,13 @@ export default function BackupManager() { <> { - const hour = schedule.time ? schedule.time.split(':')[0] : '00'; + const hour = schedule.time + ? schedule.time.split(':')[0] + : '00'; handleTimeChange24h(`${hour}:${value}`); }} data={Array.from({ length: 60 }, (_, i) => ({ @@ -718,7 +785,9 @@ export default function BackupManager() { label="Retention" description="0 = keep all" value={schedule.retention_count} - onChange={(value) => handleScheduleChange('retention_count', value || 0)} + onChange={(value) => + handleScheduleChange('retention_count', value || 0) + } min={0} disabled={!schedule.enabled} /> @@ -737,7 +806,8 @@ export default function BackupManager() { {/* Timezone info - only show in simple mode */} {!advancedMode && schedule.enabled && schedule.time && ( - System Timezone: {userTimezone} • Backup will run at {schedule.time} {userTimezone} + System Timezone: {userTimezone} • Backup will run at{' '} + {schedule.time} {userTimezone} )} @@ -861,7 +931,11 @@ export default function BackupManager() { > Cancel - @@ -881,6 +955,7 @@ export default function BackupManager() { cancelLabel="Cancel" actionKey="restore-backup" onSuppressChange={suppressWarning} + loading={restoring} /> ); diff --git a/frontend/src/components/cards/SeriesCard.jsx b/frontend/src/components/cards/SeriesCard.jsx new file mode 100644 index 00000000..f010cb44 --- /dev/null +++ b/frontend/src/components/cards/SeriesCard.jsx @@ -0,0 +1,85 @@ +import { + Badge, + Box, + Card, + CardSection, + Group, + Image, + Stack, + Text, +} from '@mantine/core'; +import {Calendar, Play, Star} from "lucide-react"; +import React from "react"; + +const SeriesCard = ({ series, onClick }) => { + return ( + onClick(series)} + > + + + {series.logo?.url ? 
( + {series.name} + ) : ( + + + + )} + {/* Add Series badge in the same position as Movie badge */} + + Series + + + + + + {series.name} + + + {series.year && ( + + + + {series.year} + + + )} + {series.rating && ( + + + + {series.rating} + + + )} + + + {series.genre && ( + + {series.genre} + + )} + + + ); +}; + +export default SeriesCard; \ No newline at end of file diff --git a/frontend/src/components/cards/StreamConnectionCard.jsx b/frontend/src/components/cards/StreamConnectionCard.jsx new file mode 100644 index 00000000..62d6e62f --- /dev/null +++ b/frontend/src/components/cards/StreamConnectionCard.jsx @@ -0,0 +1,613 @@ +import { useLocation } from 'react-router-dom'; +import React, { useEffect, useMemo, useState } from 'react'; +import useLocalStorage from '../../hooks/useLocalStorage.jsx'; +import usePlaylistsStore from '../../store/playlists.jsx'; +import useSettingsStore from '../../store/settings.jsx'; +import { + ActionIcon, + Badge, + Box, + Card, + Center, + Flex, + Group, + Select, + Stack, + Text, + Tooltip, +} from '@mantine/core'; +import { + Gauge, + HardDriveDownload, + HardDriveUpload, + SquareX, + Timer, + Users, + Video, +} from 'lucide-react'; +import { toFriendlyDuration } from '../../utils/dateTimeUtils.js'; +import { CustomTable, useTable } from '../tables/CustomTable/index.jsx'; +import { TableHelper } from '../../helpers/index.jsx'; +import logo from '../../images/logo.png'; +import { formatBytes, formatSpeed } from '../../utils/networkUtils.js'; +import { showNotification } from '../../utils/notificationUtils.js'; +import { + connectedAccessor, + durationAccessor, + getBufferingSpeedThreshold, + getChannelStreams, + getLogoUrl, + getM3uAccountsMap, + getMatchingStreamByUrl, + getSelectedStream, + getStartDate, + getStreamOptions, + getStreamsByIds, + switchStream, +} from '../../utils/cards/StreamConnectionCardUtils.js'; + +// Create a separate component for each channel card to properly handle the hook +const StreamConnectionCard = ({ + channel, + clients, + stopClient, + stopChannel, + logos, + channelsByUUID, +}) => { + const location = useLocation(); + const [availableStreams, setAvailableStreams] = useState([]); + const [isLoadingStreams, setIsLoadingStreams] = useState(false); + const [activeStreamId, setActiveStreamId] = useState(null); + const [currentM3UProfile, setCurrentM3UProfile] = useState(null); // Add state for current M3U profile + const [data, setData] = useState([]); + const [previewedStream, setPreviewedStream] = useState(null); + + // Get M3U account data from the playlists store + const m3uAccounts = usePlaylistsStore((s) => s.playlists); + // Get settings for speed threshold + const settings = useSettingsStore((s) => s.settings); + + // Get Date-format from localStorage + const [dateFormatSetting] = useLocalStorage('date-format', 'mdy'); + const dateFormat = dateFormatSetting === 'mdy' ? 
'MM/DD' : 'DD/MM'; + const [tableSize] = useLocalStorage('table-size', 'default'); + + // Create a map of M3U account IDs to names for quick lookup + const m3uAccountsMap = useMemo(() => { + return getM3uAccountsMap(m3uAccounts); + }, [m3uAccounts]); + + // Update M3U profile information when channel data changes + useEffect(() => { + // If the channel data includes M3U profile information, update our state + if (channel.m3u_profile || channel.m3u_profile_name) { + setCurrentM3UProfile({ + name: + channel.m3u_profile?.name || + channel.m3u_profile_name || + 'Default M3U', + }); + } + }, [channel.m3u_profile, channel.m3u_profile_name, channel.stream_id]); + + // Fetch available streams for this channel + useEffect(() => { + const fetchStreams = async () => { + setIsLoadingStreams(true); + try { + // Get channel ID from UUID + const channelId = channelsByUUID[channel.channel_id]; + if (channelId) { + const streamData = await getChannelStreams(channelId); + + // Use streams in the order returned by the API without sorting + setAvailableStreams(streamData); + + // If we have a channel URL, try to find the matching stream + if (channel.url && streamData.length > 0) { + // Try to find matching stream based on URL + const matchingStream = getMatchingStreamByUrl( + streamData, + channel.url + ); + + if (matchingStream) { + setActiveStreamId(matchingStream.id.toString()); + + // If the stream has M3U profile info, save it + if (matchingStream.m3u_profile) { + setCurrentM3UProfile(matchingStream.m3u_profile); + } + } + } + } + } catch (error) { + console.error('Error fetching streams:', error); + } finally { + setIsLoadingStreams(false); + } + }; + + fetchStreams(); + }, [channel.channel_id, channel.url, channelsByUUID]); + + useEffect(() => { + setData( + clients + .filter((client) => client.channel.channel_id === channel.channel_id) + .map((client) => ({ + id: client.client_id, + ...client, + })) + ); + }, [clients, channel.channel_id]); + + const renderHeaderCell = (header) => { + switch (header.id) { + default: + return ( + + + {header.column.columnDef.header} + + + ); + } + }; + + const renderBodyCell = ({ cell, row }) => { + switch (cell.column.id) { + case 'actions': + return ( + +
+ + + stopClient( + row.original.channel.uuid, + row.original.client_id + ) + } + > + + + +
+
+ ); + } + }; + + const checkStreamsAfterChange = (streamId) => { + return async () => { + try { + const channelId = channelsByUUID[channel.channel_id]; + if (channelId) { + const updatedStreamData = await getChannelStreams(channelId); + console.log('Channel streams after switch:', updatedStreamData); + + // Update current stream information with fresh data + const updatedStream = getSelectedStream(updatedStreamData, streamId); + if (updatedStream?.m3u_profile) { + setCurrentM3UProfile(updatedStream.m3u_profile); + } + } + } catch (error) { + console.error('Error checking streams after switch:', error); + } + }; + }; + + // Handle stream switching + const handleStreamChange = async (streamId) => { + try { + console.log('Switching to stream ID:', streamId); + // Find the selected stream in availableStreams for debugging + const selectedStream = getSelectedStream(availableStreams, streamId); + console.log('Selected stream details:', selectedStream); + + // Make sure we're passing the correct ID to the API + const response = await switchStream(channel, streamId); + console.log('Stream switch API response:', response); + + // Update the local active stream ID immediately + setActiveStreamId(streamId); + + // Update M3U profile information if available in the response + if (response?.m3u_profile) { + setCurrentM3UProfile(response.m3u_profile); + } else if (selectedStream && selectedStream.m3u_profile) { + // Fallback to the profile from the selected stream + setCurrentM3UProfile(selectedStream.m3u_profile); + } + + // Show detailed notification with stream name + showNotification({ + title: 'Stream switching', + message: `Switching to "${selectedStream?.name}" for ${channel.name}`, + color: 'blue.5', + }); + + // After a short delay, fetch streams again to confirm the switch + setTimeout(checkStreamsAfterChange(streamId), 2000); + } catch (error) { + console.error('Stream switch error:', error); + showNotification({ + title: 'Error switching stream', + message: error.toString(), + color: 'red.5', + }); + } + }; + + const clientsColumns = useMemo( + () => [ + { + id: 'expand', + size: 20, + }, + { + header: 'IP Address', + accessorKey: 'ip_address', + }, + // Updated Connected column with tooltip + { + id: 'connected', + header: 'Connected', + accessorFn: connectedAccessor(dateFormat), + cell: ({ cell }) => ( + + {cell.getValue()} + + ), + }, + // Update Duration column with tooltip showing exact seconds + { + id: 'duration', + header: 'Duration', + accessorFn: durationAccessor(), + cell: ({ cell, row }) => { + const exactDuration = + row.original.connected_since || row.original.connection_duration; + return ( + + {cell.getValue()} + + ); + }, + }, + { + id: 'actions', + header: 'Actions', + size: tableSize == 'compact' ? 
75 : 100, + }, + ], + [] + ); + + const channelClientsTable = useTable({ + ...TableHelper.defaultProperties, + columns: clientsColumns, + data, + allRowIds: data.map((client) => client.id), + tableCellProps: () => ({ + padding: 4, + borderColor: '#444', + color: '#E0E0E0', + fontSize: '0.85rem', + }), + headerCellRenderFns: { + ip_address: renderHeaderCell, + connected: renderHeaderCell, + duration: renderHeaderCell, + actions: renderHeaderCell, + }, + bodyCellRenderFns: { + actions: renderBodyCell, + }, + getExpandedRowHeight: (row) => { + return 20 + 28 * row.original.streams.length; + }, + expandedRowRenderer: ({ row }) => { + return ( + + + + User Agent: + + {row.original.user_agent || 'Unknown'} + + + ); + }, + mantineExpandButtonProps: ({ row, table }) => ({ + size: 'xs', + style: { + transform: row.getIsExpanded() ? 'rotate(180deg)' : 'rotate(-90deg)', + transition: 'transform 0.2s', + }, + }), + displayColumnDefOptions: { + 'mrt-row-expand': { + size: 15, + header: '', + }, + 'mrt-row-actions': { + size: 74, + }, + }, + }); + + // Get logo URL from the logos object if available + const logoUrl = getLogoUrl(channel.logo_id, logos, previewedStream); + + useEffect(() => { + let isMounted = true; + // Only fetch if we have a stream_id and NO channel.name + if (!channel.name && channel.stream_id) { + getStreamsByIds(channel.stream_id).then((streams) => { + if (isMounted && streams && streams.length > 0) { + setPreviewedStream(streams[0]); + } + }); + } + return () => { + isMounted = false; + }; + }, [channel.name, channel.stream_id]); + + const channelName = + channel.name || previewedStream?.name || 'Unnamed Channel'; + const uptime = channel.uptime || 0; + const bitrates = channel.bitrates || []; + const totalBytes = channel.total_bytes || 0; + const clientCount = channel.client_count || 0; + const avgBitrate = channel.avg_bitrate || '0 Kbps'; + const streamProfileName = channel.stream_profile?.name || 'Unknown Profile'; + + // Use currentM3UProfile if available, otherwise fall back to channel data + const m3uProfileName = + currentM3UProfile?.name || + channel.m3u_profile?.name || + channel.m3u_profile_name || + 'Unknown M3U Profile'; + + // Create select options for available streams + const streamOptions = getStreamOptions(availableStreams, m3uAccountsMap); + + if (location.pathname !== '/stats') { + return <>; + } + + // Safety check - if channel doesn't have required data, don't render + if (!channel || !channel.channel_id) { + return null; + } + + return ( + + + + + channel logo + + + + + +
+ + {toFriendlyDuration(uptime, 'seconds')} +
+
+
+
+ + stopChannel(channel.channel_id)} + > + + + +
+
+
+ + + + {channelName} + + + + + + + + + {/* Display M3U profile information */} + + + + + {m3uProfileName} + + + + + {/* Add stream selection dropdown */} + {availableStreams.length > 0 && ( + +