diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index a1cb27bb..9186541d 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -184,13 +184,13 @@ jobs: echo "Creating multi-arch manifest for ${OWNER}/${REPO}" # GitHub Container Registry manifests - # latest tag + # Create one manifest with both latest and version tags docker buildx imagetools create \ --annotation "index:org.opencontainers.image.title=${{ needs.prepare.outputs.repo_name }}" \ --annotation "index:org.opencontainers.image.description=Your ultimate IPTV & stream Management companion." \ --annotation "index:org.opencontainers.image.url=https://github.com/${{ github.repository }}" \ --annotation "index:org.opencontainers.image.source=https://github.com/${{ github.repository }}" \ - --annotation "index:org.opencontainers.image.version=latest" \ + --annotation "index:org.opencontainers.image.version=${VERSION}" \ --annotation "index:org.opencontainers.image.created=${TIMESTAMP}" \ --annotation "index:org.opencontainers.image.revision=${{ github.sha }}" \ --annotation "index:org.opencontainers.image.licenses=See repository" \ @@ -200,9 +200,11 @@ jobs: --annotation "index:maintainer=${{ github.actor }}" \ --annotation "index:build_version=Dispatcharr version: ${VERSION} Build date: ${TIMESTAMP}" \ --tag ghcr.io/${OWNER}/${REPO}:latest \ - ghcr.io/${OWNER}/${REPO}:latest-amd64 ghcr.io/${OWNER}/${REPO}:latest-arm64 + --tag ghcr.io/${OWNER}/${REPO}:${VERSION} \ + ghcr.io/${OWNER}/${REPO}:${VERSION}-amd64 ghcr.io/${OWNER}/${REPO}:${VERSION}-arm64 - # version tag + # Docker Hub manifests + # Create one manifest with both latest and version tags docker buildx imagetools create \ --annotation "index:org.opencontainers.image.title=${{ needs.prepare.outputs.repo_name }}" \ --annotation "index:org.opencontainers.image.description=Your ultimate IPTV & stream Management companion." \ @@ -217,43 +219,7 @@ jobs: --annotation "index:org.opencontainers.image.authors=${{ github.actor }}" \ --annotation "index:maintainer=${{ github.actor }}" \ --annotation "index:build_version=Dispatcharr version: ${VERSION} Build date: ${TIMESTAMP}" \ - --tag ghcr.io/${OWNER}/${REPO}:${VERSION} \ - ghcr.io/${OWNER}/${REPO}:${VERSION}-amd64 ghcr.io/${OWNER}/${REPO}:${VERSION}-arm64 - - # Docker Hub manifests - # latest tag - docker buildx imagetools create \ - --annotation "index:org.opencontainers.image.title=${{ needs.prepare.outputs.repo_name }}" \ - --annotation "index:org.opencontainers.image.description=Your ultimate IPTV & stream Management companion." 
\ - --annotation "index:org.opencontainers.image.url=https://github.com/${{ github.repository }}" \ - --annotation "index:org.opencontainers.image.source=https://github.com/${{ github.repository }}" \ - --annotation "index:org.opencontainers.image.version=latest" \ - --annotation "index:org.opencontainers.image.created=${TIMESTAMP}" \ - --annotation "index:org.opencontainers.image.revision=${{ github.sha }}" \ - --annotation "index:org.opencontainers.image.licenses=See repository" \ - --annotation "index:org.opencontainers.image.documentation=https://dispatcharr.github.io/Dispatcharr-Docs/" \ - --annotation "index:org.opencontainers.image.vendor=${OWNER}" \ - --annotation "index:org.opencontainers.image.authors=${{ github.actor }}" \ - --annotation "index:maintainer=${{ github.actor }}" \ - --annotation "index:build_version=Dispatcharr version: ${VERSION} Build date: ${TIMESTAMP}" \ --tag docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${REPO}:latest \ - docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${REPO}:latest-amd64 docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${REPO}:latest-arm64 - - # version tag - docker buildx imagetools create \ - --annotation "index:org.opencontainers.image.title=${{ needs.prepare.outputs.repo_name }}" \ - --annotation "index:org.opencontainers.image.description=Your ultimate IPTV & stream Management companion." \ - --annotation "index:org.opencontainers.image.url=https://github.com/${{ github.repository }}" \ - --annotation "index:org.opencontainers.image.source=https://github.com/${{ github.repository }}" \ - --annotation "index:org.opencontainers.image.version=${VERSION}" \ - --annotation "index:org.opencontainers.image.created=${TIMESTAMP}" \ - --annotation "index:org.opencontainers.image.revision=${{ github.sha }}" \ - --annotation "index:org.opencontainers.image.licenses=See repository" \ - --annotation "index:org.opencontainers.image.documentation=https://dispatcharr.github.io/Dispatcharr-Docs/" \ - --annotation "index:org.opencontainers.image.vendor=${OWNER}" \ - --annotation "index:org.opencontainers.image.authors=${{ github.actor }}" \ - --annotation "index:maintainer=${{ github.actor }}" \ - --annotation "index:build_version=Dispatcharr version: ${VERSION} Build date: ${TIMESTAMP}" \ --tag docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${REPO}:${VERSION} \ docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${REPO}:${VERSION}-amd64 docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${REPO}:${VERSION}-arm64 diff --git a/CHANGELOG.md b/CHANGELOG.md index 516a9e4c..44663dc9 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -7,6 +7,24 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 ## [Unreleased] +### Changed + +- Docker setup enhanced for legacy CPU support: Added `USE_LEGACY_NUMPY` environment variable to enable custom-built NumPy with no CPU baseline, allowing Dispatcharr to run on older CPUs (circa 2009) that lack support for newer baseline CPU features. When set to `true`, the entrypoint script will install the legacy NumPy build instead of the standard distribution. 
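A quick way to verify which NumPy build actually ended up active is to inspect the SIMD configuration from inside the container. The snippet below is an illustrative check, not part of the patch: it assumes NumPy ≥ 1.24 (where `numpy.show_runtime()` was added), and the expectation of an empty baseline follows from the `-Dcpu-baseline="none"` flag the legacy wheel is built with in `docker/DispatcharrBase`:

```python
# Illustrative check (not part of this patch): report which NumPy build
# is active. Assumes NumPy >= 1.24, which provides numpy.show_runtime().
import numpy as np

print("NumPy version:", np.__version__)

# show_runtime() prints the SIMD extensions of the running build.
# The legacy wheel (built with -Dcpu-baseline="none") should report no
# baseline CPU features, while the stock PyPI wheel lists SSE/AVX levels.
np.show_runtime()
```

Running this through the container's virtualenv interpreter (e.g. `docker exec <container> /dispatcharrpy/bin/python -c "import numpy; numpy.show_runtime()"`) before and after setting `USE_LEGACY_NUMPY=true` confirms whether the entrypoint swap took effect.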
+- VOD upstream read timeout reduced from 30 seconds to 10 seconds to minimize lock hold time when clients disconnect during the connection phase
+- Form management refactored across the application: Migrated Channel, Stream, M3U Profile, Stream Profile, Logo, and User Agent forms from Formik to React Hook Form (RHF) with Yup validation for improved form handling, better validation feedback, and enhanced code maintainability
+
+### Fixed
+
+- Fixed Channels table EPG column showing "Not Assigned" on initial load for users with large EPG datasets. Added `tvgsLoaded` flag to the EPG store to track when EPG data has finished loading, ensuring the table waits for EPG data before displaying. EPG cells now show animated skeleton placeholders while loading instead of incorrectly showing "Not Assigned". (Fixes #810)
+- Fixed VOD profile connection count not being decremented when a stream connection fails (timeout, 404, etc.), preventing stale counts from pushing profiles to their capacity limits and rejecting valid stream requests
+- Fixed React warning in the Channel form by removing the invalid `removeTrailingZeros` prop from the NumberInput component
+- Release workflow Docker tagging: Fixed an issue where `latest` and version tags (e.g., `0.16.0`) were creating separate manifests instead of pointing to the same image digest, which caused old `latest` tags to become orphaned/untagged after new releases. Now creates a single multi-arch manifest with both tags, maintaining proper tag relationships and download statistics visibility on GitHub.
+- Fixed onboarding message appearing in the Channels Table when filtered results are empty. The onboarding message now only displays when there are no channels created at all, not when channels exist but are filtered out by current filters.
+- Fixed `M3UMovieRelation.get_stream_url()` and `M3UEpisodeRelation.get_stream_url()` to use the XC client's `_normalize_url()` method instead of a simple `rstrip('/')`. This properly handles malformed M3U account URLs (e.g., containing `/player_api.php` or query parameters) before constructing VOD stream endpoints, matching the behavior of live channel URL building. (Closes #722)
+- Fixed `bulk_create` and `bulk_update` errors during VOD content refresh by pre-checking object existence with optimized bulk queries (3 queries total instead of N per batch) before creating new objects. This ensures all movie/series objects have primary keys before relation operations, preventing "prohibited to prevent data loss due to unsaved related object" errors. Additionally fixed duplicate key constraint violations by treating TMDB/IMDB ID values of `0` or `'0'` as invalid (some providers use this to indicate "no ID"), converting them to NULL to prevent multiple items from incorrectly sharing the same ID. (Fixes #813)
+
+## [0.16.0] - 2026-01-04
+
 ### Added
 
 - Advanced filtering for Channels table: Filter menu now allows toggling disabled channels visibility (when a profile is selected) and filtering to show only empty channels without streams (Closes #182)
@@ -22,6 +40,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
 
 ### Changed
 
 - Fixed event viewer arrow direction (previously inverted) — UI behavior corrected. - Thanks [@drnikcuk](https://github.com/drnikcuk) (Closes #772)
+- Region code options now intentionally include both `GB` (ISO 3166-1 standard) and `UK` (commonly used by EPG/XMLTV providers) to accommodate real-world EPG data variations.
Many providers use `UK` in channel identifiers (e.g., `BBCOne.uk`) despite `GB` being the official ISO country code. Users should select the region code that matches their specific EPG provider's convention for optimal region-based EPG matching bonuses - Thanks [@bigpandaaaa](https://github.com/bigpandaaaa) - Channel number inputs in stream-to-channel creation modals no longer have a maximum value restriction, allowing users to enter any valid channel number supported by the database - Stream log parsing refactored to use factory pattern: Simplified `ChannelService.parse_and_store_stream_info()` to route parsing through specialized log parsers instead of inline program-specific logic (~150 lines of code removed) - Stream profile names in fixtures updated to use proper capitalization (ffmpeg → FFmpeg, streamlink → Streamlink) @@ -43,6 +62,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 ### Fixed +- Auto Channel Sync Force EPG Source feature not properly forcing "No EPG" assignment - When selecting "Force EPG Source" > "No EPG (Disabled)", channels were still being auto-matched to EPG data instead of forcing dummy/no EPG. Now correctly sets `force_dummy_epg` flag to prevent unwanted EPG assignment. (Fixes #788) - VOD episode processing now properly handles season and episode numbers from APIs that return string values instead of integers, with comprehensive error logging to track data quality issues - Thanks [@patchy8736](https://github.com/patchy8736) (Fixes #770) - VOD episode-to-stream relations are now validated to ensure episodes have been saved to the database before creating relations, preventing integrity errors when bulk_create operations encounter conflicts - Thanks [@patchy8736](https://github.com/patchy8736) - VOD category filtering now correctly handles category names containing pipe "|" characters (e.g., "PL | BAJKI", "EN | MOVIES") by using `rsplit()` to split from the right instead of the left, ensuring the category type is correctly extracted as the last segment - Thanks [@Vitekant](https://github.com/Vitekant) @@ -50,6 +70,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 - M3U and EPG manager page no longer crashes when a playlist references a deleted channel group (Fixes screen blank on navigation) - Stream validation now returns original URL instead of redirected URL to prevent issues with temporary redirect URLs that expire before clients can connect - XtreamCodes EPG limit parameter now properly converted to integer to prevent type errors when accessing EPG listings (Fixes #781) +- Docker container file permissions: Django management commands (`migrate`, `collectstatic`) now run as the non-root user to prevent root-owned `__pycache__` and static files from causing permission issues - Thanks [@sethwv](https://github.com/sethwv) - Stream validation now continues with GET request if HEAD request fails due to connection issues - Thanks [@kvnnap](https://github.com/kvnnap) (Fixes #782) - XtreamCodes M3U files now correctly set `x-tvg-url` and `url-tvg` headers to reference XC EPG URL (`xmltv.php`) instead of standard EPG endpoint when downloaded via XC API (Fixes #629) diff --git a/apps/channels/api_views.py b/apps/channels/api_views.py index aebb74a3..e162f63a 100644 --- a/apps/channels/api_views.py +++ b/apps/channels/api_views.py @@ -236,12 +236,8 @@ class ChannelGroupViewSet(viewsets.ModelViewSet): return [Authenticated()] def get_queryset(self): - """Add annotation for association counts""" - 
from django.db.models import Count - return ChannelGroup.objects.annotate( - channel_count=Count('channels', distinct=True), - m3u_account_count=Count('m3u_accounts', distinct=True) - ) + """Return channel groups with prefetched relations for efficient counting""" + return ChannelGroup.objects.prefetch_related('channels', 'm3u_accounts').all() def update(self, request, *args, **kwargs): """Override update to check M3U associations""" @@ -277,15 +273,20 @@ class ChannelGroupViewSet(viewsets.ModelViewSet): @action(detail=False, methods=["post"], url_path="cleanup") def cleanup_unused_groups(self, request): """Delete all channel groups with no channels or M3U account associations""" - from django.db.models import Count + from django.db.models import Q, Exists, OuterRef + + # Find groups with no channels and no M3U account associations using Exists subqueries + from .models import Channel, ChannelGroupM3UAccount + + has_channels = Channel.objects.filter(channel_group_id=OuterRef('pk')) + has_accounts = ChannelGroupM3UAccount.objects.filter(channel_group_id=OuterRef('pk')) - # Find groups with no channels and no M3U account associations unused_groups = ChannelGroup.objects.annotate( - channel_count=Count('channels', distinct=True), - m3u_account_count=Count('m3u_accounts', distinct=True) + has_channels=Exists(has_channels), + has_accounts=Exists(has_accounts) ).filter( - channel_count=0, - m3u_account_count=0 + has_channels=False, + has_accounts=False ) deleted_count = unused_groups.count() diff --git a/apps/channels/serializers.py b/apps/channels/serializers.py index 635281d5..8847050d 100644 --- a/apps/channels/serializers.py +++ b/apps/channels/serializers.py @@ -179,8 +179,8 @@ class ChannelGroupM3UAccountSerializer(serializers.ModelSerializer): # Channel Group # class ChannelGroupSerializer(serializers.ModelSerializer): - channel_count = serializers.IntegerField(read_only=True) - m3u_account_count = serializers.IntegerField(read_only=True) + channel_count = serializers.SerializerMethodField() + m3u_account_count = serializers.SerializerMethodField() m3u_accounts = ChannelGroupM3UAccountSerializer( many=True, read_only=True @@ -190,6 +190,14 @@ class ChannelGroupSerializer(serializers.ModelSerializer): model = ChannelGroup fields = ["id", "name", "channel_count", "m3u_account_count", "m3u_accounts"] + def get_channel_count(self, obj): + """Get count of channels in this group""" + return obj.channels.count() + + def get_m3u_account_count(self, obj): + """Get count of M3U accounts associated with this group""" + return obj.m3u_accounts.count() + class ChannelProfileSerializer(serializers.ModelSerializer): channels = serializers.SerializerMethodField() diff --git a/apps/proxy/vod_proxy/multi_worker_connection_manager.py b/apps/proxy/vod_proxy/multi_worker_connection_manager.py index 251721c5..1534f761 100644 --- a/apps/proxy/vod_proxy/multi_worker_connection_manager.py +++ b/apps/proxy/vod_proxy/multi_worker_connection_manager.py @@ -357,12 +357,12 @@ class RedisBackedVODConnection: logger.info(f"[{self.session_id}] Making request #{state.request_count} to {'final' if state.final_url else 'original'} URL") - # Make request + # Make request (10s connect, 10s read timeout - keeps lock time reasonable if client disconnects) response = self.local_session.get( target_url, headers=headers, stream=True, - timeout=(10, 30), + timeout=(10, 10), allow_redirects=allow_redirects ) response.raise_for_status() @@ -712,6 +712,10 @@ class MultiWorkerVODConnectionManager: content_name = content_obj.name if 
hasattr(content_obj, 'name') else str(content_obj) client_id = session_id + # Track whether we incremented profile connections (for cleanup on error) + profile_connections_incremented = False + redis_connection = None + logger.info(f"[{client_id}] Worker {self.worker_id} - Redis-backed streaming request for {content_type} {content_name}") try: @@ -802,6 +806,7 @@ class MultiWorkerVODConnectionManager: # Increment profile connections after successful connection creation self._increment_profile_connections(m3u_profile) + profile_connections_incremented = True logger.info(f"[{client_id}] Worker {self.worker_id} - Created consolidated connection with session metadata") else: @@ -1024,6 +1029,19 @@ class MultiWorkerVODConnectionManager: except Exception as e: logger.error(f"[{client_id}] Worker {self.worker_id} - Error in Redis-backed stream_content_with_session: {e}", exc_info=True) + + # Decrement profile connections if we incremented them but failed before streaming started + if profile_connections_incremented: + logger.info(f"[{client_id}] Connection error occurred after profile increment - decrementing profile connections") + self._decrement_profile_connections(m3u_profile.id) + + # Also clean up the Redis connection state since we won't be using it + if redis_connection: + try: + redis_connection.cleanup(connection_manager=self, current_worker_id=self.worker_id) + except Exception as cleanup_error: + logger.error(f"[{client_id}] Error during cleanup after connection failure: {cleanup_error}") + return HttpResponse(f"Streaming error: {str(e)}", status=500) def _apply_timeshift_parameters(self, original_url, utc_start=None, utc_end=None, offset=None): diff --git a/apps/vod/models.py b/apps/vod/models.py index 69aed808..7067856e 100644 --- a/apps/vod/models.py +++ b/apps/vod/models.py @@ -245,10 +245,13 @@ class M3UMovieRelation(models.Model): """Get the full stream URL for this movie from this provider""" # Build URL dynamically for XtreamCodes accounts if self.m3u_account.account_type == 'XC': - server_url = self.m3u_account.server_url.rstrip('/') + from core.xtream_codes import Client as XCClient + # Use XC client's URL normalization to handle malformed URLs + # (e.g., URLs with /player_api.php or query parameters) + normalized_url = XCClient(self.m3u_account.server_url, '', '')._normalize_url(self.m3u_account.server_url) username = self.m3u_account.username password = self.m3u_account.password - return f"{server_url}/movie/{username}/{password}/{self.stream_id}.{self.container_extension or 'mp4'}" + return f"{normalized_url}/movie/{username}/{password}/{self.stream_id}.{self.container_extension or 'mp4'}" else: # For other account types, we would need another way to build URLs return None @@ -285,10 +288,12 @@ class M3UEpisodeRelation(models.Model): if self.m3u_account.account_type == 'XC': # For XtreamCodes accounts, build the URL dynamically - server_url = self.m3u_account.server_url.rstrip('/') + # Use XC client's URL normalization to handle malformed URLs + # (e.g., URLs with /player_api.php or query parameters) + normalized_url = XtreamCodesClient(self.m3u_account.server_url, '', '')._normalize_url(self.m3u_account.server_url) username = self.m3u_account.username password = self.m3u_account.password - return f"{server_url}/series/{username}/{password}/{self.stream_id}.{self.container_extension or 'mp4'}" + return f"{normalized_url}/series/{username}/{password}/{self.stream_id}.{self.container_extension or 'mp4'}" else: # We might support non XC accounts in the future # For now, 
return None diff --git a/apps/vod/tasks.py b/apps/vod/tasks.py index 4eb9fadc..0dcd9cfd 100644 --- a/apps/vod/tasks.py +++ b/apps/vod/tasks.py @@ -410,10 +410,10 @@ def process_movie_batch(account, batch, categories, relations, scan_start_time=N tmdb_id = movie_data.get('tmdb_id') or movie_data.get('tmdb') imdb_id = movie_data.get('imdb_id') or movie_data.get('imdb') - # Clean empty string IDs - if tmdb_id == '': + # Clean empty string IDs and zero values (some providers use 0 to indicate no ID) + if tmdb_id == '' or tmdb_id == 0 or tmdb_id == '0': tmdb_id = None - if imdb_id == '': + if imdb_id == '' or imdb_id == 0 or imdb_id == '0': imdb_id = None # Create a unique key for this movie (priority: TMDB > IMDB > name+year) @@ -614,26 +614,41 @@ def process_movie_batch(account, batch, categories, relations, scan_start_time=N # First, create new movies and get their IDs created_movies = {} if movies_to_create: - Movie.objects.bulk_create(movies_to_create, ignore_conflicts=True) + # Bulk query to check which movies already exist + tmdb_ids = [m.tmdb_id for m in movies_to_create if m.tmdb_id] + imdb_ids = [m.imdb_id for m in movies_to_create if m.imdb_id] + name_year_pairs = [(m.name, m.year) for m in movies_to_create if not m.tmdb_id and not m.imdb_id] - # Get the newly created movies with their IDs - # We need to re-fetch them to get the primary keys + existing_by_tmdb = {m.tmdb_id: m for m in Movie.objects.filter(tmdb_id__in=tmdb_ids)} if tmdb_ids else {} + existing_by_imdb = {m.imdb_id: m for m in Movie.objects.filter(imdb_id__in=imdb_ids)} if imdb_ids else {} + + existing_by_name_year = {} + if name_year_pairs: + for movie in Movie.objects.filter(tmdb_id__isnull=True, imdb_id__isnull=True): + key = (movie.name, movie.year) + if key in name_year_pairs: + existing_by_name_year[key] = movie + + # Check each movie against the bulk query results + movies_actually_created = [] for movie in movies_to_create: - # Find the movie by its unique identifiers - if movie.tmdb_id: - db_movie = Movie.objects.filter(tmdb_id=movie.tmdb_id).first() - elif movie.imdb_id: - db_movie = Movie.objects.filter(imdb_id=movie.imdb_id).first() - else: - db_movie = Movie.objects.filter( - name=movie.name, - year=movie.year, - tmdb_id__isnull=True, - imdb_id__isnull=True - ).first() + existing = None + if movie.tmdb_id and movie.tmdb_id in existing_by_tmdb: + existing = existing_by_tmdb[movie.tmdb_id] + elif movie.imdb_id and movie.imdb_id in existing_by_imdb: + existing = existing_by_imdb[movie.imdb_id] + elif not movie.tmdb_id and not movie.imdb_id: + existing = existing_by_name_year.get((movie.name, movie.year)) - if db_movie: - created_movies[id(movie)] = db_movie + if existing: + created_movies[id(movie)] = existing + else: + movies_actually_created.append(movie) + created_movies[id(movie)] = movie + + # Bulk create only movies that don't exist + if movies_actually_created: + Movie.objects.bulk_create(movies_actually_created) # Update existing movies if movies_to_update: @@ -649,12 +664,16 @@ def process_movie_batch(account, batch, categories, relations, scan_start_time=N movie.logo = movie._logo_to_update movie.save(update_fields=['logo']) - # Update relations to reference the correct movie objects + # Update relations to reference the correct movie objects (with PKs) for relation in relations_to_create: if id(relation.movie) in created_movies: relation.movie = created_movies[id(relation.movie)] - # Handle relations + for relation in relations_to_update: + if id(relation.movie) in created_movies: + relation.movie = 
created_movies[id(relation.movie)] + + # All movies now have PKs, safe to bulk create/update relations if relations_to_create: M3UMovieRelation.objects.bulk_create(relations_to_create, ignore_conflicts=True) @@ -724,10 +743,10 @@ def process_series_batch(account, batch, categories, relations, scan_start_time= tmdb_id = series_data.get('tmdb') or series_data.get('tmdb_id') imdb_id = series_data.get('imdb') or series_data.get('imdb_id') - # Clean empty string IDs - if tmdb_id == '': + # Clean empty string IDs and zero values (some providers use 0 to indicate no ID) + if tmdb_id == '' or tmdb_id == 0 or tmdb_id == '0': tmdb_id = None - if imdb_id == '': + if imdb_id == '' or imdb_id == 0 or imdb_id == '0': imdb_id = None # Create a unique key for this series (priority: TMDB > IMDB > name+year) @@ -945,26 +964,41 @@ def process_series_batch(account, batch, categories, relations, scan_start_time= # First, create new series and get their IDs created_series = {} if series_to_create: - Series.objects.bulk_create(series_to_create, ignore_conflicts=True) + # Bulk query to check which series already exist + tmdb_ids = [s.tmdb_id for s in series_to_create if s.tmdb_id] + imdb_ids = [s.imdb_id for s in series_to_create if s.imdb_id] + name_year_pairs = [(s.name, s.year) for s in series_to_create if not s.tmdb_id and not s.imdb_id] - # Get the newly created series with their IDs - # We need to re-fetch them to get the primary keys + existing_by_tmdb = {s.tmdb_id: s for s in Series.objects.filter(tmdb_id__in=tmdb_ids)} if tmdb_ids else {} + existing_by_imdb = {s.imdb_id: s for s in Series.objects.filter(imdb_id__in=imdb_ids)} if imdb_ids else {} + + existing_by_name_year = {} + if name_year_pairs: + for series in Series.objects.filter(tmdb_id__isnull=True, imdb_id__isnull=True): + key = (series.name, series.year) + if key in name_year_pairs: + existing_by_name_year[key] = series + + # Check each series against the bulk query results + series_actually_created = [] for series in series_to_create: - # Find the series by its unique identifiers - if series.tmdb_id: - db_series = Series.objects.filter(tmdb_id=series.tmdb_id).first() - elif series.imdb_id: - db_series = Series.objects.filter(imdb_id=series.imdb_id).first() - else: - db_series = Series.objects.filter( - name=series.name, - year=series.year, - tmdb_id__isnull=True, - imdb_id__isnull=True - ).first() + existing = None + if series.tmdb_id and series.tmdb_id in existing_by_tmdb: + existing = existing_by_tmdb[series.tmdb_id] + elif series.imdb_id and series.imdb_id in existing_by_imdb: + existing = existing_by_imdb[series.imdb_id] + elif not series.tmdb_id and not series.imdb_id: + existing = existing_by_name_year.get((series.name, series.year)) - if db_series: - created_series[id(series)] = db_series + if existing: + created_series[id(series)] = existing + else: + series_actually_created.append(series) + created_series[id(series)] = series + + # Bulk create only series that don't exist + if series_actually_created: + Series.objects.bulk_create(series_actually_created) # Update existing series if series_to_update: @@ -980,12 +1014,16 @@ def process_series_batch(account, batch, categories, relations, scan_start_time= series.logo = series._logo_to_update series.save(update_fields=['logo']) - # Update relations to reference the correct series objects + # Update relations to reference the correct series objects (with PKs) for relation in relations_to_create: if id(relation.series) in created_series: relation.series = created_series[id(relation.series)] - 
# Handle relations + for relation in relations_to_update: + if id(relation.series) in created_series: + relation.series = created_series[id(relation.series)] + + # All series now have PKs, safe to bulk create/update relations if relations_to_create: M3USeriesRelation.objects.bulk_create(relations_to_create, ignore_conflicts=True) diff --git a/docker/DispatcharrBase b/docker/DispatcharrBase index 8bda1ed9..aefbcfe2 100644 --- a/docker/DispatcharrBase +++ b/docker/DispatcharrBase @@ -4,7 +4,7 @@ ENV DEBIAN_FRONTEND=noninteractive ENV VIRTUAL_ENV=/dispatcharrpy ENV PATH="$VIRTUAL_ENV/bin:$PATH" -# --- Install Python 3.13 and system dependencies --- +# --- Install Python 3.13 and build dependencies --- # Note: Hardware acceleration (VA-API, VDPAU, NVENC) already included in base ffmpeg image RUN apt-get update && apt-get install --no-install-recommends -y \ ca-certificates software-properties-common gnupg2 curl wget \ @@ -13,18 +13,34 @@ RUN apt-get update && apt-get install --no-install-recommends -y \ && apt-get install --no-install-recommends -y \ python3.13 python3.13-dev python3.13-venv \ python-is-python3 python3-pip \ - libpcre3 libpcre3-dev libpq-dev procps \ - build-essential gcc pciutils \ + libpcre3 libpcre3-dev libpq-dev procps pciutils \ nginx streamlink comskip \ vlc-bin vlc-plugin-base \ - && apt-get clean && rm -rf /var/lib/apt/lists/* + build-essential gcc g++ gfortran libopenblas-dev libopenblas0 ninja-build # --- Create Python virtual environment --- RUN python3.13 -m venv $VIRTUAL_ENV && $VIRTUAL_ENV/bin/pip install --upgrade pip # --- Install Python dependencies --- COPY requirements.txt /tmp/requirements.txt -RUN $VIRTUAL_ENV/bin/pip install --no-cache-dir -r /tmp/requirements.txt && rm /tmp/requirements.txt +RUN $VIRTUAL_ENV/bin/pip install --no-cache-dir -r /tmp/requirements.txt && \ + rm /tmp/requirements.txt + +# --- Build legacy NumPy wheel for old hardware (store for runtime switching) --- +RUN $VIRTUAL_ENV/bin/pip install --no-cache-dir build && \ + cd /tmp && \ + $VIRTUAL_ENV/bin/pip download --no-binary numpy --no-deps numpy && \ + tar -xzf numpy-*.tar.gz && \ + cd numpy-*/ && \ + $VIRTUAL_ENV/bin/python -m build --wheel -Csetup-args=-Dcpu-baseline="none" -Csetup-args=-Dcpu-dispatch="none" && \ + mv dist/*.whl /opt/ && \ + cd / && rm -rf /tmp/numpy-* + +# --- Clean up build dependencies to reduce image size --- +RUN apt-get remove -y build-essential gcc g++ gfortran libopenblas-dev ninja-build && \ + apt-get autoremove -y --purge && \ + apt-get clean && \ + rm -rf /var/lib/apt/lists/* # --- Set up Redis 7.x --- RUN curl -fsSL https://packages.redis.io/gpg | gpg --dearmor -o /usr/share/keyrings/redis-archive-keyring.gpg && \ diff --git a/docker/Dockerfile b/docker/Dockerfile index dc437227..bfb35c11 100644 --- a/docker/Dockerfile +++ b/docker/Dockerfile @@ -35,9 +35,6 @@ RUN rm -rf /app/frontend # Copy built frontend assets COPY --from=frontend-builder /app/frontend/dist /app/frontend/dist -# Run Django collectstatic -RUN python manage.py collectstatic --noinput - # Add timestamp argument ARG TIMESTAMP diff --git a/docker/docker-compose.aio.yml b/docker/docker-compose.aio.yml index fe5e1507..2b1fd2ae 100644 --- a/docker/docker-compose.aio.yml +++ b/docker/docker-compose.aio.yml @@ -14,6 +14,10 @@ services: - REDIS_HOST=localhost - CELERY_BROKER_URL=redis://localhost:6379/0 - DISPATCHARR_LOG_LEVEL=info + # Legacy CPU Support (Optional) + # Uncomment to enable legacy NumPy build for older CPUs (circa 2009) + # that lack support for newer baseline CPU features + #- 
USE_LEGACY_NUMPY=true # Process Priority Configuration (Optional) # Lower values = higher priority. Range: -20 (highest) to 19 (lowest) # Negative values require cap_add: SYS_NICE (uncomment below) diff --git a/docker/docker-compose.debug.yml b/docker/docker-compose.debug.yml index d9dbef0e..c576cfd1 100644 --- a/docker/docker-compose.debug.yml +++ b/docker/docker-compose.debug.yml @@ -18,6 +18,10 @@ services: - REDIS_HOST=localhost - CELERY_BROKER_URL=redis://localhost:6379/0 - DISPATCHARR_LOG_LEVEL=trace + # Legacy CPU Support (Optional) + # Uncomment to enable legacy NumPy build for older CPUs (circa 2009) + # that lack support for newer baseline CPU features + #- USE_LEGACY_NUMPY=true # Process Priority Configuration (Optional) # Lower values = higher priority. Range: -20 (highest) to 19 (lowest) # Negative values require cap_add: SYS_NICE (uncomment below) diff --git a/docker/docker-compose.dev.yml b/docker/docker-compose.dev.yml index d1bb3680..b20c3296 100644 --- a/docker/docker-compose.dev.yml +++ b/docker/docker-compose.dev.yml @@ -17,6 +17,10 @@ services: - REDIS_HOST=localhost - CELERY_BROKER_URL=redis://localhost:6379/0 - DISPATCHARR_LOG_LEVEL=debug + # Legacy CPU Support (Optional) + # Uncomment to enable legacy NumPy build for older CPUs (circa 2009) + # that lack support for newer baseline CPU features + #- USE_LEGACY_NUMPY=true # Process Priority Configuration (Optional) # Lower values = higher priority. Range: -20 (highest) to 19 (lowest) # Negative values require cap_add: SYS_NICE (uncomment below) diff --git a/docker/docker-compose.yml b/docker/docker-compose.yml index aaa63990..e4093e4b 100644 --- a/docker/docker-compose.yml +++ b/docker/docker-compose.yml @@ -17,6 +17,10 @@ services: - REDIS_HOST=redis - CELERY_BROKER_URL=redis://redis:6379/0 - DISPATCHARR_LOG_LEVEL=info + # Legacy CPU Support (Optional) + # Uncomment to enable legacy NumPy build for older CPUs (circa 2009) + # that lack support for newer baseline CPU features + #- USE_LEGACY_NUMPY=true # Process Priority Configuration (Optional) # Lower values = higher priority. Range: -20 (highest) to 19 (lowest) # Negative values require cap_add: SYS_NICE (uncomment below) diff --git a/docker/entrypoint.sh b/docker/entrypoint.sh index 72eb5928..b28311fc 100755 --- a/docker/entrypoint.sh +++ b/docker/entrypoint.sh @@ -27,6 +27,13 @@ echo_with_timestamp() { echo "$(date '+%Y-%m-%d %H:%M:%S') - $1" } +# --- NumPy version switching for legacy hardware --- +if [ "$USE_LEGACY_NUMPY" = "true" ]; then + echo_with_timestamp "🔧 Switching to legacy NumPy (no CPU baseline)..." + /dispatcharrpy/bin/pip install --no-cache-dir --force-reinstall --no-deps /opt/numpy-*.whl + echo_with_timestamp "✅ Legacy NumPy installed" +fi + # Set PostgreSQL environment variables export POSTGRES_DB=${POSTGRES_DB:-dispatcharr} export POSTGRES_USER=${POSTGRES_USER:-dispatch} @@ -100,7 +107,7 @@ export POSTGRES_DIR=/data/db if [[ ! 
-f /etc/profile.d/dispatcharr.sh ]]; then # Define all variables to process variables=( - PATH VIRTUAL_ENV DJANGO_SETTINGS_MODULE PYTHONUNBUFFERED + PATH VIRTUAL_ENV DJANGO_SETTINGS_MODULE PYTHONUNBUFFERED PYTHONDONTWRITEBYTECODE POSTGRES_DB POSTGRES_USER POSTGRES_PASSWORD POSTGRES_HOST POSTGRES_PORT DISPATCHARR_ENV DISPATCHARR_DEBUG DISPATCHARR_LOG_LEVEL REDIS_HOST REDIS_DB POSTGRES_DIR DISPATCHARR_PORT @@ -174,9 +181,9 @@ else pids+=("$nginx_pid") fi -cd /app -python manage.py migrate --noinput -python manage.py collectstatic --noinput +# Run Django commands as non-root user to prevent permission issues +su - $POSTGRES_USER -c "cd /app && python manage.py migrate --noinput" +su - $POSTGRES_USER -c "cd /app && python manage.py collectstatic --noinput" # Select proper uwsgi config based on environment if [ "$DISPATCHARR_ENV" = "dev" ] && [ "$DISPATCHARR_DEBUG" != "true" ]; then diff --git a/docker/init/03-init-dispatcharr.sh b/docker/init/03-init-dispatcharr.sh index 03fe6816..0c317017 100644 --- a/docker/init/03-init-dispatcharr.sh +++ b/docker/init/03-init-dispatcharr.sh @@ -15,6 +15,7 @@ DATA_DIRS=( APP_DIRS=( "/app/logo_cache" "/app/media" + "/app/static" ) # Create all directories diff --git a/frontend/package-lock.json b/frontend/package-lock.json index 84d18989..ed9e6010 100644 --- a/frontend/package-lock.json +++ b/frontend/package-lock.json @@ -12,6 +12,7 @@ "@dnd-kit/modifiers": "^9.0.0", "@dnd-kit/sortable": "^10.0.0", "@dnd-kit/utilities": "^3.2.2", + "@hookform/resolvers": "^5.2.2", "@mantine/charts": "~8.0.1", "@mantine/core": "~8.0.1", "@mantine/dates": "~8.0.1", @@ -22,13 +23,13 @@ "@tanstack/react-table": "^8.21.2", "allotment": "^1.20.4", "dayjs": "^1.11.13", - "formik": "^2.4.6", "hls.js": "^1.5.20", "lucide-react": "^0.511.0", "mpegts.js": "^1.8.0", "react": "^19.1.0", "react-dom": "^19.1.0", "react-draggable": "^4.4.6", + "react-hook-form": "^7.70.0", "react-pro-sidebar": "^1.1.0", "react-router-dom": "^7.3.0", "react-virtualized": "^9.22.6", @@ -1248,6 +1249,18 @@ "integrity": "sha512-aGTxbpbg8/b5JfU1HXSrbH3wXZuLPJcNEcZQFMxLs3oSzgtVu6nFPkbbGGUvBcUjKV2YyB9Wxxabo+HEH9tcRQ==", "license": "MIT" }, + "node_modules/@hookform/resolvers": { + "version": "5.2.2", + "resolved": "https://registry.npmjs.org/@hookform/resolvers/-/resolvers-5.2.2.tgz", + "integrity": "sha512-A/IxlMLShx3KjV/HeTcTfaMxdwy690+L/ZADoeaTltLx+CVuzkeVIPuybK3jrRfw7YZnmdKsVVHAlEPIAEUNlA==", + "license": "MIT", + "dependencies": { + "@standard-schema/utils": "^0.3.0" + }, + "peerDependencies": { + "react-hook-form": "^7.55.0" + } + }, "node_modules/@humanfs/core": { "version": "0.19.1", "resolved": "https://registry.npmjs.org/@humanfs/core/-/core-0.19.1.tgz", @@ -1776,6 +1789,12 @@ "win32" ] }, + "node_modules/@standard-schema/utils": { + "version": "0.3.0", + "resolved": "https://registry.npmjs.org/@standard-schema/utils/-/utils-0.3.0.tgz", + "integrity": "sha512-e7Mew686owMaPJVNNLs55PUvgz371nKgwsc4vxE49zsODpJEnxgxRo2y/OKrqueavXgZNMDVj3DdHFlaSAeU8g==", + "license": "MIT" + }, "node_modules/@swc/core": { "name": "@swc/wasm", "version": "1.13.20", @@ -2008,18 +2027,6 @@ "dev": true, "license": "MIT" }, - "node_modules/@types/hoist-non-react-statics": { - "version": "3.3.7", - "resolved": "https://registry.npmjs.org/@types/hoist-non-react-statics/-/hoist-non-react-statics-3.3.7.tgz", - "integrity": "sha512-PQTyIulDkIDro8P+IHbKCsw7U2xxBYflVzW/FgWdCAePD9xGSidgA76/GeJ6lBKoblyhf9pBY763gbrN+1dI8g==", - "license": "MIT", - "dependencies": { - "hoist-non-react-statics": "^3.3.0" - }, - "peerDependencies": { - 
"@types/react": "*" - } - }, "node_modules/@types/json-schema": { "version": "7.0.15", "resolved": "https://registry.npmjs.org/@types/json-schema/-/json-schema-7.0.15.tgz", @@ -2037,6 +2044,7 @@ "version": "19.2.7", "resolved": "https://registry.npmjs.org/@types/react/-/react-19.2.7.tgz", "integrity": "sha512-MWtvHrGZLFttgeEj28VXHxpmwYbor/ATPYbBfSFZEIRK0ecCFLl2Qo55z52Hss+UV9CRN7trSeq1zbgx7YDWWg==", + "devOptional": true, "license": "MIT", "dependencies": { "csstype": "^3.2.2" @@ -2833,15 +2841,6 @@ "dev": true, "license": "MIT" }, - "node_modules/deepmerge": { - "version": "2.2.1", - "resolved": "https://registry.npmjs.org/deepmerge/-/deepmerge-2.2.1.tgz", - "integrity": "sha512-R9hc1Xa/NOBi9WRVUWg19rl1UB7Tt4kuPd+thNJgFZoxXsTz7ncaPaeIm+40oSGuP33DfMb4sZt1QIGiJzC4EA==", - "license": "MIT", - "engines": { - "node": ">=0.10.0" - } - }, "node_modules/dequal": { "version": "2.0.3", "resolved": "https://registry.npmjs.org/dequal/-/dequal-2.0.3.tgz", @@ -3288,31 +3287,6 @@ "dev": true, "license": "ISC" }, - "node_modules/formik": { - "version": "2.4.9", - "resolved": "https://registry.npmjs.org/formik/-/formik-2.4.9.tgz", - "integrity": "sha512-5nI94BMnlFDdQRBY4Sz39WkhxajZJ57Fzs8wVbtsQlm5ScKIR1QLYqv/ultBnobObtlUyxpxoLodpixrsf36Og==", - "funding": [ - { - "type": "individual", - "url": "https://opencollective.com/formik" - } - ], - "license": "Apache-2.0", - "dependencies": { - "@types/hoist-non-react-statics": "^3.3.1", - "deepmerge": "^2.1.1", - "hoist-non-react-statics": "^3.3.0", - "lodash": "^4.17.21", - "lodash-es": "^4.17.21", - "react-fast-compare": "^2.0.1", - "tiny-warning": "^1.0.2", - "tslib": "^2.0.0" - }, - "peerDependencies": { - "react": ">=16.8.0" - } - }, "node_modules/fsevents": { "version": "2.3.3", "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.3.tgz", @@ -3751,12 +3725,6 @@ "integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==", "license": "MIT" }, - "node_modules/lodash-es": { - "version": "4.17.22", - "resolved": "https://registry.npmjs.org/lodash-es/-/lodash-es-4.17.22.tgz", - "integrity": "sha512-XEawp1t0gxSi9x01glktRZ5HDy0HXqrM0x5pXQM98EaI0NxO6jVM7omDOxsuEo5UIASAnm2bRp1Jt/e0a2XU8Q==", - "license": "MIT" - }, "node_modules/lodash.clamp": { "version": "4.0.3", "resolved": "https://registry.npmjs.org/lodash.clamp/-/lodash.clamp-4.0.3.tgz", @@ -4334,11 +4302,21 @@ "react": ">= 16.8 || 18.0.0" } }, - "node_modules/react-fast-compare": { - "version": "2.0.4", - "resolved": "https://registry.npmjs.org/react-fast-compare/-/react-fast-compare-2.0.4.tgz", - "integrity": "sha512-suNP+J1VU1MWFKcyt7RtjiSWUjvidmQSlqu+eHslq+342xCbGTYmC0mEhPCOHxlW0CywylOC1u2DFAT+bv4dBw==", - "license": "MIT" + "node_modules/react-hook-form": { + "version": "7.70.0", + "resolved": "https://registry.npmjs.org/react-hook-form/-/react-hook-form-7.70.0.tgz", + "integrity": "sha512-COOMajS4FI3Wuwrs3GPpi/Jeef/5W1DRR84Yl5/ShlT3dKVFUfoGiEZ/QE6Uw8P4T2/CLJdcTVYKvWBMQTEpvw==", + "license": "MIT", + "engines": { + "node": ">=18.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/react-hook-form" + }, + "peerDependencies": { + "react": "^16.8.0 || ^17 || ^18 || ^19" + } }, "node_modules/react-is": { "version": "16.13.1", @@ -4923,12 +4901,6 @@ "integrity": "sha512-+FbBPE1o9QAYvviau/qC5SE3caw21q3xkvWKBtja5vgqOWIHHJ3ioaq1VPfn/Szqctz2bU/oYeKd9/z5BL+PVg==", "license": "MIT" }, - "node_modules/tiny-warning": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/tiny-warning/-/tiny-warning-1.0.3.tgz", - 
"integrity": "sha512-lBN9zLN/oAf68o3zNXYrdCt1kP8WsiGW8Oo2ka41b2IM5JL/S1CTyX1rW0mb/zSuJun0ZUrDxx4sqvYS2FWzPA==", - "license": "MIT" - }, "node_modules/tinybench": { "version": "2.9.0", "resolved": "https://registry.npmjs.org/tinybench/-/tinybench-2.9.0.tgz", diff --git a/frontend/package.json b/frontend/package.json index ff5be72d..7b2d5927 100644 --- a/frontend/package.json +++ b/frontend/package.json @@ -23,11 +23,12 @@ "@mantine/form": "~8.0.1", "@mantine/hooks": "~8.0.1", "@mantine/notifications": "~8.0.1", + "@hookform/resolvers": "^5.2.2", "@tanstack/react-table": "^8.21.2", "allotment": "^1.20.4", "dayjs": "^1.11.13", - "formik": "^2.4.6", "hls.js": "^1.5.20", + "react-hook-form": "^7.70.0", "lucide-react": "^0.511.0", "mpegts.js": "^1.8.0", "react": "^19.1.0", diff --git a/frontend/src/components/forms/Channel.jsx b/frontend/src/components/forms/Channel.jsx index cc6c5f47..4f12dd2c 100644 --- a/frontend/src/components/forms/Channel.jsx +++ b/frontend/src/components/forms/Channel.jsx @@ -1,5 +1,6 @@ import React, { useState, useEffect, useRef, useMemo } from 'react'; -import { useFormik } from 'formik'; +import { useForm } from 'react-hook-form'; +import { yupResolver } from '@hookform/resolvers/yup'; import * as Yup from 'yup'; import useChannelsStore from '../../store/channels'; import API from '../../api'; @@ -42,6 +43,11 @@ import useEPGsStore from '../../store/epgs'; import { FixedSizeList as List } from 'react-window'; import { USER_LEVELS, USER_LEVEL_LABELS } from '../../constants'; +const validationSchema = Yup.object({ + name: Yup.string().required('Name is required'), + channel_group_id: Yup.string().required('Channel group is required'), +}); + const ChannelForm = ({ channel = null, isOpen, onClose }) => { const theme = useMantineTheme(); @@ -100,7 +106,7 @@ const ChannelForm = ({ channel = null, isOpen, onClose }) => { const handleLogoSuccess = ({ logo }) => { if (logo && logo.id) { - formik.setFieldValue('logo_id', logo.id); + setValue('logo_id', logo.id); ensureLogosLoaded(); // Refresh logos } setLogoModalOpen(false); @@ -124,7 +130,7 @@ const ChannelForm = ({ channel = null, isOpen, onClose }) => { if (response.matched) { // Update the form with the new EPG data if (response.channel && response.channel.epg_data_id) { - formik.setFieldValue('epg_data_id', response.channel.epg_data_id); + setValue('epg_data_id', response.channel.epg_data_id); } notifications.show({ @@ -152,7 +158,7 @@ const ChannelForm = ({ channel = null, isOpen, onClose }) => { }; const handleSetNameFromEpg = () => { - const epgDataId = formik.values.epg_data_id; + const epgDataId = watch('epg_data_id'); if (!epgDataId) { notifications.show({ title: 'No EPG Selected', @@ -164,7 +170,7 @@ const ChannelForm = ({ channel = null, isOpen, onClose }) => { const tvg = tvgsById[epgDataId]; if (tvg && tvg.name) { - formik.setFieldValue('name', tvg.name); + setValue('name', tvg.name); notifications.show({ title: 'Success', message: `Channel name set to "${tvg.name}"`, @@ -180,7 +186,7 @@ const ChannelForm = ({ channel = null, isOpen, onClose }) => { }; const handleSetLogoFromEpg = async () => { - const epgDataId = formik.values.epg_data_id; + const epgDataId = watch('epg_data_id'); if (!epgDataId) { notifications.show({ title: 'No EPG Selected', @@ -207,7 +213,7 @@ const ChannelForm = ({ channel = null, isOpen, onClose }) => { ); if (matchingLogo) { - formik.setFieldValue('logo_id', matchingLogo.id); + setValue('logo_id', matchingLogo.id); notifications.show({ title: 'Success', message: `Logo set to 
"${matchingLogo.name}"`, @@ -231,7 +237,7 @@ const ChannelForm = ({ channel = null, isOpen, onClose }) => { // Create logo by calling the Logo API directly const newLogo = await API.createLogo(newLogoData); - formik.setFieldValue('logo_id', newLogo.id); + setValue('logo_id', newLogo.id); notifications.update({ id: 'creating-logo', @@ -264,7 +270,7 @@ const ChannelForm = ({ channel = null, isOpen, onClose }) => { }; const handleSetTvgIdFromEpg = () => { - const epgDataId = formik.values.epg_data_id; + const epgDataId = watch('epg_data_id'); if (!epgDataId) { notifications.show({ title: 'No EPG Selected', @@ -276,7 +282,7 @@ const ChannelForm = ({ channel = null, isOpen, onClose }) => { const tvg = tvgsById[epgDataId]; if (tvg && tvg.tvg_id) { - formik.setFieldValue('tvg_id', tvg.tvg_id); + setValue('tvg_id', tvg.tvg_id); notifications.show({ title: 'Success', message: `TVG-ID set to "${tvg.tvg_id}"`, @@ -291,130 +297,130 @@ const ChannelForm = ({ channel = null, isOpen, onClose }) => { } }; - const formik = useFormik({ - initialValues: { - name: '', - channel_number: '', // Change from 0 to empty string for consistency - channel_group_id: - Object.keys(channelGroups).length > 0 + const defaultValues = useMemo( + () => ({ + name: channel?.name || '', + channel_number: + channel?.channel_number !== null && + channel?.channel_number !== undefined + ? channel.channel_number + : '', + channel_group_id: channel?.channel_group_id + ? `${channel.channel_group_id}` + : Object.keys(channelGroups).length > 0 ? Object.keys(channelGroups)[0] : '', - stream_profile_id: '0', - tvg_id: '', - tvc_guide_stationid: '', - epg_data_id: '', - logo_id: '', - user_level: '0', - }, - validationSchema: Yup.object({ - name: Yup.string().required('Name is required'), - channel_group_id: Yup.string().required('Channel group is required'), + stream_profile_id: channel?.stream_profile_id + ? `${channel.stream_profile_id}` + : '0', + tvg_id: channel?.tvg_id || '', + tvc_guide_stationid: channel?.tvc_guide_stationid || '', + epg_data_id: channel?.epg_data_id ?? '', + logo_id: channel?.logo_id ? `${channel.logo_id}` : '', + user_level: `${channel?.user_level ?? '0'}`, }), - onSubmit: async (values, { setSubmitting }) => { - let response; + [channel, channelGroups] + ); - try { - const formattedValues = { ...values }; + const { + register, + handleSubmit, + setValue, + watch, + reset, + formState: { errors, isSubmitting }, + } = useForm({ + defaultValues, + resolver: yupResolver(validationSchema), + }); - // Convert empty or "0" stream_profile_id to null for the API - if ( - !formattedValues.stream_profile_id || - formattedValues.stream_profile_id === '0' - ) { - formattedValues.stream_profile_id = null; - } + const onSubmit = async (values) => { + let response; - // Ensure tvg_id is properly included (no empty strings) - formattedValues.tvg_id = formattedValues.tvg_id || null; + try { + const formattedValues = { ...values }; - // Ensure tvc_guide_stationid is properly included (no empty strings) - formattedValues.tvc_guide_stationid = - formattedValues.tvc_guide_stationid || null; + // Convert empty or "0" stream_profile_id to null for the API + if ( + !formattedValues.stream_profile_id || + formattedValues.stream_profile_id === '0' + ) { + formattedValues.stream_profile_id = null; + } - if (channel) { - // If there's an EPG to set, use our enhanced endpoint - if (values.epg_data_id !== (channel.epg_data_id ?? 
'')) { - // Use the special endpoint to set EPG and trigger refresh - const epgResponse = await API.setChannelEPG( - channel.id, - values.epg_data_id - ); + // Ensure tvg_id is properly included (no empty strings) + formattedValues.tvg_id = formattedValues.tvg_id || null; - // Remove epg_data_id from values since we've handled it separately - const { epg_data_id, ...otherValues } = formattedValues; + // Ensure tvc_guide_stationid is properly included (no empty strings) + formattedValues.tvc_guide_stationid = + formattedValues.tvc_guide_stationid || null; - // Update other channel fields if needed - if (Object.keys(otherValues).length > 0) { - response = await API.updateChannel({ - id: channel.id, - ...otherValues, - streams: channelStreams.map((stream) => stream.id), - }); - } - } else { - // No EPG change, regular update + if (channel) { + // If there's an EPG to set, use our enhanced endpoint + if (values.epg_data_id !== (channel.epg_data_id ?? '')) { + // Use the special endpoint to set EPG and trigger refresh + const epgResponse = await API.setChannelEPG( + channel.id, + values.epg_data_id + ); + + // Remove epg_data_id from values since we've handled it separately + const { epg_data_id, ...otherValues } = formattedValues; + + // Update other channel fields if needed + if (Object.keys(otherValues).length > 0) { response = await API.updateChannel({ id: channel.id, - ...formattedValues, + ...otherValues, streams: channelStreams.map((stream) => stream.id), }); } } else { - // New channel creation - use the standard method - response = await API.addChannel({ + // No EPG change, regular update + response = await API.updateChannel({ + id: channel.id, ...formattedValues, streams: channelStreams.map((stream) => stream.id), }); } - } catch (error) { - console.error('Error saving channel:', error); + } else { + // New channel creation - use the standard method + response = await API.addChannel({ + ...formattedValues, + streams: channelStreams.map((stream) => stream.id), + }); } + } catch (error) { + console.error('Error saving channel:', error); + } - formik.resetForm(); - API.requeryChannels(); + reset(); + API.requeryChannels(); - // Refresh channel profiles to update the membership information - useChannelsStore.getState().fetchChannelProfiles(); + // Refresh channel profiles to update the membership information + useChannelsStore.getState().fetchChannelProfiles(); - setSubmitting(false); - setTvgFilter(''); - setLogoFilter(''); - onClose(); - }, - }); + setTvgFilter(''); + setLogoFilter(''); + onClose(); + }; useEffect(() => { - if (channel) { - if (channel.epg_data_id) { - const epgSource = epgs[tvgsById[channel.epg_data_id]?.epg_source]; - setSelectedEPG(epgSource ? `${epgSource.id}` : ''); - } + reset(defaultValues); + setChannelStreams(channel?.streams || []); - formik.setValues({ - name: channel.name || '', - channel_number: - channel.channel_number !== null ? channel.channel_number : '', - channel_group_id: channel.channel_group_id - ? `${channel.channel_group_id}` - : '', - stream_profile_id: channel.stream_profile_id - ? `${channel.stream_profile_id}` - : '0', - tvg_id: channel.tvg_id || '', - tvc_guide_stationid: channel.tvc_guide_stationid || '', - epg_data_id: channel.epg_data_id ?? '', - logo_id: channel.logo_id ? `${channel.logo_id}` : '', - user_level: `${channel.user_level}`, - }); - - setChannelStreams(channel.streams || []); + if (channel?.epg_data_id) { + const epgSource = epgs[tvgsById[channel.epg_data_id]?.epg_source]; + setSelectedEPG(epgSource ? 
`${epgSource.id}` : ''); } else { - formik.resetForm(); + setSelectedEPG(''); + } + + if (!channel) { setTvgFilter(''); setLogoFilter(''); - setChannelStreams([]); // Ensure streams are cleared when adding a new channel } - }, [channel, tvgsById, channelGroups]); + }, [defaultValues, channel, reset, epgs, tvgsById]); // Memoize logo options to prevent infinite re-renders during background loading const logoOptions = useMemo(() => { @@ -431,10 +437,7 @@ const ChannelForm = ({ channel = null, isOpen, onClose }) => { // If a new group was created and returned, update the form with it if (newGroup && newGroup.id) { // Preserve all current form values while updating just the channel_group_id - formik.setValues({ - ...formik.values, - channel_group_id: `${newGroup.id}`, - }); + setValue('channel_group_id', `${newGroup.id}`); } }; @@ -472,7 +475,7 @@ const ChannelForm = ({ channel = null, isOpen, onClose }) => { } styles={{ content: { '--mantine-color-body': '#27272A' } }} > -
+ { label={ Channel Name - {formik.values.epg_data_id && ( + {watch('epg_data_id') && ( @@ -933,7 +905,7 @@ const ChannelForm = ({ channel = null, isOpen, onClose }) => { } readOnly value={(() => { - const tvg = tvgsById[formik.values.epg_data_id]; + const tvg = tvgsById[watch('epg_data_id')]; const epgSource = tvg && epgs[tvg.epg_source]; const tvgLabel = tvg ? tvg.name || tvg.id : ''; if (epgSource && tvgLabel) { @@ -953,7 +925,7 @@ const ChannelForm = ({ channel = null, isOpen, onClose }) => { color="white" onClick={(e) => { e.stopPropagation(); - formik.setFieldValue('epg_data_id', null); + setValue('epg_data_id', null); }} title="Create new group" size="small" @@ -1012,12 +984,9 @@ const ChannelForm = ({ channel = null, isOpen, onClose }) => { size="xs" onClick={() => { if (filteredTvgs[index].id == '0') { - formik.setFieldValue('epg_data_id', null); + setValue('epg_data_id', null); } else { - formik.setFieldValue( - 'epg_data_id', - filteredTvgs[index].id - ); + setValue('epg_data_id', filteredTvgs[index].id); // Also update selectedEPG to match the EPG source of the selected tvg if (filteredTvgs[index].epg_source) { setSelectedEPG( @@ -1047,11 +1016,11 @@ const ChannelForm = ({ channel = null, isOpen, onClose }) => { diff --git a/frontend/src/components/forms/LiveGroupFilter.jsx b/frontend/src/components/forms/LiveGroupFilter.jsx index ef68bee8..b6e6494c 100644 --- a/frontend/src/components/forms/LiveGroupFilter.jsx +++ b/frontend/src/components/forms/LiveGroupFilter.jsx @@ -369,7 +369,8 @@ const LiveGroupFilter = ({ if ( group.custom_properties?.custom_epg_id !== undefined || - group.custom_properties?.force_dummy_epg + group.custom_properties?.force_dummy_epg || + group.custom_properties?.force_epg_selected ) { selectedValues.push('force_epg'); } @@ -432,23 +433,20 @@ const LiveGroupFilter = ({ // Handle force_epg if (selectedOptions.includes('force_epg')) { - // Migrate from old force_dummy_epg if present + // Set default to force_dummy_epg if no EPG settings exist yet if ( - newCustomProps.force_dummy_epg && - newCustomProps.custom_epg_id === undefined + newCustomProps.custom_epg_id === + undefined && + !newCustomProps.force_dummy_epg ) { - // Migrate: force_dummy_epg=true becomes custom_epg_id=null - newCustomProps.custom_epg_id = null; - delete newCustomProps.force_dummy_epg; - } else if ( - newCustomProps.custom_epg_id === undefined - ) { - // New configuration: initialize with null (no EPG/default dummy) - newCustomProps.custom_epg_id = null; + // Default to "No EPG (Disabled)" + newCustomProps.force_dummy_epg = true; } } else { - // Only remove custom_epg_id when deselected + // Remove all EPG settings when deselected delete newCustomProps.custom_epg_id; + delete newCustomProps.force_dummy_epg; + delete newCustomProps.force_epg_selected; } // Handle group_override @@ -1124,7 +1122,8 @@ const LiveGroupFilter = ({ {/* Show EPG selector when force_epg is selected */} {(group.custom_properties?.custom_epg_id !== undefined || - group.custom_properties?.force_dummy_epg) && ( + group.custom_properties?.force_dummy_epg || + group.custom_properties?.force_epg_selected) && ( { - // Handle migration from force_dummy_epg + // Show custom EPG if set if ( group.custom_properties?.custom_epg_id !== - undefined + undefined && + group.custom_properties?.custom_epg_id !== null ) { - // Convert to string, use '0' for null/no EPG - return group.custom_properties.custom_epg_id === - null - ? 
'0' - : group.custom_properties.custom_epg_id.toString(); - } else if ( - group.custom_properties?.force_dummy_epg - ) { - // Show "No EPG" for old force_dummy_epg configs + return group.custom_properties.custom_epg_id.toString(); + } + // Show "No EPG" if force_dummy_epg is set + if (group.custom_properties?.force_dummy_epg) { return '0'; } - return '0'; + // Otherwise show empty/placeholder + return null; })()} onChange={(value) => { - // Convert back: '0' means no EPG (null) - const newValue = - value === '0' ? null : parseInt(value); - setGroupStates( - groupStates.map((state) => { - if ( - state.channel_group === group.channel_group - ) { - return { - ...state, - custom_properties: { + if (value === '0') { + // "No EPG (Disabled)" selected - use force_dummy_epg + setGroupStates( + groupStates.map((state) => { + if ( + state.channel_group === + group.channel_group + ) { + const newProps = { ...state.custom_properties, - custom_epg_id: newValue, - }, - }; - } - return state; - }) - ); + }; + delete newProps.custom_epg_id; + delete newProps.force_epg_selected; + newProps.force_dummy_epg = true; + return { + ...state, + custom_properties: newProps, + }; + } + return state; + }) + ); + } else if (value) { + // Specific EPG source selected + const epgId = parseInt(value); + setGroupStates( + groupStates.map((state) => { + if ( + state.channel_group === + group.channel_group + ) { + const newProps = { + ...state.custom_properties, + }; + newProps.custom_epg_id = epgId; + delete newProps.force_dummy_epg; + delete newProps.force_epg_selected; + return { + ...state, + custom_properties: newProps, + }; + } + return state; + }) + ); + } else { + // Cleared - remove all EPG settings + setGroupStates( + groupStates.map((state) => { + if ( + state.channel_group === + group.channel_group + ) { + const newProps = { + ...state.custom_properties, + }; + delete newProps.custom_epg_id; + delete newProps.force_dummy_epg; + delete newProps.force_epg_selected; + return { + ...state, + custom_properties: newProps, + }; + } + return state; + }) + ); + } }} data={[ { value: '0', label: 'No EPG (Disabled)' }, diff --git a/frontend/src/components/forms/Logo.jsx b/frontend/src/components/forms/Logo.jsx index 8362b891..c6e63ba6 100644 --- a/frontend/src/components/forms/Logo.jsx +++ b/frontend/src/components/forms/Logo.jsx @@ -1,5 +1,6 @@ -import React, { useState, useEffect } from 'react'; -import { useFormik } from 'formik'; +import React, { useState, useEffect, useMemo } from 'react'; +import { useForm } from 'react-hook-form'; +import { yupResolver } from '@hookform/resolvers/yup'; import * as Yup from 'yup'; import { Modal, @@ -18,143 +19,148 @@ import { Upload, FileImage, X } from 'lucide-react'; import { notifications } from '@mantine/notifications'; import API from '../../api'; +const schema = Yup.object({ + name: Yup.string().required('Name is required'), + url: Yup.string() + .required('URL is required') + .test( + 'valid-url-or-path', + 'Must be a valid URL or local file path', + (value) => { + if (!value) return false; + // Allow local file paths starting with /data/logos/ + if (value.startsWith('/data/logos/')) return true; + // Allow valid URLs + try { + new URL(value); + return true; + } catch { + return false; + } + } + ), +}); + const LogoForm = ({ logo = null, isOpen, onClose, onSuccess }) => { const [logoPreview, setLogoPreview] = useState(null); const [uploading, setUploading] = useState(false); const [selectedFile, setSelectedFile] = useState(null); // Store selected file - const formik = 
useFormik({ - initialValues: { - name: '', - url: '', - }, - validationSchema: Yup.object({ - name: Yup.string().required('Name is required'), - url: Yup.string() - .required('URL is required') - .test( - 'valid-url-or-path', - 'Must be a valid URL or local file path', - (value) => { - if (!value) return false; - // Allow local file paths starting with /data/logos/ - if (value.startsWith('/data/logos/')) return true; - // Allow valid URLs - try { - new URL(value); - return true; - } catch { - return false; - } - } - ), + const defaultValues = useMemo( + () => ({ + name: logo?.name || '', + url: logo?.url || '', }), - onSubmit: async (values, { setSubmitting }) => { - try { - setUploading(true); - let uploadResponse = null; // Store upload response for later use + [logo] + ); - // If we have a selected file, upload it first - if (selectedFile) { - try { - uploadResponse = await API.uploadLogo(selectedFile, values.name); - // Use the uploaded file data instead of form values - values.name = uploadResponse.name; - values.url = uploadResponse.url; - } catch (uploadError) { - let errorMessage = 'Failed to upload logo file'; - - if ( - uploadError.code === 'NETWORK_ERROR' || - uploadError.message?.includes('timeout') - ) { - errorMessage = 'Upload timed out. Please try again.'; - } else if (uploadError.status === 413) { - errorMessage = 'File too large. Please choose a smaller file.'; - } else if (uploadError.body?.error) { - errorMessage = uploadError.body.error; - } - - notifications.show({ - title: 'Upload Error', - message: errorMessage, - color: 'red', - }); - return; // Don't proceed with creation if upload fails - } - } - - // Now create or update the logo with the final values - // Only proceed if we don't already have a logo from file upload - if (logo) { - const updatedLogo = await API.updateLogo(logo.id, values); - notifications.show({ - title: 'Success', - message: 'Logo updated successfully', - color: 'green', - }); - onSuccess?.({ type: 'update', logo: updatedLogo }); // Call onSuccess for updates - } else if (!selectedFile) { - // Only create a new logo entry if we're not uploading a file - // (file upload already created the logo entry) - const newLogo = await API.createLogo(values); - notifications.show({ - title: 'Success', - message: 'Logo created successfully', - color: 'green', - }); - onSuccess?.({ type: 'create', logo: newLogo }); // Call onSuccess for creates - } else { - // File was uploaded and logo was already created - notifications.show({ - title: 'Success', - message: 'Logo uploaded successfully', - color: 'green', - }); - onSuccess?.({ type: 'create', logo: uploadResponse }); - } - onClose(); - } catch (error) { - let errorMessage = logo - ? 'Failed to update logo' - : 'Failed to create logo'; - - // Handle specific timeout errors - if ( - error.code === 'NETWORK_ERROR' || - error.message?.includes('timeout') - ) { - errorMessage = 'Request timed out. 
Please try again.'; - } else if (error.response?.data?.error) { - errorMessage = error.response.data.error; - } - - notifications.show({ - title: 'Error', - message: errorMessage, - color: 'red', - }); - } finally { - setSubmitting(false); - setUploading(false); - } - }, + const { + register, + handleSubmit, + formState: { errors, isSubmitting }, + reset, + setValue, + watch, + } = useForm({ + defaultValues, + resolver: yupResolver(schema), }); - useEffect(() => { - if (logo) { - formik.setValues({ - name: logo.name || '', - url: logo.url || '', + const onSubmit = async (values) => { + try { + setUploading(true); + let uploadResponse = null; // Store upload response for later use + + // If we have a selected file, upload it first + if (selectedFile) { + try { + uploadResponse = await API.uploadLogo(selectedFile, values.name); + // Use the uploaded file data instead of form values + values.name = uploadResponse.name; + values.url = uploadResponse.url; + } catch (uploadError) { + let errorMessage = 'Failed to upload logo file'; + + if ( + uploadError.code === 'NETWORK_ERROR' || + uploadError.message?.includes('timeout') + ) { + errorMessage = 'Upload timed out. Please try again.'; + } else if (uploadError.status === 413) { + errorMessage = 'File too large. Please choose a smaller file.'; + } else if (uploadError.body?.error) { + errorMessage = uploadError.body.error; + } + + notifications.show({ + title: 'Upload Error', + message: errorMessage, + color: 'red', + }); + return; // Don't proceed with creation if upload fails + } + } + + // Now create or update the logo with the final values + // Only proceed if we don't already have a logo from file upload + if (logo) { + const updatedLogo = await API.updateLogo(logo.id, values); + notifications.show({ + title: 'Success', + message: 'Logo updated successfully', + color: 'green', + }); + onSuccess?.({ type: 'update', logo: updatedLogo }); // Call onSuccess for updates + } else if (!selectedFile) { + // Only create a new logo entry if we're not uploading a file + // (file upload already created the logo entry) + const newLogo = await API.createLogo(values); + notifications.show({ + title: 'Success', + message: 'Logo created successfully', + color: 'green', + }); + onSuccess?.({ type: 'create', logo: newLogo }); // Call onSuccess for creates + } else { + // File was uploaded and logo was already created + notifications.show({ + title: 'Success', + message: 'Logo uploaded successfully', + color: 'green', + }); + onSuccess?.({ type: 'create', logo: uploadResponse }); + } + onClose(); + } catch (error) { + let errorMessage = logo + ? 'Failed to update logo' + : 'Failed to create logo'; + + // Handle specific timeout errors + if ( + error.code === 'NETWORK_ERROR' || + error.message?.includes('timeout') + ) { + errorMessage = 'Request timed out. 
Please try again.'; + } else if (error.response?.data?.error) { + errorMessage = error.response.data.error; + } + + notifications.show({ + title: 'Error', + message: errorMessage, + color: 'red', }); - setLogoPreview(logo.cache_url); - } else { - formik.resetForm(); - setLogoPreview(null); + } finally { + setUploading(false); } - // Clear any selected file when logo changes + }; + + useEffect(() => { + reset(defaultValues); + setLogoPreview(logo?.cache_url || null); setSelectedFile(null); - }, [logo, isOpen]); + }, [defaultValues, logo, reset]); const handleFileSelect = (files) => { if (files.length === 0) return; @@ -180,18 +186,19 @@ const LogoForm = ({ logo = null, isOpen, onClose, onSuccess }) => { setLogoPreview(previewUrl); // Auto-fill the name field if empty - if (!formik.values.name) { + const currentName = watch('name'); + if (!currentName) { const nameWithoutExtension = file.name.replace(/\.[^/.]+$/, ''); - formik.setFieldValue('name', nameWithoutExtension); + setValue('name', nameWithoutExtension); } // Set a placeholder URL (will be replaced after upload) - formik.setFieldValue('url', 'file://pending-upload'); + setValue('url', 'file://pending-upload'); }; const handleUrlChange = (event) => { const url = event.target.value; - formik.setFieldValue('url', url); + setValue('url', url); // Clear any selected file when manually entering URL if (selectedFile) { @@ -219,7 +226,7 @@ const LogoForm = ({ logo = null, isOpen, onClose, onSuccess }) => { const filename = pathname.substring(pathname.lastIndexOf('/') + 1); const nameWithoutExtension = filename.replace(/\.[^/.]+$/, ''); if (nameWithoutExtension) { - formik.setFieldValue('name', nameWithoutExtension); + setValue('name', nameWithoutExtension); } } catch (error) { // If the URL is invalid, do nothing. @@ -244,7 +251,7 @@ const LogoForm = ({ logo = null, isOpen, onClose, onSuccess }) => { title={logo ? 'Edit Logo' : 'Add Logo'} size="md" > -
+
           {/* Logo Preview */}
           {logoPreview && (
@@ -338,18 +345,18 @@ const LogoForm = ({ logo = null, isOpen, onClose, onSuccess }) => {
           {selectedFile && (
@@ -363,7 +370,7 @@ const LogoForm = ({ logo = null, isOpen, onClose, onSuccess }) => {
-
diff --git a/frontend/src/components/forms/M3UGroupFilter.jsx b/frontend/src/components/forms/M3UGroupFilter.jsx
index 542fc88a..0a7dc224 100644
--- a/frontend/src/components/forms/M3UGroupFilter.jsx
+++ b/frontend/src/components/forms/M3UGroupFilter.jsx
@@ -1,6 +1,5 @@
 // Modal.js
 import React, { useState, useEffect, forwardRef } from 'react';
-import { useFormik } from 'formik';
 import * as Yup from 'yup';
 import API from '../../api';
 import M3UProfiles from './M3UProfiles';
diff --git a/frontend/src/components/forms/M3UProfile.jsx b/frontend/src/components/forms/M3UProfile.jsx
index b225ec38..025d3cae 100644
--- a/frontend/src/components/forms/M3UProfile.jsx
+++ b/frontend/src/components/forms/M3UProfile.jsx
@@ -1,5 +1,6 @@
-import React, { useState, useEffect } from 'react';
-import { useFormik } from 'formik';
+import React, { useState, useEffect, useMemo } from 'react';
+import { useForm } from 'react-hook-form';
+import { yupResolver } from '@hookform/resolvers/yup';
 import * as Yup from 'yup';
 import API from '../../api';
 import {
@@ -31,6 +32,89 @@ const RegexFormAndView = ({ profile = null, m3u, isOpen, onClose }) => {
   const [sampleInput, setSampleInput] = useState('');
   const isDefaultProfile = profile?.is_default;
 
+  const defaultValues = useMemo(
+    () => ({
+      name: profile?.name || '',
+      max_streams: profile?.max_streams || 0,
+      search_pattern: profile?.search_pattern || '',
+      replace_pattern: profile?.replace_pattern || '',
+      notes: profile?.custom_properties?.notes || '',
+    }),
+    [profile]
+  );
+
+  const schema = Yup.object({
+    name: Yup.string().required('Name is required'),
+    search_pattern: Yup.string().when([], {
+      is: () => !isDefaultProfile,
+      then: (schema) => schema.required('Search pattern is required'),
+      otherwise: (schema) => schema.notRequired(),
+    }),
+    replace_pattern: Yup.string().when([], {
+      is: () => !isDefaultProfile,
+      then: (schema) => schema.required('Replace pattern is required'),
+      otherwise: (schema) => schema.notRequired(),
+    }),
+    notes: Yup.string(), // Optional field
+  });
+
+  const {
+    register,
+    handleSubmit,
+    formState: { errors, isSubmitting },
+    reset,
+    setValue,
+    watch,
+  } = useForm({
+    defaultValues,
+    resolver: yupResolver(schema),
+  });
+
+  const onSubmit = async (values) => {
+    console.log('submitting');
+
+    // For default profiles, only send name and custom_properties (notes)
+    let submitValues;
+    if (isDefaultProfile) {
+      submitValues = {
+        name: values.name,
+        custom_properties: {
+          // Preserve existing custom_properties and add/update notes
+          ...(profile?.custom_properties || {}),
+          notes: values.notes || '',
+        },
+      };
+    } else {
+      // For regular profiles, send all fields
+      submitValues = {
+        name: values.name,
+        max_streams: values.max_streams,
+        search_pattern: values.search_pattern,
+        replace_pattern: values.replace_pattern,
+        custom_properties: {
+          // Preserve existing custom_properties and add/update notes
+          ...(profile?.custom_properties || {}),
+          notes: values.notes || '',
+        },
+      };
+    }
+
+    if (profile?.id) {
+      await API.updateM3UProfile(m3u.id, {
+        id: profile.id,
+        ...submitValues,
+      });
+    } else {
+      await API.addM3UProfile(m3u.id, submitValues);
+    }
+
+    reset();
+    // Reset local state to sync with form reset
+    setSearchPattern('');
+    setReplacePattern('');
+    onClose();
+  };
+
useEffect(() => { async function fetchStreamUrl() { try { @@ -79,99 +163,22 @@ const RegexFormAndView = ({ profile = null, m3u, isOpen, onClose }) => { }, [searchPattern, replacePattern]); const onSearchPatternUpdate = (e) => { - formik.handleChange(e); - setSearchPattern(e.target.value); + const value = e.target.value; + setSearchPattern(value); + setValue('search_pattern', value); }; const onReplacePatternUpdate = (e) => { - formik.handleChange(e); - setReplacePattern(e.target.value); + const value = e.target.value; + setReplacePattern(value); + setValue('replace_pattern', value); }; - const formik = useFormik({ - initialValues: { - name: '', - max_streams: 0, - search_pattern: '', - replace_pattern: '', - notes: '', - }, - validationSchema: Yup.object({ - name: Yup.string().required('Name is required'), - search_pattern: Yup.string().when([], { - is: () => !isDefaultProfile, - then: (schema) => schema.required('Search pattern is required'), - otherwise: (schema) => schema.notRequired(), - }), - replace_pattern: Yup.string().when([], { - is: () => !isDefaultProfile, - then: (schema) => schema.required('Replace pattern is required'), - otherwise: (schema) => schema.notRequired(), - }), - notes: Yup.string(), // Optional field - }), - onSubmit: async (values, { setSubmitting, resetForm }) => { - console.log('submiting'); - - // For default profiles, only send name and custom_properties (notes) - let submitValues; - if (isDefaultProfile) { - submitValues = { - name: values.name, - custom_properties: { - // Preserve existing custom_properties and add/update notes - ...(profile?.custom_properties || {}), - notes: values.notes || '', - }, - }; - } else { - // For regular profiles, send all fields - submitValues = { - name: values.name, - max_streams: values.max_streams, - search_pattern: values.search_pattern, - replace_pattern: values.replace_pattern, - custom_properties: { - // Preserve existing custom_properties and add/update notes - ...(profile?.custom_properties || {}), - notes: values.notes || '', - }, - }; - } - - if (profile?.id) { - await API.updateM3UProfile(m3u.id, { - id: profile.id, - ...submitValues, - }); - } else { - await API.addM3UProfile(m3u.id, submitValues); - } - - resetForm(); - // Reset local state to sync with formik reset - setSearchPattern(''); - setReplacePattern(''); - setSubmitting(false); - onClose(); - }, - }); - useEffect(() => { - if (profile) { - setSearchPattern(profile.search_pattern); - setReplacePattern(profile.replace_pattern); - formik.setValues({ - name: profile.name, - max_streams: profile.max_streams, - search_pattern: profile.search_pattern, - replace_pattern: profile.replace_pattern, - notes: profile.custom_properties?.notes || '', - }); - } else { - formik.resetForm(); - } - }, [profile]); // eslint-disable-line react-hooks/exhaustive-deps + reset(defaultValues); + setSearchPattern(profile?.search_pattern || ''); + setReplacePattern(profile?.replace_pattern || ''); + }, [defaultValues, profile, reset]); const handleSampleInputChange = (e) => { setSampleInput(e.target.value); @@ -212,27 +219,21 @@ const RegexFormAndView = ({ profile = null, m3u, isOpen, onClose }) => { } size="lg" > - + {/* Only show max streams field for non-default profiles */} {!isDefaultProfile && ( - formik.setFieldValue('max_streams', value || 0) - } - error={formik.errors.max_streams ? 
formik.touched.max_streams : ''}
+                {...register('max_streams')}
+                value={watch('max_streams')}
+                onChange={(value) => setValue('max_streams', value || 0)}
+                error={errors.max_streams?.message}
                 min={0}
                 placeholder="0 = unlimited"
               />
@@ -242,40 +243,25 @@ const RegexFormAndView = ({ profile = null, m3u, isOpen, onClose }) => {
         {!isDefaultProfile && (
           <>
         )}
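// ---------------------------------------------------------------------------
// Reviewer note: the sketch below is not part of the patch. It distills the
// Formik -> react-hook-form conversion these diffs repeat, assuming only the
// libraries the patch already imports (react-hook-form,
// @hookform/resolvers/yup, yup): `useForm` with a `yupResolver` replaces
// `useFormik`; `register`/`setValue`/`watch` replace
// `handleChange`/`setFieldValue`/`formik.values`; and `reset(defaultValues)`
// replaces the `setValues`/`resetForm` branches in the prop-sync effect.
// Component and prop names here are illustrative, not from the PR.
import React, { useEffect, useMemo } from 'react';
import { useForm } from 'react-hook-form';
import { yupResolver } from '@hookform/resolvers/yup';
import * as Yup from 'yup';

const schema = Yup.object({
  name: Yup.string().required('Name is required'),
});

const ExampleForm = ({ record = null, onClose }) => {
  // Memoized defaults keyed off the incoming record, as in Logo.jsx and
  // M3UProfile.jsx above.
  const defaultValues = useMemo(() => ({ name: record?.name || '' }), [record]);

  const {
    register,
    handleSubmit,
    formState: { errors, isSubmitting },
    reset,
  } = useForm({ defaultValues, resolver: yupResolver(schema) });

  // Re-sync the form whenever the record prop changes; this replaces the old
  // formik.setValues/formik.resetForm effect.
  useEffect(() => {
    reset(defaultValues);
  }, [defaultValues, reset]);

  const onSubmit = async (values) => {
    // Persist values via the API here, then close, mirroring the handlers above.
    onClose();
  };

  return (
    <form onSubmit={handleSubmit(onSubmit)}>
      <input {...register('name')} placeholder="Name" />
      {errors.name && <span>{errors.name.message}</span>}
      <button type="submit" disabled={isSubmitting}>
        Save
      </button>
    </form>
  );
};

export default ExampleForm;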
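// ---------------------------------------------------------------------------
// Reviewer note: also not part of the patch. The LiveGroupFilter.jsx changes
// encode a three-state EPG override in a group's custom_properties: a numeric
// custom_epg_id selects a specific source, force_dummy_epg: true means
// "No EPG (Disabled)", and removing both keys (plus force_epg_selected)
// clears the override. The helper name below is hypothetical; it only
// summarizes the Select value the component derives inline above.
function epgSelectValue(customProperties = {}) {
  // A concrete EPG source wins: the Select shows its id as a string.
  if (
    customProperties.custom_epg_id !== undefined &&
    customProperties.custom_epg_id !== null
  ) {
    return String(customProperties.custom_epg_id);
  }
  // force_dummy_epg maps to the "No EPG (Disabled)" option, value '0'.
  if (customProperties.force_dummy_epg) {
    return '0';
  }
  // No override set: show the empty placeholder.
  return null;
}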