Mirror of https://github.com/Dispatcharr/Dispatcharr.git, synced 2026-01-22 18:28:00 +00:00

Commit 10447f8c86: Merge branch 'dev' of https://github.com/Dispatcharr/Dispatcharr into pr/nick4810/811

29 changed files with 955 additions and 865 deletions

.github/workflows/release.yml (vendored, 46 lines changed)
@@ -184,13 +184,13 @@ jobs:
echo "Creating multi-arch manifest for ${OWNER}/${REPO}"

# GitHub Container Registry manifests
# latest tag
# Create one manifest with both latest and version tags
docker buildx imagetools create \
--annotation "index:org.opencontainers.image.title=${{ needs.prepare.outputs.repo_name }}" \
--annotation "index:org.opencontainers.image.description=Your ultimate IPTV & stream Management companion." \
--annotation "index:org.opencontainers.image.url=https://github.com/${{ github.repository }}" \
--annotation "index:org.opencontainers.image.source=https://github.com/${{ github.repository }}" \
--annotation "index:org.opencontainers.image.version=latest" \
--annotation "index:org.opencontainers.image.version=${VERSION}" \
--annotation "index:org.opencontainers.image.created=${TIMESTAMP}" \
--annotation "index:org.opencontainers.image.revision=${{ github.sha }}" \
--annotation "index:org.opencontainers.image.licenses=See repository" \

@@ -200,9 +200,11 @@ jobs:
--annotation "index:maintainer=${{ github.actor }}" \
--annotation "index:build_version=Dispatcharr version: ${VERSION} Build date: ${TIMESTAMP}" \
--tag ghcr.io/${OWNER}/${REPO}:latest \
ghcr.io/${OWNER}/${REPO}:latest-amd64 ghcr.io/${OWNER}/${REPO}:latest-arm64
--tag ghcr.io/${OWNER}/${REPO}:${VERSION} \
ghcr.io/${OWNER}/${REPO}:${VERSION}-amd64 ghcr.io/${OWNER}/${REPO}:${VERSION}-arm64

# version tag
# Docker Hub manifests
# Create one manifest with both latest and version tags
docker buildx imagetools create \
--annotation "index:org.opencontainers.image.title=${{ needs.prepare.outputs.repo_name }}" \
--annotation "index:org.opencontainers.image.description=Your ultimate IPTV & stream Management companion." \

@@ -217,43 +219,7 @@ jobs:
--annotation "index:org.opencontainers.image.authors=${{ github.actor }}" \
--annotation "index:maintainer=${{ github.actor }}" \
--annotation "index:build_version=Dispatcharr version: ${VERSION} Build date: ${TIMESTAMP}" \
--tag ghcr.io/${OWNER}/${REPO}:${VERSION} \
ghcr.io/${OWNER}/${REPO}:${VERSION}-amd64 ghcr.io/${OWNER}/${REPO}:${VERSION}-arm64

# Docker Hub manifests
# latest tag
docker buildx imagetools create \
--annotation "index:org.opencontainers.image.title=${{ needs.prepare.outputs.repo_name }}" \
--annotation "index:org.opencontainers.image.description=Your ultimate IPTV & stream Management companion." \
--annotation "index:org.opencontainers.image.url=https://github.com/${{ github.repository }}" \
--annotation "index:org.opencontainers.image.source=https://github.com/${{ github.repository }}" \
--annotation "index:org.opencontainers.image.version=latest" \
--annotation "index:org.opencontainers.image.created=${TIMESTAMP}" \
--annotation "index:org.opencontainers.image.revision=${{ github.sha }}" \
--annotation "index:org.opencontainers.image.licenses=See repository" \
--annotation "index:org.opencontainers.image.documentation=https://dispatcharr.github.io/Dispatcharr-Docs/" \
--annotation "index:org.opencontainers.image.vendor=${OWNER}" \
--annotation "index:org.opencontainers.image.authors=${{ github.actor }}" \
--annotation "index:maintainer=${{ github.actor }}" \
--annotation "index:build_version=Dispatcharr version: ${VERSION} Build date: ${TIMESTAMP}" \
--tag docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${REPO}:latest \
docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${REPO}:latest-amd64 docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${REPO}:latest-arm64

# version tag
docker buildx imagetools create \
--annotation "index:org.opencontainers.image.title=${{ needs.prepare.outputs.repo_name }}" \
--annotation "index:org.opencontainers.image.description=Your ultimate IPTV & stream Management companion." \
--annotation "index:org.opencontainers.image.url=https://github.com/${{ github.repository }}" \
--annotation "index:org.opencontainers.image.source=https://github.com/${{ github.repository }}" \
--annotation "index:org.opencontainers.image.version=${VERSION}" \
--annotation "index:org.opencontainers.image.created=${TIMESTAMP}" \
--annotation "index:org.opencontainers.image.revision=${{ github.sha }}" \
--annotation "index:org.opencontainers.image.licenses=See repository" \
--annotation "index:org.opencontainers.image.documentation=https://dispatcharr.github.io/Dispatcharr-Docs/" \
--annotation "index:org.opencontainers.image.vendor=${OWNER}" \
--annotation "index:org.opencontainers.image.authors=${{ github.actor }}" \
--annotation "index:maintainer=${{ github.actor }}" \
--annotation "index:build_version=Dispatcharr version: ${VERSION} Build date: ${TIMESTAMP}" \
--tag docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${REPO}:${VERSION} \
docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${REPO}:${VERSION}-amd64 docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${REPO}:${VERSION}-arm64
CHANGELOG.md (21 lines changed)

@@ -7,6 +7,24 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.

## [Unreleased]

### Changed

- Docker setup enhanced for legacy CPU support: Added `USE_LEGACY_NUMPY` environment variable to enable custom-built NumPy with no CPU baseline, allowing Dispatcharr to run on older CPUs (circa 2009) that lack support for newer baseline CPU features. When set to `true`, the entrypoint script will install the legacy NumPy build instead of the standard distribution.
- VOD upstream read timeout reduced from 30 seconds to 10 seconds to minimize lock hold time when clients disconnect during the connection phase
- Form management refactored across the application: Migrated Channel, Stream, M3U Profile, Stream Profile, Logo, and User Agent forms from Formik to React Hook Form (RHF) with Yup validation for improved form handling, better validation feedback, and enhanced code maintainability
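The read-timeout entry above refers to the (connect, read) tuple accepted by the requests library; a minimal sketch of the new values, with illustrative names rather than the project's actual code:

import requests

# (connect_timeout, read_timeout) in seconds. The read timeout is the value
# lowered from 30s to 10s so a stalled upstream releases its lock sooner.
UPSTREAM_TIMEOUT = (10, 10)

def open_upstream(url, headers=None):
    # stream=True defers reading the body until the client actually consumes it
    return requests.get(url, headers=headers, stream=True,
                        timeout=UPSTREAM_TIMEOUT, allow_redirects=True)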

### Fixed

- Fixed Channels table EPG column showing "Not Assigned" on initial load for users with large EPG datasets. Added a `tvgsLoaded` flag to the EPG store to track when EPG data has finished loading, ensuring the table waits for EPG data before displaying. EPG cells now show animated skeleton placeholders while loading instead of incorrectly showing "Not Assigned". (Fixes #810)
- Fixed VOD profile connection count not being decremented when a stream connection fails (timeout, 404, etc.), which let profiles reach their capacity limits and reject valid stream requests
- Fixed React warning in the Channel form by removing the invalid `removeTrailingZeros` prop from the NumberInput component
- Release workflow Docker tagging: Fixed issue where `latest` and version tags (e.g., `0.16.0`) were creating separate manifests instead of pointing to the same image digest, which caused old `latest` tags to become orphaned/untagged after new releases. Now creates a single multi-arch manifest with both tags, maintaining proper tag relationships and download statistics visibility on GitHub.
- Fixed onboarding message appearing in the Channels Table when filtered results are empty. The onboarding message now only displays when there are no channels created at all, not when channels exist but are filtered out by the current filters.
- Fixed `M3UMovieRelation.get_stream_url()` and `M3UEpisodeRelation.get_stream_url()` to use the XC client's `_normalize_url()` method instead of a simple `rstrip('/')`. This properly handles malformed M3U account URLs (e.g., containing `/player_api.php` or query parameters) before constructing VOD stream endpoints, matching the behavior of live channel URL building. (Closes #722)
- Fixed bulk_create and bulk_update errors during VOD content refresh by pre-checking object existence with optimized bulk queries (3 queries total instead of N per batch) before creating new objects. This ensures all movie/series objects have primary keys before relation operations, preventing "prohibited to prevent data loss due to unsaved related object" errors. Additionally fixed duplicate key constraint violations by treating TMDB/IMDB ID values of `0` or `'0'` as invalid (some providers use this to indicate "no ID"), converting them to NULL to prevent multiple items from incorrectly sharing the same ID. (Fixes #813)
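The `_normalize_url()` fix above targets base URLs such as `http://host:8080/player_api.php?username=u`; a rough, hypothetical illustration of that kind of normalization (normalize_base_url is a stand-in, not the XC client's real method):

from urllib.parse import urlsplit

def normalize_base_url(raw: str) -> str:
    # Assumption: keeping only scheme://host[:port] is enough to rebuild
    # /movie/... and /series/... endpoints from a malformed account URL.
    parts = urlsplit(raw.strip())
    return f"{parts.scheme}://{parts.netloc}"

# "http://host:8080/player_api.php?username=u&password=p" -> "http://host:8080"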
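The bulk_create entry combines two ideas that can be sketched independently of the real task code: treat 0 and '0' provider IDs as missing, and resolve already-existing rows before creating the rest so every object carries a primary key. Helper and variable names here are illustrative:

def clean_provider_id(value):
    # '', 0 and '0' all mean "no ID" for some providers; storing them verbatim
    # would make unrelated items collide on a unique column.
    return None if value in ('', 0, '0') else value

def split_new_and_existing(movies, by_tmdb, by_imdb):
    # by_tmdb / by_imdb come from two bulk queries over the whole batch
    # (the name+year fallback is omitted in this sketch).
    to_create, resolved = [], {}
    for m in movies:
        existing = by_tmdb.get(m.tmdb_id) or by_imdb.get(m.imdb_id)
        if existing:
            resolved[id(m)] = existing      # reuse the row that already has a PK
        else:
            to_create.append(m)             # one bulk_create covers all of these
            resolved[id(m)] = m
    return to_create, resolved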
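The profile connection-count fix follows a plain increment/undo accounting pattern; a simplified sketch with hypothetical helpers (the shipped code tracks the increment with a flag inside a larger error handler):

def open_vod_connection(profile, create_connection):
    increment_profile_connections(profile)          # hypothetical helper
    try:
        return create_connection()                   # may time out, 404, etc.
    except Exception:
        # Undo the reservation so a failed connection cannot exhaust the profile.
        decrement_profile_connections(profile.id)    # hypothetical helper
        raise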

## [0.16.0] - 2026-01-04

### Added

- Advanced filtering for Channels table: Filter menu now allows toggling disabled channels visibility (when a profile is selected) and filtering to show only empty channels without streams (Closes #182)
@@ -22,6 +40,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.
### Changed

- Fixed event viewer arrow direction (previously inverted) so the UI now behaves correctly - Thanks [@drnikcuk](https://github.com/drnikcuk) (Closes #772)
- Region code options now intentionally include both `GB` (ISO 3166-1 standard) and `UK` (commonly used by EPG/XMLTV providers) to accommodate real-world EPG data variations. Many providers use `UK` in channel identifiers (e.g., `BBCOne.uk`) despite `GB` being the official ISO country code. Users should select the region code that matches their specific EPG provider's convention for optimal region-based EPG matching bonuses - Thanks [@bigpandaaaa](https://github.com/bigpandaaaa)
- Channel number inputs in stream-to-channel creation modals no longer have a maximum value restriction, allowing users to enter any valid channel number supported by the database
- Stream log parsing refactored to use a factory pattern: Simplified `ChannelService.parse_and_store_stream_info()` to route parsing through specialized log parsers instead of inline program-specific logic (~150 lines of code removed)
- Stream profile names in fixtures updated to use proper capitalization (ffmpeg → FFmpeg, streamlink → Streamlink)
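The factory-pattern entry above describes routing log lines to per-program parsers; a generic sketch of that shape (class and registry names are illustrative, not the project's):

class FFmpegLogParser:
    def parse(self, line: str) -> dict:
        ...  # program-specific field extraction

class StreamlinkLogParser:
    def parse(self, line: str) -> dict:
        ...

_PARSERS = {"ffmpeg": FFmpegLogParser, "streamlink": StreamlinkLogParser}

def get_log_parser(program: str):
    # The service asks the factory for a parser instead of branching inline.
    parser_cls = _PARSERS.get(program.lower())
    return parser_cls() if parser_cls else None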
@@ -43,6 +62,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.

### Fixed

- Auto Channel Sync "Force EPG Source" feature not properly forcing "No EPG" assignment - When selecting "Force EPG Source" > "No EPG (Disabled)", channels were still being auto-matched to EPG data instead of forcing dummy/no EPG. Now correctly sets the `force_dummy_epg` flag to prevent unwanted EPG assignment. (Fixes #788)
- VOD episode processing now properly handles season and episode numbers from APIs that return string values instead of integers, with comprehensive error logging to track data quality issues - Thanks [@patchy8736](https://github.com/patchy8736) (Fixes #770)
- VOD episode-to-stream relations are now validated to ensure episodes have been saved to the database before creating relations, preventing integrity errors when bulk_create operations encounter conflicts - Thanks [@patchy8736](https://github.com/patchy8736)
- VOD category filtering now correctly handles category names containing pipe "|" characters (e.g., "PL | BAJKI", "EN | MOVIES") by using `rsplit()` to split from the right instead of the left, ensuring the category type is correctly extracted as the last segment - Thanks [@Vitekant](https://github.com/Vitekant)
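For the season/episode entry above, the defensive coercion amounts to something like the following sketch (the function name is illustrative):

import logging

logger = logging.getLogger(__name__)

def coerce_episode_number(value, field="episode_num"):
    # Some providers return "5" instead of 5; log and skip anything that is
    # neither, so one bad record does not abort the whole batch.
    try:
        return int(value)
    except (TypeError, ValueError):
        logger.warning("Invalid %s value from provider: %r", field, value)
        return None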
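The category fix comes down to splitting from the right; the combined "display name|type" value shown here is an assumption for illustration only:

raw = "PL | BAJKI|movie"                  # hypothetical stored value
name, category_type = raw.rsplit("|", 1)  # split once, from the right
assert (name, category_type) == ("PL | BAJKI", "movie")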
@@ -50,6 +70,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.
- M3U and EPG manager page no longer crashes when a playlist references a deleted channel group (fixes a blank screen on navigation)
- Stream validation now returns the original URL instead of the redirected URL to prevent issues with temporary redirect URLs that expire before clients can connect
- XtreamCodes EPG limit parameter now properly converted to an integer to prevent type errors when accessing EPG listings (Fixes #781)
- Docker container file permissions: Django management commands (`migrate`, `collectstatic`) now run as the non-root user to prevent root-owned `__pycache__` and static files from causing permission issues - Thanks [@sethwv](https://github.com/sethwv)
- Stream validation now continues with a GET request if the HEAD request fails due to connection issues - Thanks [@kvnnap](https://github.com/kvnnap) (Fixes #782)
- XtreamCodes M3U files now correctly set `x-tvg-url` and `url-tvg` headers to reference the XC EPG URL (`xmltv.php`) instead of the standard EPG endpoint when downloaded via the XC API (Fixes #629)
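Two of the validation entries above (returning the original URL, and falling back from HEAD to GET) fit in one small sketch; this is an illustrative shape built on requests, not the project's validator:

import requests

def validate_stream(url, timeout=(5, 10)):
    try:
        resp = requests.head(url, timeout=timeout, allow_redirects=True)
    except requests.RequestException:
        # Some servers reject or drop HEAD; retry the check with GET.
        resp = requests.get(url, timeout=timeout, stream=True, allow_redirects=True)
    resp.raise_for_status()
    return url  # hand back the original URL; redirect targets may expire quickly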
@@ -236,12 +236,8 @@ class ChannelGroupViewSet(viewsets.ModelViewSet):
return [Authenticated()]

def get_queryset(self):
"""Add annotation for association counts"""
from django.db.models import Count
return ChannelGroup.objects.annotate(
channel_count=Count('channels', distinct=True),
m3u_account_count=Count('m3u_accounts', distinct=True)
)
"""Return channel groups with prefetched relations for efficient counting"""
return ChannelGroup.objects.prefetch_related('channels', 'm3u_accounts').all()

def update(self, request, *args, **kwargs):
"""Override update to check M3U associations"""

@@ -277,15 +273,20 @@ class ChannelGroupViewSet(viewsets.ModelViewSet):
@action(detail=False, methods=["post"], url_path="cleanup")
def cleanup_unused_groups(self, request):
"""Delete all channel groups with no channels or M3U account associations"""
from django.db.models import Count
from django.db.models import Q, Exists, OuterRef

# Find groups with no channels and no M3U account associations using Exists subqueries
from .models import Channel, ChannelGroupM3UAccount

has_channels = Channel.objects.filter(channel_group_id=OuterRef('pk'))
has_accounts = ChannelGroupM3UAccount.objects.filter(channel_group_id=OuterRef('pk'))

# Find groups with no channels and no M3U account associations
unused_groups = ChannelGroup.objects.annotate(
channel_count=Count('channels', distinct=True),
m3u_account_count=Count('m3u_accounts', distinct=True)
has_channels=Exists(has_channels),
has_accounts=Exists(has_accounts)
).filter(
channel_count=0,
m3u_account_count=0
has_channels=False,
has_accounts=False
)

deleted_count = unused_groups.count()
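Isolated from the hunk above, the replacement pattern looks like this (same models as in the diff): one EXISTS subquery per relation, which avoids the JOIN and GROUP BY that two Count() annotations require on large tables.

from django.db.models import Exists, OuterRef

unused_groups = ChannelGroup.objects.annotate(
    has_channels=Exists(Channel.objects.filter(channel_group_id=OuterRef('pk'))),
    has_accounts=Exists(ChannelGroupM3UAccount.objects.filter(channel_group_id=OuterRef('pk'))),
).filter(has_channels=False, has_accounts=False)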
@@ -179,8 +179,8 @@ class ChannelGroupM3UAccountSerializer(serializers.ModelSerializer):
# Channel Group
#
class ChannelGroupSerializer(serializers.ModelSerializer):
channel_count = serializers.IntegerField(read_only=True)
m3u_account_count = serializers.IntegerField(read_only=True)
channel_count = serializers.SerializerMethodField()
m3u_account_count = serializers.SerializerMethodField()
m3u_accounts = ChannelGroupM3UAccountSerializer(
many=True,
read_only=True

@@ -190,6 +190,14 @@ class ChannelGroupSerializer(serializers.ModelSerializer):
model = ChannelGroup
fields = ["id", "name", "channel_count", "m3u_account_count", "m3u_accounts"]

def get_channel_count(self, obj):
"""Get count of channels in this group"""
return obj.channels.count()

def get_m3u_account_count(self, obj):
"""Get count of M3U accounts associated with this group"""
return obj.m3u_accounts.count()


class ChannelProfileSerializer(serializers.ModelSerializer):
channels = serializers.SerializerMethodField()
@@ -357,12 +357,12 @@ class RedisBackedVODConnection:

logger.info(f"[{self.session_id}] Making request #{state.request_count} to {'final' if state.final_url else 'original'} URL")

# Make request
# Make request (10s connect, 10s read timeout - keeps lock time reasonable if client disconnects)
response = self.local_session.get(
target_url,
headers=headers,
stream=True,
timeout=(10, 30),
timeout=(10, 10),
allow_redirects=allow_redirects
)
response.raise_for_status()

@@ -712,6 +712,10 @@ class MultiWorkerVODConnectionManager:
content_name = content_obj.name if hasattr(content_obj, 'name') else str(content_obj)
client_id = session_id

# Track whether we incremented profile connections (for cleanup on error)
profile_connections_incremented = False
redis_connection = None

logger.info(f"[{client_id}] Worker {self.worker_id} - Redis-backed streaming request for {content_type} {content_name}")

try:

@@ -802,6 +806,7 @@ class MultiWorkerVODConnectionManager:

# Increment profile connections after successful connection creation
self._increment_profile_connections(m3u_profile)
profile_connections_incremented = True

logger.info(f"[{client_id}] Worker {self.worker_id} - Created consolidated connection with session metadata")
else:

@@ -1024,6 +1029,19 @@ class MultiWorkerVODConnectionManager:

except Exception as e:
logger.error(f"[{client_id}] Worker {self.worker_id} - Error in Redis-backed stream_content_with_session: {e}", exc_info=True)

# Decrement profile connections if we incremented them but failed before streaming started
if profile_connections_incremented:
logger.info(f"[{client_id}] Connection error occurred after profile increment - decrementing profile connections")
self._decrement_profile_connections(m3u_profile.id)

# Also clean up the Redis connection state since we won't be using it
if redis_connection:
try:
redis_connection.cleanup(connection_manager=self, current_worker_id=self.worker_id)
except Exception as cleanup_error:
logger.error(f"[{client_id}] Error during cleanup after connection failure: {cleanup_error}")

return HttpResponse(f"Streaming error: {str(e)}", status=500)

def _apply_timeshift_parameters(self, original_url, utc_start=None, utc_end=None, offset=None):
@@ -245,10 +245,13 @@ class M3UMovieRelation(models.Model):
"""Get the full stream URL for this movie from this provider"""
# Build URL dynamically for XtreamCodes accounts
if self.m3u_account.account_type == 'XC':
server_url = self.m3u_account.server_url.rstrip('/')
from core.xtream_codes import Client as XCClient
# Use XC client's URL normalization to handle malformed URLs
# (e.g., URLs with /player_api.php or query parameters)
normalized_url = XCClient(self.m3u_account.server_url, '', '')._normalize_url(self.m3u_account.server_url)
username = self.m3u_account.username
password = self.m3u_account.password
return f"{server_url}/movie/{username}/{password}/{self.stream_id}.{self.container_extension or 'mp4'}"
return f"{normalized_url}/movie/{username}/{password}/{self.stream_id}.{self.container_extension or 'mp4'}"
else:
# For other account types, we would need another way to build URLs
return None

@@ -285,10 +288,12 @@ class M3UEpisodeRelation(models.Model):

if self.m3u_account.account_type == 'XC':
# For XtreamCodes accounts, build the URL dynamically
server_url = self.m3u_account.server_url.rstrip('/')
# Use XC client's URL normalization to handle malformed URLs
# (e.g., URLs with /player_api.php or query parameters)
normalized_url = XtreamCodesClient(self.m3u_account.server_url, '', '')._normalize_url(self.m3u_account.server_url)
username = self.m3u_account.username
password = self.m3u_account.password
return f"{server_url}/series/{username}/{password}/{self.stream_id}.{self.container_extension or 'mp4'}"
return f"{normalized_url}/series/{username}/{password}/{self.stream_id}.{self.container_extension or 'mp4'}"
else:
# We might support non XC accounts in the future
# For now, return None
@@ -410,10 +410,10 @@ def process_movie_batch(account, batch, categories, relations, scan_start_time=N
tmdb_id = movie_data.get('tmdb_id') or movie_data.get('tmdb')
imdb_id = movie_data.get('imdb_id') or movie_data.get('imdb')

# Clean empty string IDs
if tmdb_id == '':
# Clean empty string IDs and zero values (some providers use 0 to indicate no ID)
if tmdb_id == '' or tmdb_id == 0 or tmdb_id == '0':
tmdb_id = None
if imdb_id == '':
if imdb_id == '' or imdb_id == 0 or imdb_id == '0':
imdb_id = None

# Create a unique key for this movie (priority: TMDB > IMDB > name+year)

@@ -614,26 +614,41 @@ def process_movie_batch(account, batch, categories, relations, scan_start_time=N
# First, create new movies and get their IDs
created_movies = {}
if movies_to_create:
Movie.objects.bulk_create(movies_to_create, ignore_conflicts=True)
# Bulk query to check which movies already exist
tmdb_ids = [m.tmdb_id for m in movies_to_create if m.tmdb_id]
imdb_ids = [m.imdb_id for m in movies_to_create if m.imdb_id]
name_year_pairs = [(m.name, m.year) for m in movies_to_create if not m.tmdb_id and not m.imdb_id]

# Get the newly created movies with their IDs
# We need to re-fetch them to get the primary keys
existing_by_tmdb = {m.tmdb_id: m for m in Movie.objects.filter(tmdb_id__in=tmdb_ids)} if tmdb_ids else {}
existing_by_imdb = {m.imdb_id: m for m in Movie.objects.filter(imdb_id__in=imdb_ids)} if imdb_ids else {}

existing_by_name_year = {}
if name_year_pairs:
for movie in Movie.objects.filter(tmdb_id__isnull=True, imdb_id__isnull=True):
key = (movie.name, movie.year)
if key in name_year_pairs:
existing_by_name_year[key] = movie

# Check each movie against the bulk query results
movies_actually_created = []
for movie in movies_to_create:
# Find the movie by its unique identifiers
if movie.tmdb_id:
db_movie = Movie.objects.filter(tmdb_id=movie.tmdb_id).first()
elif movie.imdb_id:
db_movie = Movie.objects.filter(imdb_id=movie.imdb_id).first()
else:
db_movie = Movie.objects.filter(
name=movie.name,
year=movie.year,
tmdb_id__isnull=True,
imdb_id__isnull=True
).first()
existing = None
if movie.tmdb_id and movie.tmdb_id in existing_by_tmdb:
existing = existing_by_tmdb[movie.tmdb_id]
elif movie.imdb_id and movie.imdb_id in existing_by_imdb:
existing = existing_by_imdb[movie.imdb_id]
elif not movie.tmdb_id and not movie.imdb_id:
existing = existing_by_name_year.get((movie.name, movie.year))

if db_movie:
created_movies[id(movie)] = db_movie
if existing:
created_movies[id(movie)] = existing
else:
movies_actually_created.append(movie)
created_movies[id(movie)] = movie

# Bulk create only movies that don't exist
if movies_actually_created:
Movie.objects.bulk_create(movies_actually_created)

# Update existing movies
if movies_to_update:

@@ -649,12 +664,16 @@ def process_movie_batch(account, batch, categories, relations, scan_start_time=N
movie.logo = movie._logo_to_update
movie.save(update_fields=['logo'])

# Update relations to reference the correct movie objects
# Update relations to reference the correct movie objects (with PKs)
for relation in relations_to_create:
if id(relation.movie) in created_movies:
relation.movie = created_movies[id(relation.movie)]

# Handle relations
for relation in relations_to_update:
if id(relation.movie) in created_movies:
relation.movie = created_movies[id(relation.movie)]

# All movies now have PKs, safe to bulk create/update relations
if relations_to_create:
M3UMovieRelation.objects.bulk_create(relations_to_create, ignore_conflicts=True)

@@ -724,10 +743,10 @@ def process_series_batch(account, batch, categories, relations, scan_start_time=
tmdb_id = series_data.get('tmdb') or series_data.get('tmdb_id')
imdb_id = series_data.get('imdb') or series_data.get('imdb_id')

# Clean empty string IDs
if tmdb_id == '':
# Clean empty string IDs and zero values (some providers use 0 to indicate no ID)
if tmdb_id == '' or tmdb_id == 0 or tmdb_id == '0':
tmdb_id = None
if imdb_id == '':
if imdb_id == '' or imdb_id == 0 or imdb_id == '0':
imdb_id = None

# Create a unique key for this series (priority: TMDB > IMDB > name+year)

@@ -945,26 +964,41 @@ def process_series_batch(account, batch, categories, relations, scan_start_time=
# First, create new series and get their IDs
created_series = {}
if series_to_create:
Series.objects.bulk_create(series_to_create, ignore_conflicts=True)
# Bulk query to check which series already exist
tmdb_ids = [s.tmdb_id for s in series_to_create if s.tmdb_id]
imdb_ids = [s.imdb_id for s in series_to_create if s.imdb_id]
name_year_pairs = [(s.name, s.year) for s in series_to_create if not s.tmdb_id and not s.imdb_id]

# Get the newly created series with their IDs
# We need to re-fetch them to get the primary keys
existing_by_tmdb = {s.tmdb_id: s for s in Series.objects.filter(tmdb_id__in=tmdb_ids)} if tmdb_ids else {}
existing_by_imdb = {s.imdb_id: s for s in Series.objects.filter(imdb_id__in=imdb_ids)} if imdb_ids else {}

existing_by_name_year = {}
if name_year_pairs:
for series in Series.objects.filter(tmdb_id__isnull=True, imdb_id__isnull=True):
key = (series.name, series.year)
if key in name_year_pairs:
existing_by_name_year[key] = series

# Check each series against the bulk query results
series_actually_created = []
for series in series_to_create:
# Find the series by its unique identifiers
if series.tmdb_id:
db_series = Series.objects.filter(tmdb_id=series.tmdb_id).first()
elif series.imdb_id:
db_series = Series.objects.filter(imdb_id=series.imdb_id).first()
else:
db_series = Series.objects.filter(
name=series.name,
year=series.year,
tmdb_id__isnull=True,
imdb_id__isnull=True
).first()
existing = None
if series.tmdb_id and series.tmdb_id in existing_by_tmdb:
existing = existing_by_tmdb[series.tmdb_id]
elif series.imdb_id and series.imdb_id in existing_by_imdb:
existing = existing_by_imdb[series.imdb_id]
elif not series.tmdb_id and not series.imdb_id:
existing = existing_by_name_year.get((series.name, series.year))

if db_series:
created_series[id(series)] = db_series
if existing:
created_series[id(series)] = existing
else:
series_actually_created.append(series)
created_series[id(series)] = series

# Bulk create only series that don't exist
if series_actually_created:
Series.objects.bulk_create(series_actually_created)

# Update existing series
if series_to_update:

@@ -980,12 +1014,16 @@ def process_series_batch(account, batch, categories, relations, scan_start_time=
series.logo = series._logo_to_update
series.save(update_fields=['logo'])

# Update relations to reference the correct series objects
# Update relations to reference the correct series objects (with PKs)
for relation in relations_to_create:
if id(relation.series) in created_series:
relation.series = created_series[id(relation.series)]

# Handle relations
for relation in relations_to_update:
if id(relation.series) in created_series:
relation.series = created_series[id(relation.series)]

# All series now have PKs, safe to bulk create/update relations
if relations_to_create:
M3USeriesRelation.objects.bulk_create(relations_to_create, ignore_conflicts=True)
@@ -4,7 +4,7 @@ ENV DEBIAN_FRONTEND=noninteractive
ENV VIRTUAL_ENV=/dispatcharrpy
ENV PATH="$VIRTUAL_ENV/bin:$PATH"

# --- Install Python 3.13 and system dependencies ---
# --- Install Python 3.13 and build dependencies ---
# Note: Hardware acceleration (VA-API, VDPAU, NVENC) already included in base ffmpeg image
RUN apt-get update && apt-get install --no-install-recommends -y \
ca-certificates software-properties-common gnupg2 curl wget \

@@ -13,18 +13,34 @@ RUN apt-get update && apt-get install --no-install-recommends -y \
&& apt-get install --no-install-recommends -y \
python3.13 python3.13-dev python3.13-venv \
python-is-python3 python3-pip \
libpcre3 libpcre3-dev libpq-dev procps \
build-essential gcc pciutils \
libpcre3 libpcre3-dev libpq-dev procps pciutils \
nginx streamlink comskip \
vlc-bin vlc-plugin-base \
&& apt-get clean && rm -rf /var/lib/apt/lists/*
build-essential gcc g++ gfortran libopenblas-dev libopenblas0 ninja-build

# --- Create Python virtual environment ---
RUN python3.13 -m venv $VIRTUAL_ENV && $VIRTUAL_ENV/bin/pip install --upgrade pip

# --- Install Python dependencies ---
COPY requirements.txt /tmp/requirements.txt
RUN $VIRTUAL_ENV/bin/pip install --no-cache-dir -r /tmp/requirements.txt && rm /tmp/requirements.txt
RUN $VIRTUAL_ENV/bin/pip install --no-cache-dir -r /tmp/requirements.txt && \
rm /tmp/requirements.txt

# --- Build legacy NumPy wheel for old hardware (store for runtime switching) ---
RUN $VIRTUAL_ENV/bin/pip install --no-cache-dir build && \
cd /tmp && \
$VIRTUAL_ENV/bin/pip download --no-binary numpy --no-deps numpy && \
tar -xzf numpy-*.tar.gz && \
cd numpy-*/ && \
$VIRTUAL_ENV/bin/python -m build --wheel -Csetup-args=-Dcpu-baseline="none" -Csetup-args=-Dcpu-dispatch="none" && \
mv dist/*.whl /opt/ && \
cd / && rm -rf /tmp/numpy-*

# --- Clean up build dependencies to reduce image size ---
RUN apt-get remove -y build-essential gcc g++ gfortran libopenblas-dev ninja-build && \
apt-get autoremove -y --purge && \
apt-get clean && \
rm -rf /var/lib/apt/lists/*

# --- Set up Redis 7.x ---
RUN curl -fsSL https://packages.redis.io/gpg | gpg --dearmor -o /usr/share/keyrings/redis-archive-keyring.gpg && \
@@ -35,9 +35,6 @@ RUN rm -rf /app/frontend
# Copy built frontend assets
COPY --from=frontend-builder /app/frontend/dist /app/frontend/dist

# Run Django collectstatic
RUN python manage.py collectstatic --noinput

# Add timestamp argument
ARG TIMESTAMP
@@ -14,6 +14,10 @@ services:
- REDIS_HOST=localhost
- CELERY_BROKER_URL=redis://localhost:6379/0
- DISPATCHARR_LOG_LEVEL=info
# Legacy CPU Support (Optional)
# Uncomment to enable legacy NumPy build for older CPUs (circa 2009)
# that lack support for newer baseline CPU features
#- USE_LEGACY_NUMPY=true
# Process Priority Configuration (Optional)
# Lower values = higher priority. Range: -20 (highest) to 19 (lowest)
# Negative values require cap_add: SYS_NICE (uncomment below)

@@ -18,6 +18,10 @@ services:
- REDIS_HOST=localhost
- CELERY_BROKER_URL=redis://localhost:6379/0
- DISPATCHARR_LOG_LEVEL=trace
# Legacy CPU Support (Optional)
# Uncomment to enable legacy NumPy build for older CPUs (circa 2009)
# that lack support for newer baseline CPU features
#- USE_LEGACY_NUMPY=true
# Process Priority Configuration (Optional)
# Lower values = higher priority. Range: -20 (highest) to 19 (lowest)
# Negative values require cap_add: SYS_NICE (uncomment below)

@@ -17,6 +17,10 @@ services:
- REDIS_HOST=localhost
- CELERY_BROKER_URL=redis://localhost:6379/0
- DISPATCHARR_LOG_LEVEL=debug
# Legacy CPU Support (Optional)
# Uncomment to enable legacy NumPy build for older CPUs (circa 2009)
# that lack support for newer baseline CPU features
#- USE_LEGACY_NUMPY=true
# Process Priority Configuration (Optional)
# Lower values = higher priority. Range: -20 (highest) to 19 (lowest)
# Negative values require cap_add: SYS_NICE (uncomment below)

@@ -17,6 +17,10 @@ services:
- REDIS_HOST=redis
- CELERY_BROKER_URL=redis://redis:6379/0
- DISPATCHARR_LOG_LEVEL=info
# Legacy CPU Support (Optional)
# Uncomment to enable legacy NumPy build for older CPUs (circa 2009)
# that lack support for newer baseline CPU features
#- USE_LEGACY_NUMPY=true
# Process Priority Configuration (Optional)
# Lower values = higher priority. Range: -20 (highest) to 19 (lowest)
# Negative values require cap_add: SYS_NICE (uncomment below)
@@ -27,6 +27,13 @@ echo_with_timestamp() {
echo "$(date '+%Y-%m-%d %H:%M:%S') - $1"
}

# --- NumPy version switching for legacy hardware ---
if [ "$USE_LEGACY_NUMPY" = "true" ]; then
echo_with_timestamp "🔧 Switching to legacy NumPy (no CPU baseline)..."
/dispatcharrpy/bin/pip install --no-cache-dir --force-reinstall --no-deps /opt/numpy-*.whl
echo_with_timestamp "✅ Legacy NumPy installed"
fi

# Set PostgreSQL environment variables
export POSTGRES_DB=${POSTGRES_DB:-dispatcharr}
export POSTGRES_USER=${POSTGRES_USER:-dispatch}

@@ -100,7 +107,7 @@ export POSTGRES_DIR=/data/db
if [[ ! -f /etc/profile.d/dispatcharr.sh ]]; then
# Define all variables to process
variables=(
PATH VIRTUAL_ENV DJANGO_SETTINGS_MODULE PYTHONUNBUFFERED
PATH VIRTUAL_ENV DJANGO_SETTINGS_MODULE PYTHONUNBUFFERED PYTHONDONTWRITEBYTECODE
POSTGRES_DB POSTGRES_USER POSTGRES_PASSWORD POSTGRES_HOST POSTGRES_PORT
DISPATCHARR_ENV DISPATCHARR_DEBUG DISPATCHARR_LOG_LEVEL
REDIS_HOST REDIS_DB POSTGRES_DIR DISPATCHARR_PORT

@@ -174,9 +181,9 @@ else
pids+=("$nginx_pid")
fi

cd /app
python manage.py migrate --noinput
python manage.py collectstatic --noinput
# Run Django commands as non-root user to prevent permission issues
su - $POSTGRES_USER -c "cd /app && python manage.py migrate --noinput"
su - $POSTGRES_USER -c "cd /app && python manage.py collectstatic --noinput"

# Select proper uwsgi config based on environment
if [ "$DISPATCHARR_ENV" = "dev" ] && [ "$DISPATCHARR_DEBUG" != "true" ]; then

@@ -15,6 +15,7 @@ DATA_DIRS=(
APP_DIRS=(
"/app/logo_cache"
"/app/media"
"/app/static"
)

# Create all directories
frontend/package-lock.json (generated, 100 lines changed)
@@ -23,11 +23,12 @@
"@mantine/form": "~8.0.1",
"@mantine/hooks": "~8.0.1",
"@mantine/notifications": "~8.0.1",
"@hookform/resolvers": "^5.2.2",
"@tanstack/react-table": "^8.21.2",
"allotment": "^1.20.4",
"dayjs": "^1.11.13",
"formik": "^2.4.6",
"hls.js": "^1.5.20",
"react-hook-form": "^7.70.0",
"lucide-react": "^0.511.0",
"mpegts.js": "^1.8.0",
"react": "^19.1.0",
@@ -1,5 +1,6 @@
import React, { useState, useEffect, useRef, useMemo } from 'react';
import { useFormik } from 'formik';
import { useForm } from 'react-hook-form';
import { yupResolver } from '@hookform/resolvers/yup';
import * as Yup from 'yup';
import useChannelsStore from '../../store/channels';
import API from '../../api';

@@ -42,6 +43,11 @@ import useEPGsStore from '../../store/epgs';
import { FixedSizeList as List } from 'react-window';
import { USER_LEVELS, USER_LEVEL_LABELS } from '../../constants';

const validationSchema = Yup.object({
name: Yup.string().required('Name is required'),
channel_group_id: Yup.string().required('Channel group is required'),
});

const ChannelForm = ({ channel = null, isOpen, onClose }) => {
const theme = useMantineTheme();

@@ -100,7 +106,7 @@ const ChannelForm = ({ channel = null, isOpen, onClose }) => {

const handleLogoSuccess = ({ logo }) => {
if (logo && logo.id) {
formik.setFieldValue('logo_id', logo.id);
setValue('logo_id', logo.id);
ensureLogosLoaded(); // Refresh logos
}
setLogoModalOpen(false);

@@ -124,7 +130,7 @@ const ChannelForm = ({ channel = null, isOpen, onClose }) => {
if (response.matched) {
// Update the form with the new EPG data
if (response.channel && response.channel.epg_data_id) {
formik.setFieldValue('epg_data_id', response.channel.epg_data_id);
setValue('epg_data_id', response.channel.epg_data_id);
}

notifications.show({

@@ -152,7 +158,7 @@ const ChannelForm = ({ channel = null, isOpen, onClose }) => {
};

const handleSetNameFromEpg = () => {
const epgDataId = formik.values.epg_data_id;
const epgDataId = watch('epg_data_id');
if (!epgDataId) {
notifications.show({
title: 'No EPG Selected',

@@ -164,7 +170,7 @@ const ChannelForm = ({ channel = null, isOpen, onClose }) => {

const tvg = tvgsById[epgDataId];
if (tvg && tvg.name) {
formik.setFieldValue('name', tvg.name);
setValue('name', tvg.name);
notifications.show({
title: 'Success',
message: `Channel name set to "${tvg.name}"`,

@@ -180,7 +186,7 @@ const ChannelForm = ({ channel = null, isOpen, onClose }) => {
};

const handleSetLogoFromEpg = async () => {
const epgDataId = formik.values.epg_data_id;
const epgDataId = watch('epg_data_id');
if (!epgDataId) {
notifications.show({
title: 'No EPG Selected',

@@ -207,7 +213,7 @@ const ChannelForm = ({ channel = null, isOpen, onClose }) => {
);

if (matchingLogo) {
formik.setFieldValue('logo_id', matchingLogo.id);
setValue('logo_id', matchingLogo.id);
notifications.show({
title: 'Success',
message: `Logo set to "${matchingLogo.name}"`,

@@ -231,7 +237,7 @@ const ChannelForm = ({ channel = null, isOpen, onClose }) => {
// Create logo by calling the Logo API directly
const newLogo = await API.createLogo(newLogoData);

formik.setFieldValue('logo_id', newLogo.id);
setValue('logo_id', newLogo.id);

notifications.update({
id: 'creating-logo',

@@ -264,7 +270,7 @@ const ChannelForm = ({ channel = null, isOpen, onClose }) => {
};

const handleSetTvgIdFromEpg = () => {
const epgDataId = formik.values.epg_data_id;
const epgDataId = watch('epg_data_id');
if (!epgDataId) {
notifications.show({
title: 'No EPG Selected',

@@ -276,7 +282,7 @@ const ChannelForm = ({ channel = null, isOpen, onClose }) => {

const tvg = tvgsById[epgDataId];
if (tvg && tvg.tvg_id) {
formik.setFieldValue('tvg_id', tvg.tvg_id);
setValue('tvg_id', tvg.tvg_id);
notifications.show({
title: 'Success',
message: `TVG-ID set to "${tvg.tvg_id}"`,

@@ -291,130 +297,130 @@ const ChannelForm = ({ channel = null, isOpen, onClose }) => {
}
};

const formik = useFormik({
initialValues: {
name: '',
channel_number: '', // Change from 0 to empty string for consistency
channel_group_id:
Object.keys(channelGroups).length > 0
const defaultValues = useMemo(
() => ({
name: channel?.name || '',
channel_number:
channel?.channel_number !== null &&
channel?.channel_number !== undefined
? channel.channel_number
: '',
channel_group_id: channel?.channel_group_id
? `${channel.channel_group_id}`
: Object.keys(channelGroups).length > 0
? Object.keys(channelGroups)[0]
: '',
stream_profile_id: '0',
tvg_id: '',
tvc_guide_stationid: '',
epg_data_id: '',
logo_id: '',
user_level: '0',
},
validationSchema: Yup.object({
name: Yup.string().required('Name is required'),
channel_group_id: Yup.string().required('Channel group is required'),
stream_profile_id: channel?.stream_profile_id
? `${channel.stream_profile_id}`
: '0',
tvg_id: channel?.tvg_id || '',
tvc_guide_stationid: channel?.tvc_guide_stationid || '',
epg_data_id: channel?.epg_data_id ?? '',
logo_id: channel?.logo_id ? `${channel.logo_id}` : '',
user_level: `${channel?.user_level ?? '0'}`,
}),
onSubmit: async (values, { setSubmitting }) => {
let response;
[channel, channelGroups]
);

try {
const formattedValues = { ...values };
const {
register,
handleSubmit,
setValue,
watch,
reset,
formState: { errors, isSubmitting },
} = useForm({
defaultValues,
resolver: yupResolver(validationSchema),
});

// Convert empty or "0" stream_profile_id to null for the API
if (
!formattedValues.stream_profile_id ||
formattedValues.stream_profile_id === '0'
) {
formattedValues.stream_profile_id = null;
}
const onSubmit = async (values) => {
let response;

// Ensure tvg_id is properly included (no empty strings)
formattedValues.tvg_id = formattedValues.tvg_id || null;
try {
const formattedValues = { ...values };

// Ensure tvc_guide_stationid is properly included (no empty strings)
formattedValues.tvc_guide_stationid =
formattedValues.tvc_guide_stationid || null;
// Convert empty or "0" stream_profile_id to null for the API
if (
!formattedValues.stream_profile_id ||
formattedValues.stream_profile_id === '0'
) {
formattedValues.stream_profile_id = null;
}

if (channel) {
// If there's an EPG to set, use our enhanced endpoint
if (values.epg_data_id !== (channel.epg_data_id ?? '')) {
// Use the special endpoint to set EPG and trigger refresh
const epgResponse = await API.setChannelEPG(
channel.id,
values.epg_data_id
);
// Ensure tvg_id is properly included (no empty strings)
formattedValues.tvg_id = formattedValues.tvg_id || null;

// Remove epg_data_id from values since we've handled it separately
const { epg_data_id, ...otherValues } = formattedValues;
// Ensure tvc_guide_stationid is properly included (no empty strings)
formattedValues.tvc_guide_stationid =
formattedValues.tvc_guide_stationid || null;

// Update other channel fields if needed
if (Object.keys(otherValues).length > 0) {
response = await API.updateChannel({
id: channel.id,
...otherValues,
streams: channelStreams.map((stream) => stream.id),
});
}
} else {
// No EPG change, regular update
if (channel) {
// If there's an EPG to set, use our enhanced endpoint
if (values.epg_data_id !== (channel.epg_data_id ?? '')) {
// Use the special endpoint to set EPG and trigger refresh
const epgResponse = await API.setChannelEPG(
channel.id,
values.epg_data_id
);

// Remove epg_data_id from values since we've handled it separately
const { epg_data_id, ...otherValues } = formattedValues;

// Update other channel fields if needed
if (Object.keys(otherValues).length > 0) {
response = await API.updateChannel({
id: channel.id,
...formattedValues,
...otherValues,
streams: channelStreams.map((stream) => stream.id),
});
}
} else {
// New channel creation - use the standard method
response = await API.addChannel({
// No EPG change, regular update
response = await API.updateChannel({
id: channel.id,
...formattedValues,
streams: channelStreams.map((stream) => stream.id),
});
}
} catch (error) {
console.error('Error saving channel:', error);
} else {
// New channel creation - use the standard method
response = await API.addChannel({
...formattedValues,
streams: channelStreams.map((stream) => stream.id),
});
}
} catch (error) {
console.error('Error saving channel:', error);
}

formik.resetForm();
API.requeryChannels();
reset();
API.requeryChannels();

// Refresh channel profiles to update the membership information
useChannelsStore.getState().fetchChannelProfiles();
// Refresh channel profiles to update the membership information
useChannelsStore.getState().fetchChannelProfiles();

setSubmitting(false);
setTvgFilter('');
setLogoFilter('');
onClose();
},
});
setTvgFilter('');
setLogoFilter('');
onClose();
};

useEffect(() => {
if (channel) {
if (channel.epg_data_id) {
const epgSource = epgs[tvgsById[channel.epg_data_id]?.epg_source];
setSelectedEPG(epgSource ? `${epgSource.id}` : '');
}
reset(defaultValues);
setChannelStreams(channel?.streams || []);

formik.setValues({
name: channel.name || '',
channel_number:
channel.channel_number !== null ? channel.channel_number : '',
channel_group_id: channel.channel_group_id
? `${channel.channel_group_id}`
: '',
stream_profile_id: channel.stream_profile_id
? `${channel.stream_profile_id}`
: '0',
tvg_id: channel.tvg_id || '',
tvc_guide_stationid: channel.tvc_guide_stationid || '',
epg_data_id: channel.epg_data_id ?? '',
logo_id: channel.logo_id ? `${channel.logo_id}` : '',
user_level: `${channel.user_level}`,
});

setChannelStreams(channel.streams || []);
if (channel?.epg_data_id) {
const epgSource = epgs[tvgsById[channel.epg_data_id]?.epg_source];
setSelectedEPG(epgSource ? `${epgSource.id}` : '');
} else {
formik.resetForm();
setSelectedEPG('');
}

if (!channel) {
setTvgFilter('');
setLogoFilter('');
setChannelStreams([]); // Ensure streams are cleared when adding a new channel
}
}, [channel, tvgsById, channelGroups]);
}, [defaultValues, channel, reset, epgs, tvgsById]);

// Memoize logo options to prevent infinite re-renders during background loading
const logoOptions = useMemo(() => {

@@ -431,10 +437,7 @@ const ChannelForm = ({ channel = null, isOpen, onClose }) => {
// If a new group was created and returned, update the form with it
if (newGroup && newGroup.id) {
// Preserve all current form values while updating just the channel_group_id
formik.setValues({
...formik.values,
channel_group_id: `${newGroup.id}`,
});
setValue('channel_group_id', `${newGroup.id}`);
}
};

@@ -472,7 +475,7 @@ const ChannelForm = ({ channel = null, isOpen, onClose }) => {
}
styles={{ content: { '--mantine-color-body': '#27272A' } }}
>
<form onSubmit={formik.handleSubmit}>
<form onSubmit={handleSubmit(onSubmit)}>
<Group justify="space-between" align="top">
<Stack gap="5" style={{ flex: 1 }}>
<TextInput

@@ -481,7 +484,7 @@ const ChannelForm = ({ channel = null, isOpen, onClose }) => {
label={
<Group gap="xs">
<span>Channel Name</span>
{formik.values.epg_data_id && (
{watch('epg_data_id') && (
<Button
size="xs"
variant="transparent"

@@ -495,9 +498,8 @@ const ChannelForm = ({ channel = null, isOpen, onClose }) => {
)}
</Group>
}
value={formik.values.name}
onChange={formik.handleChange}
error={formik.errors.name ? formik.touched.name : ''}
{...register('name')}
error={errors.name?.message}
size="xs"
style={{ flex: 1 }}
/>

@@ -516,8 +518,8 @@ const ChannelForm = ({ channel = null, isOpen, onClose }) => {
label="Channel Group"
readOnly
value={
channelGroups[formik.values.channel_group_id]
? channelGroups[formik.values.channel_group_id].name
channelGroups[watch('channel_group_id')]
? channelGroups[watch('channel_group_id')].name
: ''
}
onClick={() => setGroupPopoverOpened(true)}

@@ -557,7 +559,7 @@ const ChannelForm = ({ channel = null, isOpen, onClose }) => {
>
<UnstyledButton
onClick={() => {
formik.setFieldValue(
setValue(
'channel_group_id',
filteredGroups[index].id
);

@@ -587,16 +589,12 @@ const ChannelForm = ({ channel = null, isOpen, onClose }) => {
id="channel_group_id"
name="channel_group_id"
label="Channel Group"
value={formik.values.channel_group_id}
value={watch('channel_group_id')}
searchable
onChange={(value) => {
formik.setFieldValue('channel_group_id', value); // Update Formik's state with the new value
setValue('channel_group_id', value);
}}
error={
formik.errors.channel_group_id
? formik.touched.channel_group_id
: ''
}
error={errors.channel_group_id?.message}
data={Object.values(channelGroups).map((option, index) => ({
value: `${option.id}`,
label: option.name,

@@ -622,15 +620,11 @@ const ChannelForm = ({ channel = null, isOpen, onClose }) => {
id="stream_profile_id"
label="Stream Profile"
name="stream_profile_id"
value={formik.values.stream_profile_id}
value={watch('stream_profile_id')}
onChange={(value) => {
formik.setFieldValue('stream_profile_id', value); // Update Formik's state with the new value
setValue('stream_profile_id', value);
}}
error={
formik.errors.stream_profile_id
? formik.touched.stream_profile_id
: ''
}
error={errors.stream_profile_id?.message}
data={[{ value: '0', label: '(use default)' }].concat(
streamProfiles.map((option) => ({
value: `${option.id}`,

@@ -648,13 +642,11 @@ const ChannelForm = ({ channel = null, isOpen, onClose }) => {
value: `${value}`,
};
})}
value={formik.values.user_level}
value={watch('user_level')}
onChange={(value) => {
formik.setFieldValue('user_level', value);
setValue('user_level', value);
}}
error={
formik.errors.user_level ? formik.touched.user_level : ''
}
error={errors.user_level?.message}
/>
</Stack>

@@ -684,7 +676,7 @@ const ChannelForm = ({ channel = null, isOpen, onClose }) => {
label={
<Group gap="xs">
<span>Logo</span>
{formik.values.epg_data_id && (
{watch('epg_data_id') && (
<Button
size="xs"
variant="transparent"

@@ -699,9 +691,7 @@ const ChannelForm = ({ channel = null, isOpen, onClose }) => {
</Group>
}
readOnly
|
||||
value={
|
||||
channelLogos[formik.values.logo_id]?.name || 'Default'
|
||||
}
|
||||
value={channelLogos[watch('logo_id')]?.name || 'Default'}
|
||||
onClick={() => {
|
||||
console.log(
|
||||
'Logo input clicked, setting popover opened to true'
|
||||
|
|
@ -756,10 +746,7 @@ const ChannelForm = ({ channel = null, isOpen, onClose }) => {
|
|||
borderRadius: '4px',
|
||||
}}
|
||||
onClick={() => {
|
||||
formik.setFieldValue(
|
||||
'logo_id',
|
||||
filteredLogos[index].id
|
||||
);
|
||||
setValue('logo_id', filteredLogos[index].id);
|
||||
setLogoPopoverOpened(false);
|
||||
}}
|
||||
onMouseEnter={(e) => {
|
||||
|
|
@ -810,7 +797,7 @@ const ChannelForm = ({ channel = null, isOpen, onClose }) => {
|
|||
|
||||
<Stack gap="xs" align="center">
|
||||
<LazyLogo
|
||||
logoId={formik.values.logo_id}
|
||||
logoId={watch('logo_id')}
|
||||
alt="channel logo"
|
||||
style={{ height: 40 }}
|
||||
/>
|
||||
|
|
@ -833,19 +820,12 @@ const ChannelForm = ({ channel = null, isOpen, onClose }) => {
|
|||
id="channel_number"
|
||||
name="channel_number"
|
||||
label="Channel # (blank to auto-assign)"
|
||||
value={formik.values.channel_number}
|
||||
onChange={(value) =>
|
||||
formik.setFieldValue('channel_number', value)
|
||||
}
|
||||
error={
|
||||
formik.errors.channel_number
|
||||
? formik.touched.channel_number
|
||||
: ''
|
||||
}
|
||||
value={watch('channel_number')}
|
||||
onChange={(value) => setValue('channel_number', value)}
|
||||
error={errors.channel_number?.message}
|
||||
size="xs"
|
||||
step={0.1} // Add step prop to allow decimal inputs
|
||||
precision={1} // Specify decimal precision
|
||||
removeTrailingZeros // Optional: remove trailing zeros for cleaner display
|
||||
/>
|
||||
|
||||
<TextInput
|
||||
|
|
@ -854,7 +834,7 @@ const ChannelForm = ({ channel = null, isOpen, onClose }) => {
|
|||
label={
|
||||
<Group gap="xs">
|
||||
<span>TVG-ID</span>
|
||||
{formik.values.epg_data_id && (
|
||||
{watch('epg_data_id') && (
|
||||
<Button
|
||||
size="xs"
|
||||
variant="transparent"
|
||||
|
|
@ -868,9 +848,8 @@ const ChannelForm = ({ channel = null, isOpen, onClose }) => {
|
|||
)}
|
||||
</Group>
|
||||
}
|
||||
value={formik.values.tvg_id}
|
||||
onChange={formik.handleChange}
|
||||
error={formik.errors.tvg_id ? formik.touched.tvg_id : ''}
|
||||
{...register('tvg_id')}
|
||||
error={errors.tvg_id?.message}
|
||||
size="xs"
|
||||
/>
|
||||
|
||||
|
|
@ -878,13 +857,8 @@ const ChannelForm = ({ channel = null, isOpen, onClose }) => {
|
|||
id="tvc_guide_stationid"
|
||||
name="tvc_guide_stationid"
|
||||
label="Gracenote StationId"
|
||||
value={formik.values.tvc_guide_stationid}
|
||||
onChange={formik.handleChange}
|
||||
error={
|
||||
formik.errors.tvc_guide_stationid
|
||||
? formik.touched.tvc_guide_stationid
|
||||
: ''
|
||||
}
|
||||
{...register('tvc_guide_stationid')}
|
||||
error={errors.tvc_guide_stationid?.message}
|
||||
size="xs"
|
||||
/>
|
||||
|
||||
|
|
@ -904,9 +878,7 @@ const ChannelForm = ({ channel = null, isOpen, onClose }) => {
|
|||
<Button
|
||||
size="xs"
|
||||
variant="transparent"
|
||||
onClick={() =>
|
||||
formik.setFieldValue('epg_data_id', null)
|
||||
}
|
||||
onClick={() => setValue('epg_data_id', null)}
|
||||
>
|
||||
Use Dummy
|
||||
</Button>
|
||||
|
|
@ -933,7 +905,7 @@ const ChannelForm = ({ channel = null, isOpen, onClose }) => {
|
|||
}
|
||||
readOnly
|
||||
value={(() => {
|
||||
const tvg = tvgsById[formik.values.epg_data_id];
|
||||
const tvg = tvgsById[watch('epg_data_id')];
|
||||
const epgSource = tvg && epgs[tvg.epg_source];
|
||||
const tvgLabel = tvg ? tvg.name || tvg.id : '';
|
||||
if (epgSource && tvgLabel) {
|
||||
|
|
@ -953,7 +925,7 @@ const ChannelForm = ({ channel = null, isOpen, onClose }) => {
|
|||
color="white"
|
||||
onClick={(e) => {
|
||||
e.stopPropagation();
|
||||
formik.setFieldValue('epg_data_id', null);
|
||||
setValue('epg_data_id', null);
|
||||
}}
|
||||
title="Create new group"
|
||||
size="small"
|
||||
|
|
@ -1012,12 +984,9 @@ const ChannelForm = ({ channel = null, isOpen, onClose }) => {
|
|||
size="xs"
|
||||
onClick={() => {
|
||||
if (filteredTvgs[index].id == '0') {
|
||||
formik.setFieldValue('epg_data_id', null);
|
||||
setValue('epg_data_id', null);
|
||||
} else {
|
||||
formik.setFieldValue(
|
||||
'epg_data_id',
|
||||
filteredTvgs[index].id
|
||||
);
|
||||
setValue('epg_data_id', filteredTvgs[index].id);
|
||||
// Also update selectedEPG to match the EPG source of the selected tvg
|
||||
if (filteredTvgs[index].epg_source) {
|
||||
setSelectedEPG(
|
||||
|
|
@ -1047,11 +1016,11 @@ const ChannelForm = ({ channel = null, isOpen, onClose }) => {
|
|||
<Button
|
||||
type="submit"
|
||||
variant="default"
|
||||
disabled={formik.isSubmitting}
|
||||
loading={formik.isSubmitting}
|
||||
disabled={isSubmitting}
|
||||
loading={isSubmitting}
|
||||
loaderProps={{ type: 'dots' }}
|
||||
>
|
||||
{formik.isSubmitting ? 'Saving...' : 'Submit'}
|
||||
{isSubmitting ? 'Saving...' : 'Submit'}
|
||||
</Button>
|
||||
</Flex>
|
||||
</form>
@ -369,7 +369,8 @@ const LiveGroupFilter = ({
|
|||
if (
|
||||
group.custom_properties?.custom_epg_id !==
|
||||
undefined ||
|
||||
group.custom_properties?.force_dummy_epg
|
||||
group.custom_properties?.force_dummy_epg ||
|
||||
group.custom_properties?.force_epg_selected
|
||||
) {
|
||||
selectedValues.push('force_epg');
|
||||
}
|
||||
|
|
@ -432,23 +433,20 @@ const LiveGroupFilter = ({
|
|||
|
||||
// Handle force_epg
|
||||
if (selectedOptions.includes('force_epg')) {
|
||||
// Migrate from old force_dummy_epg if present
|
||||
// Set default to force_dummy_epg if no EPG settings exist yet
|
||||
if (
|
||||
newCustomProps.force_dummy_epg &&
|
||||
newCustomProps.custom_epg_id === undefined
|
||||
newCustomProps.custom_epg_id ===
|
||||
undefined &&
|
||||
!newCustomProps.force_dummy_epg
|
||||
) {
|
||||
// Migrate: force_dummy_epg=true becomes custom_epg_id=null
|
||||
newCustomProps.custom_epg_id = null;
|
||||
delete newCustomProps.force_dummy_epg;
|
||||
} else if (
|
||||
newCustomProps.custom_epg_id === undefined
|
||||
) {
|
||||
// New configuration: initialize with null (no EPG/default dummy)
|
||||
newCustomProps.custom_epg_id = null;
|
||||
// Default to "No EPG (Disabled)"
|
||||
newCustomProps.force_dummy_epg = true;
|
||||
}
|
||||
} else {
|
||||
// Only remove custom_epg_id when deselected
|
||||
// Remove all EPG settings when deselected
|
||||
delete newCustomProps.custom_epg_id;
|
||||
delete newCustomProps.force_dummy_epg;
|
||||
delete newCustomProps.force_epg_selected;
|
||||
}
|
||||
|
||||
// Handle group_override
|
||||
|
|
@ -1124,7 +1122,8 @@ const LiveGroupFilter = ({
|
|||
|
||||
{/* Show EPG selector when force_epg is selected */}
|
||||
{(group.custom_properties?.custom_epg_id !== undefined ||
|
||||
group.custom_properties?.force_dummy_epg) && (
|
||||
group.custom_properties?.force_dummy_epg ||
|
||||
group.custom_properties?.force_epg_selected) && (
|
||||
<Tooltip
|
||||
label="Force a specific EPG source for all auto-synced channels in this group. For dummy EPGs, all channels will share the same EPG data. For regular EPG sources (XMLTV, Schedules Direct), channels will be matched by their tvg_id within that source. Select 'No EPG' to disable EPG assignment."
|
||||
withArrow
|
||||
|
|
@ -1133,44 +1132,90 @@ const LiveGroupFilter = ({
|
|||
label="EPG Source"
|
||||
placeholder="No EPG (Disabled)"
|
||||
value={(() => {
|
||||
// Handle migration from force_dummy_epg
|
||||
// Show custom EPG if set
|
||||
if (
|
||||
group.custom_properties?.custom_epg_id !==
|
||||
undefined
|
||||
undefined &&
|
||||
group.custom_properties?.custom_epg_id !== null
|
||||
) {
|
||||
// Convert to string, use '0' for null/no EPG
|
||||
return group.custom_properties.custom_epg_id ===
|
||||
null
|
||||
? '0'
|
||||
: group.custom_properties.custom_epg_id.toString();
|
||||
} else if (
|
||||
group.custom_properties?.force_dummy_epg
|
||||
) {
|
||||
// Show "No EPG" for old force_dummy_epg configs
|
||||
return group.custom_properties.custom_epg_id.toString();
|
||||
}
|
||||
// Show "No EPG" if force_dummy_epg is set
|
||||
if (group.custom_properties?.force_dummy_epg) {
|
||||
return '0';
|
||||
}
|
||||
return '0';
|
||||
// Otherwise show empty/placeholder
|
||||
return null;
|
||||
})()}
|
||||
onChange={(value) => {
|
||||
// Convert back: '0' means no EPG (null)
|
||||
const newValue =
|
||||
value === '0' ? null : parseInt(value);
|
||||
setGroupStates(
|
||||
groupStates.map((state) => {
|
||||
if (
|
||||
state.channel_group === group.channel_group
|
||||
) {
|
||||
return {
|
||||
...state,
|
||||
custom_properties: {
|
||||
if (value === '0') {
|
||||
// "No EPG (Disabled)" selected - use force_dummy_epg
|
||||
setGroupStates(
|
||||
groupStates.map((state) => {
|
||||
if (
|
||||
state.channel_group ===
|
||||
group.channel_group
|
||||
) {
|
||||
const newProps = {
|
||||
...state.custom_properties,
|
||||
custom_epg_id: newValue,
|
||||
},
|
||||
};
|
||||
}
|
||||
return state;
|
||||
})
|
||||
);
|
||||
};
|
||||
delete newProps.custom_epg_id;
|
||||
delete newProps.force_epg_selected;
|
||||
newProps.force_dummy_epg = true;
|
||||
return {
|
||||
...state,
|
||||
custom_properties: newProps,
|
||||
};
|
||||
}
|
||||
return state;
|
||||
})
|
||||
);
|
||||
} else if (value) {
|
||||
// Specific EPG source selected
|
||||
const epgId = parseInt(value);
|
||||
setGroupStates(
|
||||
groupStates.map((state) => {
|
||||
if (
|
||||
state.channel_group ===
|
||||
group.channel_group
|
||||
) {
|
||||
const newProps = {
|
||||
...state.custom_properties,
|
||||
};
|
||||
newProps.custom_epg_id = epgId;
|
||||
delete newProps.force_dummy_epg;
|
||||
delete newProps.force_epg_selected;
|
||||
return {
|
||||
...state,
|
||||
custom_properties: newProps,
|
||||
};
|
||||
}
|
||||
return state;
|
||||
})
|
||||
);
|
||||
} else {
|
||||
// Cleared - remove all EPG settings
|
||||
setGroupStates(
|
||||
groupStates.map((state) => {
|
||||
if (
|
||||
state.channel_group ===
|
||||
group.channel_group
|
||||
) {
|
||||
const newProps = {
|
||||
...state.custom_properties,
|
||||
};
|
||||
delete newProps.custom_epg_id;
|
||||
delete newProps.force_dummy_epg;
|
||||
delete newProps.force_epg_selected;
|
||||
return {
|
||||
...state,
|
||||
custom_properties: newProps,
|
||||
};
|
||||
}
|
||||
return state;
|
||||
})
|
||||
);
|
||||
}
|
||||
}}
|
||||
data={[
|
||||
{ value: '0', label: 'No EPG (Disabled)' },
@ -1,5 +1,6 @@
|
|||
import React, { useState, useEffect } from 'react';
|
||||
import { useFormik } from 'formik';
|
||||
import React, { useState, useEffect, useMemo } from 'react';
|
||||
import { useForm } from 'react-hook-form';
|
||||
import { yupResolver } from '@hookform/resolvers/yup';
|
||||
import * as Yup from 'yup';
|
||||
import {
|
||||
Modal,
|
||||
|
|
@ -18,143 +19,148 @@ import { Upload, FileImage, X } from 'lucide-react';
|
|||
import { notifications } from '@mantine/notifications';
|
||||
import API from '../../api';
|
||||
|
||||
const schema = Yup.object({
|
||||
name: Yup.string().required('Name is required'),
|
||||
url: Yup.string()
|
||||
.required('URL is required')
|
||||
.test(
|
||||
'valid-url-or-path',
|
||||
'Must be a valid URL or local file path',
|
||||
(value) => {
|
||||
if (!value) return false;
|
||||
// Allow local file paths starting with /data/logos/
|
||||
if (value.startsWith('/data/logos/')) return true;
|
||||
// Allow valid URLs
|
||||
try {
|
||||
new URL(value);
|
||||
return true;
|
||||
} catch {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
),
|
||||
});
|
||||
|
||||
const LogoForm = ({ logo = null, isOpen, onClose, onSuccess }) => {
|
||||
const [logoPreview, setLogoPreview] = useState(null);
|
||||
const [uploading, setUploading] = useState(false);
|
||||
const [selectedFile, setSelectedFile] = useState(null); // Store selected file
|
||||
|
||||
const formik = useFormik({
|
||||
initialValues: {
|
||||
name: '',
|
||||
url: '',
|
||||
},
|
||||
validationSchema: Yup.object({
|
||||
name: Yup.string().required('Name is required'),
|
||||
url: Yup.string()
|
||||
.required('URL is required')
|
||||
.test(
|
||||
'valid-url-or-path',
|
||||
'Must be a valid URL or local file path',
|
||||
(value) => {
|
||||
if (!value) return false;
|
||||
// Allow local file paths starting with /data/logos/
|
||||
if (value.startsWith('/data/logos/')) return true;
|
||||
// Allow valid URLs
|
||||
try {
|
||||
new URL(value);
|
||||
return true;
|
||||
} catch {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
),
|
||||
const defaultValues = useMemo(
|
||||
() => ({
|
||||
name: logo?.name || '',
|
||||
url: logo?.url || '',
|
||||
}),
|
||||
onSubmit: async (values, { setSubmitting }) => {
|
||||
try {
|
||||
setUploading(true);
|
||||
let uploadResponse = null; // Store upload response for later use
|
||||
[logo]
|
||||
);
|
||||
|
||||
// If we have a selected file, upload it first
|
||||
if (selectedFile) {
|
||||
try {
|
||||
uploadResponse = await API.uploadLogo(selectedFile, values.name);
|
||||
// Use the uploaded file data instead of form values
|
||||
values.name = uploadResponse.name;
|
||||
values.url = uploadResponse.url;
|
||||
} catch (uploadError) {
|
||||
let errorMessage = 'Failed to upload logo file';
|
||||
|
||||
if (
|
||||
uploadError.code === 'NETWORK_ERROR' ||
|
||||
uploadError.message?.includes('timeout')
|
||||
) {
|
||||
errorMessage = 'Upload timed out. Please try again.';
|
||||
} else if (uploadError.status === 413) {
|
||||
errorMessage = 'File too large. Please choose a smaller file.';
|
||||
} else if (uploadError.body?.error) {
|
||||
errorMessage = uploadError.body.error;
|
||||
}
|
||||
|
||||
notifications.show({
|
||||
title: 'Upload Error',
|
||||
message: errorMessage,
|
||||
color: 'red',
|
||||
});
|
||||
return; // Don't proceed with creation if upload fails
|
||||
}
|
||||
}
|
||||
|
||||
// Now create or update the logo with the final values
|
||||
// Only proceed if we don't already have a logo from file upload
|
||||
if (logo) {
|
||||
const updatedLogo = await API.updateLogo(logo.id, values);
|
||||
notifications.show({
|
||||
title: 'Success',
|
||||
message: 'Logo updated successfully',
|
||||
color: 'green',
|
||||
});
|
||||
onSuccess?.({ type: 'update', logo: updatedLogo }); // Call onSuccess for updates
|
||||
} else if (!selectedFile) {
|
||||
// Only create a new logo entry if we're not uploading a file
|
||||
// (file upload already created the logo entry)
|
||||
const newLogo = await API.createLogo(values);
|
||||
notifications.show({
|
||||
title: 'Success',
|
||||
message: 'Logo created successfully',
|
||||
color: 'green',
|
||||
});
|
||||
onSuccess?.({ type: 'create', logo: newLogo }); // Call onSuccess for creates
|
||||
} else {
|
||||
// File was uploaded and logo was already created
|
||||
notifications.show({
|
||||
title: 'Success',
|
||||
message: 'Logo uploaded successfully',
|
||||
color: 'green',
|
||||
});
|
||||
onSuccess?.({ type: 'create', logo: uploadResponse });
|
||||
}
|
||||
onClose();
|
||||
} catch (error) {
|
||||
let errorMessage = logo
|
||||
? 'Failed to update logo'
|
||||
: 'Failed to create logo';
|
||||
|
||||
// Handle specific timeout errors
|
||||
if (
|
||||
error.code === 'NETWORK_ERROR' ||
|
||||
error.message?.includes('timeout')
|
||||
) {
|
||||
errorMessage = 'Request timed out. Please try again.';
|
||||
} else if (error.response?.data?.error) {
|
||||
errorMessage = error.response.data.error;
|
||||
}
|
||||
|
||||
notifications.show({
|
||||
title: 'Error',
|
||||
message: errorMessage,
|
||||
color: 'red',
|
||||
});
|
||||
} finally {
|
||||
setSubmitting(false);
|
||||
setUploading(false);
|
||||
}
|
||||
},
|
||||
const {
|
||||
register,
|
||||
handleSubmit,
|
||||
formState: { errors, isSubmitting },
|
||||
reset,
|
||||
setValue,
|
||||
watch,
|
||||
} = useForm({
|
||||
defaultValues,
|
||||
resolver: yupResolver(schema),
|
||||
});
|
||||
|
||||
useEffect(() => {
|
||||
if (logo) {
|
||||
formik.setValues({
|
||||
name: logo.name || '',
|
||||
url: logo.url || '',
|
||||
const onSubmit = async (values) => {
|
||||
try {
|
||||
setUploading(true);
|
||||
let uploadResponse = null; // Store upload response for later use
|
||||
|
||||
// If we have a selected file, upload it first
|
||||
if (selectedFile) {
|
||||
try {
|
||||
uploadResponse = await API.uploadLogo(selectedFile, values.name);
|
||||
// Use the uploaded file data instead of form values
|
||||
values.name = uploadResponse.name;
|
||||
values.url = uploadResponse.url;
|
||||
} catch (uploadError) {
|
||||
let errorMessage = 'Failed to upload logo file';
|
||||
|
||||
if (
|
||||
uploadError.code === 'NETWORK_ERROR' ||
|
||||
uploadError.message?.includes('timeout')
|
||||
) {
|
||||
errorMessage = 'Upload timed out. Please try again.';
|
||||
} else if (uploadError.status === 413) {
|
||||
errorMessage = 'File too large. Please choose a smaller file.';
|
||||
} else if (uploadError.body?.error) {
|
||||
errorMessage = uploadError.body.error;
|
||||
}
|
||||
|
||||
notifications.show({
|
||||
title: 'Upload Error',
|
||||
message: errorMessage,
|
||||
color: 'red',
|
||||
});
|
||||
return; // Don't proceed with creation if upload fails
|
||||
}
|
||||
}
|
||||
|
||||
// Now create or update the logo with the final values
|
||||
// Only proceed if we don't already have a logo from file upload
|
||||
if (logo) {
|
||||
const updatedLogo = await API.updateLogo(logo.id, values);
|
||||
notifications.show({
|
||||
title: 'Success',
|
||||
message: 'Logo updated successfully',
|
||||
color: 'green',
|
||||
});
|
||||
onSuccess?.({ type: 'update', logo: updatedLogo }); // Call onSuccess for updates
|
||||
} else if (!selectedFile) {
|
||||
// Only create a new logo entry if we're not uploading a file
|
||||
// (file upload already created the logo entry)
|
||||
const newLogo = await API.createLogo(values);
|
||||
notifications.show({
|
||||
title: 'Success',
|
||||
message: 'Logo created successfully',
|
||||
color: 'green',
|
||||
});
|
||||
onSuccess?.({ type: 'create', logo: newLogo }); // Call onSuccess for creates
|
||||
} else {
|
||||
// File was uploaded and logo was already created
|
||||
notifications.show({
|
||||
title: 'Success',
|
||||
message: 'Logo uploaded successfully',
|
||||
color: 'green',
|
||||
});
|
||||
onSuccess?.({ type: 'create', logo: uploadResponse });
|
||||
}
|
||||
onClose();
|
||||
} catch (error) {
|
||||
let errorMessage = logo
|
||||
? 'Failed to update logo'
|
||||
: 'Failed to create logo';
|
||||
|
||||
// Handle specific timeout errors
|
||||
if (
|
||||
error.code === 'NETWORK_ERROR' ||
|
||||
error.message?.includes('timeout')
|
||||
) {
|
||||
errorMessage = 'Request timed out. Please try again.';
|
||||
} else if (error.response?.data?.error) {
|
||||
errorMessage = error.response.data.error;
|
||||
}
|
||||
|
||||
notifications.show({
|
||||
title: 'Error',
|
||||
message: errorMessage,
|
||||
color: 'red',
|
||||
});
|
||||
setLogoPreview(logo.cache_url);
|
||||
} else {
|
||||
formik.resetForm();
|
||||
setLogoPreview(null);
|
||||
} finally {
|
||||
setUploading(false);
|
||||
}
|
||||
// Clear any selected file when logo changes
|
||||
};
|
||||
|
||||
useEffect(() => {
|
||||
reset(defaultValues);
|
||||
setLogoPreview(logo?.cache_url || null);
|
||||
setSelectedFile(null);
|
||||
}, [logo, isOpen]);
|
||||
}, [defaultValues, logo, reset]);
|
||||
|
||||
const handleFileSelect = (files) => {
|
||||
if (files.length === 0) return;
|
||||
|
|
@ -180,18 +186,19 @@ const LogoForm = ({ logo = null, isOpen, onClose, onSuccess }) => {
|
|||
setLogoPreview(previewUrl);
|
||||
|
||||
// Auto-fill the name field if empty
|
||||
if (!formik.values.name) {
|
||||
const currentName = watch('name');
|
||||
if (!currentName) {
|
||||
const nameWithoutExtension = file.name.replace(/\.[^/.]+$/, '');
|
||||
formik.setFieldValue('name', nameWithoutExtension);
|
||||
setValue('name', nameWithoutExtension);
|
||||
}
|
||||
|
||||
// Set a placeholder URL (will be replaced after upload)
|
||||
formik.setFieldValue('url', 'file://pending-upload');
|
||||
setValue('url', 'file://pending-upload');
|
||||
};
|
||||
|
||||
const handleUrlChange = (event) => {
|
||||
const url = event.target.value;
|
||||
formik.setFieldValue('url', url);
|
||||
setValue('url', url);
|
||||
|
||||
// Clear any selected file when manually entering URL
|
||||
if (selectedFile) {
|
||||
|
|
@ -219,7 +226,7 @@ const LogoForm = ({ logo = null, isOpen, onClose, onSuccess }) => {
|
|||
const filename = pathname.substring(pathname.lastIndexOf('/') + 1);
|
||||
const nameWithoutExtension = filename.replace(/\.[^/.]+$/, '');
|
||||
if (nameWithoutExtension) {
|
||||
formik.setFieldValue('name', nameWithoutExtension);
|
||||
setValue('name', nameWithoutExtension);
|
||||
}
|
||||
} catch (error) {
|
||||
// If the URL is invalid, do nothing.
|
||||
|
|
@ -244,7 +251,7 @@ const LogoForm = ({ logo = null, isOpen, onClose, onSuccess }) => {
|
|||
title={logo ? 'Edit Logo' : 'Add Logo'}
|
||||
size="md"
|
||||
>
|
||||
<form onSubmit={formik.handleSubmit}>
|
||||
<form onSubmit={handleSubmit(onSubmit)}>
|
||||
<Stack spacing="md">
|
||||
{/* Logo Preview */}
|
||||
{logoPreview && (
|
||||
|
|
@ -338,18 +345,18 @@ const LogoForm = ({ logo = null, isOpen, onClose, onSuccess }) => {
|
|||
<TextInput
|
||||
label="Logo URL"
|
||||
placeholder="https://example.com/logo.png"
|
||||
{...formik.getFieldProps('url')}
|
||||
{...register('url')}
|
||||
onChange={handleUrlChange}
|
||||
onBlur={handleUrlBlur}
|
||||
error={formik.touched.url && formik.errors.url}
|
||||
error={errors.url?.message}
|
||||
disabled={!!selectedFile} // Disable when file is selected
|
||||
/>
|
||||
|
||||
<TextInput
|
||||
label="Name"
|
||||
placeholder="Enter logo name"
|
||||
{...formik.getFieldProps('name')}
|
||||
error={formik.touched.name && formik.errors.name}
|
||||
{...register('name')}
|
||||
error={errors.name?.message}
|
||||
/>
|
||||
|
||||
{selectedFile && (
|
||||
|
|
@ -363,7 +370,7 @@ const LogoForm = ({ logo = null, isOpen, onClose, onSuccess }) => {
|
|||
<Button variant="light" onClick={onClose}>
|
||||
Cancel
|
||||
</Button>
|
||||
<Button type="submit" loading={formik.isSubmitting || uploading}>
|
||||
<Button type="submit" loading={isSubmitting || uploading}>
|
||||
{logo ? 'Update' : 'Create'}
|
||||
</Button>
|
||||
</Group>
@ -1,6 +1,5 @@
|
|||
// Modal.js
|
||||
import React, { useState, useEffect, forwardRef } from 'react';
|
||||
import { useFormik } from 'formik';
|
||||
import * as Yup from 'yup';
|
||||
import API from '../../api';
|
||||
import M3UProfiles from './M3UProfiles';
@ -1,5 +1,6 @@
|
|||
import React, { useState, useEffect } from 'react';
|
||||
import { useFormik } from 'formik';
|
||||
import React, { useState, useEffect, useMemo } from 'react';
|
||||
import { useForm } from 'react-hook-form';
|
||||
import { yupResolver } from '@hookform/resolvers/yup';
|
||||
import * as Yup from 'yup';
|
||||
import API from '../../api';
|
||||
import {
|
||||
|
|
@ -31,6 +32,89 @@ const RegexFormAndView = ({ profile = null, m3u, isOpen, onClose }) => {
|
|||
const [sampleInput, setSampleInput] = useState('');
|
||||
const isDefaultProfile = profile?.is_default;
|
||||
|
||||
const defaultValues = useMemo(
|
||||
() => ({
|
||||
name: profile?.name || '',
|
||||
max_streams: profile?.max_streams || 0,
|
||||
search_pattern: profile?.search_pattern || '',
|
||||
replace_pattern: profile?.replace_pattern || '',
|
||||
notes: profile?.custom_properties?.notes || '',
|
||||
}),
|
||||
[profile]
|
||||
);
|
||||
|
||||
const schema = Yup.object({
|
||||
name: Yup.string().required('Name is required'),
|
||||
search_pattern: Yup.string().when([], {
|
||||
is: () => !isDefaultProfile,
|
||||
then: (schema) => schema.required('Search pattern is required'),
|
||||
otherwise: (schema) => schema.notRequired(),
|
||||
}),
|
||||
replace_pattern: Yup.string().when([], {
|
||||
is: () => !isDefaultProfile,
|
||||
then: (schema) => schema.required('Replace pattern is required'),
|
||||
otherwise: (schema) => schema.notRequired(),
|
||||
}),
|
||||
notes: Yup.string(), // Optional field
|
||||
});
|
||||
|
||||
const {
|
||||
register,
|
||||
handleSubmit,
|
||||
formState: { errors, isSubmitting },
|
||||
reset,
|
||||
setValue,
|
||||
watch,
|
||||
} = useForm({
|
||||
defaultValues,
|
||||
resolver: yupResolver(schema),
|
||||
});
|
||||
|
||||
const onSubmit = async (values) => {
|
||||
console.log('submitting');
|
||||
|
||||
// For default profiles, only send name and custom_properties (notes)
|
||||
let submitValues;
|
||||
if (isDefaultProfile) {
|
||||
submitValues = {
|
||||
name: values.name,
|
||||
custom_properties: {
|
||||
// Preserve existing custom_properties and add/update notes
|
||||
...(profile?.custom_properties || {}),
|
||||
notes: values.notes || '',
|
||||
},
|
||||
};
|
||||
} else {
|
||||
// For regular profiles, send all fields
|
||||
submitValues = {
|
||||
name: values.name,
|
||||
max_streams: values.max_streams,
|
||||
search_pattern: values.search_pattern,
|
||||
replace_pattern: values.replace_pattern,
|
||||
custom_properties: {
|
||||
// Preserve existing custom_properties and add/update notes
|
||||
...(profile?.custom_properties || {}),
|
||||
notes: values.notes || '',
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
if (profile?.id) {
|
||||
await API.updateM3UProfile(m3u.id, {
|
||||
id: profile.id,
|
||||
...submitValues,
|
||||
});
|
||||
} else {
|
||||
await API.addM3UProfile(m3u.id, submitValues);
|
||||
}
|
||||
|
||||
reset();
|
||||
// Reset local state to sync with form reset
|
||||
setSearchPattern('');
|
||||
setReplacePattern('');
|
||||
onClose();
|
||||
};
|
||||
|
||||
useEffect(() => {
|
||||
async function fetchStreamUrl() {
|
||||
try {
|
||||
|
|
@ -79,99 +163,22 @@ const RegexFormAndView = ({ profile = null, m3u, isOpen, onClose }) => {
|
|||
}, [searchPattern, replacePattern]);
|
||||
|
||||
const onSearchPatternUpdate = (e) => {
|
||||
formik.handleChange(e);
|
||||
setSearchPattern(e.target.value);
|
||||
const value = e.target.value;
|
||||
setSearchPattern(value);
|
||||
setValue('search_pattern', value);
|
||||
};
|
||||
|
||||
const onReplacePatternUpdate = (e) => {
|
||||
formik.handleChange(e);
|
||||
setReplacePattern(e.target.value);
|
||||
const value = e.target.value;
|
||||
setReplacePattern(value);
|
||||
setValue('replace_pattern', value);
|
||||
};
|
||||
|
||||
const formik = useFormik({
|
||||
initialValues: {
|
||||
name: '',
|
||||
max_streams: 0,
|
||||
search_pattern: '',
|
||||
replace_pattern: '',
|
||||
notes: '',
|
||||
},
|
||||
validationSchema: Yup.object({
|
||||
name: Yup.string().required('Name is required'),
|
||||
search_pattern: Yup.string().when([], {
|
||||
is: () => !isDefaultProfile,
|
||||
then: (schema) => schema.required('Search pattern is required'),
|
||||
otherwise: (schema) => schema.notRequired(),
|
||||
}),
|
||||
replace_pattern: Yup.string().when([], {
|
||||
is: () => !isDefaultProfile,
|
||||
then: (schema) => schema.required('Replace pattern is required'),
|
||||
otherwise: (schema) => schema.notRequired(),
|
||||
}),
|
||||
notes: Yup.string(), // Optional field
|
||||
}),
|
||||
onSubmit: async (values, { setSubmitting, resetForm }) => {
|
||||
console.log('submitting');
|
||||
|
||||
// For default profiles, only send name and custom_properties (notes)
|
||||
let submitValues;
|
||||
if (isDefaultProfile) {
|
||||
submitValues = {
|
||||
name: values.name,
|
||||
custom_properties: {
|
||||
// Preserve existing custom_properties and add/update notes
|
||||
...(profile?.custom_properties || {}),
|
||||
notes: values.notes || '',
|
||||
},
|
||||
};
|
||||
} else {
|
||||
// For regular profiles, send all fields
|
||||
submitValues = {
|
||||
name: values.name,
|
||||
max_streams: values.max_streams,
|
||||
search_pattern: values.search_pattern,
|
||||
replace_pattern: values.replace_pattern,
|
||||
custom_properties: {
|
||||
// Preserve existing custom_properties and add/update notes
|
||||
...(profile?.custom_properties || {}),
|
||||
notes: values.notes || '',
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
if (profile?.id) {
|
||||
await API.updateM3UProfile(m3u.id, {
|
||||
id: profile.id,
|
||||
...submitValues,
|
||||
});
|
||||
} else {
|
||||
await API.addM3UProfile(m3u.id, submitValues);
|
||||
}
|
||||
|
||||
resetForm();
|
||||
// Reset local state to sync with formik reset
|
||||
setSearchPattern('');
|
||||
setReplacePattern('');
|
||||
setSubmitting(false);
|
||||
onClose();
|
||||
},
|
||||
});
|
||||
|
||||
useEffect(() => {
|
||||
if (profile) {
|
||||
setSearchPattern(profile.search_pattern);
|
||||
setReplacePattern(profile.replace_pattern);
|
||||
formik.setValues({
|
||||
name: profile.name,
|
||||
max_streams: profile.max_streams,
|
||||
search_pattern: profile.search_pattern,
|
||||
replace_pattern: profile.replace_pattern,
|
||||
notes: profile.custom_properties?.notes || '',
|
||||
});
|
||||
} else {
|
||||
formik.resetForm();
|
||||
}
|
||||
}, [profile]); // eslint-disable-line react-hooks/exhaustive-deps
|
||||
reset(defaultValues);
|
||||
setSearchPattern(profile?.search_pattern || '');
|
||||
setReplacePattern(profile?.replace_pattern || '');
|
||||
}, [defaultValues, profile, reset]);
|
||||
|
||||
const handleSampleInputChange = (e) => {
|
||||
setSampleInput(e.target.value);
|
||||
|
|
@ -212,27 +219,21 @@ const RegexFormAndView = ({ profile = null, m3u, isOpen, onClose }) => {
|
|||
}
|
||||
size="lg"
|
||||
>
|
||||
<form onSubmit={formik.handleSubmit}>
|
||||
<form onSubmit={handleSubmit(onSubmit)}>
|
||||
<TextInput
|
||||
id="name"
|
||||
name="name"
|
||||
label="Name"
|
||||
value={formik.values.name}
|
||||
onChange={formik.handleChange}
|
||||
error={formik.errors.name ? formik.touched.name : ''}
|
||||
{...register('name')}
|
||||
error={errors.name?.message}
|
||||
/>
|
||||
|
||||
{/* Only show max streams field for non-default profiles */}
|
||||
{!isDefaultProfile && (
|
||||
<NumberInput
|
||||
id="max_streams"
|
||||
name="max_streams"
|
||||
label="Max Streams"
|
||||
value={formik.values.max_streams}
|
||||
onChange={(value) =>
|
||||
formik.setFieldValue('max_streams', value || 0)
|
||||
}
|
||||
error={formik.errors.max_streams ? formik.touched.max_streams : ''}
|
||||
{...register('max_streams')}
|
||||
value={watch('max_streams')}
|
||||
onChange={(value) => setValue('max_streams', value || 0)}
|
||||
error={errors.max_streams?.message}
|
||||
min={0}
|
||||
placeholder="0 = unlimited"
|
||||
/>
|
||||
|
|
@ -242,40 +243,25 @@ const RegexFormAndView = ({ profile = null, m3u, isOpen, onClose }) => {
|
|||
{!isDefaultProfile && (
|
||||
<>
|
||||
<TextInput
|
||||
id="search_pattern"
|
||||
name="search_pattern"
|
||||
label="Search Pattern (Regex)"
|
||||
value={searchPattern}
|
||||
onChange={onSearchPatternUpdate}
|
||||
error={
|
||||
formik.errors.search_pattern
|
||||
? formik.touched.search_pattern
|
||||
: ''
|
||||
}
|
||||
error={errors.search_pattern?.message}
|
||||
/>
|
||||
<TextInput
|
||||
id="replace_pattern"
|
||||
name="replace_pattern"
|
||||
label="Replace Pattern"
|
||||
value={replacePattern}
|
||||
onChange={onReplacePatternUpdate}
|
||||
error={
|
||||
formik.errors.replace_pattern
|
||||
? formik.touched.replace_pattern
|
||||
: ''
|
||||
}
|
||||
error={errors.replace_pattern?.message}
|
||||
/>
|
||||
</>
|
||||
)}
|
||||
|
||||
<Textarea
|
||||
id="notes"
|
||||
name="notes"
|
||||
label="Notes"
|
||||
placeholder="Add any notes or comments about this profile..."
|
||||
value={formik.values.notes}
|
||||
onChange={formik.handleChange}
|
||||
error={formik.errors.notes ? formik.touched.notes : ''}
|
||||
{...register('notes')}
|
||||
error={errors.notes?.message}
|
||||
minRows={2}
|
||||
maxRows={4}
|
||||
autosize
|
||||
|
|
@ -290,9 +276,9 @@ const RegexFormAndView = ({ profile = null, m3u, isOpen, onClose }) => {
|
|||
>
|
||||
<Button
|
||||
type="submit"
|
||||
disabled={formik.isSubmitting}
|
||||
disabled={isSubmitting}
|
||||
size="xs"
|
||||
style={{ width: formik.isSubmitting ? 'auto' : 'auto' }}
|
||||
style={{ width: isSubmitting ? 'auto' : 'auto' }}
|
||||
>
|
||||
Submit
|
||||
</Button>
@ -1,108 +1,104 @@
|
|||
// Modal.js
|
||||
import React, { useEffect } from 'react';
|
||||
import { useFormik } from 'formik';
|
||||
import React, { useEffect, useMemo } from 'react';
|
||||
import { useForm } from 'react-hook-form';
|
||||
import { yupResolver } from '@hookform/resolvers/yup';
|
||||
import * as Yup from 'yup';
|
||||
import API from '../../api';
|
||||
import useStreamProfilesStore from '../../store/streamProfiles';
|
||||
import { Modal, TextInput, Select, Button, Flex } from '@mantine/core';
|
||||
import useChannelsStore from '../../store/channels';
|
||||
|
||||
const schema = Yup.object({
|
||||
name: Yup.string().required('Name is required'),
|
||||
url: Yup.string().required('URL is required').min(0),
|
||||
});
|
||||
|
||||
const Stream = ({ stream = null, isOpen, onClose }) => {
|
||||
const streamProfiles = useStreamProfilesStore((state) => state.profiles);
|
||||
const channelGroups = useChannelsStore((s) => s.channelGroups);
|
||||
|
||||
const formik = useFormik({
|
||||
initialValues: {
|
||||
name: '',
|
||||
url: '',
|
||||
channel_group: null,
|
||||
stream_profile_id: '',
|
||||
},
|
||||
validationSchema: Yup.object({
|
||||
name: Yup.string().required('Name is required'),
|
||||
url: Yup.string().required('URL is required').min(0),
|
||||
// stream_profile_id: Yup.string().required('Stream profile is required'),
|
||||
const defaultValues = useMemo(
|
||||
() => ({
|
||||
name: stream?.name || '',
|
||||
url: stream?.url || '',
|
||||
channel_group: stream?.channel_group
|
||||
? String(stream.channel_group)
|
||||
: null,
|
||||
stream_profile_id: stream?.stream_profile_id
|
||||
? String(stream.stream_profile_id)
|
||||
: '',
|
||||
}),
|
||||
onSubmit: async (values, { setSubmitting, resetForm }) => {
|
||||
console.log(values);
|
||||
[stream]
|
||||
);
|
||||
|
||||
// Convert string IDs back to integers for the API
|
||||
const payload = {
|
||||
...values,
|
||||
channel_group: values.channel_group
|
||||
? parseInt(values.channel_group, 10)
|
||||
: null,
|
||||
stream_profile_id: values.stream_profile_id
|
||||
? parseInt(values.stream_profile_id, 10)
|
||||
: null,
|
||||
};
|
||||
|
||||
if (stream?.id) {
|
||||
await API.updateStream({ id: stream.id, ...payload });
|
||||
} else {
|
||||
await API.addStream(payload);
|
||||
}
|
||||
|
||||
resetForm();
|
||||
setSubmitting(false);
|
||||
onClose();
|
||||
},
|
||||
const {
|
||||
register,
|
||||
handleSubmit,
|
||||
formState: { errors, isSubmitting },
|
||||
reset,
|
||||
setValue,
|
||||
watch,
|
||||
} = useForm({
|
||||
defaultValues,
|
||||
resolver: yupResolver(schema),
|
||||
});
|
||||
|
||||
useEffect(() => {
|
||||
if (stream) {
|
||||
formik.setValues({
|
||||
name: stream.name,
|
||||
url: stream.url,
|
||||
// Convert IDs to strings to match Select component values
|
||||
channel_group: stream.channel_group
|
||||
? String(stream.channel_group)
|
||||
: null,
|
||||
stream_profile_id: stream.stream_profile_id
|
||||
? String(stream.stream_profile_id)
|
||||
: '',
|
||||
});
|
||||
const onSubmit = async (values) => {
|
||||
console.log(values);
|
||||
|
||||
// Convert string IDs back to integers for the API
|
||||
const payload = {
|
||||
...values,
|
||||
channel_group: values.channel_group
|
||||
? parseInt(values.channel_group, 10)
|
||||
: null,
|
||||
stream_profile_id: values.stream_profile_id
|
||||
? parseInt(values.stream_profile_id, 10)
|
||||
: null,
|
||||
};
|
||||
|
||||
if (stream?.id) {
|
||||
await API.updateStream({ id: stream.id, ...payload });
|
||||
} else {
|
||||
formik.resetForm();
|
||||
await API.addStream(payload);
|
||||
}
|
||||
// eslint-disable-next-line react-hooks/exhaustive-deps
|
||||
}, [stream]);
|
||||
|
||||
reset();
|
||||
onClose();
|
||||
};
|
||||
|
||||
useEffect(() => {
|
||||
reset(defaultValues);
|
||||
}, [defaultValues, reset]);
|
||||
|
||||
if (!isOpen) {
|
||||
return <></>;
|
||||
}
|
||||
|
||||
const channelGroupValue = watch('channel_group');
|
||||
const streamProfileValue = watch('stream_profile_id');
|
||||
|
||||
return (
|
||||
<Modal opened={isOpen} onClose={onClose} title="Stream" zIndex={10}>
|
||||
<form onSubmit={formik.handleSubmit}>
|
||||
<form onSubmit={handleSubmit(onSubmit)}>
|
||||
<TextInput
|
||||
id="name"
|
||||
name="name"
|
||||
label="Stream Name"
|
||||
value={formik.values.name}
|
||||
onChange={formik.handleChange}
|
||||
error={formik.errors.name}
|
||||
{...register('name')}
|
||||
error={errors.name?.message}
|
||||
/>
|
||||
|
||||
<TextInput
|
||||
id="url"
|
||||
name="url"
|
||||
label="Stream URL"
|
||||
value={formik.values.url}
|
||||
onChange={formik.handleChange}
|
||||
error={formik.errors.url}
|
||||
{...register('url')}
|
||||
error={errors.url?.message}
|
||||
/>
|
||||
|
||||
<Select
|
||||
id="channel_group"
|
||||
name="channel_group"
|
||||
label="Group"
|
||||
searchable
|
||||
value={formik.values.channel_group}
|
||||
onChange={(value) => {
|
||||
formik.setFieldValue('channel_group', value); // Update Formik's state with the new value
|
||||
}}
|
||||
error={formik.errors.channel_group}
|
||||
value={channelGroupValue}
|
||||
onChange={(value) => setValue('channel_group', value)}
|
||||
error={errors.channel_group?.message}
|
||||
data={Object.values(channelGroups).map((group) => ({
|
||||
label: group.name,
|
||||
value: `${group.id}`,
|
||||
|
|
@ -110,16 +106,12 @@ const Stream = ({ stream = null, isOpen, onClose }) => {
|
|||
/>
|
||||
|
||||
<Select
|
||||
id="stream_profile_id"
|
||||
name="stream_profile_id"
|
||||
label="Stream Profile"
|
||||
placeholder="Optional"
|
||||
searchable
|
||||
value={formik.values.stream_profile_id}
|
||||
onChange={(value) => {
|
||||
formik.setFieldValue('stream_profile_id', value); // Update Formik's state with the new value
|
||||
}}
|
||||
error={formik.errors.stream_profile_id}
|
||||
value={streamProfileValue}
|
||||
onChange={(value) => setValue('stream_profile_id', value)}
|
||||
error={errors.stream_profile_id?.message}
|
||||
data={streamProfiles.map((profile) => ({
|
||||
label: profile.name,
|
||||
value: `${profile.id}`,
|
||||
|
|
@ -132,7 +124,7 @@ const Stream = ({ stream = null, isOpen, onClose }) => {
|
|||
type="submit"
|
||||
variant="contained"
|
||||
color="primary"
|
||||
disabled={formik.isSubmitting}
|
||||
disabled={isSubmitting}
|
||||
>
|
||||
Submit
|
||||
</Button>
@ -1,96 +1,91 @@
|
|||
// Modal.js
|
||||
import React, { useEffect } from 'react';
|
||||
import { useFormik } from 'formik';
|
||||
import React, { useEffect, useMemo } from 'react';
|
||||
import { useForm } from 'react-hook-form';
|
||||
import { yupResolver } from '@hookform/resolvers/yup';
|
||||
import * as Yup from 'yup';
|
||||
import API from '../../api';
|
||||
import useUserAgentsStore from '../../store/userAgents';
|
||||
import { Modal, TextInput, Select, Button, Flex } from '@mantine/core';
|
||||
|
||||
const schema = Yup.object({
|
||||
name: Yup.string().required('Name is required'),
|
||||
command: Yup.string().required('Command is required'),
|
||||
parameters: Yup.string().required('Parameters are required'),
|
||||
});
|
||||
|
||||
const StreamProfile = ({ profile = null, isOpen, onClose }) => {
|
||||
const userAgents = useUserAgentsStore((state) => state.userAgents);
|
||||
|
||||
const formik = useFormik({
|
||||
initialValues: {
|
||||
name: '',
|
||||
command: '',
|
||||
parameters: '',
|
||||
is_active: true,
|
||||
user_agent: '',
|
||||
},
|
||||
validationSchema: Yup.object({
|
||||
name: Yup.string().required('Name is required'),
|
||||
command: Yup.string().required('Command is required'),
|
||||
parameters: Yup.string().required('Parameters are required'),
|
||||
const defaultValues = useMemo(
|
||||
() => ({
|
||||
name: profile?.name || '',
|
||||
command: profile?.command || '',
|
||||
parameters: profile?.parameters || '',
|
||||
is_active: profile?.is_active ?? true,
|
||||
user_agent: profile?.user_agent || '',
|
||||
}),
|
||||
onSubmit: async (values, { setSubmitting, resetForm }) => {
|
||||
if (profile?.id) {
|
||||
await API.updateStreamProfile({ id: profile.id, ...values });
|
||||
} else {
|
||||
await API.addStreamProfile(values);
|
||||
}
|
||||
[profile]
|
||||
);
|
||||
|
||||
resetForm();
|
||||
setSubmitting(false);
|
||||
onClose();
|
||||
},
|
||||
const {
|
||||
register,
|
||||
handleSubmit,
|
||||
formState: { errors, isSubmitting },
|
||||
reset,
|
||||
watch,
|
||||
} = useForm({
|
||||
defaultValues,
|
||||
resolver: yupResolver(schema),
|
||||
});
|
||||
|
||||
useEffect(() => {
|
||||
if (profile) {
|
||||
formik.setValues({
|
||||
name: profile.name,
|
||||
command: profile.command,
|
||||
parameters: profile.parameters,
|
||||
is_active: profile.is_active,
|
||||
user_agent: profile.user_agent,
|
||||
});
|
||||
const onSubmit = async (values) => {
|
||||
if (profile?.id) {
|
||||
await API.updateStreamProfile({ id: profile.id, ...values });
|
||||
} else {
|
||||
formik.resetForm();
|
||||
await API.addStreamProfile(values);
|
||||
}
|
||||
}, [profile]);
|
||||
|
||||
reset();
|
||||
onClose();
|
||||
};
|
||||
|
||||
useEffect(() => {
|
||||
reset(defaultValues);
|
||||
}, [defaultValues, reset]);
|
||||
|
||||
if (!isOpen) {
|
||||
return <></>;
|
||||
}
|
||||
|
||||
const userAgentValue = watch('user_agent');
|
||||
|
||||
return (
|
||||
<Modal opened={isOpen} onClose={onClose} title="Stream Profile">
|
||||
<form onSubmit={formik.handleSubmit}>
|
||||
<form onSubmit={handleSubmit(onSubmit)}>
|
||||
<TextInput
|
||||
id="name"
|
||||
name="name"
|
||||
label="Name"
|
||||
value={formik.values.name}
|
||||
onChange={formik.handleChange}
|
||||
error={formik.errors.name}
|
||||
{...register('name')}
|
||||
error={errors.name?.message}
|
||||
disabled={profile ? profile.locked : false}
|
||||
/>
|
||||
<TextInput
|
||||
id="command"
|
||||
name="command"
|
||||
label="Command"
|
||||
value={formik.values.command}
|
||||
onChange={formik.handleChange}
|
||||
error={formik.errors.command}
|
||||
{...register('command')}
|
||||
error={errors.command?.message}
|
||||
disabled={profile ? profile.locked : false}
|
||||
/>
|
||||
<TextInput
|
||||
id="parameters"
|
||||
name="parameters"
|
||||
label="Parameters"
|
||||
value={formik.values.parameters}
|
||||
onChange={formik.handleChange}
|
||||
error={formik.errors.parameters}
|
||||
{...register('parameters')}
|
||||
error={errors.parameters?.message}
|
||||
disabled={profile ? profile.locked : false}
|
||||
/>
|
||||
|
||||
<Select
|
||||
id="user_agent"
|
||||
name="user_agent"
|
||||
label="User-Agent"
|
||||
value={formik.values.user_agent}
|
||||
onChange={formik.handleChange}
|
||||
error={formik.errors.user_agent}
|
||||
{...register('user_agent')}
|
||||
value={userAgentValue}
|
||||
error={errors.user_agent?.message}
|
||||
data={userAgents.map((ua) => ({
|
||||
label: ua.name,
|
||||
value: `${ua.id}`,
|
||||
|
|
@ -102,7 +97,7 @@ const StreamProfile = ({ profile = null, isOpen, onClose }) => {
|
|||
type="submit"
|
||||
variant="contained"
|
||||
color="primary"
|
||||
disabled={formik.isSubmitting}
|
||||
disabled={isSubmitting}
|
||||
size="small"
|
||||
>
|
||||
Submit
@ -1,6 +1,7 @@
|
|||
// Modal.js
|
||||
import React, { useEffect } from 'react';
|
||||
import { useFormik } from 'formik';
|
||||
import React, { useEffect, useMemo } from 'react';
|
||||
import { useForm } from 'react-hook-form';
|
||||
import { yupResolver } from '@hookform/resolvers/yup';
|
||||
import * as Yup from 'yup';
|
||||
import API from '../../api';
|
||||
import {
|
||||
|
|
@ -16,87 +17,82 @@ import {
|
|||
} from '@mantine/core';
|
||||
import { NETWORK_ACCESS_OPTIONS } from '../../constants';
|
||||
|
||||
const UserAgent = ({ userAgent = null, isOpen, onClose }) => {
|
||||
const formik = useFormik({
|
||||
initialValues: {
|
||||
name: '',
|
||||
user_agent: '',
|
||||
description: '',
|
||||
is_active: true,
|
||||
},
|
||||
validationSchema: Yup.object({
|
||||
name: Yup.string().required('Name is required'),
|
||||
user_agent: Yup.string().required('User-Agent is required'),
|
||||
}),
|
||||
onSubmit: async (values, { setSubmitting, resetForm }) => {
|
||||
if (userAgent?.id) {
|
||||
await API.updateUserAgent({ id: userAgent.id, ...values });
|
||||
} else {
|
||||
await API.addUserAgent(values);
|
||||
}
|
||||
const schema = Yup.object({
|
||||
name: Yup.string().required('Name is required'),
|
||||
user_agent: Yup.string().required('User-Agent is required'),
|
||||
});
|
||||
|
||||
resetForm();
|
||||
setSubmitting(false);
|
||||
onClose();
|
||||
},
|
||||
const UserAgent = ({ userAgent = null, isOpen, onClose }) => {
|
||||
const defaultValues = useMemo(
|
||||
() => ({
|
||||
name: userAgent?.name || '',
|
||||
user_agent: userAgent?.user_agent || '',
|
||||
description: userAgent?.description || '',
|
||||
is_active: userAgent?.is_active ?? true,
|
||||
}),
|
||||
[userAgent]
|
||||
);
|
||||
|
||||
const {
|
||||
register,
|
||||
handleSubmit,
|
||||
formState: { errors, isSubmitting },
|
||||
reset,
|
||||
setValue,
|
||||
watch,
|
||||
} = useForm({
|
||||
defaultValues,
|
||||
resolver: yupResolver(schema),
|
||||
});
|
||||
|
||||
useEffect(() => {
|
||||
if (userAgent) {
|
||||
formik.setValues({
|
||||
name: userAgent.name,
|
||||
user_agent: userAgent.user_agent,
|
||||
description: userAgent.description,
|
||||
is_active: userAgent.is_active,
|
||||
});
|
||||
const onSubmit = async (values) => {
|
||||
if (userAgent?.id) {
|
||||
await API.updateUserAgent({ id: userAgent.id, ...values });
|
||||
} else {
|
||||
formik.resetForm();
|
||||
await API.addUserAgent(values);
|
||||
}
|
||||
}, [userAgent]);
|
||||
|
||||
reset();
|
||||
onClose();
|
||||
};
|
||||
|
||||
useEffect(() => {
|
||||
reset(defaultValues);
|
||||
}, [defaultValues, reset]);
|
||||
|
||||
if (!isOpen) {
|
||||
return <></>;
|
||||
}
|
||||
|
||||
const isActive = watch('is_active');
|
||||
|
||||
return (
|
||||
<Modal opened={isOpen} onClose={onClose} title="User-Agent">
|
||||
<form onSubmit={formik.handleSubmit}>
|
||||
<form onSubmit={handleSubmit(onSubmit)}>
|
||||
<TextInput
|
||||
id="name"
|
||||
name="name"
|
||||
label="Name"
|
||||
value={formik.values.name}
|
||||
onChange={formik.handleChange}
|
||||
error={formik.touched.name && Boolean(formik.errors.name)}
|
||||
{...register('name')}
|
||||
error={errors.name?.message}
|
||||
/>
|
||||
|
||||
<TextInput
|
||||
id="user_agent"
|
||||
name="user_agent"
|
||||
label="User-Agent"
|
||||
value={formik.values.user_agent}
|
||||
onChange={formik.handleChange}
|
||||
error={formik.touched.user_agent && Boolean(formik.errors.user_agent)}
|
||||
{...register('user_agent')}
|
||||
error={errors.user_agent?.message}
|
||||
/>
|
||||
|
||||
<TextInput
|
||||
id="description"
|
||||
name="description"
|
||||
label="Description"
|
||||
value={formik.values.description}
|
||||
onChange={formik.handleChange}
|
||||
error={
|
||||
formik.touched.description && Boolean(formik.errors.description)
|
||||
}
|
||||
{...register('description')}
|
||||
error={errors.description?.message}
|
||||
/>
|
||||
|
||||
<Space h="md" />
|
||||
|
||||
<Checkbox
|
||||
name="is_active"
|
||||
label="Is Active"
|
||||
checked={formik.values.is_active}
|
||||
onChange={formik.handleChange}
|
||||
checked={isActive}
|
||||
onChange={(e) => setValue('is_active', e.currentTarget.checked)}
|
||||
/>
|
||||
|
||||
<Flex mih={50} gap="xs" justify="flex-end" align="flex-end">
|
||||
|
|
@ -104,7 +100,7 @@ const UserAgent = ({ userAgent = null, isOpen, onClose }) => {
|
|||
size="small"
|
||||
type="submit"
|
||||
variant="contained"
|
||||
disabled={formik.isSubmitting}
|
||||
disabled={isSubmitting}
|
||||
>
|
||||
Submit
|
||||
</Button>
@ -52,6 +52,7 @@ import {
|
|||
Select,
|
||||
NumberInput,
|
||||
Tooltip,
|
||||
Skeleton,
|
||||
} from '@mantine/core';
|
||||
import { getCoreRowModel, flexRender } from '@tanstack/react-table';
|
||||
import './table.css';
|
||||
|
|
@ -228,6 +229,7 @@ const ChannelsTable = ({ onReady }) => {
|
|||
// EPG data lookup
|
||||
const tvgsById = useEPGsStore((s) => s.tvgsById);
|
||||
const epgs = useEPGsStore((s) => s.epgs);
|
||||
const tvgsLoaded = useEPGsStore((s) => s.tvgsLoaded);
|
||||
const theme = useMantineTheme();
|
||||
const channelGroups = useChannelsStore((s) => s.channelGroups);
|
||||
const canEditChannelGroup = useChannelsStore((s) => s.canEditChannelGroup);
|
||||
|
|
@ -431,9 +433,9 @@ const ChannelsTable = ({ onReady }) => {
|
|||
});
|
||||
setAllRowIds(ids);
|
||||
|
||||
// Signal ready after first successful data fetch
|
||||
// EPG data is already loaded in initData before this component mounts
|
||||
if (!hasSignaledReady.current && onReady) {
|
||||
// Signal ready after first successful data fetch AND EPG data is loaded
|
||||
// This prevents the EPG column from showing "Not Assigned" while EPG data is still loading
|
||||
if (!hasSignaledReady.current && onReady && tvgsLoaded) {
|
||||
hasSignaledReady.current = true;
|
||||
onReady();
|
||||
}
|
||||
|
|
@ -445,6 +447,7 @@ const ChannelsTable = ({ onReady }) => {
|
|||
showDisabled,
|
||||
selectedProfileId,
|
||||
showOnlyStreamlessChannels,
|
||||
tvgsLoaded,
|
||||
]);
|
||||
|
||||
const stopPropagation = useCallback((e) => {
|
||||
|
|
@ -750,6 +753,19 @@ const ChannelsTable = ({ onReady }) => {
|
|||
setPaginationString(`${startItem} to ${endItem} of ${totalCount}`);
|
||||
}, [pagination.pageIndex, pagination.pageSize, totalCount]);
|
||||
|
||||
// Signal ready when EPG data finishes loading (if channels were already fetched)
|
||||
useEffect(() => {
|
||||
if (
|
||||
hasFetchedData.current &&
|
||||
!hasSignaledReady.current &&
|
||||
onReady &&
|
||||
tvgsLoaded
|
||||
) {
|
||||
hasSignaledReady.current = true;
|
||||
onReady();
|
||||
}
|
||||
}, [tvgsLoaded, onReady]);
|
||||
|
||||
const columns = useMemo(
|
||||
() => [
|
||||
{
|
||||
|
|
@ -834,6 +850,10 @@ const ChannelsTable = ({ onReady }) => {
|
|||
const tooltip = epgObj
|
||||
? `${epgName ? `EPG Name: ${epgName}\n` : ''}${tvgName ? `TVG Name: ${tvgName}\n` : ''}${tvgId ? `TVG-ID: ${tvgId}` : ''}`.trim()
|
||||
: '';
|
||||
|
||||
// If channel has an EPG assignment but tvgsById hasn't loaded yet, show loading
|
||||
const isEpgDataPending = epgDataId && !epgObj && !tvgsLoaded;
|
||||
|
||||
return (
|
||||
<Box
|
||||
style={{
|
||||
|
|
@ -856,6 +876,12 @@ const ChannelsTable = ({ onReady }) => {
|
|||
</Tooltip>
|
||||
) : epgObj ? (
|
||||
<span>{epgObj.name}</span>
|
||||
) : isEpgDataPending ? (
|
||||
<Skeleton
|
||||
height={14}
|
||||
width={(columnSizing.epg || 200) * 0.7}
|
||||
style={{ borderRadius: 4 }}
|
||||
/>
|
||||
) : (
|
||||
<span style={{ color: '#888' }}>Not Assigned</span>
|
||||
)}
|
||||
|
|
@ -935,7 +961,7 @@ const ChannelsTable = ({ onReady }) => {
|
|||
// Note: logos is intentionally excluded - LazyLogo components handle their own logo data
|
||||
// from the store, so we don't need to recreate columns when logos load.
|
||||
// eslint-disable-next-line react-hooks/exhaustive-deps
|
||||
[selectedProfileId, channelGroups, theme]
|
||||
[selectedProfileId, channelGroups, theme, tvgsById, epgs, tvgsLoaded]
|
||||
);
|
||||
|
||||
const renderHeaderCell = (header) => {
|
||||
|
|
@ -1380,12 +1406,13 @@ const ChannelsTable = ({ onReady }) => {
|
|||
|
||||
{/* Table or ghost empty state inside Paper */}
|
||||
<Box>
|
||||
{channelsTableLength === 0 && (
|
||||
<ChannelsTableOnboarding editChannel={editChannel} />
|
||||
)}
|
||||
{channelsTableLength === 0 &&
|
||||
Object.keys(channels).length === 0 && (
|
||||
<ChannelsTableOnboarding editChannel={editChannel} />
|
||||
)}
|
||||
</Box>
|
||||
|
||||
{channelsTableLength > 0 && (
|
||||
{(channelsTableLength > 0 || Object.keys(channels).length > 0) && (
|
||||
<Box
|
||||
style={{
|
||||
display: 'flex',
@ -303,6 +303,7 @@ export const REGION_CHOICES = [
{ value: 'tz', label: 'TZ' },
{ value: 'ua', label: 'UA' },
{ value: 'ug', label: 'UG' },
{ value: 'uk', label: 'UK' },
{ value: 'um', label: 'UM' },
{ value: 'us', label: 'US' },
{ value: 'uy', label: 'UY' },
@ -5,6 +5,7 @@ const useEPGsStore = create((set) => ({
epgs: {},
tvgs: [],
tvgsById: {},
tvgsLoaded: false,
isLoading: false,
error: null,
refreshProgress: {},
@ -36,11 +37,16 @@ const useEPGsStore = create((set) => ({
acc[tvg.id] = tvg;
return acc;
}, {}),
tvgsLoaded: true,
isLoading: false,
});
} catch (error) {
console.error('Failed to fetch tvgs:', error);
set({ error: 'Failed to load tvgs.', isLoading: false });
set({
error: 'Failed to load tvgs.',
tvgsLoaded: true,
isLoading: false,
});
}
},
@ -1,5 +1,5 @@
"""
Dispatcharr version information.
"""
__version__ = '0.15.1' # Follow semantic versioning (MAJOR.MINOR.PATCH)
__version__ = '0.16.0' # Follow semantic versioning (MAJOR.MINOR.PATCH)
__timestamp__ = None # Set during CI/CD build process