From 8cdf9a40cfbfdade712fc8fcd3d6c937991be4d0 Mon Sep 17 00:00:00 2001 From: dekzter Date: Wed, 19 Mar 2025 16:35:49 -0400 Subject: [PATCH 01/24] m3u modifications so streams are identified by hash, configurable, also streams now have channel_groups instead of a string for groups --- apps/channels/admin.py | 9 +- apps/channels/api_views.py | 20 ++-- apps/channels/forms.py | 2 +- ...channel_group_stream_last_seen_and_more.py | 44 +++++++++ .../migrations/0006_migrate_stream_groups.py | 51 ++++++++++ .../0007_remove_stream_group_name.py | 17 ++++ .../migrations/0008_stream_stream_hash.py | 18 ++++ apps/channels/models.py | 96 ++++++++++++++++--- apps/channels/serializers.py | 14 ++- apps/channels/views.py | 2 +- .../0004_m3uaccount_stream_profile.py | 20 ++++ apps/m3u/models.py | 18 ++++ apps/m3u/serializers.py | 5 +- apps/m3u/tasks.py | 23 +++-- apps/proxy/ts_proxy/views.py | 2 +- core/migrations/0009_m3u_hash_settings.py | 22 +++++ core/models.py | 5 + dispatcharr/settings.py | 13 +++ 18 files changed, 347 insertions(+), 34 deletions(-) create mode 100644 apps/channels/migrations/0005_stream_channel_group_stream_last_seen_and_more.py create mode 100644 apps/channels/migrations/0006_migrate_stream_groups.py create mode 100644 apps/channels/migrations/0007_remove_stream_group_name.py create mode 100644 apps/channels/migrations/0008_stream_stream_hash.py create mode 100644 apps/m3u/migrations/0004_m3uaccount_stream_profile.py create mode 100644 core/migrations/0009_m3u_hash_settings.py diff --git a/apps/channels/admin.py b/apps/channels/admin.py index dd7e09a1..302811af 100644 --- a/apps/channels/admin.py +++ b/apps/channels/admin.py @@ -6,13 +6,16 @@ class StreamAdmin(admin.ModelAdmin): list_display = ( 'id', # Primary Key 'name', - 'group_name', + 'channel_group', 'url', 'current_viewers', 'updated_at', ) - list_filter = ('group_name',) - search_fields = ('id', 'name', 'url', 'group_name') # Added 'id' for searching by ID + + list_filter = ('channel_group',) # Filter by 'channel_group' (foreign key) + + search_fields = ('id', 'name', 'url', 'channel_group__name') # Search by 'ChannelGroup' name + ordering = ('-updated_at',) @admin.register(Channel) diff --git a/apps/channels/api_views.py b/apps/channels/api_views.py index 9744f7f0..f6189d3e 100644 --- a/apps/channels/api_views.py +++ b/apps/channels/api_views.py @@ -23,14 +23,14 @@ class StreamPagination(PageNumberPagination): class StreamFilter(django_filters.FilterSet): name = django_filters.CharFilter(lookup_expr='icontains') - group_name = django_filters.CharFilter(lookup_expr='icontains') + channel_group_name = django_filters.CharFilter(field_name="channel_group__name", lookup_expr="icontains") m3u_account = django_filters.NumberFilter(field_name="m3u_account__id") m3u_account_name = django_filters.CharFilter(field_name="m3u_account__name", lookup_expr="icontains") m3u_account_is_active = django_filters.BooleanFilter(field_name="m3u_account__is_active") class Meta: model = Stream - fields = ['name', 'group_name', 'm3u_account', 'm3u_account_name', 'm3u_account_is_active'] + fields = ['name', 'channel_group_name', 'm3u_account', 'm3u_account_name', 'm3u_account_is_active'] # ───────────────────────────────────────────────────────── # 1) Stream API (CRUD) @@ -43,20 +43,27 @@ class StreamViewSet(viewsets.ModelViewSet): filter_backends = [DjangoFilterBackend, SearchFilter, OrderingFilter] filterset_class = StreamFilter - search_fields = ['name', 'group_name'] - ordering_fields = ['name', 'group_name'] + search_fields = ['name', 
'channel_group__name'] + ordering_fields = ['name', 'channel_group__name'] ordering = ['-name'] def get_queryset(self): qs = super().get_queryset() # Exclude streams from inactive M3U accounts qs = qs.exclude(m3u_account__is_active=False) + assigned = self.request.query_params.get('assigned') if assigned is not None: qs = qs.filter(channels__id=assigned) + unassigned = self.request.query_params.get('unassigned') if unassigned == '1': qs = qs.filter(channels__isnull=True) + + channel_group = self.request.query_params.get('channel_group') + if channel_group: + qs = qs.filter(channel_group__name=channel_group) + return qs @action(detail=False, methods=['get'], url_path='ids') @@ -75,7 +82,8 @@ class StreamViewSet(viewsets.ModelViewSet): @action(detail=False, methods=['get'], url_path='groups') def get_groups(self, request, *args, **kwargs): - group_names = Stream.objects.exclude(group_name__isnull=True).exclude(group_name="").order_by().values_list('group_name', flat=True).distinct() + # Get unique ChannelGroup names that are linked to streams + group_names = ChannelGroup.objects.filter(streams__isnull=False).order_by('name').values_list('name', flat=True).distinct() # Return the response with the list of unique group names return Response(list(group_names)) @@ -158,7 +166,7 @@ class ChannelViewSet(viewsets.ModelViewSet): if not stream_id: return Response({"error": "Missing stream_id"}, status=status.HTTP_400_BAD_REQUEST) stream = get_object_or_404(Stream, pk=stream_id) - channel_group, _ = ChannelGroup.objects.get_or_create(name=stream.group_name) + channel_group = stream.channel_group # Check if client provided a channel_number; if not, auto-assign one. provided_number = request.data.get('channel_number') diff --git a/apps/channels/forms.py b/apps/channels/forms.py index baf169af..bee073b6 100644 --- a/apps/channels/forms.py +++ b/apps/channels/forms.py @@ -42,5 +42,5 @@ class StreamForm(forms.ModelForm): 'logo_url', 'tvg_id', 'local_file', - 'group_name', + 'channel_group', ] diff --git a/apps/channels/migrations/0005_stream_channel_group_stream_last_seen_and_more.py b/apps/channels/migrations/0005_stream_channel_group_stream_last_seen_and_more.py new file mode 100644 index 00000000..61a95220 --- /dev/null +++ b/apps/channels/migrations/0005_stream_channel_group_stream_last_seen_and_more.py @@ -0,0 +1,44 @@ +# Generated by Django 5.1.6 on 2025-03-19 16:33 + +import datetime +import django.db.models.deletion +import uuid +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('dispatcharr_channels', '0004_stream_is_custom'), + ('m3u', '0003_create_custom_account'), + ] + + operations = [ + migrations.AddField( + model_name='stream', + name='channel_group', + field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='streams', to='dispatcharr_channels.channelgroup'), + ), + migrations.AddField( + model_name='stream', + name='last_seen', + field=models.DateTimeField(db_index=True, default=datetime.datetime.now), + ), + migrations.AlterField( + model_name='channel', + name='uuid', + field=models.UUIDField(db_index=True, default=uuid.uuid4, editable=False, unique=True), + ), + migrations.CreateModel( + name='ChannelGroupM3UAccount', + fields=[ + ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('enabled', models.BooleanField(default=True)), + ('channel_group', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, 
related_name='m3u_account', to='dispatcharr_channels.channelgroup')),
+                ('m3u_account', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='channel_group', to='m3u.m3uaccount')),
+            ],
+            options={
+                'unique_together': {('channel_group', 'm3u_account')},
+            },
+        ),
+    ]
diff --git a/apps/channels/migrations/0006_migrate_stream_groups.py b/apps/channels/migrations/0006_migrate_stream_groups.py
new file mode 100644
index 00000000..94fc8235
--- /dev/null
+++ b/apps/channels/migrations/0006_migrate_stream_groups.py
@@ -0,0 +1,50 @@
+# Data migration: convert the legacy Stream.group_name strings into
+# ChannelGroup foreign keys and link each group to its M3U account.
+
+from django.db import migrations
+
+def migrate_channel_group(apps, schema_editor):
+    Stream = apps.get_model('dispatcharr_channels', 'Stream')
+    ChannelGroup = apps.get_model('dispatcharr_channels', 'ChannelGroup')
+    ChannelGroupM3UAccount = apps.get_model('dispatcharr_channels', 'ChannelGroupM3UAccount')
+
+    streams_to_update = []
+    for stream in Stream.objects.all():
+        # If the stream has a group_name string, find or create the matching ChannelGroup
+        if stream.group_name:  # group_name holds the channel group string
+            channel_group_name = stream.group_name.strip()
+
+            # Try to find the ChannelGroup by name
+            channel_group, created = ChannelGroup.objects.get_or_create(name=channel_group_name)
+
+            # Set the foreign key to the found or newly created ChannelGroup
+            stream.channel_group = channel_group
+
+            streams_to_update.append(stream)
+
+            # If the stream has an M3U account, ensure the group is linked to that account
+            if stream.m3u_account:
+                ChannelGroupM3UAccount.objects.get_or_create(
+                    channel_group=channel_group,
+                    m3u_account=stream.m3u_account,
+                    enabled=True  # groups start out enabled by default
+                )
+
+    Stream.objects.bulk_update(streams_to_update, ['channel_group'])
+
+def reverse_migration(apps, schema_editor):
+    # Reverse the data change by detaching every stream from its channel group.
+    Stream = apps.get_model('dispatcharr_channels', 'Stream')
+    for stream in Stream.objects.all():
+        stream.channel_group = None
+        stream.save()
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ('dispatcharr_channels', '0005_stream_channel_group_stream_last_seen_and_more'),
+    ]
+
+    operations = [
+        migrations.RunPython(migrate_channel_group, reverse_code=reverse_migration),
+    ]
diff --git a/apps/channels/migrations/0007_remove_stream_group_name.py b/apps/channels/migrations/0007_remove_stream_group_name.py
new file mode 100644
index 00000000..3d7b567a
--- /dev/null
+++ b/apps/channels/migrations/0007_remove_stream_group_name.py
@@ -0,0 +1,17 @@
+# Generated by Django 5.1.6 on 2025-03-19 16:43
+
+from django.db import migrations
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ('dispatcharr_channels', '0006_migrate_stream_groups'),
+    ]
+
+    operations = [
+        migrations.RemoveField(
+            model_name='stream',
+            name='group_name',
+        ),
+    ]
diff --git a/apps/channels/migrations/0008_stream_stream_hash.py b/apps/channels/migrations/0008_stream_stream_hash.py
new file mode 100644
index 00000000..d8109a2b
--- /dev/null
+++ b/apps/channels/migrations/0008_stream_stream_hash.py
@@ -0,0 +1,18 @@
+# Generated by Django 5.1.6 on 2025-03-19 18:21
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ('dispatcharr_channels', '0007_remove_stream_group_name'),
+    ]
+
+    operations = [
+        migrations.AddField(
+            model_name='stream',
+            name='stream_hash',
+            field=models.CharField(help_text='Unique hash for this stream from the M3U account', max_length=255, null=True, unique=True),
+        ),
+    ]
diff --git a/apps/channels/models.py b/apps/channels/models.py
index c501d927..8706e635 100644
--- a/apps/channels/models.py
+++ b/apps/channels/models.py
@@ -6,12 +6,25 @@
 from core.models import StreamProfile, CoreSettings
 from core.utils import redis_client
 import logging
 import uuid
+from datetime import datetime
+import hashlib
+import json
 
 logger = logging.getLogger(__name__)
 
 # If you have an M3UAccount model in apps.m3u, you can still import it:
 from apps.m3u.models import M3UAccount
 
+class ChannelGroup(models.Model):
+    name = models.CharField(max_length=100, unique=True)
+
+    def related_channels(self):
+        # Channel is declared later in this module; a local import is only needed to avoid circular imports
+        return Channel.objects.filter(channel_group=self)
+
+    def __str__(self):
+        return self.name
+
 class Stream(models.Model):
     """
     Represents a single stream (e.g. from an M3U source or custom URL).
@@ -30,7 +43,13 @@ class Stream(models.Model): local_file = models.FileField(upload_to='uploads/', blank=True, null=True) current_viewers = models.PositiveIntegerField(default=0) updated_at = models.DateTimeField(auto_now=True) - group_name = models.CharField(max_length=255, blank=True, null=True) + channel_group = models.ForeignKey( + ChannelGroup, + on_delete=models.SET_NULL, + null=True, + blank=True, + related_name='streams' + ) stream_profile = models.ForeignKey( StreamProfile, null=True, @@ -42,6 +61,13 @@ class Stream(models.Model): default=False, help_text="Whether this is a user-created stream or from an M3U account" ) + stream_hash = models.CharField( + max_length=255, + null=True, + unique=True, + help_text="Unique hash for this stream from the M3U account" + ) + last_seen = models.DateTimeField(db_index=True, default=datetime.now) class Meta: # If you use m3u_account, you might do unique_together = ('name','url','m3u_account') @@ -52,6 +78,43 @@ class Stream(models.Model): def __str__(self): return self.name or self.url or f"Stream ID {self.id}" + @classmethod + def generate_hash_key(cls, stream): + # Check if the passed object is an instance or a dictionary + if isinstance(stream, dict): + # Handle dictionary case (e.g., when the input is a dict of stream data) + hash_parts = {key: stream[key] for key in CoreSettings.get_m3u_hash_key().split(",") if key in stream} + if 'm3u_account_id' in stream: + hash_parts['m3u_account_id'] = stream['m3u_account_id'] + elif isinstance(stream, Stream): + # Handle the case where the input is a Stream instance + key_parts = CoreSettings.get_m3u_hash_key().split(",") + hash_parts = {key: getattr(stream, key) for key in key_parts if hasattr(stream, key)} + if stream.m3u_account: + hash_parts['m3u_account_id'] = stream.m3u_account.id + else: + raise ValueError("stream must be either a dictionary or a Stream instance") + + # Serialize and hash the dictionary + serialized_obj = json.dumps(hash_parts, sort_keys=True) # sort_keys ensures consistent ordering + hash_object = hashlib.sha256(serialized_obj.encode()) + return hash_object.hexdigest() + + @classmethod + def update_or_create_by_hash(cls, hash_value, **fields_to_update): + try: + # Try to find the Stream object with the given hash + stream = cls.objects.get(stream_hash=hash_value) + # If it exists, update the fields + for field, value in fields_to_update.items(): + setattr(stream, field, value) + stream.save() # Save the updated object + return stream, False # False means it was updated, not created + except cls.DoesNotExist: + # If it doesn't exist, create a new object with the given hash + fields_to_update['stream_hash'] = hash_value # Make sure the hash field is set + stream = cls.objects.create(**fields_to_update) + return stream, True # True means it was created class ChannelManager(models.Manager): def active(self): @@ -95,7 +158,7 @@ class Channel(models.Model): related_name='channels' ) - uuid = models.UUIDField(default=uuid.uuid4, editable=False, unique=True) + uuid = models.UUIDField(default=uuid.uuid4, editable=False, unique=True, db_index=True) def clean(self): # Enforce unique channel_number within a given group @@ -198,16 +261,6 @@ class Channel(models.Model): if current_count > 0: redis_client.decr(profile_connections_key) -class ChannelGroup(models.Model): - name = models.CharField(max_length=100, unique=True) - - def related_channels(self): - # local import if needed to avoid cyc. 
Usually fine in a single file though - return Channel.objects.filter(channel_group=self) - - def __str__(self): - return self.name - class ChannelStream(models.Model): channel = models.ForeignKey(Channel, on_delete=models.CASCADE) stream = models.ForeignKey(Stream, on_delete=models.CASCADE) @@ -215,3 +268,22 @@ class ChannelStream(models.Model): class Meta: ordering = ['order'] # Ensure streams are retrieved in order + +class ChannelGroupM3UAccount(models.Model): + channel_group = models.ForeignKey( + ChannelGroup, + on_delete=models.CASCADE, + related_name='m3u_account' + ) + m3u_account = models.ForeignKey( + M3UAccount, + on_delete=models.CASCADE, + related_name='channel_group' + ) + enabled = models.BooleanField(default=True) + + class Meta: + unique_together = ('channel_group', 'm3u_account') + + def __str__(self): + return f"{self.channel_group.name} - {self.m3u_account.name} (Enabled: {self.enabled})" diff --git a/apps/channels/serializers.py b/apps/channels/serializers.py index b1f7e022..dd92c47a 100644 --- a/apps/channels/serializers.py +++ b/apps/channels/serializers.py @@ -1,5 +1,5 @@ from rest_framework import serializers -from .models import Stream, Channel, ChannelGroup, ChannelStream +from .models import Stream, Channel, ChannelGroup, ChannelStream, ChannelGroupM3UAccount from core.models import StreamProfile # @@ -26,9 +26,9 @@ class StreamSerializer(serializers.ModelSerializer): 'local_file', 'current_viewers', 'updated_at', - 'group_name', 'stream_profile_id', 'is_custom', + 'channel_group', ] def get_fields(self): @@ -41,7 +41,7 @@ class StreamSerializer(serializers.ModelSerializer): fields['url'].read_only = True fields['m3u_account'].read_only = True fields['tvg_id'].read_only = True - fields['group_name'].read_only = True + fields['channel_group'].read_only = True return fields @@ -146,3 +146,11 @@ class ChannelSerializer(serializers.ModelSerializer): ChannelStream.objects.create(channel=instance, stream_id=stream.id, order=index) return instance + +class ChannelGroupM3UAccountSerializer(serializers.ModelSerializer): + class Meta: + model = ChannelGroupM3UAccount + fields = ['channel_group', 'enabled'] + + # Optionally, if you only need the id of the ChannelGroup, you can customize it like this: + channel_group = serializers.PrimaryKeyRelatedField(queryset=ChannelGroup.objects.all()) diff --git a/apps/channels/views.py b/apps/channels/views.py index b28cc123..8af3fa30 100644 --- a/apps/channels/views.py +++ b/apps/channels/views.py @@ -16,7 +16,7 @@ class StreamDashboardView(View): def get(self, request, *args, **kwargs): streams = Stream.objects.values( 'id', 'name', 'url', - 'group_name', 'current_viewers' + 'channel_group', 'current_viewers' ) return JsonResponse({'data': list(streams)}, safe=False) diff --git a/apps/m3u/migrations/0004_m3uaccount_stream_profile.py b/apps/m3u/migrations/0004_m3uaccount_stream_profile.py new file mode 100644 index 00000000..65f69fcd --- /dev/null +++ b/apps/m3u/migrations/0004_m3uaccount_stream_profile.py @@ -0,0 +1,20 @@ +# Generated by Django 5.1.6 on 2025-03-19 16:33 + +import django.db.models.deletion +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('core', '0009_m3u_hash_settings'), + ('m3u', '0003_create_custom_account'), + ] + + operations = [ + migrations.AddField( + model_name='m3uaccount', + name='stream_profile', + field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='m3u_accounts', to='core.streamprofile'), + 
), + ] diff --git a/apps/m3u/models.py b/apps/m3u/models.py index 2b3a3020..773261df 100644 --- a/apps/m3u/models.py +++ b/apps/m3u/models.py @@ -3,6 +3,7 @@ from django.core.exceptions import ValidationError from core.models import UserAgent import re from django.dispatch import receiver +from apps.channels.models import StreamProfile CUSTOM_M3U_ACCOUNT_NAME="custom" @@ -59,6 +60,13 @@ class M3UAccount(models.Model): default=False, help_text="Protected - can't be deleted or modified" ) + stream_profile = models.ForeignKey( + StreamProfile, + on_delete=models.SET_NULL, + null=True, + blank=True, + related_name='m3u_accounts' + ) def __str__(self): return self.name @@ -86,6 +94,16 @@ class M3UAccount(models.Model): def get_custom_account(cls): return cls.objects.get(name=CUSTOM_M3U_ACCOUNT_NAME, locked=True) + # def get_channel_groups(self): + # return ChannelGroup.objects.filter(m3u_account__m3u_account=self) + + # def is_channel_group_enabled(self, channel_group): + # """Check if the specified ChannelGroup is enabled for this M3UAccount.""" + # return self.channel_group.filter(channel_group=channel_group, enabled=True).exists() + + # def get_enabled_streams(self): + # """Return all streams linked to this account with enabled ChannelGroups.""" + # return self.streams.filter(channel_group__in=ChannelGroup.objects.filter(m3u_account__enabled=True)) class M3UFilter(models.Model): """Defines filters for M3U accounts based on stream name or group title.""" diff --git a/apps/m3u/serializers.py b/apps/m3u/serializers.py index 391794a1..dbd635ef 100644 --- a/apps/m3u/serializers.py +++ b/apps/m3u/serializers.py @@ -1,13 +1,16 @@ from rest_framework import serializers from .models import M3UAccount, M3UFilter, ServerGroup, M3UAccountProfile from core.models import UserAgent +from apps.channels.models import ChannelGroup +from apps.channels.serializers import ChannelGroupM3UAccountSerializer class M3UFilterSerializer(serializers.ModelSerializer): """Serializer for M3U Filters""" + channel_groups = ChannelGroupM3UAccountSerializer(source='m3u_account', many=True) class Meta: model = M3UFilter - fields = ['id', 'filter_type', 'regex_pattern', 'exclude'] + fields = ['id', 'filter_type', 'regex_pattern', 'exclude', 'channel_groups'] from rest_framework import serializers from .models import M3UAccountProfile diff --git a/apps/m3u/tasks.py b/apps/m3u/tasks.py index b3de8567..5bec769b 100644 --- a/apps/m3u/tasks.py +++ b/apps/m3u/tasks.py @@ -8,9 +8,10 @@ from celery import shared_task, current_app from django.conf import settings from django.core.cache import cache from .models import M3UAccount -from apps.channels.models import Stream +from apps.channels.models import Stream, ChannelGroup, ChannelGroupM3UAccount from asgiref.sync import async_to_sync from channels.layers import get_channel_layer +from django.utils import timezone logger = logging.getLogger(__name__) @@ -184,13 +185,23 @@ def refresh_single_m3u_account(account_id): "tvg_id": current_info["tvg_id"] } try: - obj, created = Stream.objects.update_or_create( - name=current_info["name"], - url=line, + channel_group, created = ChannelGroup.objects.get_or_create(name=current_info["group_title"]) + ChannelGroupM3UAccount.objects.get_or_create( + channel_group=channel_group, m3u_account=account, - group_name=current_info["group_title"], - defaults=defaults ) + + stream_props = defaults | { + "name": current_info["name"], + "url": line, + "m3u_account": account, + "channel_group": channel_group, + "last_seen": timezone.now(), + } + + stream_hash = 
Stream.generate_hash_key(stream_props) + obj, created = Stream.update_or_create_by_hash(stream_hash, **stream_props) + if created: created_count += 1 else: diff --git a/apps/proxy/ts_proxy/views.py b/apps/proxy/ts_proxy/views.py index f7424f38..913d0391 100644 --- a/apps/proxy/ts_proxy/views.py +++ b/apps/proxy/ts_proxy/views.py @@ -101,7 +101,7 @@ def stream_ts(request, channel_id): success = proxy_server.initialize_channel(stream_url, channel_id, stream_user_agent, transcode) if proxy_server.redis_client: metadata_key = f"ts_proxy:channel:{channel_id}:metadata" - profile_value = str(stream_profile) + profile_value = stream_profile.id proxy_server.redis_client.hset(metadata_key, "profile", profile_value) if not success: return JsonResponse({'error': 'Failed to initialize channel'}, status=500) diff --git a/core/migrations/0009_m3u_hash_settings.py b/core/migrations/0009_m3u_hash_settings.py new file mode 100644 index 00000000..eab5f141 --- /dev/null +++ b/core/migrations/0009_m3u_hash_settings.py @@ -0,0 +1,22 @@ +# Generated by Django 5.1.6 on 2025-03-01 14:01 + +from django.db import migrations +from django.utils.text import slugify + +def preload_core_settings(apps, schema_editor): + CoreSettings = apps.get_model("core", "CoreSettings") + CoreSettings.objects.create( + key=slugify("M3U Hash Key"), + name="M3U Hash Key", + value="name,url,tvg_id", + ) + +class Migration(migrations.Migration): + + dependencies = [ + ('core', '0008_rename_profile_name_streamprofile_name_and_more'), + ] + + operations = [ + migrations.RunPython(preload_core_settings), + ] diff --git a/core/models.py b/core/models.py index 081537e6..9878d357 100644 --- a/core/models.py +++ b/core/models.py @@ -142,6 +142,7 @@ class StreamProfile(models.Model): DEFAULT_USER_AGENT_KEY= slugify("Default User-Agent") DEFAULT_STREAM_PROFILE_KEY = slugify("Default Stream Profile") +STREAM_HASH_KEY = slugify("M3U Hash Key") class CoreSettings(models.Model): key = models.CharField( @@ -166,3 +167,7 @@ class CoreSettings(models.Model): @classmethod def get_default_stream_profile_id(cls): return cls.objects.get(key=DEFAULT_STREAM_PROFILE_KEY).value + + @classmethod + def get_m3u_hash_key(cls): + return cls.objects.get(key=STREAM_HASH_KEY).value diff --git a/dispatcharr/settings.py b/dispatcharr/settings.py index 5cd21169..03ad0efa 100644 --- a/dispatcharr/settings.py +++ b/dispatcharr/settings.py @@ -151,6 +151,19 @@ AUTH_USER_MODEL = 'accounts.User' CELERY_BROKER_URL = os.environ.get('CELERY_BROKER_URL', 'redis://localhost:6379/0') CELERY_RESULT_BACKEND = CELERY_BROKER_URL +# Configure Redis key prefix +CELERY_RESULT_BACKEND_TRANSPORT_OPTIONS = { + 'prefix': 'celery-task:', # Set the Redis key prefix for Celery +} + +# Set TTL (Time-to-Live) for task results (in seconds) +CELERY_RESULT_EXPIRES = 3600 # 1 hour TTL for task results + +# Optionally, set visibility timeout for task retries (if using Redis) +CELERY_BROKER_TRANSPORT_OPTIONS = { + 'visibility_timeout': 3600, # Time in seconds that a task remains invisible during retries +} + CELERY_BEAT_SCHEDULE = { 'fetch-channel-statuses': { 'task': 'apps.proxy.tasks.fetch_channel_stats', From 19aa76e55d64ef59ef3728952356cb082dcc2665 Mon Sep 17 00:00:00 2001 From: dekzter Date: Wed, 19 Mar 2025 17:05:22 -0400 Subject: [PATCH 02/24] Frontend updates with new stream group changes --- frontend/src/components/forms/Stream.jsx | 31 ++-- .../src/components/tables/StreamsTable.jsx | 9 +- frontend/src/pages/Stats.jsx | 158 ++++++++++-------- 3 files changed, 115 insertions(+), 83 deletions(-) 
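
Reviewer note on the identification scheme from PATCH 01: stream identity is now a SHA-256 hash over the fields listed in the "M3U Hash Key" core setting (default "name,url,tvg_id"), instead of an update_or_create match on (name, url, m3u_account, group_name). A minimal standalone sketch of the dict branch of Stream.generate_hash_key (the hash_key parameter is an illustrative stand-in for CoreSettings.get_m3u_hash_key(); the account id is only folded in when supplied):

    import hashlib
    import json

    def generate_hash_key(stream_props, hash_key="name,url,tvg_id", m3u_account_id=None):
        # Keep only the configured identity fields that are actually present.
        parts = {key: stream_props[key] for key in hash_key.split(",") if key in stream_props}
        if m3u_account_id is not None:
            parts["m3u_account_id"] = m3u_account_id
        # sort_keys=True makes the serialization (and therefore the hash) independent of key order.
        return hashlib.sha256(json.dumps(parts, sort_keys=True).encode()).hexdigest()

    # Identical provider fields under different accounts yield distinct hashes.
    a = generate_hash_key({"name": "CNN", "url": "http://host/1.ts", "tvg_id": "cnn.us"}, m3u_account_id=1)
    b = generate_hash_key({"name": "CNN", "url": "http://host/1.ts", "tvg_id": "cnn.us"}, m3u_account_id=2)
    assert a != b
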
diff --git a/frontend/src/components/forms/Stream.jsx b/frontend/src/components/forms/Stream.jsx index 6c058345..05720b34 100644 --- a/frontend/src/components/forms/Stream.jsx +++ b/frontend/src/components/forms/Stream.jsx @@ -5,16 +5,18 @@ import * as Yup from 'yup'; import API from '../../api'; import useStreamProfilesStore from '../../store/streamProfiles'; import { Modal, TextInput, Select, Button, Flex } from '@mantine/core'; +import useChannelsStore from '../../store/channels'; const Stream = ({ stream = null, isOpen, onClose }) => { const streamProfiles = useStreamProfilesStore((state) => state.profiles); + const { channelGroups } = useChannelsStore(); const [selectedStreamProfile, setSelectedStreamProfile] = useState(''); const formik = useFormik({ initialValues: { name: '', url: '', - group_name: '', + channel_group: null, stream_profile_id: '', }, validationSchema: Yup.object({ @@ -23,6 +25,7 @@ const Stream = ({ stream = null, isOpen, onClose }) => { // stream_profile_id: Yup.string().required('Stream profile is required'), }), onSubmit: async (values, { setSubmitting, resetForm }) => { + console.log(values); if (stream?.id) { await API.updateStream({ id: stream.id, ...values }); } else { @@ -40,7 +43,7 @@ const Stream = ({ stream = null, isOpen, onClose }) => { formik.setValues({ name: stream.name, url: stream.url, - group_name: stream.group_name, + channel_group: stream.channel_group, stream_profile_id: stream.stream_profile_id, }); } else { @@ -73,13 +76,19 @@ const Stream = ({ stream = null, isOpen, onClose }) => { error={formik.errors.url} /> - { + formik.setFieldValue('channel_group', value); // Update Formik's state with the new value + }} + error={formik.errors.channel_group} + data={channelGroups.map((group) => ({ + label: group.name, + value: `${group.id}`, + }))} /> ({ channels: [], channelsByUUID: {}, - channelGroups: [], + channelGroups: {}, channelsPageSelection: [], stats: {}, activeChannels: {}, @@ -38,7 +38,13 @@ const useChannelsStore = create((set, get) => ({ set({ isLoading: true, error: null }); try { const channelGroups = await api.getChannelGroups(); - set({ channelGroups: channelGroups, isLoading: false }); + set({ + channelGroups: channelGroups.reduce((acc, group) => { + acc[group.id] = group; + return acc; + }, {}), + isLoading: false, + }); } catch (error) { console.error('Failed to fetch channel groups:', error); set({ error: 'Failed to load channel groups.', isLoading: false }); @@ -108,14 +114,16 @@ const useChannelsStore = create((set, get) => ({ addChannelGroup: (newChannelGroup) => set((state) => ({ - channelGroups: [...state.channelGroups, newChannelGroup], + channelGroups: { + ...state.channelGroups, + [newChannelGroup.id]: newChannelGroup, + }, })), updateChannelGroup: (channelGroup) => set((state) => ({ - channelGroups: state.channelGroups.map((group) => - group.id === channelGroup.id ? channelGroup : group - ), + ...state.channelGroups, + [channelGroup.id]: channelGroup, })), setChannelsPageSelection: (channelsPageSelection) => From 071efaf0173a7de3445bdf6418b345d3f042f351 Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Sat, 22 Mar 2025 11:36:28 -0500 Subject: [PATCH 18/24] Lowered max retries and timeout for redis client. 
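
The two module-level clients previously connected with max_retries=10, retry_interval=1, so an unreachable Redis could block import for roughly 10 x 1 s = 10 s per client, plus per-attempt connect timeouts. With both clients now using the defaults (max_retries=5, retry_interval=1; the PubSub default interval drops from 3 s to 1 s), the worst case is roughly 5 x 1 s = 5 s per client.
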
--- core/utils.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/core/utils.py b/core/utils.py index e61b0c14..4e472ef2 100644 --- a/core/utils.py +++ b/core/utils.py @@ -57,7 +57,7 @@ def get_redis_client(max_retries=5, retry_interval=1): logger.error(f"Unexpected error connecting to Redis: {e}") return None -def get_redis_pubsub_client(max_retries=5, retry_interval=3): +def get_redis_pubsub_client(max_retries=5, retry_interval=1): """Get Redis client optimized for PubSub operations""" retry_count = 0 while retry_count < max_retries: @@ -133,8 +133,8 @@ def execute_redis_command(redis_client, command_func, default_return=None): return default_return # Initialize the global clients with retry logic -redis_client = get_redis_client(max_retries=10, retry_interval=1) -redis_pubsub_client = get_redis_pubsub_client(max_retries=10, retry_interval=1) +redis_client = get_redis_client() +redis_pubsub_client = get_redis_pubsub_client() # Import and initialize the PubSub manager from .redis_pubsub import get_pubsub_manager From 3c3961bb3aa462fbc144674ba309ceb20db0a3e1 Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Sat, 22 Mar 2025 12:23:54 -0500 Subject: [PATCH 19/24] Bypass redis for management commands. --- apps/channels/models.py | 13 ++++++++++- core/command_utils.py | 34 +++++++++++++++++++++++++++ core/redis_pubsub.py | 52 +++++++++++++++++++++++++++++++++++++---- core/utils.py | 32 +++++++++++++++++++++---- docker/Dockerfile | 3 ++- 5 files changed, 124 insertions(+), 10 deletions(-) create mode 100644 core/command_utils.py diff --git a/apps/channels/models.py b/apps/channels/models.py index df291e09..ec95e309 100644 --- a/apps/channels/models.py +++ b/apps/channels/models.py @@ -3,7 +3,7 @@ from django.core.exceptions import ValidationError from core.models import StreamProfile from django.conf import settings from core.models import StreamProfile, CoreSettings -from core.utils import redis_client +from core.utils import redis_client, execute_redis_command import logging import uuid from datetime import datetime @@ -15,6 +15,17 @@ logger = logging.getLogger(__name__) # If you have an M3UAccount model in apps.m3u, you can still import it: from apps.m3u.models import M3UAccount +# Add fallback functions if Redis isn't available +def get_total_viewers(channel_id): + """Get viewer count from Redis or return 0 if Redis isn't available""" + if redis_client is None: + return 0 + + try: + return int(redis_client.get(f"channel:{channel_id}:viewers") or 0) + except Exception: + return 0 + class ChannelGroup(models.Model): name = models.CharField(max_length=100, unique=True) diff --git a/core/command_utils.py b/core/command_utils.py new file mode 100644 index 00000000..f6adfd88 --- /dev/null +++ b/core/command_utils.py @@ -0,0 +1,34 @@ +import sys +import os + +def is_management_command(excluded_commands=None): + """ + Detect if we're running a Django management command like migrate, collectstatic, etc. + + Args: + excluded_commands: List of commands that should still use Redis (e.g. 
runserver) + + Returns: + bool: True if we're running a management command + """ + # First check if we're in build mode + if os.environ.get("DISPATCHARR_BUILD") == "1": + return True + + if excluded_commands is None: + excluded_commands = ['runserver', 'runworker', 'daphne'] + + # Check if we're running via manage.py + if not ('manage.py' in sys.argv[0]): + return False + + # Check if we have a command argument + if len(sys.argv) > 1: + command = sys.argv[1] + # Return False if command is in excluded list - these commands DO need Redis + if command in excluded_commands: + return False + # Otherwise it's a command that should work without Redis + return True + + return False diff --git a/core/redis_pubsub.py b/core/redis_pubsub.py index 5fb57334..5d0032b0 100644 --- a/core/redis_pubsub.py +++ b/core/redis_pubsub.py @@ -10,6 +10,23 @@ from redis.exceptions import ConnectionError, TimeoutError logger = logging.getLogger(__name__) +class DummyPubSub: + """Dummy PubSub implementation when Redis isn't available""" + def __init__(self): + pass + + def subscribe(self, *args, **kwargs): + pass + + def psubscribe(self, *args, **kwargs): + pass + + def get_message(self, *args, **kwargs): + return None + + def close(self): + pass + class RedisPubSubManager: """ A robust Redis PubSub manager that handles disconnections and reconnections. @@ -23,9 +40,7 @@ class RedisPubSubManager: redis_client: An existing Redis client to use auto_reconnect: Whether to automatically reconnect on failure """ - from .utils import get_redis_client - - self.redis_client = redis_client or get_redis_client() + self.redis_client = redis_client self.pubsub = None self.subscriptions = set() self.pattern_subscriptions = set() @@ -34,6 +49,7 @@ class RedisPubSubManager: self.lock = threading.RLock() self.message_handlers = {} # Map of channels to handler functions self.message_thread = None + self.is_dummy = redis_client is None def subscribe(self, channel, handler=None): """ @@ -43,6 +59,9 @@ class RedisPubSubManager: channel: The channel to subscribe to handler: Optional function to call when messages are received """ + if self.is_dummy: + return + with self.lock: self.subscriptions.add(channel) if handler: @@ -60,6 +79,9 @@ class RedisPubSubManager: pattern: The pattern to subscribe to handler: Optional function to call when messages are received """ + if self.is_dummy: + return + with self.lock: self.pattern_subscriptions.add(pattern) if handler: @@ -80,6 +102,9 @@ class RedisPubSubManager: Returns: Number of clients that received the message """ + if self.is_dummy: + return 0 + try: if not isinstance(message, str): message = json.dumps(message) @@ -92,6 +117,10 @@ class RedisPubSubManager: """ Start listening for messages in a background thread. """ + if self.is_dummy: + logger.debug("Running with dummy Redis client - not starting listener") + return + if not self.message_thread: self._connect() self.message_thread = threading.Thread( @@ -106,6 +135,9 @@ class RedisPubSubManager: """ Stop listening and clean up resources. """ + if self.is_dummy: + return + self.running = False if self.pubsub: try: @@ -118,6 +150,10 @@ class RedisPubSubManager: """ Establish a new PubSub connection and subscribe to all channels. """ + if self.is_dummy: + self.pubsub = DummyPubSub() + return + with self.lock: # Close any existing connection if self.pubsub: @@ -144,6 +180,9 @@ class RedisPubSubManager: """ Background thread that listens for messages and handles reconnections. 
""" + if self.is_dummy: + return + consecutive_errors = 0 while self.running: @@ -218,6 +257,11 @@ def get_pubsub_manager(redis_client=None): if pubsub_manager is None: pubsub_manager = RedisPubSubManager(redis_client) - pubsub_manager.start_listening() + # Only start if redis_client is not None + if redis_client is not None: + try: + pubsub_manager.start_listening() + except Exception as e: + logger.error(f"Failed to start PubSub listener: {e}") return pubsub_manager diff --git a/core/utils.py b/core/utils.py index 4e472ef2..073c2169 100644 --- a/core/utils.py +++ b/core/utils.py @@ -8,8 +8,16 @@ from redis.exceptions import ConnectionError, TimeoutError logger = logging.getLogger(__name__) +# Import the command detector +from .command_utils import is_management_command + def get_redis_client(max_retries=5, retry_interval=1): """Get Redis client with connection validation and retry logic""" + # Skip Redis connection for management commands like collectstatic + if is_management_command(): + logger.info("Running as management command - skipping Redis initialization") + return None + retry_count = 0 while retry_count < max_retries: try: @@ -59,6 +67,11 @@ def get_redis_client(max_retries=5, retry_interval=1): def get_redis_pubsub_client(max_retries=5, retry_interval=1): """Get Redis client optimized for PubSub operations""" + # Skip Redis connection for management commands like collectstatic + if is_management_command(): + logger.info("Running as management command - skipping Redis PubSub initialization") + return None + retry_count = 0 while retry_count < max_retries: try: @@ -133,9 +146,20 @@ def execute_redis_command(redis_client, command_func, default_return=None): return default_return # Initialize the global clients with retry logic -redis_client = get_redis_client() -redis_pubsub_client = get_redis_pubsub_client() +# Skip Redis initialization if running as a management command +if is_management_command(): + redis_client = None + redis_pubsub_client = None + logger.info("Running as management command - Redis clients set to None") +else: + redis_client = get_redis_client() + redis_pubsub_client = get_redis_pubsub_client() # Import and initialize the PubSub manager -from .redis_pubsub import get_pubsub_manager -pubsub_manager = get_pubsub_manager(redis_client) \ No newline at end of file +# Skip if running as management command or if Redis client is None +if not is_management_command() and redis_client is not None: + from .redis_pubsub import get_pubsub_manager + pubsub_manager = get_pubsub_manager(redis_client) +else: + logger.info("PubSub manager not initialized (running as management command or Redis not available)") + pubsub_manager = None \ No newline at end of file diff --git a/docker/Dockerfile b/docker/Dockerfile index 54aa75e5..f6984f8b 100644 --- a/docker/Dockerfile +++ b/docker/Dockerfile @@ -3,7 +3,8 @@ FROM python:3.13-slim AS builder ENV PATH="/dispatcharrpy/bin:$PATH" \ VIRTUAL_ENV=/dispatcharrpy \ DJANGO_SETTINGS_MODULE=dispatcharr.settings \ - PYTHONUNBUFFERED=1 + PYTHONUNBUFFERED=1 \ + DISPATCHARR_BUILD=1 RUN apt-get update && \ apt-get install -y --no-install-recommends \ From e6097c4a8095bedc2ca6f306a8da3ebca5da76e6 Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Sat, 22 Mar 2025 12:32:14 -0500 Subject: [PATCH 20/24] Added folder creation and permission changes for /app/media/cached_m3u --- docker/init/03-init-dispatcharr.sh | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/docker/init/03-init-dispatcharr.sh b/docker/init/03-init-dispatcharr.sh index 
951958a2..722e916b 100644 --- a/docker/init/03-init-dispatcharr.sh +++ b/docker/init/03-init-dispatcharr.sh @@ -10,4 +10,10 @@ if [ "$(id -u)" = "0" ]; then chown -R $PUID:$PGID /app chown $PUID:www-data /app/uwsgi.sock chmod 777 /app/uwsgi.sock + + # Create and set permissions for the cached_m3u directory + mkdir -p /app/media/cached_m3u + chown -R $PUID:$PGID /app/media/cached_m3u + chmod 777 /app/media/cached_m3u + echo "Created and set permissions for cached_m3u directory" fi From 61bac361514ca3ff6b1bcca0a40ccbcc053f278f Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Sat, 22 Mar 2025 13:28:46 -0500 Subject: [PATCH 21/24] Modified dockerfile to fix automated build (hopefully) --- docker/Dockerfile | 19 +++++++++++-------- 1 file changed, 11 insertions(+), 8 deletions(-) diff --git a/docker/Dockerfile b/docker/Dockerfile index f6984f8b..0a1cf34a 100644 --- a/docker/Dockerfile +++ b/docker/Dockerfile @@ -50,32 +50,35 @@ RUN apt-get update && \ apt-get install -y --no-install-recommends \ curl \ ffmpeg \ - gnupg2 \ - gpg \ libpcre3 \ libpq-dev \ - lsb-release \ nginx \ procps \ streamlink \ - wget && \ + wget \ + gnupg2 \ + lsb-release && \ cp /app/docker/nginx.conf /etc/nginx/sites-enabled/default && \ + # Set up PostgreSQL repository using more modern method echo "=== setting up postgres ====" && \ - echo "deb http://apt.postgresql.org/pub/repos/apt/ bookworm-pgdg main" > /etc/apt/sources.list.d/pgdg.list && \ - wget --quiet -O - https://www.postgresql.org/media/keys/ACCC4CF8.asc | apt-key add - && \ + sh -c 'echo "deb http://apt.postgresql.org/pub/repos/apt $(lsb_release -cs)-pgdg main" > /etc/apt/sources.list.d/pgdg.list' && \ + wget --quiet -O - https://www.postgresql.org/media/keys/ACCC4CF8.asc | gpg --dearmor -o /usr/share/keyrings/postgresql-keyring.gpg && \ + echo "deb [signed-by=/usr/share/keyrings/postgresql-keyring.gpg] http://apt.postgresql.org/pub/repos/apt $(lsb_release -cs)-pgdg main" > /etc/apt/sources.list.d/pgdg.list && \ + # Set up Redis repository echo "=== setting up redis ===" && \ curl -fsSL https://packages.redis.io/gpg | gpg --dearmor -o /usr/share/keyrings/redis-archive-keyring.gpg && \ chmod 644 /usr/share/keyrings/redis-archive-keyring.gpg && \ echo "deb [signed-by=/usr/share/keyrings/redis-archive-keyring.gpg] https://packages.redis.io/deb $(lsb_release -cs) main" | tee /etc/apt/sources.list.d/redis.list && \ + # Update and install database packages apt-get update && \ apt-get install -y \ postgresql-14 \ postgresql-contrib-14 \ redis-server && \ - mkdir /data && \ + mkdir -p /data && \ + # Cleanup apt-get remove -y \ gnupg2 \ - gpg \ lsb-release && \ apt-get clean && \ apt-get autoremove -y && \ From fef4e556c7b52a1df1fc89e29673f0d4ed1e2fd7 Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Sat, 22 Mar 2025 14:17:47 -0500 Subject: [PATCH 22/24] Another try to fix automatic builds. --- docker/Dockerfile | 42 ++++++++++++++++++++---------------------- 1 file changed, 20 insertions(+), 22 deletions(-) diff --git a/docker/Dockerfile b/docker/Dockerfile index 0a1cf34a..be4ce21b 100644 --- a/docker/Dockerfile +++ b/docker/Dockerfile @@ -31,7 +31,7 @@ RUN apt-get update && \ pip install --no-cache-dir -r requirements.txt && \ python manage.py collectstatic --noinput && \ cd /app/frontend && \ - npm install && \ + npm install --legacy-peer-deps && \ npm run build && \ find . -maxdepth 1 ! -name '.' ! 
-name 'dist' -exec rm -rf '{}' \; @@ -46,8 +46,9 @@ ENV PATH="/dispatcharrpy/bin:$PATH" \ COPY --from=builder /dispatcharrpy /dispatcharrpy COPY --from=builder /app /app +# Install base dependencies with memory optimization RUN apt-get update && \ - apt-get install -y --no-install-recommends \ + DEBIAN_FRONTEND=noninteractive apt-get install -y --no-install-recommends \ curl \ ffmpeg \ libpcre3 \ @@ -59,33 +60,30 @@ RUN apt-get update && \ gnupg2 \ lsb-release && \ cp /app/docker/nginx.conf /etc/nginx/sites-enabled/default && \ - # Set up PostgreSQL repository using more modern method - echo "=== setting up postgres ====" && \ + apt-get clean && \ + rm -rf /var/lib/apt/lists/* + +# Set up Redis repository in a separate step +RUN curl -fsSL https://packages.redis.io/gpg | gpg --dearmor -o /usr/share/keyrings/redis-archive-keyring.gpg && \ + chmod 644 /usr/share/keyrings/redis-archive-keyring.gpg && \ + echo "deb [signed-by=/usr/share/keyrings/redis-archive-keyring.gpg] https://packages.redis.io/deb $(lsb_release -cs) main" | tee /etc/apt/sources.list.d/redis.list && \ + apt-get update && \ + DEBIAN_FRONTEND=noninteractive apt-get install -y redis-server && \ + apt-get clean && \ + rm -rf /var/lib/apt/lists/* + +# Set up PostgreSQL repository and install in a separate step +RUN echo "=== setting up postgres ====" && \ sh -c 'echo "deb http://apt.postgresql.org/pub/repos/apt $(lsb_release -cs)-pgdg main" > /etc/apt/sources.list.d/pgdg.list' && \ wget --quiet -O - https://www.postgresql.org/media/keys/ACCC4CF8.asc | gpg --dearmor -o /usr/share/keyrings/postgresql-keyring.gpg && \ echo "deb [signed-by=/usr/share/keyrings/postgresql-keyring.gpg] http://apt.postgresql.org/pub/repos/apt $(lsb_release -cs)-pgdg main" > /etc/apt/sources.list.d/pgdg.list && \ - # Set up Redis repository - echo "=== setting up redis ===" && \ - curl -fsSL https://packages.redis.io/gpg | gpg --dearmor -o /usr/share/keyrings/redis-archive-keyring.gpg && \ - chmod 644 /usr/share/keyrings/redis-archive-keyring.gpg && \ - echo "deb [signed-by=/usr/share/keyrings/redis-archive-keyring.gpg] https://packages.redis.io/deb $(lsb_release -cs) main" | tee /etc/apt/sources.list.d/redis.list && \ - # Update and install database packages apt-get update && \ - apt-get install -y \ - postgresql-14 \ - postgresql-contrib-14 \ - redis-server && \ + DEBIAN_FRONTEND=noninteractive apt-get install -y postgresql-14 postgresql-contrib-14 && \ mkdir -p /data && \ - # Cleanup - apt-get remove -y \ - gnupg2 \ - lsb-release && \ + apt-get remove -y gnupg2 lsb-release && \ apt-get clean && \ apt-get autoremove -y && \ - rm -rf \ - /tmp/* \ - /var/lib/apt/lists/* \ - /var/tmp/* + rm -rf /tmp/* /var/lib/apt/lists/* /var/tmp/* WORKDIR /app From b9cae416ecd4d2cc534fe4f0c5e6b3ec8debb681 Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Sat, 22 Mar 2025 16:40:11 -0500 Subject: [PATCH 23/24] Use dev branch for dev builds. 
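
The Dockerfile now accepts a BRANCH build argument (defaulting to main) and clones that branch instead of hardcoding main; the workflow forwards BRANCH=main or BRANCH=dev depending on the triggering ref, and build-dev.sh passes BRANCH=dev. The same argument should also work for one-off builds of any branch, e.g. with a hypothetical branch name:

    docker build --build-arg BRANCH=my-feature -t dispatcharr/dispatcharr:my-feature -f Dockerfile ..
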
--- .github/workflows/docker-build.yml | 8 ++++++-- docker/Dockerfile | 5 ++++- docker/build-dev.sh | 2 +- 3 files changed, 11 insertions(+), 4 deletions(-) diff --git a/.github/workflows/docker-build.yml b/.github/workflows/docker-build.yml index 7883ea86..46f7ee8b 100644 --- a/.github/workflows/docker-build.yml +++ b/.github/workflows/docker-build.yml @@ -27,13 +27,15 @@ jobs: username: ${{ github.actor }} password: ${{ secrets.GITHUB_TOKEN }} - - name: Determine image tag - id: set-tag + - name: Determine image tag and branch + id: set-tag-branch run: | if [[ "${{ github.ref }}" == "refs/heads/main" ]]; then echo "TAG=latest" >> $GITHUB_ENV + echo "BRANCH=main" >> $GITHUB_ENV elif [[ "${{ github.ref }}" == "refs/heads/dev" ]]; then echo "TAG=dev" >> $GITHUB_ENV + echo "BRANCH=dev" >> $GITHUB_ENV fi - name: Convert repository name to lowercase @@ -46,6 +48,8 @@ jobs: file: docker/Dockerfile push: true platforms: linux/amd64,linux/arm64 + build-args: | + BRANCH=${{ env.BRANCH }} tags: | ghcr.io/${{ env.REPO_NAME }}:${{ env.TAG }} ghcr.io/${{ env.REPO_NAME }}:${{ github.sha }} diff --git a/docker/Dockerfile b/docker/Dockerfile index be4ce21b..b833a9f7 100644 --- a/docker/Dockerfile +++ b/docker/Dockerfile @@ -1,5 +1,8 @@ FROM python:3.13-slim AS builder +# Define build argument with default value of "main" +ARG BRANCH=main + ENV PATH="/dispatcharrpy/bin:$PATH" \ VIRTUAL_ENV=/dispatcharrpy \ DJANGO_SETTINGS_MODULE=dispatcharr.settings \ @@ -24,7 +27,7 @@ RUN apt-get update && \ nodejs && \ python -m pip install virtualenv && \ virtualenv /dispatcharrpy && \ - git clone https://github.com/Dispatcharr/Dispatcharr /app && \ + git clone -b ${BRANCH} https://github.com/Dispatcharr/Dispatcharr /app && \ cd /app && \ rm -rf .git && \ cd /app && \ diff --git a/docker/build-dev.sh b/docker/build-dev.sh index 0b723671..c5c79474 100755 --- a/docker/build-dev.sh +++ b/docker/build-dev.sh @@ -1,3 +1,3 @@ #!/bin/bash -docker build -t dispatcharr/dispatcharr:dev -f Dockerfile .. +docker build --build-arg BRANCH=dev -t dispatcharr/dispatcharr:dev -f Dockerfile .. 
From edf2e360ee8b4255fed52b751deebaaec8ad7cc7 Mon Sep 17 00:00:00 2001 From: dekzter Date: Sun, 23 Mar 2025 17:55:21 -0400 Subject: [PATCH 24/24] fixed first m3u channel group load bug, added searchable selection for tvg-id --- frontend/src/WebSocket.jsx | 3 ++ frontend/src/api.js | 12 +++++++ frontend/src/components/forms/Channel.jsx | 35 +++++++++++++------ frontend/src/components/forms/M3U.jsx | 7 +++- .../src/components/forms/M3UGroupFilter.jsx | 5 ++- frontend/src/store/auth.jsx | 1 + frontend/src/store/epgs.jsx | 20 ++++++++--- 7 files changed, 66 insertions(+), 17 deletions(-) diff --git a/frontend/src/WebSocket.jsx b/frontend/src/WebSocket.jsx index 4f4838b9..0538887d 100644 --- a/frontend/src/WebSocket.jsx +++ b/frontend/src/WebSocket.jsx @@ -9,6 +9,7 @@ import useStreamsStore from './store/streams'; import { notifications } from '@mantine/notifications'; import useChannelsStore from './store/channels'; import usePlaylistsStore from './store/playlists'; +import useEPGsStore from './store/epgs'; export const WebsocketContext = createContext(false, null, () => {}); @@ -19,6 +20,7 @@ export const WebsocketProvider = ({ children }) => { const { fetchStreams } = useStreamsStore(); const { setChannelStats, fetchChannelGroups } = useChannelsStore(); const { setRefreshProgress } = usePlaylistsStore(); + const { fetchEPGData } = useEPGsStore(); const ws = useRef(null); @@ -65,6 +67,7 @@ export const WebsocketProvider = ({ children }) => { if (event.data.progress == 100) { fetchStreams(); fetchChannelGroups(); + fetchEPGData(); } setRefreshProgress(event.data.account, event.data.progress); } diff --git a/frontend/src/api.js b/frontend/src/api.js index 60403bd0..0590e2d4 100644 --- a/frontend/src/api.js +++ b/frontend/src/api.js @@ -581,6 +581,18 @@ export default class API { return retval; } + static async getEPGData() { + const response = await fetch(`${host}/api/epg/epgdata/`, { + headers: { + Authorization: `Bearer ${await API.getAuthToken()}`, + 'Content-Type': 'application/json', + }, + }); + + const retval = await response.json(); + return retval; + } + // Notice there's a duplicated "refreshPlaylist" method above; // you might want to rename or remove one if it's not needed. 
diff --git a/frontend/src/components/forms/Channel.jsx b/frontend/src/components/forms/Channel.jsx index e59c0c2b..3f778032 100644 --- a/frontend/src/components/forms/Channel.jsx +++ b/frontend/src/components/forms/Channel.jsx @@ -21,14 +21,17 @@ import { Center, Grid, Flex, + Select, } from '@mantine/core'; import { SquarePlus } from 'lucide-react'; +import useEPGsStore from '../../store/epgs'; const Channel = ({ channel = null, isOpen, onClose }) => { const channelGroups = useChannelsStore((state) => state.channelGroups); const streams = useStreamsStore((state) => state.streams); const { profiles: streamProfiles } = useStreamProfilesStore(); const { playlists } = usePlaylistsStore(); + const { tvgs } = useEPGsStore(); const [logoFile, setLogoFile] = useState(null); const [logoPreview, setLogoPreview] = useState(logo); @@ -60,7 +63,7 @@ const Channel = ({ channel = null, isOpen, onClose }) => { name: '', channel_number: '', channel_group_id: '', - stream_profile_id: null, + stream_profile_id: '0', tvg_id: '', tvg_name: '', }, @@ -74,7 +77,6 @@ const Channel = ({ channel = null, isOpen, onClose }) => { values.stream_profile_id = null; } - console.log(values); if (channel?.id) { await API.updateChannel({ id: channel.id, @@ -104,7 +106,7 @@ const Channel = ({ channel = null, isOpen, onClose }) => { name: channel.name, channel_number: channel.channel_number, channel_group_id: channel.channel_group?.id, - stream_profile_id: channel.stream_profile_id, + stream_profile_id: channel.stream_profile_id || '0', tvg_id: channel.tvg_id, tvg_name: channel.tvg_name, }); @@ -248,6 +250,8 @@ const Channel = ({ channel = null, isOpen, onClose }) => { return <>; } + console.log(streamProfiles); + return ( <> @@ -265,7 +269,7 @@ const Channel = ({ channel = null, isOpen, onClose }) => { - { ? formik.touched.channel_group_id : '' } - data={channelGroups.map((option, index) => ({ + data={Object.values(channelGroups).map((option, index) => ({ value: `${option.id}`, label: option.name, }))} @@ -296,18 +300,20 @@ const Channel = ({ channel = null, isOpen, onClose }) => { - { + formik.setFieldValue('stream_profile_id', value); // Update Formik's state with the new value + }} error={ formik.errors.stream_profile_id ? formik.touched.stream_profile_id : '' } - data={[{ value: null, label: '(use default)' }].concat( + data={[{ value: '0', label: '(use default)' }].concat( streamProfiles.map((option) => ({ value: `${option.id}`, label: option.name, @@ -339,13 +345,20 @@ const Channel = ({ channel = null, isOpen, onClose }) => { error={formik.errors.tvg_name ? 
formik.touched.tvg_name : ''} /> - { + formik.setFieldValue('tvg_id', value); // Update Formik's state with the new value + }} + error={formik.errors.tvg_id} + data={tvgs.map((tvg) => ({ + value: tvg.name, + label: tvg.tvg_id, + }))} /> { - const userAgents = useUserAgentsStore((state) => state.userAgents); + const { userAgents } = useUserAgentsStore(); + const { fetchChannelGroups } = useChannelsStore(); + const [file, setFile] = useState(null); const [profileModalOpen, setProfileModalOpen] = useState(false); const [groupFilterModalOpen, setGroupFilterModalOpen] = useState(false); @@ -59,6 +62,8 @@ const M3U = ({ playlist = null, isOpen, onClose, playlistCreated = false }) => { ...values, uploaded_file: file, }); + + await fetchChannelGroups(); } resetForm(); diff --git a/frontend/src/components/forms/M3UGroupFilter.jsx b/frontend/src/components/forms/M3UGroupFilter.jsx index 5bfe5424..51e0468f 100644 --- a/frontend/src/components/forms/M3UGroupFilter.jsx +++ b/frontend/src/components/forms/M3UGroupFilter.jsx @@ -31,7 +31,10 @@ const M3UGroupFilter = ({ playlist = null, isOpen, onClose }) => { const [groupFilter, setGroupFilter] = useState(''); useEffect(() => { - console.log(playlist.channel_groups); + if (Object.keys(channelGroups).length === 0) { + return; + } + setGroupStates( playlist.channel_groups.map((group) => ({ ...group, diff --git a/frontend/src/store/auth.jsx b/frontend/src/store/auth.jsx index f5302713..a3b3a2bb 100644 --- a/frontend/src/store/auth.jsx +++ b/frontend/src/store/auth.jsx @@ -37,6 +37,7 @@ const useAuthStore = create((set, get) => ({ useUserAgentsStore.getState().fetchUserAgents(), usePlaylistsStore.getState().fetchPlaylists(), useEPGsStore.getState().fetchEPGs(), + useEPGsStore.getState().fetchEPGData(), useStreamProfilesStore.getState().fetchProfiles(), useSettingsStore.getState().fetchSettings(), ]); diff --git a/frontend/src/store/epgs.jsx b/frontend/src/store/epgs.jsx index 14cfd623..c749c6bb 100644 --- a/frontend/src/store/epgs.jsx +++ b/frontend/src/store/epgs.jsx @@ -1,8 +1,9 @@ -import { create } from "zustand"; -import api from "../api"; +import { create } from 'zustand'; +import api from '../api'; const useEPGsStore = create((set) => ({ epgs: [], + tvgs: [], isLoading: false, error: null, @@ -12,8 +13,19 @@ const useEPGsStore = create((set) => ({ const epgs = await api.getEPGs(); set({ epgs: epgs, isLoading: false }); } catch (error) { - console.error("Failed to fetch epgs:", error); - set({ error: "Failed to load epgs.", isLoading: false }); + console.error('Failed to fetch epgs:', error); + set({ error: 'Failed to load epgs.', isLoading: false }); + } + }, + + fetchEPGData: async () => { + set({ isLoading: true, error: null }); + try { + const tvgs = await api.getEPGData(); + set({ tvgs: tvgs, isLoading: false }); + } catch (error) { + console.error('Failed to fetch tvgs:', error); + set({ error: 'Failed to load tvgs.', isLoading: false }); } },