From c5e0de5d48da76dba6a30fe693b2085fb93c5dc4 Mon Sep 17 00:00:00 2001 From: dekzter Date: Wed, 2 Apr 2025 16:27:28 -0400 Subject: [PATCH] logos, channel numbers, websocket regex test so we can properly test against python regex and not javascript --- apps/channels/api_urls.py | 2 + apps/channels/api_views.py | 67 +++++++++++-- .../0010_stream_custom_properties.py | 18 ++++ ..._logo_remove_channel_logo_file_and_more.py | 35 +++++++ apps/channels/models.py | 17 +++- apps/channels/serializers.py | 26 ++++- apps/channels/signals.py | 10 -- apps/channels/tasks.py | 16 +-- apps/output/views.py | 2 +- apps/proxy/ts_proxy/url_utils.py | 12 +-- core/apps.py | 21 ++++ core/utils.py | 14 ++- dispatcharr/consumers.py | 27 ++++- frontend/src/WebSocket.jsx | 8 +- frontend/src/api.js | 31 ++++++ frontend/src/components/forms/Channel.jsx | 99 ++++++++++--------- frontend/src/components/forms/M3UProfile.jsx | 60 ++++++----- .../src/components/tables/ChannelsTable.jsx | 2 +- .../src/components/tables/StreamsTable.jsx | 5 +- frontend/src/store/auth.jsx | 1 + frontend/src/store/channels.jsx | 40 ++++++++ frontend/src/store/playlists.jsx | 9 ++ frontend/vite.config.js | 2 + 23 files changed, 403 insertions(+), 121 deletions(-) create mode 100644 apps/channels/migrations/0010_stream_custom_properties.py create mode 100644 apps/channels/migrations/0011_logo_remove_channel_logo_file_and_more.py diff --git a/apps/channels/api_urls.py b/apps/channels/api_urls.py index b2cf387e..4be83683 100644 --- a/apps/channels/api_urls.py +++ b/apps/channels/api_urls.py @@ -6,6 +6,7 @@ from .api_views import ( ChannelGroupViewSet, BulkDeleteStreamsAPIView, BulkDeleteChannelsAPIView, + LogoViewSet, ) app_name = 'channels' # for DRF routing @@ -14,6 +15,7 @@ router = DefaultRouter() router.register(r'streams', StreamViewSet, basename='stream') router.register(r'groups', ChannelGroupViewSet, basename='channel-group') router.register(r'channels', ChannelViewSet, basename='channel') 
+router.register(r'logos', LogoViewSet, basename='logos') urlpatterns = [ # Bulk delete is a single APIView, not a ViewSet diff --git a/apps/channels/api_views.py b/apps/channels/api_views.py index 061f5bca..085111c7 100644 --- a/apps/channels/api_views.py +++ b/apps/channels/api_views.py @@ -3,21 +3,38 @@ from rest_framework.response import Response from rest_framework.views import APIView from rest_framework.permissions import IsAuthenticated from rest_framework.decorators import action +from rest_framework.parsers import MultiPartParser, FormParser from drf_yasg.utils import swagger_auto_schema from drf_yasg import openapi from django.shortcuts import get_object_or_404 from django.db import transaction +import os, json -from .models import Stream, Channel, ChannelGroup -from .serializers import StreamSerializer, ChannelSerializer, ChannelGroupSerializer +from .models import Stream, Channel, ChannelGroup, Logo +from .serializers import StreamSerializer, ChannelSerializer, ChannelGroupSerializer, LogoSerializer from .tasks import match_epg_channels import django_filters from django_filters.rest_framework import DjangoFilterBackend from rest_framework.filters import SearchFilter, OrderingFilter from apps.epg.models import EPGData +from django.db.models import Q from rest_framework.pagination import PageNumberPagination + +class OrInFilter(django_filters.Filter): + """ + Custom filter that handles the OR condition instead of AND. 
+ """ + def filter(self, queryset, value): + if value: + # Create a Q object for each value and combine them with OR + query = Q() + for val in value.split(','): + query |= Q(**{self.field_name: val}) + return queryset.filter(query) + return queryset + class StreamPagination(PageNumberPagination): page_size = 25 # Default page size page_size_query_param = 'page_size' # Allow clients to specify page size @@ -25,7 +42,7 @@ class StreamPagination(PageNumberPagination): class StreamFilter(django_filters.FilterSet): name = django_filters.CharFilter(lookup_expr='icontains') - channel_group_name = django_filters.CharFilter(field_name="channel_group__name", lookup_expr="icontains") + channel_group_name = OrInFilter(field_name="channel_group__name", lookup_expr="icontains") m3u_account = django_filters.NumberFilter(field_name="m3u_account__id") m3u_account_name = django_filters.CharFilter(field_name="m3u_account__name", lookup_expr="icontains") m3u_account_is_active = django_filters.BooleanFilter(field_name="m3u_account__is_active") @@ -64,7 +81,8 @@ class StreamViewSet(viewsets.ModelViewSet): channel_group = self.request.query_params.get('channel_group') if channel_group: - qs = qs.filter(channel_group__name=channel_group) + group_names = channel_group.split(',') + qs = qs.filter(channel_group__name__in=group_names) return qs @@ -192,15 +210,26 @@ class ChannelViewSet(viewsets.ModelViewSet): if name is None: name = stream.name + stream_custom_props = json.loads(stream.custom_properties) if stream.custom_properties else {} channel_data = { 'channel_number': channel_number, 'name': name, 'tvg_id': stream.tvg_id, 'channel_group_id': channel_group.id, - 'logo_url': stream.logo_url, - 'streams': [stream_id] + 'streams': [stream_id], } + if 'tv-chno' in stream_custom_props: + channel_data['channel_number'] = int(stream_custom_props['tv-chno']) + elif 'channel-number' in stream_custom_props: + channel_data['channel_number'] = int(stream_custom_props['channel-number']) + + if 
stream.logo_url: + logo, _ = Logo.objects.get_or_create(url=stream.logo_url, defaults={ + "name": stream.name or stream.tvg_id + }) + channel_data["logo_id"] = logo.id + # Attempt to find existing EPGs with the same tvg-id epgs = EPGData.objects.filter(tvg_id=stream.tvg_id) if epgs: @@ -387,3 +416,29 @@ class BulkDeleteChannelsAPIView(APIView): channel_ids = request.data.get('channel_ids', []) Channel.objects.filter(id__in=channel_ids).delete() return Response({"message": "Channels deleted"}, status=status.HTTP_204_NO_CONTENT) + +class LogoViewSet(viewsets.ModelViewSet): + permission_classes = [IsAuthenticated] + queryset = Logo.objects.all() + serializer_class = LogoSerializer + parser_classes = (MultiPartParser, FormParser) + + @action(detail=False, methods=['post']) + def upload(self, request): + if 'file' not in request.FILES: + return Response({'error': 'No file uploaded'}, status=status.HTTP_400_BAD_REQUEST) + + file = request.FILES['file'] + file_name = file.name + file_path = os.path.join('/data/logos', file_name) + + os.makedirs(os.path.dirname(file_path), exist_ok=True) + with open(file_path, 'wb+') as destination: + for chunk in file.chunks(): + destination.write(chunk) + + logo, _ = Logo.objects.get_or_create(url=file_path, defaults={ + "name": file_name, + }) + + return Response({'id': logo.id, 'name': logo.name, 'url': logo.url}, status=status.HTTP_201_CREATED) diff --git a/apps/channels/migrations/0010_stream_custom_properties.py b/apps/channels/migrations/0010_stream_custom_properties.py new file mode 100644 index 00000000..0c21f12f --- /dev/null +++ b/apps/channels/migrations/0010_stream_custom_properties.py @@ -0,0 +1,18 @@ +# Generated by Django 5.1.6 on 2025-04-01 17:36 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('dispatcharr_channels', '0009_remove_channel_tvg_name_channel_epg_data'), + ] + + operations = [ + migrations.AddField( + model_name='stream', + 
name='custom_properties', + field=models.TextField(blank=True, null=True), + ), + ] diff --git a/apps/channels/migrations/0011_logo_remove_channel_logo_file_and_more.py b/apps/channels/migrations/0011_logo_remove_channel_logo_file_and_more.py new file mode 100644 index 00000000..0f0db44f --- /dev/null +++ b/apps/channels/migrations/0011_logo_remove_channel_logo_file_and_more.py @@ -0,0 +1,35 @@ +# Generated by Django 5.1.6 on 2025-04-01 22:14 + +import django.db.models.deletion +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('dispatcharr_channels', '0010_stream_custom_properties'), + ] + + operations = [ + migrations.CreateModel( + name='Logo', + fields=[ + ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('name', models.CharField(max_length=255)), + ('url', models.URLField(unique=True)), + ], + ), + migrations.RemoveField( + model_name='channel', + name='logo_file', + ), + migrations.RemoveField( + model_name='channel', + name='logo_url', + ), + migrations.AddField( + model_name='channel', + name='logo', + field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='channels', to='dispatcharr_channels.logo'), + ), + ] diff --git a/apps/channels/models.py b/apps/channels/models.py index e2178804..90b9cfd3 100644 --- a/apps/channels/models.py +++ b/apps/channels/models.py @@ -211,11 +211,12 @@ class ChannelManager(models.Manager): class Channel(models.Model): channel_number = models.IntegerField() name = models.CharField(max_length=255) - logo_url = models.URLField(max_length=2000, blank=True, null=True) - logo_file = models.ImageField( - upload_to='logos/', # Will store in MEDIA_ROOT/logos + logo = models.ForeignKey( + 'Logo', + on_delete=models.SET_NULL, + null=True, blank=True, - null=True + related_name='channels', ) # M2M to Stream now in the same file @@ -379,3 +380,11 @@ class 
ChannelGroupM3UAccount(models.Model): def __str__(self): return f"{self.channel_group.name} - {self.m3u_account.name} (Enabled: {self.enabled})" + + +class Logo(models.Model): + name = models.CharField(max_length=255) + url = models.URLField(unique=True) + + def __str__(self): + return self.name diff --git a/apps/channels/serializers.py b/apps/channels/serializers.py index a7d82358..91e2ce89 100644 --- a/apps/channels/serializers.py +++ b/apps/channels/serializers.py @@ -1,5 +1,5 @@ from rest_framework import serializers -from .models import Stream, Channel, ChannelGroup, ChannelStream, ChannelGroupM3UAccount +from .models import Stream, Channel, ChannelGroup, ChannelStream, ChannelGroupM3UAccount, Logo from apps.epg.serializers import EPGDataSerializer from core.models import StreamProfile from apps.epg.models import EPGData @@ -92,14 +92,21 @@ class ChannelSerializer(serializers.ModelSerializer): queryset=Stream.objects.all(), many=True, write_only=True, required=False ) + logo = serializers.SerializerMethodField() + logo_id = serializers.PrimaryKeyRelatedField( + queryset=Logo.objects.all(), + source='logo', + allow_null=True, + required=False, + write_only=True, + ) + class Meta: model = Channel fields = [ 'id', 'channel_number', 'name', - 'logo_url', - 'logo_file', 'channel_group', 'channel_group_id', 'tvg_id', @@ -109,6 +116,8 @@ class ChannelSerializer(serializers.ModelSerializer): 'stream_ids', 'stream_profile_id', 'uuid', + 'logo', + 'logo_id', ] def get_streams(self, obj): @@ -116,6 +125,9 @@ class ChannelSerializer(serializers.ModelSerializer): ordered_streams = obj.streams.all().order_by('channelstream__order') return StreamSerializer(ordered_streams, many=True).data + def get_logo(self, obj): + return LogoSerializer(obj.logo).data + # def get_stream_ids(self, obj): # """Retrieve ordered stream IDs for GET requests.""" # return list(obj.streams.all().order_by('channelstream__order').values_list('id', flat=True)) @@ -136,7 +148,6 @@ class 
ChannelSerializer(serializers.ModelSerializer): # Update the actual Channel fields instance.channel_number = validated_data.get('channel_number', instance.channel_number) instance.name = validated_data.get('name', instance.name) - instance.logo_url = validated_data.get('logo_url', instance.logo_url) instance.tvg_id = validated_data.get('tvg_id', instance.tvg_id) instance.epg_data = validated_data.get('epg_data', None) @@ -145,6 +156,8 @@ class ChannelSerializer(serializers.ModelSerializer): instance.channel_group = validated_data['channel_group'] if 'stream_profile' in validated_data: instance.stream_profile = validated_data['stream_profile'] + if 'logo' in validated_data: + instance.logo = validated_data['logo'] instance.save() @@ -168,3 +181,8 @@ class ChannelGroupM3UAccountSerializer(serializers.ModelSerializer): # Optionally, if you only need the id of the ChannelGroup, you can customize it like this: # channel_group = serializers.PrimaryKeyRelatedField(queryset=ChannelGroup.objects.all()) + +class LogoSerializer(serializers.ModelSerializer): + class Meta: + model = Logo + fields = ['id', 'name', 'url'] diff --git a/apps/channels/signals.py b/apps/channels/signals.py index c5379870..9e23086f 100644 --- a/apps/channels/signals.py +++ b/apps/channels/signals.py @@ -14,8 +14,6 @@ def update_channel_tvg_id_and_logo(sender, instance, action, reverse, model, pk_ """ Whenever streams are added to a channel: 1) If the channel doesn't have a tvg_id, fill it from the first newly-added stream that has one. - 2) If the channel doesn't have a logo_url, fill it from the first newly-added stream that has one. - This way if an M3U or EPG entry carried a logo, newly created channels automatically get that logo. """ # We only care about post_add, i.e. 
once the new streams are fully associated if action == "post_add": @@ -27,14 +25,6 @@ def update_channel_tvg_id_and_logo(sender, instance, action, reverse, model, pk_ instance.tvg_id = streams_with_tvg.first().tvg_id instance.save(update_fields=['tvg_id']) - # --- 2) Populate channel.logo_url if empty --- - if not instance.logo_url: - # Look for newly added streams that have a nonempty logo_url - streams_with_logo = model.objects.filter(pk__in=pk_set).exclude(logo_url__exact='') - if streams_with_logo.exists(): - instance.logo_url = streams_with_logo.first().logo_url - instance.save(update_fields=['logo_url']) - @receiver(pre_save, sender=Stream) def set_default_m3u_account(sender, instance, **kwargs): """ diff --git a/apps/channels/tasks.py b/apps/channels/tasks.py index d5cdb2c8..b1e9bab1 100644 --- a/apps/channels/tasks.py +++ b/apps/channels/tasks.py @@ -5,7 +5,7 @@ import re from celery import shared_task from rapidfuzz import fuzz -from sentence_transformers import SentenceTransformer, util +from sentence_transformers import util from django.conf import settings from django.db import transaction @@ -15,22 +15,10 @@ from core.models import CoreSettings from asgiref.sync import async_to_sync from channels.layers import get_channel_layer +from core.apps import st_model logger = logging.getLogger(__name__) -# Load the sentence-transformers model once at the module level -SENTENCE_MODEL_NAME = "sentence-transformers/all-MiniLM-L6-v2" -MODEL_PATH = os.path.join(settings.MEDIA_ROOT, "models", "all-MiniLM-L6-v2") -os.makedirs(MODEL_PATH, exist_ok=True) - -# If not present locally, download: -if not os.path.exists(os.path.join(MODEL_PATH, "config.json")): - logger.info(f"Local model not found in {MODEL_PATH}; downloading from {SENTENCE_MODEL_NAME}...") - st_model = SentenceTransformer(SENTENCE_MODEL_NAME, cache_folder=MODEL_PATH) -else: - logger.info(f"Loading local model from {MODEL_PATH}") - st_model = SentenceTransformer(MODEL_PATH) - # Thresholds 
BEST_FUZZY_THRESHOLD = 85 LOWER_FUZZY_THRESHOLD = 40 diff --git a/apps/output/views.py b/apps/output/views.py index 1881bf8d..717dd614 100644 --- a/apps/output/views.py +++ b/apps/output/views.py @@ -16,7 +16,7 @@ def generate_m3u(request): group_title = channel.channel_group.name if channel.channel_group else "Default" tvg_id = channel.tvg_id or "" tvg_name = channel.tvg_id or channel.name - tvg_logo = channel.logo_url or "" + tvg_logo = channel.logo.url if channel.logo else "" channel_number = channel.channel_number extinf_line = ( diff --git a/apps/proxy/ts_proxy/url_utils.py b/apps/proxy/ts_proxy/url_utils.py index 04440c03..641a794e 100644 --- a/apps/proxy/ts_proxy/url_utils.py +++ b/apps/proxy/ts_proxy/url_utils.py @@ -82,18 +82,18 @@ def transform_url(input_url: str, search_pattern: str, replace_pattern: str) -> str: The transformed URL """ try: - logger.debug("Executing URL pattern replacement:") - logger.debug(f" base URL: {input_url}") - logger.debug(f" search: {search_pattern}") + logger.info("Executing URL pattern replacement:") + logger.info(f" base URL: {input_url}") + logger.info(f" search: {search_pattern}") # Handle backreferences in the replacement pattern safe_replace_pattern = re.sub(r'\$(\d+)', r'\\\1', replace_pattern) - logger.debug(f" replace: {replace_pattern}") - logger.debug(f" safe replace: {safe_replace_pattern}") + logger.info(f" replace: {replace_pattern}") + logger.info(f" safe replace: {safe_replace_pattern}") # Apply the transformation stream_url = re.sub(search_pattern, safe_replace_pattern, input_url) - logger.debug(f"Generated stream url: {stream_url}") + logger.info(f"Generated stream url: {stream_url}") return stream_url except Exception as e: diff --git a/core/apps.py b/core/apps.py index 8115ae60..0d23849c 100644 --- a/core/apps.py +++ b/core/apps.py @@ -1,6 +1,27 @@ from django.apps import AppConfig +from django.conf import settings +import os, logging +logger = logging.getLogger(__name__) +st_model = None class 
CoreConfig(AppConfig): default_auto_field = 'django.db.models.BigAutoField' name = 'core' + + def ready(self): + global st_model + from sentence_transformers import SentenceTransformer + + # Load the sentence-transformers model once at the module level + SENTENCE_MODEL_NAME = "sentence-transformers/all-MiniLM-L6-v2" + MODEL_PATH = os.path.join(settings.MEDIA_ROOT, "models", "all-MiniLM-L6-v2") + os.makedirs(MODEL_PATH, exist_ok=True) + + # If not present locally, download: + if not os.path.exists(os.path.join(MODEL_PATH, "config.json")): + logger.info(f"Local model not found in {MODEL_PATH}; downloading from {SENTENCE_MODEL_NAME}...") + st_model = SentenceTransformer(SENTENCE_MODEL_NAME, cache_folder=MODEL_PATH) + else: + logger.info(f"Loading local model from {MODEL_PATH}") + st_model = SentenceTransformer(MODEL_PATH) diff --git a/core/utils.py b/core/utils.py index 2132db3c..2020c264 100644 --- a/core/utils.py +++ b/core/utils.py @@ -6,6 +6,8 @@ import threading from django.conf import settings from redis.exceptions import ConnectionError, TimeoutError from django.core.cache import cache +from asgiref.sync import async_to_sync +from channels.layers import get_channel_layer logger = logging.getLogger(__name__) @@ -167,9 +169,19 @@ def release_task_lock(task_name, id): # Remove the lock redis_client.delete(lock_id) +def send_websocket_event(event, success, data): + channel_layer = get_channel_layer() + async_to_sync(channel_layer.group_send)( + 'updates', + { + 'type': 'update', + "data": {"success": success, "type": event, "data": data} + } + ) + # Initialize the global clients with retry logic # Skip Redis initialization if running as a management command -if is_management_command(): +if is_management_command(): redis_client = None redis_pubsub_client = None logger.info("Running as management command - Redis clients set to None") diff --git a/dispatcharr/consumers.py b/dispatcharr/consumers.py index 356422d7..8d92c4fa 100644 --- a/dispatcharr/consumers.py +++
b/dispatcharr/consumers.py @@ -1,5 +1,8 @@ import json from channels.generic.websocket import AsyncWebsocketConsumer +import re, logging + +logger = logging.getLogger(__name__) class MyWebSocketConsumer(AsyncWebsocketConsumer): async def connect(self): @@ -12,7 +15,29 @@ class MyWebSocketConsumer(AsyncWebsocketConsumer): async def receive(self, text_data): data = json.loads(text_data) - print("Received:", data) + + if data["type"] == "m3u_profile_test": + from apps.proxy.ts_proxy.url_utils import transform_url + + def replace_with_mark(match): + # Wrap the match in <mark> tags + return f"<mark>{match.group(0)}</mark>" + + # Apply the transformation using the replace_with_mark function + try: + search_preview = re.sub(data["search"], replace_with_mark, data["url"]) + except Exception as e: + search_preview = data["search"] + logger.error(f"Failed to generate replace preview: {e}") + + result = transform_url(data["url"], data["search"], data["replace"]) + await self.send(text_data=json.dumps({ + "data": { + 'type': 'm3u_profile_test', + 'search_preview': search_preview, + 'result': result, + } + })) async def update(self, event): await self.send(text_data=json.dumps(event)) diff --git a/frontend/src/WebSocket.jsx b/frontend/src/WebSocket.jsx index b401f9a5..8dded3fc 100644 --- a/frontend/src/WebSocket.jsx +++ b/frontend/src/WebSocket.jsx @@ -20,7 +20,8 @@ export const WebsocketProvider = ({ children }) => { const { fetchStreams } = useStreamsStore(); const { fetchChannels, setChannelStats, fetchChannelGroups } = useChannelsStore(); - const { fetchPlaylists, setRefreshProgress } = usePlaylistsStore(); + const { fetchPlaylists, setRefreshProgress, setProfilePreview } = + usePlaylistsStore(); const { fetchEPGData } = useEPGsStore(); const ws = useRef(null); @@ -95,6 +96,10 @@ export const WebsocketProvider = ({ children }) => { fetchEPGData(); break; + case 'm3u_profile_test': + setProfilePreview(event.data.search_preview, event.data.result); + break; + default: console.error(`Unknown websocket
event type: ${event.type}`); break; @@ -108,7 +112,7 @@ export const WebsocketProvider = ({ children }) => { }; }, []); - const ret = [isReady, val, ws.current?.send.bind(ws.current)]; + const ret = [isReady, ws.current?.send.bind(ws.current), val]; return ( diff --git a/frontend/src/api.js b/frontend/src/api.js index 369c49c2..9a1f1689 100644 --- a/frontend/src/api.js +++ b/frontend/src/api.js @@ -901,4 +901,35 @@ export default class API { const retval = await response.json(); return retval; } + + static async getLogos() { + const response = await fetch(`${host}/api/channels/logos/`, { + headers: { + 'Content-Type': 'application/json', + Authorization: `Bearer ${await API.getAuthToken()}`, + }, + }); + + const retval = await response.json(); + return retval; + } + + static async uploadLogo(file) { + const formData = new FormData(); + formData.append('file', file); + + const response = await fetch(`${host}/api/channels/logos/upload/`, { + method: 'POST', + headers: { + Authorization: `Bearer ${await API.getAuthToken()}`, + }, + body: formData, + }); + + const retval = await response.json(); + + useChannelsStore.getState().addLogo(retval); + + return retval; + } } diff --git a/frontend/src/components/forms/Channel.jsx b/frontend/src/components/forms/Channel.jsx index ff0745f2..d40aacf1 100644 --- a/frontend/src/components/forms/Channel.jsx +++ b/frontend/src/components/forms/Channel.jsx @@ -39,13 +39,12 @@ const Channel = ({ channel = null, isOpen, onClose }) => { const listRef = useRef(null); - const channelGroups = useChannelsStore((state) => state.channelGroups); + const { channelGroups, logos } = useChannelsStore(); const streams = useStreamsStore((state) => state.streams); const { profiles: streamProfiles } = useStreamProfilesStore(); const { playlists } = usePlaylistsStore(); const { epgs, tvgs, tvgsById } = useEPGsStore(); - const [logoFile, setLogoFile] = useState(null); const [logoPreview, setLogoPreview] = useState(null); const [channelStreams, 
setChannelStreams] = useState([]); const [channelGroupModelOpen, setChannelGroupModalOpen] = useState(false); @@ -65,13 +64,13 @@ const Channel = ({ channel = null, isOpen, onClose }) => { setChannelStreams(Array.from(streamSet)); }; - const handleLogoChange = (files) => { + const handleLogoChange = async (files) => { if (files.length === 1) { console.log(files[0]); - setLogoFile(files[0]); - setLogoPreview(URL.createObjectURL(files[0])); + const retval = await API.uploadLogo(files[0]); + setLogoPreview(retval.url); + formik.setFieldValue('logo_id', retval.id); } else { - setLogoFile(null); setLogoPreview(null); } }; @@ -84,6 +83,7 @@ const Channel = ({ channel = null, isOpen, onClose }) => { stream_profile_id: '0', tvg_id: '', epg_data_id: '', + logo_id: '', }, validationSchema: Yup.object({ name: Yup.string().required('Name is required'), @@ -95,23 +95,24 @@ const Channel = ({ channel = null, isOpen, onClose }) => { values.stream_profile_id = null; } + if (!values.logo_id) { + delete values.logo_id; + } + if (channel?.id) { await API.updateChannel({ id: channel.id, ...values, - logo_file: logoFile, streams: channelStreams.map((stream) => stream.id), }); } else { await API.addChannel({ ...values, - logo_file: logoFile, streams: channelStreams.map((stream) => stream.id), }); } resetForm(); - setLogoFile(null); setLogoPreview(null); setSubmitting(false); setTvgFilter(''); @@ -135,6 +136,7 @@ const Channel = ({ channel = null, isOpen, onClose }) => { : '0', tvg_id: channel.tvg_id, epg_data_id: channel.epg_data ? `${channel.epg_data?.id}` : '', + logo_id: `${channel.logo?.id}`, }); console.log(channel); @@ -145,6 +147,14 @@ const Channel = ({ channel = null, isOpen, onClose }) => { } }, [channel, tvgsById]); + const renderLogoOption = ({ option, checked }) => { + return ( +
+ +
+ ); + }; + // const activeStreamsTable = useMantineReactTable({ // data: channelStreams, // columns: useMemo( @@ -370,15 +380,36 @@ const Channel = ({ channel = null, isOpen, onClose }) => { - + +