mirror of https://github.com/Dispatcharr/Dispatcharr.git (synced 2026-01-23 02:35:14 +00:00)

commit c5e0de5d48 (parent 393324e5ee)

    logos, channel numbers, websocket regex test so we can properly test against python regex and not javascript

23 changed files with 403 additions and 121 deletions

@@ -6,6 +6,7 @@ from .api_views import (
     ChannelGroupViewSet,
     BulkDeleteStreamsAPIView,
     BulkDeleteChannelsAPIView,
+    LogoViewSet,
 )
 
 app_name = 'channels'  # for DRF routing

@@ -14,6 +15,7 @@ router = DefaultRouter()
 router.register(r'streams', StreamViewSet, basename='stream')
 router.register(r'groups', ChannelGroupViewSet, basename='channel-group')
 router.register(r'channels', ChannelViewSet, basename='channel')
+router.register(r'logos', LogoViewSet, basename='logos')
 
 urlpatterns = [
     # Bulk delete is a single APIView, not a ViewSet

@@ -3,21 +3,38 @@ from rest_framework.response import Response
 from rest_framework.views import APIView
 from rest_framework.permissions import IsAuthenticated
+from rest_framework.decorators import action
+from rest_framework.parsers import MultiPartParser, FormParser
 from drf_yasg.utils import swagger_auto_schema
 from drf_yasg import openapi
 from django.shortcuts import get_object_or_404
 from django.db import transaction
+import os, json
 
-from .models import Stream, Channel, ChannelGroup
-from .serializers import StreamSerializer, ChannelSerializer, ChannelGroupSerializer
+from .models import Stream, Channel, ChannelGroup, Logo
+from .serializers import StreamSerializer, ChannelSerializer, ChannelGroupSerializer, LogoSerializer
 from .tasks import match_epg_channels
 import django_filters
 from django_filters.rest_framework import DjangoFilterBackend
 from rest_framework.filters import SearchFilter, OrderingFilter
 from apps.epg.models import EPGData
+from django.db.models import Q
 
 from rest_framework.pagination import PageNumberPagination
 
 
+class OrInFilter(django_filters.Filter):
+    """
+    Custom filter that handles the OR condition instead of AND.
+    """
+    def filter(self, queryset, value):
+        if value:
+            # Create a Q object for each value and combine them with OR
+            query = Q()
+            for val in value.split(','):
+                query |= Q(**{self.field_name: val})
+            return queryset.filter(query)
+        return queryset
+
 class StreamPagination(PageNumberPagination):
     page_size = 25  # Default page size
     page_size_query_param = 'page_size'  # Allow clients to specify page size

@@ -25,7 +42,7 @@ class StreamPagination(PageNumberPagination):
 
 class StreamFilter(django_filters.FilterSet):
     name = django_filters.CharFilter(lookup_expr='icontains')
-    channel_group_name = django_filters.CharFilter(field_name="channel_group__name", lookup_expr="icontains")
+    channel_group_name = OrInFilter(field_name="channel_group__name", lookup_expr="icontains")
     m3u_account = django_filters.NumberFilter(field_name="m3u_account__id")
     m3u_account_name = django_filters.CharFilter(field_name="m3u_account__name", lookup_expr="icontains")
     m3u_account_is_active = django_filters.BooleanFilter(field_name="m3u_account__is_active")

@@ -64,7 +81,8 @@ class StreamViewSet(viewsets.ModelViewSet):
 
         channel_group = self.request.query_params.get('channel_group')
         if channel_group:
-            qs = qs.filter(channel_group__name=channel_group)
+            group_names = channel_group.split(',')
+            qs = qs.filter(channel_group__name__in=group_names)
 
         return qs
 
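Aside: both hunks above make a comma-separated query value behave as OR. A minimal sketch of what OrInFilter builds for a request like ?channel_group_name=News,Sports (note it uses field_name verbatim, so the Q lookups are exact matches rather than icontains):

    from django.db.models import Q

    value = "News,Sports"          # hypothetical query-param value
    query = Q()
    for val in value.split(','):
        query |= Q(**{"channel_group__name": val})
    # query == Q(channel_group__name='News') | Q(channel_group__name='Sports')
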
@@ -192,15 +210,26 @@ class ChannelViewSet(viewsets.ModelViewSet):
         if name is None:
             name = stream.name
 
+        stream_custom_props = json.loads(stream.custom_properties) if stream.custom_properties else {}
         channel_data = {
             'channel_number': channel_number,
             'name': name,
             'tvg_id': stream.tvg_id,
             'channel_group_id': channel_group.id,
-            'logo_url': stream.logo_url,
-            'streams': [stream_id]
+            'streams': [stream_id],
         }
 
+        if 'tv-chno' in stream_custom_props:
+            channel_data['channel_number'] = int(stream_custom_props['tv-chno'])
+        elif 'channel-number' in stream_custom_props:
+            channel_data['channel_number'] = int(stream_custom_props['channel-number'])
+
+        if stream.logo_url:
+            logo, _ = Logo.objects.get_or_create(url=stream.logo_url, defaults={
+                "name": stream.name or stream.tvg_id
+            })
+            channel_data["logo_id"] = logo.id
+
         # Attempt to find existing EPGs with the same tvg-id
         epgs = EPGData.objects.filter(tvg_id=stream.tvg_id)
         if epgs:

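Aside: a minimal sketch of the new channel-number resolution, assuming a stream whose M3U entry carried a tv-chno attribute stored as JSON in custom_properties:

    import json

    custom_properties = '{"tv-chno": "5"}'   # hypothetical stored value

    props = json.loads(custom_properties) if custom_properties else {}
    if 'tv-chno' in props:
        channel_number = int(props['tv-chno'])        # -> 5
    elif 'channel-number' in props:
        channel_number = int(props['channel-number'])
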
@@ -387,3 +416,29 @@ class BulkDeleteChannelsAPIView(APIView):
         channel_ids = request.data.get('channel_ids', [])
         Channel.objects.filter(id__in=channel_ids).delete()
         return Response({"message": "Channels deleted"}, status=status.HTTP_204_NO_CONTENT)
+
+class LogoViewSet(viewsets.ModelViewSet):
+    permission_classes = [IsAuthenticated]
+    queryset = Logo.objects.all()
+    serializer_class = LogoSerializer
+    parser_classes = (MultiPartParser, FormParser)
+
+    @action(detail=False, methods=['post'])
+    def upload(self, request):
+        if 'file' not in request.FILES:
+            return Response({'error': 'No file uploaded'}, status=status.HTTP_400_BAD_REQUEST)
+
+        file = request.FILES['file']
+        file_name = file.name
+        file_path = os.path.join('/data/logos', file_name)
+
+        os.makedirs(os.path.dirname(file_path), exist_ok=True)
+        with open(file_path, 'wb+') as destination:
+            for chunk in file.chunks():
+                destination.write(chunk)
+
+        logo, _ = Logo.objects.get_or_create(url=file_path, defaults={
+            "name": file_name,
+        })
+
+        return Response({'id': logo.id, 'name': logo.name, 'url': logo.url}, status=status.HTTP_201_CREATED)

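Aside: a minimal sketch of exercising the new upload action from a client; host and token are hypothetical, and the route is inferred from the router registration above plus the @action name. The multipart field must be named "file" to pass the request.FILES check:

    import requests

    host = "http://localhost:9191"   # hypothetical
    token = "<jwt>"                  # hypothetical

    with open("logo.png", "rb") as f:
        resp = requests.post(
            f"{host}/api/channels/logos/upload/",
            headers={"Authorization": f"Bearer {token}"},
            files={"file": f},       # field name checked by the view
        )
    print(resp.status_code, resp.json())  # 201, {'id': ..., 'name': ..., 'url': ...}
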
18 apps/channels/migrations/0010_stream_custom_properties.py Normal file

@@ -0,0 +1,18 @@
+# Generated by Django 5.1.6 on 2025-04-01 17:36
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ('dispatcharr_channels', '0009_remove_channel_tvg_name_channel_epg_data'),
+    ]
+
+    operations = [
+        migrations.AddField(
+            model_name='stream',
+            name='custom_properties',
+            field=models.TextField(blank=True, null=True),
+        ),
+    ]

@@ -0,0 +1,35 @@
+# Generated by Django 5.1.6 on 2025-04-01 22:14
+
+import django.db.models.deletion
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ('dispatcharr_channels', '0010_stream_custom_properties'),
+    ]
+
+    operations = [
+        migrations.CreateModel(
+            name='Logo',
+            fields=[
+                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
+                ('name', models.CharField(max_length=255)),
+                ('url', models.URLField(unique=True)),
+            ],
+        ),
+        migrations.RemoveField(
+            model_name='channel',
+            name='logo_file',
+        ),
+        migrations.RemoveField(
+            model_name='channel',
+            name='logo_url',
+        ),
+        migrations.AddField(
+            model_name='channel',
+            name='logo',
+            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='channels', to='dispatcharr_channels.logo'),
+        ),
+    ]

@@ -211,11 +211,12 @@ class ChannelManager(models.Manager):
 class Channel(models.Model):
     channel_number = models.IntegerField()
     name = models.CharField(max_length=255)
-    logo_url = models.URLField(max_length=2000, blank=True, null=True)
-    logo_file = models.ImageField(
-        upload_to='logos/',  # Will store in MEDIA_ROOT/logos
+    logo = models.ForeignKey(
+        'Logo',
+        on_delete=models.SET_NULL,
+        null=True,
         blank=True,
-        null=True
+        related_name='channels',
     )
 
     # M2M to Stream now in the same file

@@ -379,3 +380,11 @@ class ChannelGroupM3UAccount(models.Model):
 
     def __str__(self):
         return f"{self.channel_group.name} - {self.m3u_account.name} (Enabled: {self.enabled})"
+
+
+class Logo(models.Model):
+    name = models.CharField(max_length=255)
+    url = models.URLField(unique=True)
+
+    def __str__(self):
+        return self.name

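Aside: because Logo.url is unique, the get_or_create calls used throughout this commit deduplicate logos by URL. A minimal sketch, assuming a Django shell with the models above and hypothetical URLs:

    # Two streams sharing a logo URL end up pointing at one Logo row.
    logo_a, created_a = Logo.objects.get_or_create(
        url="http://example.com/logo.png", defaults={"name": "News HD"})
    logo_b, created_b = Logo.objects.get_or_create(
        url="http://example.com/logo.png", defaults={"name": "ignored"})
    assert logo_a.pk == logo_b.pk   # created_a is True, created_b is False
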
@@ -1,5 +1,5 @@
 from rest_framework import serializers
-from .models import Stream, Channel, ChannelGroup, ChannelStream, ChannelGroupM3UAccount
+from .models import Stream, Channel, ChannelGroup, ChannelStream, ChannelGroupM3UAccount, Logo
 from apps.epg.serializers import EPGDataSerializer
 from core.models import StreamProfile
 from apps.epg.models import EPGData

@@ -92,14 +92,21 @@ class ChannelSerializer(serializers.ModelSerializer):
         queryset=Stream.objects.all(), many=True, write_only=True, required=False
     )
 
+    logo = serializers.SerializerMethodField()
+    logo_id = serializers.PrimaryKeyRelatedField(
+        queryset=Logo.objects.all(),
+        source='logo',
+        allow_null=True,
+        required=False,
+        write_only=True,
+    )
+
     class Meta:
         model = Channel
         fields = [
             'id',
             'channel_number',
             'name',
-            'logo_url',
-            'logo_file',
             'channel_group',
             'channel_group_id',
             'tvg_id',

@@ -109,6 +116,8 @@ class ChannelSerializer(serializers.ModelSerializer):
             'stream_ids',
             'stream_profile_id',
             'uuid',
+            'logo',
+            'logo_id',
         ]
 
     def get_streams(self, obj):

@@ -116,6 +125,9 @@ class ChannelSerializer(serializers.ModelSerializer):
         ordered_streams = obj.streams.all().order_by('channelstream__order')
         return StreamSerializer(ordered_streams, many=True).data
 
+    def get_logo(self, obj):
+        return LogoSerializer(obj.logo).data
+
     # def get_stream_ids(self, obj):
     #     """Retrieve ordered stream IDs for GET requests."""
     #     return list(obj.streams.all().order_by('channelstream__order').values_list('id', flat=True))

@@ -136,7 +148,6 @@ class ChannelSerializer(serializers.ModelSerializer):
         # Update the actual Channel fields
         instance.channel_number = validated_data.get('channel_number', instance.channel_number)
         instance.name = validated_data.get('name', instance.name)
-        instance.logo_url = validated_data.get('logo_url', instance.logo_url)
         instance.tvg_id = validated_data.get('tvg_id', instance.tvg_id)
         instance.epg_data = validated_data.get('epg_data', None)
 
@@ -145,6 +156,8 @@ class ChannelSerializer(serializers.ModelSerializer):
             instance.channel_group = validated_data['channel_group']
         if 'stream_profile' in validated_data:
             instance.stream_profile = validated_data['stream_profile']
+        if 'logo' in validated_data:
+            instance.logo = validated_data['logo']
 
         instance.save()
 
@@ -168,3 +181,8 @@ class ChannelGroupM3UAccountSerializer(serializers.ModelSerializer):
 
     # Optionally, if you only need the id of the ChannelGroup, you can customize it like this:
     # channel_group = serializers.PrimaryKeyRelatedField(queryset=ChannelGroup.objects.all())
+
+class LogoSerializer(serializers.ModelSerializer):
+    class Meta:
+        model = Logo
+        fields = ['id', 'name', 'url']

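Aside: the serializer fields above split reads from writes: logo_id is write-only and accepts a Logo primary key, while logo is a read-only nested object produced by get_logo. A sketch of the round trip, with hypothetical ids:

    # Write: the client sends a Logo pk under "logo_id"
    payload = {'name': 'News HD', 'channel_number': 5, 'logo_id': 12}

    # Read: the response embeds the serialized Logo under "logo" instead, e.g.
    # {'id': 3, 'name': 'News HD', ..., 'logo': {'id': 12, 'name': '...', 'url': '...'}}
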
@@ -14,8 +14,6 @@ def update_channel_tvg_id_and_logo(sender, instance, action, reverse, model, pk_
     """
     Whenever streams are added to a channel:
       1) If the channel doesn't have a tvg_id, fill it from the first newly-added stream that has one.
-      2) If the channel doesn't have a logo_url, fill it from the first newly-added stream that has one.
-    This way if an M3U or EPG entry carried a logo, newly created channels automatically get that logo.
     """
     # We only care about post_add, i.e. once the new streams are fully associated
     if action == "post_add":

@@ -27,14 +25,6 @@ def update_channel_tvg_id_and_logo(sender, instance, action, reverse, model, pk_
                 instance.tvg_id = streams_with_tvg.first().tvg_id
                 instance.save(update_fields=['tvg_id'])
 
-        # --- 2) Populate channel.logo_url if empty ---
-        if not instance.logo_url:
-            # Look for newly added streams that have a nonempty logo_url
-            streams_with_logo = model.objects.filter(pk__in=pk_set).exclude(logo_url__exact='')
-            if streams_with_logo.exists():
-                instance.logo_url = streams_with_logo.first().logo_url
-                instance.save(update_fields=['logo_url'])
-
 @receiver(pre_save, sender=Stream)
 def set_default_m3u_account(sender, instance, **kwargs):
     """

@@ -5,7 +5,7 @@ import re
 
 from celery import shared_task
 from rapidfuzz import fuzz
-from sentence_transformers import SentenceTransformer, util
+from sentence_transformers import util
 from django.conf import settings
 from django.db import transaction
 
@@ -15,22 +15,10 @@ from core.models import CoreSettings
 
 from asgiref.sync import async_to_sync
 from channels.layers import get_channel_layer
+from core.apps import st_model
 
 logger = logging.getLogger(__name__)
 
-# Load the sentence-transformers model once at the module level
-SENTENCE_MODEL_NAME = "sentence-transformers/all-MiniLM-L6-v2"
-MODEL_PATH = os.path.join(settings.MEDIA_ROOT, "models", "all-MiniLM-L6-v2")
-os.makedirs(MODEL_PATH, exist_ok=True)
-
-# If not present locally, download:
-if not os.path.exists(os.path.join(MODEL_PATH, "config.json")):
-    logger.info(f"Local model not found in {MODEL_PATH}; downloading from {SENTENCE_MODEL_NAME}...")
-    st_model = SentenceTransformer(SENTENCE_MODEL_NAME, cache_folder=MODEL_PATH)
-else:
-    logger.info(f"Loading local model from {MODEL_PATH}")
-    st_model = SentenceTransformer(MODEL_PATH)
-
 # Thresholds
 BEST_FUZZY_THRESHOLD = 85
 LOWER_FUZZY_THRESHOLD = 40

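Aside: with model loading moved into CoreConfig.ready() (see the core/apps.py hunk below), task code imports the already-initialized model instead of loading its own copy. A minimal usage sketch, assuming ready() has run, as it has inside any configured Django process; note that "from core.apps import st_model" binds the value at import time, so it only works after app setup:

    from core.apps import st_model
    from sentence_transformers import util

    emb = st_model.encode(["ESPN HD", "ESPN"], convert_to_tensor=True)
    score = util.cos_sim(emb[0], emb[1])  # cosine similarity used in EPG matching
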
@@ -16,7 +16,7 @@ def generate_m3u(request):
         group_title = channel.channel_group.name if channel.channel_group else "Default"
         tvg_id = channel.tvg_id or ""
         tvg_name = channel.tvg_id or channel.name
-        tvg_logo = channel.logo_url or ""
+        tvg_logo = channel.logo.url if channel.logo else ""
        channel_number = channel.channel_number
 
         extinf_line = (

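Aside: only the source of the tvg-logo attribute changes here; it now reads from the Logo relation. A sketch of where the value lands in each generated EXTINF line, with an illustrative attribute layout (the actual format string is truncated in this diff):

    # Illustrative only; variable names are the ones computed above.
    extinf_line = (
        f'#EXTINF:-1 tvg-id="{tvg_id}" tvg-name="{tvg_name}" '
        f'tvg-logo="{tvg_logo}" group-title="{group_title}",{channel.name}\n'
    )
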
@@ -82,18 +82,18 @@ def transform_url(input_url: str, search_pattern: str, replace_pattern: str) ->
         str: The transformed URL
     """
     try:
-        logger.debug("Executing URL pattern replacement:")
-        logger.debug(f"  base URL: {input_url}")
-        logger.debug(f"  search: {search_pattern}")
+        logger.info("Executing URL pattern replacement:")
+        logger.info(f"  base URL: {input_url}")
+        logger.info(f"  search: {search_pattern}")
 
         # Handle backreferences in the replacement pattern
         safe_replace_pattern = re.sub(r'\$(\d+)', r'\\\1', replace_pattern)
-        logger.debug(f"  replace: {replace_pattern}")
-        logger.debug(f"  safe replace: {safe_replace_pattern}")
+        logger.info(f"  replace: {replace_pattern}")
+        logger.info(f"  safe replace: {safe_replace_pattern}")
 
         # Apply the transformation
         stream_url = re.sub(search_pattern, safe_replace_pattern, input_url)
-        logger.debug(f"Generated stream url: {stream_url}")
+        logger.info(f"Generated stream url: {stream_url}")
 
         return stream_url
     except Exception as e:

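Aside: the $1-to-\1 conversion above is what lets JavaScript-style replacement patterns entered in the UI run through Python's re.sub. A minimal sketch with hypothetical patterns:

    import re

    search_pattern = r"/live/(\d+)\.ts"
    replace_pattern = "/hls/$1/index.m3u8"   # JS-style backreference from the UI

    safe = re.sub(r'\$(\d+)', r'\\\1', replace_pattern)   # -> "/hls/\1/index.m3u8"
    out = re.sub(search_pattern, safe, "http://host/live/42.ts")
    print(out)  # http://host/hls/42/index.m3u8
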
21 core/apps.py

@@ -1,6 +1,27 @@
 from django.apps import AppConfig
+from django.conf import settings
+import os, logging
 
+logger = logging.getLogger(__name__)
+st_model = None
 
 class CoreConfig(AppConfig):
     default_auto_field = 'django.db.models.BigAutoField'
     name = 'core'
+
+    def ready(self):
+        global st_model
+        from sentence_transformers import SentenceTransformer
+
+        # Load the sentence-transformers model once at the module level
+        SENTENCE_MODEL_NAME = "sentence-transformers/all-MiniLM-L6-v2"
+        MODEL_PATH = os.path.join(settings.MEDIA_ROOT, "models", "all-MiniLM-L6-v2")
+        os.makedirs(MODEL_PATH, exist_ok=True)
+
+        # If not present locally, download:
+        if not os.path.exists(os.path.join(MODEL_PATH, "config.json")):
+            logger.info(f"Local model not found in {MODEL_PATH}; downloading from {SENTENCE_MODEL_NAME}...")
+            st_model = SentenceTransformer(SENTENCE_MODEL_NAME, cache_folder=MODEL_PATH)
+        else:
+            logger.info(f"Loading local model from {MODEL_PATH}")
+            st_model = SentenceTransformer(MODEL_PATH)

@@ -6,6 +6,8 @@ import threading
 from django.conf import settings
 from redis.exceptions import ConnectionError, TimeoutError
 from django.core.cache import cache
+from asgiref.sync import async_to_sync
+from channels.layers import get_channel_layer
 
 logger = logging.getLogger(__name__)
 
@@ -167,9 +169,19 @@ def release_task_lock(task_name, id):
     # Remove the lock
     redis_client.delete(lock_id)
 
+def send_websocket_event(event, success, data):
+    channel_layer = get_channel_layer()
+    async_to_sync(channel_layer.group_send)(
+        'updates',
+        {
+            'type': 'update',
+            "data": {"success": True, "type": "epg_channels"}
+        }
+    )
+
 # Initialize the global clients with retry logic
 # Skip Redis initialization if running as a management command
-if is_management_command():
+if __name__ == '__main__':
     redis_client = None
     redis_pubsub_client = None
     logger.info("Running as management command - Redis clients set to None")

@@ -1,5 +1,8 @@
 import json
 from channels.generic.websocket import AsyncWebsocketConsumer
+import re, logging
+
+logger = logging.getLogger(__name__)
 
 class MyWebSocketConsumer(AsyncWebsocketConsumer):
     async def connect(self):

@@ -12,7 +15,29 @@ class MyWebSocketConsumer(AsyncWebsocketConsumer):
 
     async def receive(self, text_data):
         data = json.loads(text_data)
         print("Received:", data)
 
+        if data["type"] == "m3u_profile_test":
+            from apps.proxy.ts_proxy.url_utils import transform_url
+
+            def replace_with_mark(match):
+                # Wrap the match in <mark> tags
+                return f"<mark>{match.group(0)}</mark>"
+
+            # Apply the transformation using the replace_with_mark function
+            try:
+                search_preview = re.sub(data["search"], replace_with_mark, data["url"])
+            except Exception as e:
+                search_preview = data["search"]
+                logger.error(f"Failed to generate replace preview: {e}")
+
+            result = transform_url(data["url"], data["search"], data["replace"])
+            await self.send(text_data=json.dumps({
+                "data": {
+                    'type': 'm3u_profile_test',
+                    'search_preview': search_preview,
+                    'result': result,
+                }
+            }))
 
     async def update(self, event):
         await self.send(text_data=json.dumps(event))

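Aside: this server-side preview is the point of the commit message: patterns are now evaluated by Python's re module, which the M3U profiles actually use at stream time, instead of being approximated with a JavaScript RegExp in the browser. A minimal sketch of one construct where the two engines disagree:

    import re

    pattern = r"(?P<chan>\d+)/index\.m3u8"   # Python named-group syntax
    re.sub(pattern, lambda m: f"<mark>{m.group(0)}</mark>",
           "http://host/42/index.m3u8")      # works in Python's re
    # JavaScript uses (?<chan>...) for named groups, so the same pattern
    # throws a SyntaxError in `new RegExp(pattern)` in the browser.
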
@@ -20,7 +20,8 @@ export const WebsocketProvider = ({ children }) => {
   const { fetchStreams } = useStreamsStore();
   const { fetchChannels, setChannelStats, fetchChannelGroups } =
     useChannelsStore();
-  const { fetchPlaylists, setRefreshProgress } = usePlaylistsStore();
+  const { fetchPlaylists, setRefreshProgress, setProfilePreview } =
+    usePlaylistsStore();
   const { fetchEPGData } = useEPGsStore();
 
   const ws = useRef(null);

@@ -95,6 +96,9 @@ export const WebsocketProvider = ({ children }) => {
         fetchEPGData();
         break;
 
+      case 'm3u_profile_test':
+        setProfilePreview(event.data.search_preview, event.data.result);
+
       default:
         console.error(`Unknown websocket event type: ${event.type}`);
         break;

@@ -108,7 +112,7 @@ export const WebsocketProvider = ({ children }) => {
     };
   }, []);
 
-  const ret = [isReady, val, ws.current?.send.bind(ws.current)];
+  const ret = [isReady, ws.current?.send.bind(ws.current), val];
 
   return (
     <WebsocketContext.Provider value={ret}>

@@ -901,4 +901,35 @@ export default class API {
     const retval = await response.json();
     return retval;
   }
+
+  static async getLogos() {
+    const response = await fetch(`${host}/api/channels/logos/`, {
+      headers: {
+        'Content-Type': 'application/json',
+        Authorization: `Bearer ${await API.getAuthToken()}`,
+      },
+    });
+
+    const retval = await response.json();
+    return retval;
+  }
+
+  static async uploadLogo(file) {
+    const formData = new FormData();
+    formData.append('file', file);
+
+    const response = await fetch(`${host}/api/channels/logos/upload/`, {
+      method: 'POST',
+      headers: {
+        Authorization: `Bearer ${await API.getAuthToken()}`,
+      },
+      body: formData,
+    });
+
+    const retval = await response.json();
+
+    useChannelsStore.getState().addLogo(retval);
+
+    return retval;
+  }
 }

@@ -39,13 +39,12 @@ const Channel = ({ channel = null, isOpen, onClose }) => {
 
   const listRef = useRef(null);
 
-  const channelGroups = useChannelsStore((state) => state.channelGroups);
+  const { channelGroups, logos } = useChannelsStore();
   const streams = useStreamsStore((state) => state.streams);
   const { profiles: streamProfiles } = useStreamProfilesStore();
   const { playlists } = usePlaylistsStore();
   const { epgs, tvgs, tvgsById } = useEPGsStore();
 
-  const [logoFile, setLogoFile] = useState(null);
   const [logoPreview, setLogoPreview] = useState(null);
   const [channelStreams, setChannelStreams] = useState([]);
   const [channelGroupModelOpen, setChannelGroupModalOpen] = useState(false);

@@ -65,13 +64,13 @@ const Channel = ({ channel = null, isOpen, onClose }) => {
     setChannelStreams(Array.from(streamSet));
   };
 
-  const handleLogoChange = (files) => {
+  const handleLogoChange = async (files) => {
     if (files.length === 1) {
       console.log(files[0]);
-      setLogoFile(files[0]);
-      setLogoPreview(URL.createObjectURL(files[0]));
+      const retval = await API.uploadLogo(files[0]);
+      setLogoPreview(retval.url);
+      formik.setFieldValue('logo_id', retval.id);
     } else {
-      setLogoFile(null);
       setLogoPreview(null);
     }
   };

@@ -84,6 +83,7 @@ const Channel = ({ channel = null, isOpen, onClose }) => {
       stream_profile_id: '0',
       tvg_id: '',
      epg_data_id: '',
+      logo_id: '',
     },
     validationSchema: Yup.object({
       name: Yup.string().required('Name is required'),

@@ -95,23 +95,24 @@ const Channel = ({ channel = null, isOpen, onClose }) => {
         values.stream_profile_id = null;
       }
 
+      if (!values.logo_id) {
+        delete values.logo_id;
+      }
+
       if (channel?.id) {
         await API.updateChannel({
           id: channel.id,
           ...values,
-          logo_file: logoFile,
           streams: channelStreams.map((stream) => stream.id),
         });
       } else {
         await API.addChannel({
           ...values,
-          logo_file: logoFile,
           streams: channelStreams.map((stream) => stream.id),
         });
       }
 
       resetForm();
-      setLogoFile(null);
       setLogoPreview(null);
       setSubmitting(false);
       setTvgFilter('');

@@ -135,6 +136,7 @@ const Channel = ({ channel = null, isOpen, onClose }) => {
           : '0',
         tvg_id: channel.tvg_id,
         epg_data_id: channel.epg_data ? `${channel.epg_data?.id}` : '',
+        logo_id: `${channel.logo?.id}`,
       });
 
       console.log(channel);

@@ -145,6 +147,14 @@ const Channel = ({ channel = null, isOpen, onClose }) => {
     }
   }, [channel, tvgsById]);
 
+  const renderLogoOption = ({ option, checked }) => {
+    return (
+      <Center style={{ width: '100%' }}>
+        <img src={logos[option.value].url} width="30" />
+      </Center>
+    );
+  };
+
   // const activeStreamsTable = useMantineReactTable({
   //   data: channelStreams,
   //   columns: useMemo(

@@ -370,15 +380,36 @@ const Channel = ({ channel = null, isOpen, onClose }) => {
           <Divider size="sm" orientation="vertical" />
 
           <Stack justify="flex-start" style={{ flex: 1 }}>
-            <TextInput
-              id="logo_url"
-              name="logo_url"
-              label="Logo (URL)"
-              value={formik.values.logo_url}
-              onChange={formik.handleChange}
-              error={formik.errors.logo_url ? formik.touched.logo_url : ''}
-              size="xs"
-            />
+            <Flex gap="sm">
+              <Select
+                id="logo_id"
+                name="logo_id"
+                label="Logo"
+                searchable
+                value={formik.values.logo_id}
+                onChange={(value) => {
+                  formik.setFieldValue('logo_id', value);
+                }}
+                error={formik.errors.logo_id ? formik.touched.logo_id : ''}
+                size="xs"
+                data={Object.values(logos).map((logo) => ({
+                  label: logo.name,
+                  value: `${logo.id}`,
+                }))}
+                renderOption={renderLogoOption}
+                comboboxProps={{ width: 75, position: 'bottom-start' }}
+              />
+              <Flex align="flex-end" style={{ marginTop: 10 }}>
+                <img
+                  src={
+                    logos[formik.values.logo_id]
+                      ? logos[formik.values.logo_id].url
+                      : logo
+                  }
+                  height="40"
+                />
+              </Flex>
+            </Flex>
 
             <Group>
               <Divider size="xs" style={{ flex: 1 }} />

@@ -389,18 +420,7 @@ const Channel = ({ channel = null, isOpen, onClose }) => {
           </Group>
 
           <Stack>
-            <Group justify="space-between">
-              <Text size="sm">Upload Logo</Text>
-              {logoPreview && (
-                <ActionIcon
-                  variant="transparent"
-                  color="red.9"
-                  onClick={handleLogoChange}
-                >
-                  <SquareX />
-                </ActionIcon>
-              )}
-            </Group>
+            <Text size="sm">Upload Logo</Text>
             <Dropzone
               onDrop={handleLogoChange}
               onReject={(files) => console.log('rejected files', files)}

@@ -412,22 +432,9 @@ const Channel = ({ channel = null, isOpen, onClose }) => {
               mih={40}
               style={{ pointerEvents: 'none' }}
             >
-              <div>
-                {logoPreview && (
-                  <Center>
-                    <img
-                      src={logoPreview || logo}
-                      alt="Selected"
-                      style={{ maxWidth: 50, height: 'auto' }}
-                    />
-                  </Center>
-                )}
-                {!logoPreview && (
-                  <Text size="sm" inline>
-                    Drag images here or click to select files
-                  </Text>
-                )}
-              </div>
+              <Text size="sm" inline>
+                Drag images here or click to select files
+              </Text>
             </Group>
           </Dropzone>
 
@@ -11,26 +11,36 @@ import {
   Text,
   Paper,
 } from '@mantine/core';
+import { useWebSocket } from '../../WebSocket';
+import usePlaylistsStore from '../../store/playlists';
+import { useDebounce } from '../../utils';
 
 const RegexFormAndView = ({ profile = null, m3u, isOpen, onClose }) => {
+  const [websocketReady, sendMessage] = useWebSocket();
+  const { profileSearchPreview, profileResult } = usePlaylistsStore();
+
   const [searchPattern, setSearchPattern] = useState('');
   const [replacePattern, setReplacePattern] = useState('');
+  const [debouncedPatterns, setDebouncedPatterns] = useState({});
 
-  let regex;
-  try {
-    regex = new RegExp(searchPattern, 'g');
-  } catch (e) {
-    regex = null;
-  }
+  useEffect(() => {
+    sendMessage(
+      JSON.stringify({
+        type: 'm3u_profile_test',
+        url: m3u.server_url,
+        search: debouncedPatterns['search'] || '',
+        replace: debouncedPatterns['replace'] || '',
+      })
+    );
+  }, [m3u, debouncedPatterns]);
 
-  const highlightedUrl = regex
-    ? m3u.server_url.replace(regex, (match) => `<mark>${match}</mark>`)
-    : m3u.server_url;
+  useEffect(() => {
+    const handler = setTimeout(() => {
+      setDebouncedPatterns({ search: searchPattern, replace: replacePattern });
+    }, 500);
 
-  const resultUrl =
-    regex && replacePattern
-      ? m3u.server_url.replace(regex, replacePattern)
-      : m3u.server_url;
+    return () => clearTimeout(handler); // Cleanup timeout on unmount or value change
+  }, [searchPattern, replacePattern]);
 
   const onSearchPatternUpdate = (e) => {
     formik.handleChange(e);

@@ -126,14 +136,14 @@ const RegexFormAndView = ({ profile = null, m3u, isOpen, onClose }) => {
           }
         />
 
-        <Flex mih={50} gap="xs" justify="flex-end" align="flex-end">
-          <Button
-            type="submit"
-            variant="contained"
-            color="primary"
-            disabled={formik.isSubmitting}
-            size="small"
-          >
+        <Flex
+          mih={50}
+          gap="xs"
+          justify="flex-end"
+          align="flex-end"
+          style={{ marginBottom: 5 }}
+        >
+          <Button type="submit" disabled={formik.isSubmitting} size="xs">
             Submit
           </Button>
         </Flex>

@@ -142,14 +152,16 @@ const RegexFormAndView = ({ profile = null, m3u, isOpen, onClose }) => {
       <Paper shadow="sm" p="md" radius="md" withBorder>
         <Text>Search</Text>
         <Text
-          dangerouslySetInnerHTML={{ __html: highlightedUrl }}
+          dangerouslySetInnerHTML={{
+            __html: profileSearchPreview || m3u.server_url,
+          }}
          sx={{ whiteSpace: 'pre-wrap', wordBreak: 'break-all' }}
         />
       </Paper>
 
-      <Paper p="md" withBorder>
+      <Paper p="md" radius="md" withBorder>
         <Text>Replace</Text>
-        <Text>{resultUrl}</Text>
+        <Text>{profileResult || m3u.server_url}</Text>
       </Paper>
     </Modal>
   );

@@ -282,7 +282,7 @@ const ChannelsTable = ({}) => {
     },
     {
       header: 'Logo',
-      accessorKey: 'logo_url',
+      accessorKey: 'logo.url',
       enableSorting: false,
       size: 55,
       mantineTableBodyCellProps: {

@@ -82,7 +82,8 @@ const StreamsTable = ({}) => {
    * Stores
    */
   const { playlists } = usePlaylistsStore();
-  const { channelGroups, channelsPageSelection } = useChannelsStore();
+  const { channelGroups, channelsPageSelection, fetchLogos } =
+    useChannelsStore();
   const channelSelectionStreams = useChannelsStore(
     (state) => state.channels[state.channelsPageSelection[0]?.id]?.streams
   );

@@ -291,6 +292,7 @@ const StreamsTable = ({}) => {
       channel_number: null,
       stream_id: stream.id,
     });
+    fetchLogos();
   };
 
   // Bulk creation: create channels from selected streams in one API call

@@ -301,6 +303,7 @@ const StreamsTable = ({}) => {
         stream_id,
       }))
     );
+    fetchLogos();
     setIsLoading(false);
   };
 
@@ -34,6 +34,7 @@ const useAuthStore = create((set, get) => ({
     await Promise.all([
       useChannelsStore.getState().fetchChannels(),
       useChannelsStore.getState().fetchChannelGroups(),
+      useChannelsStore.getState().fetchLogos(),
       useUserAgentsStore.getState().fetchUserAgents(),
       usePlaylistsStore.getState().fetchPlaylists(),
       useEPGsStore.getState().fetchEPGs(),

@@ -10,6 +10,7 @@ const useChannelsStore = create((set, get) => ({
   stats: {},
   activeChannels: {},
   activeClients: {},
+  logos: {},
   isLoading: false,
   error: null,
 
@@ -65,9 +66,13 @@ const useChannelsStore = create((set, get) => ({
 
   addChannels: (newChannels) => {
     const channelsByUUID = {};
+    const logos = {};
     const channelsByID = newChannels.reduce((acc, channel) => {
       acc[channel.id] = channel;
       channelsByUUID[channel.uuid] = channel.id;
+      if (channel.logo) {
+        logos[channel.logo.id] = channel.logo;
+      }
       return acc;
     }, {});
     return set((state) => ({

@@ -79,6 +84,10 @@ const useChannelsStore = create((set, get) => ({
         ...state.channelsByUUID,
         ...channelsByUUID,
       },
+      logos: {
+        ...state.logos,
+        ...logos,
+      },
     }));
   },
 
@@ -126,6 +135,37 @@ const useChannelsStore = create((set, get) => ({
       [channelGroup.id]: channelGroup,
     })),
 
+  fetchLogos: async () => {
+    set({ isLoading: true, error: null });
+    try {
+      const logos = await api.getLogos();
+      set({
+        logos: logos.reduce((acc, logo) => {
+          acc[logo.id] = {
+            ...logo,
+            url: logo.url.replace(/^\/data/, ''),
+          };
+          return acc;
+        }, {}),
+        isLoading: false,
+      });
+    } catch (error) {
+      console.error('Failed to fetch logos:', error);
+      set({ error: 'Failed to load logos.', isLoading: false });
+    }
+  },
+
+  addLogo: (newLogo) =>
+    set((state) => ({
+      logos: {
+        ...state.logos,
+        [newLogo.id]: {
+          ...newLogo,
+          url: newLogo.url.replace(/^\/data/, ''),
+        },
+      },
+    })),
+
   setChannelsPageSelection: (channelsPageSelection) =>
     set((state) => ({ channelsPageSelection })),
 
@@ -8,6 +8,9 @@ const usePlaylistsStore = create((set) => ({
   isLoading: false,
   error: null,
 
+  profileSearchPreview: '',
+  profileResult: '',
+
   fetchPlaylists: async () => {
     set({ isLoading: true, error: null });
     try {

@@ -79,6 +82,12 @@ const usePlaylistsStore = create((set) => ({
         refreshProgress: updatedProgress,
       };
     }),
+
+  setProfilePreview: (profileSearchPreview, profileResult) =>
+    set((state) => ({
+      profileSearchPreview,
+      profileResult,
+    })),
 }));
 
 export default usePlaylistsStore;

@@ -6,6 +6,8 @@ export default defineConfig({
   // The base URL for the build, adjust this to match your desired path
   plugins: [react()],
 
+  publicDir: '/data',
+
   server: {
     port: 9191,
 