From e4cb4bd1d2013a1790d83639fbbf9b00a2b0e9e3 Mon Sep 17 00:00:00 2001 From: GitHub Actions Date: Fri, 25 Apr 2025 17:47:25 +0000 Subject: [PATCH 01/42] Increment build number to 28 [skip ci] --- version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/version.py b/version.py index 869719f9..a8aae639 100644 --- a/version.py +++ b/version.py @@ -2,4 +2,4 @@ Dispatcharr version information. """ __version__ = '0.3.3' # Follow semantic versioning (MAJOR.MINOR.PATCH) -__build__ = '27' # Auto-incremented on builds +__build__ = '28' # Auto-incremented on builds From 575d696c35303ed90c0e0a7973b8d1a4b62220a4 Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Fri, 25 Apr 2025 15:32:34 -0500 Subject: [PATCH 02/42] Changed proxy to use uwsgi socket and increased client_max_body_size to 128MB. --- docker/nginx.conf | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/docker/nginx.conf b/docker/nginx.conf index 06fa742a..8382b73f 100644 --- a/docker/nginx.conf +++ b/docker/nginx.conf @@ -7,6 +7,7 @@ server { proxy_connect_timeout 75; proxy_send_timeout 300; proxy_read_timeout 300; + client_max_body_size 128M; # Allow file uploads up to 128MB # Serve Django via uWSGI location / { @@ -84,13 +85,15 @@ server { # Route TS proxy requests to the dedicated instance location /proxy/ { - proxy_pass http://127.0.0.1:5656; + include uwsgi_params; + uwsgi_pass unix:/app/uwsgi.sock; + uwsgi_buffering off; # Explicitly disable uwsgi buffering for streaming proxy_http_version 1.1; proxy_set_header Connection ""; proxy_buffering off; proxy_cache off; - proxy_read_timeout 3600s; - proxy_send_timeout 3600s; + proxy_read_timeout 300s; + proxy_send_timeout 300s; client_max_body_size 0; proxy_set_header X-Real-IP $remote_addr; proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; From 00a8609b352eb909915adab025097ab352b27cc1 Mon Sep 17 00:00:00 2001 From: GitHub Actions Date: Fri, 25 Apr 2025 20:32:59 +0000 Subject: [PATCH 03/42] Increment build number to 29 [skip ci] --- version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/version.py b/version.py index a8aae639..8ad97b43 100644 --- a/version.py +++ b/version.py @@ -2,4 +2,4 @@ Dispatcharr version information. """ __version__ = '0.3.3' # Follow semantic versioning (MAJOR.MINOR.PATCH) -__build__ = '28' # Auto-incremented on builds +__build__ = '29' # Auto-incremented on builds From b6fe53ba2b3570abb3fcd8c0e8e64921824b5e27 Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Fri, 25 Apr 2025 15:49:42 -0500 Subject: [PATCH 04/42] Removed no buffer for uwsgi. Caused issues. --- docker/nginx.conf | 1 - 1 file changed, 1 deletion(-) diff --git a/docker/nginx.conf b/docker/nginx.conf index 8382b73f..b440f773 100644 --- a/docker/nginx.conf +++ b/docker/nginx.conf @@ -87,7 +87,6 @@ server { location /proxy/ { include uwsgi_params; uwsgi_pass unix:/app/uwsgi.sock; - uwsgi_buffering off; # Explicitly disable uwsgi buffering for streaming proxy_http_version 1.1; proxy_set_header Connection ""; proxy_buffering off; From a2b499d45373cba7f272d99a9a839c12b9b1735f Mon Sep 17 00:00:00 2001 From: GitHub Actions Date: Fri, 25 Apr 2025 20:50:04 +0000 Subject: [PATCH 05/42] Increment build number to 30 [skip ci] --- version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/version.py b/version.py index 8ad97b43..a0bbdd9e 100644 --- a/version.py +++ b/version.py @@ -2,4 +2,4 @@ Dispatcharr version information. 
""" __version__ = '0.3.3' # Follow semantic versioning (MAJOR.MINOR.PATCH) -__build__ = '29' # Auto-incremented on builds +__build__ = '30' # Auto-incremented on builds From 8b057818b74d95d40562c7bf557fd0b21d525187 Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Fri, 25 Apr 2025 16:55:30 -0500 Subject: [PATCH 06/42] Adjusted logo box so data didn't shift with different size logos. --- frontend/src/pages/Stats.jsx | 23 +++++++++++++++++------ 1 file changed, 17 insertions(+), 6 deletions(-) diff --git a/frontend/src/pages/Stats.jsx b/frontend/src/pages/Stats.jsx index 4754f68a..e2e90a7d 100644 --- a/frontend/src/pages/Stats.jsx +++ b/frontend/src/pages/Stats.jsx @@ -325,12 +325,23 @@ const ChannelCard = ({ channel, clients, stopClient, stopChannel, logos, channel > - channel logo + + channel logo + From ff0deffe3672a74792ca1371ae14ca9b52d9b4c5 Mon Sep 17 00:00:00 2001 From: GitHub Actions Date: Fri, 25 Apr 2025 21:56:53 +0000 Subject: [PATCH 07/42] Increment build number to 31 [skip ci] --- version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/version.py b/version.py index a0bbdd9e..360dc0ae 100644 --- a/version.py +++ b/version.py @@ -2,4 +2,4 @@ Dispatcharr version information. """ __version__ = '0.3.3' # Follow semantic versioning (MAJOR.MINOR.PATCH) -__build__ = '30' # Auto-incremented on builds +__build__ = '31' # Auto-incremented on builds From 81fecde3b56f59bf55ec231754247c695a6a8497 Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Sat, 26 Apr 2025 08:29:18 -0500 Subject: [PATCH 08/42] Add stream name to channel status. --- apps/proxy/ts_proxy/channel_status.py | 39 +++++++++++++++++++++++++++ 1 file changed, 39 insertions(+) diff --git a/apps/proxy/ts_proxy/channel_status.py b/apps/proxy/ts_proxy/channel_status.py index d4e33f02..fe5b1f3b 100644 --- a/apps/proxy/ts_proxy/channel_status.py +++ b/apps/proxy/ts_proxy/channel_status.py @@ -6,6 +6,7 @@ from .redis_keys import RedisKeys from .constants import TS_PACKET_SIZE, ChannelMetadataField from redis.exceptions import ConnectionError, TimeoutError from .utils import get_logger +from django.db import DatabaseError # Add import for error handling logger = get_logger() @@ -45,6 +46,25 @@ class ChannelStatus: 'buffer_index': int(buffer_index_value.decode('utf-8')) if buffer_index_value else 0, } + # Add stream ID and name information + stream_id_bytes = metadata.get(ChannelMetadataField.STREAM_ID.encode('utf-8')) + if stream_id_bytes: + try: + stream_id = int(stream_id_bytes.decode('utf-8')) + info['stream_id'] = stream_id + + # Look up stream name from database + try: + from apps.channels.models import Stream + stream = Stream.objects.filter(id=stream_id).first() + if stream: + info['stream_name'] = stream.name + logger.debug(f"Added stream name '{stream.name}' for stream ID {stream_id}") + except (ImportError, DatabaseError) as e: + logger.warning(f"Failed to get stream name for ID {stream_id}: {e}") + except ValueError: + logger.warning(f"Invalid stream_id format in Redis: {stream_id_bytes}") + # Add timing information state_changed_field = ChannelMetadataField.STATE_CHANGED_AT.encode('utf-8') if state_changed_field in metadata: @@ -285,6 +305,25 @@ class ChannelStatus: 'uptime': uptime } + # Add stream ID and name information + stream_id_bytes = metadata.get(ChannelMetadataField.STREAM_ID.encode('utf-8')) + if stream_id_bytes: + try: + stream_id = int(stream_id_bytes.decode('utf-8')) + info['stream_id'] = stream_id + + # Look up stream name from database + try: + from apps.channels.models import Stream + 
stream = Stream.objects.filter(id=stream_id).first() + if stream: + info['stream_name'] = stream.name + logger.debug(f"Added stream name '{stream.name}' for stream ID {stream_id}") + except (ImportError, DatabaseError) as e: + logger.warning(f"Failed to get stream name for ID {stream_id}: {e}") + except ValueError: + logger.warning(f"Invalid stream_id format in Redis: {stream_id_bytes}") + # Add data throughput information to basic info total_bytes_bytes = proxy_server.redis_client.hget(metadata_key, ChannelMetadataField.TOTAL_BYTES.encode('utf-8')) if total_bytes_bytes: From 7cfe7c2998ab101d0815fe25c57afcf60892b381 Mon Sep 17 00:00:00 2001 From: GitHub Actions Date: Sat, 26 Apr 2025 13:29:51 +0000 Subject: [PATCH 09/42] Increment build number to 32 [skip ci] --- version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/version.py b/version.py index 360dc0ae..0fedf3fc 100644 --- a/version.py +++ b/version.py @@ -2,4 +2,4 @@ Dispatcharr version information. """ __version__ = '0.3.3' # Follow semantic versioning (MAJOR.MINOR.PATCH) -__build__ = '31' # Auto-incremented on builds +__build__ = '32' # Auto-incremented on builds From d3a7dbca1074058a77cbc4e1a43a1d07c5166680 Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Sat, 26 Apr 2025 08:36:46 -0500 Subject: [PATCH 10/42] Imported missing os --- apps/epg/models.py | 1 + 1 file changed, 1 insertion(+) diff --git a/apps/epg/models.py b/apps/epg/models.py index f026c558..2f7d5990 100644 --- a/apps/epg/models.py +++ b/apps/epg/models.py @@ -2,6 +2,7 @@ from django.db import models from django.utils import timezone from django_celery_beat.models import PeriodicTask from django.conf import settings +import os class EPGSource(models.Model): SOURCE_TYPE_CHOICES = [ From e02e1458fa74459f3a950c153e44002d6fb9ceac Mon Sep 17 00:00:00 2001 From: GitHub Actions Date: Sat, 26 Apr 2025 13:37:06 +0000 Subject: [PATCH 11/42] Increment build number to 33 [skip ci] --- version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/version.py b/version.py index 0fedf3fc..7bb9e07f 100644 --- a/version.py +++ b/version.py @@ -2,4 +2,4 @@ Dispatcharr version information. """ __version__ = '0.3.3' # Follow semantic versioning (MAJOR.MINOR.PATCH) -__build__ = '32' # Auto-incremented on builds +__build__ = '33' # Auto-incremented on builds From 88f27d62f1e72d26b11173bd449fdca03bdac452 Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Sun, 27 Apr 2025 14:43:09 -0500 Subject: [PATCH 12/42] Adds current m3u profile to stats. 
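The proxy stores the active M3U profile ID in the channel's Redis metadata hash, and the status payload now resolves that ID to a human-readable profile name for the UI. A minimal sketch of the lookup this patch adds (model and query follow the diff below; the raw byte-encoded Redis field key is an assumption):

    from apps.m3u.models import M3UAccountProfile

    def resolve_m3u_profile(metadata):
        info = {}
        raw = metadata.get(b'm3u_profile')  # assumed encoding of ChannelMetadataField.M3U_PROFILE
        if raw:
            profile_id = int(raw.decode('utf-8'))
            info['m3u_profile_id'] = profile_id
            profile = M3UAccountProfile.objects.filter(id=profile_id).first()
            if profile:
                info['m3u_profile_name'] = profile.name
        return info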
--- apps/proxy/ts_proxy/channel_status.py | 38 +++++++++++++++ .../ts_proxy/services/channel_service.py | 25 ++++++---- apps/proxy/ts_proxy/views.py | 3 +- frontend/src/pages/Stats.jsx | 46 +++++++++++++++++++ 4 files changed, 102 insertions(+), 10 deletions(-) diff --git a/apps/proxy/ts_proxy/channel_status.py b/apps/proxy/ts_proxy/channel_status.py index fe5b1f3b..43e01df8 100644 --- a/apps/proxy/ts_proxy/channel_status.py +++ b/apps/proxy/ts_proxy/channel_status.py @@ -65,6 +65,25 @@ class ChannelStatus: except ValueError: logger.warning(f"Invalid stream_id format in Redis: {stream_id_bytes}") + # Add M3U profile information + m3u_profile_id_bytes = metadata.get(ChannelMetadataField.M3U_PROFILE.encode('utf-8')) + if m3u_profile_id_bytes: + try: + m3u_profile_id = int(m3u_profile_id_bytes.decode('utf-8')) + info['m3u_profile_id'] = m3u_profile_id + + # Look up M3U profile name from database + try: + from apps.m3u.models import M3UAccountProfile + m3u_profile = M3UAccountProfile.objects.filter(id=m3u_profile_id).first() + if m3u_profile: + info['m3u_profile_name'] = m3u_profile.name + logger.debug(f"Added M3U profile name '{m3u_profile.name}' for profile ID {m3u_profile_id}") + except (ImportError, DatabaseError) as e: + logger.warning(f"Failed to get M3U profile name for ID {m3u_profile_id}: {e}") + except ValueError: + logger.warning(f"Invalid m3u_profile_id format in Redis: {m3u_profile_id_bytes}") + # Add timing information state_changed_field = ChannelMetadataField.STATE_CHANGED_AT.encode('utf-8') if state_changed_field in metadata: @@ -380,6 +399,25 @@ class ChannelStatus: # Add clients to info info['clients'] = clients + # Add M3U profile information + m3u_profile_id_bytes = metadata.get(ChannelMetadataField.M3U_PROFILE.encode('utf-8')) + if m3u_profile_id_bytes: + try: + m3u_profile_id = int(m3u_profile_id_bytes.decode('utf-8')) + info['m3u_profile_id'] = m3u_profile_id + + # Look up M3U profile name from database + try: + from apps.m3u.models import M3UAccountProfile + m3u_profile = M3UAccountProfile.objects.filter(id=m3u_profile_id).first() + if m3u_profile: + info['m3u_profile_name'] = m3u_profile.name + logger.debug(f"Added M3U profile name '{m3u_profile.name}' for profile ID {m3u_profile_id}") + except (ImportError, DatabaseError) as e: + logger.warning(f"Failed to get M3U profile name for ID {m3u_profile_id}: {e}") + except ValueError: + logger.warning(f"Invalid m3u_profile_id format in Redis: {m3u_profile_id_bytes}") + return info except Exception as e: logger.error(f"Error getting channel info: {e}") diff --git a/apps/proxy/ts_proxy/services/channel_service.py b/apps/proxy/ts_proxy/services/channel_service.py index 3ac62af4..9aa8c66b 100644 --- a/apps/proxy/ts_proxy/services/channel_service.py +++ b/apps/proxy/ts_proxy/services/channel_service.py @@ -7,7 +7,7 @@ import logging import time import json from django.shortcuts import get_object_or_404 -from apps.channels.models import Channel +from apps.channels.models import Channel, Stream from apps.proxy.config import TSConfig as Config from ..server import ProxyServer from ..redis_keys import RedisKeys @@ -58,7 +58,7 @@ class ChannelService: # Verify the stream_id was set stream_id_value = proxy_server.redis_client.hget(metadata_key, ChannelMetadataField.STREAM_ID) if stream_id_value: - logger.info(f"Verified stream_id {stream_id_value.decode('utf-8')} is now set in Redis") + logger.debug(f"Verified stream_id {stream_id_value.decode('utf-8')} is now set in Redis") else: logger.error(f"Failed to set stream_id {stream_id} in 
Redis before initialization") @@ -82,7 +82,7 @@ class ChannelService: return success @staticmethod - def change_stream_url(channel_id, new_url=None, user_agent=None, target_stream_id=None): + def change_stream_url(channel_id, new_url=None, user_agent=None, target_stream_id=None, m3u_profile_id=None): """ Change the URL of an existing stream. @@ -91,6 +91,7 @@ class ChannelService: new_url: New stream URL (optional if target_stream_id is provided) user_agent: Optional user agent to update target_stream_id: Optional target stream ID to switch to + m3u_profile_id: Optional M3U profile ID to update Returns: dict: Result information including success status and diagnostics @@ -109,6 +110,10 @@ class ChannelService: new_url = stream_info['url'] user_agent = stream_info['user_agent'] stream_id = target_stream_id + # Extract M3U profile ID from stream info if available + if 'm3u_profile_id' in stream_info: + m3u_profile_id = stream_info['m3u_profile_id'] + logger.info(f"Found M3U profile ID {m3u_profile_id} for stream ID {stream_id}") elif target_stream_id: # If we have both URL and target_stream_id, use the target_stream_id stream_id = target_stream_id @@ -163,7 +168,7 @@ class ChannelService: # Update metadata in Redis regardless of ownership if proxy_server.redis_client: try: - ChannelService._update_channel_metadata(channel_id, new_url, user_agent, stream_id) + ChannelService._update_channel_metadata(channel_id, new_url, user_agent, stream_id, m3u_profile_id) result['metadata_updated'] = True except Exception as e: logger.error(f"Error updating Redis metadata: {e}", exc_info=True) @@ -188,7 +193,7 @@ class ChannelService: # If we're not the owner, publish an event for the owner to pick up logger.info(f"Not the owner, requesting URL change via Redis PubSub") if proxy_server.redis_client: - ChannelService._publish_stream_switch_event(channel_id, new_url, user_agent, stream_id) + ChannelService._publish_stream_switch_event(channel_id, new_url, user_agent, stream_id, m3u_profile_id) result.update({ 'direct_update': False, 'event_published': True, @@ -413,7 +418,7 @@ class ChannelService: # Helper methods for Redis operations @staticmethod - def _update_channel_metadata(channel_id, url, user_agent=None, stream_id=None): + def _update_channel_metadata(channel_id, url, user_agent=None, stream_id=None, m3u_profile_id=None): """Update channel metadata in Redis""" proxy_server = ProxyServer.get_instance() @@ -432,7 +437,8 @@ class ChannelService: metadata[ChannelMetadataField.USER_AGENT] = user_agent if stream_id: metadata[ChannelMetadataField.STREAM_ID] = str(stream_id) - logger.info(f"Updating stream ID to {stream_id} in Redis for channel {channel_id}") + if m3u_profile_id: + metadata[ChannelMetadataField.M3U_PROFILE] = str(m3u_profile_id) # Use the appropriate method based on the key type if key_type == 'hash': @@ -448,11 +454,11 @@ class ChannelService: switch_key = RedisKeys.switch_request(channel_id) proxy_server.redis_client.setex(switch_key, 30, url) # 30 second TTL - logger.info(f"Updated metadata for channel {channel_id} in Redis") + logger.debug(f"Updated metadata for channel {channel_id} in Redis") return True @staticmethod - def _publish_stream_switch_event(channel_id, new_url, user_agent=None, stream_id=None): + def _publish_stream_switch_event(channel_id, new_url, user_agent=None, stream_id=None, m3u_profile_id=None): """Publish a stream switch event to Redis pubsub""" proxy_server = ProxyServer.get_instance() @@ -465,6 +471,7 @@ class ChannelService: "url": new_url, "user_agent": 
user_agent, "stream_id": stream_id, + "m3u_profile_id": m3u_profile_id, "requester": proxy_server.worker_id, "timestamp": time.time() } diff --git a/apps/proxy/ts_proxy/views.py b/apps/proxy/ts_proxy/views.py index d71da8d4..87a8e51b 100644 --- a/apps/proxy/ts_proxy/views.py +++ b/apps/proxy/ts_proxy/views.py @@ -351,6 +351,7 @@ def change_stream(request, channel_id): # Use the info from the stream new_url = stream_info['url'] user_agent = stream_info['user_agent'] + m3u_profile_id = stream_info.get('m3u_profile_id') # Stream ID will be passed to change_stream_url later elif not new_url: return JsonResponse({'error': 'Either url or stream_id must be provided'}, status=400) @@ -359,7 +360,7 @@ def change_stream(request, channel_id): # Use the service layer instead of direct implementation # Pass stream_id to ensure proper connection tracking - result = ChannelService.change_stream_url(channel_id, new_url, user_agent, stream_id) + result = ChannelService.change_stream_url(channel_id, new_url, user_agent, stream_id, m3u_profile_id) # Get the stream manager before updating URL stream_manager = proxy_server.stream_managers.get(channel_id) diff --git a/frontend/src/pages/Stats.jsx b/frontend/src/pages/Stats.jsx index e2e90a7d..ef15d605 100644 --- a/frontend/src/pages/Stats.jsx +++ b/frontend/src/pages/Stats.jsx @@ -82,12 +82,23 @@ const ChannelCard = ({ channel, clients, stopClient, stopChannel, logos, channel const [availableStreams, setAvailableStreams] = useState([]); const [isLoadingStreams, setIsLoadingStreams] = useState(false); const [activeStreamId, setActiveStreamId] = useState(null); + const [currentM3UProfile, setCurrentM3UProfile] = useState(null); // Add state for current M3U profile // Safety check - if channel doesn't have required data, don't render if (!channel || !channel.channel_id) { return null; } + // Update M3U profile information when channel data changes + useEffect(() => { + // If the channel data includes M3U profile information, update our state + if (channel.m3u_profile || channel.m3u_profile_name) { + setCurrentM3UProfile({ + name: channel.m3u_profile?.name || channel.m3u_profile_name || 'Default M3U' + }); + } + }, [channel.m3u_profile, channel.m3u_profile_name, channel.stream_id]); + // Fetch available streams for this channel useEffect(() => { const fetchStreams = async () => { @@ -110,6 +121,11 @@ const ChannelCard = ({ channel, clients, stopClient, stopChannel, logos, channel if (matchingStream) { setActiveStreamId(matchingStream.id.toString()); + + // If the stream has M3U profile info, save it + if (matchingStream.m3u_profile) { + setCurrentM3UProfile(matchingStream.m3u_profile); + } } } } @@ -138,6 +154,14 @@ const ChannelCard = ({ channel, clients, stopClient, stopChannel, logos, channel // Update the local active stream ID immediately setActiveStreamId(streamId); + // Update M3U profile information if available in the response + if (response && response.m3u_profile) { + setCurrentM3UProfile(response.m3u_profile); + } else if (selectedStream && selectedStream.m3u_profile) { + // Fallback to the profile from the selected stream + setCurrentM3UProfile(selectedStream.m3u_profile); + } + // Show detailed notification with stream name notifications.show({ title: 'Stream switching', @@ -152,6 +176,12 @@ const ChannelCard = ({ channel, clients, stopClient, stopChannel, logos, channel if (channelId) { const updatedStreamData = await API.getChannelStreams(channelId); console.log("Channel streams after switch:", updatedStreamData); + + // Update current stream 
information with fresh data + const updatedStream = updatedStreamData.find(s => s.id.toString() === streamId); + if (updatedStream && updatedStream.m3u_profile) { + setCurrentM3UProfile(updatedStream.m3u_profile); + } } } catch (error) { console.error("Error checking streams after switch:", error); @@ -305,6 +335,12 @@ const ChannelCard = ({ channel, clients, stopClient, stopChannel, logos, channel const avgBitrate = channel.avg_bitrate || '0 Kbps'; const streamProfileName = channel.stream_profile?.name || 'Unknown Profile'; + // Use currentM3UProfile if available, otherwise fall back to channel data + const m3uProfileName = currentM3UProfile?.name || + channel.m3u_profile?.name || + channel.m3u_profile_name || + 'Default M3U'; + // Create select options for available streams const streamOptions = availableStreams.map(stream => ({ value: stream.id.toString(), @@ -377,6 +413,16 @@ const ChannelCard = ({ channel, clients, stopClient, stopChannel, logos, channel + {/* Display M3U profile information */} + + + + + {m3uProfileName} + + + + {/* Add stream selection dropdown */} {availableStreams.length > 0 && ( From 77c92d52bdfbd0665087a1d6dbff9f6841c26558 Mon Sep 17 00:00:00 2001 From: GitHub Actions Date: Sun, 27 Apr 2025 19:43:43 +0000 Subject: [PATCH 13/42] Increment build number to 34 [skip ci] --- version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/version.py b/version.py index 7bb9e07f..5f76a589 100644 --- a/version.py +++ b/version.py @@ -2,4 +2,4 @@ Dispatcharr version information. """ __version__ = '0.3.3' # Follow semantic versioning (MAJOR.MINOR.PATCH) -__build__ = '33' # Auto-incremented on builds +__build__ = '34' # Auto-incremented on builds From c51f3136ddad6b3cc689f397cffec13bead3cf3f Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Sun, 27 Apr 2025 14:51:13 -0500 Subject: [PATCH 14/42] Add tooltip for stream profile and add clarity to user-agent. --- frontend/src/pages/Stats.jsx | 19 ++++++++++++++----- 1 file changed, 14 insertions(+), 5 deletions(-) diff --git a/frontend/src/pages/Stats.jsx b/frontend/src/pages/Stats.jsx index ef15d605..3fb818e3 100644 --- a/frontend/src/pages/Stats.jsx +++ b/frontend/src/pages/Stats.jsx @@ -299,7 +299,14 @@ const ChannelCard = ({ channel, clients, stopClient, stopChannel, logos, channel ), - renderDetailPanel: ({ row }) => {row.original.user_agent}, + renderDetailPanel: ({ row }) => ( + + + User Agent: + {row.original.user_agent || "Unknown"} + + + ), mantineExpandButtonProps: ({ row, table }) => ({ size: 'xs', style: { @@ -407,10 +414,12 @@ const ChannelCard = ({ channel, clients, stopClient, stopChannel, logos, channel {channelName} - - + + + + {/* Display M3U profile information */} From 9e99de77ec2cfdb27e8cd268d71b11b72f978597 Mon Sep 17 00:00:00 2001 From: GitHub Actions Date: Sun, 27 Apr 2025 19:51:37 +0000 Subject: [PATCH 15/42] Increment build number to 35 [skip ci] --- version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/version.py b/version.py index 5f76a589..a05aeda2 100644 --- a/version.py +++ b/version.py @@ -2,4 +2,4 @@ Dispatcharr version information. """ __version__ = '0.3.3' # Follow semantic versioning (MAJOR.MINOR.PATCH) -__build__ = '34' # Auto-incremented on builds +__build__ = '35' # Auto-incremented on builds From d59c8a9e3369ff38a1f2daa24bd2e325283d836d Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Sun, 27 Apr 2025 15:52:10 -0500 Subject: [PATCH 16/42] Properly track current stream id during stream switches. 
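update_url() now accepts the stream ID alongside the URL, so the manager records which stream is currently playing and marks it as already tried for failover. A minimal sketch of the new call shape (the stream_managers lookup mirrors views.py; the surrounding variables are assumed):

    # Passing stream_id updates current_stream_id and adds the ID to
    # tried_stream_ids so failover will not retry the same stream.
    manager = proxy_server.stream_managers.get(channel_id)
    if manager:
        manager.update_url(new_url, stream_id)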
--- apps/proxy/ts_proxy/services/channel_service.py | 5 ++++- apps/proxy/ts_proxy/stream_manager.py | 14 +++++++++++--- 2 files changed, 15 insertions(+), 4 deletions(-) diff --git a/apps/proxy/ts_proxy/services/channel_service.py b/apps/proxy/ts_proxy/services/channel_service.py index 9aa8c66b..bd1f2f81 100644 --- a/apps/proxy/ts_proxy/services/channel_service.py +++ b/apps/proxy/ts_proxy/services/channel_service.py @@ -181,7 +181,7 @@ class ChannelService: old_url = manager.url # Update the stream - success = manager.update_url(new_url) + success = manager.update_url(new_url, stream_id) logger.info(f"Stream URL changed from {old_url} to {new_url}, result: {success}") result.update({ @@ -440,6 +440,9 @@ class ChannelService: if m3u_profile_id: metadata[ChannelMetadataField.M3U_PROFILE] = str(m3u_profile_id) + # Also update the stream switch time field + metadata[ChannelMetadataField.STREAM_SWITCH_TIME] = str(time.time()) + # Use the appropriate method based on the key type if key_type == 'hash': proxy_server.redis_client.hset(metadata_key, mapping=metadata) diff --git a/apps/proxy/ts_proxy/stream_manager.py b/apps/proxy/ts_proxy/stream_manager.py index 9d2847c1..2615758f 100644 --- a/apps/proxy/ts_proxy/stream_manager.py +++ b/apps/proxy/ts_proxy/stream_manager.py @@ -544,7 +544,7 @@ class StreamManager: # Set running to false to ensure thread exits self.running = False - def update_url(self, new_url): + def update_url(self, new_url, stream_id=None): """Update stream URL and reconnect with proper cleanup for both HTTP and transcode sessions""" if new_url == self.url: logger.info(f"URL unchanged: {new_url}") @@ -568,6 +568,14 @@ class StreamManager: self.url = new_url self.connected = False + # Update stream ID if provided + if stream_id: + old_stream_id = self.current_stream_id + self.current_stream_id = stream_id + # Add stream ID to tried streams for proper tracking + self.tried_stream_ids.add(stream_id) + logger.info(f"Updated stream ID from {old_stream_id} to {stream_id} for channel {self.buffer.channel_id}") + # Reset retry counter to allow immediate reconnect self.retry_count = 0 @@ -1005,7 +1013,7 @@ class StreamManager: logger.info(f"Stream metadata updated for channel {self.channel_id} to stream ID {stream_id}") # IMPORTANT: Just update the URL, don't stop the channel or release resources - switch_result = self.update_url(new_url) + switch_result = self.update_url(new_url, stream_id) if not switch_result: logger.error(f"Failed to update URL for stream ID {stream_id}") return False @@ -1015,4 +1023,4 @@ class StreamManager: except Exception as e: logger.error(f"Error trying next stream for channel {self.channel_id}: {e}", exc_info=True) - return False + return False \ No newline at end of file From 768ca0e353375460decdfc19aa6834c3a6cbaa95 Mon Sep 17 00:00:00 2001 From: GitHub Actions Date: Sun, 27 Apr 2025 20:53:27 +0000 Subject: [PATCH 17/42] Increment build number to 36 [skip ci] --- version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/version.py b/version.py index a05aeda2..4b1e4acc 100644 --- a/version.py +++ b/version.py @@ -2,4 +2,4 @@ Dispatcharr version information. """ __version__ = '0.3.3' # Follow semantic versioning (MAJOR.MINOR.PATCH) -__build__ = '35' # Auto-incremented on builds +__build__ = '36' # Auto-incremented on builds From c049e48c0877f40fcfeb42a2ec248d50afea6179 Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Sun, 27 Apr 2025 17:46:27 -0500 Subject: [PATCH 18/42] Use timestamp instead of build number increase. 
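version.py keeps __timestamp__ = None in the repository and CI stamps a UTC timestamp at build time, so image tags become <version>-<timestamp> instead of <version>-<build>. A minimal sketch of the resulting contract (display_version is a hypothetical helper that mirrors the sidebar logic):

    __version__ = '0.3.3'
    __timestamp__ = None  # e.g. '20250427225000' once stamped by CI

    def display_version():
        # "v0.3.3" on a local checkout, "v0.3.3-20250427225000" in a CI build
        return f"v{__version__}" + (f"-{__timestamp__}" if __timestamp__ else "")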
--- .github/workflows/ci.yml | 23 +++++++---------------- core/api_views.py | 4 ++-- docker/build-dev.sh | 6 ++---- frontend/src/components/Sidebar.jsx | 6 +++--- version.py | 2 +- 5 files changed, 15 insertions(+), 26 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 8aa06d0d..62f01629 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -44,28 +44,20 @@ jobs: username: ${{ github.actor }} password: ${{ secrets.GITHUB_TOKEN }} - - name: Increment Build Number - if: steps.check_actor.outputs.is_bot != 'true' - id: increment_build + - name: Generate timestamp for build + id: timestamp run: | - python scripts/increment_build.py - BUILD=$(python -c "import version; print(version.__build__)") - echo "build=${BUILD}" >> $GITHUB_OUTPUT + TIMESTAMP=$(date -u +'%Y%m%d%H%M%S') + echo "timestamp=${TIMESTAMP}" >> $GITHUB_OUTPUT - - name: Commit Build Number Update - if: steps.check_actor.outputs.is_bot != 'true' - run: | - git add version.py - git commit -m "Increment build number to ${{ steps.increment_build.outputs.build }} [skip ci]" - git push + # Update the timestamp in version.py + sed -i "s/__timestamp__ = None/__timestamp__ = '${TIMESTAMP}'/" version.py - name: Extract version info id: version run: | VERSION=$(python -c "import version; print(version.__version__)") - BUILD=$(python -c "import version; print(version.__build__)") echo "version=${VERSION}" >> $GITHUB_OUTPUT - echo "build=${BUILD}" >> $GITHUB_OUTPUT echo "sha_short=${GITHUB_SHA::7}" >> $GITHUB_OUTPUT - name: Set repository and image metadata @@ -98,7 +90,6 @@ jobs: echo "is_fork=true" >> $GITHUB_OUTPUT else echo "is_fork=false" >> $GITHUB_OUTPUT - fi - name: Build and push Docker image uses: docker/build-push-action@v4 @@ -108,7 +99,7 @@ jobs: platforms: linux/amd64 # Fast build - amd64 only tags: | ghcr.io/${{ steps.meta.outputs.repo_owner }}/${{ steps.meta.outputs.repo_name }}:${{ steps.meta.outputs.branch_tag }} - ghcr.io/${{ steps.meta.outputs.repo_owner }}/${{ steps.meta.outputs.repo_name }}:${{ steps.version.outputs.version }}-${{ steps.version.outputs.build }} + ghcr.io/${{ steps.meta.outputs.repo_owner }}/${{ steps.meta.outputs.repo_name }}:${{ steps.version.outputs.version }}-${{ steps.timestamp.outputs.timestamp }} ghcr.io/${{ steps.meta.outputs.repo_owner }}/${{ steps.meta.outputs.repo_name }}:${{ steps.version.outputs.sha_short }} build-args: | BRANCH=${{ github.ref_name }} diff --git a/core/api_views.py b/core/api_views.py index d9c0aba4..7f3ecf57 100644 --- a/core/api_views.py +++ b/core/api_views.py @@ -95,8 +95,8 @@ def environment(request): @api_view(['GET']) def version(request): # Import version information - from version import __version__, __build__ + from version import __version__, __timestamp__ return Response({ 'version': __version__, - 'build': __build__, + 'timestamp': __timestamp__, }) diff --git a/docker/build-dev.sh b/docker/build-dev.sh index 65d643a7..b02c314e 100755 --- a/docker/build-dev.sh +++ b/docker/build-dev.sh @@ -3,11 +3,9 @@ docker build --build-arg BRANCH=dev -t dispatcharr/dispatcharr:dev -f Dockerfile # Get version information VERSION=$(python -c "import sys; sys.path.append('..'); import version; print(version.__version__)") -BUILD=$(python -c "import sys; sys.path.append('..'); import version; print(version.__build__)") -# Build with version tags +# Build with version tag docker build --build-arg BRANCH=dev \ -t dispatcharr/dispatcharr:dev \ - -t dispatcharr/dispatcharr:${VERSION}-${BUILD} \ + -t dispatcharr/dispatcharr:${VERSION} 
\ -f Dockerfile .. -. diff --git a/frontend/src/components/Sidebar.jsx b/frontend/src/components/Sidebar.jsx index b5dc15b2..eb5a2226 100644 --- a/frontend/src/components/Sidebar.jsx +++ b/frontend/src/components/Sidebar.jsx @@ -67,7 +67,7 @@ const Sidebar = ({ collapsed, toggleDrawer, drawerWidth, miniDrawerWidth }) => { const environment = useSettingsStore((s) => s.environment); const isAuthenticated = useAuthStore((s) => s.isAuthenticated); const publicIPRef = useRef(null); - const [appVersion, setAppVersion] = useState({ version: '', build: '' }); + const [appVersion, setAppVersion] = useState({ version: '', timestamp: null }); // Fetch environment settings including version on component mount useEffect(() => { @@ -89,7 +89,7 @@ const Sidebar = ({ collapsed, toggleDrawer, drawerWidth, miniDrawerWidth }) => { const versionData = await API.getVersion(); setAppVersion({ version: versionData.version || '', - build: versionData.build || '', + timestamp: versionData.timestamp || null, }); } catch (error) { console.error('Failed to fetch version information:', error); @@ -266,7 +266,7 @@ const Sidebar = ({ collapsed, toggleDrawer, drawerWidth, miniDrawerWidth }) => { {!collapsed && ( v{appVersion?.version || '0.0.0'} - {appVersion?.build !== '0' ? `-${appVersion?.build}` : ''} + {appVersion?.timestamp ? `-${appVersion.timestamp}` : ''} )} diff --git a/version.py b/version.py index 4b1e4acc..9339fcbd 100644 --- a/version.py +++ b/version.py @@ -2,4 +2,4 @@ Dispatcharr version information. """ __version__ = '0.3.3' # Follow semantic versioning (MAJOR.MINOR.PATCH) -__build__ = '36' # Auto-incremented on builds +__timestamp__ = None # Set during CI/CD build process From a8a6322e3057fe60e4f5d9fb43a69c1287d21652 Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Sun, 27 Apr 2025 17:50:25 -0500 Subject: [PATCH 19/42] Missed closing if statement. --- .github/workflows/ci.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 62f01629..20d3f150 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -90,6 +90,7 @@ jobs: echo "is_fork=true" >> $GITHUB_OUTPUT else echo "is_fork=false" >> $GITHUB_OUTPUT + fi - name: Build and push Docker image uses: docker/build-push-action@v4 From cb62a13c40b41d8f6201e64fcc893eee2d0a2c1c Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Sun, 27 Apr 2025 18:27:33 -0500 Subject: [PATCH 20/42] Attempt at fixing timestamp not being added to version. 
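Note the pitfall the following commit fixes: the replacement text ends in "# Set during CI/CD build process", and the "/" inside "CI/CD" terminates a slash-delimited s/.../.../ expression early, so the substitution fails until the delimiter is switched to "|" (both lines below are quoted from the diffs in this and the next commit):

    sed -i "s/__timestamp__ = None.*/__timestamp__ = '${TIMESTAMP}' # Set during CI/CD build process/" version.py   # '/' in CI/CD breaks the expression
    sed -i "s|__timestamp__ = None.*|__timestamp__ = '${TIMESTAMP}' # Set during CI/CD build process|" version.py   # pipe delimiter avoids the clash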
--- .github/workflows/ci.yml | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 20d3f150..4ee8db93 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -51,7 +51,13 @@ jobs: echo "timestamp=${TIMESTAMP}" >> $GITHUB_OUTPUT # Update the timestamp in version.py - sed -i "s/__timestamp__ = None/__timestamp__ = '${TIMESTAMP}'/" version.py + echo "Updating timestamp to ${TIMESTAMP} in version.py" + sed -i "s/__timestamp__ = None.*/__timestamp__ = '${TIMESTAMP}' # Set during CI/CD build process/" version.py + cat version.py # Verify the file was updated correctly + + # Make the version.py change part of the Docker build context + git add version.py + git commit -m "Update build timestamp [skip ci]" || echo "No changes to commit" - name: Extract version info id: version run: | From 202ef265dea0bfa9293e831d5865dab753147a03 Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Sun, 27 Apr 2025 18:30:15 -0500 Subject: [PATCH 21/42] Fix sed command delimiter for updating timestamp in version.py --- .github/workflows/ci.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 4ee8db93..6a98d50f 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -52,7 +52,7 @@ jobs: # Update the timestamp in version.py echo "Updating timestamp to ${TIMESTAMP} in version.py" - sed -i "s/__timestamp__ = None.*/__timestamp__ = '${TIMESTAMP}' # Set during CI/CD build process/" version.py + sed -i "s|__timestamp__ = None.*|__timestamp__ = '${TIMESTAMP}' # Set during CI/CD build process|" version.py cat version.py # Verify the file was updated correctly # Make the version.py change part of the Docker build context From a81daaea44bf46e472561512258f82ce3faf0746 Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Sun, 27 Apr 2025 18:32:01 -0500 Subject: [PATCH 22/42] Restore the closing fi in the fork-detection step (it keeps getting dropped) --- .github/workflows/ci.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 6a98d50f..5964f614 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -96,6 +96,7 @@ jobs: echo "is_fork=true" >> $GITHUB_OUTPUT else echo "is_fork=false" >> $GITHUB_OUTPUT + fi - name: Build and push Docker image uses: docker/build-push-action@v4 From 07edf270fb373c1439ddcedc1b1aa459d27e0b67 Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Sun, 27 Apr 2025 18:53:39 -0500 Subject: [PATCH 23/42] Refactor CI workflow to update version.py with build timestamp in Dockerfile --- .github/workflows/ci.yml | 10 +--------- docker/Dockerfile | 14 ++++++++++++-- 2 files changed, 13 insertions(+), 11 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 5964f614..1418cbf5 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -50,15 +50,6 @@ jobs: TIMESTAMP=$(date -u +'%Y%m%d%H%M%S') echo "timestamp=${TIMESTAMP}" >> $GITHUB_OUTPUT - # Update the timestamp in version.py - echo "Updating timestamp to ${TIMESTAMP} in version.py" - sed -i "s|__timestamp__ = None.*|__timestamp__ = '${TIMESTAMP}' # Set during CI/CD build process|" version.py - cat version.py # Verify the file was updated correctly - - # Make the version.py change part of the Docker build context - git add version.py - git 
commit -m "Update build timestamp [skip ci]" || echo "No changes to commit" - - name: Extract version info id: version run: | @@ -111,4 +102,5 @@ jobs: build-args: | BRANCH=${{ github.ref_name }} REPO_URL=https://github.com/${{ github.repository }} + TIMESTAMP=${{ steps.timestamp.outputs.timestamp }} file: ./docker/Dockerfile diff --git a/docker/Dockerfile b/docker/Dockerfile index e3f8a165..4d313e2c 100644 --- a/docker/Dockerfile +++ b/docker/Dockerfile @@ -5,6 +5,8 @@ ARG BRANCH=main # This will be overridden by the GitHub Actions workflow # when building the Docker image for production. ARG REPO_URL=https://github.com/Dispatcharr/Dispatcharr +# Add timestamp argument +ARG TIMESTAMP ENV PATH="/dispatcharrpy/bin:$PATH" \ VIRTUAL_ENV=/dispatcharrpy \ @@ -26,8 +28,16 @@ RUN apt-get update && \ virtualenv /dispatcharrpy && \ git clone -b ${BRANCH} ${REPO_URL} /app && \ cd /app && \ - rm -rf .git && \ - cd /app && \ + rm -rf .git + +# Update version.py with build timestamp if provided +RUN if [ -n "$TIMESTAMP" ]; then \ + echo "Updating timestamp to ${TIMESTAMP} in version.py" && \ + sed -i "s|__timestamp__ = None.*|__timestamp__ = '${TIMESTAMP}' # Set during CI/CD build process|" /app/version.py && \ + cat /app/version.py; \ + fi + +RUN cd /app && \ pip install --no-cache-dir -r requirements.txt # Use a dedicated Node.js stage for frontend building From 164f0cdbb510e05f2cb86040634d057196ecb97a Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Sun, 27 Apr 2025 19:15:00 -0500 Subject: [PATCH 24/42] Increase thread stack size in uwsgi configuration for improved performance --- docker/uwsgi.ini | 1 + 1 file changed, 1 insertion(+) diff --git a/docker/uwsgi.ini b/docker/uwsgi.ini index 9db61495..326f4b5d 100644 --- a/docker/uwsgi.ini +++ b/docker/uwsgi.ini @@ -28,6 +28,7 @@ static-map = /static=/app/static workers = 4 threads = 4 enable-threads = true +thread-stacksize=512 # Optimize for streaming http = 0.0.0.0:5656 From 0b8f20dc22ae86606a128235237a0b7f62906b3c Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Sun, 27 Apr 2025 19:19:48 -0500 Subject: [PATCH 25/42] Fixes not being able to set logo to default. --- frontend/src/api.js | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/frontend/src/api.js b/frontend/src/api.js index 38aac846..e54e628a 100644 --- a/frontend/src/api.js +++ b/frontend/src/api.js @@ -337,6 +337,11 @@ export default class API { payload.stream_profile_id = null; } + // Handle logo_id properly (0 means "no logo") + if (payload.logo_id === '0' || payload.logo_id === 0) { + payload.logo_id = null; + } + // Ensure tvg_id is included properly (not as empty string) if (payload.tvg_id === '') { payload.tvg_id = null; From b439eb810c38244aa688e03babc940dd8bfc81c9 Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Mon, 28 Apr 2025 15:05:58 -0500 Subject: [PATCH 26/42] Cleanup channel lock instead of stream lock. 
--- apps/proxy/ts_proxy/stream_generator.py | 11 ++++++----- apps/proxy/ts_proxy/stream_manager.py | 19 ++++++++++--------- apps/proxy/ts_proxy/views.py | 1 - 3 files changed, 16 insertions(+), 15 deletions(-) diff --git a/apps/proxy/ts_proxy/stream_generator.py b/apps/proxy/ts_proxy/stream_generator.py index 9377a079..17e53b9d 100644 --- a/apps/proxy/ts_proxy/stream_generator.py +++ b/apps/proxy/ts_proxy/stream_generator.py @@ -380,14 +380,15 @@ class StreamGenerator: client_count = proxy_server.client_managers[self.channel_id].get_total_client_count() # Only the last client or owner should release the stream if client_count <= 1 and proxy_server.am_i_owner(self.channel_id): - from apps.channels.models import Stream + from apps.channels.models import Channel try: - stream = Stream.objects.get(pk=stream_id) - stream.release_stream() + # Get the channel by UUID + channel = Channel.objects.get(uuid=self.channel_id) + channel.release_stream() stream_released = True - logger.debug(f"[{self.client_id}] Released stream {stream_id} for channel {self.channel_id}") + logger.debug(f"[{self.client_id}] Released stream for channel {self.channel_id}") except Exception as e: - logger.error(f"[{self.client_id}] Error releasing stream {stream_id}: {e}") + logger.error(f"[{self.client_id}] Error releasing stream for channel {self.channel_id}: {e}") except Exception as e: logger.error(f"[{self.client_id}] Error checking stream data for release: {e}") diff --git a/apps/proxy/ts_proxy/stream_manager.py b/apps/proxy/ts_proxy/stream_manager.py index 2615758f..8f7b1817 100644 --- a/apps/proxy/ts_proxy/stream_manager.py +++ b/apps/proxy/ts_proxy/stream_manager.py @@ -502,15 +502,6 @@ class StreamManager: owner_key = RedisKeys.channel_owner(self.channel_id) current_owner = self.buffer.redis_client.get(owner_key) - if current_owner and current_owner.decode('utf-8') == self.worker_id: - try: - from apps.channels.models import Stream - stream = Stream.objects.get(pk=self.current_stream_id) - stream.release_stream() - logger.info(f"Released stream {self.current_stream_id} for channel {self.channel_id}") - except Exception as e: - logger.error(f"Error releasing stream {self.current_stream_id}: {e}") - # Cancel all buffer check timers for timer in list(self._buffer_check_timers): try: @@ -552,6 +543,16 @@ class StreamManager: logger.info(f"Switching stream URL from {self.url} to {new_url}") + # Release old stream resources if we have a current stream ID + if self.current_stream_id: + try: + from apps.channels.models import Stream + stream = Stream.objects.get(pk=self.current_stream_id) + stream.release_stream() + logger.info(f"Released stream {self.current_stream_id} for channel {self.channel_id}") + except Exception as e: + logger.error(f"Error releasing stream {self.current_stream_id}: {e}") + # CRITICAL: Set a flag to prevent immediate reconnection with old URL self.url_switching = True diff --git a/apps/proxy/ts_proxy/views.py b/apps/proxy/ts_proxy/views.py index 87a8e51b..222da4e3 100644 --- a/apps/proxy/ts_proxy/views.py +++ b/apps/proxy/ts_proxy/views.py @@ -261,7 +261,6 @@ def stream_ts(request, channel_id): logger.info(f"[{client_id}] Successfully initialized channel {channel_id}") channel_initializing = True - logger.info(f"[{client_id}] Channel {channel_id} initialization started") # Register client - can do this regardless of initialization state # Create local resources if needed From cd1da5a61c1fb8b4dc8115c5b39b3a2a02bb95d2 Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Mon, 28 Apr 2025 17:25:03 -0500 
Subject: [PATCH 27/42] Added a new channel model to update m3u profile counts and utilize it during stream switches. --- apps/channels/models.py | 47 +++++++++++++++++++++++++++ apps/proxy/ts_proxy/stream_manager.py | 26 +++++++++++---- 2 files changed, 66 insertions(+), 7 deletions(-) diff --git a/apps/channels/models.py b/apps/channels/models.py index 0b66c468..4485936e 100644 --- a/apps/channels/models.py +++ b/apps/channels/models.py @@ -407,6 +407,53 @@ class Channel(models.Model): if current_count > 0: redis_client.decr(profile_connections_key) + def update_stream_profile(self, new_profile_id): + """ + Updates the profile for the current stream and adjusts connection counts. + + Args: + new_profile_id: The ID of the new stream profile to use + + Returns: + bool: True if successful, False otherwise + """ + redis_client = RedisClient.get_client() + + # Get current stream ID + stream_id_bytes = redis_client.get(f"channel_stream:{self.id}") + if not stream_id_bytes: + logger.debug("No active stream found for channel") + return False + + stream_id = int(stream_id_bytes) + + # Get current profile ID + current_profile_id_bytes = redis_client.get(f"stream_profile:{stream_id}") + if not current_profile_id_bytes: + logger.debug("No profile found for current stream") + return False + + current_profile_id = int(current_profile_id_bytes) + + # Don't do anything if the profile is already set to the requested one + if current_profile_id == new_profile_id: + return True + + # Decrement connection count for old profile + old_profile_connections_key = f"profile_connections:{current_profile_id}" + old_count = int(redis_client.get(old_profile_connections_key) or 0) + if old_count > 0: + redis_client.decr(old_profile_connections_key) + + # Update the profile mapping + redis_client.set(f"stream_profile:{stream_id}", new_profile_id) + + # Increment connection count for new profile + new_profile_connections_key = f"profile_connections:{new_profile_id}" + redis_client.incr(new_profile_connections_key) + logger.info(f"Updated stream {stream_id} profile from {current_profile_id} to {new_profile_id}") + return True + class ChannelProfile(models.Model): name = models.CharField(max_length=100, unique=True) diff --git a/apps/proxy/ts_proxy/stream_manager.py b/apps/proxy/ts_proxy/stream_manager.py index 8f7b1817..6fd2b4b8 100644 --- a/apps/proxy/ts_proxy/stream_manager.py +++ b/apps/proxy/ts_proxy/stream_manager.py @@ -543,15 +543,27 @@ class StreamManager: logger.info(f"Switching stream URL from {self.url} to {new_url}") - # Release old stream resources if we have a current stream ID - if self.current_stream_id: + # Import both models for proper resource management + from apps.channels.models import Stream, Channel + + # Update stream profile if we're switching streams + if self.current_stream_id and stream_id and self.current_stream_id != stream_id: try: - from apps.channels.models import Stream - stream = Stream.objects.get(pk=self.current_stream_id) - stream.release_stream() - logger.info(f"Released stream {self.current_stream_id} for channel {self.channel_id}") + # Get the channel by UUID + channel = Channel.objects.get(uuid=self.channel_id) + + # Get stream to find its profile + new_stream = Stream.objects.get(pk=stream_id) + + # Use the new method to update the profile and manage connection counts + if new_stream.m3u_account_id: + success = channel.update_stream_profile(new_stream.m3u_account_id) + if success: + logger.debug(f"Updated stream profile for channel {self.channel_id} to use profile from stream 
{stream_id}") + else: + logger.warning(f"Failed to update stream profile for channel {self.channel_id}") except Exception as e: - logger.error(f"Error releasing stream {self.current_stream_id}: {e}") + logger.error(f"Error updating stream profile for channel {self.channel_id}: {e}") # CRITICAL: Set a flag to prevent immediate reconnection with old URL self.url_switching = True From 482803b24143569a0377e165fefcc22fc2d9d754 Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Mon, 28 Apr 2025 17:29:27 -0500 Subject: [PATCH 28/42] Removed unnecessary logs. --- apps/proxy/ts_proxy/channel_status.py | 4 ---- 1 file changed, 4 deletions(-) diff --git a/apps/proxy/ts_proxy/channel_status.py b/apps/proxy/ts_proxy/channel_status.py index 43e01df8..dd18d922 100644 --- a/apps/proxy/ts_proxy/channel_status.py +++ b/apps/proxy/ts_proxy/channel_status.py @@ -59,7 +59,6 @@ class ChannelStatus: stream = Stream.objects.filter(id=stream_id).first() if stream: info['stream_name'] = stream.name - logger.debug(f"Added stream name '{stream.name}' for stream ID {stream_id}") except (ImportError, DatabaseError) as e: logger.warning(f"Failed to get stream name for ID {stream_id}: {e}") except ValueError: @@ -78,7 +77,6 @@ class ChannelStatus: m3u_profile = M3UAccountProfile.objects.filter(id=m3u_profile_id).first() if m3u_profile: info['m3u_profile_name'] = m3u_profile.name - logger.debug(f"Added M3U profile name '{m3u_profile.name}' for profile ID {m3u_profile_id}") except (ImportError, DatabaseError) as e: logger.warning(f"Failed to get M3U profile name for ID {m3u_profile_id}: {e}") except ValueError: @@ -337,7 +335,6 @@ class ChannelStatus: stream = Stream.objects.filter(id=stream_id).first() if stream: info['stream_name'] = stream.name - logger.debug(f"Added stream name '{stream.name}' for stream ID {stream_id}") except (ImportError, DatabaseError) as e: logger.warning(f"Failed to get stream name for ID {stream_id}: {e}") except ValueError: @@ -412,7 +409,6 @@ class ChannelStatus: m3u_profile = M3UAccountProfile.objects.filter(id=m3u_profile_id).first() if m3u_profile: info['m3u_profile_name'] = m3u_profile.name - logger.debug(f"Added M3U profile name '{m3u_profile.name}' for profile ID {m3u_profile_id}") except (ImportError, DatabaseError) as e: logger.warning(f"Failed to get M3U profile name for ID {m3u_profile_id}: {e}") except ValueError: From 06d30667830320342fec139cf944b34f06219808 Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Mon, 28 Apr 2025 20:22:44 -0500 Subject: [PATCH 29/42] Improve logo handling in LogoViewSet: set default content type and add Content-Disposition for inline display. 
--- apps/channels/api_views.py | 18 ++++++++++++++---- 1 file changed, 14 insertions(+), 4 deletions(-) diff --git a/apps/channels/api_views.py b/apps/channels/api_views.py index 78907f8f..f3fb800a 100644 --- a/apps/channels/api_views.py +++ b/apps/channels/api_views.py @@ -629,14 +629,24 @@ class LogoViewSet(viewsets.ModelViewSet): if logo_url.startswith("/data"): # Local file if not os.path.exists(logo_url): raise Http404("Image not found") - mimetype = mimetypes.guess_type(logo_url) - return FileResponse(open(logo_url, "rb"), content_type=mimetype) + + # Get proper mime type (first item of the tuple) + content_type, _ = mimetypes.guess_type(logo_url) + if not content_type: + content_type = 'image/jpeg' # Default to a common image type + + # Use context manager and set Content-Disposition to inline + response = StreamingHttpResponse(open(logo_url, "rb"), content_type=content_type) + response['Content-Disposition'] = 'inline; filename="{}"'.format(os.path.basename(logo_url)) + return response else: # Remote image try: remote_response = requests.get(logo_url, stream=True) if remote_response.status_code == 200: - return StreamingHttpResponse(remote_response.iter_content(chunk_size=8192), content_type=remote_response.headers['Content-Type']) + response = StreamingHttpResponse(remote_response.iter_content(chunk_size=8192), content_type=remote_response.headers['Content-Type']) + response['Content-Disposition'] = 'inline; filename="{}"'.format(os.path.basename(logo_url)) + return response raise Http404("Remote image not found") except requests.RequestException: raise Http404("Error fetching remote image") @@ -679,7 +689,7 @@ class BulkUpdateChannelMembershipAPIView(APIView): if serializer.is_valid(): updates = serializer.validated_data['channels'] - channel_ids = [entry['channel_id'] for entry in updates] + channel_ids = [entry['channel_id'] for entry['channel_id'] in updates] memberships = ChannelProfileMembership.objects.filter( channel_profile=channel_profile, From 4cf4a0d68dbcb6057ba0a167dcc1aec8f1499045 Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Mon, 28 Apr 2025 20:26:54 -0500 Subject: [PATCH 30/42] Reverted unintended change. --- apps/channels/api_views.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/apps/channels/api_views.py b/apps/channels/api_views.py index f3fb800a..821b0ef4 100644 --- a/apps/channels/api_views.py +++ b/apps/channels/api_views.py @@ -689,7 +689,8 @@ class BulkUpdateChannelMembershipAPIView(APIView): if serializer.is_valid(): updates = serializer.validated_data['channels'] - channel_ids = [entry['channel_id'] for entry['channel_id'] in updates] + channel_ids = [entry['channel_id'] for entry in updates] + memberships = ChannelProfileMembership.objects.filter( channel_profile=channel_profile, From ee2c2194f81c26be4ec315fe1fa64f1bc8667a81 Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Mon, 28 Apr 2025 20:36:09 -0500 Subject: [PATCH 31/42] Mimetype guessing as a fallback for remote images. 
--- apps/channels/api_views.py | 13 ++++++++++++- 1 file changed, 12 insertions(+), 1 deletion(-) diff --git a/apps/channels/api_views.py b/apps/channels/api_views.py index 821b0ef4..51952541 100644 --- a/apps/channels/api_views.py +++ b/apps/channels/api_views.py @@ -644,7 +644,18 @@ class LogoViewSet(viewsets.ModelViewSet): try: remote_response = requests.get(logo_url, stream=True) if remote_response.status_code == 200: - response = StreamingHttpResponse(remote_response.iter_content(chunk_size=8192), content_type=remote_response.headers['Content-Type']) + # Try to get content type from response headers first + content_type = remote_response.headers.get('Content-Type') + + # If no content type in headers or it's empty, guess based on URL + if not content_type: + content_type, _ = mimetypes.guess_type(logo_url) + + # If still no content type, default to common image type + if not content_type: + content_type = 'image/jpeg' + + response = StreamingHttpResponse(remote_response.iter_content(chunk_size=8192), content_type=content_type) response['Content-Disposition'] = 'inline; filename="{}"'.format(os.path.basename(logo_url)) return response raise Http404("Remote image not found") From 9b443a0a3ed8408902a401f3fc8cc30665f83196 Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Tue, 29 Apr 2025 09:50:57 -0500 Subject: [PATCH 32/42] Adds m3u profile name to stream name. --- frontend/src/pages/Stats.jsx | 36 +++++++++++++++++++++++++++++++----- 1 file changed, 31 insertions(+), 5 deletions(-) diff --git a/frontend/src/pages/Stats.jsx b/frontend/src/pages/Stats.jsx index 3fb818e3..fc6705b0 100644 --- a/frontend/src/pages/Stats.jsx +++ b/frontend/src/pages/Stats.jsx @@ -34,6 +34,7 @@ import duration from 'dayjs/plugin/duration'; import relativeTime from 'dayjs/plugin/relativeTime'; import { Sparkline } from '@mantine/charts'; import useStreamProfilesStore from '../store/streamProfiles'; +import usePlaylistsStore from '../store/playlists'; // Add this import import { useLocation } from 'react-router-dom'; import { notifications } from '@mantine/notifications'; @@ -84,6 +85,22 @@ const ChannelCard = ({ channel, clients, stopClient, stopChannel, logos, channel const [activeStreamId, setActiveStreamId] = useState(null); const [currentM3UProfile, setCurrentM3UProfile] = useState(null); // Add state for current M3U profile + // Get M3U account data from the playlists store + const m3uAccounts = usePlaylistsStore((s) => s.playlists); + + // Create a map of M3U account IDs to names for quick lookup + const m3uAccountsMap = useMemo(() => { + const map = {}; + if (m3uAccounts && Array.isArray(m3uAccounts)) { + m3uAccounts.forEach(account => { + if (account.id) { + map[account.id] = account.name; + } + }); + } + return map; + }, [m3uAccounts]); + // Safety check - if channel doesn't have required data, don't render if (!channel || !channel.channel_id) { return null; @@ -346,13 +363,22 @@ const ChannelCard = ({ channel, clients, stopClient, stopChannel, logos, channel const m3uProfileName = currentM3UProfile?.name || channel.m3u_profile?.name || channel.m3u_profile_name || - 'Default M3U'; + 'Unknown M3U Profile'; // Create select options for available streams - const streamOptions = availableStreams.map(stream => ({ - value: stream.id.toString(), - label: `${stream.name || `Stream #${stream.id}`}`, // Make sure stream name is clear - })); + const streamOptions = availableStreams.map(stream => { + // Get account name from our mapping if it exists + const accountName = stream.m3u_account && 
m3uAccountsMap[stream.m3u_account] + ? m3uAccountsMap[stream.m3u_account] + : stream.m3u_account + ? `M3U #${stream.m3u_account}` + : 'Unknown M3U'; + + return { + value: stream.id.toString(), + label: `${stream.name || `Stream #${stream.id}`} [${accountName}]`, + }; + }); return ( Date: Tue, 29 Apr 2025 11:10:26 -0500 Subject: [PATCH 33/42] Add key prop to row Box in CustomTableBody for improved rendering --- frontend/src/components/tables/CustomTable/CustomTableBody.jsx | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/frontend/src/components/tables/CustomTable/CustomTableBody.jsx b/frontend/src/components/tables/CustomTable/CustomTableBody.jsx index ac7000d3..f3351541 100644 --- a/frontend/src/components/tables/CustomTable/CustomTableBody.jsx +++ b/frontend/src/components/tables/CustomTable/CustomTableBody.jsx @@ -73,7 +73,7 @@ const CustomTableBody = ({ const renderTableBodyRow = (row, index, style = {}) => { return ( - + Date: Tue, 29 Apr 2025 12:20:26 -0500 Subject: [PATCH 34/42] Allows holding shift and selecting rows. --- .../tables/CustomTable/CustomTable.jsx | 3 +- .../components/tables/CustomTable/index.jsx | 50 ++++++++++++++++--- 2 files changed, 45 insertions(+), 8 deletions(-) diff --git a/frontend/src/components/tables/CustomTable/CustomTable.jsx b/frontend/src/components/tables/CustomTable/CustomTable.jsx index 75e2445e..e1c05ff4 100644 --- a/frontend/src/components/tables/CustomTable/CustomTable.jsx +++ b/frontend/src/components/tables/CustomTable/CustomTable.jsx @@ -1,6 +1,6 @@ import { Box, Flex } from '@mantine/core'; import CustomTableHeader from './CustomTableHeader'; -import { useCallback, useState } from 'react'; +import { useCallback, useState, useRef } from 'react'; import { flexRender } from '@tanstack/react-table'; import table from '../../../helpers/table'; import CustomTableBody from './CustomTableBody'; @@ -11,7 +11,6 @@ const CustomTable = ({ table }) => { className="divTable table-striped" style={{ width: '100%', - // height: '100%', // ONLY required when using virtual tables display: 'flex', flexDirection: 'column', }} diff --git a/frontend/src/components/tables/CustomTable/index.jsx b/frontend/src/components/tables/CustomTable/index.jsx index 7be3a8e7..3303dffe 100644 --- a/frontend/src/components/tables/CustomTable/index.jsx +++ b/frontend/src/components/tables/CustomTable/index.jsx @@ -21,6 +21,7 @@ const useTable = ({ }) => { const [selectedTableIds, setSelectedTableIds] = useState([]); const [expandedRowIds, setExpandedRowIds] = useState([]); + const [lastClickedId, setLastClickedId] = useState(null); const rowCount = allRowIds.length; @@ -77,6 +78,34 @@ const useTable = ({ updateSelectedTableIds([row.original.id]); }; + // Handle the shift+click selection + const handleShiftSelect = (rowId, isShiftKey) => { + if (!isShiftKey || lastClickedId === null) { + // Normal selection behavior + setLastClickedId(rowId); + return false; // Return false to indicate we're not handling it + } + + // Handle shift-click range selection + const currentIndex = allRowIds.indexOf(rowId); + const lastIndex = allRowIds.indexOf(lastClickedId); + + if (currentIndex === -1 || lastIndex === -1) return false; + + // Determine range + const startIndex = Math.min(currentIndex, lastIndex); + const endIndex = Math.max(currentIndex, lastIndex); + const rangeIds = allRowIds.slice(startIndex, endIndex + 1); + + // Preserve existing selections outside the range + const idsOutsideRange = selectedTableIds.filter(id => !rangeIds.includes(id)); + const newSelection = 
[...new Set([...rangeIds, ...idsOutsideRange])]; + updateSelectedTableIds(newSelection); + + setLastClickedId(rowId); + return true; // Return true to indicate we've handled it + }; + const renderBodyCell = ({ row, cell }) => { if (bodyCellRenderFns[cell.column.id]) { return bodyCellRenderFns[cell.column.id]({ row, cell }); @@ -91,13 +120,22 @@ const useTable = ({ size="xs" checked={selectedTableIdsSet.has(row.original.id)} onChange={(e) => { - const newSet = new Set(selectedTableIds); - if (e.target.checked) { - newSet.add(row.original.id); - } else { - newSet.delete(row.original.id); + const rowId = row.original.id; + + // Get shift key state from the event + const isShiftKey = e.nativeEvent.shiftKey; + + // Try to handle with shift-select logic first + if (!handleShiftSelect(rowId, isShiftKey)) { + // If not handled by shift-select, do regular toggle + const newSet = new Set(selectedTableIds); + if (e.target.checked) { + newSet.add(rowId); + } else { + newSet.delete(rowId); + } + updateSelectedTableIds([...newSet]); } - updateSelectedTableIds([...newSet]); }} /> From 9be42ce53254cc3da265541e27393910d8a756ac Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Tue, 29 Apr 2025 12:47:16 -0500 Subject: [PATCH 35/42] Don't select text when shift is held --- .../components/tables/CustomTable/index.jsx | 62 +++++- frontend/src/components/tables/table.css | 207 ++++++++++++------ 2 files changed, 194 insertions(+), 75 deletions(-) diff --git a/frontend/src/components/tables/CustomTable/index.jsx b/frontend/src/components/tables/CustomTable/index.jsx index 3303dffe..0b1d824f 100644 --- a/frontend/src/components/tables/CustomTable/index.jsx +++ b/frontend/src/components/tables/CustomTable/index.jsx @@ -7,7 +7,7 @@ import { getCoreRowModel, flexRender, } from '@tanstack/react-table'; -import { useCallback, useMemo, useState } from 'react'; +import { useCallback, useMemo, useState, useEffect } from 'react'; import { ChevronDown, ChevronRight } from 'lucide-react'; const useTable = ({ @@ -22,6 +22,63 @@ const useTable = ({ const [selectedTableIds, setSelectedTableIds] = useState([]); const [expandedRowIds, setExpandedRowIds] = useState([]); const [lastClickedId, setLastClickedId] = useState(null); + const [isShiftKeyDown, setIsShiftKeyDown] = useState(false); + + // Event handlers for shift key detection with improved handling + const handleKeyDown = useCallback((e) => { + if (e.key === 'Shift') { + setIsShiftKeyDown(true); + // Apply the class to disable text selection immediately + document.body.classList.add('shift-key-active'); + // Set a style attribute directly on body for extra assurance + document.body.style.userSelect = 'none'; + document.body.style.webkitUserSelect = 'none'; + document.body.style.msUserSelect = 'none'; + document.body.style.cursor = 'pointer'; + } + }, []); + + const handleKeyUp = useCallback((e) => { + if (e.key === 'Shift') { + setIsShiftKeyDown(false); + // Remove the class when shift is released + document.body.classList.remove('shift-key-active'); + // Reset the style attributes + document.body.style.removeProperty('user-select'); + document.body.style.removeProperty('-webkit-user-select'); + document.body.style.removeProperty('-ms-user-select'); + document.body.style.removeProperty('cursor'); + } + }, []); + + // Add global event listeners for shift key detection with improved cleanup + useEffect(() => { + window.addEventListener('keydown', handleKeyDown); + window.addEventListener('keyup', handleKeyUp); + + // Also detect blur/focus events to handle cases where shift 
is held and window loses focus + window.addEventListener('blur', () => { + setIsShiftKeyDown(false); + document.body.classList.remove('shift-key-active'); + document.body.style.removeProperty('user-select'); + document.body.style.removeProperty('-webkit-user-select'); + document.body.style.removeProperty('-ms-user-select'); + document.body.style.removeProperty('cursor'); + }); + + return () => { + window.removeEventListener('keydown', handleKeyDown); + window.removeEventListener('keyup', handleKeyUp); + window.removeEventListener('blur', () => { + setIsShiftKeyDown(false); + document.body.classList.remove('shift-key-active'); + document.body.style.removeProperty('user-select'); + document.body.style.removeProperty('-webkit-user-select'); + document.body.style.removeProperty('-ms-user-select'); + document.body.style.removeProperty('cursor'); + }); + }; + }, [handleKeyDown, handleKeyUp]); const rowCount = allRowIds.length; @@ -175,8 +232,9 @@ const useTable = ({ expandedRowIds, expandedRowRenderer, setSelectedTableIds, + isShiftKeyDown, // Include shift key state in the table instance }), - [selectedTableIdsSet, expandedRowIds, allRowIds] + [selectedTableIdsSet, expandedRowIds, allRowIds, isShiftKeyDown] ); return { diff --git a/frontend/src/components/tables/table.css b/frontend/src/components/tables/table.css index 00198499..c3651246 100644 --- a/frontend/src/components/tables/table.css +++ b/frontend/src/components/tables/table.css @@ -1,94 +1,155 @@ * { - /* box-sizing: border-box; */ - } + /* box-sizing: border-box; */ +} - html { - font-family: sans-serif; - /* font-size: 14px; */ - } +html { + font-family: sans-serif; + /* font-size: 14px; */ +} - .divTable { - /* border: 1px solid lightgray; */ - /* width: fit-content; */ - /* display: flex; +.divTable { + /* border: 1px solid lightgray; */ + /* width: fit-content; */ + /* display: flex; flex-direction: column; */ - } +} - .tr { - display: flex; - } +.tr { + display: flex; +} - .table-striped .tbody .tr:hover { - background-color: rgb(68,68,68); - } +.table-striped .tbody .tr:hover { + background-color: rgb(68, 68, 68); +} - .tr { - /* width: fit-content; +.tr { + /* width: fit-content; width: 100%; */ - /* height: 30px; */ - } + /* height: 30px; */ +} - .th, - .td { - /* box-shadow: inset 0 0 0 1px lightgray; */ - /* padding: 0.25rem; */ - padding-left: 4px; - padding-right: 4px; - } +.th, +.td { + /* box-shadow: inset 0 0 0 1px lightgray; */ + /* padding: 0.25rem; */ + padding-left: 4px; + padding-right: 4px; +} - .th { - /* padding: 2px 4px; */ - position: relative; - font-weight: bold; - text-align: center; - /* height: 30px; */ - } +.th { + /* padding: 2px 4px; */ + position: relative; + font-weight: bold; + text-align: center; + /* height: 30px; */ +} - .td { - height: 28px; - border-bottom: solid 1px rgb(68,68,68); - } +.td { + height: 28px; + border-bottom: solid 1px rgb(68, 68, 68); +} +.resizer { + position: absolute; + top: 0; + height: 100%; + width: 5px; + background: rgba(0, 0, 0, 0.5); + cursor: col-resize; + user-select: none; + touch-action: none; +} + +.resizer.ltr { + right: 0; +} + +.resizer.rtl { + left: 0; +} + +.resizer.isResizing { + background: blue; + opacity: 1; +} + +@media (hover: hover) { .resizer { - position: absolute; - top: 0; - height: 100%; - width: 5px; - background: rgba(0, 0, 0, 0.5); - cursor: col-resize; - user-select: none; - touch-action: none; + opacity: 0; } - .resizer.ltr { - right: 0; - } - - .resizer.rtl { - left: 0; - } - - .resizer.isResizing { - background: blue; + *:hover>.resizer { 
opacity: 1; } +} - @media (hover: hover) { - .resizer { - opacity: 0; - } +/* .table-striped .tbody .tr:nth-child(odd), */ +.table-striped .tbody .tr-odd { + background-color: #18181b; +} - *:hover > .resizer { - opacity: 1; - } - } +/* .table-striped .tbody .tr:nth-child(even), */ +.table-striped .tbody .tr-even { + background-color: #27272A; +} - /* .table-striped .tbody .tr:nth-child(odd), */ - .table-striped .tbody .tr-odd { - background-color: #18181b; - } +/* Prevent text selection when shift key is pressed */ +.shift-key-active { + cursor: pointer !important; +} - /* .table-striped .tbody .tr:nth-child(even), */ - .table-striped .tbody .tr-even { - background-color: #27272A; - } +.shift-key-active *, +.shift-key-active .tr, +.shift-key-active .td, +.shift-key-active .tbody { + user-select: none !important; + -webkit-user-select: none !important; + -moz-user-select: none !important; + -ms-user-select: none !important; +} + +/* Always allow text selection in editable elements */ +.shift-key-active input, +.shift-key-active textarea, +.shift-key-active [contenteditable="true"], +.shift-key-active .table-input-header input { + user-select: text !important; + -webkit-user-select: text !important; + -moz-user-select: text !important; + -ms-user-select: text !important; + cursor: text !important; +} + +/* Improve specificity and ensure text selection is disabled when shift is pressed */ +.shift-key-active, +.shift-key-active * { + user-select: none !important; + -webkit-user-select: none !important; + -moz-user-select: none !important; + -ms-user-select: none !important; + cursor: pointer !important; +} + +/* Add a visual indicator when shift is pressed */ +.shift-key-active .tbody .tr { + transition: background-color 0.1s; +} + +.shift-key-active .tbody .tr:hover { + background-color: rgba(68, 68, 68, 0.7) !important; +} + +/* Always allow text selection in inputs even when shift is pressed */ +.shift-key-active input, +.shift-key-active textarea, +.shift-key-active [contenteditable="true"], +.shift-key-active select, +.shift-key-active .mantine-Select-input, +.shift-key-active .mantine-MultiSelect-input, +.shift-key-active .table-input-header input { + user-select: text !important; + -webkit-user-select: text !important; + -moz-user-select: text !important; + -ms-user-select: text !important; + cursor: text !important; +} \ No newline at end of file From d27e4b7e8abbec92152fa156721fefb3af7c0402 Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Tue, 29 Apr 2025 14:14:40 -0500 Subject: [PATCH 36/42] Release stream lock before returning url if using redirect profile. --- apps/proxy/ts_proxy/views.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/apps/proxy/ts_proxy/views.py b/apps/proxy/ts_proxy/views.py index 222da4e3..35ca3648 100644 --- a/apps/proxy/ts_proxy/views.py +++ b/apps/proxy/ts_proxy/views.py @@ -193,7 +193,8 @@ def stream_ts(request, channel_id): break else: logger.warning(f"[{client_id}] Alternate stream #{alt['stream_id']} failed validation: {message}") - + # Release stream lock before redirecting + channel.release_stream() # Final decision based on validation results if is_valid: logger.info(f"[{client_id}] Redirecting to validated URL: {final_url} ({message})") From 2f23909bed50b940476fb12844331460ffd9ffa3 Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Tue, 29 Apr 2025 15:13:15 -0500 Subject: [PATCH 37/42] Fixed bug overwriting tvg-id when loading TV Guide. 
--- apps/epg/api_views.py | 4 ---- 1 file changed, 4 deletions(-) diff --git a/apps/epg/api_views.py b/apps/epg/api_views.py index 48eb680d..526172f1 100644 --- a/apps/epg/api_views.py +++ b/apps/epg/api_views.py @@ -140,10 +140,6 @@ class EPGGridAPIView(APIView): } dummy_programs.append(dummy_program) - # Also update the channel to use this dummy tvg_id - channel.tvg_id = dummy_tvg_id - channel.save(update_fields=['tvg_id']) - except Exception as e: logger.error(f"Error creating dummy programs for channel {channel.name} (ID: {channel.id}): {str(e)}") From 418bf01449b42e9add51a4288308f833ad1e2715 Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Tue, 29 Apr 2025 18:13:42 -0500 Subject: [PATCH 38/42] Notify user of how many matches auto-match found. Add batch EPG association endpoint and improve EPG matching logic - Implemented a new API endpoint to associate multiple channels with EPG data in a single request. - Enhanced the EPG matching process to normalize TVG IDs and log relevant information. - Updated frontend to handle batch EPG associations efficiently, falling back to legacy methods when necessary. --- apps/channels/api_views.py | 65 ++++++++++++++++++++++++++++++++++++++ apps/channels/tasks.py | 64 ++++++++++++++++++++++++++++--------- frontend/src/WebSocket.jsx | 14 +++++--- frontend/src/api.js | 29 ++++++++++++++++- scripts/epg_match.py | 18 +++++++++-- 5 files changed, 168 insertions(+), 22 deletions(-) diff --git a/apps/channels/api_views.py b/apps/channels/api_views.py index 51952541..ccd942d6 100644 --- a/apps/channels/api_views.py +++ b/apps/channels/api_views.py @@ -541,6 +541,71 @@ class ChannelViewSet(viewsets.ModelViewSet): except Exception as e: return Response({"error": str(e)}, status=400) + @swagger_auto_schema( + method='post', + operation_description="Associate multiple channels with EPG data without triggering a full refresh", + request_body=openapi.Schema( + type=openapi.TYPE_OBJECT, + properties={ + 'associations': openapi.Schema( + type=openapi.TYPE_ARRAY, + items=openapi.Schema( + type=openapi.TYPE_OBJECT, + properties={ + 'channel_id': openapi.Schema(type=openapi.TYPE_INTEGER), + 'epg_data_id': openapi.Schema(type=openapi.TYPE_INTEGER) + } + ) + ) + } + ), + responses={200: "EPG data linked for multiple channels"} + ) + @action(detail=False, methods=['post'], url_path='batch-set-epg') + def batch_set_epg(self, request): + """Efficiently associate multiple channels with EPG data at once.""" + associations = request.data.get('associations', []) + channels_updated = 0 + programs_refreshed = 0 + unique_epg_ids = set() + + for assoc in associations: + channel_id = assoc.get('channel_id') + epg_data_id = assoc.get('epg_data_id') + + if not channel_id: + continue + + try: + # Get the channel + channel = Channel.objects.get(id=channel_id) + + # Set the EPG data + channel.epg_data_id = epg_data_id + channel.save(update_fields=['epg_data']) + channels_updated += 1 + + # Track unique EPG data IDs + if epg_data_id: + unique_epg_ids.add(epg_data_id) + + except Channel.DoesNotExist: + logger.error(f"Channel with ID {channel_id} not found") + except Exception as e: + logger.error(f"Error setting EPG data for channel {channel_id}: {str(e)}") + + # Trigger program refresh for unique EPG data IDs + from apps.epg.tasks import parse_programs_for_tvg_id + for epg_id in unique_epg_ids: + parse_programs_for_tvg_id.delay(epg_id) + programs_refreshed += 1 + + return Response({ + 'success': True, + 'channels_updated': channels_updated, + 'programs_refreshed': programs_refreshed + }) 
+ # ───────────────────────────────────────────────────────── # 4) Bulk Delete Streams # ───────────────────────────────────────────────────────── diff --git a/apps/channels/tasks.py b/apps/channels/tasks.py index 94bb8ca9..88d040e8 100755 --- a/apps/channels/tasks.py +++ b/apps/channels/tasks.py @@ -75,21 +75,42 @@ def match_epg_channels(): matched_channels = [] channels_to_update = [] - channels_json = [{ - "id": channel.id, - "name": channel.name, - "tvg_id": channel.tvg_id, - "fallback_name": channel.tvg_id.strip() if channel.tvg_id else channel.name, - "norm_chan": normalize_name(channel.tvg_id.strip() if channel.tvg_id else channel.name) - } for channel in Channel.objects.all() if not channel.epg_data] + # Get channels that don't have EPG data assigned + channels_without_epg = Channel.objects.filter(epg_data__isnull=True) + logger.info(f"Found {channels_without_epg.count()} channels without EPG data") - epg_json = [{ - 'id': epg.id, - 'tvg_id': epg.tvg_id, - 'name': epg.name, - 'norm_name': normalize_name(epg.name), - 'epg_source_id': epg.epg_source.id, - } for epg in EPGData.objects.all()] + channels_json = [] + for channel in channels_without_epg: + # Normalize TVG ID - strip whitespace and convert to lowercase + normalized_tvg_id = channel.tvg_id.strip().lower() if channel.tvg_id else "" + if normalized_tvg_id: + logger.info(f"Processing channel {channel.id} '{channel.name}' with TVG ID='{normalized_tvg_id}'") + + channels_json.append({ + "id": channel.id, + "name": channel.name, + "tvg_id": normalized_tvg_id, # Use normalized TVG ID + "original_tvg_id": channel.tvg_id, # Keep original for reference + "fallback_name": normalized_tvg_id if normalized_tvg_id else channel.name, + "norm_chan": normalize_name(normalized_tvg_id if normalized_tvg_id else channel.name) + }) + + # Similarly normalize EPG data TVG IDs + epg_json = [] + for epg in EPGData.objects.all(): + normalized_tvg_id = epg.tvg_id.strip().lower() if epg.tvg_id else "" + epg_json.append({ + 'id': epg.id, + 'tvg_id': normalized_tvg_id, # Use normalized TVG ID + 'original_tvg_id': epg.tvg_id, # Keep original for reference + 'name': epg.name, + 'norm_name': normalize_name(epg.name), + 'epg_source_id': epg.epg_source.id if epg.epg_source else None, + }) + + # Log available EPG data TVG IDs for debugging + unique_epg_tvg_ids = set(e['tvg_id'] for e in epg_json if e['tvg_id']) + logger.info(f"Available EPG TVG IDs: {', '.join(sorted(unique_epg_tvg_ids))}") payload = { "channels": channels_json, @@ -159,12 +180,25 @@ def match_epg_channels(): logger.info("Finished EPG matching logic.") + # Send update with additional information for refreshing UI channel_layer = get_channel_layer() + associations = [ + {"channel_id": chan["id"], "epg_data_id": chan["epg_data_id"]} + for chan in channels_to_update_dicts + ] + async_to_sync(channel_layer.group_send)( 'updates', { 'type': 'update', - "data": {"success": True, "type": "epg_match"} + "data": { + "success": True, + "type": "epg_match", + "refresh_channels": True, # Flag to tell frontend to refresh channels + "matches_count": total_matched, + "message": f"EPG matching complete: {total_matched} channel(s) matched", + "associations": associations # Add the associations data + } } ) diff --git a/frontend/src/WebSocket.jsx b/frontend/src/WebSocket.jsx index cd4bca6f..0f5c4404 100644 --- a/frontend/src/WebSocket.jsx +++ b/frontend/src/WebSocket.jsx @@ -14,7 +14,7 @@ import useEPGsStore from './store/epgs'; import { Box, Button, Stack } from '@mantine/core'; import API from './api'; -export 
const WebsocketContext = createContext([false, () => {}, null]); +export const WebsocketContext = createContext([false, () => { }, null]); export const WebsocketProvider = ({ children }) => { const [isReady, setIsReady] = useState(false); @@ -121,11 +121,17 @@ export const WebsocketProvider = ({ children }) => { case 'epg_match': notifications.show({ - message: 'EPG match is complete!', + message: event.data.message || 'EPG match is complete!', color: 'green.5', }); - // fetchChannels(); - fetchEPGData(); + + // Check if we have associations data and use the more efficient batch API + if (event.data.associations && event.data.associations.length > 0) { + API.batchSetEPG(event.data.associations); + } else { + // Fall back to legacy full refresh method + API.requeryChannels(); + } break; case 'm3u_profile_test': diff --git a/frontend/src/api.js b/frontend/src/api.js index e54e628a..8e1fe46c 100644 --- a/frontend/src/api.js +++ b/frontend/src/api.js @@ -1130,7 +1130,7 @@ export default class API { return response; } catch (e) { - errorNotification('Failed to create channle profile', e); + errorNotification('Failed to create channel profile', e); } } @@ -1271,4 +1271,31 @@ export default class API { throw e; } } + + static async batchSetEPG(associations) { + try { + const response = await request( + `${host}/api/channels/channels/batch-set-epg/`, + { + method: 'POST', + body: { associations }, + } + ); + + // If successful, requery channels to update UI + if (response.success) { + notifications.show({ + title: 'EPG Association', + message: `Updated ${response.channels_updated} channels, refreshing ${response.programs_refreshed} EPG sources.`, + color: 'blue', + }); + + this.requeryChannels(); + } + + return response; + } catch (e) { + errorNotification('Failed to update channel EPGs', e); + } + } } diff --git a/scripts/epg_match.py b/scripts/epg_match.py index e5d17466..ed86d865 100644 --- a/scripts/epg_match.py +++ b/scripts/epg_match.py @@ -34,7 +34,7 @@ def process_data(input_data): channels = input_data["channels"] epg_data = input_data["epg_data"] - region_code = input_data["region_code"] + region_code = input_data.get("region_code", None) epg_embeddings = None if any(row["norm_name"] for row in epg_data): @@ -47,6 +47,21 @@ def process_data(input_data): matched_channels = [] for chan in channels: + normalized_tvg_id = chan.get("tvg_id", "") + fallback_name = chan["tvg_id"].strip() if chan["tvg_id"] else chan["name"] + + # Exact TVG ID match (direct match) + epg_by_tvg_id = next((epg for epg in epg_data if epg["tvg_id"] == normalized_tvg_id), None) + if normalized_tvg_id and epg_by_tvg_id: + chan["epg_data_id"] = epg_by_tvg_id["id"] + channels_to_update.append(chan) + + # Add to matched_channels list so it's counted in the total + matched_channels.append((chan['id'], fallback_name, epg_by_tvg_id["tvg_id"])) + + eprint(f"Channel {chan['id']} '{fallback_name}' => EPG found by tvg_id={epg_by_tvg_id['tvg_id']}") + continue + # If channel has a tvg_id that doesn't exist in EPGData, do direct check. # I don't THINK this should happen now that we assign EPG on channel creation. 
 if chan["tvg_id"]:
@@ -59,7 +74,6 @@ def process_data(input_data):
             continue
 
         # C) Perform name-based fuzzy matching
-        fallback_name = chan["tvg_id"].strip() if chan["tvg_id"] else chan["name"]
         if not chan["norm_chan"]:
             eprint(f"Channel {chan['id']} '{chan['name']}' => empty after normalization, skipping")
             continue

From bdb8d326a59f1c259cd8c584bcb44affa70a9565 Mon Sep 17 00:00:00 2001
From: SergeantPanda
Date: Wed, 30 Apr 2025 12:17:11 -0500
Subject: [PATCH 39/42] Add better logging for which channel clients are getting chunks from.

---
 apps/proxy/ts_proxy/stream_generator.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/apps/proxy/ts_proxy/stream_generator.py b/apps/proxy/ts_proxy/stream_generator.py
index 17e53b9d..26ed3a6b 100644
--- a/apps/proxy/ts_proxy/stream_generator.py
+++ b/apps/proxy/ts_proxy/stream_generator.py
@@ -208,7 +208,7 @@ class StreamGenerator:
                     # Log empty reads periodically
                     if self.empty_reads % 50 == 0:
                         stream_status = "healthy" if (self.stream_manager and self.stream_manager.healthy) else "unknown"
-                        logger.debug(f"[{self.client_id}] Waiting for chunks beyond {self.local_index} (buffer at {self.buffer.index}, stream: {stream_status})")
+                        logger.debug(f"[{self.client_id}] Waiting for chunks beyond {self.local_index} for channel: {self.channel_id} (buffer at {self.buffer.index}, stream: {stream_status})")
 
                     # Check for ghost clients
                     if self._is_ghost_client(self.local_index):
@@ -277,7 +277,7 @@ class StreamGenerator:
                             yield chunk
                             self.bytes_sent += len(chunk)
                             self.chunks_sent += 1
-                            logger.debug(f"[{self.client_id}] Sent chunk {self.chunks_sent} ({len(chunk)} bytes) to client")
+                            logger.debug(f"[{self.client_id}] Sent chunk {self.chunks_sent} ({len(chunk)} bytes) for channel {self.channel_id} to client")
 
                             current_time = time.time()
 
From b7c543b5f5da004739622d40a9028c425177a4d9 Mon Sep 17 00:00:00 2001
From: SergeantPanda
Date: Wed, 30 Apr 2025 12:48:50 -0500
Subject: [PATCH 40/42] Use gevent.sleep instead of time.sleep.

---
 apps/proxy/ts_proxy/stream_buffer.py    | 5 +++++
 apps/proxy/ts_proxy/stream_generator.py | 9 +++++----
 2 files changed, 10 insertions(+), 4 deletions(-)

diff --git a/apps/proxy/ts_proxy/stream_buffer.py b/apps/proxy/ts_proxy/stream_buffer.py
index 4d73bdc2..f0be1c52 100644
--- a/apps/proxy/ts_proxy/stream_buffer.py
+++ b/apps/proxy/ts_proxy/stream_buffer.py
@@ -11,6 +11,7 @@ from .redis_keys import RedisKeys
 from .config_helper import ConfigHelper
 from .constants import TS_PACKET_SIZE
 from .utils import get_logger
+import gevent.event
 
 logger = get_logger()
 
@@ -46,6 +47,7 @@ class StreamBuffer:
         # Track timers for proper cleanup
         self.stopping = False
         self.fill_timers = []
+        self.chunk_available = gevent.event.Event()
 
     def add_chunk(self, chunk):
         """Add data with optimized Redis storage and TS packet alignment"""
@@ -96,6 +98,9 @@ class StreamBuffer:
             if writes_done > 0:
                 logger.debug(f"Added {writes_done} chunks ({self.target_chunk_size} bytes each) to Redis for channel {self.channel_id} at index {self.index}")
 
+                self.chunk_available.set()    # Signal that new data is available
+                self.chunk_available.clear()  # Reset for next notification
+
             return True
 
         except Exception as e:
diff --git a/apps/proxy/ts_proxy/stream_generator.py b/apps/proxy/ts_proxy/stream_generator.py
index 26ed3a6b..bdd20874 100644
--- a/apps/proxy/ts_proxy/stream_generator.py
+++ b/apps/proxy/ts_proxy/stream_generator.py
@@ -6,6 +6,7 @@ This module handles generating and delivering video streams to clients.
 import time
 import logging
 import threading
+import gevent
 from apps.proxy.config import TSConfig as Config
 from .server import ProxyServer
 from .utils import create_ts_packet, get_logger
@@ -135,7 +136,7 @@ class StreamGenerator:
                 return False
 
             # Wait a bit before checking again
-            time.sleep(0.1)
+            gevent.sleep(0.1)
 
         # Timed out waiting
         logger.warning(f"[{self.client_id}] Timed out waiting for initialization")
@@ -199,11 +200,11 @@ class StreamGenerator:
                             self.bytes_sent += len(keepalive_packet)
                             self.last_yield_time = time.time()
                             self.consecutive_empty = 0  # Reset consecutive counter but keep total empty_reads
-                            time.sleep(Config.KEEPALIVE_INTERVAL)
+                            gevent.sleep(Config.KEEPALIVE_INTERVAL)
                         else:
                             # Standard wait with backoff
                             sleep_time = min(0.1 * self.consecutive_empty, 1.0)
-                            time.sleep(sleep_time)
+                            gevent.sleep(sleep_time)
 
                     # Log empty reads periodically
                     if self.empty_reads % 50 == 0:
@@ -416,7 +417,7 @@ class StreamGenerator:
             # Use the config setting instead of hardcoded value
             shutdown_delay = getattr(Config, 'CHANNEL_SHUTDOWN_DELAY', 5)
             logger.info(f"Waiting {shutdown_delay}s before checking if channel should be stopped")
-            time.sleep(shutdown_delay)
+            gevent.sleep(shutdown_delay)
 
             # After delay, check global client count
             if self.channel_id in proxy_server.client_managers:

From 423020861c2179759931e49cc7bef4dbc242864c Mon Sep 17 00:00:00 2001
From: SergeantPanda
Date: Wed, 30 Apr 2025 13:32:16 -0500
Subject: [PATCH 41/42] Replace time.sleep with gevent.sleep for improved concurrency

---
 apps/proxy/ts_proxy/server.py           |  9 +++++----
 apps/proxy/ts_proxy/stream_generator.py |  6 ++----
 apps/proxy/ts_proxy/stream_manager.py   | 11 ++++++-----
 3 files changed, 13 insertions(+), 13 deletions(-)

diff --git a/apps/proxy/ts_proxy/server.py b/apps/proxy/ts_proxy/server.py
index 1a04ffb5..cebcc545 100644
--- a/apps/proxy/ts_proxy/server.py
+++ b/apps/proxy/ts_proxy/server.py
@@ -15,6 +15,7 @@ import time
 import sys
 import os
 import json
+import gevent
 from typing import Dict, Optional, Set
 from apps.proxy.config import TSConfig as Config
 from apps.channels.models import Channel, Stream
@@ -209,7 +210,7 @@ class ProxyServer:
 
                             if shutdown_delay > 0:
                                 logger.info(f"Waiting {shutdown_delay}s before stopping channel...")
-                                time.sleep(shutdown_delay)
+                                gevent.sleep(shutdown_delay)
 
                             # Re-check client count before stopping
                             total = self.redis_client.scard(client_set_key) or 0
@@ -336,7 +337,7 @@ class ProxyServer:
                         final_delay = delay + jitter
 
                         logger.error(f"Error in event listener: {e}. Retrying in {final_delay:.1f}s (attempt {retry_count})")
-                        time.sleep(final_delay)
+                        gevent.sleep(final_delay)
 
                         # Try to clean up the old connection
                         try:
@@ -350,7 +351,7 @@ class ProxyServer:
             except Exception as e:
                 logger.error(f"Error in event listener: {e}")
                 # Add a short delay to prevent rapid retries on persistent errors
-                time.sleep(5)
+                gevent.sleep(5)
 
         thread = threading.Thread(target=event_listener, daemon=True)
         thread.name = "redis-event-listener"
@@ -1000,7 +1001,7 @@ class ProxyServer:
                 except Exception as e:
                     logger.error(f"Error in cleanup thread: {e}", exc_info=True)
 
-                time.sleep(ConfigHelper.cleanup_check_interval())
+                gevent.sleep(ConfigHelper.cleanup_check_interval())
 
         thread = threading.Thread(target=cleanup_task, daemon=True)
         thread.name = "ts-proxy-cleanup"
diff --git a/apps/proxy/ts_proxy/stream_generator.py b/apps/proxy/ts_proxy/stream_generator.py
index bdd20874..82060f2f 100644
--- a/apps/proxy/ts_proxy/stream_generator.py
+++ b/apps/proxy/ts_proxy/stream_generator.py
@@ -417,7 +417,7 @@ class StreamGenerator:
             # Use the config setting instead of hardcoded value
             shutdown_delay = getattr(Config, 'CHANNEL_SHUTDOWN_DELAY', 5)
             logger.info(f"Waiting {shutdown_delay}s before checking if channel should be stopped")
-            gevent.sleep(shutdown_delay)
+            gevent.sleep(shutdown_delay)  # cooperative sleep keeps the gevent hub responsive
 
             # After delay, check global client count
             if self.channel_id in proxy_server.client_managers:
@@ -428,9 +428,7 @@ class StreamGenerator:
             else:
                 logger.info(f"Not shutting down channel {self.channel_id}, {total} clients still connected")
 
-        shutdown_thread = threading.Thread(target=delayed_shutdown)
-        shutdown_thread.daemon = True
-        shutdown_thread.start()
+        gevent.spawn(delayed_shutdown)
 
 def create_stream_generator(channel_id, client_id, client_ip, client_user_agent, channel_initializing=False):
     """
diff --git a/apps/proxy/ts_proxy/stream_manager.py b/apps/proxy/ts_proxy/stream_manager.py
index 6fd2b4b8..e9a531d8 100644
--- a/apps/proxy/ts_proxy/stream_manager.py
+++ b/apps/proxy/ts_proxy/stream_manager.py
@@ -6,6 +6,7 @@ import time
 import socket
 import requests
 import subprocess
+import gevent
 from typing import Optional, List
 from django.shortcuts import get_object_or_404
 from apps.proxy.config import TSConfig as Config
@@ -157,7 +158,7 @@ class StreamManager:
                 url_failed = False
                 if self.url_switching:
                     logger.debug("Skipping connection attempt during URL switch")
-                    time.sleep(0.1)
+                    gevent.sleep(0.1)
                     continue
                 # Connection retry loop for current URL
                 while self.running and self.retry_count < self.max_retries and not url_failed:
@@ -205,7 +206,7 @@ class StreamManager:
                         # Wait with exponential backoff before retrying
                         timeout = min(.25 * self.retry_count, 3)  # Cap at 3 seconds
                         logger.info(f"Reconnecting in {timeout} seconds... (attempt {self.retry_count}/{self.max_retries})")
-                        time.sleep(timeout)
+                        gevent.sleep(timeout)
 
                     except Exception as e:
                         logger.error(f"Connection error: {e}", exc_info=True)
@@ -218,7 +219,7 @@ class StreamManager:
                         # Wait with exponential backoff before retrying
                         timeout = min(.25 * self.retry_count, 3)  # Cap at 3 seconds
                         logger.info(f"Reconnecting in {timeout} seconds after error... (attempt {self.retry_count}/{self.max_retries})")
-                        time.sleep(timeout)
+                        gevent.sleep(timeout)
 
             # If URL failed and we're still running, try switching to another stream
             if url_failed and self.running:
@@ -425,7 +426,7 @@ class StreamManager:
                    else:
                        if not self.running:
                            break
-                       time.sleep(0.1)
+                       gevent.sleep(0.1)
            else:
                # Handle direct HTTP connection
                chunk_count = 0
@@ -674,7 +675,7 @@ class StreamManager:
             except Exception as e:
                 logger.error(f"Error in health monitor: {e}")
 
-            time.sleep(self.health_check_interval)
+            gevent.sleep(self.health_check_interval)
 
     def _attempt_reconnect(self):
         """Attempt to reconnect to the current stream"""

From 80fe7e02f84d67b7ddb8ab527d9b21d6f433f53b Mon Sep 17 00:00:00 2001
From: SergeantPanda
Date: Wed, 30 Apr 2025 13:43:01 -0500
Subject: [PATCH 42/42] Added missing _attempt_health_recovery.

---
 apps/proxy/ts_proxy/stream_manager.py | 23 +++++++++++++++++++++++
 1 file changed, 23 insertions(+)

diff --git a/apps/proxy/ts_proxy/stream_manager.py b/apps/proxy/ts_proxy/stream_manager.py
index e9a531d8..771ffba8 100644
--- a/apps/proxy/ts_proxy/stream_manager.py
+++ b/apps/proxy/ts_proxy/stream_manager.py
@@ -719,6 +719,29 @@ class StreamManager:
             logger.error(f"Error in reconnect attempt: {e}", exc_info=True)
             return False
 
+    def _attempt_health_recovery(self):
+        """Attempt to recover stream health by switching to another stream"""
+        try:
+            logger.info(f"Attempting health recovery for channel {self.channel_id}")
+
+            # Don't try to switch if we're already in the process of switching URLs
+            if self.url_switching:
+                logger.info("URL switching already in progress, skipping health recovery")
+                return False  # report as not recovered so callers always get a bool
+
+            # Try to switch to next stream
+            switch_result = self._try_next_stream()
+            if switch_result:
+                logger.info(f"Health recovery successful - switched to new stream for channel {self.channel_id}")
+                return True
+            else:
+                logger.warning(f"Health recovery failed - no alternative streams available for channel {self.channel_id}")
+                return False
+
+        except Exception as e:
+            logger.error(f"Error in health recovery attempt: {e}", exc_info=True)
+            return False
+
     def _close_connection(self):
         """Close HTTP connection resources"""
         # Close response if it exists