From d0073637ef774be79a4b3040e969a4a116fc8144 Mon Sep 17 00:00:00 2001 From: GitHub Actions Date: Sun, 13 Apr 2025 15:39:34 +0000 Subject: [PATCH 01/16] Increment build number to 1 [skip ci] --- version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/version.py b/version.py index da62c46d..1f13e1be 100644 --- a/version.py +++ b/version.py @@ -2,4 +2,4 @@ Dispatcharr version information. """ __version__ = '0.2.1' # Follow semantic versioning (MAJOR.MINOR.PATCH) -__build__ = '0' # Auto-incremented on builds +__build__ = '1' # Auto-incremented on builds From cb891461bfbb0a1bde166a603b5e914eabf55be0 Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Sun, 13 Apr 2025 20:31:10 -0500 Subject: [PATCH 02/16] Dynamically create TunerCount for HDHR based on profile max connections. Minimum of 2, unlimited is 10. --- apps/hdhr/api_views.py | 27 +++++++++++++++++++++++++-- apps/hdhr/views.py | 4 ++-- 2 files changed, 27 insertions(+), 4 deletions(-) diff --git a/apps/hdhr/api_views.py b/apps/hdhr/api_views.py index 4aefcc9a..03920252 100644 --- a/apps/hdhr/api_views.py +++ b/apps/hdhr/api_views.py @@ -3,9 +3,11 @@ from rest_framework.response import Response from rest_framework.views import APIView from rest_framework.permissions import IsAuthenticated from django.http import JsonResponse, HttpResponseForbidden, HttpResponse +import logging from drf_yasg.utils import swagger_auto_schema from drf_yasg import openapi from django.shortcuts import get_object_or_404 +from django.db import models from apps.channels.models import Channel, ChannelProfile from .models import HDHRDevice from .serializers import HDHRDeviceSerializer @@ -15,6 +17,9 @@ from django.views import View from django.utils.decorators import method_decorator from django.contrib.auth.decorators import login_required from django.views.decorators.csrf import csrf_exempt +from apps.m3u.models import M3UAccountProfile +# Configure logger +logger = logging.getLogger(__name__) @login_required def hdhr_dashboard_view(request): @@ -46,6 +51,24 @@ class DiscoverAPIView(APIView): base_url = request.build_absolute_uri(f'/{"/".join(uri_parts)}/').rstrip('/') device = HDHRDevice.objects.first() + # Get active profiles and calculate tuner count + # Exclude the default "custom Default" profile (ID 1) + profiles = M3UAccountProfile.objects.filter(is_active=True).exclude(id=1) + + # Check if any profile has unlimited streams (max_streams=0) + has_unlimited = profiles.filter(max_streams=0).exists() + + if has_unlimited: + tuner_count = 10 # Default to 10 if any profile has unlimited streams + else: + # Sum all max_streams values + tuner_count = profiles.filter(max_streams__gt=0).aggregate( + total=models.Sum('max_streams') + ).get('total', 0) + + # Ensure there's at least 2 tuners + tuner_count = max(2, tuner_count or 0) + logger.debug(f"Calculated tuner count: {tuner_count}") if not device: data = { "FriendlyName": "Dispatcharr HDHomeRun", @@ -56,7 +79,7 @@ class DiscoverAPIView(APIView): "DeviceAuth": "test_auth_token", "BaseURL": base_url, "LineupURL": f"{base_url}/lineup.json", - "TunerCount": 10, + "TunerCount": tuner_count, } else: data = { @@ -68,7 +91,7 @@ class DiscoverAPIView(APIView): "DeviceAuth": "test_auth_token", "BaseURL": base_url, "LineupURL": f"{base_url}/lineup.json", - "TunerCount": 10, + "TunerCount": tuner_count, } return JsonResponse(data) diff --git a/apps/hdhr/views.py b/apps/hdhr/views.py index 48c48f80..048eb340 100644 --- a/apps/hdhr/views.py +++ b/apps/hdhr/views.py @@ -52,7 +52,7 @@ class 
DiscoverAPIView(APIView): "DeviceAuth": "test_auth_token", "BaseURL": base_url, "LineupURL": f"{base_url}/lineup.json", - "TunerCount": "10", + "TunerCount": 10, } else: data = { @@ -64,7 +64,7 @@ class DiscoverAPIView(APIView): "DeviceAuth": "test_auth_token", "BaseURL": base_url, "LineupURL": f"{base_url}/lineup.json", - "TunerCount": "10", + "TunerCount": 10, } return JsonResponse(data) From e936b56d3bd723c7cfe4620b5c4f2413d59a7a61 Mon Sep 17 00:00:00 2001 From: GitHub Actions Date: Mon, 14 Apr 2025 01:46:18 +0000 Subject: [PATCH 03/16] Increment build number to 2 [skip ci] --- version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/version.py b/version.py index 1f13e1be..4d173fa7 100644 --- a/version.py +++ b/version.py @@ -2,4 +2,4 @@ Dispatcharr version information. """ __version__ = '0.2.1' # Follow semantic versioning (MAJOR.MINOR.PATCH) -__build__ = '1' # Auto-incremented on builds +__build__ = '2' # Auto-incremented on builds From 2921588b231d7d4dc62c8716fab1840593fed6bd Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Sun, 13 Apr 2025 20:58:46 -0500 Subject: [PATCH 04/16] Add custom stream count to calculation. --- apps/hdhr/api_views.py | 38 +++++++++++++++++++++++++------------- 1 file changed, 25 insertions(+), 13 deletions(-) diff --git a/apps/hdhr/api_views.py b/apps/hdhr/api_views.py index 03920252..7dcf9254 100644 --- a/apps/hdhr/api_views.py +++ b/apps/hdhr/api_views.py @@ -8,7 +8,7 @@ from drf_yasg.utils import swagger_auto_schema from drf_yasg import openapi from django.shortcuts import get_object_or_404 from django.db import models -from apps.channels.models import Channel, ChannelProfile +from apps.channels.models import Channel, ChannelProfile, Stream from .models import HDHRDevice from .serializers import HDHRDeviceSerializer from django.contrib.auth.decorators import login_required @@ -51,24 +51,36 @@ class DiscoverAPIView(APIView): base_url = request.build_absolute_uri(f'/{"/".join(uri_parts)}/').rstrip('/') device = HDHRDevice.objects.first() - # Get active profiles and calculate tuner count - # Exclude the default "custom Default" profile (ID 1) + # Calculate tuner count from active profiles (excluding default "custom Default" profile) profiles = M3UAccountProfile.objects.filter(is_active=True).exclude(id=1) - # Check if any profile has unlimited streams (max_streams=0) + # 1. Check if any profile has unlimited streams (max_streams=0) has_unlimited = profiles.filter(max_streams=0).exists() - if has_unlimited: - tuner_count = 10 # Default to 10 if any profile has unlimited streams - else: - # Sum all max_streams values - tuner_count = profiles.filter(max_streams__gt=0).aggregate( + # 2. Calculate tuner count from limited profiles + limited_tuners = 0 + if not has_unlimited: + limited_tuners = profiles.filter(max_streams__gt=0).aggregate( total=models.Sum('max_streams') - ).get('total', 0) + ).get('total', 0) or 0 + + # 3. Add custom stream count to tuner count + custom_stream_count = Stream.objects.filter(is_custom=True).count() + logger.debug(f"Found {custom_stream_count} custom streams") + + # 4. Calculate final tuner count + if has_unlimited: + # If there are unlimited profiles, start with 10 plus custom streams + tuner_count = 10 + custom_stream_count + else: + # Otherwise use the limited profile sum plus custom streams + tuner_count = limited_tuners + custom_stream_count + + # 5. 
Ensure minimum of 2 tuners + tuner_count = max(2, tuner_count) + + logger.debug(f"Calculated tuner count: {tuner_count} (limited profiles: {limited_tuners}, custom streams: {custom_stream_count}, unlimited: {has_unlimited})") - # Ensure there's at least 2 tuners - tuner_count = max(2, tuner_count or 0) - logger.debug(f"Calculated tuner count: {tuner_count}") if not device: data = { "FriendlyName": "Dispatcharr HDHomeRun", From 55fce02469c840bf5581f0b4b12b03cedb29850b Mon Sep 17 00:00:00 2001 From: GitHub Actions Date: Mon, 14 Apr 2025 01:59:17 +0000 Subject: [PATCH 05/16] Increment build number to 3 [skip ci] --- version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/version.py b/version.py index 4d173fa7..d86cd56b 100644 --- a/version.py +++ b/version.py @@ -2,4 +2,4 @@ Dispatcharr version information. """ __version__ = '0.2.1' # Follow semantic versioning (MAJOR.MINOR.PATCH) -__build__ = '2' # Auto-incremented on builds +__build__ = '3' # Auto-incremented on builds From 90cc65eb7d08aa1a01a192d7a3d635486f6fea6f Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Mon, 14 Apr 2025 18:11:20 -0500 Subject: [PATCH 06/16] Increase workers and threads for uwsgi. --- docker/uwsgi.ini | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/docker/uwsgi.ini b/docker/uwsgi.ini index e014e030..9db61495 100644 --- a/docker/uwsgi.ini +++ b/docker/uwsgi.ini @@ -25,8 +25,9 @@ die-on-term = true static-map = /static=/app/static # Worker management (Optimize for I/O bound tasks) -workers = 2 -enable-threads = false +workers = 4 +threads = 4 +enable-threads = true # Optimize for streaming http = 0.0.0.0:5656 From 95a51d71a04265c9a078c986d4f1e42287771024 Mon Sep 17 00:00:00 2001 From: GitHub Actions Date: Mon, 14 Apr 2025 23:11:45 +0000 Subject: [PATCH 07/16] Increment build number to 4 [skip ci] --- version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/version.py b/version.py index d86cd56b..752e0150 100644 --- a/version.py +++ b/version.py @@ -2,4 +2,4 @@ Dispatcharr version information. """ __version__ = '0.2.1' # Follow semantic versioning (MAJOR.MINOR.PATCH) -__build__ = '3' # Auto-incremented on builds +__build__ = '4' # Auto-incremented on builds From 60fd5afd9435fa76ae5334ab43edc1c443bbb5a6 Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Mon, 14 Apr 2025 21:03:33 -0500 Subject: [PATCH 08/16] More robust stream switches. Has client wait if in switching state. 
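Rather than returning an error as soon as the stream manager stops retrying, the client-facing view now checks the channel's state in Redis and keeps the client waiting while the channel is still INITIALIZING, CONNECTING, or in the middle of a URL switch, resetting its wait timer so the transition can complete. A minimal sketch of that decision loop follows; `get_state`, `manager.is_ready()`, and the timeout values are illustrative stand-ins, not the exact view internals:

```python
import time

TRANSITIONAL_STATES = {"initializing", "connecting"}  # assumed state strings

def wait_for_channel(get_state, manager, timeout=10.0, poll=0.1):
    """Return True once the channel is usable, False if it genuinely failed."""
    wait_start = time.time()
    while not manager.is_ready():                      # hypothetical readiness check
        if time.time() - wait_start > timeout:
            return False                               # hard connection timeout
        if not manager.should_retry():
            state = get_state()                        # e.g. read from Redis channel metadata
            if state in TRANSITIONAL_STATES or getattr(manager, "url_switching", False):
                wait_start = time.time()               # give the transition a fresh window
            else:
                return False                           # retries exhausted, no transition underway
        time.sleep(poll)
    return True
```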
--- apps/proxy/config.py | 2 +- apps/proxy/ts_proxy/views.py | 32 ++++++++++++++++++++++++++++++++ 2 files changed, 33 insertions(+), 1 deletion(-) diff --git a/apps/proxy/config.py b/apps/proxy/config.py index a465b5d7..9d38532b 100644 --- a/apps/proxy/config.py +++ b/apps/proxy/config.py @@ -35,7 +35,7 @@ class TSConfig(BaseConfig): # Streaming settings TARGET_BITRATE = 8000000 # Target bitrate (8 Mbps) - STREAM_TIMEOUT = 10 # Disconnect after this many seconds of no data + STREAM_TIMEOUT = 20 # Disconnect after this many seconds of no data HEALTH_CHECK_INTERVAL = 5 # Check stream health every N seconds # Resource management diff --git a/apps/proxy/ts_proxy/views.py b/apps/proxy/ts_proxy/views.py index 232b3d28..43543816 100644 --- a/apps/proxy/ts_proxy/views.py +++ b/apps/proxy/ts_proxy/views.py @@ -166,9 +166,41 @@ def stream_ts(request, channel_id): if time.time() - wait_start > timeout: proxy_server.stop_channel(channel_id) return JsonResponse({'error': 'Connection timeout'}, status=504) + + # Check if this manager should keep retrying or stop if not manager.should_retry(): + # Check channel state in Redis to make a better decision + metadata_key = RedisKeys.channel_metadata(channel_id) + current_state = None + + if proxy_server.redis_client: + try: + state_bytes = proxy_server.redis_client.hget(metadata_key, ChannelMetadataField.STATE) + if state_bytes: + current_state = state_bytes.decode('utf-8') + logger.info(f"[{client_id}] Current state of channel {channel_id}: {current_state}") + except Exception as e: + logger.warning(f"[{client_id}] Error getting channel state: {e}") + + # Allow normal transitional states to continue + if current_state in [ChannelState.INITIALIZING, ChannelState.CONNECTING]: + logger.info(f"[{client_id}] Channel {channel_id} is in {current_state} state, continuing to wait") + # Reset wait timer to allow the transition to complete + wait_start = time.time() + continue + + # Check if we're switching URLs + if hasattr(manager, 'url_switching') and manager.url_switching: + logger.info(f"[{client_id}] Stream manager is currently switching URLs for channel {channel_id}") + # Reset wait timer to give the switch a chance + wait_start = time.time() + continue + + # If we reach here, we've exhausted retries and the channel isn't in a valid transitional state + logger.warning(f"[{client_id}] Channel {channel_id} failed to connect and is not in transitional state") proxy_server.stop_channel(channel_id) return JsonResponse({'error': 'Failed to connect'}, status=502) + time.sleep(0.1) logger.info(f"[{client_id}] Successfully initialized channel {channel_id}") From 20baa0ddcfc28682d603d44954210d02ef6b1f18 Mon Sep 17 00:00:00 2001 From: GitHub Actions Date: Tue, 15 Apr 2025 02:03:57 +0000 Subject: [PATCH 09/16] Increment build number to 5 [skip ci] --- version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/version.py b/version.py index 752e0150..502e8ef3 100644 --- a/version.py +++ b/version.py @@ -2,4 +2,4 @@ Dispatcharr version information. """ __version__ = '0.2.1' # Follow semantic versioning (MAJOR.MINOR.PATCH) -__build__ = '4' # Auto-incremented on builds +__build__ = '5' # Auto-incremented on builds From 02b5fb6fc09ba0943a9886a4b67138b3cb1d2f49 Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Mon, 14 Apr 2025 21:04:33 -0500 Subject: [PATCH 10/16] Changed logging level of channel state checks for client. 
--- apps/proxy/ts_proxy/views.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/apps/proxy/ts_proxy/views.py b/apps/proxy/ts_proxy/views.py index 43543816..d4680149 100644 --- a/apps/proxy/ts_proxy/views.py +++ b/apps/proxy/ts_proxy/views.py @@ -178,7 +178,7 @@ def stream_ts(request, channel_id): state_bytes = proxy_server.redis_client.hget(metadata_key, ChannelMetadataField.STATE) if state_bytes: current_state = state_bytes.decode('utf-8') - logger.info(f"[{client_id}] Current state of channel {channel_id}: {current_state}") + logger.debug(f"[{client_id}] Current state of channel {channel_id}: {current_state}") except Exception as e: logger.warning(f"[{client_id}] Error getting channel state: {e}") From 5bb6539586b8f93dcd27cc449c71df4606dafb19 Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Tue, 15 Apr 2025 07:44:08 -0500 Subject: [PATCH 11/16] Fixes multiple streams in a row being dead. --- apps/proxy/ts_proxy/stream_manager.py | 3 --- apps/proxy/ts_proxy/views.py | 9 +++++++++ 2 files changed, 9 insertions(+), 3 deletions(-) diff --git a/apps/proxy/ts_proxy/stream_manager.py b/apps/proxy/ts_proxy/stream_manager.py index ac5f801b..9d2847c1 100644 --- a/apps/proxy/ts_proxy/stream_manager.py +++ b/apps/proxy/ts_proxy/stream_manager.py @@ -571,9 +571,6 @@ class StreamManager: # Reset retry counter to allow immediate reconnect self.retry_count = 0 - # Reset tried streams when manually switching URL - self.tried_stream_ids = set() - # Also reset buffer position to prevent stale data after URL change if hasattr(self.buffer, 'reset_buffer_position'): try: diff --git a/apps/proxy/ts_proxy/views.py b/apps/proxy/ts_proxy/views.py index d4680149..ff334f96 100644 --- a/apps/proxy/ts_proxy/views.py +++ b/apps/proxy/ts_proxy/views.py @@ -288,6 +288,15 @@ def change_stream(request, channel_id): # Use the service layer instead of direct implementation result = ChannelService.change_stream_url(channel_id, new_url, user_agent) + # Get the stream manager before updating URL + stream_manager = proxy_server.stream_managers.get(channel_id) + + # If we have a stream manager, reset its tried_stream_ids when manually changing streams + if stream_manager: + # Reset tried streams when manually switching URL via API + stream_manager.tried_stream_ids = set() + logger.debug(f"Reset tried stream IDs for channel {channel_id} during manual stream change") + if result.get('status') == 'error': return JsonResponse({ 'error': result.get('message', 'Unknown error'), From d1654371290abbafc4478e8953c506a747692ec5 Mon Sep 17 00:00:00 2001 From: GitHub Actions Date: Tue, 15 Apr 2025 12:44:43 +0000 Subject: [PATCH 12/16] Increment build number to 6 [skip ci] --- version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/version.py b/version.py index 502e8ef3..3875c9fe 100644 --- a/version.py +++ b/version.py @@ -2,4 +2,4 @@ Dispatcharr version information. """ __version__ = '0.2.1' # Follow semantic versioning (MAJOR.MINOR.PATCH) -__build__ = '5' # Auto-incremented on builds +__build__ = '6' # Auto-incremented on builds From 14621598f6c9cfd632174db47c3223b156b6d5ae Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Tue, 15 Apr 2025 08:28:47 -0500 Subject: [PATCH 13/16] Added url validation for redirect profile. 
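For channels using a redirect stream profile, the proxy now probes the upstream URL (a HEAD request first, then a small streaming GET) before sending the client a redirect, and walks through alternate streams when validation fails. A condensed, non-authoritative illustration of how the new helpers fit together; the function names and return shapes match this patch, but the control flow here is simplified:

```python
def pick_redirect_url(channel_id, stream_id, stream_url, user_agent):
    """Return a validated URL to redirect to, or None if every candidate fails."""
    from apps.proxy.ts_proxy.url_utils import (  # module extended in this patch
        validate_stream_url, get_alternate_streams, get_stream_info_for_switch,
    )

    is_valid, final_url, _status, _msg = validate_stream_url(
        stream_url, user_agent=user_agent, timeout=(5, 5)
    )
    if is_valid:
        return final_url

    tried = {stream_id}
    for alt in get_alternate_streams(channel_id, stream_id):
        if alt["stream_id"] in tried:
            continue
        tried.add(alt["stream_id"])
        info = get_stream_info_for_switch(channel_id, alt["stream_id"])
        if "error" in info:
            continue
        is_valid, final_url, _status, _msg = validate_stream_url(
            info["url"], user_agent=info["user_agent"], timeout=(5, 5)
        )
        if is_valid:
            return final_url
    return None  # caller answers with HTTP 502 when nothing validates
```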
--- apps/proxy/ts_proxy/url_utils.py | 123 +++++++++++++++++++++++++++++-- apps/proxy/ts_proxy/views.py | 58 ++++++++++++++- 2 files changed, 175 insertions(+), 6 deletions(-) diff --git a/apps/proxy/ts_proxy/url_utils.py b/apps/proxy/ts_proxy/url_utils.py index f68b9b6b..e3b1c264 100644 --- a/apps/proxy/ts_proxy/url_utils.py +++ b/apps/proxy/ts_proxy/url_utils.py @@ -11,6 +11,7 @@ from apps.m3u.models import M3UAccount, M3UAccountProfile from core.models import UserAgent, CoreSettings from .utils import get_logger from uuid import UUID +import requests logger = get_logger() @@ -95,14 +96,14 @@ def transform_url(input_url: str, search_pattern: str, replace_pattern: str) -> str: The transformed URL """ try: - logger.info("Executing URL pattern replacement:") - logger.info(f" base URL: {input_url}") - logger.info(f" search: {search_pattern}") + logger.debug("Executing URL pattern replacement:") + logger.debug(f" base URL: {input_url}") + logger.debug(f" search: {search_pattern}") # Handle backreferences in the replacement pattern safe_replace_pattern = re.sub(r'\$(\d+)', r'\\\1', replace_pattern) - logger.info(f" replace: {replace_pattern}") - logger.info(f" safe replace: {safe_replace_pattern}") + logger.debug(f" replace: {replace_pattern}") + logger.debug(f" safe replace: {safe_replace_pattern}") # Apply the transformation stream_url = re.sub(search_pattern, safe_replace_pattern, input_url) @@ -268,3 +269,115 @@ def get_alternate_streams(channel_id: str, current_stream_id: Optional[int] = No except Exception as e: logger.error(f"Error getting alternate streams for channel {channel_id}: {e}", exc_info=True) return [] + +def validate_stream_url(url, user_agent=None, timeout=(5, 5)): + """ + Validate if a stream URL is accessible without downloading the full content. 
+ + Args: + url (str): The URL to validate + user_agent (str): User agent to use for the request + timeout (tuple): Connection and read timeout in seconds + + Returns: + tuple: (is_valid, final_url, status_code, message) + """ + try: + # Create session with proper headers + session = requests.Session() + headers = { + 'User-Agent': user_agent, + 'Connection': 'close' # Don't keep connection alive + } + session.headers.update(headers) + + # Make HEAD request first as it's faster and doesn't download content + head_response = session.head( + url, + timeout=timeout, + allow_redirects=True + ) + + # If HEAD not supported, server will return 405 or other error + if 200 <= head_response.status_code < 300: + # HEAD request successful + return True, head_response.url, head_response.status_code, "Valid (HEAD request)" + + # Try a GET request with stream=True to avoid downloading all content + get_response = session.get( + url, + stream=True, + timeout=timeout, + allow_redirects=True + ) + + # IMPORTANT: Check status code first before checking content + if not (200 <= get_response.status_code < 300): + logger.warning(f"Stream validation failed with HTTP status {get_response.status_code}") + return False, get_response.url, get_response.status_code, f"Invalid HTTP status: {get_response.status_code}" + + # Only check content if status code is valid + try: + chunk = next(get_response.iter_content(chunk_size=188*10)) + is_valid = len(chunk) > 0 + message = f"Valid (GET request, received {len(chunk)} bytes)" + except StopIteration: + is_valid = False + message = "Empty response from server" + + # Check content type for additional validation + content_type = get_response.headers.get('Content-Type', '').lower() + + # Expanded list of valid content types for streaming media + valid_content_types = [ + 'video/', + 'audio/', + 'mpegurl', + 'octet-stream', + 'mp2t', + 'mp4', + 'mpeg', + 'dash+xml', + 'application/mp4', + 'application/mpeg', + 'application/x-mpegurl', + 'application/vnd.apple.mpegurl', + 'application/ogg', + 'm3u', + 'playlist', + 'binary/', + 'rtsp', + 'rtmp', + 'hls', + 'ts' + ] + + content_type_valid = any(type_str in content_type for type_str in valid_content_types) + + # Always consider the stream valid if we got data, regardless of content type + # But add content type info to the message for debugging + if content_type: + content_type_msg = f" (Content-Type: {content_type}" + if content_type_valid: + content_type_msg += ", recognized as valid stream format)" + else: + content_type_msg += ", unrecognized but may still work)" + message += content_type_msg + + # Clean up connection + get_response.close() + + # If we have content, consider it valid even with unrecognized content type + return is_valid, get_response.url, get_response.status_code, message + + except requests.exceptions.Timeout: + return False, url, 0, "Timeout connecting to stream" + except requests.exceptions.TooManyRedirects: + return False, url, 0, "Too many redirects" + except requests.exceptions.RequestException as e: + return False, url, 0, f"Request error: {str(e)}" + except Exception as e: + return False, url, 0, f"Validation error: {str(e)}" + finally: + if 'session' in locals(): + session.close() diff --git a/apps/proxy/ts_proxy/views.py b/apps/proxy/ts_proxy/views.py index ff334f96..a924cd75 100644 --- a/apps/proxy/ts_proxy/views.py +++ b/apps/proxy/ts_proxy/views.py @@ -146,7 +146,63 @@ def stream_ts(request, channel_id): # Generate transcode command if needed stream_profile = channel.get_stream_profile() if 
stream_profile.is_redirect(): - return HttpResponseRedirect(stream_url) + # Validate the stream URL before redirecting + from .url_utils import validate_stream_url, get_alternate_streams, get_stream_info_for_switch + + # Try initial URL + logger.info(f"[{client_id}] Validating redirect URL: {stream_url}") + is_valid, final_url, status_code, message = validate_stream_url( + stream_url, + user_agent=stream_user_agent, + timeout=(5, 5) + ) + + # If first URL doesn't validate, try alternates + if not is_valid: + logger.warning(f"[{client_id}] Primary stream URL failed validation: {message}") + + # Track tried streams to avoid loops + tried_streams = {stream_id} + + # Get alternate streams + alternates = get_alternate_streams(channel_id, stream_id) + + # Try each alternate until one works + for alt in alternates: + if alt['stream_id'] in tried_streams: + continue + + tried_streams.add(alt['stream_id']) + + # Get stream info + alt_info = get_stream_info_for_switch(channel_id, alt['stream_id']) + if 'error' in alt_info: + logger.warning(f"[{client_id}] Error getting alternate stream info: {alt_info['error']}") + continue + + # Validate the alternate URL + logger.info(f"[{client_id}] Trying alternate stream #{alt['stream_id']}: {alt_info['url']}") + is_valid, final_url, status_code, message = validate_stream_url( + alt_info['url'], + user_agent=alt_info['user_agent'], + timeout=(5, 5) + ) + + if is_valid: + logger.info(f"[{client_id}] Alternate stream #{alt['stream_id']} validated successfully") + break + else: + logger.warning(f"[{client_id}] Alternate stream #{alt['stream_id']} failed validation: {message}") + + # Final decision based on validation results + if is_valid: + logger.info(f"[{client_id}] Redirecting to validated URL: {final_url} ({message})") + return HttpResponseRedirect(final_url) + else: + logger.error(f"[{client_id}] All available redirect URLs failed validation") + return JsonResponse({ + 'error': 'All available streams failed validation' + }, status=502) # 502 Bad Gateway # Initialize channel with the stream's user agent (not the client's) success = ChannelService.initialize_channel( From 8b89f4551076f27bf41fc141b6f07320bfc4fae8 Mon Sep 17 00:00:00 2001 From: GitHub Actions Date: Tue, 15 Apr 2025 13:29:12 +0000 Subject: [PATCH 14/16] Increment build number to 7 [skip ci] --- version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/version.py b/version.py index 3875c9fe..d73a3118 100644 --- a/version.py +++ b/version.py @@ -2,4 +2,4 @@ Dispatcharr version information. """ __version__ = '0.2.1' # Follow semantic versioning (MAJOR.MINOR.PATCH) -__build__ = '6' # Auto-incremented on builds +__build__ = '7' # Auto-incremented on builds From bb7bac48cff9aeb0ff6673c2dc429a0ea9f1ff88 Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Tue, 15 Apr 2025 09:31:54 -0500 Subject: [PATCH 15/16] Fixed broken links to compose files (again) [skip ci] --- README.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/README.md b/README.md index a99ece49..fb37af1c 100644 --- a/README.md +++ b/README.md @@ -74,9 +74,9 @@ docker run -d \ | Use Case | File | Description | | --------------------------- | ------------------------------------------------------- | ------------------------------------------------------------------------------------------------------ | -| **All-in-One Deployment** | [docker-compose-aio.yml](docker/docker-compose-aio.yml) | ⭐ Recommended! A simple, all-in-one solution — everything runs in a single container for quick setup. 
| +| **All-in-One Deployment** | [docker-compose-aio.yml](docker/docker-compose.aio.yml) | ⭐ Recommended! A simple, all-in-one solution — everything runs in a single container for quick setup. | | **Modular Deployment** | [docker-compose.yml](docker/docker-compose.yml) | Separate containers for Dispatcharr, Celery, and Postgres — perfect if you want more granular control. | -| **Development Environment** | [docker-compose-dev.yml](docker/docker-compose-dev.yml) | Developer-friendly setup with pre-configured ports and settings for contributing and testing. | +| **Development Environment** | [docker-compose-dev.yml](docker/docker-compose.dev.yml) | Developer-friendly setup with pre-configured ports and settings for contributing and testing. | --- From 8a2245bdedeaa7fabc4b329fe137af3442091095 Mon Sep 17 00:00:00 2001 From: GitHub Actions Date: Tue, 15 Apr 2025 15:08:50 +0000 Subject: [PATCH 16/16] Increment build number to 8 [skip ci] --- version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/version.py b/version.py index d73a3118..ddd9996f 100644 --- a/version.py +++ b/version.py @@ -2,4 +2,4 @@ Dispatcharr version information. """ __version__ = '0.2.1' # Follow semantic versioning (MAJOR.MINOR.PATCH) -__build__ = '7' # Auto-incremented on builds +__build__ = '8' # Auto-incremented on builds