From 7e5be6094f6fb9bc00ff3eee9e4e4b799c98ce3c Mon Sep 17 00:00:00 2001 From: Marlon Alkan Date: Sun, 8 Jun 2025 16:45:34 +0200 Subject: [PATCH 001/857] docker: init: 02-postgres.sh: allow DB user to create new DB (for tests) --- docker/init/02-postgres.sh | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/docker/init/02-postgres.sh b/docker/init/02-postgres.sh index 69a81dd4..7bb90671 100644 --- a/docker/init/02-postgres.sh +++ b/docker/init/02-postgres.sh @@ -57,13 +57,14 @@ if [ -z "$(ls -A $POSTGRES_DIR)" ]; then echo "Creating PostgreSQL database..." su - postgres -c "createdb -p ${POSTGRES_PORT} ${POSTGRES_DB}" - # Create user, set ownership, and grant privileges + # Create user, set ownership, and grant privileges, including privileges to create new databases echo "Creating PostgreSQL user..." su - postgres -c "psql -p ${POSTGRES_PORT} -d ${POSTGRES_DB}" < Date: Sun, 8 Jun 2025 16:47:00 +0200 Subject: [PATCH 002/857] apps: output: change body detection logic and add tests --- apps/output/tests.py | 23 +++++++++++++++++++++++ apps/output/views.py | 5 +++-- 2 files changed, 26 insertions(+), 2 deletions(-) diff --git a/apps/output/tests.py b/apps/output/tests.py index e1e857ee..f87c8340 100644 --- a/apps/output/tests.py +++ b/apps/output/tests.py @@ -14,3 +14,26 @@ class OutputM3UTest(TestCase): self.assertEqual(response.status_code, 200) content = response.content.decode() self.assertIn("#EXTM3U", content) + + def test_generate_m3u_response_post_empty_body(self): + """ + Test that a POST request with an empty body returns 200 OK. + """ + url = reverse('output:generate_m3u') + + response = self.client.post(url, data=None, content_type='application/x-www-form-urlencoded') + content = response.content.decode() + + self.assertEqual(response.status_code, 200, "POST with empty body should return 200 OK") + self.assertIn("#EXTM3U", content) + + def test_generate_m3u_response_post_with_body(self): + """ + Test that a POST request with a non-empty body returns 403 Forbidden. + """ + url = reverse('output:generate_m3u') + + response = self.client.post(url, data={'evilstring': 'muhahaha'}) + + self.assertEqual(response.status_code, 403, "POST with body should return 403 Forbidden") + self.assertIn("POST requests with body are not allowed, body is:", response.content.decode()) diff --git a/apps/output/views.py b/apps/output/views.py index 2b18d185..ff02560c 100644 --- a/apps/output/views.py +++ b/apps/output/views.py @@ -18,9 +18,10 @@ def generate_m3u(request, profile_name=None): The stream URL now points to the new stream_view that uses StreamProfile. Supports both GET and POST methods for compatibility with IPTVSmarters. """ - # Check if this is a POST request with data (which we don't want to allow) + # Check if this is a POST request and the body is not empty (which we don't want to allow) if request.method == "POST" and request.body: - return HttpResponseForbidden("POST requests with content are not allowed") + if request.body.decode() != '{}': + return HttpResponseForbidden("POST requests with body are not allowed, body is: {}".format(request.body.decode())) if profile_name is not None: channel_profile = ChannelProfile.objects.get(name=profile_name) From d24520d3d89dd7d2e4881740630b14b4fe0e0916 Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Thu, 10 Jul 2025 13:22:42 -0500 Subject: [PATCH 003/857] Enhance EPG XML generation with additional metadata extraction and improved handling for keywords, languages, ratings, and credits. 
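The sketch below is illustrative only and is not part of this patch: it parses a hand-written sample <programme> element and pulls out keywords, star ratings, and actor roles, mirroring a small subset of what the updated extract_custom_properties() in apps/epg/tasks.py now handles. The sample XML, the extract_subset() helper, and the printed result are assumptions made for the example, not data from a real EPG feed.

import xml.etree.ElementTree as ET

# Hand-written sample programme; not taken from a real feed.
SAMPLE = """
<programme start="20250710200000 +0000" channel="example.1">
  <title>Example Show</title>
  <keyword>Drama</keyword>
  <keyword>Mystery</keyword>
  <star-rating system="imdb">
    <value>8/10</value>
  </star-rating>
  <credits>
    <director>Jane Doe</director>
    <actor role="Detective" guest="yes">John Smith</actor>
  </credits>
</programme>
"""

def extract_subset(prog):
    """Collect a few of the XMLTV fields this patch starts handling."""
    props = {}

    # <keyword> elements become a simple list of strings.
    keywords = [k.text.strip() for k in prog.findall("keyword") if k.text and k.text.strip()]
    if keywords:
        props["keywords"] = keywords

    # <star-rating> keeps both the value and the rating system attribute.
    ratings = []
    for sr in prog.findall("star-rating"):
        value = sr.find("value")
        if value is not None and value.text:
            entry = {"value": value.text.strip()}
            if sr.get("system"):
                entry["system"] = sr.get("system")
            ratings.append(entry)
    if ratings:
        props["star_ratings"] = ratings

    # <actor> elements keep their role and guest attributes instead of only the name.
    credits_elem = prog.find("credits")
    if credits_elem is not None:
        actors = []
        for actor in credits_elem.findall("actor"):
            if actor.text and actor.text.strip():
                entry = {"name": actor.text.strip()}
                if actor.get("role"):
                    entry["role"] = actor.get("role")
                if actor.get("guest") == "yes":
                    entry["guest"] = True
                actors.append(entry)
        if actors:
            props["credits"] = {"actor": actors}

    return props

if __name__ == "__main__":
    print(extract_subset(ET.fromstring(SAMPLE)))
    # -> {'keywords': ['Drama', 'Mystery'],
    #     'star_ratings': [{'value': '8/10', 'system': 'imdb'}],
    #     'credits': {'actor': [{'name': 'John Smith', 'role': 'Detective', 'guest': True}]}}
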
--- apps/epg/tasks.py | 157 ++++++++++++++++++++++++++++++++++++++-- apps/output/views.py | 166 +++++++++++++++++++++++++++++++++++++------ 2 files changed, 293 insertions(+), 30 deletions(-) diff --git a/apps/epg/tasks.py b/apps/epg/tasks.py index d3062171..4fcf5706 100644 --- a/apps/epg/tasks.py +++ b/apps/epg/tasks.py @@ -1612,6 +1612,11 @@ def extract_custom_properties(prog): if categories: custom_props['categories'] = categories + # Extract keywords (new) + keywords = [kw.text.strip() for kw in prog.findall('keyword') if kw.text and kw.text.strip()] + if keywords: + custom_props['keywords'] = keywords + # Extract episode numbers for ep_num in prog.findall('episode-num'): system = ep_num.get('system', '') @@ -1637,6 +1642,9 @@ def extract_custom_properties(prog): elif system == 'dd_progid' and ep_num.text: # Store the dd_progid format custom_props['dd_progid'] = ep_num.text.strip() + # Add support for other systems like thetvdb.com, themoviedb.org, imdb.com + elif system in ['thetvdb.com', 'themoviedb.org', 'imdb.com'] and ep_num.text: + custom_props[f'{system}_id'] = ep_num.text.strip() # Extract ratings more efficiently rating_elem = prog.find('rating') @@ -1647,37 +1655,172 @@ def extract_custom_properties(prog): if rating_elem.get('system'): custom_props['rating_system'] = rating_elem.get('system') + # Extract star ratings (new) + star_ratings = [] + for star_rating in prog.findall('star-rating'): + value_elem = star_rating.find('value') + if value_elem is not None and value_elem.text: + rating_data = {'value': value_elem.text.strip()} + if star_rating.get('system'): + rating_data['system'] = star_rating.get('system') + star_ratings.append(rating_data) + if star_ratings: + custom_props['star_ratings'] = star_ratings + # Extract credits more efficiently credits_elem = prog.find('credits') if credits_elem is not None: credits = {} - for credit_type in ['director', 'actor', 'writer', 'presenter', 'producer']: - names = [e.text.strip() for e in credits_elem.findall(credit_type) if e.text and e.text.strip()] - if names: - credits[credit_type] = names + for credit_type in ['director', 'actor', 'writer', 'adapter', 'producer', 'composer', 'editor', 'presenter', 'commentator', 'guest']: + if credit_type == 'actor': + # Handle actors with roles and guest status + actors = [] + for actor_elem in credits_elem.findall('actor'): + if actor_elem.text and actor_elem.text.strip(): + actor_data = {'name': actor_elem.text.strip()} + if actor_elem.get('role'): + actor_data['role'] = actor_elem.get('role') + if actor_elem.get('guest') == 'yes': + actor_data['guest'] = True + actors.append(actor_data) + if actors: + credits['actor'] = actors + else: + names = [e.text.strip() for e in credits_elem.findall(credit_type) if e.text and e.text.strip()] + if names: + credits[credit_type] = names if credits: custom_props['credits'] = credits # Extract other common program metadata date_elem = prog.find('date') if date_elem is not None and date_elem.text: - custom_props['year'] = date_elem.text.strip()[:4] # Just the year part + custom_props['date'] = date_elem.text.strip() country_elem = prog.find('country') if country_elem is not None and country_elem.text: custom_props['country'] = country_elem.text.strip() + # Extract language information (new) + language_elem = prog.find('language') + if language_elem is not None and language_elem.text: + custom_props['language'] = language_elem.text.strip() + + orig_language_elem = prog.find('orig-language') + if orig_language_elem is not None and orig_language_elem.text: 
+ custom_props['original_language'] = orig_language_elem.text.strip() + + # Extract length (new) + length_elem = prog.find('length') + if length_elem is not None and length_elem.text: + try: + length_value = int(length_elem.text.strip()) + length_units = length_elem.get('units', 'minutes') + custom_props['length'] = {'value': length_value, 'units': length_units} + except ValueError: + pass + + # Extract video information (new) + video_elem = prog.find('video') + if video_elem is not None: + video_info = {} + for video_attr in ['present', 'colour', 'aspect', 'quality']: + attr_elem = video_elem.find(video_attr) + if attr_elem is not None and attr_elem.text: + video_info[video_attr] = attr_elem.text.strip() + if video_info: + custom_props['video'] = video_info + + # Extract audio information (new) + audio_elem = prog.find('audio') + if audio_elem is not None: + audio_info = {} + for audio_attr in ['present', 'stereo']: + attr_elem = audio_elem.find(audio_attr) + if attr_elem is not None and attr_elem.text: + audio_info[audio_attr] = attr_elem.text.strip() + if audio_info: + custom_props['audio'] = audio_info + + # Extract subtitles information (new) + subtitles = [] + for subtitle_elem in prog.findall('subtitles'): + subtitle_data = {} + if subtitle_elem.get('type'): + subtitle_data['type'] = subtitle_elem.get('type') + lang_elem = subtitle_elem.find('language') + if lang_elem is not None and lang_elem.text: + subtitle_data['language'] = lang_elem.text.strip() + if subtitle_data: + subtitles.append(subtitle_data) + + if subtitles: + custom_props['subtitles'] = subtitles + + # Extract reviews (new) + reviews = [] + for review_elem in prog.findall('review'): + if review_elem.text and review_elem.text.strip(): + review_data = {'content': review_elem.text.strip()} + if review_elem.get('type'): + review_data['type'] = review_elem.get('type') + if review_elem.get('source'): + review_data['source'] = review_elem.get('source') + if review_elem.get('reviewer'): + review_data['reviewer'] = review_elem.get('reviewer') + reviews.append(review_data) + if reviews: + custom_props['reviews'] = reviews + + # Extract images (new) + images = [] + for image_elem in prog.findall('image'): + if image_elem.text and image_elem.text.strip(): + image_data = {'url': image_elem.text.strip()} + for attr in ['type', 'size', 'orient', 'system']: + if image_elem.get(attr): + image_data[attr] = image_elem.get(attr) + images.append(image_data) + if images: + custom_props['images'] = images + icon_elem = prog.find('icon') if icon_elem is not None and icon_elem.get('src'): custom_props['icon'] = icon_elem.get('src') - # Simpler approach for boolean flags - for kw in ['previously-shown', 'premiere', 'new', 'live']: + # Simpler approach for boolean flags - expanded list + for kw in ['previously-shown', 'premiere', 'new', 'live', 'last-chance']: if prog.find(kw) is not None: custom_props[kw.replace('-', '_')] = True + # Extract premiere and last-chance text content if available + premiere_elem = prog.find('premiere') + if premiere_elem is not None: + custom_props['premiere'] = True + if premiere_elem.text and premiere_elem.text.strip(): + custom_props['premiere_text'] = premiere_elem.text.strip() + + last_chance_elem = prog.find('last-chance') + if last_chance_elem is not None: + custom_props['last_chance'] = True + if last_chance_elem.text and last_chance_elem.text.strip(): + custom_props['last_chance_text'] = last_chance_elem.text.strip() + + # Extract previously-shown details + prev_shown_elem = prog.find('previously-shown') + 
if prev_shown_elem is not None: + custom_props['previously_shown'] = True + prev_shown_data = {} + if prev_shown_elem.get('start'): + prev_shown_data['start'] = prev_shown_elem.get('start') + if prev_shown_elem.get('channel'): + prev_shown_data['channel'] = prev_shown_elem.get('channel') + if prev_shown_data: + custom_props['previously_shown_details'] = prev_shown_data + return custom_props + def clear_element(elem): """Clear an XML element and its parent to free memory.""" try: diff --git a/apps/output/views.py b/apps/output/views.py index 4ef9f4f2..67d72bd2 100644 --- a/apps/output/views.py +++ b/apps/output/views.py @@ -467,19 +467,27 @@ def generate_epg(request, profile_name=None, user=None): for category in custom_data["categories"]: program_xml.append(f" {html.escape(category)}") - # Handle episode numbering - multiple formats supported - # Standard episode number if available - if "episode" in custom_data: - program_xml.append(f' E{custom_data["episode"]}') + # Add keywords if available + if "keywords" in custom_data and custom_data["keywords"]: + for keyword in custom_data["keywords"]: + program_xml.append(f" {html.escape(keyword)}") - # Handle onscreen episode format (like S06E128) + # Handle episode numbering - multiple formats supported + # Prioritize onscreen_episode over standalone episode for onscreen system if "onscreen_episode" in custom_data: program_xml.append(f' {html.escape(custom_data["onscreen_episode"])}') + elif "episode" in custom_data: + program_xml.append(f' E{custom_data["episode"]}') # Handle dd_progid format if 'dd_progid' in custom_data: program_xml.append(f' {html.escape(custom_data["dd_progid"])}') + # Handle external database IDs + for system in ['thetvdb.com', 'themoviedb.org', 'imdb.com']: + if f'{system}_id' in custom_data: + program_xml.append(f' {html.escape(custom_data[f"{system}_id"])}') + # Add season and episode numbers in xmltv_ns format if available if "season" in custom_data and "episode" in custom_data: season = ( @@ -494,6 +502,46 @@ def generate_epg(request, profile_name=None, user=None): ) program_xml.append(f' {season}.{episode}.') + # Add language information + if "language" in custom_data: + program_xml.append(f' {html.escape(custom_data["language"])}') + + if "original_language" in custom_data: + program_xml.append(f' {html.escape(custom_data["original_language"])}') + + # Add length information + if "length" in custom_data and isinstance(custom_data["length"], dict): + length_value = custom_data["length"].get("value", "") + length_units = custom_data["length"].get("units", "minutes") + program_xml.append(f' {html.escape(str(length_value))}') + + # Add video information + if "video" in custom_data and isinstance(custom_data["video"], dict): + program_xml.append(" ") + + # Add audio information + if "audio" in custom_data and isinstance(custom_data["audio"], dict): + program_xml.append(" ") + + # Add subtitles information + if "subtitles" in custom_data and isinstance(custom_data["subtitles"], list): + for subtitle in custom_data["subtitles"]: + if isinstance(subtitle, dict): + subtitle_type = subtitle.get("type", "") + type_attr = f' type="{html.escape(subtitle_type)}"' if subtitle_type else "" + program_xml.append(f" ") + if "language" in subtitle: + program_xml.append(f" {html.escape(subtitle['language'])}") + program_xml.append(" ") + # Add rating if available if "rating" in custom_data: rating_system = custom_data.get("rating_system", "TV Parental Guidelines") @@ -501,20 +549,74 @@ def generate_epg(request, profile_name=None, 
user=None): program_xml.append(f' {html.escape(custom_data["rating"])}') program_xml.append(f" ") - # Add actors/directors/writers if available - if "credits" in custom_data: - program_xml.append(f" ") - for role, people in custom_data["credits"].items(): - if isinstance(people, list): - for person in people: - program_xml.append(f" <{role}>{html.escape(person)}") - else: - program_xml.append(f" <{role}>{html.escape(people)}") - program_xml.append(f" ") + # Add star ratings + if "star_ratings" in custom_data and isinstance(custom_data["star_ratings"], list): + for star_rating in custom_data["star_ratings"]: + if isinstance(star_rating, dict) and "value" in star_rating: + system_attr = f' system="{html.escape(star_rating["system"])}"' if "system" in star_rating else "" + program_xml.append(f" ") + program_xml.append(f" {html.escape(star_rating['value'])}") + program_xml.append(" ") - # Add program date/year if available - if "year" in custom_data: - program_xml.append(f' {html.escape(custom_data["year"])}') + # Add reviews + if "reviews" in custom_data and isinstance(custom_data["reviews"], list): + for review in custom_data["reviews"]: + if isinstance(review, dict) and "content" in review: + review_type = review.get("type", "text") + attrs = [f'type="{html.escape(review_type)}"'] + if "source" in review: + attrs.append(f'source="{html.escape(review["source"])}"') + if "reviewer" in review: + attrs.append(f'reviewer="{html.escape(review["reviewer"])}"') + attr_str = " ".join(attrs) + program_xml.append(f' {html.escape(review["content"])}') + + # Add images + if "images" in custom_data and isinstance(custom_data["images"], list): + for image in custom_data["images"]: + if isinstance(image, dict) and "url" in image: + attrs = [] + for attr in ['type', 'size', 'orient', 'system']: + if attr in image: + attrs.append(f'{attr}="{html.escape(image[attr])}"') + attr_str = " " + " ".join(attrs) if attrs else "" + program_xml.append(f' {html.escape(image["url"])}') + + # Add enhanced credits handling + if "credits" in custom_data: + program_xml.append(" ") + credits = custom_data["credits"] + + # Handle different credit types + for role in ['director', 'writer', 'adapter', 'producer', 'composer', 'editor', 'presenter', 'commentator', 'guest']: + if role in credits: + people = credits[role] + if isinstance(people, list): + for person in people: + program_xml.append(f" <{role}>{html.escape(person)}") + else: + program_xml.append(f" <{role}>{html.escape(people)}") + + # Handle actors separately to include role and guest attributes + if "actor" in credits: + actors = credits["actor"] + if isinstance(actors, list): + for actor in actors: + if isinstance(actor, dict): + name = actor.get("name", "") + role_attr = f' role="{html.escape(actor["role"])}"' if "role" in actor else "" + guest_attr = ' guest="yes"' if actor.get("guest") else "" + program_xml.append(f" {html.escape(name)}") + else: + program_xml.append(f" {html.escape(actor)}") + else: + program_xml.append(f" {html.escape(actors)}") + + program_xml.append(" ") + + # Add program date if available (full date, not just year) + if "date" in custom_data: + program_xml.append(f' {html.escape(custom_data["date"])}') # Add country if available if "country" in custom_data: @@ -524,18 +626,36 @@ def generate_epg(request, profile_name=None, user=None): if "icon" in custom_data: program_xml.append(f' ') - # Add special flags as proper tags + # Add special flags as proper tags with enhanced handling if custom_data.get("previously_shown", False): - 
program_xml.append(f" ") + prev_shown_details = custom_data.get("previously_shown_details", {}) + attrs = [] + if "start" in prev_shown_details: + attrs.append(f'start="{html.escape(prev_shown_details["start"])}"') + if "channel" in prev_shown_details: + attrs.append(f'channel="{html.escape(prev_shown_details["channel"])}"') + attr_str = " " + " ".join(attrs) if attrs else "" + program_xml.append(f" ") if custom_data.get("premiere", False): - program_xml.append(f" ") + premiere_text = custom_data.get("premiere_text", "") + if premiere_text: + program_xml.append(f" {html.escape(premiere_text)}") + else: + program_xml.append(" ") + + if custom_data.get("last_chance", False): + last_chance_text = custom_data.get("last_chance_text", "") + if last_chance_text: + program_xml.append(f" {html.escape(last_chance_text)}") + else: + program_xml.append(" ") if custom_data.get("new", False): - program_xml.append(f" ") + program_xml.append(" ") if custom_data.get('live', False): - program_xml.append(f' ') + program_xml.append(' ') except Exception as e: program_xml.append(f" ") From b392788d5f4ee436ee6009237ecdd4f18ddd81fa Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Thu, 10 Jul 2025 16:22:16 -0500 Subject: [PATCH 004/857] Improve error handling for API responses by checking for empty content and handling JSON decode errors. --- core/xtream_codes.py | 15 +++++++++++++-- 1 file changed, 13 insertions(+), 2 deletions(-) diff --git a/core/xtream_codes.py b/core/xtream_codes.py index 17f3eaad..64b49cb2 100644 --- a/core/xtream_codes.py +++ b/core/xtream_codes.py @@ -56,8 +56,19 @@ class Client: response = requests.get(url, params=params, headers=self.headers, timeout=30) response.raise_for_status() - data = response.json() - logger.debug(f"XC API Response: {url} status code: {response.status_code}") + # Check if response is empty + if not response.content: + error_msg = f"XC API returned empty response from {url}" + logger.error(error_msg) + raise ValueError(error_msg) + + try: + data = response.json() + except requests.exceptions.JSONDecodeError as json_err: + error_msg = f"XC API returned invalid JSON from {url}. Response: {response.text[:1000]}" + logger.error(error_msg) + logger.error(f"JSON decode error: {str(json_err)}") + raise ValueError(error_msg) # Check for XC-specific error responses if isinstance(data, dict) and data.get('user_info') is None and 'error' in data: From 65da85991c35690cf7d36b2bc54c65d696562417 Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Thu, 10 Jul 2025 18:07:25 -0500 Subject: [PATCH 005/857] Enhance error handling in API requests by checking for common blocking responses and improving JSON decode error logging. --- core/xtream_codes.py | 13 +++++++++++++ 1 file changed, 13 insertions(+) diff --git a/core/xtream_codes.py b/core/xtream_codes.py index 64b49cb2..846e53d4 100644 --- a/core/xtream_codes.py +++ b/core/xtream_codes.py @@ -62,12 +62,25 @@ class Client: logger.error(error_msg) raise ValueError(error_msg) + # Check for common blocking responses before trying to parse JSON + response_text = response.text.strip() + if response_text.lower() in ['blocked', 'forbidden', 'access denied', 'unauthorized']: + error_msg = f"XC API request blocked by server from {url}. 
Response: {response_text}" + logger.error(error_msg) + logger.error(f"This may indicate IP blocking, User-Agent filtering, or rate limiting") + raise ValueError(error_msg) + try: data = response.json() except requests.exceptions.JSONDecodeError as json_err: error_msg = f"XC API returned invalid JSON from {url}. Response: {response.text[:1000]}" logger.error(error_msg) logger.error(f"JSON decode error: {str(json_err)}") + + # Check if it looks like an HTML error page + if response_text.startswith('<'): + logger.error("Response appears to be HTML - server may be returning an error page") + raise ValueError(error_msg) # Check for XC-specific error responses From fafd93e9588cedb82ebfb0ee8709485075c44691 Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Thu, 10 Jul 2025 19:14:43 -0500 Subject: [PATCH 006/857] Refactor XC Client usage to improve error handling and resource management with context management. Implement connection pooling for better performance. --- apps/m3u/tasks.py | 202 +++++++++++++++++++++---------------------- core/xtream_codes.py | 42 +++++++-- 2 files changed, 137 insertions(+), 107 deletions(-) diff --git a/apps/m3u/tasks.py b/apps/m3u/tasks.py index d6e0755b..d7e46cde 100644 --- a/apps/m3u/tasks.py +++ b/apps/m3u/tasks.py @@ -285,57 +285,56 @@ def process_xc_category(account_id, batch, groups, hash_keys): stream_hashes = {} try: - xc_client = XCClient(account.server_url, account.username, account.password, account.get_user_agent()) + with XCClient(account.server_url, account.username, account.password, account.get_user_agent()) as xc_client: + # Log the batch details to help with debugging + logger.debug(f"Processing XC batch: {batch}") - # Log the batch details to help with debugging - logger.debug(f"Processing XC batch: {batch}") - - for group_name, props in batch.items(): - # Check if we have a valid xc_id for this group - if 'xc_id' not in props: - logger.error(f"Missing xc_id for group {group_name} in batch {batch}") - continue - - # Get actual group ID from the mapping - group_id = groups.get(group_name) - if not group_id: - logger.error(f"Group {group_name} not found in enabled groups") - continue - - try: - logger.debug(f"Fetching streams for XC category: {group_name} (ID: {props['xc_id']})") - streams = xc_client.get_live_category_streams(props['xc_id']) - - if not streams: - logger.warning(f"No streams found for XC category {group_name} (ID: {props['xc_id']})") + for group_name, props in batch.items(): + # Check if we have a valid xc_id for this group + if 'xc_id' not in props: + logger.error(f"Missing xc_id for group {group_name} in batch {batch}") continue - logger.debug(f"Found {len(streams)} streams for category {group_name}") + # Get actual group ID from the mapping + group_id = groups.get(group_name) + if not group_id: + logger.error(f"Group {group_name} not found in enabled groups") + continue - for stream in streams: - name = stream["name"] - url = xc_client.get_stream_url(stream["stream_id"]) - tvg_id = stream.get("epg_channel_id", "") - tvg_logo = stream.get("stream_icon", "") - group_title = group_name + try: + logger.debug(f"Fetching streams for XC category: {group_name} (ID: {props['xc_id']})") + streams = xc_client.get_live_category_streams(props['xc_id']) - stream_hash = Stream.generate_hash_key(name, url, tvg_id, hash_keys) - stream_props = { - "name": name, - "url": url, - "logo_url": tvg_logo, - "tvg_id": tvg_id, - "m3u_account": account, - "channel_group_id": int(group_id), - "stream_hash": stream_hash, - "custom_properties": 
json.dumps(stream), - } + if not streams: + logger.warning(f"No streams found for XC category {group_name} (ID: {props['xc_id']})") + continue - if stream_hash not in stream_hashes: - stream_hashes[stream_hash] = stream_props - except Exception as e: - logger.error(f"Error processing XC category {group_name} (ID: {props['xc_id']}): {str(e)}") - continue + logger.debug(f"Found {len(streams)} streams for category {group_name}") + + for stream in streams: + name = stream["name"] + url = xc_client.get_stream_url(stream["stream_id"]) + tvg_id = stream.get("epg_channel_id", "") + tvg_logo = stream.get("stream_icon", "") + group_title = group_name + + stream_hash = Stream.generate_hash_key(name, url, tvg_id, hash_keys) + stream_props = { + "name": name, + "url": url, + "logo_url": tvg_logo, + "tvg_id": tvg_id, + "m3u_account": account, + "channel_group_id": int(group_id), + "stream_hash": stream_hash, + "custom_properties": json.dumps(stream), + } + + if stream_hash not in stream_hashes: + stream_hashes[stream_hash] = stream_props + except Exception as e: + logger.error(f"Error processing XC category {group_name} (ID: {props['xc_id']}): {str(e)}") + continue # Process all found streams existing_streams = {s.stream_hash: s for s in Stream.objects.filter(stream_hash__in=stream_hashes.keys())} @@ -622,62 +621,63 @@ def refresh_m3u_groups(account_id, use_cache=False, full_refresh=False): # Create XCClient with explicit error handling try: - xc_client = XCClient(server_url, account.username, account.password, user_agent_string) - logger.info(f"XCClient instance created successfully") + with XCClient(server_url, account.username, account.password, user_agent_string) as xc_client: + logger.info(f"XCClient instance created successfully") + + # Authenticate with detailed error handling + try: + logger.debug(f"Authenticating with XC server {server_url}") + auth_result = xc_client.authenticate() + logger.debug(f"Authentication response: {auth_result}") + except Exception as e: + error_msg = f"Failed to authenticate with XC server: {str(e)}" + logger.error(error_msg) + account.status = M3UAccount.Status.ERROR + account.last_message = error_msg + account.save(update_fields=['status', 'last_message']) + send_m3u_update(account_id, "processing_groups", 100, status="error", error=error_msg) + release_task_lock('refresh_m3u_account_groups', account_id) + return error_msg, None + + # Get categories with detailed error handling + try: + logger.info(f"Getting live categories from XC server") + xc_categories = xc_client.get_live_categories() + logger.info(f"Found {len(xc_categories)} categories: {xc_categories}") + + # Validate response + if not isinstance(xc_categories, list): + error_msg = f"Unexpected response from XC server: {xc_categories}" + logger.error(error_msg) + account.status = M3UAccount.Status.ERROR + account.last_message = error_msg + account.save(update_fields=['status', 'last_message']) + send_m3u_update(account_id, "processing_groups", 100, status="error", error=error_msg) + release_task_lock('refresh_m3u_account_groups', account_id) + return error_msg, None + + if len(xc_categories) == 0: + logger.warning("No categories found in XC server response") + + for category in xc_categories: + cat_name = category.get("category_name", "Unknown Category") + cat_id = category.get("category_id", "0") + logger.info(f"Adding category: {cat_name} (ID: {cat_id})") + groups[cat_name] = { + "xc_id": cat_id, + } + except Exception as e: + error_msg = f"Failed to get categories from XC server: {str(e)}" + 
logger.error(error_msg) + account.status = M3UAccount.Status.ERROR + account.last_message = error_msg + account.save(update_fields=['status', 'last_message']) + send_m3u_update(account_id, "processing_groups", 100, status="error", error=error_msg) + release_task_lock('refresh_m3u_account_groups', account_id) + return error_msg, None + except Exception as e: - error_msg = f"Failed to create XCClient: {str(e)}" - logger.error(error_msg) - account.status = M3UAccount.Status.ERROR - account.last_message = error_msg - account.save(update_fields=['status', 'last_message']) - send_m3u_update(account_id, "processing_groups", 100, status="error", error=error_msg) - release_task_lock('refresh_m3u_account_groups', account_id) - return error_msg, None - - # Authenticate with detailed error handling - try: - logger.debug(f"Authenticating with XC server {server_url}") - auth_result = xc_client.authenticate() - logger.debug(f"Authentication response: {auth_result}") - except Exception as e: - error_msg = f"Failed to authenticate with XC server: {str(e)}" - logger.error(error_msg) - account.status = M3UAccount.Status.ERROR - account.last_message = error_msg - account.save(update_fields=['status', 'last_message']) - send_m3u_update(account_id, "processing_groups", 100, status="error", error=error_msg) - release_task_lock('refresh_m3u_account_groups', account_id) - return error_msg, None - - # Get categories with detailed error handling - try: - logger.info(f"Getting live categories from XC server") - xc_categories = xc_client.get_live_categories() - logger.info(f"Found {len(xc_categories)} categories: {xc_categories}") - - # Validate response - if not isinstance(xc_categories, list): - error_msg = f"Unexpected response from XC server: {xc_categories}" - logger.error(error_msg) - account.status = M3UAccount.Status.ERROR - account.last_message = error_msg - account.save(update_fields=['status', 'last_message']) - send_m3u_update(account_id, "processing_groups", 100, status="error", error=error_msg) - release_task_lock('refresh_m3u_account_groups', account_id) - return error_msg, None - - if len(xc_categories) == 0: - logger.warning("No categories found in XC server response") - - for category in xc_categories: - cat_name = category.get("category_name", "Unknown Category") - cat_id = category.get("category_id", "0") - logger.info(f"Adding category: {cat_name} (ID: {cat_id})") - groups[cat_name] = { - "xc_id": cat_id, - } - except Exception as e: - error_msg = f"Failed to get categories from XC server: {str(e)}" + error_msg = f"Failed to create XC Client: {str(e)}" logger.error(error_msg) account.status = M3UAccount.Status.ERROR account.last_message = error_msg @@ -686,7 +686,7 @@ def refresh_m3u_groups(account_id, use_cache=False, full_refresh=False): release_task_lock('refresh_m3u_account_groups', account_id) return error_msg, None except Exception as e: - error_msg = f"Unexpected error in XC processing: {str(e)}" + error_msg = f"Unexpected error occurred in XC Client: {str(e)}" logger.error(error_msg) account.status = M3UAccount.Status.ERROR account.last_message = error_msg diff --git a/core/xtream_codes.py b/core/xtream_codes.py index 846e53d4..d068bacb 100644 --- a/core/xtream_codes.py +++ b/core/xtream_codes.py @@ -17,20 +17,29 @@ class Client: # Fix: Properly handle all possible user_agent input types if user_agent: if isinstance(user_agent, str): - # Direct string user agent user_agent_string = user_agent elif hasattr(user_agent, 'user_agent'): - # UserAgent model object user_agent_string = 
user_agent.user_agent else: - # Fallback for any other type logger.warning(f"Unexpected user_agent type: {type(user_agent)}, using default") user_agent_string = 'Mozilla/5.0 (Windows NT 10.0; Win64; x64)' else: - # No user agent provided user_agent_string = 'Mozilla/5.0 (Windows NT 10.0; Win64; x64)' - self.headers = {'User-Agent': user_agent_string} + # Create persistent session + self.session = requests.Session() + self.session.headers.update({'User-Agent': user_agent_string}) + + # Configure connection pooling + adapter = requests.adapters.HTTPAdapter( + pool_connections=1, + pool_maxsize=2, + max_retries=3, + pool_block=False + ) + self.session.mount('http://', adapter) + self.session.mount('https://', adapter) + self.server_info = None def _normalize_url(self, url): @@ -53,7 +62,7 @@ class Client: url = f"{self.server_url}/{endpoint}" logger.debug(f"XC API Request: {url} with params: {params}") - response = requests.get(url, params=params, headers=self.headers, timeout=30) + response = self.session.get(url, params=params, timeout=30) response.raise_for_status() # Check if response is empty @@ -186,3 +195,24 @@ class Client: def get_stream_url(self, stream_id): """Get the playback URL for a stream""" return f"{self.server_url}/live/{self.username}/{self.password}/{stream_id}.ts" + + def close(self): + """Close the session and cleanup resources""" + if hasattr(self, 'session') and self.session: + try: + self.session.close() + except Exception as e: + logger.debug(f"Error closing XC session: {e}") + + def __enter__(self): + """Enter the context manager""" + return self + + def __exit__(self, exc_type, exc_val, exc_tb): + """Exit the context manager and cleanup resources""" + self.close() + return False # Don't suppress exceptions + + def __del__(self): + """Ensure session is closed when object is destroyed""" + self.close() From 1c7fa21b868bc160ad2897fa65afa892b7fba43a Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Fri, 11 Jul 2025 14:11:41 -0500 Subject: [PATCH 007/857] Add rehash streams endpoint and UI integration for triggering stream rehashing --- core/api_urls.py | 3 +- core/api_views.py | 37 +++++++++++++ core/tasks.py | 96 ++++++++++++++++++++++++++++----- frontend/src/pages/Settings.jsx | 36 ++++++++++++- 4 files changed, 156 insertions(+), 16 deletions(-) diff --git a/core/api_urls.py b/core/api_urls.py index e30eb698..30714d44 100644 --- a/core/api_urls.py +++ b/core/api_urls.py @@ -2,7 +2,7 @@ from django.urls import path, include from rest_framework.routers import DefaultRouter -from .api_views import UserAgentViewSet, StreamProfileViewSet, CoreSettingsViewSet, environment, version +from .api_views import UserAgentViewSet, StreamProfileViewSet, CoreSettingsViewSet, environment, version, rehash_streams_endpoint router = DefaultRouter() router.register(r'useragents', UserAgentViewSet, basename='useragent') @@ -12,5 +12,6 @@ router.register(r'settings', CoreSettingsViewSet, basename='settings') urlpatterns = [ path('settings/env/', environment, name='token_refresh'), path('version/', version, name='version'), + path('rehash-streams/', rehash_streams_endpoint, name='rehash_streams'), path('', include(router.urls)), ] diff --git a/core/api_views.py b/core/api_views.py index b416cf92..6b9743f6 100644 --- a/core/api_views.py +++ b/core/api_views.py @@ -280,3 +280,40 @@ def version(request): "timestamp": __timestamp__, } ) + + +@swagger_auto_schema( + method="post", + operation_description="Trigger rehashing of all streams", + responses={200: "Rehash task started"}, +) 
+@api_view(["POST"]) +@permission_classes([Authenticated]) +def rehash_streams_endpoint(request): + """Trigger the rehash streams task""" + try: + # Get the current hash keys from settings + hash_key_setting = CoreSettings.objects.get(key=STREAM_HASH_KEY) + hash_keys = hash_key_setting.value.split(",") + + # Queue the rehash task + task = rehash_streams.delay(hash_keys) + + return Response({ + "success": True, + "message": "Stream rehashing task has been queued", + "task_id": task.id + }, status=status.HTTP_200_OK) + + except CoreSettings.DoesNotExist: + return Response({ + "success": False, + "message": "Hash key settings not found" + }, status=status.HTTP_400_BAD_REQUEST) + + except Exception as e: + logger.error(f"Error triggering rehash streams: {e}") + return Response({ + "success": False, + "message": "Failed to trigger rehash task" + }, status=status.HTTP_500_INTERNAL_SERVER_ERROR) diff --git a/core/tasks.py b/core/tasks.py index 0fdaedf7..157ffadc 100644 --- a/core/tasks.py +++ b/core/tasks.py @@ -312,32 +312,100 @@ def fetch_channel_stats(): @shared_task def rehash_streams(keys): + """ + Rehash all streams with new hash keys and handle duplicates. + """ batch_size = 1000 queryset = Stream.objects.all() + # Track statistics + total_processed = 0 + duplicates_merged = 0 hash_keys = {} + total_records = queryset.count() + logger.info(f"Starting rehash of {total_records} streams with keys: {keys}") + for start in range(0, total_records, batch_size): + batch_processed = 0 + batch_duplicates = 0 + with transaction.atomic(): batch = queryset[start:start + batch_size] + for obj in batch: - stream_hash = Stream.generate_hash_key(obj.name, obj.url, obj.tvg_id, keys) - if stream_hash in hash_keys: - # Handle duplicate keys and remove any without channels - stream_channels = ChannelStream.objects.filter(stream_id=obj.id).count() - if stream_channels == 0: + # Generate new hash + new_hash = Stream.generate_hash_key(obj.name, obj.url, obj.tvg_id, keys) + + # Check if this hash already exists in our tracking dict or in database + if new_hash in hash_keys: + # Found duplicate in current batch - merge the streams + existing_stream_id = hash_keys[new_hash] + existing_stream = Stream.objects.get(id=existing_stream_id) + + # Move any channel relationships from duplicate to existing stream + ChannelStream.objects.filter(stream_id=obj.id).update(stream_id=existing_stream_id) + + # Update the existing stream with the most recent data + if obj.updated_at > existing_stream.updated_at: + existing_stream.name = obj.name + existing_stream.url = obj.url + existing_stream.logo_url = obj.logo_url + existing_stream.tvg_id = obj.tvg_id + existing_stream.m3u_account = obj.m3u_account + existing_stream.channel_group = obj.channel_group + existing_stream.custom_properties = obj.custom_properties + existing_stream.last_seen = obj.last_seen + existing_stream.updated_at = obj.updated_at + existing_stream.save() + + # Delete the duplicate + obj.delete() + batch_duplicates += 1 + else: + # Check if hash already exists in database (from previous batches or existing data) + existing_stream = Stream.objects.filter(stream_hash=new_hash).exclude(id=obj.id).first() + if existing_stream: + # Found duplicate in database - merge the streams + # Move any channel relationships from duplicate to existing stream + ChannelStream.objects.filter(stream_id=obj.id).update(stream_id=existing_stream.id) + + # Update the existing stream with the most recent data + if obj.updated_at > existing_stream.updated_at: + existing_stream.name = 
obj.name + existing_stream.url = obj.url + existing_stream.logo_url = obj.logo_url + existing_stream.tvg_id = obj.tvg_id + existing_stream.m3u_account = obj.m3u_account + existing_stream.channel_group = obj.channel_group + existing_stream.custom_properties = obj.custom_properties + existing_stream.last_seen = obj.last_seen + existing_stream.updated_at = obj.updated_at + existing_stream.save() + + # Delete the duplicate obj.delete() - continue + batch_duplicates += 1 + hash_keys[new_hash] = existing_stream.id + else: + # Update hash for this stream + obj.stream_hash = new_hash + obj.save(update_fields=['stream_hash']) + hash_keys[new_hash] = obj.id + batch_processed += 1 - existing_stream_channels = ChannelStream.objects.filter(stream_id=hash_keys[stream_hash]).count() - if existing_stream_channels == 0: - Stream.objects.filter(id=hash_keys[stream_hash]).delete() + total_processed += batch_processed + duplicates_merged += batch_duplicates - obj.stream_hash = stream_hash - obj.save(update_fields=['stream_hash']) - hash_keys[stream_hash] = obj.id + logger.info(f"Rehashed batch {start//batch_size + 1}/{(total_records//batch_size) + 1}: " + f"{batch_processed} processed, {batch_duplicates} duplicates merged") - logger.debug(f"Re-hashed {batch_size} streams") + logger.info(f"Rehashing complete: {total_processed} streams processed, " + f"{duplicates_merged} duplicates merged") - logger.debug(f"Re-hashing complete") + return { + 'total_processed': total_processed, + 'duplicates_merged': duplicates_merged, + 'final_count': total_processed - duplicates_merged + } diff --git a/frontend/src/pages/Settings.jsx b/frontend/src/pages/Settings.jsx index a5b07fa2..865ac6c7 100644 --- a/frontend/src/pages/Settings.jsx +++ b/frontend/src/pages/Settings.jsx @@ -47,6 +47,8 @@ const SettingsPage = () => { useState([]); const [proxySettingsSaved, setProxySettingsSaved] = useState(false); + const [rehashingStreams, setRehashingStreams] = useState(false); + const [rehashSuccess, setRehashSuccess] = useState(false); // UI / local storage settings const [tableSize, setTableSize] = useLocalStorage('table-size', 'default'); @@ -245,6 +247,22 @@ const SettingsPage = () => { } }; + const onRehashStreams = async () => { + setRehashingStreams(true); + setRehashSuccess(false); + + try { + await API.post('/core/rehash-streams/'); + setRehashSuccess(true); + setTimeout(() => setRehashSuccess(false), 5000); // Clear success message after 5 seconds + } catch (error) { + console.error('Error rehashing streams:', error); + // You might want to add error state handling here + } finally { + setRehashingStreams(false); + } + }; + return (
{ key={form.key('m3u-hash-key')} /> + {rehashSuccess && ( + + )} + + + )} + + + + + + {/* Existing groups */} + + Existing Groups ({Object.keys(channelGroups).length}) + + {loading ? ( + Loading group information... + ) : Object.keys(channelGroups).length === 0 ? ( + No groups found + ) : ( + + {Object.values(channelGroups) + .sort((a, b) => a.name.localeCompare(b.name)) + .map((group) => ( + + + {editingGroup === group.id ? ( + setEditName(e.target.value)} + size="sm" + onKeyPress={(e) => e.key === 'Enter' && handleSaveEdit()} + /> + ) : ( + <> + {group.name} + + {getGroupBadges(group)} + + + )} + + + + {editingGroup === group.id ? ( + <> + + + + + + + + ) : ( + <> + handleEdit(group)} + disabled={!canEditGroup(group)} + > + + + handleDelete(group)} + disabled={!canDeleteGroup(group)} + > + + + + )} + + + ))} + + )} + + + + + + + + + + ); +}; + +export default GroupManager; diff --git a/frontend/src/components/tables/ChannelsTable/ChannelTableHeader.jsx b/frontend/src/components/tables/ChannelsTable/ChannelTableHeader.jsx index 8813ceda..1568e10d 100644 --- a/frontend/src/components/tables/ChannelsTable/ChannelTableHeader.jsx +++ b/frontend/src/components/tables/ChannelsTable/ChannelTableHeader.jsx @@ -25,6 +25,7 @@ import { SquareMinus, SquarePen, SquarePlus, + Settings, } from 'lucide-react'; import API from '../../../api'; import { notifications } from '@mantine/notifications'; @@ -32,6 +33,7 @@ import useChannelsStore from '../../../store/channels'; import useAuthStore from '../../../store/auth'; import { USER_LEVELS } from '../../../constants'; import AssignChannelNumbersForm from '../../forms/AssignChannelNumbers'; +import GroupManager from '../../forms/GroupManager'; import ConfirmationDialog from '../../ConfirmationDialog'; import useWarningsStore from '../../../store/warnings'; @@ -105,6 +107,7 @@ const ChannelTableHeader = ({ const [channelNumAssignmentStart, setChannelNumAssignmentStart] = useState(1); const [assignNumbersModalOpen, setAssignNumbersModalOpen] = useState(false); + const [groupManagerOpen, setGroupManagerOpen] = useState(false); const [confirmDeleteProfileOpen, setConfirmDeleteProfileOpen] = useState(false); const [profileToDelete, setProfileToDelete] = useState(null); @@ -301,6 +304,15 @@ const ChannelTableHeader = ({ Auto-Match + + } + disabled={authUser.user_level != USER_LEVELS.ADMIN} + > + setGroupManagerOpen(true)}> + Edit Groups + + @@ -312,6 +324,11 @@ const ChannelTableHeader = ({ onClose={closeAssignChannelNumbersModal} /> + setGroupManagerOpen(false)} + /> + setConfirmDeleteProfileOpen(false)} diff --git a/frontend/src/store/channels.jsx b/frontend/src/store/channels.jsx index beb62fe1..03cf2b86 100644 --- a/frontend/src/store/channels.jsx +++ b/frontend/src/store/channels.jsx @@ -204,10 +204,18 @@ const useChannelsStore = create((set, get) => ({ updateChannelGroup: (channelGroup) => set((state) => ({ - ...state.channelGroups, - [channelGroup.id]: channelGroup, + channelGroups: { + ...state.channelGroups, + [channelGroup.id]: channelGroup, + }, })), + removeChannelGroup: (groupId) => + set((state) => { + const { [groupId]: removed, ...remainingGroups } = state.channelGroups; + return { channelGroups: remainingGroups }; + }), + fetchLogos: async () => { set({ isLoading: true, error: null }); try { From a1d9a7cbbe22c246e8c50714e6626f184f59f856 Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Sat, 12 Jul 2025 16:21:40 -0500 Subject: [PATCH 017/857] Fixed performance issue while creating group. 
--- .../src/components/forms/GroupManager.jsx | 262 +++++++++++------- 1 file changed, 156 insertions(+), 106 deletions(-) diff --git a/frontend/src/components/forms/GroupManager.jsx b/frontend/src/components/forms/GroupManager.jsx index 7709416f..65f4e0b6 100644 --- a/frontend/src/components/forms/GroupManager.jsx +++ b/frontend/src/components/forms/GroupManager.jsx @@ -1,4 +1,4 @@ -import React, { useState, useEffect } from 'react'; +import React, { useState, useEffect, useCallback, useMemo } from 'react'; import { Modal, Stack, @@ -27,7 +27,114 @@ import { notifications } from '@mantine/notifications'; import useChannelsStore from '../../store/channels'; import API from '../../api'; -const GroupManager = ({ isOpen, onClose }) => { +// Move GroupItem outside to prevent recreation on every render +const GroupItem = React.memo(({ + group, + editingGroup, + editName, + onEditNameChange, + onSaveEdit, + onCancelEdit, + onEdit, + onDelete, + groupUsage +}) => { + const getGroupBadges = (group) => { + const usage = groupUsage[group.id]; + const badges = []; + + if (usage?.hasChannels) { + badges.push( + }> + Channels + + ); + } + + if (usage?.hasM3UAccounts) { + badges.push( + }> + M3U + + ); + } + + return badges; + }; + + const canEditGroup = (group) => { + const usage = groupUsage[group.id]; + return usage?.canEdit !== false; + }; + + const canDeleteGroup = (group) => { + const usage = groupUsage[group.id]; + return usage?.canDelete !== false && !usage?.hasChannels && !usage?.hasM3UAccounts; + }; + + return ( + + + {editingGroup === group.id ? ( + e.key === 'Enter' && onSaveEdit()} + autoFocus + /> + ) : ( + <> + {group.name} + + {getGroupBadges(group)} + + + )} + + + + {editingGroup === group.id ? ( + <> + + + + + + + + ) : ( + <> + onEdit(group)} + disabled={!canEditGroup(group)} + > + + + onDelete(group)} + disabled={!canDeleteGroup(group)} + > + + + + )} + + + ); +}); + +const GroupManager = React.memo(({ isOpen, onClose }) => { + // Use a more specific selector to avoid unnecessary re-renders + const fetchChannelGroups = useChannelsStore((s) => s.fetchChannelGroups); const channelGroups = useChannelsStore((s) => s.channelGroups); const [editingGroup, setEditingGroup] = useState(null); const [editName, setEditName] = useState(''); @@ -36,6 +143,18 @@ const GroupManager = ({ isOpen, onClose }) => { const [groupUsage, setGroupUsage] = useState({}); const [loading, setLoading] = useState(false); + // Memoize the channel groups array to prevent unnecessary re-renders + const channelGroupsArray = useMemo(() => + Object.values(channelGroups), + [channelGroups] + ); + + // Memoize sorted groups to prevent re-sorting on every render + const sortedGroups = useMemo(() => + channelGroupsArray.sort((a, b) => a.name.localeCompare(b.name)), + [channelGroupsArray] + ); + // Fetch group usage information when modal opens useEffect(() => { if (isOpen) { @@ -69,12 +188,12 @@ const GroupManager = ({ isOpen, onClose }) => { } }; - const handleEdit = (group) => { + const handleEdit = useCallback((group) => { setEditingGroup(group.id); setEditName(group.name); - }; + }, []); - const handleSaveEdit = async () => { + const handleSaveEdit = useCallback(async () => { if (!editName.trim()) { notifications.show({ title: 'Error', @@ -105,14 +224,14 @@ const GroupManager = ({ isOpen, onClose }) => { color: 'red', }); } - }; + }, [editName, editingGroup]); - const handleCancelEdit = () => { + const handleCancelEdit = useCallback(() => { setEditingGroup(null); setEditName(''); - }; + }, []); - const handleCreate = 
async () => { + const handleCreate = useCallback(async () => { if (!newGroupName.trim()) { notifications.show({ title: 'Error', @@ -143,9 +262,9 @@ const GroupManager = ({ isOpen, onClose }) => { color: 'red', }); } - }; + }, [newGroupName]); - const handleDelete = async (group) => { + const handleDelete = useCallback(async (group) => { const usage = groupUsage[group.id]; if (usage && (!usage.canDelete || usage.hasChannels || usage.hasM3UAccounts)) { @@ -174,40 +293,15 @@ const GroupManager = ({ isOpen, onClose }) => { color: 'red', }); } - }; + }, [groupUsage]); - const getGroupBadges = (group) => { - const usage = groupUsage[group.id]; - const badges = []; + const handleNewGroupNameChange = useCallback((e) => { + setNewGroupName(e.target.value); + }, []); - if (usage?.hasChannels) { - badges.push( - }> - Channels - - ); - } - - if (usage?.hasM3UAccounts) { - badges.push( - }> - M3U - - ); - } - - return badges; - }; - - const canEditGroup = (group) => { - const usage = groupUsage[group.id]; - return usage?.canEdit !== false; // Default to true if no usage data - }; - - const canDeleteGroup = (group) => { - const usage = groupUsage[group.id]; - return usage?.canDelete !== false && !usage?.hasChannels && !usage?.hasM3UAccounts; - }; + const handleEditNameChange = useCallback((e) => { + setEditName(e.target.value); + }, []); if (!isOpen) return null; @@ -233,9 +327,10 @@ const GroupManager = ({ isOpen, onClose }) => { setNewGroupName(e.target.value)} + onChange={handleNewGroupNameChange} style={{ flex: 1 }} onKeyPress={(e) => e.key === 'Enter' && handleCreate()} + autoFocus /> @@ -264,73 +359,28 @@ const GroupManager = ({ isOpen, onClose }) => { {/* Existing groups */} - Existing Groups ({Object.keys(channelGroups).length}) + Existing Groups ({channelGroupsArray.length}) {loading ? ( Loading group information... - ) : Object.keys(channelGroups).length === 0 ? ( + ) : sortedGroups.length === 0 ? ( No groups found ) : ( - {Object.values(channelGroups) - .sort((a, b) => a.name.localeCompare(b.name)) - .map((group) => ( - - - {editingGroup === group.id ? ( - setEditName(e.target.value)} - size="sm" - onKeyPress={(e) => e.key === 'Enter' && handleSaveEdit()} - /> - ) : ( - <> - {group.name} - - {getGroupBadges(group)} - - - )} - - - - {editingGroup === group.id ? 
( - <> - - - - - - - - ) : ( - <> - handleEdit(group)} - disabled={!canEditGroup(group)} - > - - - handleDelete(group)} - disabled={!canDeleteGroup(group)} - > - - - - )} - - - ))} + {sortedGroups.map((group) => ( + + ))} )} @@ -345,6 +395,6 @@ const GroupManager = ({ isOpen, onClose }) => { ); -}; +}); export default GroupManager; From 9cb05a0ae1610d0e0c893cd78065d5cd31ed9c9c Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Sat, 12 Jul 2025 16:27:49 -0500 Subject: [PATCH 018/857] Add search functionality to GroupManager for filtering groups --- .../src/components/forms/GroupManager.jsx | 45 ++++++++++++++++--- 1 file changed, 40 insertions(+), 5 deletions(-) diff --git a/frontend/src/components/forms/GroupManager.jsx b/frontend/src/components/forms/GroupManager.jsx index 65f4e0b6..e10b9a1c 100644 --- a/frontend/src/components/forms/GroupManager.jsx +++ b/frontend/src/components/forms/GroupManager.jsx @@ -142,6 +142,7 @@ const GroupManager = React.memo(({ isOpen, onClose }) => { const [isCreating, setIsCreating] = useState(false); const [groupUsage, setGroupUsage] = useState({}); const [loading, setLoading] = useState(false); + const [searchTerm, setSearchTerm] = useState(''); // Memoize the channel groups array to prevent unnecessary re-renders const channelGroupsArray = useMemo(() => @@ -155,6 +156,14 @@ const GroupManager = React.memo(({ isOpen, onClose }) => { [channelGroupsArray] ); + // Filter groups based on search term + const filteredGroups = useMemo(() => { + if (!searchTerm.trim()) return sortedGroups; + return sortedGroups.filter(group => + group.name.toLowerCase().includes(searchTerm.toLowerCase()) + ); + }, [sortedGroups, searchTerm]); + // Fetch group usage information when modal opens useEffect(() => { if (isOpen) { @@ -293,7 +302,7 @@ const GroupManager = React.memo(({ isOpen, onClose }) => { color: 'red', }); } - }, [groupUsage]); + }, [groupUsage, fetchGroupUsage]); const handleNewGroupNameChange = useCallback((e) => { setNewGroupName(e.target.value); @@ -303,6 +312,10 @@ const GroupManager = React.memo(({ isOpen, onClose }) => { setEditName(e.target.value); }, []); + const handleSearchChange = useCallback((e) => { + setSearchTerm(e.target.value); + }, []); + if (!isOpen) return null; return ( @@ -359,15 +372,37 @@ const GroupManager = React.memo(({ isOpen, onClose }) => { {/* Existing groups */} - Existing Groups ({channelGroupsArray.length}) + + + Existing Groups ({filteredGroups.length}{searchTerm && ` of ${sortedGroups.length}`}) + + setSearchTerm('')} + > + + + )} + /> + {loading ? ( Loading group information... - ) : sortedGroups.length === 0 ? ( - No groups found + ) : filteredGroups.length === 0 ? ( + + {searchTerm ? 'No groups found matching your search' : 'No groups found'} + ) : ( - {sortedGroups.map((group) => ( + {filteredGroups.map((group) => ( Date: Sat, 12 Jul 2025 16:57:05 -0500 Subject: [PATCH 019/857] Disable buttons that can't be used. 
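In short, the rule now enforced on both the API and the UI side is: a group with M3U account associations can no longer be renamed, and a group that still has channels or an M3U account attached can no longer be deleted. A minimal sketch of that decision, expressed against the channel_count / m3u_account_count fields the serializer now exposes; the helper below is illustrative and is not code from this patch.

def group_permissions(channel_count, m3u_account_count):
    """Mirror of the edit/delete rules the viewset and GroupManager enforce."""
    return {
        "can_edit": m3u_account_count == 0,
        "can_delete": channel_count == 0 and m3u_account_count == 0,
    }

print(group_permissions(channel_count=0, m3u_account_count=0))  # {'can_edit': True, 'can_delete': True}
print(group_permissions(channel_count=5, m3u_account_count=0))  # {'can_edit': True, 'can_delete': False}
print(group_permissions(channel_count=0, m3u_account_count=2))  # {'can_edit': False, 'can_delete': False}
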
--- apps/channels/api_views.py | 34 +++++++++++++++++ apps/channels/serializers.py | 5 ++- frontend/src/api.js | 10 ++++- frontend/src/components/forms/Channel.jsx | 2 + .../src/components/forms/ChannelBatch.jsx | 2 + .../src/components/forms/ChannelGroup.jsx | 27 ++++++++++++- .../src/components/forms/GroupManager.jsx | 38 +++++++------------ .../src/components/tables/ChannelsTable.jsx | 4 +- frontend/src/store/channels.jsx | 35 +++++++++++++---- 9 files changed, 120 insertions(+), 37 deletions(-) diff --git a/apps/channels/api_views.py b/apps/channels/api_views.py index 5d00e84d..b4df2461 100644 --- a/apps/channels/api_views.py +++ b/apps/channels/api_views.py @@ -187,6 +187,40 @@ class ChannelGroupViewSet(viewsets.ModelViewSet): except KeyError: return [Authenticated()] + def get_queryset(self): + """Add annotation for association counts""" + from django.db.models import Count + return ChannelGroup.objects.annotate( + channel_count=Count('channels', distinct=True), + m3u_account_count=Count('m3u_account', distinct=True) + ) + + def update(self, request, *args, **kwargs): + """Override update to check M3U associations""" + instance = self.get_object() + + # Check if group has M3U account associations + if hasattr(instance, 'm3u_account') and instance.m3u_account.exists(): + return Response( + {"error": "Cannot edit group with M3U account associations"}, + status=status.HTTP_400_BAD_REQUEST + ) + + return super().update(request, *args, **kwargs) + + def partial_update(self, request, *args, **kwargs): + """Override partial_update to check M3U associations""" + instance = self.get_object() + + # Check if group has M3U account associations + if hasattr(instance, 'm3u_account') and instance.m3u_account.exists(): + return Response( + {"error": "Cannot edit group with M3U account associations"}, + status=status.HTTP_400_BAD_REQUEST + ) + + return super().partial_update(request, *args, **kwargs) + def destroy(self, request, *args, **kwargs): """Override destroy to check for associations before deletion""" instance = self.get_object() diff --git a/apps/channels/serializers.py b/apps/channels/serializers.py index cdc6ef60..4d1694dc 100644 --- a/apps/channels/serializers.py +++ b/apps/channels/serializers.py @@ -89,9 +89,12 @@ class StreamSerializer(serializers.ModelSerializer): # Channel Group # class ChannelGroupSerializer(serializers.ModelSerializer): + channel_count = serializers.IntegerField(read_only=True) + m3u_account_count = serializers.IntegerField(read_only=True) + class Meta: model = ChannelGroup - fields = ["id", "name"] + fields = ["id", "name", "channel_count", "m3u_account_count"] class ChannelProfileSerializer(serializers.ModelSerializer): diff --git a/frontend/src/api.js b/frontend/src/api.js index ff95f634..9786bb75 100644 --- a/frontend/src/api.js +++ b/frontend/src/api.js @@ -250,7 +250,15 @@ export default class API { }); if (response.id) { - useChannelsStore.getState().addChannelGroup(response); + // Add association flags for new groups + const processedGroup = { + ...response, + hasChannels: false, + hasM3UAccounts: false, + canEdit: true, + canDelete: true + }; + useChannelsStore.getState().addChannelGroup(processedGroup); } return response; diff --git a/frontend/src/components/forms/Channel.jsx b/frontend/src/components/forms/Channel.jsx index 452db052..64412cb4 100644 --- a/frontend/src/components/forms/Channel.jsx +++ b/frontend/src/components/forms/Channel.jsx @@ -45,6 +45,8 @@ const ChannelForm = ({ channel = null, isOpen, onClose }) => { const groupListRef = 
useRef(null); const channelGroups = useChannelsStore((s) => s.channelGroups); + const canEditChannelGroup = useChannelsStore((s) => s.canEditChannelGroup); + const logos = useChannelsStore((s) => s.logos); const fetchLogos = useChannelsStore((s) => s.fetchLogos); const streams = useStreamsStore((state) => state.streams); diff --git a/frontend/src/components/forms/ChannelBatch.jsx b/frontend/src/components/forms/ChannelBatch.jsx index 2ba3245c..693ebb11 100644 --- a/frontend/src/components/forms/ChannelBatch.jsx +++ b/frontend/src/components/forms/ChannelBatch.jsx @@ -32,6 +32,8 @@ const ChannelBatchForm = ({ channelIds, isOpen, onClose }) => { const groupListRef = useRef(null); const channelGroups = useChannelsStore((s) => s.channelGroups); + const canEditChannelGroup = useChannelsStore((s) => s.canEditChannelGroup); + const streamProfiles = useStreamProfilesStore((s) => s.profiles); const [channelGroupModelOpen, setChannelGroupModalOpen] = useState(false); diff --git a/frontend/src/components/forms/ChannelGroup.jsx b/frontend/src/components/forms/ChannelGroup.jsx index 18ed31c1..46641fb1 100644 --- a/frontend/src/components/forms/ChannelGroup.jsx +++ b/frontend/src/components/forms/ChannelGroup.jsx @@ -1,10 +1,17 @@ // Modal.js import React from 'react'; import API from '../../api'; -import { Flex, TextInput, Button, Modal } from '@mantine/core'; +import { Flex, TextInput, Button, Modal, Alert } from '@mantine/core'; +import { notifications } from '@mantine/notifications'; import { isNotEmpty, useForm } from '@mantine/form'; +import useChannelsStore from '../../store/channels'; const ChannelGroup = ({ channelGroup = null, isOpen, onClose }) => { + const canEditChannelGroup = useChannelsStore((s) => s.canEditChannelGroup); + + // Check if editing is allowed + const canEdit = !channelGroup || canEditChannelGroup(channelGroup.id); + const form = useForm({ mode: 'uncontrolled', initialValues: { @@ -17,6 +24,16 @@ const ChannelGroup = ({ channelGroup = null, isOpen, onClose }) => { }); const onSubmit = async () => { + // Prevent submission if editing is not allowed + if (channelGroup && !canEdit) { + notifications.show({ + title: 'Error', + message: 'Cannot edit group with M3U account associations', + color: 'red', + }); + return; + } + const values = form.getValues(); let newGroup; @@ -36,11 +53,17 @@ const ChannelGroup = ({ channelGroup = null, isOpen, onClose }) => { return ( + {channelGroup && !canEdit && ( + + This group cannot be edited because it has M3U account associations. + + )}
@@ -50,7 +73,7 @@ const ChannelGroup = ({ channelGroup = null, isOpen, onClose }) => { type="submit" variant="contained" color="primary" - disabled={form.submitting} + disabled={form.submitting || (channelGroup && !canEdit)} size="small" > Submit diff --git a/frontend/src/components/forms/GroupManager.jsx b/frontend/src/components/forms/GroupManager.jsx index e10b9a1c..f6bf7305 100644 --- a/frontend/src/components/forms/GroupManager.jsx +++ b/frontend/src/components/forms/GroupManager.jsx @@ -37,7 +37,9 @@ const GroupItem = React.memo(({ onCancelEdit, onEdit, onDelete, - groupUsage + groupUsage, + canEditGroup, + canDeleteGroup }) => { const getGroupBadges = (group) => { const usage = groupUsage[group.id]; @@ -62,16 +64,6 @@ const GroupItem = React.memo(({ return badges; }; - const canEditGroup = (group) => { - const usage = groupUsage[group.id]; - return usage?.canEdit !== false; - }; - - const canDeleteGroup = (group) => { - const usage = groupUsage[group.id]; - return usage?.canDelete !== false && !usage?.hasChannels && !usage?.hasM3UAccounts; - }; - return ( { - // Use a more specific selector to avoid unnecessary re-renders - const fetchChannelGroups = useChannelsStore((s) => s.fetchChannelGroups); const channelGroups = useChannelsStore((s) => s.channelGroups); + const canEditChannelGroup = useChannelsStore((s) => s.canEditChannelGroup); + const canDeleteChannelGroup = useChannelsStore((s) => s.canDeleteChannelGroup); const [editingGroup, setEditingGroup] = useState(null); const [editName, setEditName] = useState(''); const [newGroupName, setNewGroupName] = useState(''); @@ -171,21 +163,18 @@ const GroupManager = React.memo(({ isOpen, onClose }) => { } }, [isOpen]); - const fetchGroupUsage = async () => { + const fetchGroupUsage = useCallback(async () => { setLoading(true); try { - // This would ideally be a dedicated API endpoint, but we'll use the existing data - // For now, we'll determine usage based on the group having associated data + // Use the actual channel group data that already has the flags const usage = {}; - // Check which groups have channels or M3U associations - // This is a simplified check - in a real implementation you'd want a dedicated API Object.values(channelGroups).forEach(group => { usage[group.id] = { - hasChannels: false, // Would need API call to check - hasM3UAccounts: false, // Would need API call to check - canEdit: true, // Assume editable unless proven otherwise - canDelete: true // Assume deletable unless proven otherwise + hasChannels: group.hasChannels ?? false, + hasM3UAccounts: group.hasM3UAccounts ?? false, + canEdit: group.canEdit ?? true, + canDelete: group.canDelete ?? 
true }; }); @@ -195,7 +184,7 @@ const GroupManager = React.memo(({ isOpen, onClose }) => { } finally { setLoading(false); } - }; + }, [channelGroups]); const handleEdit = useCallback((group) => { setEditingGroup(group.id); @@ -414,6 +403,8 @@ const GroupManager = React.memo(({ isOpen, onClose }) => { onEdit={handleEdit} onDelete={handleDelete} groupUsage={groupUsage} + canEditGroup={canEditChannelGroup} + canDeleteGroup={canDeleteChannelGroup} /> ))} @@ -431,5 +422,4 @@ const GroupManager = React.memo(({ isOpen, onClose }) => { ); }); - export default GroupManager; diff --git a/frontend/src/components/tables/ChannelsTable.jsx b/frontend/src/components/tables/ChannelsTable.jsx index 7a9d5007..077602ad 100644 --- a/frontend/src/components/tables/ChannelsTable.jsx +++ b/frontend/src/components/tables/ChannelsTable.jsx @@ -216,6 +216,9 @@ const ChannelRowActions = React.memo( const ChannelsTable = ({ }) => { const theme = useMantineTheme(); + const channelGroups = useChannelsStore((s) => s.channelGroups); + const canEditChannelGroup = useChannelsStore((s) => s.canEditChannelGroup); + const canDeleteChannelGroup = useChannelsStore((s) => s.canDeleteChannelGroup); /** * STORES @@ -241,7 +244,6 @@ const ChannelsTable = ({ }) => { const channels = useChannelsStore((s) => s.channels); const profiles = useChannelsStore((s) => s.profiles); const selectedProfileId = useChannelsStore((s) => s.selectedProfileId); - const channelGroups = useChannelsStore((s) => s.channelGroups); const logos = useChannelsStore((s) => s.logos); const [tablePrefs, setTablePrefs] = useLocalStorage('channel-table-prefs', { pageSize: 50, diff --git a/frontend/src/store/channels.jsx b/frontend/src/store/channels.jsx index 03cf2b86..40791cf4 100644 --- a/frontend/src/store/channels.jsx +++ b/frontend/src/store/channels.jsx @@ -46,16 +46,24 @@ const useChannelsStore = create((set, get) => ({ }, fetchChannelGroups: async () => { - set({ isLoading: true, error: null }); try { const channelGroups = await api.getChannelGroups(); - set({ - channelGroups: channelGroups.reduce((acc, group) => { - acc[group.id] = group; - return acc; - }, {}), - isLoading: false, - }); + + // Process groups to add association flags + const processedGroups = channelGroups.reduce((acc, group) => { + acc[group.id] = { + ...group, + hasChannels: group.channel_count > 0, + hasM3UAccounts: group.m3u_account_count > 0, + canEdit: group.m3u_account_count === 0, + canDelete: group.channel_count === 0 && group.m3u_account_count === 0 + }; + return acc; + }, {}); + + set((state) => ({ + channelGroups: processedGroups, + })); } catch (error) { console.error('Failed to fetch channel groups:', error); set({ error: 'Failed to load channel groups.', isLoading: false }); @@ -435,6 +443,17 @@ const useChannelsStore = create((set, get) => ({ set({ error: 'Failed to load recordings.', isLoading: false }); } }, + + // Add helper methods for validation + canEditChannelGroup: (groupIdOrGroup) => { + const groupId = typeof groupIdOrGroup === 'object' ? groupIdOrGroup.id : groupIdOrGroup; + return get().channelGroups[groupId]?.canEdit ?? true; + }, + + canDeleteChannelGroup: (groupIdOrGroup) => { + const groupId = typeof groupIdOrGroup === 'object' ? groupIdOrGroup.id : groupIdOrGroup; + return get().channelGroups[groupId]?.canDelete ?? true; + }, })); export default useChannelsStore; From 9b7aa0c8946bccf65b641bf41fd330caa33fab96 Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Sat, 12 Jul 2025 17:05:48 -0500 Subject: [PATCH 020/857] Add ability to cleanup all unused groups. 
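
This adds a bulk cleanup action to the ChannelGroup viewset: any group whose annotated channel_count and m3u_account_count are both zero is deleted, and the response reports what was removed. A hedged sketch of exercising the new endpoint from a script (the host URL and bearer token are placeholders, not values from this repo):

    # Sketch only: POST to the new cleanup action and print the result.
    import requests

    resp = requests.post(
        "http://dispatcharr.example/api/channels/groups/cleanup/",
        headers={"Authorization": "Bearer <token>"},
        timeout=10,
    )
    resp.raise_for_status()
    data = resp.json()
    print(data["message"])  # e.g. "Successfully deleted N unused channel groups"
    print(data["deleted_count"], data["deleted_groups"])
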
--- apps/channels/api_views.py | 45 ++++++++++++++++--- frontend/src/api.js | 16 +++++++ .../src/components/forms/GroupManager.jsx | 41 ++++++++++++++++- 3 files changed, 93 insertions(+), 9 deletions(-) diff --git a/apps/channels/api_views.py b/apps/channels/api_views.py index b4df2461..f0f59f29 100644 --- a/apps/channels/api_views.py +++ b/apps/channels/api_views.py @@ -198,47 +198,78 @@ class ChannelGroupViewSet(viewsets.ModelViewSet): def update(self, request, *args, **kwargs): """Override update to check M3U associations""" instance = self.get_object() - + # Check if group has M3U account associations if hasattr(instance, 'm3u_account') and instance.m3u_account.exists(): return Response( {"error": "Cannot edit group with M3U account associations"}, status=status.HTTP_400_BAD_REQUEST ) - + return super().update(request, *args, **kwargs) def partial_update(self, request, *args, **kwargs): """Override partial_update to check M3U associations""" instance = self.get_object() - + # Check if group has M3U account associations if hasattr(instance, 'm3u_account') and instance.m3u_account.exists(): return Response( {"error": "Cannot edit group with M3U account associations"}, status=status.HTTP_400_BAD_REQUEST ) - + return super().partial_update(request, *args, **kwargs) + @swagger_auto_schema( + method="post", + operation_description="Delete all channel groups that have no associations (no channels or M3U accounts)", + responses={200: "Cleanup completed"}, + ) + @action(detail=False, methods=["post"], url_path="cleanup") + def cleanup_unused_groups(self, request): + """Delete all channel groups with no channels or M3U account associations""" + from django.db.models import Count + + # Find groups with no channels and no M3U account associations + unused_groups = ChannelGroup.objects.annotate( + channel_count=Count('channels', distinct=True), + m3u_account_count=Count('m3u_account', distinct=True) + ).filter( + channel_count=0, + m3u_account_count=0 + ) + + deleted_count = unused_groups.count() + group_names = list(unused_groups.values_list('name', flat=True)) + + # Delete the unused groups + unused_groups.delete() + + return Response({ + "message": f"Successfully deleted {deleted_count} unused channel groups", + "deleted_count": deleted_count, + "deleted_groups": group_names + }) + def destroy(self, request, *args, **kwargs): """Override destroy to check for associations before deletion""" instance = self.get_object() - + # Check if group has associated channels if instance.channels.exists(): return Response( {"error": "Cannot delete group with associated channels"}, status=status.HTTP_400_BAD_REQUEST ) - + # Check if group has M3U account associations if hasattr(instance, 'm3u_account') and instance.m3u_account.exists(): return Response( {"error": "Cannot delete group with M3U account associations"}, status=status.HTTP_400_BAD_REQUEST ) - + return super().destroy(request, *args, **kwargs) diff --git a/frontend/src/api.js b/frontend/src/api.js index 9786bb75..e9ab4deb 100644 --- a/frontend/src/api.js +++ b/frontend/src/api.js @@ -301,6 +301,22 @@ export default class API { } } + static async cleanupUnusedChannelGroups() { + try { + const response = await request(`${host}/api/channels/groups/cleanup/`, { + method: 'POST', + }); + + // Refresh channel groups to update the UI + useChannelsStore.getState().fetchChannelGroups(); + + return response; + } catch (e) { + errorNotification('Failed to cleanup unused channel groups', e); + throw e; + } + } + static async addChannel(channel) { try { let 
body = null; diff --git a/frontend/src/components/forms/GroupManager.jsx b/frontend/src/components/forms/GroupManager.jsx index f6bf7305..f89c9228 100644 --- a/frontend/src/components/forms/GroupManager.jsx +++ b/frontend/src/components/forms/GroupManager.jsx @@ -21,7 +21,8 @@ import { X, AlertCircle, Database, - Tv + Tv, + Trash } from 'lucide-react'; import { notifications } from '@mantine/notifications'; import useChannelsStore from '../../store/channels'; @@ -135,6 +136,7 @@ const GroupManager = React.memo(({ isOpen, onClose }) => { const [groupUsage, setGroupUsage] = useState({}); const [loading, setLoading] = useState(false); const [searchTerm, setSearchTerm] = useState(''); + const [isCleaningUp, setIsCleaningUp] = useState(false); // Memoize the channel groups array to prevent unnecessary re-renders const channelGroupsArray = useMemo(() => @@ -305,6 +307,29 @@ const GroupManager = React.memo(({ isOpen, onClose }) => { setSearchTerm(e.target.value); }, []); + const handleCleanup = useCallback(async () => { + setIsCleaningUp(true); + try { + const result = await API.cleanupUnusedChannelGroups(); + + notifications.show({ + title: 'Cleanup Complete', + message: `Successfully deleted ${result.deleted_count} unused groups`, + color: 'green', + }); + + fetchGroupUsage(); // Refresh usage data + } catch (error) { + notifications.show({ + title: 'Cleanup Failed', + message: 'Failed to cleanup unused groups', + color: 'red', + }); + } finally { + setIsCleaningUp(false); + } + }, [fetchGroupUsage]); + if (!isOpen) return null; return ( @@ -322,7 +347,19 @@ const GroupManager = React.memo(({ isOpen, onClose }) => { {/* Create new group section */} - Create New Group + + Create New Group + + {isCreating ? ( <> From 171d64841a566e79f0fb7dc84d3185016c2d5b99 Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Sat, 12 Jul 2025 17:28:04 -0500 Subject: [PATCH 021/857] Changed some colors to match our theme better. --- .../src/components/forms/GroupManager.jsx | 91 ++++++++++--------- 1 file changed, 47 insertions(+), 44 deletions(-) diff --git a/frontend/src/components/forms/GroupManager.jsx b/frontend/src/components/forms/GroupManager.jsx index f89c9228..edc04d20 100644 --- a/frontend/src/components/forms/GroupManager.jsx +++ b/frontend/src/components/forms/GroupManager.jsx @@ -12,11 +12,12 @@ import { Alert, Divider, ScrollArea, + useMantineTheme, } from '@mantine/core'; import { SquarePlus, SquarePen, - Trash2, + SquareMinus, Check, X, AlertCircle, @@ -42,6 +43,8 @@ const GroupItem = React.memo(({ canEditGroup, canDeleteGroup }) => { + const theme = useMantineTheme(); + const getGroupBadges = (group) => { const usage = groupUsage[group.id]; const badges = []; @@ -69,7 +72,7 @@ const GroupItem = React.memo(({ {editingGroup === group.id ? ( @@ -103,20 +106,22 @@ const GroupItem = React.memo(({ ) : ( <> onEdit(group)} disabled={!canEditGroup(group)} > - + onDelete(group)} disabled={!canDeleteGroup(group)} > - + )} @@ -346,9 +351,39 @@ const GroupManager = React.memo(({ isOpen, onClose }) => { {/* Create new group section */} - - - Create New Group + + {isCreating ? ( + + e.key === 'Enter' && handleCreate()} + autoFocus + /> + + + + { + setIsCreating(false); + setNewGroupName(''); + }}> + + + + ) : ( + + )} + + {!isCreating && ( - - - {isCreating ? 
( - <> - e.key === 'Enter' && handleCreate()} - autoFocus - /> - - - - { - setIsCreating(false); - setNewGroupName(''); - }}> - - - - ) : ( - - )} - - + )} + @@ -400,7 +403,7 @@ const GroupManager = React.memo(({ isOpen, onClose }) => { - Existing Groups ({filteredGroups.length}{searchTerm && ` of ${sortedGroups.length}`}) + Groups ({filteredGroups.length}{searchTerm && ` of ${sortedGroups.length}`}) Date: Sat, 12 Jul 2025 17:37:24 -0500 Subject: [PATCH 022/857] Add filtering based on group membership. --- .../src/components/forms/GroupManager.jsx | 139 ++++++++++++++++-- 1 file changed, 125 insertions(+), 14 deletions(-) diff --git a/frontend/src/components/forms/GroupManager.jsx b/frontend/src/components/forms/GroupManager.jsx index edc04d20..3b63b738 100644 --- a/frontend/src/components/forms/GroupManager.jsx +++ b/frontend/src/components/forms/GroupManager.jsx @@ -13,6 +13,7 @@ import { Divider, ScrollArea, useMantineTheme, + Chip, } from '@mantine/core'; import { SquarePlus, @@ -23,7 +24,8 @@ import { AlertCircle, Database, Tv, - Trash + Trash, + Filter } from 'lucide-react'; import { notifications } from '@mantine/notifications'; import useChannelsStore from '../../store/channels'; @@ -142,6 +144,9 @@ const GroupManager = React.memo(({ isOpen, onClose }) => { const [loading, setLoading] = useState(false); const [searchTerm, setSearchTerm] = useState(''); const [isCleaningUp, setIsCleaningUp] = useState(false); + const [showChannelGroups, setShowChannelGroups] = useState(true); + const [showM3UGroups, setShowM3UGroups] = useState(true); + const [showUnusedGroups, setShowUnusedGroups] = useState(true); // Memoize the channel groups array to prevent unnecessary re-renders const channelGroupsArray = useMemo(() => @@ -155,13 +160,75 @@ const GroupManager = React.memo(({ isOpen, onClose }) => { [channelGroupsArray] ); - // Filter groups based on search term + // Filter groups based on search term and chip filters const filteredGroups = useMemo(() => { - if (!searchTerm.trim()) return sortedGroups; - return sortedGroups.filter(group => - group.name.toLowerCase().includes(searchTerm.toLowerCase()) - ); - }, [sortedGroups, searchTerm]); + let filtered = sortedGroups; + + // Apply search filter + if (searchTerm.trim()) { + filtered = filtered.filter(group => + group.name.toLowerCase().includes(searchTerm.toLowerCase()) + ); + } + + // Apply chip filters + filtered = filtered.filter(group => { + const usage = groupUsage[group.id]; + if (!usage) return false; + + const hasChannels = usage.hasChannels; + const hasM3U = usage.hasM3UAccounts; + const isUnused = !hasChannels && !hasM3U; + + // If group is unused, only show if unused groups are enabled + if (isUnused) { + return showUnusedGroups; + } + + // For groups with channels and/or M3U, show if either filter is enabled + let shouldShow = false; + if (hasChannels && showChannelGroups) shouldShow = true; + if (hasM3U && showM3UGroups) shouldShow = true; + + return shouldShow; + }); + + return filtered; + }, [sortedGroups, searchTerm, showChannelGroups, showM3UGroups, showUnusedGroups, groupUsage]); + + // Calculate filter counts + const filterCounts = useMemo(() => { + const counts = { + channels: 0, + m3u: 0, + unused: 0 + }; + + sortedGroups.forEach(group => { + const usage = groupUsage[group.id]; + if (usage) { + const hasChannels = usage.hasChannels; + const hasM3U = usage.hasM3UAccounts; + + // Count groups with channels (including those with both) + if (hasChannels) { + counts.channels++; + } + + // Count groups with M3U (including those 
with both) + if (hasM3U) { + counts.m3u++; + } + + // Count truly unused groups + if (!hasChannels && !hasM3U) { + counts.unused++; + } + } + }); + + return counts; + }, [sortedGroups, groupUsage]); // Fetch group usage information when modal opens useEffect(() => { @@ -342,7 +409,7 @@ const GroupManager = React.memo(({ isOpen, onClose }) => { opened={isOpen} onClose={onClose} title="Group Manager" - size="md" + size="lg" scrollAreaComponent={ScrollArea.Autosize} > @@ -399,12 +466,13 @@ const GroupManager = React.memo(({ isOpen, onClose }) => { - {/* Existing groups */} - + {/* Filter Controls */} + - - Groups ({filteredGroups.length}{searchTerm && ` of ${sortedGroups.length}`}) - + + + Filter Groups + { /> + + Show: + + + + Channel Groups ({filterCounts.channels}) + + + + + + M3U Groups ({filterCounts.m3u}) + + + + Unused Groups ({filterCounts.unused}) + + + + + + + {/* Existing groups */} + + + Groups ({filteredGroups.length}{(searchTerm || !showChannelGroups || !showM3UGroups || !showUnusedGroups) && ` of ${sortedGroups.length}`}) + + {loading ? ( Loading group information... ) : filteredGroups.length === 0 ? ( - {searchTerm ? 'No groups found matching your search' : 'No groups found'} + {searchTerm || !showChannelGroups || !showM3UGroups || !showUnusedGroups ? 'No groups found matching your filters' : 'No groups found'} ) : ( From 2da8273de64fd19324ef4ea8769ab32141f75c3c Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Sat, 12 Jul 2025 17:41:35 -0500 Subject: [PATCH 023/857] Add confirmation for deleting and cleaning up groups. --- .../src/components/forms/GroupManager.jsx | 452 +++++++++++------- 1 file changed, 268 insertions(+), 184 deletions(-) diff --git a/frontend/src/components/forms/GroupManager.jsx b/frontend/src/components/forms/GroupManager.jsx index 3b63b738..abb44727 100644 --- a/frontend/src/components/forms/GroupManager.jsx +++ b/frontend/src/components/forms/GroupManager.jsx @@ -29,6 +29,8 @@ import { } from 'lucide-react'; import { notifications } from '@mantine/notifications'; import useChannelsStore from '../../store/channels'; +import useWarningsStore from '../../store/warnings'; +import ConfirmationDialog from '../ConfirmationDialog'; import API from '../../api'; // Move GroupItem outside to prevent recreation on every render @@ -136,6 +138,9 @@ const GroupManager = React.memo(({ isOpen, onClose }) => { const channelGroups = useChannelsStore((s) => s.channelGroups); const canEditChannelGroup = useChannelsStore((s) => s.canEditChannelGroup); const canDeleteChannelGroup = useChannelsStore((s) => s.canDeleteChannelGroup); + const isWarningSuppressed = useWarningsStore((s) => s.isWarningSuppressed); + const suppressWarning = useWarningsStore((s) => s.suppressWarning); + const [editingGroup, setEditingGroup] = useState(null); const [editName, setEditName] = useState(''); const [newGroupName, setNewGroupName] = useState(''); @@ -148,6 +153,11 @@ const GroupManager = React.memo(({ isOpen, onClose }) => { const [showM3UGroups, setShowM3UGroups] = useState(true); const [showUnusedGroups, setShowUnusedGroups] = useState(true); + // Confirmation dialog states + const [confirmDeleteOpen, setConfirmDeleteOpen] = useState(false); + const [groupToDelete, setGroupToDelete] = useState(null); + const [confirmCleanupOpen, setConfirmCleanupOpen] = useState(false); + // Memoize the channel groups array to prevent unnecessary re-renders const channelGroupsArray = useMemo(() => Object.values(channelGroups), @@ -348,6 +358,18 @@ const GroupManager = React.memo(({ isOpen, onClose }) => { 
return; } + // Store group for confirmation dialog + setGroupToDelete(group); + + // Skip warning if it's been suppressed + if (isWarningSuppressed('delete-group')) { + return executeDeleteGroup(group); + } + + setConfirmDeleteOpen(true); + }, [groupUsage, isWarningSuppressed]); + + const executeDeleteGroup = useCallback(async (group) => { try { await API.deleteChannelGroup(group.id); @@ -358,14 +380,50 @@ const GroupManager = React.memo(({ isOpen, onClose }) => { }); fetchGroupUsage(); // Refresh usage data + setConfirmDeleteOpen(false); } catch (error) { notifications.show({ title: 'Error', message: 'Failed to delete group', color: 'red', }); + setConfirmDeleteOpen(false); } - }, [groupUsage, fetchGroupUsage]); + }, [fetchGroupUsage]); + + const handleCleanup = useCallback(async () => { + // Skip warning if it's been suppressed + if (isWarningSuppressed('cleanup-groups')) { + return executeCleanup(); + } + + setConfirmCleanupOpen(true); + }, [isWarningSuppressed]); + + const executeCleanup = useCallback(async () => { + setIsCleaningUp(true); + try { + const result = await API.cleanupUnusedChannelGroups(); + + notifications.show({ + title: 'Cleanup Complete', + message: `Successfully deleted ${result.deleted_count} unused groups`, + color: 'green', + }); + + fetchGroupUsage(); // Refresh usage data + setConfirmCleanupOpen(false); + } catch (error) { + notifications.show({ + title: 'Cleanup Failed', + message: 'Failed to cleanup unused groups', + color: 'red', + }); + setConfirmCleanupOpen(false); + } finally { + setIsCleaningUp(false); + } + }, [fetchGroupUsage]); const handleNewGroupNameChange = useCallback((e) => { setNewGroupName(e.target.value); @@ -379,198 +437,224 @@ const GroupManager = React.memo(({ isOpen, onClose }) => { setSearchTerm(e.target.value); }, []); - const handleCleanup = useCallback(async () => { - setIsCleaningUp(true); - try { - const result = await API.cleanupUnusedChannelGroups(); - - notifications.show({ - title: 'Cleanup Complete', - message: `Successfully deleted ${result.deleted_count} unused groups`, - color: 'green', - }); - - fetchGroupUsage(); // Refresh usage data - } catch (error) { - notifications.show({ - title: 'Cleanup Failed', - message: 'Failed to cleanup unused groups', - color: 'red', - }); - } finally { - setIsCleaningUp(false); - } - }, [fetchGroupUsage]); - if (!isOpen) return null; return ( - - - } color="blue" variant="light"> - Manage channel groups. Groups associated with M3U accounts or containing channels cannot be deleted. - - - {/* Create new group section */} - - {isCreating ? ( - - e.key === 'Enter' && handleCreate()} - autoFocus - /> - - - - { - setIsCreating(false); - setNewGroupName(''); - }}> - - - - ) : ( - - )} - - {!isCreating && ( - - )} - - - - - {/* Filter Controls */} - - - - - Filter Groups - - setSearchTerm('')} - > - - - )} - /> - - - - Show: - - - - Channel Groups ({filterCounts.channels}) - - - - - - M3U Groups ({filterCounts.m3u}) - - - - Unused Groups ({filterCounts.unused}) - - - - - - - {/* Existing groups */} + <> + - - Groups ({filteredGroups.length}{(searchTerm || !showChannelGroups || !showM3UGroups || !showUnusedGroups) && ` of ${sortedGroups.length}`}) - + } color="blue" variant="light"> + Manage channel groups. Groups associated with M3U accounts or containing channels cannot be deleted. + - {loading ? ( - Loading group information... - ) : filteredGroups.length === 0 ? ( - - {searchTerm || !showChannelGroups || !showM3UGroups || !showUnusedGroups ? 
'No groups found matching your filters' : 'No groups found'} - - ) : ( - - {filteredGroups.map((group) => ( - + {isCreating ? ( + + e.key === 'Enter' && handleCreate()} + autoFocus /> - ))} - - )} + + + + { + setIsCreating(false); + setNewGroupName(''); + }}> + + + + ) : ( + + )} + + {!isCreating && ( + + )} + + + + + {/* Filter Controls */} + + + + + Filter Groups + + setSearchTerm('')} + > + + + )} + /> + + + + Show: + + + + Channel Groups ({filterCounts.channels}) + + + + + + M3U Groups ({filterCounts.m3u}) + + + + Unused Groups ({filterCounts.unused}) + + + + + + + {/* Existing groups */} + + + Groups ({filteredGroups.length}{(searchTerm || !showChannelGroups || !showM3UGroups || !showUnusedGroups) && ` of ${sortedGroups.length}`}) + + + {loading ? ( + Loading group information... + ) : filteredGroups.length === 0 ? ( + + {searchTerm || !showChannelGroups || !showM3UGroups || !showUnusedGroups ? 'No groups found matching your filters' : 'No groups found'} + + ) : ( + + {filteredGroups.map((group) => ( + + ))} + + )} + + + + + + + + - + setConfirmDeleteOpen(false)} + onConfirm={() => executeDeleteGroup(groupToDelete)} + title="Confirm Group Deletion" + message={ + groupToDelete ? ( +
+ {`Are you sure you want to delete the following group? - - - - - +Name: ${groupToDelete.name} + +This action cannot be undone.`} +
+ ) : ( + 'Are you sure you want to delete this group? This action cannot be undone.' + ) + } + confirmLabel="Delete" + cancelLabel="Cancel" + actionKey="delete-group" + onSuppressChange={suppressWarning} + size="md" + /> + + setConfirmCleanupOpen(false)} + onConfirm={executeCleanup} + title="Confirm Group Cleanup" + message={ +
+ {`Are you sure you want to cleanup all unused groups? + +This will permanently delete all groups that are not associated with any channels or M3U accounts. + +This action cannot be undone.`} +
+ } + confirmLabel="Cleanup" + cancelLabel="Cancel" + actionKey="cleanup-groups" + onSuppressChange={suppressWarning} + size="md" + /> + ); }); + export default GroupManager; From 35d95c47c724dcf1563e7bb69b37783f11a4fbdd Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Sat, 12 Jul 2025 17:48:56 -0500 Subject: [PATCH 024/857] Fixed z index issue when stream table was refreshing. --- frontend/src/components/ConfirmationDialog.jsx | 12 ++++++++++-- frontend/src/components/forms/GroupManager.jsx | 3 +++ 2 files changed, 13 insertions(+), 2 deletions(-) diff --git a/frontend/src/components/ConfirmationDialog.jsx b/frontend/src/components/ConfirmationDialog.jsx index 822b46f1..8f96708d 100644 --- a/frontend/src/components/ConfirmationDialog.jsx +++ b/frontend/src/components/ConfirmationDialog.jsx @@ -27,7 +27,8 @@ const ConfirmationDialog = ({ cancelLabel = 'Cancel', actionKey, onSuppressChange, - size = 'md', // Add default size parameter - md is a medium width + size = 'md', + zIndex = 1000, }) => { const suppressWarning = useWarningsStore((s) => s.suppressWarning); const isWarningSuppressed = useWarningsStore((s) => s.isWarningSuppressed); @@ -50,7 +51,14 @@ const ConfirmationDialog = ({ }; return ( - + {message} {actionKey && ( diff --git a/frontend/src/components/forms/GroupManager.jsx b/frontend/src/components/forms/GroupManager.jsx index abb44727..48ca85b1 100644 --- a/frontend/src/components/forms/GroupManager.jsx +++ b/frontend/src/components/forms/GroupManager.jsx @@ -447,6 +447,7 @@ const GroupManager = React.memo(({ isOpen, onClose }) => { title="Group Manager" size="lg" scrollAreaComponent={ScrollArea.Autosize} + zIndex={2000} > } color="blue" variant="light"> @@ -631,6 +632,7 @@ This action cannot be undone.`} actionKey="delete-group" onSuppressChange={suppressWarning} size="md" + zIndex={2100} /> ); From 8b361ee6466c212df64951e645b44fa0cab25cbf Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Sat, 12 Jul 2025 18:12:25 -0500 Subject: [PATCH 025/857] Fix eslint issues. 
--- .../src/components/forms/GroupManager.jsx | 89 ++++++++++--------- 1 file changed, 45 insertions(+), 44 deletions(-) diff --git a/frontend/src/components/forms/GroupManager.jsx b/frontend/src/components/forms/GroupManager.jsx index 48ca85b1..253a2b9c 100644 --- a/frontend/src/components/forms/GroupManager.jsx +++ b/frontend/src/components/forms/GroupManager.jsx @@ -240,13 +240,6 @@ const GroupManager = React.memo(({ isOpen, onClose }) => { return counts; }, [sortedGroups, groupUsage]); - // Fetch group usage information when modal opens - useEffect(() => { - if (isOpen) { - fetchGroupUsage(); - } - }, [isOpen]); - const fetchGroupUsage = useCallback(async () => { setLoading(true); try { @@ -270,6 +263,13 @@ const GroupManager = React.memo(({ isOpen, onClose }) => { } }, [channelGroups]); + // Fetch group usage information when modal opens + useEffect(() => { + if (isOpen) { + fetchGroupUsage(); + } + }, [isOpen, fetchGroupUsage]); + const handleEdit = useCallback((group) => { setEditingGroup(group.id); setEditName(group.name); @@ -299,6 +299,7 @@ const GroupManager = React.memo(({ isOpen, onClose }) => { setEditingGroup(null); setEditName(''); + await fetchGroupUsage(); // Refresh usage data } catch (error) { notifications.show({ title: 'Error', @@ -306,7 +307,7 @@ const GroupManager = React.memo(({ isOpen, onClose }) => { color: 'red', }); } - }, [editName, editingGroup]); + }, [editName, editingGroup, fetchGroupUsage]); const handleCancelEdit = useCallback(() => { setEditingGroup(null); @@ -336,7 +337,7 @@ const GroupManager = React.memo(({ isOpen, onClose }) => { setNewGroupName(''); setIsCreating(false); - fetchGroupUsage(); // Refresh usage data + await fetchGroupUsage(); // Refresh usage data } catch (error) { notifications.show({ title: 'Error', @@ -344,7 +345,29 @@ const GroupManager = React.memo(({ isOpen, onClose }) => { color: 'red', }); } - }, [newGroupName]); + }, [newGroupName, fetchGroupUsage]); + + const executeDeleteGroup = useCallback(async (group) => { + try { + await API.deleteChannelGroup(group.id); + + notifications.show({ + title: 'Success', + message: 'Group deleted successfully', + color: 'green', + }); + + await fetchGroupUsage(); // Refresh usage data + setConfirmDeleteOpen(false); + } catch (error) { + notifications.show({ + title: 'Error', + message: 'Failed to delete group', + color: 'red', + }); + setConfirmDeleteOpen(false); + } + }, [fetchGroupUsage]); const handleDelete = useCallback(async (group) => { const usage = groupUsage[group.id]; @@ -367,38 +390,7 @@ const GroupManager = React.memo(({ isOpen, onClose }) => { } setConfirmDeleteOpen(true); - }, [groupUsage, isWarningSuppressed]); - - const executeDeleteGroup = useCallback(async (group) => { - try { - await API.deleteChannelGroup(group.id); - - notifications.show({ - title: 'Success', - message: 'Group deleted successfully', - color: 'green', - }); - - fetchGroupUsage(); // Refresh usage data - setConfirmDeleteOpen(false); - } catch (error) { - notifications.show({ - title: 'Error', - message: 'Failed to delete group', - color: 'red', - }); - setConfirmDeleteOpen(false); - } - }, [fetchGroupUsage]); - - const handleCleanup = useCallback(async () => { - // Skip warning if it's been suppressed - if (isWarningSuppressed('cleanup-groups')) { - return executeCleanup(); - } - - setConfirmCleanupOpen(true); - }, [isWarningSuppressed]); + }, [groupUsage, isWarningSuppressed, executeDeleteGroup]); const executeCleanup = useCallback(async () => { setIsCleaningUp(true); @@ -411,7 +403,7 @@ const GroupManager 
= React.memo(({ isOpen, onClose }) => { color: 'green', }); - fetchGroupUsage(); // Refresh usage data + await fetchGroupUsage(); // Refresh usage data setConfirmCleanupOpen(false); } catch (error) { notifications.show({ @@ -425,6 +417,15 @@ const GroupManager = React.memo(({ isOpen, onClose }) => { } }, [fetchGroupUsage]); + const handleCleanup = useCallback(async () => { + // Skip warning if it's been suppressed + if (isWarningSuppressed('cleanup-groups')) { + return executeCleanup(); + } + + setConfirmCleanupOpen(true); + }, [isWarningSuppressed, executeCleanup]); + const handleNewGroupNameChange = useCallback((e) => { setNewGroupName(e.target.value); }, []); @@ -612,7 +613,7 @@ const GroupManager = React.memo(({ isOpen, onClose }) => { setConfirmDeleteOpen(false)} - onConfirm={() => executeDeleteGroup(groupToDelete)} + onConfirm={() => groupToDelete && executeDeleteGroup(groupToDelete)} title="Confirm Group Deletion" message={ groupToDelete ? ( From c4e5710b484ce8d31673d194e3636e8ed521cdca Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Sat, 12 Jul 2025 19:05:06 -0500 Subject: [PATCH 026/857] When adding a group. Fetch groups after. --- frontend/src/api.js | 2 ++ 1 file changed, 2 insertions(+) diff --git a/frontend/src/api.js b/frontend/src/api.js index e9ab4deb..e0a62160 100644 --- a/frontend/src/api.js +++ b/frontend/src/api.js @@ -259,6 +259,8 @@ export default class API { canDelete: true }; useChannelsStore.getState().addChannelGroup(processedGroup); + // Refresh channel groups to update the UI + useChannelsStore.getState().fetchChannelGroups(); } return response; From 69f8f426a627af88b7a8d85389501b40a3bcd4a1 Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Sat, 12 Jul 2025 19:10:59 -0500 Subject: [PATCH 027/857] Refactor menu items in ChannelTableHeader to fix html error. --- .../ChannelsTable/ChannelTableHeader.jsx | 18 ++++++------------ 1 file changed, 6 insertions(+), 12 deletions(-) diff --git a/frontend/src/components/tables/ChannelsTable/ChannelTableHeader.jsx b/frontend/src/components/tables/ChannelsTable/ChannelTableHeader.jsx index 1568e10d..72372cc7 100644 --- a/frontend/src/components/tables/ChannelsTable/ChannelTableHeader.jsx +++ b/frontend/src/components/tables/ChannelsTable/ChannelTableHeader.jsx @@ -287,31 +287,25 @@ const ChannelTableHeader = ({ selectedTableIds.length == 0 || authUser.user_level != USER_LEVELS.ADMIN } + onClick={() => setAssignNumbersModalOpen(true)} > - setAssignNumbersModalOpen(true)} - > - Assign #s - + Assign #s } disabled={authUser.user_level != USER_LEVELS.ADMIN} + onClick={matchEpg} > - - Auto-Match - + Auto-Match } disabled={authUser.user_level != USER_LEVELS.ADMIN} + onClick={() => setGroupManagerOpen(true)} > - setGroupManagerOpen(true)}> - Edit Groups - + Edit Groups From ea81cfb1afd426bb8b12df6bb5205673634e31a1 Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Sun, 13 Jul 2025 15:59:25 -0500 Subject: [PATCH 028/857] Add auto channel sync settings to ChannelGroupM3UAccount and update related components - Introduced `auto_channel_sync` and `auto_sync_channel_start` fields in the ChannelGroupM3UAccount model. - Added API endpoint to update M3U group settings. - Updated M3UGroupFilter component to manage auto sync settings. - Enhanced M3URefreshNotification and M3U components for better user guidance. - Created a Celery task for automatic channel synchronization after M3U refresh. 
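
For reference, the new group-settings endpoint accepts a PATCH with a group_settings list; each entry carries the channel_group id, its enabled flag, and the two new auto-sync fields. A hedged example payload (account id, group ids, and channel numbers are illustrative only):

    # Sketch only: the shape of the request sent to /api/m3u/accounts/<id>/group-settings/.
    import requests

    payload = {
        "group_settings": [
            {"channel_group": 12, "enabled": True,
             "auto_channel_sync": True, "auto_sync_channel_start": 100.0},
            {"channel_group": 13, "enabled": True,
             "auto_channel_sync": False, "auto_sync_channel_start": None},
        ]
    }
    requests.patch(
        "http://dispatcharr.example/api/m3u/accounts/5/group-settings/",
        json=payload,
        headers={"Authorization": "Bearer <token>"},
        timeout=10,
    )

After the next refresh, sync_auto_channels walks each enabled group with auto_channel_sync set, creates channels starting at auto_sync_channel_start, and removes auto-created channels whose streams have disappeared.
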
--- ...upm3uaccount_auto_channel_sync_and_more.py | 23 +++ apps/channels/models.py | 9 ++ apps/channels/serializers.py | 4 +- apps/m3u/api_views.py | 34 ++++- apps/m3u/tasks.py | 144 ++++++++++++++++++ dispatcharr/celery.py | 1 + frontend/src/api.js | 13 ++ .../src/components/M3URefreshNotification.jsx | 4 +- frontend/src/components/forms/M3U.jsx | 3 +- .../src/components/forms/M3UGroupFilter.jsx | 140 ++++++++++++----- 10 files changed, 334 insertions(+), 41 deletions(-) create mode 100644 apps/channels/migrations/0022_channelgroupm3uaccount_auto_channel_sync_and_more.py diff --git a/apps/channels/migrations/0022_channelgroupm3uaccount_auto_channel_sync_and_more.py b/apps/channels/migrations/0022_channelgroupm3uaccount_auto_channel_sync_and_more.py new file mode 100644 index 00000000..a0c94c7d --- /dev/null +++ b/apps/channels/migrations/0022_channelgroupm3uaccount_auto_channel_sync_and_more.py @@ -0,0 +1,23 @@ +# Generated by Django 5.1.6 on 2025-07-13 20:40 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('dispatcharr_channels', '0021_channel_user_level'), + ] + + operations = [ + migrations.AddField( + model_name='channelgroupm3uaccount', + name='auto_channel_sync', + field=models.BooleanField(default=False, help_text='Automatically create/delete channels to match streams in this group'), + ), + migrations.AddField( + model_name='channelgroupm3uaccount', + name='auto_sync_channel_start', + field=models.FloatField(blank=True, help_text='Starting channel number for auto-created channels in this group', null=True), + ), + ] diff --git a/apps/channels/models.py b/apps/channels/models.py index 1bcbcc41..b6333aab 100644 --- a/apps/channels/models.py +++ b/apps/channels/models.py @@ -541,6 +541,15 @@ class ChannelGroupM3UAccount(models.Model): ) custom_properties = models.TextField(null=True, blank=True) enabled = models.BooleanField(default=True) + auto_channel_sync = models.BooleanField( + default=False, + help_text='Automatically create/delete channels to match streams in this group' + ) + auto_sync_channel_start = models.FloatField( + null=True, + blank=True, + help_text='Starting channel number for auto-created channels in this group' + ) class Meta: unique_together = ("channel_group", "m3u_account") diff --git a/apps/channels/serializers.py b/apps/channels/serializers.py index 4d1694dc..0eb5acc3 100644 --- a/apps/channels/serializers.py +++ b/apps/channels/serializers.py @@ -289,10 +289,12 @@ class ChannelSerializer(serializers.ModelSerializer): class ChannelGroupM3UAccountSerializer(serializers.ModelSerializer): enabled = serializers.BooleanField() + auto_channel_sync = serializers.BooleanField(default=False) + auto_sync_channel_start = serializers.FloatField(allow_null=True, required=False) class Meta: model = ChannelGroupM3UAccount - fields = ["id", "channel_group", "enabled"] + fields = ["id", "channel_group", "enabled", "auto_channel_sync", "auto_sync_channel_start"] # Optionally, if you only need the id of the ChannelGroup, you can customize it like this: # channel_group = serializers.PrimaryKeyRelatedField(queryset=ChannelGroup.objects.all()) diff --git a/apps/m3u/api_views.py b/apps/m3u/api_views.py index 0ef42272..39b9e22e 100644 --- a/apps/m3u/api_views.py +++ b/apps/m3u/api_views.py @@ -16,13 +16,11 @@ from rest_framework.decorators import action from django.conf import settings from .tasks import refresh_m3u_groups -# Import all models, including UserAgent. 
from .models import M3UAccount, M3UFilter, ServerGroup, M3UAccountProfile from core.models import UserAgent from apps.channels.models import ChannelGroupM3UAccount from core.serializers import UserAgentSerializer -# Import all serializers, including the UserAgentSerializer. from .serializers import ( M3UAccountSerializer, M3UFilterSerializer, @@ -144,6 +142,38 @@ class M3UAccountViewSet(viewsets.ModelViewSet): # Continue with regular partial update return super().partial_update(request, *args, **kwargs) + @action(detail=True, methods=["patch"], url_path="group-settings") + def update_group_settings(self, request, pk=None): + """Update auto channel sync settings for M3U account groups""" + account = self.get_object() + group_settings = request.data.get("group_settings", []) + + try: + for setting in group_settings: + group_id = setting.get("channel_group") + enabled = setting.get("enabled", True) + auto_sync = setting.get("auto_channel_sync", False) + sync_start = setting.get("auto_sync_channel_start") + + if group_id: + ChannelGroupM3UAccount.objects.update_or_create( + channel_group_id=group_id, + m3u_account=account, + defaults={ + "enabled": enabled, + "auto_channel_sync": auto_sync, + "auto_sync_channel_start": sync_start, + }, + ) + + return Response({"message": "Group settings updated successfully"}) + + except Exception as e: + return Response( + {"error": f"Failed to update group settings: {str(e)}"}, + status=status.HTTP_400_BAD_REQUEST, + ) + class M3UFilterViewSet(viewsets.ModelViewSet): """Handles CRUD operations for M3U filters""" diff --git a/apps/m3u/tasks.py b/apps/m3u/tasks.py index 0b782649..b5614376 100644 --- a/apps/m3u/tasks.py +++ b/apps/m3u/tasks.py @@ -838,6 +838,144 @@ def delete_m3u_refresh_task_by_id(account_id): logger.error(f"Error deleting periodic task for M3UAccount {account_id}: {str(e)}", exc_info=True) return False +@shared_task +def sync_auto_channels(account_id): + """ + Automatically create/delete channels to match streams in groups with auto_channel_sync enabled. + Called after M3U refresh completes successfully. 
+ """ + from apps.channels.models import Channel, ChannelGroup, ChannelGroupM3UAccount, Stream, ChannelStream + from apps.epg.models import EPGData + import json + + try: + account = M3UAccount.objects.get(id=account_id) + logger.info(f"Starting auto channel sync for M3U account {account.name}") + + # Get groups with auto sync enabled for this account + auto_sync_groups = ChannelGroupM3UAccount.objects.filter( + m3u_account=account, + enabled=True, + auto_channel_sync=True + ).select_related('channel_group') + + channels_created = 0 + channels_deleted = 0 + + for group_relation in auto_sync_groups: + channel_group = group_relation.channel_group + start_number = group_relation.auto_sync_channel_start or 1.0 + + logger.info(f"Processing auto sync for group: {channel_group.name} (start: {start_number})") + + # Get all streams in this group for this M3U account + current_streams = Stream.objects.filter( + m3u_account=account, + channel_group=channel_group + ) + + # Get existing channels in this group that were auto-created (we'll track this via a custom property) + existing_auto_channels = Channel.objects.filter( + channel_group=channel_group, + streams__m3u_account=account + ).distinct() + + # Create a mapping of stream hashes to existing channels + existing_channel_streams = {} + for channel in existing_auto_channels: + for stream in channel.streams.filter(m3u_account=account): + existing_channel_streams[stream.stream_hash] = channel + + # Track which channels should exist (based on current streams) + channels_to_keep = set() + current_channel_number = start_number + + # Create channels for streams that don't have them + for stream in current_streams.order_by('name'): + if stream.stream_hash in existing_channel_streams: + # Channel already exists for this stream + channels_to_keep.add(existing_channel_streams[stream.stream_hash].id) + continue + + # Find next available channel number + while Channel.objects.filter(channel_number=current_channel_number).exists(): + current_channel_number += 0.1 + + # Create new channel + try: + # Parse custom properties for additional info + stream_custom_props = json.loads(stream.custom_properties) if stream.custom_properties else {} + + # Get tvc_guide_stationid from custom properties if it exists + tvc_guide_stationid = stream_custom_props.get("tvc-guide-stationid") + + # Create the channel + channel = Channel.objects.create( + channel_number=current_channel_number, + name=stream.name, + tvg_id=stream.tvg_id, + tvc_guide_stationid=tvc_guide_stationid, + channel_group=channel_group, + user_level=0 # Default user level + ) + + # Associate the stream with the channel + ChannelStream.objects.create( + channel=channel, + stream=stream, + order=0 + ) + + # Try to match EPG data + if stream.tvg_id: + epg_data = EPGData.objects.filter(tvg_id=stream.tvg_id).first() + if epg_data: + channel.epg_data = epg_data + channel.save(update_fields=['epg_data']) + + # Handle logo + if stream.logo_url: + from apps.channels.models import Logo + logo, _ = Logo.objects.get_or_create( + url=stream.logo_url, + defaults={"name": stream.name or stream.tvg_id or "Unknown"} + ) + channel.logo = logo + channel.save(update_fields=['logo']) + + channels_to_keep.add(channel.id) + channels_created += 1 + current_channel_number += 1.0 + + logger.debug(f"Created auto channel: {channel.channel_number} - {channel.name}") + + except Exception as e: + logger.error(f"Error creating auto channel for stream {stream.name}: {str(e)}") + continue + + # Delete channels that no longer have corresponding 
streams + channels_to_delete = existing_auto_channels.exclude(id__in=channels_to_keep) + + for channel in channels_to_delete: + # Only delete if all streams for this channel are from this M3U account + # and this channel group + all_streams_from_account = all( + s.m3u_account_id == account.id and s.channel_group_id == channel_group.id + for s in channel.streams.all() + ) + + if all_streams_from_account: + logger.debug(f"Deleting auto channel: {channel.channel_number} - {channel.name}") + channel.delete() + channels_deleted += 1 + + logger.info(f"Auto channel sync complete for account {account.name}: {channels_created} created, {channels_deleted} deleted") + return f"Auto sync: {channels_created} channels created, {channels_deleted} deleted" + + except Exception as e: + logger.error(f"Error in auto channel sync for account {account_id}: {str(e)}") + return f"Auto sync error: {str(e)}" + @shared_task def refresh_single_m3u_account(account_id): """Splits M3U processing into chunks and dispatches them as parallel tasks.""" @@ -1120,6 +1258,12 @@ def refresh_single_m3u_account(account_id): message=account.last_message ) + # Run auto channel sync after successful refresh + try: + sync_result = sync_auto_channels(account_id) + logger.info(f"Auto channel sync result for account {account_id}: {sync_result}") + except Exception as e: + logger.error(f"Error running auto channel sync for account {account_id}: {str(e)}") except Exception as e: logger.error(f"Error processing M3U for account {account_id}: {str(e)}") account.status = M3UAccount.Status.ERROR diff --git a/dispatcharr/celery.py b/dispatcharr/celery.py index 8856d330..98c6210b 100644 --- a/dispatcharr/celery.py +++ b/dispatcharr/celery.py @@ -62,6 +62,7 @@ def cleanup_task_memory(**kwargs): 'apps.m3u.tasks.refresh_m3u_accounts', 'apps.m3u.tasks.process_m3u_batch', 'apps.m3u.tasks.process_xc_category', + 'apps.m3u.tasks.sync_auto_channels', 'apps.epg.tasks.refresh_epg_data', 'apps.epg.tasks.refresh_all_epg_data', 'apps.epg.tasks.parse_programs_for_source', diff --git a/frontend/src/api.js b/frontend/src/api.js index e0a62160..e34dabe2 100644 --- a/frontend/src/api.js +++ b/frontend/src/api.js @@ -733,6 +733,19 @@ export default class API { } } + static async updateM3UGroupSettings(playlistId, groupSettings) { + try { + const response = await request(`${host}/api/m3u/accounts/${playlistId}/group-settings/`, { + method: 'PATCH', + body: { group_settings: groupSettings }, + }); + + return response; + } catch (e) { + errorNotification('Failed to update M3U group settings', e); + } + } + static async addPlaylist(values) { if (values.custom_properties) { values.custom_properties = JSON.stringify(values.custom_properties); diff --git a/frontend/src/components/M3URefreshNotification.jsx b/frontend/src/components/M3URefreshNotification.jsx index 8a6647cb..3b57af37 100644 --- a/frontend/src/components/M3URefreshNotification.jsx +++ b/frontend/src/components/M3URefreshNotification.jsx @@ -49,7 +49,7 @@ export default function M3URefreshNotification() { message: ( {data.message || - 'M3U groups loaded. Please select groups or refresh M3U to complete setup.'} + 'M3U groups loaded. 
Configure group filters and auto channel sync settings.'} diff --git a/frontend/src/components/forms/M3U.jsx b/frontend/src/components/forms/M3U.jsx index 24ddd377..0e4d5643 100644 --- a/frontend/src/components/forms/M3U.jsx +++ b/frontend/src/components/forms/M3U.jsx @@ -145,8 +145,7 @@ const M3U = ({ if (values.account_type != 'XC') { notifications.show({ title: 'Fetching M3U Groups', - message: 'Filter out groups or refresh M3U once complete.', - // color: 'green.5', + message: 'Configure group filters and auto sync settings once complete.', }); // Don't prompt for group filters, but keeping this here diff --git a/frontend/src/components/forms/M3UGroupFilter.jsx b/frontend/src/components/forms/M3UGroupFilter.jsx index 7ca0fa96..0213eeee 100644 --- a/frontend/src/components/forms/M3UGroupFilter.jsx +++ b/frontend/src/components/forms/M3UGroupFilter.jsx @@ -21,7 +21,11 @@ import { Center, SimpleGrid, Text, + NumberInput, + Divider, + Alert, } from '@mantine/core'; +import { Info } from 'lucide-react'; import useChannelsStore from '../../store/channels'; import { CircleCheck, CircleX } from 'lucide-react'; @@ -40,6 +44,8 @@ const M3UGroupFilter = ({ playlist = null, isOpen, onClose }) => { playlist.channel_groups.map((group) => ({ ...group, name: channelGroups[group.channel_group].name, + auto_channel_sync: group.auto_channel_sync || false, + auto_sync_channel_start: group.auto_sync_channel_start || 1.0, })) ); }, [playlist, channelGroups]); @@ -53,15 +59,38 @@ const M3UGroupFilter = ({ playlist = null, isOpen, onClose }) => { ); }; + const toggleAutoSync = (id) => { + setGroupStates( + groupStates.map((state) => ({ + ...state, + auto_channel_sync: state.channel_group == id ? !state.auto_channel_sync : state.auto_channel_sync, + })) + ); + }; + + const updateChannelStart = (id, value) => { + setGroupStates( + groupStates.map((state) => ({ + ...state, + auto_sync_channel_start: state.channel_group == id ? value : state.auto_sync_channel_start, + })) + ); + }; + const submit = async () => { setIsLoading(true); - await API.updatePlaylist({ - ...playlist, - channel_groups: groupStates, - }); - setIsLoading(false); - API.refreshPlaylist(playlist.id); - onClose(); + try { + // Update group settings via new API endpoint + await API.updateM3UGroupSettings(playlist.id, groupStates); + + // Refresh the playlist + API.refreshPlaylist(playlist.id); + onClose(); + } catch (error) { + console.error('Error updating group settings:', error); + } finally { + setIsLoading(false); + } }; const selectAll = () => { @@ -94,14 +123,21 @@ const M3UGroupFilter = ({ playlist = null, isOpen, onClose }) => { + } color="blue" variant="light"> + + Auto Channel Sync: When enabled, channels will be automatically created for all streams in the group during M3U updates, + and removed when streams are no longer present. Set a starting channel number for each group to organize your channels. 
+ + + setGroupFilter(event.currentTarget.value)} style={{ flex: 1 }} @@ -113,41 +149,77 @@ const M3UGroupFilter = ({ playlist = null, isOpen, onClose }) => { Deselect Visible - + + + + {groupStates .filter((group) => group.name.toLowerCase().includes(groupFilter.toLowerCase()) ) - .sort((a, b) => a.name > b.name) + .sort((a, b) => a.name.localeCompare(b.name)) .map((group) => ( - + + {/* Group Enable/Disable Button */} + + + {/* Auto Sync Checkbox */} + toggleAutoSync(group.channel_group)} + size="sm" + /> + + {/* Channel Start Number Input */} + updateChannelStart(group.channel_group, value)} + disabled={!group.enabled || !group.auto_channel_sync} + min={1} + step={1} + size="sm" + style={{ width: '120px' }} + precision={1} + /> + ))} - + + - - - {groupStates - .filter((group) => - group.name.toLowerCase().includes(groupFilter.toLowerCase()) - ) - .sort((a, b) => a.name.localeCompare(b.name)) - .map((group) => ( - - {/* Group Enable/Disable Button */} - + + + {groupStates + .filter((group) => + group.name.toLowerCase().includes(groupFilter.toLowerCase()) + ) + .sort((a, b) => a.name.localeCompare(b.name)) + .map((group) => ( + + {/* Group Enable/Disable Button */} + - {/* Auto Sync Checkbox */} - toggleAutoSync(group.channel_group)} - size="sm" - /> + {/* Auto Sync Controls */} + + toggleAutoSync(group.channel_group)} + size="xs" + /> - {/* Channel Start Number Input */} - updateChannelStart(group.channel_group, value)} - disabled={!group.enabled || !group.auto_channel_sync} - min={1} - step={1} - size="sm" - style={{ width: '120px' }} - precision={1} - /> - - ))} - + {group.auto_channel_sync && group.enabled && ( + updateChannelStart(group.channel_group, value)} + min={1} + step={1} + size="xs" + precision={1} + /> + )} + +
+ ))} + + - + + + + +
+ ); +}; + +export default LogoForm; diff --git a/frontend/src/pages/Logos.jsx b/frontend/src/pages/Logos.jsx new file mode 100644 index 00000000..7ca879f6 --- /dev/null +++ b/frontend/src/pages/Logos.jsx @@ -0,0 +1,223 @@ +import React, { useState, useEffect } from 'react'; +import { + Container, + Title, + Button, + Table, + Group, + ActionIcon, + Text, + Image, + Box, + Center, + Stack, + Badge, +} from '@mantine/core'; +import { SquarePen, Trash2, Plus, ExternalLink } from 'lucide-react'; +import { notifications } from '@mantine/notifications'; +import useChannelsStore from '../store/channels'; +import API from '../api'; +import LogoForm from '../components/forms/Logo'; +import ConfirmationDialog from '../components/ConfirmationDialog'; + +const LogosPage = () => { + const { logos, fetchLogos } = useChannelsStore(); + const [logoFormOpen, setLogoFormOpen] = useState(false); + const [editingLogo, setEditingLogo] = useState(null); + const [deleteConfirmOpen, setDeleteConfirmOpen] = useState(false); + const [logoToDelete, setLogoToDelete] = useState(null); + const [loading, setLoading] = useState(true); + + useEffect(() => { + loadLogos(); + }, []); + + const loadLogos = async () => { + setLoading(true); + try { + await fetchLogos(); + } catch (error) { + notifications.show({ + title: 'Error', + message: 'Failed to load logos', + color: 'red', + }); + } finally { + setLoading(false); + } + }; + + const handleCreateLogo = () => { + setEditingLogo(null); + setLogoFormOpen(true); + }; + + const handleEditLogo = (logo) => { + setEditingLogo(logo); + setLogoFormOpen(true); + }; + + const handleDeleteLogo = (logo) => { + setLogoToDelete(logo); + setDeleteConfirmOpen(true); + }; + + const confirmDeleteLogo = async () => { + if (!logoToDelete) return; + + try { + await API.deleteLogo(logoToDelete.id); + await fetchLogos(); + notifications.show({ + title: 'Success', + message: 'Logo deleted successfully', + color: 'green', + }); + } catch (error) { + notifications.show({ + title: 'Error', + message: 'Failed to delete logo', + color: 'red', + }); + } finally { + setDeleteConfirmOpen(false); + setLogoToDelete(null); + } + }; + + const handleFormClose = () => { + setLogoFormOpen(false); + setEditingLogo(null); + loadLogos(); // Refresh the logos list + }; + + const logosArray = Object.values(logos || {}); + + const rows = logosArray.map((logo) => ( + + +
+ {logo.name} +
+
+ + {logo.name} + + + + + {logo.url} + + {logo.url.startsWith('http') && ( + window.open(logo.url, '_blank')} + > + + + )} + + + + + handleEditLogo(logo)} + color="blue" + > + + + handleDeleteLogo(logo)} + color="red" + > + + + + +
+ )); + + return ( + <> + + + Logos + + + + {loading ? ( +
+ Loading logos... +
+ ) : logosArray.length === 0 ? ( +
+ + No logos found + Click "Add Logo" to create your first logo + +
+ ) : ( + + + Total: {logosArray.length} logo{logosArray.length !== 1 ? 's' : ''} + + + + + + Preview + Name + URL + Actions + + + {rows} +
+
+ )} +
+ + + + setDeleteConfirmOpen(false)} + onConfirm={confirmDeleteLogo} + title="Delete Logo" + message={ + logoToDelete ? ( +
+ Are you sure you want to delete the logo "{logoToDelete.name}"? +
+ + This action cannot be undone. + +
+ ) : ( + 'Are you sure you want to delete this logo?' + ) + } + confirmLabel="Delete" + cancelLabel="Cancel" + /> + + ); +}; + +export default LogosPage; diff --git a/frontend/src/store/channels.jsx b/frontend/src/store/channels.jsx index 40791cf4..a4c61149 100644 --- a/frontend/src/store/channels.jsx +++ b/frontend/src/store/channels.jsx @@ -21,7 +21,7 @@ const useChannelsStore = create((set, get) => ({ forceUpdate: 0, triggerUpdate: () => { - set({ forecUpdate: new Date() }); + set({ forceUpdate: new Date() }); }, fetchChannels: async () => { @@ -255,6 +255,24 @@ const useChannelsStore = create((set, get) => ({ }, })), + updateLogo: (logo) => + set((state) => ({ + logos: { + ...state.logos, + [logo.id]: { + ...logo, + url: logo.url.replace(/^\/data/, ''), + }, + }, + })), + + removeLogo: (logoId) => + set((state) => { + const newLogos = { ...state.logos }; + delete newLogos[logoId]; + return { logos: newLogos }; + }), + addProfile: (profile) => set((state) => ({ profiles: { From cea078f6ef5b20cbbb8c0fd6991964ca76527bba Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Tue, 15 Jul 2025 18:37:22 -0500 Subject: [PATCH 037/857] Use default user-agent and adjust timeouts. --- apps/channels/api_views.py | 23 +++++++++++++++++------ 1 file changed, 17 insertions(+), 6 deletions(-) diff --git a/apps/channels/api_views.py b/apps/channels/api_views.py index 96b7362f..310fccbb 100644 --- a/apps/channels/api_views.py +++ b/apps/channels/api_views.py @@ -17,6 +17,8 @@ from apps.accounts.permissions import ( permission_classes_by_method, ) +from core.models import UserAgent, CoreSettings + from .models import ( Stream, Channel, @@ -1053,14 +1055,14 @@ class LogoViewSet(viewsets.ModelViewSet): def destroy(self, request, *args, **kwargs): """Delete a logo""" logo = self.get_object() - + # Check if logo is being used by any channels if logo.channels.exists(): return Response( {"error": f"Cannot delete logo as it is used by {logo.channels.count()} channel(s)"}, status=status.HTTP_400_BAD_REQUEST ) - + return super().destroy(request, *args, **kwargs) @action(detail=False, methods=["post"]) @@ -1117,12 +1119,21 @@ class LogoViewSet(viewsets.ModelViewSet): else: # Remote image try: + # Get the default user agent + try: + default_user_agent_id = CoreSettings.get_default_user_agent_id() + user_agent_obj = UserAgent.objects.get(id=int(default_user_agent_id)) + user_agent = user_agent_obj.user_agent + except (CoreSettings.DoesNotExist, UserAgent.DoesNotExist, ValueError): + # Fallback to hardcoded if default not found + user_agent = 'Dispatcharr/1.0' + # Add proper timeouts to prevent hanging remote_response = requests.get( - logo_url, - stream=True, - timeout=(10, 30), # (connect_timeout, read_timeout) - headers={'User-Agent': 'Dispatcharr/1.0'} + logo_url, + stream=True, + timeout=(3, 5), # (connect_timeout, read_timeout) + headers={'User-Agent': user_agent} ) if remote_response.status_code == 200: # Try to get content type from response headers first From 6afd5a38c9429459c12c757eef46e3f0b6c96527 Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Tue, 15 Jul 2025 18:44:53 -0500 Subject: [PATCH 038/857] Add timeouts to logo fetching to avoid hanging UI if a logo is unreachable. Also add default user-agent to request to prevent servers from denying request. 
Fixes #217 and Fixes #101 --- apps/channels/api_views.py | 55 +++++++++++++++++++++++++++++++++++--- 1 file changed, 52 insertions(+), 3 deletions(-) diff --git a/apps/channels/api_views.py b/apps/channels/api_views.py index f0f59f29..310fccbb 100644 --- a/apps/channels/api_views.py +++ b/apps/channels/api_views.py @@ -17,6 +17,8 @@ from apps.accounts.permissions import ( permission_classes_by_method, ) +from core.models import UserAgent, CoreSettings + from .models import ( Stream, Channel, @@ -1038,6 +1040,31 @@ class LogoViewSet(viewsets.ModelViewSet): except KeyError: return [Authenticated()] + def create(self, request, *args, **kwargs): + """Create a new logo entry""" + serializer = self.get_serializer(data=request.data) + if serializer.is_valid(): + logo = serializer.save() + return Response(self.get_serializer(logo).data, status=status.HTTP_201_CREATED) + return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) + + def update(self, request, *args, **kwargs): + """Update an existing logo""" + return super().update(request, *args, **kwargs) + + def destroy(self, request, *args, **kwargs): + """Delete a logo""" + logo = self.get_object() + + # Check if logo is being used by any channels + if logo.channels.exists(): + return Response( + {"error": f"Cannot delete logo as it is used by {logo.channels.count()} channel(s)"}, + status=status.HTTP_400_BAD_REQUEST + ) + + return super().destroy(request, *args, **kwargs) + @action(detail=False, methods=["post"]) def upload(self, request): if "file" not in request.FILES: @@ -1062,7 +1089,7 @@ class LogoViewSet(viewsets.ModelViewSet): ) return Response( - {"id": logo.id, "name": logo.name, "url": logo.url}, + LogoSerializer(logo, context={'request': request}).data, status=status.HTTP_201_CREATED, ) @@ -1092,7 +1119,22 @@ class LogoViewSet(viewsets.ModelViewSet): else: # Remote image try: - remote_response = requests.get(logo_url, stream=True) + # Get the default user agent + try: + default_user_agent_id = CoreSettings.get_default_user_agent_id() + user_agent_obj = UserAgent.objects.get(id=int(default_user_agent_id)) + user_agent = user_agent_obj.user_agent + except (CoreSettings.DoesNotExist, UserAgent.DoesNotExist, ValueError): + # Fallback to hardcoded if default not found + user_agent = 'Dispatcharr/1.0' + + # Add proper timeouts to prevent hanging + remote_response = requests.get( + logo_url, + stream=True, + timeout=(3, 5), # (connect_timeout, read_timeout) + headers={'User-Agent': user_agent} + ) if remote_response.status_code == 200: # Try to get content type from response headers first content_type = remote_response.headers.get("Content-Type") @@ -1114,7 +1156,14 @@ class LogoViewSet(viewsets.ModelViewSet): ) return response raise Http404("Remote image not found") - except requests.RequestException: + except requests.exceptions.Timeout: + logger.warning(f"Timeout fetching logo from {logo_url}") + raise Http404("Logo request timed out") + except requests.exceptions.ConnectionError: + logger.warning(f"Connection error fetching logo from {logo_url}") + raise Http404("Unable to connect to logo server") + except requests.RequestException as e: + logger.warning(f"Error fetching logo from {logo_url}: {e}") raise Http404("Error fetching remote image") From 2bba31940d1ac4927827f7a97f994c59291f4ceb Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Tue, 15 Jul 2025 20:02:21 -0500 Subject: [PATCH 039/857] Use our custom table for displaying logos --- frontend/src/components/tables/LogosTable.jsx | 356 ++++++++++++++++++ 
frontend/src/pages/Logos.jsx | 204 +--------- 2 files changed, 363 insertions(+), 197 deletions(-) create mode 100644 frontend/src/components/tables/LogosTable.jsx diff --git a/frontend/src/components/tables/LogosTable.jsx b/frontend/src/components/tables/LogosTable.jsx new file mode 100644 index 00000000..df6605d1 --- /dev/null +++ b/frontend/src/components/tables/LogosTable.jsx @@ -0,0 +1,356 @@ +import React, { useMemo, useCallback, useState } from 'react'; +import API from '../../api'; +import LogoForm from '../forms/Logo'; +import useChannelsStore from '../../store/channels'; +import useLocalStorage from '../../hooks/useLocalStorage'; +import { + SquarePlus, + SquareMinus, + SquarePen, + ExternalLink, +} from 'lucide-react'; +import { + ActionIcon, + Box, + Text, + Paper, + Button, + Flex, + Group, + useMantineTheme, + LoadingOverlay, + Stack, + Image, + Center, +} from '@mantine/core'; +import { CustomTable, useTable } from './CustomTable'; +import ConfirmationDialog from '../ConfirmationDialog'; +import { notifications } from '@mantine/notifications'; + +const LogoRowActions = ({ theme, row, editLogo, deleteLogo }) => { + const [tableSize, _] = useLocalStorage('table-size', 'default'); + + const onEdit = useCallback(() => { + editLogo(row.original); + }, [row.original, editLogo]); + + const onDelete = useCallback(() => { + deleteLogo(row.original.id); + }, [row.original.id, deleteLogo]); + + const iconSize = + tableSize == 'default' ? 'sm' : tableSize == 'compact' ? 'xs' : 'md'; + + return ( + + + + + + + + + + + + ); +}; + +const LogosTable = () => { + const theme = useMantineTheme(); + + /** + * STORES + */ + const { logos, fetchLogos } = useChannelsStore(); + + /** + * useState + */ + const [selectedLogo, setSelectedLogo] = useState(null); + const [logoModalOpen, setLogoModalOpen] = useState(false); + const [confirmDeleteOpen, setConfirmDeleteOpen] = useState(false); + const [deleteTarget, setDeleteTarget] = useState(null); + const [logoToDelete, setLogoToDelete] = useState(null); + const [isLoading, setIsLoading] = useState(false); + + /** + * Functions + */ + const executeDeleteLogo = useCallback(async (id) => { + setIsLoading(true); + try { + await API.deleteLogo(id); + await fetchLogos(); + notifications.show({ + title: 'Success', + message: 'Logo deleted successfully', + color: 'green', + }); + } catch (error) { + notifications.show({ + title: 'Error', + message: 'Failed to delete logo', + color: 'red', + }); + } finally { + setIsLoading(false); + setConfirmDeleteOpen(false); + } + }, [fetchLogos]); + + const editLogo = useCallback(async (logo = null) => { + setSelectedLogo(logo); + setLogoModalOpen(true); + }, []); + + const deleteLogo = useCallback(async (id) => { + const logosArray = Object.values(logos || {}); + const logo = logosArray.find((l) => l.id === id); + setLogoToDelete(logo); + setDeleteTarget(id); + setConfirmDeleteOpen(true); + }, [logos]); + + /** + * useMemo + */ + const columns = useMemo( + () => [ + { + header: 'Preview', + accessorKey: 'cache_url', + size: 80, + enableSorting: false, + cell: ({ getValue, row }) => ( +
+ {row.original.name} +
+ ), + }, + { + header: 'Name', + accessorKey: 'name', + size: 200, + cell: ({ getValue }) => ( + + {getValue()} + + ), + }, + { + header: 'URL', + accessorKey: 'url', + cell: ({ getValue }) => ( + + + + {getValue()} + + + {getValue()?.startsWith('http') && ( + window.open(getValue(), '_blank')} + > + + + )} + + ), + }, + { + id: 'actions', + size: 80, + header: 'Actions', + enableSorting: false, + cell: ({ row }) => ( + + ), + }, + ], + [theme, editLogo, deleteLogo] + ); + + const closeLogoForm = () => { + setSelectedLogo(null); + setLogoModalOpen(false); + fetchLogos(); // Refresh the logos list + }; + + const data = useMemo(() => { + const logosArray = Object.values(logos || {}); + return logosArray.sort((a, b) => a.id - b.id); + }, [logos]); + + const renderHeaderCell = (header) => { + return ( + + {header.column.columnDef.header} + + ); + }; + + const table = useTable({ + columns, + data, + allRowIds: data.map((logo) => logo.id), + enablePagination: false, + enableRowSelection: false, + enableRowVirtualization: false, + renderTopToolbar: false, + manualSorting: false, + manualFiltering: false, + manualPagination: false, + headerCellRenderFns: { + actions: renderHeaderCell, + cache_url: renderHeaderCell, + name: renderHeaderCell, + url: renderHeaderCell, + }, + }); + + return ( + <> + + + + + Logos + + + ({data.length} logo{data.length !== 1 ? 's' : ''}) + + + + + {/* Top toolbar */} + + + + + {/* Table container */} + +
+ + +
+
+
+
+
+ + + + setConfirmDeleteOpen(false)} + onConfirm={() => executeDeleteLogo(deleteTarget)} + title="Delete Logo" + message={ + logoToDelete ? ( +
+ Are you sure you want to delete the logo "{logoToDelete.name}"? +
+ + This action cannot be undone. + +
+ ) : ( + 'Are you sure you want to delete this logo?' + ) + } + confirmLabel="Delete" + cancelLabel="Cancel" + size="md" + /> + + ); +}; + +export default LogosTable; diff --git a/frontend/src/pages/Logos.jsx b/frontend/src/pages/Logos.jsx index 7ca879f6..ee26c51e 100644 --- a/frontend/src/pages/Logos.jsx +++ b/frontend/src/pages/Logos.jsx @@ -1,39 +1,17 @@ -import React, { useState, useEffect } from 'react'; -import { - Container, - Title, - Button, - Table, - Group, - ActionIcon, - Text, - Image, - Box, - Center, - Stack, - Badge, -} from '@mantine/core'; -import { SquarePen, Trash2, Plus, ExternalLink } from 'lucide-react'; +import React, { useEffect } from 'react'; +import { Box } from '@mantine/core'; import { notifications } from '@mantine/notifications'; import useChannelsStore from '../store/channels'; -import API from '../api'; -import LogoForm from '../components/forms/Logo'; -import ConfirmationDialog from '../components/ConfirmationDialog'; +import LogosTable from '../components/tables/LogosTable'; const LogosPage = () => { - const { logos, fetchLogos } = useChannelsStore(); - const [logoFormOpen, setLogoFormOpen] = useState(false); - const [editingLogo, setEditingLogo] = useState(null); - const [deleteConfirmOpen, setDeleteConfirmOpen] = useState(false); - const [logoToDelete, setLogoToDelete] = useState(null); - const [loading, setLoading] = useState(true); + const { fetchLogos } = useChannelsStore(); useEffect(() => { loadLogos(); }, []); const loadLogos = async () => { - setLoading(true); try { await fetchLogos(); } catch (error) { @@ -42,181 +20,13 @@ const LogosPage = () => { message: 'Failed to load logos', color: 'red', }); - } finally { - setLoading(false); } }; - const handleCreateLogo = () => { - setEditingLogo(null); - setLogoFormOpen(true); - }; - - const handleEditLogo = (logo) => { - setEditingLogo(logo); - setLogoFormOpen(true); - }; - - const handleDeleteLogo = (logo) => { - setLogoToDelete(logo); - setDeleteConfirmOpen(true); - }; - - const confirmDeleteLogo = async () => { - if (!logoToDelete) return; - - try { - await API.deleteLogo(logoToDelete.id); - await fetchLogos(); - notifications.show({ - title: 'Success', - message: 'Logo deleted successfully', - color: 'green', - }); - } catch (error) { - notifications.show({ - title: 'Error', - message: 'Failed to delete logo', - color: 'red', - }); - } finally { - setDeleteConfirmOpen(false); - setLogoToDelete(null); - } - }; - - const handleFormClose = () => { - setLogoFormOpen(false); - setEditingLogo(null); - loadLogos(); // Refresh the logos list - }; - - const logosArray = Object.values(logos || {}); - - const rows = logosArray.map((logo) => ( - - -
- {logo.name} -
-
- - {logo.name} - - - - - {logo.url} - - {logo.url.startsWith('http') && ( - window.open(logo.url, '_blank')} - > - - - )} - - - - - handleEditLogo(logo)} - color="blue" - > - - - handleDeleteLogo(logo)} - color="red" - > - - - - -
- )); - return ( - <> - - - Logos - - - - {loading ? ( -
- Loading logos... -
- ) : logosArray.length === 0 ? ( -
- - No logos found - Click "Add Logo" to create your first logo - -
- ) : ( - - - Total: {logosArray.length} logo{logosArray.length !== 1 ? 's' : ''} - - - - - - Preview - Name - URL - Actions - - - {rows} -
-
- )} -
- - - - setDeleteConfirmOpen(false)} - onConfirm={confirmDeleteLogo} - title="Delete Logo" - message={ - logoToDelete ? ( -
- Are you sure you want to delete the logo "{logoToDelete.name}"? -
- - This action cannot be undone. - -
- ) : ( - 'Are you sure you want to delete this logo?' - ) - } - confirmLabel="Delete" - cancelLabel="Cancel" - /> - + + + ); }; From 500df533bbe3ca68ab824ec5fc8f477a25a93086 Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Tue, 15 Jul 2025 20:12:25 -0500 Subject: [PATCH 040/857] Center logos in the column. --- frontend/src/components/tables/LogosTable.jsx | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/frontend/src/components/tables/LogosTable.jsx b/frontend/src/components/tables/LogosTable.jsx index df6605d1..257bb17f 100644 --- a/frontend/src/components/tables/LogosTable.jsx +++ b/frontend/src/components/tables/LogosTable.jsx @@ -133,7 +133,7 @@ const LogosTable = () => { size: 80, enableSorting: false, cell: ({ getValue, row }) => ( -
+
{row.original.name} Date: Tue, 15 Jul 2025 20:14:34 -0500 Subject: [PATCH 041/857] Add padding to logos. --- frontend/src/components/tables/LogosTable.jsx | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/frontend/src/components/tables/LogosTable.jsx b/frontend/src/components/tables/LogosTable.jsx index 257bb17f..9138aeb7 100644 --- a/frontend/src/components/tables/LogosTable.jsx +++ b/frontend/src/components/tables/LogosTable.jsx @@ -133,7 +133,7 @@ const LogosTable = () => { size: 80, enableSorting: false, cell: ({ getValue, row }) => ( -
+
{row.original.name} Date: Tue, 15 Jul 2025 20:26:02 -0500 Subject: [PATCH 042/857] Enhance Logo management with filtering and usage details in API and UI --- apps/channels/api_views.py | 18 +++ apps/channels/serializers.py | 21 +++- frontend/src/api.js | 15 ++- frontend/src/components/tables/LogosTable.jsx | 108 +++++++++++++++++- 4 files changed, 155 insertions(+), 7 deletions(-) diff --git a/apps/channels/api_views.py b/apps/channels/api_views.py index 310fccbb..97d0b074 100644 --- a/apps/channels/api_views.py +++ b/apps/channels/api_views.py @@ -1040,6 +1040,24 @@ class LogoViewSet(viewsets.ModelViewSet): except KeyError: return [Authenticated()] + def get_queryset(self): + """Optimize queryset with prefetch and add filtering""" + queryset = Logo.objects.prefetch_related('channels').order_by('name') + + # Filter by usage + used_filter = self.request.query_params.get('used', None) + if used_filter == 'true': + queryset = queryset.filter(channels__isnull=False).distinct() + elif used_filter == 'false': + queryset = queryset.filter(channels__isnull=True) + + # Filter by name + name_filter = self.request.query_params.get('name', None) + if name_filter: + queryset = queryset.filter(name__icontains=name_filter) + + return queryset + def create(self, request, *args, **kwargs): """Create a new logo entry""" serializer = self.get_serializer(data=request.data) diff --git a/apps/channels/serializers.py b/apps/channels/serializers.py index 278399dd..3346495e 100644 --- a/apps/channels/serializers.py +++ b/apps/channels/serializers.py @@ -20,10 +20,13 @@ from django.utils import timezone class LogoSerializer(serializers.ModelSerializer): cache_url = serializers.SerializerMethodField() + channel_count = serializers.SerializerMethodField() + is_used = serializers.SerializerMethodField() + channel_names = serializers.SerializerMethodField() class Meta: model = Logo - fields = ["id", "name", "url", "cache_url"] + fields = ["id", "name", "url", "cache_url", "channel_count", "is_used", "channel_names"] def get_cache_url(self, obj): # return f"/api/channels/logos/{obj.id}/cache/" @@ -34,6 +37,22 @@ class LogoSerializer(serializers.ModelSerializer): ) return reverse("api:channels:logo-cache", args=[obj.id]) + def get_channel_count(self, obj): + """Get the number of channels using this logo""" + return obj.channels.count() + + def get_is_used(self, obj): + """Check if this logo is used by any channels""" + return obj.channels.exists() + + def get_channel_names(self, obj): + """Get the names of channels using this logo (limited to first 5)""" + channels = obj.channels.all()[:5] + names = [channel.name for channel in channels] + if obj.channels.count() > 5: + names.append(f"...and {obj.channels.count() - 5} more") + return names + # # Stream diff --git a/frontend/src/api.js b/frontend/src/api.js index cbd8950a..3263eaf5 100644 --- a/frontend/src/api.js +++ b/frontend/src/api.js @@ -1225,9 +1225,10 @@ export default class API { } } - static async getLogos() { + static async getLogos(params = {}) { try { - const response = await request(`${host}/api/channels/logos/`); + const queryParams = new URLSearchParams(params); + const response = await request(`${host}/api/channels/logos/?${queryParams.toString()}`); return response; } catch (e) { @@ -1235,6 +1236,16 @@ export default class API { } } + static async fetchLogos() { + try { + const response = await this.getLogos(); + useChannelsStore.getState().setLogos(response); + return response; + } catch (e) { + errorNotification('Failed to fetch logos', e); + } + } + 
static async uploadLogo(file) { try { const formData = new FormData(); diff --git a/frontend/src/components/tables/LogosTable.jsx b/frontend/src/components/tables/LogosTable.jsx index 9138aeb7..872b9dca 100644 --- a/frontend/src/components/tables/LogosTable.jsx +++ b/frontend/src/components/tables/LogosTable.jsx @@ -8,6 +8,7 @@ import { SquareMinus, SquarePen, ExternalLink, + Filter, } from 'lucide-react'; import { ActionIcon, @@ -22,6 +23,11 @@ import { Stack, Image, Center, + Badge, + Tooltip, + Select, + TextInput, + Menu, } from '@mantine/core'; import { CustomTable, useTable } from './CustomTable'; import ConfirmationDialog from '../ConfirmationDialog'; @@ -83,6 +89,10 @@ const LogosTable = () => { const [deleteTarget, setDeleteTarget] = useState(null); const [logoToDelete, setLogoToDelete] = useState(null); const [isLoading, setIsLoading] = useState(false); + const [filters, setFilters] = useState({ + name: '', + used: 'all' + }); /** * Functions @@ -155,6 +165,42 @@ const LogosTable = () => { ), }, + { + header: 'Usage', + accessorKey: 'channel_count', + size: 120, + cell: ({ getValue, row }) => { + const count = getValue(); + const channelNames = row.original.channel_names || []; + + if (count === 0) { + return ( + + Unused + + ); + } + + return ( + + Used by {count} channel{count !== 1 ? 's' : ''}: + {channelNames.map((name, index) => ( + • {name} + ))} + + } + multiline + width={220} + > + + {count} channel{count !== 1 ? 's' : ''} + + + ); + }, + }, { header: 'URL', accessorKey: 'url', @@ -211,8 +257,24 @@ const LogosTable = () => { const data = useMemo(() => { const logosArray = Object.values(logos || {}); - return logosArray.sort((a, b) => a.id - b.id); - }, [logos]); + + // Apply filters + let filteredLogos = logosArray; + + if (filters.name) { + filteredLogos = filteredLogos.filter(logo => + logo.name.toLowerCase().includes(filters.name.toLowerCase()) + ); + } + + if (filters.used === 'used') { + filteredLogos = filteredLogos.filter(logo => logo.is_used); + } else if (filters.used === 'unused') { + filteredLogos = filteredLogos.filter(logo => !logo.is_used); + } + + return filteredLogos.sort((a, b) => a.id - b.id); + }, [logos, filters]); const renderHeaderCell = (header) => { return ( @@ -238,6 +300,7 @@ const LogosTable = () => { cache_url: renderHeaderCell, name: renderHeaderCell, url: renderHeaderCell, + channel_count: renderHeaderCell, }, }); @@ -282,11 +345,44 @@ const LogosTable = () => { + + + setFilters(prev => ({ + ...prev, + name: event.currentTarget.value + })) + } + size="xs" + style={{ width: 200 }} + /> + { + const newValue = value ? 
parseInt(value) : null; + setGroupStates( + groupStates.map((state) => ({ + ...state, + custom_properties: { + ...state.custom_properties, + group_override: newValue, + }, + })) + ); + }} + data={Object.values(channelGroups).map((g) => ({ + value: g.id.toString(), + label: g.name, + }))} + clearable + searchable + size="xs" + /> )} diff --git a/frontend/src/components/tables/ChannelsTable.jsx b/frontend/src/components/tables/ChannelsTable.jsx index 077602ad..3d34ca55 100644 --- a/frontend/src/components/tables/ChannelsTable.jsx +++ b/frontend/src/components/tables/ChannelsTable.jsx @@ -288,7 +288,8 @@ const ChannelsTable = ({ }) => { const [isLoading, setIsLoading] = useState(true); const [hdhrUrl, setHDHRUrl] = useState(hdhrUrlBase); - const [epgUrl, setEPGUrl] = useState(epgUrlBase); const [m3uUrl, setM3UUrl] = useState(m3uUrlBase); + const [epgUrl, setEPGUrl] = useState(epgUrlBase); + const [m3uUrl, setM3UUrl] = useState(m3uUrlBase); const [confirmDeleteOpen, setConfirmDeleteOpen] = useState(false); const [deleteTarget, setDeleteTarget] = useState(null); @@ -308,7 +309,7 @@ const ChannelsTable = ({ }) => { }); /** - * Dereived variables + * Derived variables */ const activeGroupIds = new Set( Object.values(channels).map((channel) => channel.channel_group_id) From b406a3b504fda3aa588e605ff05a464568e1ccfe Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Thu, 17 Jul 2025 19:02:03 -0500 Subject: [PATCH 050/857] Move force dummy epg to top. --- .../src/components/forms/M3UGroupFilter.jsx | 117 ++++++++++++------ 1 file changed, 76 insertions(+), 41 deletions(-) diff --git a/frontend/src/components/forms/M3UGroupFilter.jsx b/frontend/src/components/forms/M3UGroupFilter.jsx index f94f35ef..315a6424 100644 --- a/frontend/src/components/forms/M3UGroupFilter.jsx +++ b/frontend/src/components/forms/M3UGroupFilter.jsx @@ -254,13 +254,23 @@ const M3UGroupFilter = ({ playlist = null, isOpen, onClose }) => { {/* Auto Sync Controls */} - toggleAutoSync(group.channel_group)} - size="xs" - /> + + toggleAutoSync(group.channel_group)} + size="xs" + /> + {group.auto_channel_sync && group.enabled && ( + toggleForceDummyEPG(group.channel_group)} + size="xs" + /> + )} + {group.auto_channel_sync && group.enabled && ( <> @@ -274,39 +284,64 @@ const M3UGroupFilter = ({ playlist = null, isOpen, onClose }) => { precision={1} /> - {/* Force Dummy EPG Checkbox */} - toggleForceDummyEPG(group.channel_group)} - size="xs" - /> - - {/* Override Channel Group Select */} - { + const newValue = value ? parseInt(value) : null; + setGroupStates( + groupStates.map((state) => { + if (state.channel_group == group.channel_group) { + return { + ...state, + custom_properties: { + ...state.custom_properties, + group_override: newValue, + }, + }; + } + return state; + }) + ); + }} + data={Object.values(channelGroups).map((g) => ({ + value: g.id.toString(), + label: g.name, + }))} + disabled={!(group.custom_properties && Object.prototype.hasOwnProperty.call(group.custom_properties, 'group_override'))} + clearable + searchable + size="xs" + style={{ flex: 1 }} + /> + )} @@ -334,4 +369,4 @@ const M3UGroupFilter = ({ playlist = null, isOpen, onClose }) => { ); }; -export default M3UGroupFilter; +export default M3UGroupFilter; \ No newline at end of file From f40e9fb9be1458e56881828edce746a6e200a38f Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Thu, 17 Jul 2025 19:24:27 -0500 Subject: [PATCH 051/857] Update playlist store when auto sync settings change. 
--- frontend/src/api.js | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/frontend/src/api.js b/frontend/src/api.js index e34dabe2..5812a4b9 100644 --- a/frontend/src/api.js +++ b/frontend/src/api.js @@ -739,7 +739,9 @@ export default class API { method: 'PATCH', body: { group_settings: groupSettings }, }); - + // Fetch the updated playlist and update the store + const updatedPlaylist = await API.getPlaylist(playlistId); + usePlaylistsStore.getState().updatePlaylist(updatedPlaylist); return response; } catch (e) { errorNotification('Failed to update M3U group settings', e); @@ -781,7 +783,6 @@ export default class API { const response = await request(`${host}/api/m3u/refresh/${id}/`, { method: 'POST', }); - return response; } catch (e) { errorNotification('Failed to refresh M3U account', e); From cebc4c8ca931967f814bd6d1fee3a66f548801b3 Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Thu, 17 Jul 2025 20:32:24 -0500 Subject: [PATCH 052/857] Add pagination. --- frontend/src/components/tables/LogosTable.jsx | 120 ++++++++++++++++-- 1 file changed, 111 insertions(+), 9 deletions(-) diff --git a/frontend/src/components/tables/LogosTable.jsx b/frontend/src/components/tables/LogosTable.jsx index 61a325c2..64822cb5 100644 --- a/frontend/src/components/tables/LogosTable.jsx +++ b/frontend/src/components/tables/LogosTable.jsx @@ -31,6 +31,8 @@ import { TextInput, Menu, Checkbox, + Pagination, + NativeSelect, } from '@mantine/core'; import { CustomTable, useTable } from './CustomTable'; import ConfirmationDialog from '../ConfirmationDialog'; @@ -101,6 +103,12 @@ const LogosTable = () => { }); const [debouncedNameFilter, setDebouncedNameFilter] = useState(''); const [selectedRows, setSelectedRows] = useState(new Set()); + const [pageSize, setPageSize] = useLocalStorage('logos-page-size', 25); + const [pagination, setPagination] = useState({ + pageIndex: 0, + pageSize: pageSize, + }); + const [paginationString, setPaginationString] = useState(''); // Debounce the name filter useEffect(() => { @@ -132,6 +140,13 @@ const LogosTable = () => { return filteredLogos.sort((a, b) => a.id - b.id); }, [logos, debouncedNameFilter, filters.used]); + // Get paginated data + const paginatedData = useMemo(() => { + const startIndex = pagination.pageIndex * pagination.pageSize; + const endIndex = startIndex + pagination.pageSize; + return data.slice(startIndex, endIndex); + }, [data, pagination.pageIndex, pagination.pageSize]); + // Calculate unused logos count const unusedLogosCount = useMemo(() => { const allLogos = Object.values(logos || {}); @@ -270,6 +285,29 @@ const LogosTable = () => { setSelectedRows(new Set()); }, [data.length]); + // Update pagination when pageSize changes + useEffect(() => { + setPagination(prev => ({ + ...prev, + pageSize: pageSize, + })); + }, [pageSize]); + + // Calculate pagination string + useEffect(() => { + const startItem = pagination.pageIndex * pagination.pageSize + 1; + const endItem = Math.min( + (pagination.pageIndex + 1) * pagination.pageSize, + data.length + ); + setPaginationString(`${startItem} to ${endItem} of ${data.length}`); + }, [pagination.pageIndex, pagination.pageSize, data.length]); + + // Calculate page count + const pageCount = useMemo(() => { + return Math.ceil(data.length / pagination.pageSize); + }, [data.length, pagination.pageSize]); + /** * useMemo */ @@ -425,17 +463,38 @@ const LogosTable = () => { setSelectedRows(new Set(newSelection)); }, []); + const onPageSizeChange = (e) => { + const newPageSize = parseInt(e.target.value); + 
setPageSize(newPageSize); + setPagination(prev => ({ + ...prev, + pageSize: newPageSize, + pageIndex: 0, // Reset to first page + })); + }; + + const onPageIndexChange = (pageIndex) => { + if (!pageIndex || pageIndex > pageCount) { + return; + } + + setPagination(prev => ({ + ...prev, + pageIndex: pageIndex - 1, + })); + }; + const table = useTable({ columns, - data, - allRowIds: data.map((logo) => logo.id), - enablePagination: false, + data: paginatedData, + allRowIds: paginatedData.map((logo) => logo.id), + enablePagination: false, // Disable internal pagination since we're handling it manually enableRowSelection: true, enableRowVirtualization: false, renderTopToolbar: false, manualSorting: false, manualFiltering: false, - manualPagination: false, + manualPagination: true, // Enable manual pagination onRowSelectionChange: onRowSelectionChange, headerCellRenderFns: { actions: renderHeaderCell, @@ -571,14 +630,57 @@ const LogosTable = () => { -
- - -
+ +
+ + +
+
+ + {/* Pagination Controls */} + + + Page Size + + + {paginationString} + +
From 05539794e3578101e84a2f643ebed69e0af7a12b Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Thu, 17 Jul 2025 20:45:04 -0500 Subject: [PATCH 053/857] Set better sizing. --- frontend/src/components/tables/LogosTable.jsx | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/frontend/src/components/tables/LogosTable.jsx b/frontend/src/components/tables/LogosTable.jsx index 64822cb5..477da0cf 100644 --- a/frontend/src/components/tables/LogosTable.jsx +++ b/frontend/src/components/tables/LogosTable.jsx @@ -512,7 +512,8 @@ const LogosTable = () => { display: 'flex', justifyContent: 'center', padding: '0px', - minHeight: '100vh', + minHeight: 'calc(100vh - 200px)', + minWidth: '900px', }} > @@ -636,10 +637,10 @@ const LogosTable = () => { -
+
From bd1831e226b7443cf4ac4c23e8e3e6a3b67c1d6c Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Thu, 17 Jul 2025 20:49:05 -0500 Subject: [PATCH 054/857] Fix edits not saving --- frontend/src/api.js | 16 +++++++++++++--- 1 file changed, 13 insertions(+), 3 deletions(-) diff --git a/frontend/src/api.js b/frontend/src/api.js index 63c193ba..967e462b 100644 --- a/frontend/src/api.js +++ b/frontend/src/api.js @@ -209,10 +209,10 @@ export default class API { API.getAllChannelIds(API.lastQueryParams), ]); - useChannelsTableStore + useChannelsTable .getState() .queryChannels(response, API.lastQueryParams); - useChannelsTableStore.getState().setAllQueryIds(ids); + useChannelsTable.getState().setAllQueryIds(ids); return response; } catch (e) { @@ -1282,9 +1282,19 @@ export default class API { static async updateLogo(id, values) { try { + // Convert values to FormData for the multipart/form-data content type + const formData = new FormData(); + + // Add each field to the form data + Object.keys(values).forEach(key => { + if (values[key] !== null && values[key] !== undefined) { + formData.append(key, values[key]); + } + }); + const response = await request(`${host}/api/channels/logos/${id}/`, { method: 'PUT', - body: values, + body: formData, // Send as FormData instead of JSON }); useChannelsStore.getState().updateLogo(response); From 8e2309ac583c05bb2b479bc97b799c1989826921 Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Thu, 17 Jul 2025 21:02:50 -0500 Subject: [PATCH 055/857] Fixes logo uploads --- apps/channels/api_views.py | 14 +++++++- dispatcharr/utils.py | 8 ++--- frontend/src/api.js | 42 +++++++++++++++++++--- frontend/src/components/forms/Channel.jsx | 27 +++++++++++--- frontend/src/components/forms/Channels.jsx | 27 +++++++++++--- frontend/src/components/forms/Logo.jsx | 23 +++++++++--- 6 files changed, 119 insertions(+), 22 deletions(-) diff --git a/apps/channels/api_views.py b/apps/channels/api_views.py index 0956da11..ee7109b7 100644 --- a/apps/channels/api_views.py +++ b/apps/channels/api_views.py @@ -1172,6 +1172,16 @@ class LogoViewSet(viewsets.ModelViewSet): ) file = request.FILES["file"] + + # Validate file + try: + from dispatcharr.utils import validate_logo_file + validate_logo_file(file) + except Exception as e: + return Response( + {"error": str(e)}, status=status.HTTP_400_BAD_REQUEST + ) + file_name = file.name file_path = os.path.join("/data/logos", file_name) @@ -1187,8 +1197,10 @@ class LogoViewSet(viewsets.ModelViewSet): }, ) + # Use get_serializer to ensure proper context + serializer = self.get_serializer(logo) return Response( - LogoSerializer(logo, context={'request': request}).data, + serializer.data, status=status.HTTP_201_CREATED, ) diff --git a/dispatcharr/utils.py b/dispatcharr/utils.py index 767913c6..5e1ad087 100644 --- a/dispatcharr/utils.py +++ b/dispatcharr/utils.py @@ -21,11 +21,11 @@ def json_success_response(data=None, status=200): def validate_logo_file(file): """Validate uploaded logo file size and MIME type.""" - valid_mime_types = ["image/jpeg", "image/png", "image/gif"] + valid_mime_types = ["image/jpeg", "image/png", "image/gif", "image/webp"] if file.content_type not in valid_mime_types: - raise ValidationError("Unsupported file type. Allowed types: JPEG, PNG, GIF.") - if file.size > 2 * 1024 * 1024: - raise ValidationError("File too large. Max 2MB.") + raise ValidationError("Unsupported file type. Allowed types: JPEG, PNG, GIF, WebP.") + if file.size > 5 * 1024 * 1024: # Increased to 5MB + raise ValidationError("File too large. 
Max 5MB.") def get_client_ip(request): diff --git a/frontend/src/api.js b/frontend/src/api.js index 967e462b..bcffc920 100644 --- a/frontend/src/api.js +++ b/frontend/src/api.js @@ -209,10 +209,10 @@ export default class API { API.getAllChannelIds(API.lastQueryParams), ]); - useChannelsTable + useChannelsTableStore .getState() .queryChannels(response, API.lastQueryParams); - useChannelsTable.getState().setAllQueryIds(ids); + useChannelsTableStore.getState().setAllQueryIds(ids); return response; } catch (e) { @@ -1252,16 +1252,48 @@ export default class API { const formData = new FormData(); formData.append('file', file); - const response = await request(`${host}/api/channels/logos/upload/`, { + // Add timeout handling for file uploads + const controller = new AbortController(); + const timeoutId = setTimeout(() => controller.abort(), 30000); // 30 second timeout + + const response = await fetch(`${host}/api/channels/logos/upload/`, { method: 'POST', body: formData, + headers: { + Authorization: `Bearer ${await API.getAuthToken()}`, + }, + signal: controller.signal, }); - useChannelsStore.getState().addLogo(response); + clearTimeout(timeoutId); - return response; + if (!response.ok) { + const error = new Error(`HTTP error! Status: ${response.status}`); + let errorBody = await response.text(); + + try { + errorBody = JSON.parse(errorBody); + } catch (e) { + // If parsing fails, leave errorBody as the raw text + } + + error.status = response.status; + error.response = response; + error.body = errorBody; + throw error; + } + + const result = await response.json(); + useChannelsStore.getState().addLogo(result); + return result; } catch (e) { + if (e.name === 'AbortError') { + const timeoutError = new Error('Upload timed out. Please try again.'); + timeoutError.code = 'NETWORK_ERROR'; + throw timeoutError; + } errorNotification('Failed to upload logo', e); + throw e; } } diff --git a/frontend/src/components/forms/Channel.jsx b/frontend/src/components/forms/Channel.jsx index 64412cb4..c7d8ed6c 100644 --- a/frontend/src/components/forms/Channel.jsx +++ b/frontend/src/components/forms/Channel.jsx @@ -31,6 +31,7 @@ import { Image, UnstyledButton, } from '@mantine/core'; +import { notifications } from '@mantine/notifications'; import { ListOrdered, SquarePlus, SquareX, X } from 'lucide-react'; import useEPGsStore from '../../store/epgs'; import { Dropzone } from '@mantine/dropzone'; @@ -84,10 +85,28 @@ const ChannelForm = ({ channel = null, isOpen, onClose }) => { const handleLogoChange = async (files) => { if (files.length === 1) { - const retval = await API.uploadLogo(files[0]); - await fetchLogos(); - setLogoPreview(retval.cache_url); - formik.setFieldValue('logo_id', retval.id); + const file = files[0]; + + // Validate file size on frontend first + if (file.size > 5 * 1024 * 1024) { + // 5MB + notifications.show({ + title: 'Error', + message: 'File too large. 
Maximum size is 5MB.', + color: 'red', + }); + return; + } + + try { + const retval = await API.uploadLogo(file); + await fetchLogos(); + setLogoPreview(retval.cache_url); + formik.setFieldValue('logo_id', retval.id); + } catch (error) { + console.error('Logo upload failed:', error); + // Error notification is already handled in API.uploadLogo + } } else { setLogoPreview(null); } diff --git a/frontend/src/components/forms/Channels.jsx b/frontend/src/components/forms/Channels.jsx index dbce5cf3..e67d9419 100644 --- a/frontend/src/components/forms/Channels.jsx +++ b/frontend/src/components/forms/Channels.jsx @@ -34,6 +34,7 @@ import { import { ListOrdered, SquarePlus, SquareX, X } from 'lucide-react'; import useEPGsStore from '../../store/epgs'; import { Dropzone } from '@mantine/dropzone'; +import { notifications } from '@mantine/notifications'; import { FixedSizeList as List } from 'react-window'; const ChannelsForm = ({ channel = null, isOpen, onClose }) => { @@ -81,10 +82,28 @@ const ChannelsForm = ({ channel = null, isOpen, onClose }) => { const handleLogoChange = async (files) => { if (files.length === 1) { - const retval = await API.uploadLogo(files[0]); - await fetchLogos(); - setLogoPreview(retval.cache_url); - formik.setFieldValue('logo_id', retval.id); + const file = files[0]; + + // Validate file size on frontend first + if (file.size > 5 * 1024 * 1024) { + // 5MB + notifications.show({ + title: 'Error', + message: 'File too large. Maximum size is 5MB.', + color: 'red', + }); + return; + } + + try { + const retval = await API.uploadLogo(file); + await fetchLogos(); + setLogoPreview(retval.cache_url); + formik.setFieldValue('logo_id', retval.id); + } catch (error) { + console.error('Logo upload failed:', error); + // Error notification is already handled in API.uploadLogo + } } else { setLogoPreview(null); } diff --git a/frontend/src/components/forms/Logo.jsx b/frontend/src/components/forms/Logo.jsx index c3e48d5d..436dbf8a 100644 --- a/frontend/src/components/forms/Logo.jsx +++ b/frontend/src/components/forms/Logo.jsx @@ -51,12 +51,12 @@ const LogoForm = ({ logo = null, isOpen, onClose }) => { onClose(); } catch (error) { let errorMessage = logo ? 'Failed to update logo' : 'Failed to create logo'; - + // Handle specific timeout errors if (error.code === 'NETWORK_ERROR' || error.message?.includes('timeout')) { errorMessage = 'Request timed out. Please try again.'; } - + notifications.show({ title: 'Error', message: errorMessage, @@ -85,6 +85,17 @@ const LogoForm = ({ logo = null, isOpen, onClose }) => { if (files.length === 0) return; const file = files[0]; + + // Validate file size on frontend first + if (file.size > 5 * 1024 * 1024) { // 5MB + notifications.show({ + title: 'Error', + message: 'File too large. Maximum size is 5MB.', + color: 'red', + }); + return; + } + setUploading(true); try { @@ -102,12 +113,16 @@ const LogoForm = ({ logo = null, isOpen, onClose }) => { }); } catch (error) { let errorMessage = 'Failed to upload logo'; - + // Handle specific timeout errors if (error.code === 'NETWORK_ERROR' || error.message?.includes('timeout')) { errorMessage = 'Upload timed out. Please try again.'; + } else if (error.status === 413) { + errorMessage = 'File too large. 
Please choose a smaller file.'; + } else if (error.body?.error) { + errorMessage = error.body.error; } - + notifications.show({ title: 'Error', message: errorMessage, From 5d82fd17c2865aef1b36a743a80248bdadf76114 Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Thu, 17 Jul 2025 21:09:05 -0500 Subject: [PATCH 056/857] Treat local files as valid urls --- frontend/src/components/forms/Logo.jsx | 15 ++++++++++++++- 1 file changed, 14 insertions(+), 1 deletion(-) diff --git a/frontend/src/components/forms/Logo.jsx b/frontend/src/components/forms/Logo.jsx index 436dbf8a..7c685b2f 100644 --- a/frontend/src/components/forms/Logo.jsx +++ b/frontend/src/components/forms/Logo.jsx @@ -29,7 +29,20 @@ const LogoForm = ({ logo = null, isOpen, onClose }) => { }, validationSchema: Yup.object({ name: Yup.string().required('Name is required'), - url: Yup.string().url('Must be a valid URL').required('URL is required'), + url: Yup.string() + .required('URL is required') + .test('valid-url-or-path', 'Must be a valid URL or local file path', (value) => { + if (!value) return false; + // Allow local file paths starting with /logos/ + if (value.startsWith('/logos/')) return true; + // Allow valid URLs + try { + new URL(value); + return true; + } catch { + return false; + } + }), }), onSubmit: async (values, { setSubmitting }) => { try { From 23bd5484ee1a0e3a472ba865a961c661189de5ef Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Thu, 17 Jul 2025 21:12:05 -0500 Subject: [PATCH 057/857] Enlarge logo on hover. --- frontend/src/components/forms/Logo.jsx | 13 +++++++++++++ frontend/src/components/tables/LogosTable.jsx | 10 ++++++++++ 2 files changed, 23 insertions(+) diff --git a/frontend/src/components/forms/Logo.jsx b/frontend/src/components/forms/Logo.jsx index 7c685b2f..bd711443 100644 --- a/frontend/src/components/forms/Logo.jsx +++ b/frontend/src/components/forms/Logo.jsx @@ -179,6 +179,19 @@ const LogoForm = ({ logo = null, isOpen, onClose }) => { height={75} fit="contain" fallbackSrc="/logo.png" + style={{ + transition: 'transform 0.3s ease', + cursor: 'pointer', + ':hover': { + transform: 'scale(1.5)' + } + }} + onMouseEnter={(e) => { + e.target.style.transform = 'scale(1.5)'; + }} + onMouseLeave={(e) => { + e.target.style.transform = 'scale(1)'; + }} />
diff --git a/frontend/src/components/tables/LogosTable.jsx b/frontend/src/components/tables/LogosTable.jsx
index 477da0cf..41799449 100644
--- a/frontend/src/components/tables/LogosTable.jsx
+++ b/frontend/src/components/tables/LogosTable.jsx
@@ -347,6 +347,16 @@ const LogosTable = () => {
             height={30}
             fit="contain"
             fallbackSrc="/logo.png"
+            style={{
+              transition: 'transform 0.3s ease',
+              cursor: 'pointer',
+            }}
+            onMouseEnter={(e) => {
+              e.target.style.transform = 'scale(1.5)';
+            }}
+            onMouseLeave={(e) => {
+              e.target.style.transform = 'scale(1)';
+            }}
           />
), From e7771d5b6764c7c18c15384d51fba3adbec3da23 Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Fri, 18 Jul 2025 11:36:15 -0500 Subject: [PATCH 058/857] Allow deleting logos that are assigned to channels. --- apps/channels/api_views.py | 42 ++++++++++--------- apps/channels/serializers.py | 10 +++++ frontend/src/components/forms/Logo.jsx | 2 + frontend/src/components/tables/LogosTable.jsx | 5 ++- 4 files changed, 38 insertions(+), 21 deletions(-) diff --git a/apps/channels/api_views.py b/apps/channels/api_views.py index ee7109b7..dbdd4271 100644 --- a/apps/channels/api_views.py +++ b/apps/channels/api_views.py @@ -1053,24 +1053,27 @@ class BulkDeleteLogosAPIView(APIView): def delete(self, request): logo_ids = request.data.get("logo_ids", []) - # Check if any logos are being used by channels - used_logos = Logo.objects.filter( - id__in=logo_ids, - channels__isnull=False - ).distinct() + # Get logos and their usage info before deletion + logos_to_delete = Logo.objects.filter(id__in=logo_ids) + total_channels_affected = 0 + + for logo in logos_to_delete: + if logo.channels.exists(): + channel_count = logo.channels.count() + total_channels_affected += channel_count + # Remove logo from channels + logo.channels.update(logo=None) + logger.info(f"Removed logo {logo.name} from {channel_count} channels before deletion") - if used_logos.exists(): - used_names = list(used_logos.values_list('name', flat=True)) - return Response( - {"error": f"Cannot delete logos that are in use: {', '.join(used_names)}"}, - status=status.HTTP_400_BAD_REQUEST - ) + # Delete logos + deleted_count = logos_to_delete.delete()[0] - # Delete logos that are not in use - deleted_count = Logo.objects.filter(id__in=logo_ids).delete()[0] + message = f"Successfully deleted {deleted_count} logos" + if total_channels_affected > 0: + message += f" and removed them from {total_channels_affected} channels" return Response( - {"message": f"Successfully deleted {deleted_count} logos"}, + {"message": message}, status=status.HTTP_204_NO_CONTENT ) @@ -1152,15 +1155,14 @@ class LogoViewSet(viewsets.ModelViewSet): return super().update(request, *args, **kwargs) def destroy(self, request, *args, **kwargs): - """Delete a logo""" + """Delete a logo and remove it from any channels using it""" logo = self.get_object() - # Check if logo is being used by any channels + # Instead of preventing deletion, remove the logo from channels if logo.channels.exists(): - return Response( - {"error": f"Cannot delete logo as it is used by {logo.channels.count()} channel(s)"}, - status=status.HTTP_400_BAD_REQUEST - ) + channel_count = logo.channels.count() + logo.channels.update(logo=None) + logger.info(f"Removed logo {logo.name} from {channel_count} channels before deletion") return super().destroy(request, *args, **kwargs) diff --git a/apps/channels/serializers.py b/apps/channels/serializers.py index a933c496..82b5f808 100644 --- a/apps/channels/serializers.py +++ b/apps/channels/serializers.py @@ -28,6 +28,16 @@ class LogoSerializer(serializers.ModelSerializer): model = Logo fields = ["id", "name", "url", "cache_url", "channel_count", "is_used", "channel_names"] + def validate_url(self, value): + """Validate that the URL is unique for creation or update""" + if self.instance and self.instance.url == value: + return value + + if Logo.objects.filter(url=value).exists(): + raise serializers.ValidationError("A logo with this URL already exists.") + + return value + def get_cache_url(self, obj): # return f"/api/channels/logos/{obj.id}/cache/" request = 
self.context.get("request")
diff --git a/frontend/src/components/forms/Logo.jsx b/frontend/src/components/forms/Logo.jsx
index bd711443..c724c21c 100644
--- a/frontend/src/components/forms/Logo.jsx
+++ b/frontend/src/components/forms/Logo.jsx
@@ -68,6 +68,8 @@ const LogoForm = ({ logo = null, isOpen, onClose }) => {
       // Handle specific timeout errors
       if (error.code === 'NETWORK_ERROR' || error.message?.includes('timeout')) {
         errorMessage = 'Request timed out. Please try again.';
+      } else if (error.response?.data?.error) {
+        errorMessage = error.response.data.error;
       }
 
       notifications.show({
diff --git a/frontend/src/components/tables/LogosTable.jsx b/frontend/src/components/tables/LogosTable.jsx
index 41799449..0ec6488f 100644
--- a/frontend/src/components/tables/LogosTable.jsx
+++ b/frontend/src/components/tables/LogosTable.jsx
@@ -718,6 +718,9 @@ const LogosTable = () => {
           isBulkDelete ? (
Are you sure you want to delete {selectedRows.size} selected logos? + + Any channels using these logos will have their logo removed. + This action cannot be undone. @@ -727,7 +730,7 @@ const LogosTable = () => { Are you sure you want to delete the logo "{logoToDelete.name}"? {logoToDelete.channel_count > 0 && ( - Warning: This logo is currently used by {logoToDelete.channel_count} channel{logoToDelete.channel_count !== 1 ? 's' : ''}. + This logo is currently used by {logoToDelete.channel_count} channel{logoToDelete.channel_count !== 1 ? 's' : ''}. They will have their logo removed. )} From 0fcb8b9f2eeec7a74e03d9afe36c07d898f50a1e Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Fri, 18 Jul 2025 13:44:00 -0500 Subject: [PATCH 059/857] Don't convert urls in the store. --- frontend/src/store/channels.jsx | 3 --- 1 file changed, 3 deletions(-) diff --git a/frontend/src/store/channels.jsx b/frontend/src/store/channels.jsx index a4c61149..b18b02f6 100644 --- a/frontend/src/store/channels.jsx +++ b/frontend/src/store/channels.jsx @@ -232,7 +232,6 @@ const useChannelsStore = create((set, get) => ({ logos: logos.reduce((acc, logo) => { acc[logo.id] = { ...logo, - url: logo.url.replace(/^\/data/, ''), }; return acc; }, {}), @@ -250,7 +249,6 @@ const useChannelsStore = create((set, get) => ({ ...state.logos, [newLogo.id]: { ...newLogo, - url: newLogo.url.replace(/^\/data/, ''), }, }, })), @@ -261,7 +259,6 @@ const useChannelsStore = create((set, get) => ({ ...state.logos, [logo.id]: { ...logo, - url: logo.url.replace(/^\/data/, ''), }, }, })), From e27f45809bb479967ba92fef108857c55d34c556 Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Fri, 18 Jul 2025 13:47:50 -0500 Subject: [PATCH 060/857] Allow /data/logos as a url. --- frontend/src/components/forms/Logo.jsx | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/frontend/src/components/forms/Logo.jsx b/frontend/src/components/forms/Logo.jsx index c724c21c..e209659c 100644 --- a/frontend/src/components/forms/Logo.jsx +++ b/frontend/src/components/forms/Logo.jsx @@ -33,8 +33,8 @@ const LogoForm = ({ logo = null, isOpen, onClose }) => { .required('URL is required') .test('valid-url-or-path', 'Must be a valid URL or local file path', (value) => { if (!value) return false; - // Allow local file paths starting with /logos/ - if (value.startsWith('/logos/')) return true; + // Allow local file paths starting with /data/logos/ + if (value.startsWith('/data/logos/')) return true; // Allow valid URLs try { new URL(value); From 1ece74a0b0d2cbdbe849988ac274721fae1f8bda Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Fri, 18 Jul 2025 14:07:58 -0500 Subject: [PATCH 061/857] Scan logos folder for new logos. 
--- core/tasks.py | 117 +++++++++++++++++++++++++++++++++++++++++++++++++- 1 file changed, 115 insertions(+), 2 deletions(-) diff --git a/core/tasks.py b/core/tasks.py index e8b36162..41e5d707 100644 --- a/core/tasks.py +++ b/core/tasks.py @@ -21,10 +21,12 @@ logger = logging.getLogger(__name__) EPG_WATCH_DIR = '/data/epgs' M3U_WATCH_DIR = '/data/m3us' +LOGO_WATCH_DIR = '/data/logos' MIN_AGE_SECONDS = 6 STARTUP_SKIP_AGE = 30 REDIS_PREFIX = "processed_file:" REDIS_TTL = 60 * 60 * 24 * 3 # expire keys after 3 days (optional) +SUPPORTED_LOGO_FORMATS = ['.jpg', '.jpeg', '.png', '.gif', '.webp', '.bmp', '.svg'] # Store the last known value to compare with new data last_known_data = {} @@ -56,10 +58,11 @@ def scan_and_process_files(): global _first_scan_completed redis_client = RedisClient.get_client() now = time.time() + # Check if directories exist - dirs_exist = all(os.path.exists(d) for d in [M3U_WATCH_DIR, EPG_WATCH_DIR]) + dirs_exist = all(os.path.exists(d) for d in [M3U_WATCH_DIR, EPG_WATCH_DIR, LOGO_WATCH_DIR]) if not dirs_exist: - throttled_log(logger.warning, f"Watch directories missing: M3U ({os.path.exists(M3U_WATCH_DIR)}), EPG ({os.path.exists(EPG_WATCH_DIR)})", "watch_dirs_missing") + throttled_log(logger.warning, f"Watch directories missing: M3U ({os.path.exists(M3U_WATCH_DIR)}), EPG ({os.path.exists(EPG_WATCH_DIR)}), LOGO ({os.path.exists(LOGO_WATCH_DIR)})", "watch_dirs_missing") # Process M3U files m3u_files = [f for f in os.listdir(M3U_WATCH_DIR) @@ -266,6 +269,116 @@ def scan_and_process_files(): logger.trace(f"EPG processing complete: {epg_processed} processed, {epg_skipped} skipped, {epg_errors} errors") + # Process Logo files + try: + logo_files = os.listdir(LOGO_WATCH_DIR) if os.path.exists(LOGO_WATCH_DIR) else [] + logger.trace(f"Found {len(logo_files)} files in LOGO directory") + except Exception as e: + logger.error(f"Error listing LOGO directory: {e}") + logo_files = [] + + logo_processed = 0 + logo_skipped = 0 + logo_errors = 0 + + for filename in logo_files: + filepath = os.path.join(LOGO_WATCH_DIR, filename) + + if not os.path.isfile(filepath): + if _first_scan_completed: + logger.trace(f"Skipping {filename}: Not a file") + else: + logger.debug(f"Skipping {filename}: Not a file") + logo_skipped += 1 + continue + + # Check if file has supported logo extension + file_ext = os.path.splitext(filename)[1].lower() + if file_ext not in SUPPORTED_LOGO_FORMATS: + if _first_scan_completed: + logger.trace(f"Skipping {filename}: Not a supported logo format") + else: + logger.debug(f"Skipping {filename}: Not a supported logo format") + logo_skipped += 1 + continue + + mtime = os.path.getmtime(filepath) + age = now - mtime + redis_key = REDIS_PREFIX + filepath + stored_mtime = redis_client.get(redis_key) + + # Check if logo already exists in database + if not stored_mtime and age > STARTUP_SKIP_AGE: + from apps.channels.models import Logo + existing_logo = Logo.objects.filter(url=filepath).exists() + if existing_logo: + if _first_scan_completed: + logger.trace(f"Skipping {filename}: Already exists in database") + else: + logger.debug(f"Skipping {filename}: Already exists in database") + redis_client.set(redis_key, mtime, ex=REDIS_TTL) + logo_skipped += 1 + continue + else: + logger.debug(f"Processing {filename} despite age: Not found in database") + + # File too new — probably still being written + if age < MIN_AGE_SECONDS: + if _first_scan_completed: + logger.trace(f"Skipping {filename}: Too new, possibly still being written (age={age}s)") + else: + logger.debug(f"Skipping 
{filename}: Too new, possibly still being written (age={age}s)") + logo_skipped += 1 + continue + + # Skip if we've already processed this mtime + if stored_mtime and float(stored_mtime) >= mtime: + if _first_scan_completed: + logger.trace(f"Skipping {filename}: Already processed this version") + else: + logger.debug(f"Skipping {filename}: Already processed this version") + logo_skipped += 1 + continue + + try: + from apps.channels.models import Logo + + # Create logo entry with just the filename (without extension) as name + logo_name = os.path.splitext(filename)[0] + + logo, created = Logo.objects.get_or_create( + url=filepath, + defaults={ + "name": logo_name, + } + ) + + redis_client.set(redis_key, mtime, ex=REDIS_TTL) + + if created: + logger.info(f"Created new logo entry: {logo_name}") + else: + logger.debug(f"Logo entry already exists: {logo_name}") + + logo_processed += 1 + + # Send websocket notification + channel_layer = get_channel_layer() + async_to_sync(channel_layer.group_send)( + "updates", + { + "type": "update", + "data": {"success": True, "type": "logo_file", "filename": filename, "created": created} + }, + ) + + except Exception as e: + logger.error(f"Error processing logo file {filename}: {str(e)}", exc_info=True) + logo_errors += 1 + continue + + logger.trace(f"LOGO processing complete: {logo_processed} processed, {logo_skipped} skipped, {logo_errors} errors") + # Mark that the first scan is complete _first_scan_completed = True From 13672919d0f7c10f0abcc3ebebcbc8169e47711f Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Fri, 18 Jul 2025 14:26:09 -0500 Subject: [PATCH 062/857] Fetch Playlists on successful m3u update. --- frontend/src/WebSocket.jsx | 1 + 1 file changed, 1 insertion(+) diff --git a/frontend/src/WebSocket.jsx b/frontend/src/WebSocket.jsx index 538ffda3..ae0316ad 100644 --- a/frontend/src/WebSocket.jsx +++ b/frontend/src/WebSocket.jsx @@ -218,6 +218,7 @@ export const WebsocketProvider = ({ children }) => { } updatePlaylist(updateData); + fetchPlaylists(); // Refresh playlists to ensure UI is up-to-date } else { // Log when playlist can't be found for debugging purposes console.warn( From 479826709bdd41c3bf1a2923ba11f3ddf30e6121 Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Fri, 18 Jul 2025 15:01:26 -0500 Subject: [PATCH 063/857] Fetch logos when logos are added by filesystem scan. 
--- core/tasks.py | 26 +++++++++++++++++--------- frontend/src/WebSocket.jsx | 11 +++++++++++ 2 files changed, 28 insertions(+), 9 deletions(-) diff --git a/core/tasks.py b/core/tasks.py index 41e5d707..3a738611 100644 --- a/core/tasks.py +++ b/core/tasks.py @@ -362,15 +362,7 @@ def scan_and_process_files(): logo_processed += 1 - # Send websocket notification - channel_layer = get_channel_layer() - async_to_sync(channel_layer.group_send)( - "updates", - { - "type": "update", - "data": {"success": True, "type": "logo_file", "filename": filename, "created": created} - }, - ) + # Remove individual websocket notification - will send summary instead except Exception as e: logger.error(f"Error processing logo file {filename}: {str(e)}", exc_info=True) @@ -379,6 +371,22 @@ def scan_and_process_files(): logger.trace(f"LOGO processing complete: {logo_processed} processed, {logo_skipped} skipped, {logo_errors} errors") + # Send summary websocket update for logo processing + if logo_processed > 0 or logo_errors > 0: + send_websocket_update( + "updates", + "update", + { + "success": True, + "type": "logo_processing_summary", + "processed": logo_processed, + "skipped": logo_skipped, + "errors": logo_errors, + "total_files": len(logo_files), + "message": f"Logo processing complete: {logo_processed} processed, {logo_skipped} skipped, {logo_errors} errors" + } + ) + # Mark that the first scan is complete _first_scan_completed = True diff --git a/frontend/src/WebSocket.jsx b/frontend/src/WebSocket.jsx index ae0316ad..156a7e29 100644 --- a/frontend/src/WebSocket.jsx +++ b/frontend/src/WebSocket.jsx @@ -418,6 +418,16 @@ export const WebsocketProvider = ({ children }) => { } break; + case 'logo_processing_summary': + notifications.show({ + title: 'Logo Processing Summary', + message: `Logo processing complete: ${parsedEvent.data.processed} logos processed, ${parsedEvent.data.duplicates_merged} duplicates merged.`, + color: 'blue', + autoClose: 5000, + }); + fetchLogos(); + break; + default: console.error( `Unknown websocket event type: ${parsedEvent.data?.type}` @@ -488,6 +498,7 @@ export const WebsocketProvider = ({ children }) => { const setProfilePreview = usePlaylistsStore((s) => s.setProfilePreview); const fetchEPGData = useEPGsStore((s) => s.fetchEPGData); const fetchEPGs = useEPGsStore((s) => s.fetchEPGs); + const fetchLogos = useChannelsStore((s) => s.fetchLogos); const ret = useMemo(() => { return [isReady, ws.current?.send.bind(ws.current), val]; From e876af1aa2a79b0b7046a56b31c99ab85010c08b Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Fri, 18 Jul 2025 15:04:34 -0500 Subject: [PATCH 064/857] Scan sub folders for logos. 
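Logo discovery now walks the whole tree under /data/logos instead of only its
top level. As a minimal standalone sketch of the difference (the path comes
from LOGO_WATCH_DIR; everything else is illustrative):

    import os

    LOGO_WATCH_DIR = "/data/logos"

    # old behaviour: only files directly inside the watch directory
    flat = os.listdir(LOGO_WATCH_DIR)

    # new behaviour: every file in the directory and all subdirectories
    recursive = [
        os.path.join(root, name)
        for root, _dirs, names in os.walk(LOGO_WATCH_DIR)
        for name in names
    ]

Logo names are still taken from the file's basename without its extension, so
nesting only affects discovery, not the stored name.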
--- core/tasks.py | 16 +++++++++------- 1 file changed, 9 insertions(+), 7 deletions(-) diff --git a/core/tasks.py b/core/tasks.py index 3a738611..47bc8cf0 100644 --- a/core/tasks.py +++ b/core/tasks.py @@ -269,10 +269,14 @@ def scan_and_process_files(): logger.trace(f"EPG processing complete: {epg_processed} processed, {epg_skipped} skipped, {epg_errors} errors") - # Process Logo files + # Process Logo files (including subdirectories) try: - logo_files = os.listdir(LOGO_WATCH_DIR) if os.path.exists(LOGO_WATCH_DIR) else [] - logger.trace(f"Found {len(logo_files)} files in LOGO directory") + logo_files = [] + if os.path.exists(LOGO_WATCH_DIR): + for root, dirs, files in os.walk(LOGO_WATCH_DIR): + for filename in files: + logo_files.append(os.path.join(root, filename)) + logger.trace(f"Found {len(logo_files)} files in LOGO directory (including subdirectories)") except Exception as e: logger.error(f"Error listing LOGO directory: {e}") logo_files = [] @@ -281,8 +285,8 @@ def scan_and_process_files(): logo_skipped = 0 logo_errors = 0 - for filename in logo_files: - filepath = os.path.join(LOGO_WATCH_DIR, filename) + for filepath in logo_files: + filename = os.path.basename(filepath) if not os.path.isfile(filepath): if _first_scan_completed: @@ -362,8 +366,6 @@ def scan_and_process_files(): logo_processed += 1 - # Remove individual websocket notification - will send summary instead - except Exception as e: logger.error(f"Error processing logo file {filename}: {str(e)}", exc_info=True) logo_errors += 1 From d926d90dd913d266701193e8a4401c12930c591d Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Fri, 18 Jul 2025 15:14:11 -0500 Subject: [PATCH 065/857] Fix websocket message. --- frontend/src/WebSocket.jsx | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/frontend/src/WebSocket.jsx b/frontend/src/WebSocket.jsx index 156a7e29..2e210461 100644 --- a/frontend/src/WebSocket.jsx +++ b/frontend/src/WebSocket.jsx @@ -421,7 +421,7 @@ export const WebsocketProvider = ({ children }) => { case 'logo_processing_summary': notifications.show({ title: 'Logo Processing Summary', - message: `Logo processing complete: ${parsedEvent.data.processed} logos processed, ${parsedEvent.data.duplicates_merged} duplicates merged.`, + message: `${parsedEvent.data.message}`, color: 'blue', autoClose: 5000, }); From bc08cb1270a618deec0bc924c7f00a19013f9404 Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Fri, 18 Jul 2025 15:23:30 -0500 Subject: [PATCH 066/857] Ask to delete local files as well. 
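Both the single and bulk logo delete endpoints gain an opt-in flag that also
removes the backing file when it lives under /data/logos. A rough client-side
sketch of the two calls, using the requests library purely for illustration
(host, auth header and IDs are placeholders; only the delete_file query
parameter and the logo_ids/delete_files body keys come from this change):

    import requests

    BASE = "http://dispatcharr.local:9191"         # placeholder host
    HEADERS = {"Authorization": "Bearer <token>"}  # placeholder auth

    # delete one logo and, if it is a local file, the file itself
    requests.delete(f"{BASE}/api/channels/logos/42/",
                    params={"delete_file": "true"},
                    headers=HEADERS)

    # bulk delete, also removing local files
    requests.delete(f"{BASE}/api/channels/logos/bulk-delete/",
                    json={"logo_ids": [42, 43], "delete_files": True},
                    headers=HEADERS)

Logos whose URL points outside /data/logos are only removed from the database,
never from disk, regardless of the flag.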
--- apps/channels/api_views.py | 34 ++++++++++++++++++- frontend/src/api.js | 19 ++++++++--- .../src/components/ConfirmationDialog.jsx | 28 +++++++++++++-- frontend/src/components/tables/LogosTable.jsx | 27 +++++++++++---- 4 files changed, 93 insertions(+), 15 deletions(-) diff --git a/apps/channels/api_views.py b/apps/channels/api_views.py index dbdd4271..0126aaf9 100644 --- a/apps/channels/api_views.py +++ b/apps/channels/api_views.py @@ -1052,12 +1052,28 @@ class BulkDeleteLogosAPIView(APIView): ) def delete(self, request): logo_ids = request.data.get("logo_ids", []) + delete_files = request.data.get("delete_files", False) # Get logos and their usage info before deletion logos_to_delete = Logo.objects.filter(id__in=logo_ids) total_channels_affected = 0 - + local_files_deleted = 0 + for logo in logos_to_delete: + # Handle file deletion for local files + if delete_files and logo.url and logo.url.startswith('/data/logos'): + try: + if os.path.exists(logo.url): + os.remove(logo.url) + local_files_deleted += 1 + logger.info(f"Deleted local logo file: {logo.url}") + except Exception as e: + logger.error(f"Failed to delete logo file {logo.url}: {str(e)}") + return Response( + {"error": f"Failed to delete logo file {logo.url}: {str(e)}"}, + status=status.HTTP_500_INTERNAL_SERVER_ERROR + ) + if logo.channels.exists(): channel_count = logo.channels.count() total_channels_affected += channel_count @@ -1071,6 +1087,8 @@ class BulkDeleteLogosAPIView(APIView): message = f"Successfully deleted {deleted_count} logos" if total_channels_affected > 0: message += f" and removed them from {total_channels_affected} channels" + if local_files_deleted > 0: + message += f" and deleted {local_files_deleted} local files" return Response( {"message": message}, @@ -1157,6 +1175,20 @@ class LogoViewSet(viewsets.ModelViewSet): def destroy(self, request, *args, **kwargs): """Delete a logo and remove it from any channels using it""" logo = self.get_object() + delete_file = request.query_params.get('delete_file', 'false').lower() == 'true' + + # Check if it's a local file that should be deleted + if delete_file and logo.url and logo.url.startswith('/data/logos'): + try: + if os.path.exists(logo.url): + os.remove(logo.url) + logger.info(f"Deleted local logo file: {logo.url}") + except Exception as e: + logger.error(f"Failed to delete logo file {logo.url}: {str(e)}") + return Response( + {"error": f"Failed to delete logo file: {str(e)}"}, + status=status.HTTP_500_INTERNAL_SERVER_ERROR + ) # Instead of preventing deletion, remove the logo from channels if logo.channels.exists(): diff --git a/frontend/src/api.js b/frontend/src/api.js index bcffc920..b285e2ea 100644 --- a/frontend/src/api.js +++ b/frontend/src/api.js @@ -1337,9 +1337,15 @@ export default class API { } } - static async deleteLogo(id) { + static async deleteLogo(id, deleteFile = false) { try { - await request(`${host}/api/channels/logos/${id}/`, { + const params = new URLSearchParams(); + if (deleteFile) { + params.append('delete_file', 'true'); + } + + const url = `${host}/api/channels/logos/${id}/?${params.toString()}`; + await request(url, { method: 'DELETE', }); @@ -1351,11 +1357,16 @@ export default class API { } } - static async deleteLogos(ids) { + static async deleteLogos(ids, deleteFiles = false) { try { + const body = { logo_ids: ids }; + if (deleteFiles) { + body.delete_files = true; + } + await request(`${host}/api/channels/logos/bulk-delete/`, { method: 'DELETE', - body: { logo_ids: ids }, + body: body, }); // Remove multiple logos from store diff 
--git a/frontend/src/components/ConfirmationDialog.jsx b/frontend/src/components/ConfirmationDialog.jsx index 8f96708d..1cfbe84d 100644 --- a/frontend/src/components/ConfirmationDialog.jsx +++ b/frontend/src/components/ConfirmationDialog.jsx @@ -29,12 +29,15 @@ const ConfirmationDialog = ({ onSuppressChange, size = 'md', zIndex = 1000, + showDeleteFileOption = false, + deleteFileLabel = "Also delete files from disk", }) => { const suppressWarning = useWarningsStore((s) => s.suppressWarning); const isWarningSuppressed = useWarningsStore((s) => s.isWarningSuppressed); const [suppressChecked, setSuppressChecked] = useState( isWarningSuppressed(actionKey) ); + const [deleteFiles, setDeleteFiles] = useState(false); const handleToggleSuppress = (e) => { setSuppressChecked(e.currentTarget.checked); @@ -47,13 +50,23 @@ const ConfirmationDialog = ({ if (suppressChecked) { suppressWarning(actionKey); } - onConfirm(); + if (showDeleteFileOption) { + onConfirm(deleteFiles); + } else { + onConfirm(); + } + setDeleteFiles(false); // Reset for next time + }; + + const handleClose = () => { + setDeleteFiles(false); // Reset for next time + onClose(); }; return ( )} + {showDeleteFileOption && ( + setDeleteFiles(event.currentTarget.checked)} + label={deleteFileLabel} + mb="md" + /> + )} + - + + )} + + {/* Advanced Stats (expandable) */} + + + {renderStatsCategory('Video', categorizedStats.video)} + {renderStatsCategory('Audio', categorizedStats.audio)} + {renderStatsCategory('Technical', categorizedStats.technical)} + {renderStatsCategory('Other', categorizedStats.other)} + + {/* Show when stats were last updated */} + {stream.stream_stats_updated_at && ( + + Last updated: {new Date(stream.stream_stats_updated_at).toLocaleString()} + + )} + + ); }, @@ -296,7 +454,7 @@ const ChannelStreams = ({ channel, isExpanded }) => { ), }, ], - [data, playlists, m3uAccountsMap] + [data, playlists, m3uAccountsMap, expandedAdvancedStats] ), data, state: { From 7551869a2eaa8101926af557839947fa68288b6b Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Tue, 29 Jul 2025 15:12:14 -0500 Subject: [PATCH 096/857] Remove audio bitrate from basic stats. --- frontend/src/components/tables/ChannelTableStreams.jsx | 7 +------ 1 file changed, 1 insertion(+), 6 deletions(-) diff --git a/frontend/src/components/tables/ChannelTableStreams.jsx b/frontend/src/components/tables/ChannelTableStreams.jsx index 373427bb..2da4002d 100644 --- a/frontend/src/components/tables/ChannelTableStreams.jsx +++ b/frontend/src/components/tables/ChannelTableStreams.jsx @@ -367,7 +367,7 @@ const ChannelStreams = ({ channel, isExpanded }) => { )} {/* Audio Information */} - {(stream.stream_stats.audio_codec || stream.stream_stats.audio_channels || stream.stream_stats.audio_bitrate) && ( + {(stream.stream_stats.audio_codec || stream.stream_stats.audio_channels) && ( <> Audio: {stream.stream_stats.audio_channels && ( @@ -380,11 +380,6 @@ const ChannelStreams = ({ channel, isExpanded }) => { {stream.stream_stats.audio_codec.toUpperCase()} )} - {stream.stream_stats.audio_bitrate && ( - - {stream.stream_stats.audio_bitrate} kbps - - )} )} From e26ecad013c0fba5f76c5df0fbecc721462871de Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Tue, 29 Jul 2025 15:17:18 -0500 Subject: [PATCH 097/857] Move m3u and url badges to same line as stream name. 
--- frontend/src/components/tables/ChannelTableStreams.jsx | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/frontend/src/components/tables/ChannelTableStreams.jsx b/frontend/src/components/tables/ChannelTableStreams.jsx index 2da4002d..d226c52a 100644 --- a/frontend/src/components/tables/ChannelTableStreams.jsx +++ b/frontend/src/components/tables/ChannelTableStreams.jsx @@ -303,8 +303,8 @@ const ChannelStreams = ({ channel, isExpanded }) => { return ( - {stream.name} - + + {stream.name} {accountName} From 613c0d8bb559dd25c37229e68dc15f38ae77a914 Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Tue, 29 Jul 2025 15:43:44 -0500 Subject: [PATCH 098/857] Add input_bitrate to technical for future use. --- frontend/src/components/tables/ChannelTableStreams.jsx | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/frontend/src/components/tables/ChannelTableStreams.jsx b/frontend/src/components/tables/ChannelTableStreams.jsx index d226c52a..11fa9600 100644 --- a/frontend/src/components/tables/ChannelTableStreams.jsx +++ b/frontend/src/components/tables/ChannelTableStreams.jsx @@ -181,7 +181,7 @@ const ChannelStreams = ({ channel, isExpanded }) => { basic: ['resolution', 'video_codec', 'source_fps', 'audio_codec', 'audio_channels'], video: ['video_bitrate', 'pixel_format', 'width', 'height', 'aspect_ratio', 'frame_rate'], audio: ['audio_bitrate', 'sample_rate', 'audio_format', 'audio_channels_layout'], - technical: ['stream_type', 'container_format', 'duration', 'file_size', 'ffmpeg_output_bitrate'], + technical: ['stream_type', 'container_format', 'duration', 'file_size', 'ffmpeg_output_bitrate', 'input_bitrate'], other: [] // Will catch anything not categorized above }; From 26e237f2d10cc57148f2717129ae086c2685a785 Mon Sep 17 00:00:00 2001 From: GitHub Actions Date: Tue, 29 Jul 2025 21:06:06 +0000 Subject: [PATCH 099/857] Release v0.7.1 --- version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/version.py b/version.py index aca85fa0..3c0bf9df 100644 --- a/version.py +++ b/version.py @@ -1,5 +1,5 @@ """ Dispatcharr version information. """ -__version__ = '0.7.0' # Follow semantic versioning (MAJOR.MINOR.PATCH) +__version__ = '0.7.1' # Follow semantic versioning (MAJOR.MINOR.PATCH) __timestamp__ = None # Set during CI/CD build process From 4ae66e0bc9572001c4a1bad5cd771861833a019f Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Thu, 31 Jul 2025 09:52:02 -0500 Subject: [PATCH 100/857] Add membership creation in UpdateChannelMembershipAPIView if not found. 
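Custom channels may not have a ChannelProfileMembership row yet, so the old
get_object_or_404 lookup turned a simple enable/disable toggle into a 404.
The new lookup is effectively Django's get_or_create idiom, sketched here for
comparison only (channel_profile and channel are the objects the view has
already resolved; this is not the exact code used):

    membership, created = ChannelProfileMembership.objects.get_or_create(
        channel_profile=channel_profile,
        channel=channel,
        defaults={"enabled": False},  # applied only when the row is created;
                                      # the serializer then sets the requested value
    )
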
Fixes #275 --- apps/channels/api_views.py | 14 +++++++++++--- 1 file changed, 11 insertions(+), 3 deletions(-) diff --git a/apps/channels/api_views.py b/apps/channels/api_views.py index 636d4875..0221a266 100644 --- a/apps/channels/api_views.py +++ b/apps/channels/api_views.py @@ -1508,9 +1508,17 @@ class UpdateChannelMembershipAPIView(APIView): """Enable or disable a channel for a specific group""" channel_profile = get_object_or_404(ChannelProfile, id=profile_id) channel = get_object_or_404(Channel, id=channel_id) - membership = get_object_or_404( - ChannelProfileMembership, channel_profile=channel_profile, channel=channel - ) + try: + membership = ChannelProfileMembership.objects.get( + channel_profile=channel_profile, channel=channel + ) + except ChannelProfileMembership.DoesNotExist: + # Create the membership if it does not exist (for custom channels) + membership = ChannelProfileMembership.objects.create( + channel_profile=channel_profile, + channel=channel, + enabled=False # Default to False, will be updated below + ) serializer = ChannelProfileMembershipSerializer( membership, data=request.data, partial=True From e029cd8b3dbbff550b9a1926370e156de13571af Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Thu, 31 Jul 2025 10:22:43 -0500 Subject: [PATCH 101/857] Fix XML escaping for channel ID in generate_dummy_epg function --- apps/output/views.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/apps/output/views.py b/apps/output/views.py index 8d58a1b3..3fcd512b 100644 --- a/apps/output/views.py +++ b/apps/output/views.py @@ -266,7 +266,7 @@ def generate_dummy_epg( # Create program entry with escaped channel name xml_lines.append( - f' ' + f' ' ) xml_lines.append(f" {html.escape(program['title'])}") xml_lines.append(f" {html.escape(program['description'])}") From 5a887cc55ab6cabe58bdda7a15bccc66b58a84d8 Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Thu, 31 Jul 2025 13:54:20 -0500 Subject: [PATCH 102/857] Bump Postgres to version 17. 
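The base image now installs postgresql-17 and postgresql-contrib-17 from the
PGDG repository instead of the 14.x packages. A quick sanity check of which
server version a running instance actually uses, from the Django side (not
part of this change, just a verification sketch):

    from django.db import connection

    with connection.cursor() as cursor:
        cursor.execute("SELECT version();")
        print(cursor.fetchone()[0])  # e.g. "PostgreSQL 17.x ..."

Data directories initialised under 14 are migrated by the pg_upgrade handling
added in a later commit in this series.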
--- docker/DispatcharrBase | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docker/DispatcharrBase b/docker/DispatcharrBase index 4360ced3..957c8573 100644 --- a/docker/DispatcharrBase +++ b/docker/DispatcharrBase @@ -32,11 +32,11 @@ RUN curl -fsSL https://packages.redis.io/gpg | gpg --dearmor -o /usr/share/keyri apt-get update && apt-get install -y redis-server && \ apt-get clean && rm -rf /var/lib/apt/lists/* -# --- Set up PostgreSQL 14.x --- +# --- Set up PostgreSQL 17.x --- RUN curl -fsSL https://www.postgresql.org/media/keys/ACCC4CF8.asc | gpg --dearmor -o /usr/share/keyrings/postgresql-keyring.gpg && \ echo "deb [signed-by=/usr/share/keyrings/postgresql-keyring.gpg] http://apt.postgresql.org/pub/repos/apt $(lsb_release -cs)-pgdg main" | \ tee /etc/apt/sources.list.d/pgdg.list && \ - apt-get update && apt-get install -y postgresql-14 postgresql-contrib-14 && \ + apt-get update && apt-get install -y postgresql-17 postgresql-contrib-17 && \ apt-get clean && rm -rf /var/lib/apt/lists/* # Create render group for hardware acceleration support with GID 109 From 826c824084c58d104dbcc642634882f289936b41 Mon Sep 17 00:00:00 2001 From: SergeantPanda <61642231+SergeantPanda@users.noreply.github.com> Date: Thu, 31 Jul 2025 14:03:37 -0500 Subject: [PATCH 103/857] Bump Postgres to version 17 --- docker/DispatcharrBase | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/docker/DispatcharrBase b/docker/DispatcharrBase index 4360ced3..574284f4 100644 --- a/docker/DispatcharrBase +++ b/docker/DispatcharrBase @@ -32,14 +32,14 @@ RUN curl -fsSL https://packages.redis.io/gpg | gpg --dearmor -o /usr/share/keyri apt-get update && apt-get install -y redis-server && \ apt-get clean && rm -rf /var/lib/apt/lists/* -# --- Set up PostgreSQL 14.x --- +# --- Set up PostgreSQL 17.x --- RUN curl -fsSL https://www.postgresql.org/media/keys/ACCC4CF8.asc | gpg --dearmor -o /usr/share/keyrings/postgresql-keyring.gpg && \ echo "deb [signed-by=/usr/share/keyrings/postgresql-keyring.gpg] http://apt.postgresql.org/pub/repos/apt $(lsb_release -cs)-pgdg main" | \ tee /etc/apt/sources.list.d/pgdg.list && \ - apt-get update && apt-get install -y postgresql-14 postgresql-contrib-14 && \ + apt-get update && apt-get install -y postgresql-17 postgresql-contrib-17 && \ apt-get clean && rm -rf /var/lib/apt/lists/* # Create render group for hardware acceleration support with GID 109 RUN groupadd -r -g 109 render || true -ENTRYPOINT ["/app/docker/entrypoint.sh"] \ No newline at end of file +ENTRYPOINT ["/app/docker/entrypoint.sh"] From 108a99264333a2135cbd84b0cba101903b01c264 Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Thu, 31 Jul 2025 14:53:55 -0500 Subject: [PATCH 104/857] Detect mismatched Postgres version and automatically run pg_upgrade --- docker/entrypoint.sh | 9 ++--- docker/init/02-postgres.sh | 67 ++++++++++++++++++++++++++++++++++---- 2 files changed, 66 insertions(+), 10 deletions(-) diff --git a/docker/entrypoint.sh b/docker/entrypoint.sh index 412cf808..8d204a5b 100755 --- a/docker/entrypoint.sh +++ b/docker/entrypoint.sh @@ -33,7 +33,8 @@ export POSTGRES_USER=${POSTGRES_USER:-dispatch} export POSTGRES_PASSWORD=${POSTGRES_PASSWORD:-secret} export POSTGRES_HOST=${POSTGRES_HOST:-localhost} export POSTGRES_PORT=${POSTGRES_PORT:-5432} - +export PG_VERSION=$(ls /usr/lib/postgresql/ | sort -V | tail -n 1) +export PG_BINDIR="/usr/lib/postgresql/${PG_VERSION}/bin" export REDIS_HOST=${REDIS_HOST:-localhost} export REDIS_DB=${REDIS_DB:-0} export 
DISPATCHARR_PORT=${DISPATCHARR_PORT:-9191} @@ -107,13 +108,13 @@ echo "Starting init process..." # Start PostgreSQL echo "Starting Postgres..." -su - postgres -c "/usr/lib/postgresql/14/bin/pg_ctl -D ${POSTGRES_DIR} start -w -t 300 -o '-c port=${POSTGRES_PORT}'" +su - postgres -c "$PG_BINDIR/pg_ctl -D ${POSTGRES_DIR} start -w -t 300 -o '-c port=${POSTGRES_PORT}'" # Wait for PostgreSQL to be ready -until su - postgres -c "/usr/lib/postgresql/14/bin/pg_isready -h ${POSTGRES_HOST} -p ${POSTGRES_PORT}" >/dev/null 2>&1; do +until su - postgres -c "$PG_BINDIR/pg_isready -h ${POSTGRES_HOST} -p ${POSTGRES_PORT}" >/dev/null 2>&1; do echo_with_timestamp "Waiting for PostgreSQL to be ready..." sleep 1 done -postgres_pid=$(su - postgres -c "/usr/lib/postgresql/14/bin/pg_ctl -D ${POSTGRES_DIR} status" | sed -n 's/.*PID: \([0-9]\+\).*/\1/p') +postgres_pid=$(su - postgres -c "$PG_BINDIR/pg_ctl -D ${POSTGRES_DIR} status" | sed -n 's/.*PID: \([0-9]\+\).*/\1/p') echo "✅ Postgres started with PID $postgres_pid" pids+=("$postgres_pid") diff --git a/docker/init/02-postgres.sh b/docker/init/02-postgres.sh index 69a81dd4..aebce1a4 100644 --- a/docker/init/02-postgres.sh +++ b/docker/init/02-postgres.sh @@ -27,6 +27,61 @@ if [ -e "/data/postgresql.conf" ]; then echo "Migration completed successfully." fi +PG_VERSION_FILE="${POSTGRES_DIR}/PG_VERSION" + +# Detect current version from data directory, if present +if [ -f "$PG_VERSION_FILE" ]; then + CURRENT_VERSION=$(cat "$PG_VERSION_FILE") +else + CURRENT_VERSION="" +fi + +# Set binary paths for upgrade if needed +OLD_PG_VERSION="$CURRENT_VERSION" +OLD_BINDIR="/usr/lib/postgresql/${OLD_PG_VERSION}/bin" +NEW_BINDIR="/usr/lib/postgresql/${PG_VERSION}/bin" + +# Only run upgrade if current version is set and not the target +PG_INSTALLED_BY_SCRIPT=0 + +if [ -n "$CURRENT_VERSION" ] && [ "$CURRENT_VERSION" != "$PG_VERSION" ]; then + echo "Detected PostgreSQL data directory version $CURRENT_VERSION, upgrading to $PG_VERSION..." + if [ ! -d "$OLD_BINDIR" ]; then + echo "PostgreSQL binaries for version $CURRENT_VERSION not found. Installing..." + apt update && apt install -y postgresql-$CURRENT_VERSION postgresql-contrib-$CURRENT_VERSION + if [ $? -ne 0 ]; then + echo "Failed to install PostgreSQL version $CURRENT_VERSION. Exiting." + exit 1 + fi + PG_INSTALLED_BY_SCRIPT=1 + fi + + # Prepare new data directory + NEW_POSTGRES_DIR="${POSTGRES_DIR}_$PG_VERSION" + mkdir -p "$NEW_POSTGRES_DIR" + chown -R postgres:postgres "$NEW_POSTGRES_DIR" + chmod 700 "$NEW_POSTGRES_DIR" + + # Initialize new data directory + su - postgres -c "$NEW_BINDIR/initdb -D $NEW_POSTGRES_DIR" + + # Run pg_upgrade + su - postgres -c "$NEW_BINDIR/pg_upgrade -b $OLD_BINDIR -B $NEW_BINDIR -d $POSTGRES_DIR -D $NEW_POSTGRES_DIR" + + # Move old data directory for backup, move new into place + mv "$POSTGRES_DIR" "${POSTGRES_DIR}_backup_${CURRENT_VERSION}_$(date +%s)" + mv "$NEW_POSTGRES_DIR" "$POSTGRES_DIR" + + echo "Upgrade complete. Old data directory backed up." + + # Uninstall PostgreSQL if we installed it just for upgrade + if [ "$PG_INSTALLED_BY_SCRIPT" -eq 1 ]; then + echo "Uninstalling temporary PostgreSQL $CURRENT_VERSION packages..." + apt remove -y postgresql-$CURRENT_VERSION postgresql-contrib-$CURRENT_VERSION + apt autoremove -y + fi +fi + # Initialize PostgreSQL database if [ -z "$(ls -A $POSTGRES_DIR)" ]; then echo "Initializing PostgreSQL database..." 
@@ -35,21 +90,21 @@ if [ -z "$(ls -A $POSTGRES_DIR)" ]; then chmod 700 $POSTGRES_DIR # Initialize PostgreSQL - su - postgres -c "/usr/lib/postgresql/14/bin/initdb -D ${POSTGRES_DIR}" + su - postgres -c "$PG_BINDIR/initdb -D ${POSTGRES_DIR}" # Configure PostgreSQL echo "host all all 0.0.0.0/0 md5" >> "${POSTGRES_DIR}/pg_hba.conf" echo "listen_addresses='*'" >> "${POSTGRES_DIR}/postgresql.conf" # Start PostgreSQL echo "Starting Postgres..." - su - postgres -c "/usr/lib/postgresql/14/bin/pg_ctl -D ${POSTGRES_DIR} start -w -t 300 -o '-c port=${POSTGRES_PORT}'" + su - postgres -c "$PG_BINDIR/pg_ctl -D ${POSTGRES_DIR} start -w -t 300 -o '-c port=${POSTGRES_PORT}'" # Wait for PostgreSQL to be ready - until su - postgres -c "/usr/lib/postgresql/14/bin/pg_isready -h ${POSTGRES_HOST} -p ${POSTGRES_PORT}" >/dev/null 2>&1; do + until su - postgres -c "$PG_BINDIR/pg_isready -h ${POSTGRES_HOST} -p ${POSTGRES_PORT}" >/dev/null 2>&1; do echo "Waiting for PostgreSQL to be ready..." sleep 1 done - postgres_pid=$(su - postgres -c "/usr/lib/postgresql/14/bin/pg_ctl -D ${POSTGRES_DIR} status" | sed -n 's/.*PID: \([0-9]\+\).*/\1/p') + postgres_pid=$(su - postgres -c "$PG_BINDIR/pg_ctl -D ${POSTGRES_DIR} status" | sed -n 's/.*PID: \([0-9]\+\).*/\1/p') # Setup database if needed if ! su - postgres -c "psql -p ${POSTGRES_PORT} -tAc \"SELECT 1 FROM pg_database WHERE datname = '$POSTGRES_DB';\"" | grep -q 1; then @@ -69,8 +124,8 @@ END \$\$; EOF echo "Setting PostgreSQL user privileges..." - su postgres -c "/usr/lib/postgresql/14/bin/psql -p ${POSTGRES_PORT} -c \"ALTER DATABASE ${POSTGRES_DB} OWNER TO $POSTGRES_USER;\"" - su postgres -c "/usr/lib/postgresql/14/bin/psql -p ${POSTGRES_PORT} -c \"GRANT ALL PRIVILEGES ON DATABASE ${POSTGRES_DB} TO $POSTGRES_USER;\"" + su postgres -c "$PG_BINDIR/psql -p ${POSTGRES_PORT} -c \"ALTER DATABASE ${POSTGRES_DB} OWNER TO $POSTGRES_USER;\"" + su postgres -c "$PG_BINDIR/psql -p ${POSTGRES_PORT} -c \"GRANT ALL PRIVILEGES ON DATABASE ${POSTGRES_DB} TO $POSTGRES_USER;\"" # Finished setting up PosgresSQL database echo "PostgreSQL database setup complete." fi From 406ac37fb97c9803e8cc3778425d11c75db53e79 Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Thu, 31 Jul 2025 15:01:28 -0500 Subject: [PATCH 105/857] Delete temp folder if it exists during upgrade. --- docker/init/02-postgres.sh | 21 +++++++++++++-------- 1 file changed, 13 insertions(+), 8 deletions(-) diff --git a/docker/init/02-postgres.sh b/docker/init/02-postgres.sh index aebce1a4..4deb921d 100644 --- a/docker/init/02-postgres.sh +++ b/docker/init/02-postgres.sh @@ -36,16 +36,13 @@ else CURRENT_VERSION="" fi -# Set binary paths for upgrade if needed -OLD_PG_VERSION="$CURRENT_VERSION" -OLD_BINDIR="/usr/lib/postgresql/${OLD_PG_VERSION}/bin" -NEW_BINDIR="/usr/lib/postgresql/${PG_VERSION}/bin" - # Only run upgrade if current version is set and not the target -PG_INSTALLED_BY_SCRIPT=0 - if [ -n "$CURRENT_VERSION" ] && [ "$CURRENT_VERSION" != "$PG_VERSION" ]; then echo "Detected PostgreSQL data directory version $CURRENT_VERSION, upgrading to $PG_VERSION..." + # Set binary paths for upgrade if needed + OLD_BINDIR="/usr/lib/postgresql/${CURRENT_VERSION}/bin" + NEW_BINDIR="/usr/lib/postgresql/${PG_VERSION}/bin" + PG_INSTALLED_BY_SCRIPT=0 if [ ! -d "$OLD_BINDIR" ]; then echo "PostgreSQL binaries for version $CURRENT_VERSION not found. Installing..." 
apt update && apt install -y postgresql-$CURRENT_VERSION postgresql-contrib-$CURRENT_VERSION @@ -58,13 +55,21 @@ if [ -n "$CURRENT_VERSION" ] && [ "$CURRENT_VERSION" != "$PG_VERSION" ]; then # Prepare new data directory NEW_POSTGRES_DIR="${POSTGRES_DIR}_$PG_VERSION" + + # Remove new data directory if it already exists (from a failed/partial upgrade) + if [ -d "$NEW_POSTGRES_DIR" ]; then + echo "Warning: $NEW_POSTGRES_DIR already exists. Removing it to avoid upgrade issues." + rm -rf "$NEW_POSTGRES_DIR" + fi + mkdir -p "$NEW_POSTGRES_DIR" chown -R postgres:postgres "$NEW_POSTGRES_DIR" chmod 700 "$NEW_POSTGRES_DIR" # Initialize new data directory + echo "Initializing new PostgreSQL data directory at $NEW_POSTGRES_DIR..." su - postgres -c "$NEW_BINDIR/initdb -D $NEW_POSTGRES_DIR" - + echo "Running pg_upgrade from $OLD_BINDIR to $NEW_BINDIR..." # Run pg_upgrade su - postgres -c "$NEW_BINDIR/pg_upgrade -b $OLD_BINDIR -B $NEW_BINDIR -d $POSTGRES_DIR -D $NEW_POSTGRES_DIR" From a9aac72a60439c7af701acc181913c254311df8f Mon Sep 17 00:00:00 2001 From: deku-m <37069737+deku-m@users.noreply.github.com> Date: Thu, 31 Jul 2025 22:14:35 +0200 Subject: [PATCH 106/857] Update debian_install.sh --- debian_install.sh | 450 +++++++++++++++++++++++----------------------- 1 file changed, 227 insertions(+), 223 deletions(-) diff --git a/debian_install.sh b/debian_install.sh index 0e41139e..3a97caed 100644 --- a/debian_install.sh +++ b/debian_install.sh @@ -1,208 +1,217 @@ #!/usr/bin/env bash -set -e +set -euo pipefail +IFS=$'\n\t' + +# Root check +if [[ $EUID -ne 0 ]]; then + echo "[ERROR] This script must be run as root." >&2 + exit 1 +fi + +trap 'echo -e "\n[ERROR] Line $LINENO failed. Exiting." >&2; exit 1' ERR ############################################################################## # 0) Warning / Disclaimer ############################################################################## -echo "**************************************************************" -echo "WARNING: While we do not anticipate any problems, we disclaim all" -echo "responsibility for anything that happens to your machine." -echo "" -echo "This script is intended for **Debian-based operating systems only**." -echo "Running it on other distributions WILL cause unexpected issues." -echo "" -echo "This script is **NOT RECOMMENDED** for use on your primary machine." -echo "For safety and best results, we strongly advise running this inside a" -echo "clean virtual machine (VM) or LXC container environment." -echo "" -echo "Additionally, there is NO SUPPORT for this method; Docker is the only" -echo "officially supported way to run Dispatcharr." -echo "**************************************************************" -echo "" -echo "If you wish to proceed, type \"I understand\" and press Enter." -read user_input - -if [ "$user_input" != "I understand" ]; then - echo "Exiting script..." - exit 1 -fi - +show_disclaimer() { + echo "**************************************************************" + echo "WARNING: While we do not anticipate any problems, we disclaim all" + echo "responsibility for anything that happens to your machine." + echo "" + echo "This script is intended for **Debian-based operating systems only**." + echo "Running it on other distributions WILL cause unexpected issues." + echo "" + echo "This script is **NOT RECOMMENDED** for use on your primary machine." + echo "For safety and best results, we strongly advise running this inside a" + echo "clean virtual machine (VM) or LXC container environment." 
+ echo "" + echo "Additionally, there is NO SUPPORT for this method; Docker is the only" + echo "officially supported way to run Dispatcharr." + echo "**************************************************************" + echo "" + echo "If you wish to proceed, type \"I understand\" and press Enter." + read user_input + if [ "$user_input" != "I understand" ]; then + echo "Exiting script..." + exit 1 + fi +} ############################################################################## # 1) Configuration ############################################################################## -# Linux user/group under which Dispatcharr processes will run -DISPATCH_USER="dispatcharr" -DISPATCH_GROUP="dispatcharr" - -# Where Dispatcharr source code should live -APP_DIR="/opt/dispatcharr" - -# Git branch to clone (e.g., "main" or "dev") -DISPATCH_BRANCH="dev" - -# PostgreSQL settings -POSTGRES_DB="dispatcharr" -POSTGRES_USER="dispatch" -POSTGRES_PASSWORD="secret" - -# The port on which Nginx will listen for HTTP -NGINX_HTTP_PORT="9191" - -# The TCP port for Daphné (Django Channels) -WEBSOCKET_PORT="8001" - -# Directory inside /run/ for our socket; full path becomes /run/dispatcharr/dispatcharr.sock -GUNICORN_RUNTIME_DIR="dispatcharr" -GUNICORN_SOCKET="/run/${GUNICORN_RUNTIME_DIR}/dispatcharr.sock" +configure_variables() { + DISPATCH_USER="dispatcharr" + DISPATCH_GROUP="dispatcharr" + APP_DIR="/opt/dispatcharr" + DISPATCH_BRANCH="main" + POSTGRES_DB="dispatcharr" + POSTGRES_USER="dispatch" + POSTGRES_PASSWORD="secret" + NGINX_HTTP_PORT="9191" + WEBSOCKET_PORT="8001" + GUNICORN_RUNTIME_DIR="dispatcharr" + GUNICORN_SOCKET="/run/${GUNICORN_RUNTIME_DIR}/dispatcharr.sock" + PYTHON_BIN=$(command -v python3) + SYSTEMD_DIR="/etc/systemd/system" + NGINX_SITE="/etc/nginx/sites-available/dispatcharr" +} ############################################################################## # 2) Install System Packages ############################################################################## -echo ">>> Installing system packages..." -apt-get update -apt-get install -y \ - git \ - curl \ - wget \ - build-essential \ - gcc \ - libpcre3-dev \ - libpq-dev \ - python3-dev \ - python3-venv \ - python3-pip \ - nginx \ - redis-server \ - postgresql \ - postgresql-contrib \ - ffmpeg \ - procps \ - streamlink +install_packages() { + echo ">>> Installing system packages..." + apt-get update + declare -a packages=( + git curl wget build-essential gcc libpcre3-dev libpq-dev + python3-dev python3-venv python3-pip nginx redis-server + postgresql postgresql-contrib ffmpeg procps streamlink + ) + apt-get install -y --no-install-recommends "${packages[@]}" -# Node.js setup (v23.x from NodeSource) - adjust version if needed -if ! command -v node >/dev/null 2>&1; then - echo ">>> Installing Node.js..." - curl -sL https://deb.nodesource.com/setup_23.x | bash - - apt-get install -y nodejs -fi + if ! command -v node >/dev/null 2>&1; then + echo ">>> Installing Node.js..." + curl -sL https://deb.nodesource.com/setup_23.x | bash - + apt-get install -y nodejs + fi -# Start & enable PostgreSQL and Redis -systemctl enable postgresql redis-server -systemctl start postgresql redis-server + systemctl enable --now postgresql redis-server +} ############################################################################## -# 3) Create Dispatcharr User/Group +# 3) Create User/Group ############################################################################## -if ! 
getent group "${DISPATCH_GROUP}" >/dev/null; then - echo ">>> Creating group: ${DISPATCH_GROUP}" - groupadd "${DISPATCH_GROUP}" -fi - -if ! id -u "${DISPATCH_USER}" >/dev/null; then - echo ">>> Creating user: ${DISPATCH_USER}" - useradd -m -g "${DISPATCH_GROUP}" -s /bin/bash "${DISPATCH_USER}" -fi +create_dispatcharr_user() { + if ! getent group "$DISPATCH_GROUP" >/dev/null; then + groupadd "$DISPATCH_GROUP" + fi + if ! id -u "$DISPATCH_USER" >/dev/null; then + useradd -m -g "$DISPATCH_GROUP" -s /bin/bash "$DISPATCH_USER" + fi +} ############################################################################## -# 4) Configure PostgreSQL Database +# 4) PostgreSQL Setup ############################################################################## -echo ">>> Configuring PostgreSQL..." -su - postgres -c "psql -tc \"SELECT 1 FROM pg_database WHERE datname='${POSTGRES_DB}'\"" | grep -q 1 || \ - su - postgres -c "psql -c \"CREATE DATABASE ${POSTGRES_DB};\"" +setup_postgresql() { + echo ">>> Checking PostgreSQL database and user..." -su - postgres -c "psql -tc \"SELECT 1 FROM pg_roles WHERE rolname='${POSTGRES_USER}'\"" | grep -q 1 || \ - su - postgres -c "psql -c \"CREATE USER ${POSTGRES_USER} WITH PASSWORD '${POSTGRES_PASSWORD}';\"" + db_exists=$(sudo -u postgres psql -tAc "SELECT 1 FROM pg_database WHERE datname='$POSTGRES_DB'") + if [[ "$db_exists" != "1" ]]; then + echo ">>> Creating database '${POSTGRES_DB}'..." + sudo -u postgres createdb "$POSTGRES_DB" + else + echo ">>> Database '${POSTGRES_DB}' already exists, skipping creation." + fi -su - postgres -c "psql -c \"GRANT ALL PRIVILEGES ON DATABASE ${POSTGRES_DB} TO ${POSTGRES_USER};\"" -su - postgres -c "psql -c \"ALTER DATABASE ${POSTGRES_DB} OWNER TO ${POSTGRES_USER};\"" -su - postgres -c "psql -d ${POSTGRES_DB} -c \"ALTER SCHEMA public OWNER TO ${POSTGRES_USER};\"" + user_exists=$(sudo -u postgres psql -tAc "SELECT 1 FROM pg_roles WHERE rolname='$POSTGRES_USER'") + if [[ "$user_exists" != "1" ]]; then + echo ">>> Creating user '${POSTGRES_USER}'..." + sudo -u postgres psql -c "CREATE USER $POSTGRES_USER WITH PASSWORD '$POSTGRES_PASSWORD';" + else + echo ">>> User '${POSTGRES_USER}' already exists, skipping creation." + fi + + echo ">>> Granting privileges..." + sudo -u postgres psql -c "GRANT ALL PRIVILEGES ON DATABASE $POSTGRES_DB TO $POSTGRES_USER;" + sudo -u postgres psql -c "ALTER DATABASE $POSTGRES_DB OWNER TO $POSTGRES_USER;" + sudo -u postgres psql -d "$POSTGRES_DB" -c "ALTER SCHEMA public OWNER TO $POSTGRES_USER;" +} ############################################################################## -# 5) Clone or Update Dispatcharr Code +# 5) Clone Dispatcharr Repository ############################################################################## -echo ">>> Installing or updating Dispatcharr in ${APP_DIR} ..." +clone_dispatcharr_repo() { + echo ">>> Installing or updating Dispatcharr in ${APP_DIR} ..." + + if [ ! -d "$APP_DIR" ]; then + mkdir -p "$APP_DIR" + chown "$DISPATCH_USER:$DISPATCH_GROUP" "$APP_DIR" + fi -if [ ! -d "${APP_DIR}" ]; then - echo ">>> Cloning repository for the first time..." - mkdir -p "${APP_DIR}" - chown "${DISPATCH_USER}:${DISPATCH_GROUP}" "${APP_DIR}" - su - "${DISPATCH_USER}" -c "git clone -b ${DISPATCH_BRANCH} https://github.com/Dispatcharr/Dispatcharr.git ${APP_DIR}" -else - echo ">>> Updating existing repository..." - su - "${DISPATCH_USER}" <>> Updating existing Dispatcharr repo..." + su - "$DISPATCH_USER" <>> Cloning Dispatcharr repo into ${APP_DIR}..." 
+ rm -rf "$APP_DIR"/* + chown "$DISPATCH_USER:$DISPATCH_GROUP" "$APP_DIR" + su - "$DISPATCH_USER" -c "git clone -b $DISPATCH_BRANCH https://github.com/Dispatcharr/Dispatcharr.git $APP_DIR" + fi +} ############################################################################## -# 6) Create Python Virtual Environment & Install Python Dependencies +# 6) Setup Python Environment ############################################################################## -echo ">>> Setting up Python virtual environment..." -su - "${DISPATCH_USER}" <>> Setting up Python virtual environment..." + su - "$DISPATCH_USER" <>> Linking ffmpeg into the virtual environment..." -ln -sf /usr/bin/ffmpeg ${APP_DIR}/env/bin/ffmpeg + ln -sf /usr/bin/ffmpeg "$APP_DIR/env/bin/ffmpeg" +} ############################################################################## -# 7) Build Frontend (React) +# 7) Build Frontend ############################################################################## -echo ">>> Building frontend..." -su - "${DISPATCH_USER}" <>> Building frontend..." + su - "$DISPATCH_USER" <>> Running Django migrations & collectstatic..." -su - "${DISPATCH_USER}" <>> Running Django migrations & collectstatic..." + su - "$DISPATCH_USER" </etc/systemd/system/dispatcharr.service +configure_services() { + echo ">>> Creating systemd service files..." + + # Gunicorn + cat <${SYSTEMD_DIR}/dispatcharr.service [Unit] Description=Gunicorn for Dispatcharr After=network.target postgresql.service redis-server.service @@ -211,36 +220,31 @@ After=network.target postgresql.service redis-server.service User=${DISPATCH_USER} Group=${DISPATCH_GROUP} WorkingDirectory=${APP_DIR} - RuntimeDirectory=${GUNICORN_RUNTIME_DIR} RuntimeDirectoryMode=0775 - -# Update PATH to include both the virtualenv and system binaries (for ffmpeg) Environment="PATH=${APP_DIR}/env/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin" Environment="POSTGRES_DB=${POSTGRES_DB}" Environment="POSTGRES_USER=${POSTGRES_USER}" Environment="POSTGRES_PASSWORD=${POSTGRES_PASSWORD}" Environment="POSTGRES_HOST=localhost" - +ExecStartPre=/usr/bin/bash -c 'until pg_isready -h localhost -U ${POSTGRES_USER}; do sleep 1; done' ExecStart=${APP_DIR}/env/bin/gunicorn \\ --workers=4 \\ --worker-class=gevent \\ --timeout=300 \\ --bind unix:${GUNICORN_SOCKET} \\ dispatcharr.wsgi:application - Restart=always KillMode=mixed - +SyslogIdentifier=dispatcharr +StandardOutput=journal +StandardError=journal [Install] WantedBy=multi-user.target EOF -############################################################################## -# 10) Create Systemd Service for Celery -############################################################################## - -cat </etc/systemd/system/dispatcharr-celery.service + # Celery + cat <${SYSTEMD_DIR}/dispatcharr-celery.service [Unit] Description=Celery Worker for Dispatcharr After=network.target redis-server.service @@ -256,21 +260,18 @@ Environment="POSTGRES_USER=${POSTGRES_USER}" Environment="POSTGRES_PASSWORD=${POSTGRES_PASSWORD}" Environment="POSTGRES_HOST=localhost" Environment="CELERY_BROKER_URL=redis://localhost:6379/0" - ExecStart=${APP_DIR}/env/bin/celery -A dispatcharr worker -l info - Restart=always KillMode=mixed - +SyslogIdentifier=dispatcharr-celery +StandardOutput=journal +StandardError=journal [Install] WantedBy=multi-user.target EOF -############################################################################## -# 11) Create Systemd Service for Celery Beat (Optional) -############################################################################## - 
-cat </etc/systemd/system/dispatcharr-celerybeat.service + # Celery Beat + cat <${SYSTEMD_DIR}/dispatcharr-celerybeat.service [Unit] Description=Celery Beat Scheduler for Dispatcharr After=network.target redis-server.service @@ -286,23 +287,20 @@ Environment="POSTGRES_USER=${POSTGRES_USER}" Environment="POSTGRES_PASSWORD=${POSTGRES_PASSWORD}" Environment="POSTGRES_HOST=localhost" Environment="CELERY_BROKER_URL=redis://localhost:6379/0" - ExecStart=${APP_DIR}/env/bin/celery -A dispatcharr beat -l info - Restart=always KillMode=mixed - +SyslogIdentifier=dispatcharr-celerybeat +StandardOutput=journal +StandardError=journal [Install] WantedBy=multi-user.target EOF -############################################################################## -# 12) Create Systemd Service for Daphné (WebSockets / Channels) -############################################################################## - -cat </etc/systemd/system/dispatcharr-daphne.service + # Daphne + cat <${SYSTEMD_DIR}/dispatcharr-daphne.service [Unit] -Description=Daphne for Dispatcharr (ASGI) +Description=Daphne for Dispatcharr (ASGI/WebSockets) After=network.target Requires=dispatcharr.service @@ -315,47 +313,33 @@ Environment="POSTGRES_DB=${POSTGRES_DB}" Environment="POSTGRES_USER=${POSTGRES_USER}" Environment="POSTGRES_PASSWORD=${POSTGRES_PASSWORD}" Environment="POSTGRES_HOST=localhost" - ExecStart=${APP_DIR}/env/bin/daphne -b 0.0.0.0 -p ${WEBSOCKET_PORT} dispatcharr.asgi:application - Restart=always KillMode=mixed - +SyslogIdentifier=dispatcharr-daphne +StandardOutput=journal +StandardError=journal [Install] WantedBy=multi-user.target EOF -############################################################################## -# 13) Configure Nginx -############################################################################## - -echo ">>> Configuring Nginx at /etc/nginx/sites-available/dispatcharr.conf ..." -cat </etc/nginx/sites-available/dispatcharr.conf + echo ">>> Creating Nginx config..." + cat </etc/nginx/sites-available/dispatcharr.conf server { listen ${NGINX_HTTP_PORT}; - - # Proxy to Gunicorn socket for main HTTP traffic location / { include proxy_params; proxy_pass http://unix:${GUNICORN_SOCKET}; } - - # Serve Django static files location /static/ { alias ${APP_DIR}/static/; } - - # Serve React build assets location /assets/ { alias ${APP_DIR}/frontend/dist/assets/; } - - # Serve media files if any location /media/ { alias ${APP_DIR}/media/; } - - # WebSockets for Daphné location /ws/ { proxy_pass http://127.0.0.1:${WEBSOCKET_PORT}; proxy_http_version 1.1; @@ -368,46 +352,66 @@ server { } EOF -ln -sf /etc/nginx/sites-available/dispatcharr.conf /etc/nginx/sites-enabled/dispatcharr.conf - -# Remove default site if it exists -if [ -f /etc/nginx/sites-enabled/default ]; then - rm -f /etc/nginx/sites-enabled/default -fi - -echo ">>> Testing Nginx config..." -nginx -t - -echo ">>> Restarting Nginx..." -systemctl restart nginx -systemctl enable nginx + ln -sf /etc/nginx/sites-available/dispatcharr.conf /etc/nginx/sites-enabled/dispatcharr.conf + [ -f /etc/nginx/sites-enabled/default ] && rm /etc/nginx/sites-enabled/default + nginx -t + systemctl restart nginx + systemctl enable nginx +} ############################################################################## -# 14) Start & Enable Services +# 10) Start Services ############################################################################## -echo ">>> Enabling systemd services..." 
-systemctl daemon-reload -systemctl enable dispatcharr -systemctl enable dispatcharr-celery -systemctl enable dispatcharr-celerybeat -systemctl enable dispatcharr-daphne - -echo ">>> Restarting / Starting services..." -systemctl restart dispatcharr -systemctl restart dispatcharr-celery -systemctl restart dispatcharr-celerybeat -systemctl restart dispatcharr-daphne +start_services() { + echo ">>> Enabling and starting services..." + systemctl daemon-reexec + systemctl daemon-reload + systemctl enable --now dispatcharr dispatcharr-celery dispatcharr-celerybeat dispatcharr-daphne +} ############################################################################## -# Done! +# 11) Summary ############################################################################## -echo "=================================================" -echo "Dispatcharr installation (or update) complete!" -echo "Nginx is listening on port ${NGINX_HTTP_PORT}." -echo "Gunicorn socket: ${GUNICORN_SOCKET}." -echo "WebSockets on port ${WEBSOCKET_PORT} (path /ws/)." -echo "You can check logs via 'sudo journalctl -u dispatcharr -f', etc." -echo "Visit http://:${NGINX_HTTP_PORT} in your browser." -echo "=================================================" +show_summary() { + server_ip=$(ip route get 1 | awk '{print $7; exit}') + cat < Date: Thu, 31 Jul 2025 15:54:24 -0500 Subject: [PATCH 107/857] Add ability to preview streams under a channel. --- .../components/tables/ChannelTableStreams.jsx | 66 +++++++++++++------ 1 file changed, 45 insertions(+), 21 deletions(-) diff --git a/frontend/src/components/tables/ChannelTableStreams.jsx b/frontend/src/components/tables/ChannelTableStreams.jsx index 11fa9600..991b7074 100644 --- a/frontend/src/components/tables/ChannelTableStreams.jsx +++ b/frontend/src/components/tables/ChannelTableStreams.jsx @@ -1,7 +1,7 @@ import React, { useMemo, useState, useEffect } from 'react'; import API from '../../api'; import { copyToClipboard } from '../../utils'; -import { GripHorizontal, SquareMinus, ChevronDown, ChevronRight } from 'lucide-react'; +import { GripHorizontal, SquareMinus, ChevronDown, ChevronRight, Eye } from 'lucide-react'; import { Box, ActionIcon, @@ -14,6 +14,7 @@ import { Tooltip, Collapse, Button, + } from '@mantine/core'; import { notifications } from '@mantine/notifications'; import { @@ -24,6 +25,8 @@ import { import './table.css'; import useChannelsTableStore from '../../store/channelsTable'; import usePlaylistsStore from '../../store/playlists'; +import useVideoStore from '../../store/useVideoStore'; +import useSettingsStore from '../../store/settings'; import { DndContext, KeyboardSensor, @@ -130,6 +133,15 @@ const ChannelStreams = ({ channel, isExpanded }) => { ); const playlists = usePlaylistsStore((s) => s.playlists); const authUser = useAuthStore((s) => s.user); + const showVideo = useVideoStore((s) => s.showVideo); + const env_mode = useSettingsStore((s) => s.environment.env_mode); + function handleWatchStream(streamHash) { + let vidUrl = `/proxy/ts/stream/${streamHash}`; + if (env_mode === 'dev') { + vidUrl = `${window.location.protocol}//${window.location.hostname}:5656${vidUrl}`; + } + showVideo(vidUrl); + } const [data, setData] = useState(channelStreams || []); @@ -314,25 +326,38 @@ const ChannelStreams = ({ channel, isExpanded }) => { )} {stream.url && ( - - { - e.stopPropagation(); - const success = await copyToClipboard(stream.url); - notifications.show({ - title: success ? 'URL Copied' : 'Copy Failed', - message: success ? 
'Stream URL copied to clipboard' : 'Failed to copy URL to clipboard', - color: success ? 'green' : 'red', - }); - }} - > - URL - - + <> + + { + e.stopPropagation(); + const success = await copyToClipboard(stream.url); + notifications.show({ + title: success ? 'URL Copied' : 'Copy Failed', + message: success ? 'Stream URL copied to clipboard' : 'Failed to copy URL to clipboard', + color: success ? 'green' : 'red', + }); + }} + > + URL + + + + handleWatchStream(stream.stream_hash || stream.id)} + style={{ marginLeft: 2 }} + > + + + + )} @@ -563,5 +588,4 @@ const ChannelStreams = ({ channel, isExpanded }) => { ); }; - export default ChannelStreams; From 20651a8d590bd4e8f92f045e5b780265f95e2573 Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Thu, 31 Jul 2025 20:10:36 -0500 Subject: [PATCH 108/857] Update dependencies in requirements.txt for compatibility and improvements --- requirements.txt | 20 ++++++++++---------- 1 file changed, 10 insertions(+), 10 deletions(-) diff --git a/requirements.txt b/requirements.txt index f1526ceb..01a51342 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,33 +1,33 @@ -Django==5.1.6 +Django==5.2.4 psycopg2-binary==2.9.10 -redis==4.5.5 +redis==6.2.0 celery celery[redis] -djangorestframework==3.15.2 -requests==2.32.3 +djangorestframework==3.16.0 +requests==2.32.4 psutil==7.0.0 pillow drf-yasg>=1.20.0 streamlink python-vlc yt-dlp -gevent==24.11.1 +gevent==25.5.1 daphne uwsgi django-cors-headers djangorestframework-simplejwt m3u8 -rapidfuzz==3.12.1 +rapidfuzz==3.13.0 tzlocal # PyTorch dependencies (CPU only) --extra-index-url https://download.pytorch.org/whl/cpu/ -torch==2.6.0+cpu +torch==2.7.1+cpu # ML/NLP dependencies -sentence-transformers==3.4.1 +sentence-transformers==5.0.0 channels -channels-redis +channels-redis==4.3.0 django-filter django-celery-beat -lxml==5.4.0 +lxml==6.0.0 From 953db7947644ebf3789d71bd8fdc9f0e83decabb Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Thu, 31 Jul 2025 21:26:59 -0500 Subject: [PATCH 109/857] Display stream logo and name in channel card when previewing streams. --- frontend/src/pages/Stats.jsx | 26 ++++++++++++++++++++++---- 1 file changed, 22 insertions(+), 4 deletions(-) diff --git a/frontend/src/pages/Stats.jsx b/frontend/src/pages/Stats.jsx index c3709c17..19520a21 100644 --- a/frontend/src/pages/Stats.jsx +++ b/frontend/src/pages/Stats.jsx @@ -94,6 +94,7 @@ const ChannelCard = ({ const [activeStreamId, setActiveStreamId] = useState(null); const [currentM3UProfile, setCurrentM3UProfile] = useState(null); // Add state for current M3U profile const [data, setData] = useState([]); + const [previewedStream, setPreviewedStream] = useState(null); // Get M3U account data from the playlists store const m3uAccounts = usePlaylistsStore((s) => s.playlists); @@ -425,12 +426,29 @@ const ChannelCard = ({ // Get logo URL from the logos object if available const logoUrl = - channel.logo_id && logos && logos[channel.logo_id] + (channel.logo_id && logos && logos[channel.logo_id] ? 
logos[channel.logo_id].cache_url - : null; + : null) || + (previewedStream && previewedStream.logo_url) || + null; - // Ensure these values exist to prevent errors - const channelName = channel.name || 'Unnamed Channel'; + useEffect(() => { + let isMounted = true; + // Only fetch if we have a stream_id and NO channel.name + if (!channel.name && channel.stream_id) { + API.getStreamsByIds([channel.stream_id]).then((streams) => { + if (isMounted && streams && streams.length > 0) { + setPreviewedStream(streams[0]); + } + }); + } + return () => { isMounted = false; }; + }, [channel.name, channel.stream_id]); + + const channelName = + channel.name || + previewedStream?.name || + 'Unnamed Channel'; const uptime = channel.uptime || 0; const bitrates = channel.bitrates || []; const totalBytes = channel.total_bytes || 0; From 7b5a617bf829f91f26f7dafa1dbc0e1dfea7fa6a Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Fri, 1 Aug 2025 11:28:51 -0500 Subject: [PATCH 110/857] Use custom validator for urls fields to allow for non fqdn hostnames. Fixes #63 --- apps/channels/serializers.py | 11 +++++++++-- apps/epg/serializers.py | 7 +++++++ apps/m3u/serializers.py | 7 +++++++ core/utils.py | 32 ++++++++++++++++++++++++++++++++ 4 files changed, 55 insertions(+), 2 deletions(-) diff --git a/apps/channels/serializers.py b/apps/channels/serializers.py index 32fd4a74..7c5ddd54 100644 --- a/apps/channels/serializers.py +++ b/apps/channels/serializers.py @@ -16,6 +16,7 @@ from apps.epg.models import EPGData from django.urls import reverse from rest_framework import serializers from django.utils import timezone +from core.utils import validate_flexible_url class LogoSerializer(serializers.ModelSerializer): @@ -32,10 +33,10 @@ class LogoSerializer(serializers.ModelSerializer): """Validate that the URL is unique for creation or update""" if self.instance and self.instance.url == value: return value - + if Logo.objects.filter(url=value).exists(): raise serializers.ValidationError("A logo with this URL already exists.") - + return value def create(self, validated_data): @@ -79,6 +80,12 @@ class LogoSerializer(serializers.ModelSerializer): # Stream # class StreamSerializer(serializers.ModelSerializer): + url = serializers.CharField( + required=False, + allow_blank=True, + allow_null=True, + validators=[validate_flexible_url] + ) stream_profile_id = serializers.PrimaryKeyRelatedField( queryset=StreamProfile.objects.all(), source="stream_profile", diff --git a/apps/epg/serializers.py b/apps/epg/serializers.py index 09390237..2f97cebf 100644 --- a/apps/epg/serializers.py +++ b/apps/epg/serializers.py @@ -1,3 +1,4 @@ +from core.utils import validate_flexible_url from rest_framework import serializers from .models import EPGSource, EPGData, ProgramData from apps.channels.models import Channel @@ -5,6 +6,12 @@ from apps.channels.models import Channel class EPGSourceSerializer(serializers.ModelSerializer): epg_data_ids = serializers.SerializerMethodField() read_only_fields = ['created_at', 'updated_at'] + url = serializers.CharField( + required=False, + allow_blank=True, + allow_null=True, + validators=[validate_flexible_url] + ) class Meta: model = EPGSource diff --git a/apps/m3u/serializers.py b/apps/m3u/serializers.py index 7394f00b..a86227aa 100644 --- a/apps/m3u/serializers.py +++ b/apps/m3u/serializers.py @@ -1,3 +1,4 @@ +from core.utils import validate_flexible_url from rest_framework import serializers from rest_framework.response import Response from .models import M3UAccount, M3UFilter, ServerGroup, 
M3UAccountProfile @@ -76,6 +77,12 @@ class M3UAccountSerializer(serializers.ModelSerializer): channel_groups = ChannelGroupM3UAccountSerializer( source="channel_group", many=True, required=False ) + server_url = serializers.CharField( + required=False, + allow_blank=True, + allow_null=True, + validators=[validate_flexible_url], + ) class Meta: model = M3UAccount diff --git a/core/utils.py b/core/utils.py index 932af979..36ac5fef 100644 --- a/core/utils.py +++ b/core/utils.py @@ -9,6 +9,8 @@ from redis.exceptions import ConnectionError, TimeoutError from django.core.cache import cache from asgiref.sync import async_to_sync from channels.layers import get_channel_layer +from django.core.validators import URLValidator +from django.core.exceptions import ValidationError import gc logger = logging.getLogger(__name__) @@ -354,3 +356,33 @@ def is_protected_path(file_path): return True return False + +def validate_flexible_url(value): + """ + Custom URL validator that accepts URLs with hostnames that aren't FQDNs. + This allows URLs like "http://hostname/" which + Django's standard URLValidator rejects. + """ + if not value: + return # Allow empty values since the field is nullable + + # Create a standard Django URL validator + url_validator = URLValidator() + + try: + # First try the standard validation + url_validator(value) + except ValidationError as e: + # If standard validation fails, check if it's a non-FQDN hostname + import re + + # More flexible pattern for non-FQDN hostnames with paths + # Matches: http://hostname, http://hostname/, http://hostname:port/path/to/file.xml + non_fqdn_pattern = r'^https?://[a-zA-Z0-9]([a-zA-Z0-9\-]{0,61}[a-zA-Z0-9])?(\:[0-9]+)?(/[^\s]*)?$' + non_fqdn_match = re.match(non_fqdn_pattern, value) + + if non_fqdn_match: + return # Accept non-FQDN hostnames + + # If it doesn't match our flexible patterns, raise the original error + raise ValidationError("Enter a valid URL.") From ead76fe6611476d5cdd163b51e6d0910c4c7d3ce Mon Sep 17 00:00:00 2001 From: dekzter Date: Fri, 1 Aug 2025 15:02:43 -0400 Subject: [PATCH 111/857] first run at m3u filtering --- apps/m3u/api_urls.py | 36 +- apps/m3u/api_views.py | 19 +- .../0013_alter_m3ufilter_filter_type.py | 18 + ...alter_m3ufilter_options_m3ufilter_order.py | 22 + apps/m3u/models.py | 8 +- apps/m3u/serializers.py | 10 +- apps/m3u/tasks.py | 1016 ++++++++++++----- frontend/src/api.js | 61 +- frontend/src/components/forms/M3U.jsx | 44 +- frontend/src/components/forms/M3UFilter.jsx | 126 ++ frontend/src/components/forms/M3UFilters.jsx | 327 ++++++ frontend/src/constants.js | 27 +- frontend/src/store/playlists.jsx | 24 +- 13 files changed, 1400 insertions(+), 338 deletions(-) create mode 100644 apps/m3u/migrations/0013_alter_m3ufilter_filter_type.py create mode 100644 apps/m3u/migrations/0014_alter_m3ufilter_options_m3ufilter_order.py create mode 100644 frontend/src/components/forms/M3UFilter.jsx create mode 100644 frontend/src/components/forms/M3UFilters.jsx diff --git a/apps/m3u/api_urls.py b/apps/m3u/api_urls.py index 41fc2fbc..80e54bb2 100644 --- a/apps/m3u/api_urls.py +++ b/apps/m3u/api_urls.py @@ -1,18 +1,38 @@ from django.urls import path, include from rest_framework.routers import DefaultRouter -from .api_views import M3UAccountViewSet, M3UFilterViewSet, ServerGroupViewSet, RefreshM3UAPIView, RefreshSingleM3UAPIView, UserAgentViewSet, M3UAccountProfileViewSet +from .api_views import ( + M3UAccountViewSet, + M3UFilterViewSet, + ServerGroupViewSet, + RefreshM3UAPIView, + RefreshSingleM3UAPIView, + UserAgentViewSet, + 
M3UAccountProfileViewSet, +) -app_name = 'm3u' +app_name = "m3u" router = DefaultRouter() -router.register(r'accounts', M3UAccountViewSet, basename='m3u-account') -router.register(r'accounts\/(?P\d+)\/profiles', M3UAccountProfileViewSet, basename='m3u-account-profiles') -router.register(r'filters', M3UFilterViewSet, basename='m3u-filter') -router.register(r'server-groups', ServerGroupViewSet, basename='server-group') +router.register(r"accounts", M3UAccountViewSet, basename="m3u-account") +router.register( + r"accounts\/(?P\d+)\/profiles", + M3UAccountProfileViewSet, + basename="m3u-account-profiles", +) +router.register( + r"accounts\/(?P\d+)\/filters", + M3UFilterViewSet, + basename="m3u-filters", +) +router.register(r"server-groups", ServerGroupViewSet, basename="server-group") urlpatterns = [ - path('refresh/', RefreshM3UAPIView.as_view(), name='m3u_refresh'), - path('refresh//', RefreshSingleM3UAPIView.as_view(), name='m3u_refresh_single'), + path("refresh/", RefreshM3UAPIView.as_view(), name="m3u_refresh"), + path( + "refresh//", + RefreshSingleM3UAPIView.as_view(), + name="m3u_refresh_single", + ), ] urlpatterns += router.urls diff --git a/apps/m3u/api_views.py b/apps/m3u/api_views.py index d3739f19..46676e93 100644 --- a/apps/m3u/api_views.py +++ b/apps/m3u/api_views.py @@ -183,8 +183,6 @@ class M3UAccountViewSet(viewsets.ModelViewSet): class M3UFilterViewSet(viewsets.ModelViewSet): - """Handles CRUD operations for M3U filters""" - queryset = M3UFilter.objects.all() serializer_class = M3UFilterSerializer @@ -194,6 +192,23 @@ class M3UFilterViewSet(viewsets.ModelViewSet): except KeyError: return [Authenticated()] + def get_queryset(self): + m3u_account_id = self.kwargs["account_id"] + return M3UFilter.objects.filter(m3u_account_id=m3u_account_id) + + def perform_create(self, serializer): + # Get the account ID from the URL + account_id = self.kwargs["account_id"] + + # # Get the M3UAccount instance for the account_id + # m3u_account = M3UAccount.objects.get(id=account_id) + + # Save the 'm3u_account' in the serializer context + serializer.context["m3u_account"] = account_id + + # Perform the actual save + serializer.save(m3u_account_id=account_id) + class ServerGroupViewSet(viewsets.ModelViewSet): """Handles CRUD operations for Server Groups""" diff --git a/apps/m3u/migrations/0013_alter_m3ufilter_filter_type.py b/apps/m3u/migrations/0013_alter_m3ufilter_filter_type.py new file mode 100644 index 00000000..0b0a8a1d --- /dev/null +++ b/apps/m3u/migrations/0013_alter_m3ufilter_filter_type.py @@ -0,0 +1,18 @@ +# Generated by Django 5.1.6 on 2025-07-22 21:16 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('m3u', '0012_alter_m3uaccount_refresh_interval'), + ] + + operations = [ + migrations.AlterField( + model_name='m3ufilter', + name='filter_type', + field=models.CharField(choices=[('group', 'Group'), ('name', 'Stream Name'), ('url', 'Stream URL')], default='group', help_text='Filter based on either group title or stream name.', max_length=50), + ), + ] diff --git a/apps/m3u/migrations/0014_alter_m3ufilter_options_m3ufilter_order.py b/apps/m3u/migrations/0014_alter_m3ufilter_options_m3ufilter_order.py new file mode 100644 index 00000000..3510bfc5 --- /dev/null +++ b/apps/m3u/migrations/0014_alter_m3ufilter_options_m3ufilter_order.py @@ -0,0 +1,22 @@ +# Generated by Django 5.1.6 on 2025-07-31 17:14 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('m3u', 
'0013_alter_m3ufilter_filter_type'), + ] + + operations = [ + migrations.AlterModelOptions( + name='m3ufilter', + options={'ordering': ['order']}, + ), + migrations.AddField( + model_name='m3ufilter', + name='order', + field=models.PositiveIntegerField(default=0), + ), + ] diff --git a/apps/m3u/models.py b/apps/m3u/models.py index 94ec88fc..b7993ef6 100644 --- a/apps/m3u/models.py +++ b/apps/m3u/models.py @@ -155,9 +155,11 @@ class M3UFilter(models.Model): """Defines filters for M3U accounts based on stream name or group title.""" FILTER_TYPE_CHOICES = ( - ("group", "Group Title"), + ("group", "Group"), ("name", "Stream Name"), + ("url", "Stream URL"), ) + m3u_account = models.ForeignKey( M3UAccount, on_delete=models.CASCADE, @@ -177,6 +179,7 @@ class M3UFilter(models.Model): default=True, help_text="If True, matching items are excluded; if False, only matches are included.", ) + order = models.PositiveIntegerField(default=0) def applies_to(self, stream_name, group_name): target = group_name if self.filter_type == "group" else stream_name @@ -226,9 +229,6 @@ class ServerGroup(models.Model): return self.name -from django.db import models - - class M3UAccountProfile(models.Model): """Represents a profile associated with an M3U Account.""" diff --git a/apps/m3u/serializers.py b/apps/m3u/serializers.py index a86227aa..3bf0e335 100644 --- a/apps/m3u/serializers.py +++ b/apps/m3u/serializers.py @@ -16,11 +16,9 @@ logger = logging.getLogger(__name__) class M3UFilterSerializer(serializers.ModelSerializer): """Serializer for M3U Filters""" - channel_groups = ChannelGroupM3UAccountSerializer(source="m3u_account", many=True) - class Meta: model = M3UFilter - fields = ["id", "filter_type", "regex_pattern", "exclude", "channel_groups"] + fields = ["id", "filter_type", "regex_pattern", "exclude", "order"] class M3UAccountProfileSerializer(serializers.ModelSerializer): @@ -64,7 +62,7 @@ class M3UAccountProfileSerializer(serializers.ModelSerializer): class M3UAccountSerializer(serializers.ModelSerializer): """Serializer for M3U Account""" - filters = M3UFilterSerializer(many=True, read_only=True) + filters = serializers.SerializerMethodField() # Include user_agent as a mandatory field using its primary key. 
user_agent = serializers.PrimaryKeyRelatedField( queryset=UserAgent.objects.all(), @@ -149,6 +147,10 @@ class M3UAccountSerializer(serializers.ModelSerializer): return instance + def get_filters(self, obj): + filters = obj.filters.order_by("order") + return M3UFilterSerializer(filters, many=True).data + class ServerGroupSerializer(serializers.ModelSerializer): """Serializer for Server Group""" diff --git a/apps/m3u/tasks.py b/apps/m3u/tasks.py index 40a395ce..588705a4 100644 --- a/apps/m3u/tasks.py +++ b/apps/m3u/tasks.py @@ -18,7 +18,12 @@ from channels.layers import get_channel_layer from django.utils import timezone import time import json -from core.utils import RedisClient, acquire_task_lock, release_task_lock, natural_sort_key +from core.utils import ( + RedisClient, + acquire_task_lock, + release_task_lock, + natural_sort_key, +) from core.models import CoreSettings, UserAgent from asgiref.sync import async_to_sync from core.xtream_codes import Client as XCClient @@ -29,6 +34,7 @@ logger = logging.getLogger(__name__) BATCH_SIZE = 1000 m3u_dir = os.path.join(settings.MEDIA_ROOT, "cached_m3u") + def fetch_m3u_lines(account, use_cache=False): os.makedirs(m3u_dir, exist_ok=True) file_path = os.path.join(m3u_dir, f"{account.id}.m3u") @@ -39,27 +45,35 @@ def fetch_m3u_lines(account, use_cache=False): try: # Try to get account-specific user agent first user_agent_obj = account.get_user_agent() - user_agent = user_agent_obj.user_agent if user_agent_obj else "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36" + user_agent = ( + user_agent_obj.user_agent + if user_agent_obj + else "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36" + ) - logger.debug(f"Using user agent: {user_agent} for M3U account: {account.name}") + logger.debug( + f"Using user agent: {user_agent} for M3U account: {account.name}" + ) headers = {"User-Agent": user_agent} logger.info(f"Fetching from URL {account.server_url}") # Set account status to FETCHING before starting download account.status = M3UAccount.Status.FETCHING account.last_message = "Starting download..." 
- account.save(update_fields=['status', 'last_message']) + account.save(update_fields=["status", "last_message"]) - response = requests.get(account.server_url, headers=headers, stream=True) + response = requests.get( + account.server_url, headers=headers, stream=True + ) response.raise_for_status() - total_size = int(response.headers.get('Content-Length', 0)) + total_size = int(response.headers.get("Content-Length", 0)) downloaded = 0 start_time = time.time() last_update_time = start_time progress = 0 - with open(file_path, 'wb') as file: + with open(file_path, "wb") as file: send_m3u_update(account.id, "downloading", 0) for chunk in response.iter_content(chunk_size=8192): if chunk: @@ -76,7 +90,11 @@ def fetch_m3u_lines(account, use_cache=False): progress = (downloaded / total_size) * 100 # Time remaining (in seconds) - time_remaining = (total_size - downloaded) / (speed * 1024) if speed > 0 else 0 + time_remaining = ( + (total_size - downloaded) / (speed * 1024) + if speed > 0 + else 0 + ) current_time = time.time() if current_time - last_update_time >= 0.5: @@ -85,26 +103,36 @@ def fetch_m3u_lines(account, use_cache=False): # Update the account's last_message with detailed progress info progress_msg = f"Downloading: {progress:.1f}% - {speed:.1f} KB/s - {time_remaining:.1f}s remaining" account.last_message = progress_msg - account.save(update_fields=['last_message']) + account.save(update_fields=["last_message"]) - send_m3u_update(account.id, "downloading", progress, - speed=speed, - elapsed_time=elapsed_time, - time_remaining=time_remaining, - message=progress_msg) + send_m3u_update( + account.id, + "downloading", + progress, + speed=speed, + elapsed_time=elapsed_time, + time_remaining=time_remaining, + message=progress_msg, + ) # Final update with 100% progress final_msg = f"Download complete. 
Size: {total_size/1024/1024:.2f} MB, Time: {time.time() - start_time:.1f}s" account.last_message = final_msg - account.save(update_fields=['last_message']) + account.save(update_fields=["last_message"]) send_m3u_update(account.id, "downloading", 100, message=final_msg) except Exception as e: logger.error(f"Error fetching M3U from URL {account.server_url}: {e}") # Update account status and send error notification account.status = M3UAccount.Status.ERROR account.last_message = f"Error downloading M3U file: {str(e)}" - account.save(update_fields=['status', 'last_message']) - send_m3u_update(account.id, "downloading", 100, status="error", error=f"Error downloading M3U file: {str(e)}") + account.save(update_fields=["status", "last_message"]) + send_m3u_update( + account.id, + "downloading", + 100, + status="error", + error=f"Error downloading M3U file: {str(e)}", + ) return [], False # Return empty list and False for success # Check if the file exists and is not empty @@ -113,45 +141,55 @@ def fetch_m3u_lines(account, use_cache=False): logger.error(error_msg) account.status = M3UAccount.Status.ERROR account.last_message = error_msg - account.save(update_fields=['status', 'last_message']) - send_m3u_update(account.id, "downloading", 100, status="error", error=error_msg) + account.save(update_fields=["status", "last_message"]) + send_m3u_update( + account.id, "downloading", 100, status="error", error=error_msg + ) return [], False # Return empty list and False for success try: - with open(file_path, 'r', encoding='utf-8') as f: + with open(file_path, "r", encoding="utf-8") as f: return f.readlines(), True except Exception as e: error_msg = f"Error reading M3U file: {str(e)}" logger.error(error_msg) account.status = M3UAccount.Status.ERROR account.last_message = error_msg - account.save(update_fields=['status', 'last_message']) - send_m3u_update(account.id, "downloading", 100, status="error", error=error_msg) + account.save(update_fields=["status", "last_message"]) + send_m3u_update( + account.id, "downloading", 100, status="error", error=error_msg + ) return [], False elif account.file_path: try: - if account.file_path.endswith('.gz'): - with gzip.open(account.file_path, 'rt', encoding='utf-8') as f: + if account.file_path.endswith(".gz"): + with gzip.open(account.file_path, "rt", encoding="utf-8") as f: return f.readlines(), True - elif account.file_path.endswith('.zip'): - with zipfile.ZipFile(account.file_path, 'r') as zip_file: + elif account.file_path.endswith(".zip"): + with zipfile.ZipFile(account.file_path, "r") as zip_file: for name in zip_file.namelist(): - if name.endswith('.m3u'): + if name.endswith(".m3u"): with zip_file.open(name) as f: - return [line.decode('utf-8') for line in f.readlines()], True + return [ + line.decode("utf-8") for line in f.readlines() + ], True - error_msg = f"No .m3u file found in ZIP archive: {account.file_path}" + error_msg = ( + f"No .m3u file found in ZIP archive: {account.file_path}" + ) logger.warning(error_msg) account.status = M3UAccount.Status.ERROR account.last_message = error_msg - account.save(update_fields=['status', 'last_message']) - send_m3u_update(account.id, "downloading", 100, status="error", error=error_msg) + account.save(update_fields=["status", "last_message"]) + send_m3u_update( + account.id, "downloading", 100, status="error", error=error_msg + ) return [], False else: - with open(account.file_path, 'r', encoding='utf-8') as f: + with open(account.file_path, "r", encoding="utf-8") as f: return f.readlines(), True except (IOError, 
OSError, zipfile.BadZipFile, gzip.BadGzipFile) as e: @@ -159,8 +197,10 @@ def fetch_m3u_lines(account, use_cache=False): logger.error(error_msg) account.status = M3UAccount.Status.ERROR account.last_message = error_msg - account.save(update_fields=['status', 'last_message']) - send_m3u_update(account.id, "downloading", 100, status="error", error=error_msg) + account.save(update_fields=["status", "last_message"]) + send_m3u_update( + account.id, "downloading", 100, status="error", error=error_msg + ) return [], False # Neither server_url nor uploaded_file is available @@ -168,10 +208,11 @@ def fetch_m3u_lines(account, use_cache=False): logger.error(error_msg) account.status = M3UAccount.Status.ERROR account.last_message = error_msg - account.save(update_fields=['status', 'last_message']) + account.save(update_fields=["status", "last_message"]) send_m3u_update(account.id, "downloading", 100, status="error", error=error_msg) return [], False + def get_case_insensitive_attr(attributes, key, default=""): """Get attribute value using case-insensitive key lookup.""" for attr_key, attr_value in attributes.items(): @@ -179,6 +220,7 @@ def get_case_insensitive_attr(attributes, key, default=""): return attr_value return default + def parse_extinf_line(line: str) -> dict: """ Parse an EXTINF line from an M3U file. @@ -192,7 +234,7 @@ def parse_extinf_line(line: str) -> dict: """ if not line.startswith("#EXTINF:"): return None - content = line[len("#EXTINF:"):].strip() + content = line[len("#EXTINF:") :].strip() # Split on the first comma that is not inside quotes. parts = re.split(r',(?=(?:[^"]*"[^"]*")*[^"]*$)', content, maxsplit=1) if len(parts) != 2: @@ -200,21 +242,9 @@ def parse_extinf_line(line: str) -> dict: attributes_part, display_name = parts[0], parts[1].strip() attrs = dict(re.findall(r'([^\s]+)=["\']([^"\']+)["\']', attributes_part)) # Use tvg-name attribute if available; otherwise, use the display name. 
- name = get_case_insensitive_attr(attrs, 'tvg-name', display_name) - return { - 'attributes': attrs, - 'display_name': display_name, - 'name': name - } + name = get_case_insensitive_attr(attrs, "tvg-name", display_name) + return {"attributes": attrs, "display_name": display_name, "name": name} -def _matches_filters(stream_name: str, group_name: str, filters): - """Check if a stream or group name matches a precompiled regex filter.""" - compiled_filters = [(re.compile(f.regex_pattern, re.IGNORECASE), f.exclude) for f in filters] - for pattern, exclude in compiled_filters: - target = group_name if f.filter_type == 'group' else stream_name - if pattern.search(target or ''): - return exclude - return False @shared_task def refresh_m3u_accounts(): @@ -229,6 +259,7 @@ def refresh_m3u_accounts(): logger.info(msg) return msg + def check_field_lengths(streams_to_create): for stream in streams_to_create: for field, value in stream.__dict__.items(): @@ -238,19 +269,44 @@ def check_field_lengths(streams_to_create): print("") print("") + @shared_task def process_groups(account, groups): - existing_groups = {group.name: group for group in ChannelGroup.objects.filter(name__in=groups.keys())} + existing_groups = { + group.name: group + for group in ChannelGroup.objects.filter(name__in=groups.keys()) + } logger.info(f"Currently {len(existing_groups)} existing groups") + compiled_filters = [ + (re.compile(f.regex_pattern), f) + for f in account.filters.order_by("order") + if f.filter_type == "group" + ] + group_objs = [] groups_to_create = [] for group_name, custom_props in groups.items(): logger.debug(f"Handling group for M3U account {account.id}: {group_name}") - if (group_name not in existing_groups): - groups_to_create.append(ChannelGroup( - name=group_name, - )) + + include = True + for pattern, filter in compiled_filters: + if pattern.search(group_name): + logger.debug( + f"Group {group_name} matches filter pattern {filter.regex_pattern}" + ) + include = not filter.exclude + break + + if not include: + continue + + if group_name not in existing_groups: + groups_to_create.append( + ChannelGroup( + name=group_name, + ) + ) else: group_objs.append(existing_groups[group_name]) @@ -264,17 +320,17 @@ def process_groups(account, groups): for group in group_objs: # Ensure we include the xc_id in the custom_properties custom_props = groups.get(group.name, {}) - relations.append(ChannelGroupM3UAccount( - channel_group=group, - m3u_account=account, - custom_properties=json.dumps(custom_props), - enabled=True, # Default to enabled - )) + relations.append( + ChannelGroupM3UAccount( + channel_group=group, + m3u_account=account, + custom_properties=json.dumps(custom_props), + enabled=True, # Default to enabled + ) + ) + + ChannelGroupM3UAccount.objects.bulk_create(relations, ignore_conflicts=True) - ChannelGroupM3UAccount.objects.bulk_create( - relations, - ignore_conflicts=True - ) @shared_task def process_xc_category(account_id, batch, groups, hash_keys): @@ -285,14 +341,21 @@ def process_xc_category(account_id, batch, groups, hash_keys): stream_hashes = {} try: - with XCClient(account.server_url, account.username, account.password, account.get_user_agent()) as xc_client: + with XCClient( + account.server_url, + account.username, + account.password, + account.get_user_agent(), + ) as xc_client: # Log the batch details to help with debugging logger.debug(f"Processing XC batch: {batch}") for group_name, props in batch.items(): # Check if we have a valid xc_id for this group - if 'xc_id' not in props: - 
logger.error(f"Missing xc_id for group {group_name} in batch {batch}") + if "xc_id" not in props: + logger.error( + f"Missing xc_id for group {group_name} in batch {batch}" + ) continue # Get actual group ID from the mapping @@ -302,14 +365,20 @@ def process_xc_category(account_id, batch, groups, hash_keys): continue try: - logger.debug(f"Fetching streams for XC category: {group_name} (ID: {props['xc_id']})") - streams = xc_client.get_live_category_streams(props['xc_id']) + logger.debug( + f"Fetching streams for XC category: {group_name} (ID: {props['xc_id']})" + ) + streams = xc_client.get_live_category_streams(props["xc_id"]) if not streams: - logger.warning(f"No streams found for XC category {group_name} (ID: {props['xc_id']})") + logger.warning( + f"No streams found for XC category {group_name} (ID: {props['xc_id']})" + ) continue - logger.debug(f"Found {len(streams)} streams for category {group_name}") + logger.debug( + f"Found {len(streams)} streams for category {group_name}" + ) for stream in streams: name = stream["name"] @@ -318,7 +387,9 @@ def process_xc_category(account_id, batch, groups, hash_keys): tvg_logo = stream.get("stream_icon", "") group_title = group_name - stream_hash = Stream.generate_hash_key(name, url, tvg_id, hash_keys) + stream_hash = Stream.generate_hash_key( + name, url, tvg_id, hash_keys + ) stream_props = { "name": name, "url": url, @@ -333,23 +404,38 @@ def process_xc_category(account_id, batch, groups, hash_keys): if stream_hash not in stream_hashes: stream_hashes[stream_hash] = stream_props except Exception as e: - logger.error(f"Error processing XC category {group_name} (ID: {props['xc_id']}): {str(e)}") + logger.error( + f"Error processing XC category {group_name} (ID: {props['xc_id']}): {str(e)}" + ) continue # Process all found streams - existing_streams = {s.stream_hash: s for s in Stream.objects.filter(stream_hash__in=stream_hashes.keys())} + existing_streams = { + s.stream_hash: s + for s in Stream.objects.filter(stream_hash__in=stream_hashes.keys()) + } for stream_hash, stream_props in stream_hashes.items(): if stream_hash in existing_streams: obj = existing_streams[stream_hash] - existing_attr = {field.name: getattr(obj, field.name) for field in Stream._meta.fields if field != 'channel_group_id'} - changed = any(existing_attr[key] != value for key, value in stream_props.items() if key != 'channel_group_id') + existing_attr = { + field.name: getattr(obj, field.name) + for field in Stream._meta.fields + if field != "channel_group_id" + } + changed = any( + existing_attr[key] != value + for key, value in stream_props.items() + if key != "channel_group_id" + ) if changed: for key, value in stream_props.items(): setattr(obj, key, value) obj.last_seen = timezone.now() - obj.updated_at = timezone.now() # Update timestamp only for changed streams + obj.updated_at = ( + timezone.now() + ) # Update timestamp only for changed streams streams_to_update.append(obj) del existing_streams[stream_hash] else: @@ -360,7 +446,9 @@ def process_xc_category(account_id, batch, groups, hash_keys): existing_streams[stream_hash] = obj else: stream_props["last_seen"] = timezone.now() - stream_props["updated_at"] = timezone.now() # Set initial updated_at for new streams + stream_props["updated_at"] = ( + timezone.now() + ) # Set initial updated_at for new streams streams_to_create.append(Stream(**stream_props)) try: @@ -370,14 +458,28 @@ def process_xc_category(account_id, batch, groups, hash_keys): if streams_to_update: # We need to split the bulk update to correctly handle 
updated_at # First, get the subset of streams that have content changes - changed_streams = [s for s in streams_to_update if hasattr(s, 'updated_at') and s.updated_at] - unchanged_streams = [s for s in streams_to_update if not hasattr(s, 'updated_at') or not s.updated_at] + changed_streams = [ + s + for s in streams_to_update + if hasattr(s, "updated_at") and s.updated_at + ] + unchanged_streams = [ + s + for s in streams_to_update + if not hasattr(s, "updated_at") or not s.updated_at + ] # Update changed streams with all fields including updated_at if changed_streams: Stream.objects.bulk_update( changed_streams, - {key for key in stream_props.keys() if key not in ["m3u_account", "stream_hash"] and key not in hash_keys} | {"last_seen", "updated_at"} + { + key + for key in stream_props.keys() + if key not in ["m3u_account", "stream_hash"] + and key not in hash_keys + } + | {"last_seen", "updated_at"}, ) # Update unchanged streams with only last_seen @@ -401,11 +503,18 @@ def process_xc_category(account_id, batch, groups, hash_keys): return retval + @shared_task def process_m3u_batch(account_id, batch, groups, hash_keys): """Processes a batch of M3U streams using bulk operations.""" account = M3UAccount.objects.get(id=account_id) + compiled_filters = [ + (re.compile(f.regex_pattern), f) + for f in account.filters.order_by("order") + if f.filter_type != "group" + ] + streams_to_create = [] streams_to_update = [] stream_hashes = {} @@ -415,12 +524,34 @@ def process_m3u_batch(account_id, batch, groups, hash_keys): for stream_info in batch: try: name, url = stream_info["name"], stream_info["url"] - tvg_id, tvg_logo = get_case_insensitive_attr(stream_info["attributes"], "tvg-id", ""), get_case_insensitive_attr(stream_info["attributes"], "tvg-logo", "") - group_title = get_case_insensitive_attr(stream_info["attributes"], "group-title", "Default Group") + + include = True + for pattern, filter in compiled_filters: + logger.debug(f"Checking filter patterh {pattern}") + target = url if filter.filter_type == "url" else name + if pattern.search(target or ""): + logger.debug( + f"Stream {name} - {url} matches filter pattern {filter.regex_pattern}" + ) + include = not filter.exclude + break + + if not include: + logger.debug(f"Stream excluded by filter, skipping.") + continue + + tvg_id, tvg_logo = get_case_insensitive_attr( + stream_info["attributes"], "tvg-id", "" + ), get_case_insensitive_attr(stream_info["attributes"], "tvg-logo", "") + group_title = get_case_insensitive_attr( + stream_info["attributes"], "group-title", "Default Group" + ) # Filter out disabled groups for this account if group_title not in groups: - logger.debug(f"Skipping stream in disabled group: {group_title}") + logger.debug( + f"Skipping stream in disabled or excluded group: {group_title}" + ) continue stream_hash = Stream.generate_hash_key(name, url, tvg_id, hash_keys) @@ -441,19 +572,32 @@ def process_m3u_batch(account_id, batch, groups, hash_keys): logger.error(f"Failed to process stream {name}: {e}") logger.error(json.dumps(stream_info)) - existing_streams = {s.stream_hash: s for s in Stream.objects.filter(stream_hash__in=stream_hashes.keys())} + existing_streams = { + s.stream_hash: s + for s in Stream.objects.filter(stream_hash__in=stream_hashes.keys()) + } for stream_hash, stream_props in stream_hashes.items(): if stream_hash in existing_streams: obj = existing_streams[stream_hash] - existing_attr = {field.name: getattr(obj, field.name) for field in Stream._meta.fields if field != 'channel_group_id'} - changed = 
any(existing_attr[key] != value for key, value in stream_props.items() if key != 'channel_group_id') + existing_attr = { + field.name: getattr(obj, field.name) + for field in Stream._meta.fields + if field != "channel_group_id" + } + changed = any( + existing_attr[key] != value + for key, value in stream_props.items() + if key != "channel_group_id" + ) if changed: for key, value in stream_props.items(): setattr(obj, key, value) obj.last_seen = timezone.now() - obj.updated_at = timezone.now() # Update timestamp only for changed streams + obj.updated_at = ( + timezone.now() + ) # Update timestamp only for changed streams streams_to_update.append(obj) del existing_streams[stream_hash] else: @@ -464,7 +608,9 @@ def process_m3u_batch(account_id, batch, groups, hash_keys): existing_streams[stream_hash] = obj else: stream_props["last_seen"] = timezone.now() - stream_props["updated_at"] = timezone.now() # Set initial updated_at for new streams + stream_props["updated_at"] = ( + timezone.now() + ) # Set initial updated_at for new streams streams_to_create.append(Stream(**stream_props)) try: @@ -474,14 +620,28 @@ def process_m3u_batch(account_id, batch, groups, hash_keys): if streams_to_update: # We need to split the bulk update to correctly handle updated_at # First, get the subset of streams that have content changes - changed_streams = [s for s in streams_to_update if hasattr(s, 'updated_at') and s.updated_at] - unchanged_streams = [s for s in streams_to_update if not hasattr(s, 'updated_at') or not s.updated_at] + changed_streams = [ + s + for s in streams_to_update + if hasattr(s, "updated_at") and s.updated_at + ] + unchanged_streams = [ + s + for s in streams_to_update + if not hasattr(s, "updated_at") or not s.updated_at + ] # Update changed streams with all fields including updated_at if changed_streams: Stream.objects.bulk_update( changed_streams, - {key for key in stream_props.keys() if key not in ["m3u_account", "stream_hash"] and key not in hash_keys} | {"last_seen", "updated_at"} + { + key + for key in stream_props.keys() + if key not in ["m3u_account", "stream_hash"] + and key not in hash_keys + } + | {"last_seen", "updated_at"}, ) # Update unchanged streams with only last_seen @@ -496,35 +656,37 @@ def process_m3u_batch(account_id, batch, groups, hash_keys): retval = f"M3U account: {account_id}, Batch processed: {len(streams_to_create)} created, {len(streams_to_update)} updated." 
# Aggressive garbage collection - #del streams_to_create, streams_to_update, stream_hashes, existing_streams - #from core.utils import cleanup_memory - #cleanup_memory(log_usage=True, force_collection=True) + # del streams_to_create, streams_to_update, stream_hashes, existing_streams + # from core.utils import cleanup_memory + # cleanup_memory(log_usage=True, force_collection=True) return retval + def cleanup_streams(account_id, scan_start_time=timezone.now): account = M3UAccount.objects.get(id=account_id, is_active=True) existing_groups = ChannelGroup.objects.filter( m3u_account__m3u_account=account, m3u_account__enabled=True, - ).values_list('id', flat=True) - logger.info(f"Found {len(existing_groups)} active groups for M3U account {account_id}") + ).values_list("id", flat=True) + logger.info( + f"Found {len(existing_groups)} active groups for M3U account {account_id}" + ) # Calculate cutoff date for stale streams stale_cutoff = scan_start_time - timezone.timedelta(days=account.stale_stream_days) - logger.info(f"Removing streams not seen since {stale_cutoff} for M3U account {account_id}") + logger.info( + f"Removing streams not seen since {stale_cutoff} for M3U account {account_id}" + ) # Delete streams that are not in active groups - streams_to_delete = Stream.objects.filter( - m3u_account=account - ).exclude( + streams_to_delete = Stream.objects.filter(m3u_account=account).exclude( channel_group__in=existing_groups ) # Also delete streams that haven't been seen for longer than stale_stream_days stale_streams = Stream.objects.filter( - m3u_account=account, - last_seen__lt=stale_cutoff + m3u_account=account, last_seen__lt=stale_cutoff ) deleted_count = streams_to_delete.count() @@ -534,20 +696,23 @@ def cleanup_streams(account_id, scan_start_time=timezone.now): stale_streams.delete() total_deleted = deleted_count + stale_count - logger.info(f"Cleanup for M3U account {account_id} complete: {deleted_count} streams removed due to group filter, {stale_count} removed as stale") + logger.info( + f"Cleanup for M3U account {account_id} complete: {deleted_count} streams removed due to group filter, {stale_count} removed as stale" + ) # Return the total count of deleted streams return total_deleted + @shared_task def refresh_m3u_groups(account_id, use_cache=False, full_refresh=False): - if not acquire_task_lock('refresh_m3u_account_groups', account_id): + if not acquire_task_lock("refresh_m3u_account_groups", account_id): return f"Task already running for account_id={account_id}.", None try: account = M3UAccount.objects.get(id=account_id, is_active=True) except M3UAccount.DoesNotExist: - release_task_lock('refresh_m3u_account_groups', account_id) + release_task_lock("refresh_m3u_account_groups", account_id) return f"M3UAccount with ID={account_id} not found or inactive.", None extinf_data = [] @@ -555,8 +720,12 @@ def refresh_m3u_groups(account_id, use_cache=False, full_refresh=False): if account.account_type == M3UAccount.Types.XC: # Log detailed information about the account - logger.info(f"Processing XC account {account_id} with URL: {account.server_url}") - logger.debug(f"Username: {account.username}, Has password: {'Yes' if account.password else 'No'}") + logger.info( + f"Processing XC account {account_id} with URL: {account.server_url}" + ) + logger.debug( + f"Username: {account.username}, Has password: {'Yes' if account.password else 'No'}" + ) # Validate required fields if not account.server_url: @@ -564,9 +733,11 @@ def refresh_m3u_groups(account_id, use_cache=False, full_refresh=False): 
logger.error(error_msg) account.status = M3UAccount.Status.ERROR account.last_message = error_msg - account.save(update_fields=['status', 'last_message']) - send_m3u_update(account_id, "processing_groups", 100, status="error", error=error_msg) - release_task_lock('refresh_m3u_account_groups', account_id) + account.save(update_fields=["status", "last_message"]) + send_m3u_update( + account_id, "processing_groups", 100, status="error", error=error_msg + ) + release_task_lock("refresh_m3u_account_groups", account_id) return error_msg, None if not account.username or not account.password: @@ -574,15 +745,19 @@ def refresh_m3u_groups(account_id, use_cache=False, full_refresh=False): logger.error(error_msg) account.status = M3UAccount.Status.ERROR account.last_message = error_msg - account.save(update_fields=['status', 'last_message']) - send_m3u_update(account_id, "processing_groups", 100, status="error", error=error_msg) - release_task_lock('refresh_m3u_account_groups', account_id) + account.save(update_fields=["status", "last_message"]) + send_m3u_update( + account_id, "processing_groups", 100, status="error", error=error_msg + ) + release_task_lock("refresh_m3u_account_groups", account_id) return error_msg, None try: # Ensure server URL is properly formatted - server_url = account.server_url.rstrip('/') - if not (server_url.startswith('http://') or server_url.startswith('https://')): + server_url = account.server_url.rstrip("/") + if not ( + server_url.startswith("http://") or server_url.startswith("https://") + ): server_url = f"http://{server_url}" # User agent handling - completely rewritten @@ -591,37 +766,63 @@ def refresh_m3u_groups(account_id, use_cache=False, full_refresh=False): logger.debug(f"Getting user agent for account {account.id}") # Use a hardcoded user agent string to avoid any issues with object structure - user_agent_string = "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36" + user_agent_string = ( + "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36" + ) try: # Try to get the user agent directly from the database if account.user_agent_id: ua_obj = UserAgent.objects.get(id=account.user_agent_id) - if ua_obj and hasattr(ua_obj, 'user_agent') and ua_obj.user_agent: + if ( + ua_obj + and hasattr(ua_obj, "user_agent") + and ua_obj.user_agent + ): user_agent_string = ua_obj.user_agent - logger.debug(f"Using user agent from account: {user_agent_string}") + logger.debug( + f"Using user agent from account: {user_agent_string}" + ) else: # Get default user agent from CoreSettings default_ua_id = CoreSettings.get_default_user_agent_id() - logger.debug(f"Default user agent ID from settings: {default_ua_id}") + logger.debug( + f"Default user agent ID from settings: {default_ua_id}" + ) if default_ua_id: ua_obj = UserAgent.objects.get(id=default_ua_id) - if ua_obj and hasattr(ua_obj, 'user_agent') and ua_obj.user_agent: + if ( + ua_obj + and hasattr(ua_obj, "user_agent") + and ua_obj.user_agent + ): user_agent_string = ua_obj.user_agent - logger.debug(f"Using default user agent: {user_agent_string}") + logger.debug( + f"Using default user agent: {user_agent_string}" + ) except Exception as e: - logger.warning(f"Error getting user agent, using fallback: {str(e)}") + logger.warning( + f"Error getting user agent, using fallback: {str(e)}" + ) logger.debug(f"Final user agent string: {user_agent_string}") except Exception as e: - user_agent_string = "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36" - logger.warning(f"Exception in user agent handling, 
using fallback: {str(e)}") + user_agent_string = ( + "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36" + ) + logger.warning( + f"Exception in user agent handling, using fallback: {str(e)}" + ) - logger.info(f"Creating XCClient with URL: {server_url}, Username: {account.username}, User-Agent: {user_agent_string}") + logger.info( + f"Creating XCClient with URL: {server_url}, Username: {account.username}, User-Agent: {user_agent_string}" + ) # Create XCClient with explicit error handling try: - with XCClient(server_url, account.username, account.password, user_agent_string) as xc_client: + with XCClient( + server_url, account.username, account.password, user_agent_string + ) as xc_client: logger.info(f"XCClient instance created successfully") # Authenticate with detailed error handling @@ -634,26 +835,42 @@ def refresh_m3u_groups(account_id, use_cache=False, full_refresh=False): logger.error(error_msg) account.status = M3UAccount.Status.ERROR account.last_message = error_msg - account.save(update_fields=['status', 'last_message']) - send_m3u_update(account_id, "processing_groups", 100, status="error", error=error_msg) - release_task_lock('refresh_m3u_account_groups', account_id) + account.save(update_fields=["status", "last_message"]) + send_m3u_update( + account_id, + "processing_groups", + 100, + status="error", + error=error_msg, + ) + release_task_lock("refresh_m3u_account_groups", account_id) return error_msg, None # Get categories with detailed error handling try: logger.info(f"Getting live categories from XC server") xc_categories = xc_client.get_live_categories() - logger.info(f"Found {len(xc_categories)} categories: {xc_categories}") + logger.info( + f"Found {len(xc_categories)} categories: {xc_categories}" + ) # Validate response if not isinstance(xc_categories, list): - error_msg = f"Unexpected response from XC server: {xc_categories}" + error_msg = ( + f"Unexpected response from XC server: {xc_categories}" + ) logger.error(error_msg) account.status = M3UAccount.Status.ERROR account.last_message = error_msg - account.save(update_fields=['status', 'last_message']) - send_m3u_update(account_id, "processing_groups", 100, status="error", error=error_msg) - release_task_lock('refresh_m3u_account_groups', account_id) + account.save(update_fields=["status", "last_message"]) + send_m3u_update( + account_id, + "processing_groups", + 100, + status="error", + error=error_msg, + ) + release_task_lock("refresh_m3u_account_groups", account_id) return error_msg, None if len(xc_categories) == 0: @@ -671,9 +888,15 @@ def refresh_m3u_groups(account_id, use_cache=False, full_refresh=False): logger.error(error_msg) account.status = M3UAccount.Status.ERROR account.last_message = error_msg - account.save(update_fields=['status', 'last_message']) - send_m3u_update(account_id, "processing_groups", 100, status="error", error=error_msg) - release_task_lock('refresh_m3u_account_groups', account_id) + account.save(update_fields=["status", "last_message"]) + send_m3u_update( + account_id, + "processing_groups", + 100, + status="error", + error=error_msg, + ) + release_task_lock("refresh_m3u_account_groups", account_id) return error_msg, None except Exception as e: @@ -681,25 +904,33 @@ def refresh_m3u_groups(account_id, use_cache=False, full_refresh=False): logger.error(error_msg) account.status = M3UAccount.Status.ERROR account.last_message = error_msg - account.save(update_fields=['status', 'last_message']) - send_m3u_update(account_id, "processing_groups", 100, status="error", error=error_msg) 
- release_task_lock('refresh_m3u_account_groups', account_id) + account.save(update_fields=["status", "last_message"]) + send_m3u_update( + account_id, + "processing_groups", + 100, + status="error", + error=error_msg, + ) + release_task_lock("refresh_m3u_account_groups", account_id) return error_msg, None except Exception as e: error_msg = f"Unexpected error occurred in XC Client: {str(e)}" logger.error(error_msg) account.status = M3UAccount.Status.ERROR account.last_message = error_msg - account.save(update_fields=['status', 'last_message']) - send_m3u_update(account_id, "processing_groups", 100, status="error", error=error_msg) - release_task_lock('refresh_m3u_account_groups', account_id) + account.save(update_fields=["status", "last_message"]) + send_m3u_update( + account_id, "processing_groups", 100, status="error", error=error_msg + ) + release_task_lock("refresh_m3u_account_groups", account_id) return error_msg, None else: # Here's the key change - use the success flag from fetch_m3u_lines lines, success = fetch_m3u_lines(account, use_cache) if not success: # If fetch failed, don't continue processing - release_task_lock('refresh_m3u_account_groups', account_id) + release_task_lock("refresh_m3u_account_groups", account_id) return f"Failed to fetch M3U data for account_id={account_id}.", None # Log basic file structure for debugging @@ -719,19 +950,25 @@ def refresh_m3u_groups(account_id, use_cache=False, full_refresh=False): extinf_count += 1 parsed = parse_extinf_line(line) if parsed: - group_title_attr = get_case_insensitive_attr(parsed["attributes"], "group-title", "") + group_title_attr = get_case_insensitive_attr( + parsed["attributes"], "group-title", "" + ) if group_title_attr: group_name = group_title_attr # Log new groups as they're discovered if group_name not in groups: - logger.debug(f"Found new group for M3U account {account_id}: '{group_name}'") + logger.debug( + f"Found new group for M3U account {account_id}: '{group_name}'" + ) groups[group_name] = {} extinf_data.append(parsed) else: # Log problematic EXTINF lines - logger.warning(f"Failed to parse EXTINF at line {line_index+1}: {line[:200]}") - problematic_lines.append((line_index+1, line[:200])) + logger.warning( + f"Failed to parse EXTINF at line {line_index+1}: {line[:200]}" + ) + problematic_lines.append((line_index + 1, line[:200])) elif extinf_data and line.startswith("http"): url_count += 1 @@ -741,49 +978,69 @@ def refresh_m3u_groups(account_id, use_cache=False, full_refresh=False): # Periodically log progress for large files if valid_stream_count % 1000 == 0: - logger.debug(f"Processed {valid_stream_count} valid streams so far for M3U account: {account_id}") + logger.debug( + f"Processed {valid_stream_count} valid streams so far for M3U account: {account_id}" + ) # Log summary statistics - logger.info(f"M3U parsing complete - Lines: {line_count}, EXTINF: {extinf_count}, URLs: {url_count}, Valid streams: {valid_stream_count}") + logger.info( + f"M3U parsing complete - Lines: {line_count}, EXTINF: {extinf_count}, URLs: {url_count}, Valid streams: {valid_stream_count}" + ) if problematic_lines: - logger.warning(f"Found {len(problematic_lines)} problematic lines during parsing") - for i, (line_num, content) in enumerate(problematic_lines[:10]): # Log max 10 examples + logger.warning( + f"Found {len(problematic_lines)} problematic lines during parsing" + ) + for i, (line_num, content) in enumerate( + problematic_lines[:10] + ): # Log max 10 examples logger.warning(f"Problematic line #{i+1} at line {line_num}: 
{content}") if len(problematic_lines) > 10: - logger.warning(f"... and {len(problematic_lines) - 10} more problematic lines") + logger.warning( + f"... and {len(problematic_lines) - 10} more problematic lines" + ) # Log group statistics - logger.info(f"Found {len(groups)} groups in M3U file: {', '.join(list(groups.keys())[:20])}" + - ("..." if len(groups) > 20 else "")) + logger.info( + f"Found {len(groups)} groups in M3U file: {', '.join(list(groups.keys())[:20])}" + + ("..." if len(groups) > 20 else "") + ) # Cache processed data cache_path = os.path.join(m3u_dir, f"{account_id}.json") - with open(cache_path, 'w', encoding='utf-8') as f: - json.dump({ - "extinf_data": extinf_data, - "groups": groups, - }, f) + with open(cache_path, "w", encoding="utf-8") as f: + json.dump( + { + "extinf_data": extinf_data, + "groups": groups, + }, + f, + ) logger.debug(f"Cached parsed M3U data to {cache_path}") send_m3u_update(account_id, "processing_groups", 0) process_groups(account, groups) - release_task_lock('refresh_m3u_account_groups', account_id) - - + release_task_lock("refresh_m3u_account_groups", account_id) if not full_refresh: # Use update() instead of save() to avoid triggering signals M3UAccount.objects.filter(id=account_id).update( status=M3UAccount.Status.PENDING_SETUP, - last_message="M3U groups loaded. Please select groups or refresh M3U to complete setup." + last_message="M3U groups loaded. Please select groups or refresh M3U to complete setup.", + ) + send_m3u_update( + account_id, + "processing_groups", + 100, + status="pending_setup", + message="M3U groups loaded. Please select groups or refresh M3U to complete setup.", ) - send_m3u_update(account_id, "processing_groups", 100, status="pending_setup", message="M3U groups loaded. Please select groups or refresh M3U to complete setup.") return extinf_data, groups + def delete_m3u_refresh_task_by_id(account_id): """ Delete the periodic task associated with an M3U account ID. 
@@ -797,6 +1054,7 @@ def delete_m3u_refresh_task_by_id(account_id): # Look for task by name try: from django_celery_beat.models import PeriodicTask, IntervalSchedule + task = PeriodicTask.objects.get(name=task_name) logger.debug(f"Found task by name: {task.id} for M3UAccount {account_id}") except PeriodicTask.DoesNotExist: @@ -807,12 +1065,16 @@ def delete_m3u_refresh_task_by_id(account_id): if task: # Store interval info before deleting the task interval_id = None - if hasattr(task, 'interval') and task.interval: + if hasattr(task, "interval") and task.interval: interval_id = task.interval.id # Count how many TOTAL tasks use this interval (including this one) - tasks_with_same_interval = PeriodicTask.objects.filter(interval_id=interval_id).count() - logger.debug(f"Interval {interval_id} is used by {tasks_with_same_interval} tasks total") + tasks_with_same_interval = PeriodicTask.objects.filter( + interval_id=interval_id + ).count() + logger.debug( + f"Interval {interval_id} is used by {tasks_with_same_interval} tasks total" + ) # Delete the task first task_id = task.id @@ -824,20 +1086,28 @@ def delete_m3u_refresh_task_by_id(account_id): if interval_id and tasks_with_same_interval == 1: try: interval = IntervalSchedule.objects.get(id=interval_id) - logger.debug(f"Deleting interval schedule {interval_id} (not shared with other tasks)") + logger.debug( + f"Deleting interval schedule {interval_id} (not shared with other tasks)" + ) interval.delete() logger.debug(f"Successfully deleted interval {interval_id}") except IntervalSchedule.DoesNotExist: logger.warning(f"Interval {interval_id} no longer exists") elif interval_id: - logger.debug(f"Not deleting interval {interval_id} as it's shared with {tasks_with_same_interval-1} other tasks") + logger.debug( + f"Not deleting interval {interval_id} as it's shared with {tasks_with_same_interval-1} other tasks" + ) return True return False except Exception as e: - logger.error(f"Error deleting periodic task for M3UAccount {account_id}: {str(e)}", exc_info=True) + logger.error( + f"Error deleting periodic task for M3UAccount {account_id}: {str(e)}", + exc_info=True, + ) return False + @shared_task def sync_auto_channels(account_id, scan_start_time=None): """ @@ -845,7 +1115,13 @@ def sync_auto_channels(account_id, scan_start_time=None): Preserves existing channel UUIDs to maintain M3U link integrity. Called after M3U refresh completes successfully. 
""" - from apps.channels.models import Channel, ChannelGroup, ChannelGroupM3UAccount, Stream, ChannelStream + from apps.channels.models import ( + Channel, + ChannelGroup, + ChannelGroupM3UAccount, + Stream, + ChannelStream, + ) from apps.epg.models import EPGData from django.utils import timezone @@ -862,10 +1138,8 @@ def sync_auto_channels(account_id, scan_start_time=None): # Get groups with auto sync enabled for this account auto_sync_groups = ChannelGroupM3UAccount.objects.filter( - m3u_account=account, - enabled=True, - auto_channel_sync=True - ).select_related('channel_group') + m3u_account=account, enabled=True, auto_channel_sync=True + ).select_related("channel_group") channels_created = 0 channels_updated = 0 @@ -890,7 +1164,9 @@ def sync_auto_channels(account_id, scan_start_time=None): force_dummy_epg = group_custom_props.get("force_dummy_epg", False) override_group_id = group_custom_props.get("group_override") name_regex_pattern = group_custom_props.get("name_regex_pattern") - name_replace_pattern = group_custom_props.get("name_replace_pattern") + name_replace_pattern = group_custom_props.get( + "name_replace_pattern" + ) name_match_regex = group_custom_props.get("name_match_regex") channel_profile_ids = group_custom_props.get("channel_profile_ids") channel_sort_order = group_custom_props.get("channel_sort_order") @@ -908,17 +1184,23 @@ def sync_auto_channels(account_id, scan_start_time=None): if override_group_id: try: target_group = ChannelGroup.objects.get(id=override_group_id) - logger.info(f"Using override group '{target_group.name}' instead of '{channel_group.name}' for auto-created channels") + logger.info( + f"Using override group '{target_group.name}' instead of '{channel_group.name}' for auto-created channels" + ) except ChannelGroup.DoesNotExist: - logger.warning(f"Override group with ID {override_group_id} not found, using original group '{channel_group.name}'") + logger.warning( + f"Override group with ID {override_group_id} not found, using original group '{channel_group.name}'" + ) - logger.info(f"Processing auto sync for group: {channel_group.name} (start: {start_number})") + logger.info( + f"Processing auto sync for group: {channel_group.name} (start: {start_number})" + ) # Get all current streams in this group for this M3U account, filter out stale streams current_streams = Stream.objects.filter( m3u_account=account, channel_group=channel_group, - last_seen__gte=scan_start_time + last_seen__gte=scan_start_time, ) # --- FILTER STREAMS BY NAME MATCH REGEX IF SPECIFIED --- @@ -928,33 +1210,38 @@ def sync_auto_channels(account_id, scan_start_time=None): name__iregex=name_match_regex ) except re.error as e: - logger.warning(f"Invalid name_match_regex '{name_match_regex}' for group '{channel_group.name}': {e}. Skipping name filter.") + logger.warning( + f"Invalid name_match_regex '{name_match_regex}' for group '{channel_group.name}': {e}. Skipping name filter." 
+ ) # --- APPLY CHANNEL SORT ORDER --- streams_is_list = False # Track if we converted to list - if channel_sort_order and channel_sort_order != '': - if channel_sort_order == 'name': + if channel_sort_order and channel_sort_order != "": + if channel_sort_order == "name": # Use natural sorting for names to handle numbers correctly current_streams = list(current_streams) - current_streams.sort(key=lambda stream: natural_sort_key(stream.name)) + current_streams.sort( + key=lambda stream: natural_sort_key(stream.name) + ) streams_is_list = True - elif channel_sort_order == 'tvg_id': - current_streams = current_streams.order_by('tvg_id') - elif channel_sort_order == 'updated_at': - current_streams = current_streams.order_by('updated_at') + elif channel_sort_order == "tvg_id": + current_streams = current_streams.order_by("tvg_id") + elif channel_sort_order == "updated_at": + current_streams = current_streams.order_by("updated_at") else: - logger.warning(f"Unknown channel_sort_order '{channel_sort_order}' for group '{channel_group.name}'. Using provider order.") - current_streams = current_streams.order_by('id') + logger.warning( + f"Unknown channel_sort_order '{channel_sort_order}' for group '{channel_group.name}'. Using provider order." + ) + current_streams = current_streams.order_by("id") else: - current_streams = current_streams.order_by('id') + current_streams = current_streams.order_by("id") # If channel_sort_order is empty or None, use provider order (no additional sorting) # Get existing auto-created channels for this account (regardless of current group) # We'll find them by their stream associations instead of just group location existing_channels = Channel.objects.filter( - auto_created=True, - auto_created_by=account - ).select_related('logo', 'epg_data') + auto_created=True, auto_created_by=account + ).select_related("logo", "epg_data") # Create mapping of existing channels by their associated stream # This approach finds channels even if they've been moved to different groups @@ -964,8 +1251,8 @@ def sync_auto_channels(account_id, scan_start_time=None): channel_streams = ChannelStream.objects.filter( channel=channel, stream__m3u_account=account, - stream__channel_group=channel_group # Match streams from the original group - ).select_related('stream') + stream__channel_group=channel_group, # Match streams from the original group + ).select_related("stream") # Map each of our M3U account's streams to this channel for channel_stream in channel_streams: @@ -976,7 +1263,11 @@ def sync_auto_channels(account_id, scan_start_time=None): processed_stream_ids = set() # Check if we have streams - handle both QuerySet and list cases - has_streams = len(current_streams) > 0 if streams_is_list else current_streams.exists() + has_streams = ( + len(current_streams) > 0 + if streams_is_list + else current_streams.exists() + ) if not has_streams: logger.debug(f"No streams found in group {channel_group.name}") @@ -984,20 +1275,31 @@ def sync_auto_channels(account_id, scan_start_time=None): channels_to_delete = [ch for ch in existing_channel_map.values()] if channels_to_delete: deleted_count = len(channels_to_delete) - Channel.objects.filter(id__in=[ch.id for ch in channels_to_delete]).delete() + Channel.objects.filter( + id__in=[ch.id for ch in channels_to_delete] + ).delete() channels_deleted += deleted_count - logger.debug(f"Deleted {deleted_count} auto channels (no streams remaining)") + logger.debug( + f"Deleted {deleted_count} auto channels (no streams remaining)" + ) continue # Prepare profiles 
to assign to new channels from apps.channels.models import ChannelProfile, ChannelProfileMembership - if channel_profile_ids and isinstance(channel_profile_ids, list) and len(channel_profile_ids) > 0: + + if ( + channel_profile_ids + and isinstance(channel_profile_ids, list) + and len(channel_profile_ids) > 0 + ): # Convert all to int (in case they're strings) try: profile_ids = [int(pid) for pid in channel_profile_ids] except Exception: profile_ids = [] - profiles_to_assign = list(ChannelProfile.objects.filter(id__in=profile_ids)) + profiles_to_assign = list( + ChannelProfile.objects.filter(id__in=profile_ids) + ) else: profiles_to_assign = list(ChannelProfile.objects.all()) @@ -1010,10 +1312,11 @@ def sync_auto_channels(account_id, scan_start_time=None): temp_channel_number = start_number # Get all channel numbers that are already in use by other channels (not auto-created by this account) - used_numbers = set(Channel.objects.exclude( - auto_created=True, - auto_created_by=account - ).values_list('channel_number', flat=True)) + used_numbers = set( + Channel.objects.exclude( + auto_created=True, auto_created_by=account + ).values_list("channel_number", flat=True) + ) for stream in current_streams: if stream.id in existing_channel_map: @@ -1030,7 +1333,9 @@ def sync_auto_channels(account_id, scan_start_time=None): if channel.channel_number != target_number: channel.channel_number = target_number channels_to_renumber.append(channel) - logger.debug(f"Will renumber channel '{channel.name}' to {target_number}") + logger.debug( + f"Will renumber channel '{channel.name}' to {target_number}" + ) temp_channel_number += 1.0 if temp_channel_number % 1 != 0: # Has decimal @@ -1038,8 +1343,10 @@ def sync_auto_channels(account_id, scan_start_time=None): # Bulk update channel numbers if any need renumbering if channels_to_renumber: - Channel.objects.bulk_update(channels_to_renumber, ['channel_number']) - logger.info(f"Renumbered {len(channels_to_renumber)} channels to maintain sort order") + Channel.objects.bulk_update(channels_to_renumber, ["channel_number"]) + logger.info( + f"Renumbered {len(channels_to_renumber)} channels to maintain sort order" + ) # Reset channel number counter for processing new channels current_channel_number = start_number @@ -1048,7 +1355,11 @@ def sync_auto_channels(account_id, scan_start_time=None): processed_stream_ids.add(stream.id) try: # Parse custom properties for additional info - stream_custom_props = json.loads(stream.custom_properties) if stream.custom_properties else {} + stream_custom_props = ( + json.loads(stream.custom_properties) + if stream.custom_properties + else {} + ) tvc_guide_stationid = stream_custom_props.get("tvc-guide-stationid") # --- REGEX FIND/REPLACE LOGIC --- @@ -1056,11 +1367,19 @@ def sync_auto_channels(account_id, scan_start_time=None): new_name = original_name if name_regex_pattern is not None: # If replace is None, treat as empty string (remove match) - replace = name_replace_pattern if name_replace_pattern is not None else '' + replace = ( + name_replace_pattern + if name_replace_pattern is not None + else "" + ) try: - new_name = re.sub(name_regex_pattern, replace, original_name) + new_name = re.sub( + name_regex_pattern, replace, original_name + ) except re.error as e: - logger.warning(f"Regex error for group '{channel_group.name}': {e}. Using original name.") + logger.warning( + f"Regex error for group '{channel_group.name}': {e}. Using original name." 
+ ) new_name = original_name # Check if we already have a channel for this stream @@ -1087,15 +1406,20 @@ def sync_auto_channels(account_id, scan_start_time=None): if existing_channel.channel_group != target_group: existing_channel.channel_group = target_group channel_updated = True - logger.info(f"Moved auto channel '{existing_channel.name}' from '{existing_channel.channel_group.name if existing_channel.channel_group else 'None'}' to '{target_group.name}'") + logger.info( + f"Moved auto channel '{existing_channel.name}' from '{existing_channel.channel_group.name if existing_channel.channel_group else 'None'}' to '{target_group.name}'" + ) # Handle logo updates current_logo = None if stream.logo_url: from apps.channels.models import Logo + current_logo, _ = Logo.objects.get_or_create( url=stream.logo_url, - defaults={"name": stream.name or stream.tvg_id or "Unknown"} + defaults={ + "name": stream.name or stream.tvg_id or "Unknown" + }, ) if existing_channel.logo != current_logo: @@ -1105,7 +1429,9 @@ def sync_auto_channels(account_id, scan_start_time=None): # Handle EPG data updates current_epg_data = None if stream.tvg_id and not force_dummy_epg: - current_epg_data = EPGData.objects.filter(tvg_id=stream.tvg_id).first() + current_epg_data = EPGData.objects.filter( + tvg_id=stream.tvg_id + ).first() if existing_channel.epg_data != current_epg_data: existing_channel.epg_data = current_epg_data @@ -1114,17 +1440,20 @@ def sync_auto_channels(account_id, scan_start_time=None): if channel_updated: existing_channel.save() channels_updated += 1 - logger.debug(f"Updated auto channel: {existing_channel.channel_number} - {existing_channel.name}") + logger.debug( + f"Updated auto channel: {existing_channel.channel_number} - {existing_channel.name}" + ) # Update channel profile memberships for existing channels current_memberships = set( ChannelProfileMembership.objects.filter( - channel=existing_channel, - enabled=True - ).values_list('channel_profile_id', flat=True) + channel=existing_channel, enabled=True + ).values_list("channel_profile_id", flat=True) ) - target_profile_ids = set(profile.id for profile in profiles_to_assign) + target_profile_ids = set( + profile.id for profile in profiles_to_assign + ) # Only update if memberships have changed if current_memberships != target_profile_ids: @@ -1135,16 +1464,20 @@ def sync_auto_channels(account_id, scan_start_time=None): # Enable/create memberships for target profiles for profile in profiles_to_assign: - membership, created = ChannelProfileMembership.objects.get_or_create( - channel_profile=profile, - channel=existing_channel, - defaults={'enabled': True} + membership, created = ( + ChannelProfileMembership.objects.get_or_create( + channel_profile=profile, + channel=existing_channel, + defaults={"enabled": True}, + ) ) if not created and not membership.enabled: membership.enabled = True membership.save() - logger.debug(f"Updated profile memberships for auto channel: {existing_channel.name}") + logger.debug( + f"Updated profile memberships for auto channel: {existing_channel.name}" + ) else: # Create new channel @@ -1164,19 +1497,19 @@ def sync_auto_channels(account_id, scan_start_time=None): channel_group=target_group, user_level=0, auto_created=True, - auto_created_by=account + auto_created_by=account, ) # Associate the stream with the channel ChannelStream.objects.create( - channel=channel, - stream=stream, - order=0 + channel=channel, stream=stream, order=0 ) # Assign to correct profiles memberships = [ - 
ChannelProfileMembership(channel_profile=profile, channel=channel, enabled=True) + ChannelProfileMembership( + channel_profile=profile, channel=channel, enabled=True + ) for profile in profiles_to_assign ] if memberships: @@ -1184,26 +1517,33 @@ def sync_auto_channels(account_id, scan_start_time=None): # Try to match EPG data if stream.tvg_id and not force_dummy_epg: - epg_data = EPGData.objects.filter(tvg_id=stream.tvg_id).first() + epg_data = EPGData.objects.filter( + tvg_id=stream.tvg_id + ).first() if epg_data: channel.epg_data = epg_data - channel.save(update_fields=['epg_data']) + channel.save(update_fields=["epg_data"]) elif stream.tvg_id and force_dummy_epg: channel.epg_data = None - channel.save(update_fields=['epg_data']) + channel.save(update_fields=["epg_data"]) # Handle logo if stream.logo_url: from apps.channels.models import Logo + logo, _ = Logo.objects.get_or_create( url=stream.logo_url, - defaults={"name": stream.name or stream.tvg_id or "Unknown"} + defaults={ + "name": stream.name or stream.tvg_id or "Unknown" + }, ) channel.logo = logo - channel.save(update_fields=['logo']) + channel.save(update_fields=["logo"]) channels_created += 1 - logger.debug(f"Created auto channel: {channel.channel_number} - {channel.name}") + logger.debug( + f"Created auto channel: {channel.channel_number} - {channel.name}" + ) # Increment channel number for next iteration current_channel_number += 1.0 @@ -1211,7 +1551,9 @@ def sync_auto_channels(account_id, scan_start_time=None): current_channel_number = int(current_channel_number) + 1.0 except Exception as e: - logger.error(f"Error processing auto channel for stream {stream.name}: {str(e)}") + logger.error( + f"Error processing auto channel for stream {stream.name}: {str(e)}" + ) continue # Delete channels for streams that no longer exist @@ -1222,21 +1564,28 @@ def sync_auto_channels(account_id, scan_start_time=None): if channels_to_delete: deleted_count = len(channels_to_delete) - Channel.objects.filter(id__in=[ch.id for ch in channels_to_delete]).delete() + Channel.objects.filter( + id__in=[ch.id for ch in channels_to_delete] + ).delete() channels_deleted += deleted_count - logger.debug(f"Deleted {deleted_count} auto channels for removed streams") + logger.debug( + f"Deleted {deleted_count} auto channels for removed streams" + ) - logger.info(f"Auto channel sync complete for account {account.name}: {channels_created} created, {channels_updated} updated, {channels_deleted} deleted") + logger.info( + f"Auto channel sync complete for account {account.name}: {channels_created} created, {channels_updated} updated, {channels_deleted} deleted" + ) return f"Auto sync: {channels_created} channels created, {channels_updated} updated, {channels_deleted} deleted" except Exception as e: logger.error(f"Error in auto channel sync for account {account_id}: {str(e)}") return f"Auto sync error: {str(e)}" + @shared_task def refresh_single_m3u_account(account_id): """Splits M3U processing into chunks and dispatches them as parallel tasks.""" - if not acquire_task_lock('refresh_single_m3u_account', account_id): + if not acquire_task_lock("refresh_single_m3u_account", account_id): return f"Task already running for account_id={account_id}." 
# Record start time @@ -1250,25 +1599,27 @@ def refresh_single_m3u_account(account_id): account = M3UAccount.objects.get(id=account_id, is_active=True) if not account.is_active: logger.debug(f"Account {account_id} is not active, skipping.") - release_task_lock('refresh_single_m3u_account', account_id) + release_task_lock("refresh_single_m3u_account", account_id) return # Set status to fetching account.status = M3UAccount.Status.FETCHING - account.save(update_fields=['status']) - - filters = list(account.filters.all()) + account.save(update_fields=["status"]) except M3UAccount.DoesNotExist: # The M3U account doesn't exist, so delete the periodic task if it exists - logger.warning(f"M3U account with ID {account_id} not found, but task was triggered. Cleaning up orphaned task.") + logger.warning( + f"M3U account with ID {account_id} not found, but task was triggered. Cleaning up orphaned task." + ) # Call the helper function to delete the task if delete_m3u_refresh_task_by_id(account_id): - logger.info(f"Successfully cleaned up orphaned task for M3U account {account_id}") + logger.info( + f"Successfully cleaned up orphaned task for M3U account {account_id}" + ) else: logger.debug(f"No orphaned task found for M3U account {account_id}") - release_task_lock('refresh_single_m3u_account', account_id) + release_task_lock("refresh_single_m3u_account", account_id) return f"M3UAccount with ID={account_id} not found or inactive, task cleaned up" # Fetch M3U lines and handle potential issues @@ -1278,14 +1629,16 @@ def refresh_single_m3u_account(account_id): cache_path = os.path.join(m3u_dir, f"{account_id}.json") if os.path.exists(cache_path): try: - with open(cache_path, 'r') as file: + with open(cache_path, "r") as file: data = json.load(file) - extinf_data = data['extinf_data'] - groups = data['groups'] + extinf_data = data["extinf_data"] + groups = data["groups"] except json.JSONDecodeError as e: # Handle corrupted JSON file - logger.error(f"Error parsing cached M3U data for account {account_id}: {str(e)}") + logger.error( + f"Error parsing cached M3U data for account {account_id}: {str(e)}" + ) # Backup the corrupted file for potential analysis backup_path = f"{cache_path}.corrupted" @@ -1293,7 +1646,9 @@ def refresh_single_m3u_account(account_id): os.rename(cache_path, backup_path) logger.info(f"Renamed corrupted cache file to {backup_path}") except OSError as rename_err: - logger.warning(f"Failed to rename corrupted cache file: {str(rename_err)}") + logger.warning( + f"Failed to rename corrupted cache file: {str(rename_err)}" + ) # Reset the data to empty structures extinf_data = [] @@ -1311,8 +1666,10 @@ def refresh_single_m3u_account(account_id): # Check for completely empty result or missing groups if not result or result[1] is None: - logger.error(f"Failed to refresh M3U groups for account {account_id}: {result}") - release_task_lock('refresh_single_m3u_account', account_id) + logger.error( + f"Failed to refresh M3U groups for account {account_id}: {result}" + ) + release_task_lock("refresh_single_m3u_account", account_id) return "Failed to update m3u account - download failed or other error" extinf_data, groups = result @@ -1329,15 +1686,23 @@ def refresh_single_m3u_account(account_id): logger.error(f"No streams found for non-XC account {account_id}") account.status = M3UAccount.Status.ERROR account.last_message = "No streams found in M3U source" - account.save(update_fields=['status', 'last_message']) - send_m3u_update(account_id, "parsing", 100, status="error", error="No streams found") + 
account.save(update_fields=["status", "last_message"]) + send_m3u_update( + account_id, "parsing", 100, status="error", error="No streams found" + ) except Exception as e: logger.error(f"Exception in refresh_m3u_groups: {str(e)}", exc_info=True) account.status = M3UAccount.Status.ERROR account.last_message = f"Error refreshing M3U groups: {str(e)}" - account.save(update_fields=['status', 'last_message']) - send_m3u_update(account_id, "parsing", 100, status="error", error=f"Error refreshing M3U groups: {str(e)}") - release_task_lock('refresh_single_m3u_account', account_id) + account.save(update_fields=["status", "last_message"]) + send_m3u_update( + account_id, + "parsing", + 100, + status="error", + error=f"Error refreshing M3U groups: {str(e)}", + ) + release_task_lock("refresh_single_m3u_account", account_id) return "Failed to update m3u account" # Only proceed with parsing if we actually have data and no errors were encountered @@ -1352,37 +1717,53 @@ def refresh_single_m3u_account(account_id): logger.error(f"No data to process for account {account_id}") account.status = M3UAccount.Status.ERROR account.last_message = "No data available for processing" - account.save(update_fields=['status', 'last_message']) - send_m3u_update(account_id, "parsing", 100, status="error", error="No data available for processing") - release_task_lock('refresh_single_m3u_account', account_id) + account.save(update_fields=["status", "last_message"]) + send_m3u_update( + account_id, + "parsing", + 100, + status="error", + error="No data available for processing", + ) + release_task_lock("refresh_single_m3u_account", account_id) return "Failed to update m3u account, no data available" hash_keys = CoreSettings.get_m3u_hash_key().split(",") - existing_groups = {group.name: group.id for group in ChannelGroup.objects.filter( - m3u_account__m3u_account=account, # Filter by the M3UAccount - m3u_account__enabled=True # Filter by the enabled flag in the join table - )} + existing_groups = { + group.name: group.id + for group in ChannelGroup.objects.filter( + m3u_account__m3u_account=account, # Filter by the M3UAccount + m3u_account__enabled=True, # Filter by the enabled flag in the join table + ) + } try: # Set status to parsing account.status = M3UAccount.Status.PARSING - account.save(update_fields=['status']) + account.save(update_fields=["status"]) if account.account_type == M3UAccount.Types.STADNARD: - logger.debug(f"Processing Standard account ({account_id}) with groups: {existing_groups}") + logger.debug( + f"Processing Standard account ({account_id}) with groups: {existing_groups}" + ) # Break into batches and process in parallel - batches = [extinf_data[i:i + BATCH_SIZE] for i in range(0, len(extinf_data), BATCH_SIZE)] - task_group = group(process_m3u_batch.s(account_id, batch, existing_groups, hash_keys) for batch in batches) + batches = [ + extinf_data[i : i + BATCH_SIZE] + for i in range(0, len(extinf_data), BATCH_SIZE) + ] + task_group = group( + process_m3u_batch.s(account_id, batch, existing_groups, hash_keys) + for batch in batches + ) else: # For XC accounts, get the groups with their custom properties containing xc_id logger.debug(f"Processing XC account with groups: {existing_groups}") # Get the ChannelGroupM3UAccount entries with their custom_properties channel_group_relationships = ChannelGroupM3UAccount.objects.filter( - m3u_account=account, - enabled=True - ).select_related('channel_group') + m3u_account=account, enabled=True + ).select_related("channel_group") filtered_groups = {} for rel in 
channel_group_relationships: @@ -1391,34 +1772,51 @@ def refresh_single_m3u_account(account_id): # Load the custom properties with the xc_id try: - custom_props = json.loads(rel.custom_properties) if rel.custom_properties else {} - if 'xc_id' in custom_props: + custom_props = ( + json.loads(rel.custom_properties) + if rel.custom_properties + else {} + ) + if "xc_id" in custom_props: filtered_groups[group_name] = { - 'xc_id': custom_props['xc_id'], - 'channel_group_id': group_id + "xc_id": custom_props["xc_id"], + "channel_group_id": group_id, } - logger.debug(f"Added group {group_name} with xc_id {custom_props['xc_id']}") + logger.debug( + f"Added group {group_name} with xc_id {custom_props['xc_id']}" + ) else: - logger.warning(f"No xc_id found in custom properties for group {group_name}") + logger.warning( + f"No xc_id found in custom properties for group {group_name}" + ) except (json.JSONDecodeError, KeyError) as e: - logger.error(f"Error parsing custom properties for group {group_name}: {str(e)}") + logger.error( + f"Error parsing custom properties for group {group_name}: {str(e)}" + ) - logger.info(f"Filtered {len(filtered_groups)} groups for processing: {filtered_groups}") + logger.info( + f"Filtered {len(filtered_groups)} groups for processing: {filtered_groups}" + ) # Batch the groups filtered_groups_list = list(filtered_groups.items()) batches = [ - dict(filtered_groups_list[i:i + 2]) + dict(filtered_groups_list[i : i + 2]) for i in range(0, len(filtered_groups_list), 2) ] logger.info(f"Created {len(batches)} batches for XC processing") - task_group = group(process_xc_category.s(account_id, batch, existing_groups, hash_keys) for batch in batches) + task_group = group( + process_xc_category.s(account_id, batch, existing_groups, hash_keys) + for batch in batches + ) total_batches = len(batches) completed_batches = 0 streams_processed = 0 # Track total streams processed - logger.debug(f"Dispatched {len(batches)} parallel tasks for account_id={account_id}.") + logger.debug( + f"Dispatched {len(batches)} parallel tasks for account_id={account_id}." 
+ ) # result = task_group.apply_async() result = task_group.apply_async() @@ -1427,7 +1825,9 @@ def refresh_single_m3u_account(account_id): completed_task_ids = set() while completed_batches < total_batches: for async_result in result: - if async_result.ready() and async_result.id not in completed_task_ids: # If the task has completed and we haven't counted it + if ( + async_result.ready() and async_result.id not in completed_task_ids + ): # If the task has completed and we haven't counted it task_result = async_result.result # The result of the task logger.debug(f"Task completed with result: {task_result}") @@ -1447,7 +1847,9 @@ def refresh_single_m3u_account(account_id): pass completed_batches += 1 - completed_task_ids.add(async_result.id) # Mark this task as processed + completed_task_ids.add( + async_result.id + ) # Mark this task as processed # Calculate progress progress = int((completed_batches / total_batches) * 100) @@ -1471,7 +1873,7 @@ def refresh_single_m3u_account(account_id): progress, elapsed_time=current_elapsed, time_remaining=time_remaining, - streams_processed=streams_processed + streams_processed=streams_processed, ) # Optionally remove completed task from the group to prevent processing it again @@ -1480,9 +1882,13 @@ def refresh_single_m3u_account(account_id): logger.trace(f"Task is still running.") # Ensure all database transactions are committed before cleanup - logger.info(f"All {total_batches} tasks completed, ensuring DB transactions are committed before cleanup") + logger.info( + f"All {total_batches} tasks completed, ensuring DB transactions are committed before cleanup" + ) # Force a simple DB query to ensure connection sync - Stream.objects.filter(id=-1).exists() # This will never find anything but ensures DB sync + Stream.objects.filter( + id=-1 + ).exists() # This will never find anything but ensures DB sync # Now run cleanup streams_deleted = cleanup_streams(account_id, refresh_start_timestamp) @@ -1490,12 +1896,18 @@ def refresh_single_m3u_account(account_id): # Run auto channel sync after successful refresh auto_sync_message = "" try: - sync_result = sync_auto_channels(account_id, scan_start_time=str(refresh_start_timestamp)) - logger.info(f"Auto channel sync result for account {account_id}: {sync_result}") + sync_result = sync_auto_channels( + account_id, scan_start_time=str(refresh_start_timestamp) + ) + logger.info( + f"Auto channel sync result for account {account_id}: {sync_result}" + ) if sync_result and "created" in sync_result: auto_sync_message = f" {sync_result}." 
except Exception as e: - logger.error(f"Error running auto channel sync for account {account_id}: {str(e)}") + logger.error( + f"Error running auto channel sync for account {account_id}: {str(e)}" + ) # Calculate elapsed time elapsed_time = time.time() - start_time @@ -1508,7 +1920,7 @@ def refresh_single_m3u_account(account_id): f"Total processed: {streams_processed}.{auto_sync_message}" ) account.updated_at = timezone.now() - account.save(update_fields=['status', 'last_message', 'updated_at']) + account.save(update_fields=["status", "last_message", "updated_at"]) # Send final update with complete metrics and explicitly include success status send_m3u_update( @@ -1522,21 +1934,22 @@ def refresh_single_m3u_account(account_id): streams_created=streams_created, streams_updated=streams_updated, streams_deleted=streams_deleted, - message=account.last_message + message=account.last_message, ) except Exception as e: logger.error(f"Error processing M3U for account {account_id}: {str(e)}") account.status = M3UAccount.Status.ERROR account.last_message = f"Error processing M3U: {str(e)}" - account.save(update_fields=['status', 'last_message']) + account.save(update_fields=["status", "last_message"]) raise # Re-raise the exception for Celery to handle - release_task_lock('refresh_single_m3u_account', account_id) + release_task_lock("refresh_single_m3u_account", account_id) # Aggressive garbage collection del existing_groups, extinf_data, groups, batches from core.utils import cleanup_memory + cleanup_memory(log_usage=True, force_collection=True) # Clean up cache file since we've fully processed it @@ -1545,6 +1958,7 @@ def refresh_single_m3u_account(account_id): return f"Dispatched jobs complete." + def send_m3u_update(account_id, action, progress, **kwargs): # Start with the base data dictionary data = { @@ -1567,7 +1981,7 @@ def send_m3u_update(account_id, action, progress, **kwargs): # Add the additional key-value pairs from kwargs data.update(kwargs) - send_websocket_update('updates', 'update', data, collect_garbage=False) + send_websocket_update("updates", "update", data, collect_garbage=False) # Explicitly clear data reference to help garbage collection data = None diff --git a/frontend/src/api.js b/frontend/src/api.js index ddaccbc7..a6998bc2 100644 --- a/frontend/src/api.js +++ b/frontend/src/api.js @@ -256,7 +256,7 @@ export default class API { hasChannels: false, hasM3UAccounts: false, canEdit: true, - canDelete: true + canDelete: true, }; useChannelsStore.getState().addChannelGroup(processedGroup); // Refresh channel groups to update the UI @@ -736,10 +736,13 @@ export default class API { static async updateM3UGroupSettings(playlistId, groupSettings) { try { - const response = await request(`${host}/api/m3u/accounts/${playlistId}/group-settings/`, { - method: 'PATCH', - body: { group_settings: groupSettings }, - }); + const response = await request( + `${host}/api/m3u/accounts/${playlistId}/group-settings/`, + { + method: 'PATCH', + body: { group_settings: groupSettings }, + } + ); // Fetch the updated playlist and update the store const updatedPlaylist = await API.getPlaylist(playlistId); usePlaylistsStore.getState().updatePlaylist(updatedPlaylist); @@ -1110,6 +1113,48 @@ export default class API { } } + static async addM3UFilter(accountId, values) { + try { + const response = await request( + `${host}/api/m3u/accounts/${accountId}/filters/`, + { + method: 'POST', + body: values, + } + ); + + return response; + } catch (e) { + errorNotification(`Failed to add profile to account 
${accountId}`, e); + } + } + + static async deleteM3UFilter(accountId, id) { + try { + await request(`${host}/api/m3u/accounts/${accountId}/filters/${id}/`, { + method: 'DELETE', + }); + } catch (e) { + errorNotification(`Failed to delete profile for account ${accountId}`, e); + } + } + + static async updateM3UFilter(accountId, filterId, values) { + const { id, ...payload } = values; + + try { + await request( + `${host}/api/m3u/accounts/${accountId}/filters/${filterId}/`, + { + method: 'PUT', + body: payload, + } + ); + } catch (e) { + errorNotification(`Failed to update profile for account ${accountId}`, e); + } + } + static async getSettings() { try { const response = await request(`${host}/api/core/settings/`); @@ -1230,7 +1275,9 @@ export default class API { static async getLogos(params = {}) { try { const queryParams = new URLSearchParams(params); - const response = await request(`${host}/api/channels/logos/?${queryParams.toString()}`); + const response = await request( + `${host}/api/channels/logos/?${queryParams.toString()}` + ); return response; } catch (e) { @@ -1369,7 +1416,7 @@ export default class API { }); // Remove multiple logos from store - ids.forEach(id => { + ids.forEach((id) => { useChannelsStore.getState().removeLogo(id); }); diff --git a/frontend/src/components/forms/M3U.jsx b/frontend/src/components/forms/M3U.jsx index 0e4d5643..3d55d31b 100644 --- a/frontend/src/components/forms/M3U.jsx +++ b/frontend/src/components/forms/M3U.jsx @@ -27,6 +27,7 @@ import usePlaylistsStore from '../../store/playlists'; import { notifications } from '@mantine/notifications'; import { isNotEmpty, useForm } from '@mantine/form'; import useEPGsStore from '../../store/epgs'; +import M3UFilters from './M3UFilters'; const M3U = ({ m3uAccount = null, @@ -45,6 +46,7 @@ const M3U = ({ const [file, setFile] = useState(null); const [profileModalOpen, setProfileModalOpen] = useState(false); const [groupFilterModalOpen, setGroupFilterModalOpen] = useState(false); + const [filterModalOpen, setFilterModalOpen] = useState(false); const [loadingText, setLoadingText] = useState(''); const [showCredentialFields, setShowCredentialFields] = useState(false); @@ -85,7 +87,11 @@ const M3U = ({ account_type: m3uAccount.account_type, username: m3uAccount.username ?? '', password: '', - stale_stream_days: m3uAccount.stale_stream_days !== undefined && m3uAccount.stale_stream_days !== null ? m3uAccount.stale_stream_days : 7, + stale_stream_days: + m3uAccount.stale_stream_days !== undefined && + m3uAccount.stale_stream_days !== null + ? m3uAccount.stale_stream_days + : 7, }); if (m3uAccount.account_type == 'XC') { @@ -145,7 +151,8 @@ const M3U = ({ if (values.account_type != 'XC') { notifications.show({ title: 'Fetching M3U Groups', - message: 'Configure group filters and auto sync settings once complete.', + message: + 'Configure group filters and auto sync settings once complete.', }); // Don't prompt for group filters, but keeping this here @@ -177,7 +184,10 @@ const M3U = ({ const closeGroupFilter = () => { setGroupFilterModalOpen(false); - close(); + }; + + const closeFilter = () => { + setFilterModalOpen(false); }; useEffect(() => { @@ -224,7 +234,12 @@ const M3U = ({ id="account_type" name="account_type" label="Account Type" - description={<>Standard for direct M3U URLs,
Xtream Codes for panel-based services</>} + description={ + <> + Standard for direct M3U URLs, + <br />
+ Xtream Codes for panel-based services + </> + } data={[ { value: 'STD', @@ -316,8 +331,13 @@ const M3U = ({ How often to automatically refresh M3U data<br />
- (0 to disable automatic refreshes)</>} + description={ + <> + How often to automatically refresh M3U data + <br />
+ (0 to disable automatic refreshes) + + } {...form.getInputProps('refresh_interval')} key={form.key('refresh_interval')} /> @@ -342,6 +362,13 @@ const M3U = ({ {playlist && ( <> + + + + + {/* Description */} + {displayVOD.description && ( + + Description + + {displayVOD.description} + + + )} + + {/* YouTube trailer if available */} + {displayVOD.youtube_trailer && ( + + Trailer + + + )} + +
+ ); +}; + const MIN_CARD_WIDTH = 260; const MAX_CARD_WIDTH = 320; @@ -316,7 +551,10 @@ const VODsPage = () => { const showVideo = useVideoStore((s) => s.showVideo); const [selectedSeries, setSelectedSeries] = useState(null); + const [selectedVOD, setSelectedVOD] = useState(null); const [seriesModalOpened, { open: openSeriesModal, close: closeSeriesModal }] = useDisclosure(false); + const [vodModalOpened, { open: openVODModal, close: closeVODModal }] = useDisclosure(false); + const [initialLoad, setInitialLoad] = useState(true); const columns = useCardColumns(); useEffect(() => { @@ -325,9 +563,9 @@ const VODsPage = () => { useEffect(() => { if (filters.type === 'series') { - fetchSeries(); + fetchSeries().finally(() => setInitialLoad(false)); } else { - fetchVODs(); + fetchVODs().finally(() => setInitialLoad(false)); } }, [filters, currentPage, fetchVODs, fetchSeries]); @@ -339,7 +577,12 @@ const VODsPage = () => { } else { streamUrl = `${window.location.origin}${vod.stream_url}`; } - showVideo(streamUrl, 'vod'); // Specify VOD content type + showVideo(streamUrl, 'vod', vod); + }; + + const handleVODCardClick = (vod) => { + setSelectedVOD(vod); + openVODModal(); }; const handleSeriesClick = (series) => { @@ -395,7 +638,7 @@ const VODsPage = () => { {/* Content */} - {loading ? ( + {initialLoad ? ( @@ -424,7 +667,7 @@ const VODsPage = () => { key={vod.id} style={{ minWidth: MIN_CARD_WIDTH, maxWidth: MAX_CARD_WIDTH, margin: '0 auto' }} > - + ))} @@ -450,6 +693,13 @@ const VODsPage = () => { opened={seriesModalOpened} onClose={closeSeriesModal} /> + + {/* VOD Details Modal */} + ); }; diff --git a/frontend/src/store/useVODStore.jsx b/frontend/src/store/useVODStore.jsx index cb79701b..249182a8 100644 --- a/frontend/src/store/useVODStore.jsx +++ b/frontend/src/store/useVODStore.jsx @@ -28,8 +28,8 @@ const useVODStore = create((set, get) => ({ })), fetchVODs: async () => { - set({ loading: true, error: null }); try { + set({ loading: true, error: null }); const state = get(); const params = new URLSearchParams(); @@ -126,6 +126,96 @@ const useVODStore = create((set, get) => ({ } }, + fetchVODDetails: async (vodId) => { + set({ loading: true, error: null }); + try { + const response = await api.getVODInfo(vodId); + + // Transform the response data to match our expected format + const vodDetails = { + id: response.id || vodId, + name: response.name || '', + description: response.description || '', + year: response.year || null, + genre: response.genre || '', + rating: response.rating || '', + duration: response.duration || null, + stream_url: response.stream_url || '', + logo: response.logo || null, + type: 'movie', + director: response.director || '', + actors: response.actors || '', + country: response.country || '', + tmdb_id: response.tmdb_id || '', + youtube_trailer: response.youtube_trailer || '', + }; + + set((state) => ({ + vods: { + ...state.vods, + [vodDetails.id]: vodDetails, + }, + loading: false, + })); + + return vodDetails; + } catch (error) { + console.error('Failed to fetch VOD details:', error); + set({ error: 'Failed to load VOD details.', loading: false }); + throw error; + } + }, + + fetchVODDetailsFromProvider: async (vodId) => { + set({ loading: true, error: null }); + try { + const response = await api.getVODInfoFromProvider(vodId); + + // Transform the response data to match our expected format + const vodDetails = { + id: response.id || vodId, + name: response.name || '', + description: response.description || response.plot || '', + year: response.year || null, + genre: 
response.genre || '', + rating: response.rating || '', + duration: response.duration || null, + stream_url: response.stream_url || '', + logo: response.logo || response.cover || null, + type: 'movie', + director: response.director || '', + actors: response.actors || response.cast || '', + country: response.country || '', + tmdb_id: response.tmdb_id || '', + youtube_trailer: response.youtube_trailer || '', + // Additional provider fields + backdrop_path: response.backdrop_path || [], + release_date: response.release_date || response.releasedate || '', + movie_image: response.movie_image || null, + o_name: response.o_name || '', + age: response.age || '', + episode_run_time: response.episode_run_time || null, + bitrate: response.bitrate || 0, + video: response.video || {}, + audio: response.audio || {}, + }; + + set((state) => ({ + vods: { + ...state.vods, + [vodDetails.id]: vodDetails, + }, + loading: false, + })); + + return vodDetails; + } catch (error) { + console.error('Failed to fetch VOD details from provider:', error); + set({ error: 'Failed to load VOD details from provider.', loading: false }); + throw error; + } + }, + fetchCategories: async () => { try { const response = await api.getVODCategories(); diff --git a/frontend/src/store/useVideoStore.jsx b/frontend/src/store/useVideoStore.jsx index 4aa721ed..1ac21542 100644 --- a/frontend/src/store/useVideoStore.jsx +++ b/frontend/src/store/useVideoStore.jsx @@ -8,12 +8,14 @@ const useVideoStore = create((set) => ({ isVisible: false, streamUrl: null, contentType: 'live', // 'live' for MPEG-TS streams, 'vod' for MP4/MKV files + metadata: null, // Store additional metadata for VOD content - showVideo: (url, type = 'live') => + showVideo: (url, type = 'live', metadata = null) => set({ isVisible: true, streamUrl: url, contentType: type, + metadata: metadata, }), hideVideo: () => @@ -21,6 +23,7 @@ const useVideoStore = create((set) => ({ isVisible: false, streamUrl: null, contentType: 'live', + metadata: null, }), })); From 10ab3e4bd811b0874a8c61954a924ce669b38559 Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Mon, 4 Aug 2025 17:28:20 -0500 Subject: [PATCH 129/857] Fix movie link not building correctly for web player. 
--- frontend/src/pages/VODs.jsx | 17 +++-------------- 1 file changed, 3 insertions(+), 14 deletions(-) diff --git a/frontend/src/pages/VODs.jsx b/frontend/src/pages/VODs.jsx index c7c0d296..7138b940 100644 --- a/frontend/src/pages/VODs.jsx +++ b/frontend/src/pages/VODs.jsx @@ -314,11 +314,11 @@ const VODModal = ({ vod, opened, onClose }) => { const vodToPlay = detailedVOD || vod; if (!vodToPlay) return; - let streamUrl = vodToPlay.stream_url; + let streamUrl = `/proxy/vod/movie/${vod.uuid}`; if (env_mode === 'dev') { - streamUrl = `${window.location.protocol}//${window.location.hostname}:5656${vodToPlay.stream_url}`; + streamUrl = `${window.location.protocol}//${window.location.hostname}:5656${streamUrl}`; } else { - streamUrl = `${window.location.origin}${vodToPlay.stream_url}`; + streamUrl = `${window.location.origin}${streamUrl}`; } showVideo(streamUrl, 'vod', vodToPlay); }; @@ -569,17 +569,6 @@ const VODsPage = () => { } }, [filters, currentPage, fetchVODs, fetchSeries]); - const env_mode = useSettingsStore((s) => s.environment.env_mode); - const handlePlayVOD = (vod) => { - let streamUrl = vod.stream_url; - if (env_mode === 'dev') { - streamUrl = `${window.location.protocol}//${window.location.hostname}:5656${vod.stream_url}`; - } else { - streamUrl = `${window.location.origin}${vod.stream_url}`; - } - showVideo(streamUrl, 'vod', vod); - }; - const handleVODCardClick = (vod) => { setSelectedVOD(vod); openVODModal(); From d917a3a915025c63edfb54bb6b742e9d39c7916e Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Mon, 4 Aug 2025 18:21:18 -0500 Subject: [PATCH 130/857] Rearranged data. --- frontend/src/pages/VODs.jsx | 152 ++++++++++++++++++++--------- frontend/src/store/useVODStore.jsx | 3 +- 2 files changed, 106 insertions(+), 49 deletions(-) diff --git a/frontend/src/pages/VODs.jsx b/frontend/src/pages/VODs.jsx index 7138b940..fb877aa7 100644 --- a/frontend/src/pages/VODs.jsx +++ b/frontend/src/pages/VODs.jsx @@ -345,7 +345,7 @@ const VODModal = ({ vod, opened, onClose }) => { > {loadingDetails && ( - + Loading additional details... @@ -443,67 +443,123 @@ const VODModal = ({ vod, opened, onClose }) => {
)} - {/* Technical info */} - {(displayVOD.bitrate || displayVOD.video || displayVOD.audio) && ( - - Technical Details: - {displayVOD.bitrate && displayVOD.bitrate > 0 && ( - - Bitrate: {displayVOD.bitrate} kbps - - )} - {displayVOD.video && Object.keys(displayVOD.video).length > 0 && ( - - Video: {JSON.stringify(displayVOD.video)} - - )} - {displayVOD.audio && Object.keys(displayVOD.audio).length > 0 && ( - - Audio: {JSON.stringify(displayVOD.audio)} - - )} - + {/* Description */} + {displayVOD.description && ( + + Description + + {displayVOD.description} + + )} + {/* Watch Trailer button at top */} + {displayVOD.youtube_trailer && ( + + )} + {/* Removed Play Movie button from here */} + + + {/* Provider Information & Play Button Row */} + {(vod?.m3u_account || true) && ( + + {vod?.m3u_account && ( + + IPTV Provider + + + {vod.m3u_account.name} + + {vod.m3u_account.account_type && ( + + {vod.m3u_account.account_type === 'XC' ? 'Xtream Codes' : 'Standard M3U'} + + )} + + + )} + + )} + {/* Technical Details */} + {(displayVOD.bitrate || displayVOD.video || displayVOD.audio) && ( + + Technical Details: + {displayVOD.bitrate && displayVOD.bitrate > 0 && ( + + Bitrate: {displayVOD.bitrate} kbps + + )} + {displayVOD.video && Object.keys(displayVOD.video).length > 0 && ( + + Video:{' '} + {displayVOD.video.codec_long_name || displayVOD.video.codec_name} + {displayVOD.video.profile ? ` (${displayVOD.video.profile})` : ''} + {displayVOD.video.width && displayVOD.video.height + ? `, ${displayVOD.video.width}x${displayVOD.video.height}` + : ''} + {displayVOD.video.display_aspect_ratio + ? `, Aspect Ratio: ${displayVOD.video.display_aspect_ratio}` + : ''} + {displayVOD.video.bit_rate + ? `, Bitrate: ${Math.round(Number(displayVOD.video.bit_rate) / 1000)} kbps` + : ''} + {displayVOD.video.r_frame_rate + ? `, Frame Rate: ${displayVOD.video.r_frame_rate.replace('/', '/')} fps` + : ''} + {displayVOD.video.tags?.encoder + ? `, Encoder: ${displayVOD.video.tags.encoder}` + : ''} + + )} + {displayVOD.audio && Object.keys(displayVOD.audio).length > 0 && ( + + Audio:{' '} + {displayVOD.audio.codec_long_name || displayVOD.audio.codec_name} + {displayVOD.audio.profile ? ` (${displayVOD.audio.profile})` : ''} + {displayVOD.audio.channel_layout + ? `, Channels: ${displayVOD.audio.channel_layout}` + : displayVOD.audio.channels + ? `, Channels: ${displayVOD.audio.channels}` + : ''} + {displayVOD.audio.sample_rate + ? `, Sample Rate: ${displayVOD.audio.sample_rate} Hz` + : ''} + {displayVOD.audio.bit_rate + ? `, Bitrate: ${Math.round(Number(displayVOD.audio.bit_rate) / 1000)} kbps` + : ''} + {displayVOD.audio.tags?.handler_name + ? 
`, Handler: ${displayVOD.audio.tags.handler_name}` + : ''} + + )} - - - {/* Description */} - {displayVOD.description && ( - - Description - - {displayVOD.description} - - )} - {/* YouTube trailer if available */} - {displayVOD.youtube_trailer && ( - - Trailer - - - )} ); diff --git a/frontend/src/store/useVODStore.jsx b/frontend/src/store/useVODStore.jsx index 249182a8..a8436507 100644 --- a/frontend/src/store/useVODStore.jsx +++ b/frontend/src/store/useVODStore.jsx @@ -148,8 +148,9 @@ const useVODStore = create((set, get) => ({ country: response.country || '', tmdb_id: response.tmdb_id || '', youtube_trailer: response.youtube_trailer || '', + m3u_account: response.m3u_account || '', }; - + console.log('Fetched VOD Details:', vodDetails); set((state) => ({ vods: { ...state.vods, From b19efd2f753529cf26fa5a980563ef35791bf9be Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Mon, 4 Aug 2025 18:24:45 -0500 Subject: [PATCH 131/857] Use backdrop image as background for modal. --- frontend/src/pages/VODs.jsx | 433 +++++++++++++++++++----------------- 1 file changed, 230 insertions(+), 203 deletions(-) diff --git a/frontend/src/pages/VODs.jsx b/frontend/src/pages/VODs.jsx index fb877aa7..d391b887 100644 --- a/frontend/src/pages/VODs.jsx +++ b/frontend/src/pages/VODs.jsx @@ -343,224 +343,251 @@ const VODModal = ({ vod, opened, onClose }) => { size="xl" centered > - - {loadingDetails && ( - - - Loading additional details... - - )} - - {/* Backdrop image if available */} + + {/* Backdrop image as background */} {displayVOD.backdrop_path && displayVOD.backdrop_path.length > 0 && ( - + <> {`${displayVOD.name} - - )} - - {/* Movie poster and basic info */} - - {/* Use movie_image or logo */} - {(displayVOD.movie_image || displayVOD.logo?.url) ? ( - - {displayVOD.name} - - ) : ( + {/* Overlay for readability */} - - - )} - - - {displayVOD.name} - - {/* Original name if different */} - {displayVOD.o_name && displayVOD.o_name !== displayVOD.name && ( - - Original: {displayVOD.o_name} - + /> + + )} + {/* Modal content above backdrop */} + + + {loadingDetails && ( + + + Loading additional details... + )} - - {displayVOD.year && {displayVOD.year}} - {displayVOD.duration && {formatDuration(displayVOD.duration)}} - {displayVOD.rating && {displayVOD.rating}} - {displayVOD.age && {displayVOD.age}} - Movie - - - {/* Release date */} - {displayVOD.release_date && ( - - Release Date: {displayVOD.release_date} - - )} - - {displayVOD.genre && ( - - Genre: {displayVOD.genre} - - )} - - {displayVOD.director && ( - - Director: {displayVOD.director} - - )} - - {displayVOD.actors && ( - - Cast: {displayVOD.actors} - - )} - - {displayVOD.country && ( - - Country: {displayVOD.country} - - )} - - {/* Description */} - {displayVOD.description && ( - - Description - - {displayVOD.description} - - - )} - - {/* Watch Trailer button at top */} - {displayVOD.youtube_trailer && ( - - )} - {/* Removed Play Movie button from here */} - - - {/* Provider Information & Play Button Row */} - {(vod?.m3u_account || true) && ( - - {vod?.m3u_account && ( - - IPTV Provider - - - {vod.m3u_account.name} - - {vod.m3u_account.account_type && ( - - {vod.m3u_account.account_type === 'XC' ? 
'Xtream Codes' : 'Standard M3U'} - - )} - - - )} - - - )} - {/* Technical Details */} - {(displayVOD.bitrate || displayVOD.video || displayVOD.audio) && ( - - Technical Details: - {displayVOD.bitrate && displayVOD.bitrate > 0 && ( - - Bitrate: {displayVOD.bitrate} kbps - + + + {displayVOD.name} + + {/* Original name if different */} + {displayVOD.o_name && displayVOD.o_name !== displayVOD.name && ( + + Original: {displayVOD.o_name} + + )} + + + {displayVOD.year && {displayVOD.year}} + {displayVOD.duration && {formatDuration(displayVOD.duration)}} + {displayVOD.rating && {displayVOD.rating}} + {displayVOD.age && {displayVOD.age}} + Movie + + + {/* Release date */} + {displayVOD.release_date && ( + + Release Date: {displayVOD.release_date} + + )} + + {displayVOD.genre && ( + + Genre: {displayVOD.genre} + + )} + + {displayVOD.director && ( + + Director: {displayVOD.director} + + )} + + {displayVOD.actors && ( + + Cast: {displayVOD.actors} + + )} + + {displayVOD.country && ( + + Country: {displayVOD.country} + + )} + + {/* Description */} + {displayVOD.description && ( + + Description + + {displayVOD.description} + + + )} + + {/* Watch Trailer button at top */} + {displayVOD.youtube_trailer && ( + + )} + {/* Removed Play Movie button from here */} + + + {/* Provider Information & Play Button Row */} + {(vod?.m3u_account || true) && ( + + {vod?.m3u_account && ( + + IPTV Provider + + + {vod.m3u_account.name} + + {vod.m3u_account.account_type && ( + + {vod.m3u_account.account_type === 'XC' ? 'Xtream Codes' : 'Standard M3U'} + + )} + + + )} + + )} - {displayVOD.video && Object.keys(displayVOD.video).length > 0 && ( - - Video:{' '} - {displayVOD.video.codec_long_name || displayVOD.video.codec_name} - {displayVOD.video.profile ? ` (${displayVOD.video.profile})` : ''} - {displayVOD.video.width && displayVOD.video.height - ? `, ${displayVOD.video.width}x${displayVOD.video.height}` - : ''} - {displayVOD.video.display_aspect_ratio - ? `, Aspect Ratio: ${displayVOD.video.display_aspect_ratio}` - : ''} - {displayVOD.video.bit_rate - ? `, Bitrate: ${Math.round(Number(displayVOD.video.bit_rate) / 1000)} kbps` - : ''} - {displayVOD.video.r_frame_rate - ? `, Frame Rate: ${displayVOD.video.r_frame_rate.replace('/', '/')} fps` - : ''} - {displayVOD.video.tags?.encoder - ? `, Encoder: ${displayVOD.video.tags.encoder}` - : ''} - - )} - {displayVOD.audio && Object.keys(displayVOD.audio).length > 0 && ( - - Audio:{' '} - {displayVOD.audio.codec_long_name || displayVOD.audio.codec_name} - {displayVOD.audio.profile ? ` (${displayVOD.audio.profile})` : ''} - {displayVOD.audio.channel_layout - ? `, Channels: ${displayVOD.audio.channel_layout}` - : displayVOD.audio.channels - ? `, Channels: ${displayVOD.audio.channels}` - : ''} - {displayVOD.audio.sample_rate - ? `, Sample Rate: ${displayVOD.audio.sample_rate} Hz` - : ''} - {displayVOD.audio.bit_rate - ? `, Bitrate: ${Math.round(Number(displayVOD.audio.bit_rate) / 1000)} kbps` - : ''} - {displayVOD.audio.tags?.handler_name - ? `, Handler: ${displayVOD.audio.tags.handler_name}` - : ''} - + {/* Technical Details */} + {(displayVOD.bitrate || displayVOD.video || displayVOD.audio) && ( + + Technical Details: + {displayVOD.bitrate && displayVOD.bitrate > 0 && ( + + Bitrate: {displayVOD.bitrate} kbps + + )} + {displayVOD.video && Object.keys(displayVOD.video).length > 0 && ( + + Video:{' '} + {displayVOD.video.codec_long_name || displayVOD.video.codec_name} + {displayVOD.video.profile ? 
` (${displayVOD.video.profile})` : ''} + {displayVOD.video.width && displayVOD.video.height + ? `, ${displayVOD.video.width}x${displayVOD.video.height}` + : ''} + {displayVOD.video.display_aspect_ratio + ? `, Aspect Ratio: ${displayVOD.video.display_aspect_ratio}` + : ''} + {displayVOD.video.bit_rate + ? `, Bitrate: ${Math.round(Number(displayVOD.video.bit_rate) / 1000)} kbps` + : ''} + {displayVOD.video.r_frame_rate + ? `, Frame Rate: ${displayVOD.video.r_frame_rate.replace('/', '/')} fps` + : ''} + {displayVOD.video.tags?.encoder + ? `, Encoder: ${displayVOD.video.tags.encoder}` + : ''} + + )} + {displayVOD.audio && Object.keys(displayVOD.audio).length > 0 && ( + + Audio:{' '} + {displayVOD.audio.codec_long_name || displayVOD.audio.codec_name} + {displayVOD.audio.profile ? ` (${displayVOD.audio.profile})` : ''} + {displayVOD.audio.channel_layout + ? `, Channels: ${displayVOD.audio.channel_layout}` + : displayVOD.audio.channels + ? `, Channels: ${displayVOD.audio.channels}` + : ''} + {displayVOD.audio.sample_rate + ? `, Sample Rate: ${displayVOD.audio.sample_rate} Hz` + : ''} + {displayVOD.audio.bit_rate + ? `, Bitrate: ${Math.round(Number(displayVOD.audio.bit_rate) / 1000)} kbps` + : ''} + {displayVOD.audio.tags?.handler_name + ? `, Handler: ${displayVOD.audio.tags.handler_name}` + : ''} + + )} + )} + {/* YouTube trailer if available */} - )} - {/* YouTube trailer if available */} - + + ); }; From 36450af23fffe808486b7b1b8af2c88b7df76dda Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Mon, 4 Aug 2025 18:30:33 -0500 Subject: [PATCH 132/857] Fix youtube links not loading. --- frontend/src/pages/VODs.jsx | 530 +++++++++++++++++++----------------- 1 file changed, 287 insertions(+), 243 deletions(-) diff --git a/frontend/src/pages/VODs.jsx b/frontend/src/pages/VODs.jsx index d391b887..8a4b4ab7 100644 --- a/frontend/src/pages/VODs.jsx +++ b/frontend/src/pages/VODs.jsx @@ -282,6 +282,8 @@ const SeriesModal = ({ series, opened, onClose }) => { const VODModal = ({ vod, opened, onClose }) => { const [detailedVOD, setDetailedVOD] = useState(null); const [loadingDetails, setLoadingDetails] = useState(false); + const [trailerModalOpened, setTrailerModalOpened] = useState(false); + const [trailerUrl, setTrailerUrl] = useState(''); const { fetchVODDetailsFromProvider } = useVODStore(); const showVideo = useVideoStore((s) => s.showVideo); const env_mode = useSettingsStore((s) => s.environment.env_mode); @@ -307,6 +309,8 @@ const VODModal = ({ vod, opened, onClose }) => { if (!opened) { setDetailedVOD(null); setLoadingDetails(false); + setTrailerModalOpened(false); + setTrailerUrl(''); } }, [opened]); @@ -330,265 +334,305 @@ const VODModal = ({ vod, opened, onClose }) => { return hours > 0 ? `${hours}h ${mins}m` : `${mins}m`; }; + // Helper to get embeddable YouTube URL + const getEmbedUrl = (url) => { + if (!url) return ''; + // Accepts full YouTube URLs or just IDs + const match = url.match(/(?:youtube\.com\/watch\?v=|youtu\.be\/)([\w-]+)/); + const videoId = match ? 
match[1] : url; + return `https://www.youtube.com/embed/${videoId}`; + }; + if (!vod) return null; // Use detailed data if available, otherwise use basic vod data const displayVOD = detailedVOD || vod; return ( - - - {/* Backdrop image as background */} - {displayVOD.backdrop_path && displayVOD.backdrop_path.length > 0 && ( - <> - {`${displayVOD.name} + + + {/* Backdrop image as background */} + {displayVOD.backdrop_path && displayVOD.backdrop_path.length > 0 && ( + <> + {`${displayVOD.name} + {/* Overlay for readability */} + + + )} + {/* Modal content above backdrop */} + + + {loadingDetails && ( + + + Loading additional details... + + )} + + {/* Movie poster and basic info */} + + {/* Use movie_image or logo */} + {(displayVOD.movie_image || displayVOD.logo?.url) ? ( + + {displayVOD.name} + + ) : ( + + + + )} + + + {displayVOD.name} + + {/* Original name if different */} + {displayVOD.o_name && displayVOD.o_name !== displayVOD.name && ( + + Original: {displayVOD.o_name} + + )} + + + {displayVOD.year && {displayVOD.year}} + {displayVOD.duration && {formatDuration(displayVOD.duration)}} + {displayVOD.rating && {displayVOD.rating}} + {displayVOD.age && {displayVOD.age}} + Movie + + + {/* Release date */} + {displayVOD.release_date && ( + + Release Date: {displayVOD.release_date} + + )} + + {displayVOD.genre && ( + + Genre: {displayVOD.genre} + + )} + + {displayVOD.director && ( + + Director: {displayVOD.director} + + )} + + {displayVOD.actors && ( + + Cast: {displayVOD.actors} + + )} + + {displayVOD.country && ( + + Country: {displayVOD.country} + + )} + + {/* Description */} + {displayVOD.description && ( + + Description + + {displayVOD.description} + + + )} + + {/* Watch Trailer button at top */} + {displayVOD.youtube_trailer && ( + + )} + {/* Removed Play Movie button from here */} + + + {/* Provider Information & Play Button Row */} + {(vod?.m3u_account || true) && ( + + {vod?.m3u_account && ( + + IPTV Provider + + + {vod.m3u_account.name} + + {vod.m3u_account.account_type && ( + + {vod.m3u_account.account_type === 'XC' ? 'Xtream Codes' : 'Standard M3U'} + + )} + + + )} + + + )} + {/* Technical Details */} + {(displayVOD.bitrate || displayVOD.video || displayVOD.audio) && ( + + Technical Details: + {displayVOD.bitrate && displayVOD.bitrate > 0 && ( + + Bitrate: {displayVOD.bitrate} kbps + + )} + {displayVOD.video && Object.keys(displayVOD.video).length > 0 && ( + + Video:{' '} + {displayVOD.video.codec_long_name || displayVOD.video.codec_name} + {displayVOD.video.profile ? ` (${displayVOD.video.profile})` : ''} + {displayVOD.video.width && displayVOD.video.height + ? `, ${displayVOD.video.width}x${displayVOD.video.height}` + : ''} + {displayVOD.video.display_aspect_ratio + ? `, Aspect Ratio: ${displayVOD.video.display_aspect_ratio}` + : ''} + {displayVOD.video.bit_rate + ? `, Bitrate: ${Math.round(Number(displayVOD.video.bit_rate) / 1000)} kbps` + : ''} + {displayVOD.video.r_frame_rate + ? `, Frame Rate: ${displayVOD.video.r_frame_rate.replace('/', '/')} fps` + : ''} + {displayVOD.video.tags?.encoder + ? `, Encoder: ${displayVOD.video.tags.encoder}` + : ''} + + )} + {displayVOD.audio && Object.keys(displayVOD.audio).length > 0 && ( + + Audio:{' '} + {displayVOD.audio.codec_long_name || displayVOD.audio.codec_name} + {displayVOD.audio.profile ? ` (${displayVOD.audio.profile})` : ''} + {displayVOD.audio.channel_layout + ? `, Channels: ${displayVOD.audio.channel_layout}` + : displayVOD.audio.channels + ? 
`, Channels: ${displayVOD.audio.channels}` + : ''} + {displayVOD.audio.sample_rate + ? `, Sample Rate: ${displayVOD.audio.sample_rate} Hz` + : ''} + {displayVOD.audio.bit_rate + ? `, Bitrate: ${Math.round(Number(displayVOD.audio.bit_rate) / 1000)} kbps` + : ''} + {displayVOD.audio.tags?.handler_name + ? `, Handler: ${displayVOD.audio.tags.handler_name}` + : ''} + + )} + + )} + {/* YouTube trailer if available */} + + + + + {/* YouTube Trailer Modal */} + setTrailerModalOpened(false)} + title="Trailer" + size="xl" + centered + withCloseButton + > + + {trailerUrl && ( +