From cc9d38212e0572776461dc4b5cd8a35c962a2af5 Mon Sep 17 00:00:00 2001
From: SergeantPanda
Date: Fri, 9 Jan 2026 12:03:55 -0600
Subject: [PATCH] Enhancement: Groups now follow the same stale retention logic as streams, using the account's `stale_stream_days` setting. Groups that temporarily disappear from an M3U source are retained for the configured retention period instead of being immediately deleted, preserving user settings and preventing data loss when providers temporarily remove/re-add groups. (Closes #809)

---
 CHANGELOG.md            |  4 ++
 apps/channels/models.py |  5 +++
 apps/m3u/tasks.py       | 95 +++++++++++++++++++++++++++++------------
 3 files changed, 76 insertions(+), 28 deletions(-)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index 83a9a64a..67a45fdf 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -7,6 +7,10 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
 
 ## [Unreleased]
 
+### Added
+
+- Group retention policy for M3U accounts: Groups now follow the same stale retention logic as streams, using the account's `stale_stream_days` setting. Groups that temporarily disappear from an M3U source are retained for the configured retention period instead of being immediately deleted, preserving user settings and preventing data loss when providers temporarily remove/re-add groups. (Closes #809)
+
 ### Changed
 
 - Docker setup enhanced for legacy CPU support: Added `USE_LEGACY_NUMPY` environment variable to enable custom-built NumPy with no CPU baseline, allowing Dispatcharr to run on older CPUs (circa 2009) that lack support for newer baseline CPU features. When set to `true`, the entrypoint script will install the legacy NumPy build instead of the standard distribution.
diff --git a/apps/channels/models.py b/apps/channels/models.py
index 88df3661..aff4a2fb 100644
--- a/apps/channels/models.py
+++ b/apps/channels/models.py
@@ -589,6 +589,11 @@ class ChannelGroupM3UAccount(models.Model):
         blank=True,
         help_text='Starting channel number for auto-created channels in this group'
     )
+    last_seen = models.DateTimeField(
+        default=datetime.now,
+        db_index=True,
+        help_text='Last time this group was seen in the M3U source during a refresh'
+    )
 
     class Meta:
         unique_together = ("channel_group", "m3u_account")
diff --git a/apps/m3u/tasks.py b/apps/m3u/tasks.py
index 87759ab9..71a8ba60 100644
--- a/apps/m3u/tasks.py
+++ b/apps/m3u/tasks.py
@@ -513,7 +513,19 @@ def check_field_lengths(streams_to_create):
 
 
 @shared_task
-def process_groups(account, groups):
+def process_groups(account, groups, scan_start_time=None):
+    """Process groups and update their relationships with the M3U account.
+
+    Args:
+        account: M3UAccount instance
+        groups: Dict of group names to custom properties
+        scan_start_time: Timestamp when the scan started (for consistent last_seen marking)
+    """
+    # Use scan_start_time if provided, otherwise current time
+    # This ensures consistency with stream processing and cleanup logic
+    if scan_start_time is None:
+        scan_start_time = timezone.now()
+
     existing_groups = {
         group.name: group
         for group in ChannelGroup.objects.filter(name__in=groups.keys())
@@ -553,24 +565,8 @@
         ).select_related('channel_group')
     }
 
-    # Get ALL existing relationships for this account to identify orphaned ones
-    all_existing_relationships = {
-        rel.channel_group.name: rel
-        for rel in ChannelGroupM3UAccount.objects.filter(
-            m3u_account=account
-        ).select_related('channel_group')
-    }
-
     relations_to_create = []
     relations_to_update = []
-    relations_to_delete = []
-
-    # Find orphaned relationships (groups that no longer exist in the source)
-    current_group_names = set(groups.keys())
-    for group_name, rel in all_existing_relationships.items():
-        if group_name not in current_group_names:
-            relations_to_delete.append(rel)
-            logger.debug(f"Marking relationship for deletion: group '{group_name}' no longer exists in source for account {account.id}")
 
     for group in all_group_objs:
         custom_props = groups.get(group.name, {})
@@ -597,9 +593,13 @@
                     del updated_custom_props["xc_id"]
 
                 existing_rel.custom_properties = updated_custom_props
+                existing_rel.last_seen = scan_start_time
                 relations_to_update.append(existing_rel)
                 logger.debug(f"Updated xc_id for group '{group.name}' from '{existing_xc_id}' to '{new_xc_id}' - account {account.id}")
             else:
+                # Update last_seen even if xc_id hasn't changed
+                existing_rel.last_seen = scan_start_time
+                relations_to_update.append(existing_rel)
                 logger.debug(f"xc_id unchanged for group '{group.name}' - account {account.id}")
         else:
             # Create new relationship - this group is new to this M3U account
@@ -613,6 +613,7 @@
                     m3u_account=account,
                     custom_properties=custom_props,
                     enabled=auto_enable_new_groups_live,
+                    last_seen=scan_start_time,
                 )
             )
 
@@ -623,15 +624,38 @@
 
     # Bulk update existing relationships
     if relations_to_update:
-        ChannelGroupM3UAccount.objects.bulk_update(relations_to_update, ['custom_properties'])
-        logger.info(f"Updated {len(relations_to_update)} existing group relationships with new xc_id values for account {account.id}")
+        ChannelGroupM3UAccount.objects.bulk_update(relations_to_update, ['custom_properties', 'last_seen'])
+        logger.info(f"Updated {len(relations_to_update)} existing group relationships for account {account.id}")
 
-    # Delete orphaned relationships
-    if relations_to_delete:
-        ChannelGroupM3UAccount.objects.filter(
-            id__in=[rel.id for rel in relations_to_delete]
-        ).delete()
-        logger.info(f"Deleted {len(relations_to_delete)} orphaned group relationships for account {account.id}: {[rel.channel_group.name for rel in relations_to_delete]}")
+
+def cleanup_stale_group_relationships(account, scan_start_time):
+    """
+    Remove group relationships that haven't been seen since the stale retention period.
+    This follows the same logic as stream cleanup for consistency.
+    """
+    # Calculate cutoff date for stale group relationships
+    stale_cutoff = scan_start_time - timezone.timedelta(days=account.stale_stream_days)
+    logger.info(
+        f"Removing group relationships not seen since {stale_cutoff} for M3U account {account.id}"
+    )
+
+    # Find stale relationships
+    stale_relationships = ChannelGroupM3UAccount.objects.filter(
+        m3u_account=account,
+        last_seen__lt=stale_cutoff
+    ).select_related('channel_group')
+
+    relations_to_delete = list(stale_relationships)
+    deleted_count = len(relations_to_delete)
+
+    if deleted_count > 0:
+        logger.info(
+            f"Found {deleted_count} stale group relationships for account {account.id}: "
+            f"{[rel.channel_group.name for rel in relations_to_delete]}"
+        )
+
+        # Delete the stale relationships
+        stale_relationships.delete()
 
         # Check if any of the deleted relationships left groups with no remaining associations
         orphaned_group_ids = []
@@ -656,6 +680,10 @@
             deleted_groups = list(ChannelGroup.objects.filter(id__in=orphaned_group_ids).values_list('name', flat=True))
             ChannelGroup.objects.filter(id__in=orphaned_group_ids).delete()
             logger.info(f"Deleted {len(orphaned_group_ids)} orphaned groups that had no remaining associations: {deleted_groups}")
+    else:
+        logger.debug(f"No stale group relationships found for account {account.id}")
+
+    return deleted_count
 
 
 def collect_xc_streams(account_id, enabled_groups):
@@ -1092,7 +1120,15 @@ def cleanup_streams(account_id, scan_start_time=timezone.now):
 
 
 @shared_task
-def refresh_m3u_groups(account_id, use_cache=False, full_refresh=False):
+def refresh_m3u_groups(account_id, use_cache=False, full_refresh=False, scan_start_time=None):
+    """Refresh M3U groups for an account.
+
+    Args:
+        account_id: ID of the M3U account
+        use_cache: Whether to use cached M3U file
+        full_refresh: Whether this is part of a full refresh
+        scan_start_time: Timestamp when the scan started (for consistent last_seen marking)
+    """
     if not acquire_task_lock("refresh_m3u_account_groups", account_id):
         return f"Task already running for account_id={account_id}.", None
 
@@ -1419,7 +1455,7 @@
 
     send_m3u_update(account_id, "processing_groups", 0)
 
-    process_groups(account, groups)
+    process_groups(account, groups, scan_start_time)
 
     release_task_lock("refresh_m3u_account_groups", account_id)
 
@@ -2526,7 +2562,7 @@ def refresh_single_m3u_account(account_id):
    if not extinf_data:
        try:
            logger.info(f"Calling refresh_m3u_groups for account {account_id}")
-            result = refresh_m3u_groups(account_id, full_refresh=True)
+            result = refresh_m3u_groups(account_id, full_refresh=True, scan_start_time=refresh_start_timestamp)
            logger.trace(f"refresh_m3u_groups result: {result}")
 
            # Check for completely empty result or missing groups
@@ -2809,6 +2845,9 @@
     # Now run cleanup
     streams_deleted = cleanup_streams(account_id, refresh_start_timestamp)
 
+    # Cleanup stale group relationships (follows same retention policy as streams)
+    cleanup_stale_group_relationships(account, refresh_start_timestamp)
+
     # Run auto channel sync after successful refresh
     auto_sync_message = ""
     try:
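Reviewer note (not part of the patch): the minimal sketch below illustrates the retention window the new cleanup applies to group relationships, mirroring the cutoff logic added to apps/m3u/tasks.py above. It is an untested illustration, not code from the PR; the import path and the `account` argument (an M3UAccount instance with `stale_stream_days` set) are assumptions based on the file layout shown in this diff.

# Illustrative sketch only -- assumes a configured Django environment for Dispatcharr.
from django.utils import timezone

# Assumption: the relationship model is importable from the app path shown in the diff.
from apps.channels.models import ChannelGroupM3UAccount


def preview_stale_group_relationships(account):
    """Return the group relationships a cleanup pass starting now would delete."""
    scan_start_time = timezone.now()
    # Same cutoff the patch computes: anything not seen within stale_stream_days is stale.
    stale_cutoff = scan_start_time - timezone.timedelta(days=account.stale_stream_days)
    return ChannelGroupM3UAccount.objects.filter(
        m3u_account=account,
        last_seen__lt=stale_cutoff,
    )

# Example: preview_stale_group_relationships(account).count()

Relationships whose `last_seen` falls inside the window keep their enabled state and custom properties even if the provider's playlist temporarily drops the group, which is the behavior change described in the CHANGELOG entry above.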