diff --git a/apps/channels/api_views.py b/apps/channels/api_views.py
index 51952541..ccd942d6 100644
--- a/apps/channels/api_views.py
+++ b/apps/channels/api_views.py
@@ -541,6 +541,71 @@ class ChannelViewSet(viewsets.ModelViewSet):
         except Exception as e:
             return Response({"error": str(e)}, status=400)
 
+    @swagger_auto_schema(
+        method='post',
+        operation_description="Associate multiple channels with EPG data without triggering a full refresh",
+        request_body=openapi.Schema(
+            type=openapi.TYPE_OBJECT,
+            properties={
+                'associations': openapi.Schema(
+                    type=openapi.TYPE_ARRAY,
+                    items=openapi.Schema(
+                        type=openapi.TYPE_OBJECT,
+                        properties={
+                            'channel_id': openapi.Schema(type=openapi.TYPE_INTEGER),
+                            'epg_data_id': openapi.Schema(type=openapi.TYPE_INTEGER)
+                        }
+                    )
+                )
+            }
+        ),
+        responses={200: "EPG data linked for multiple channels"}
+    )
+    @action(detail=False, methods=['post'], url_path='batch-set-epg')
+    def batch_set_epg(self, request):
+        """Efficiently associate multiple channels with EPG data at once."""
+        associations = request.data.get('associations', [])
+        channels_updated = 0
+        programs_refreshed = 0
+        unique_epg_ids = set()
+
+        for assoc in associations:
+            channel_id = assoc.get('channel_id')
+            epg_data_id = assoc.get('epg_data_id')
+
+            if not channel_id:
+                continue
+
+            try:
+                # Get the channel
+                channel = Channel.objects.get(id=channel_id)
+
+                # Set the EPG data
+                channel.epg_data_id = epg_data_id
+                channel.save(update_fields=['epg_data'])
+                channels_updated += 1
+
+                # Track unique EPG data IDs
+                if epg_data_id:
+                    unique_epg_ids.add(epg_data_id)
+
+            except Channel.DoesNotExist:
+                logger.error(f"Channel with ID {channel_id} not found")
+            except Exception as e:
+                logger.error(f"Error setting EPG data for channel {channel_id}: {str(e)}")
+
+        # Trigger program refresh for unique EPG data IDs
+        from apps.epg.tasks import parse_programs_for_tvg_id
+        for epg_id in unique_epg_ids:
+            parse_programs_for_tvg_id.delay(epg_id)
+            programs_refreshed += 1
+
+        return Response({
+            'success': True,
+            'channels_updated': channels_updated,
+            'programs_refreshed': programs_refreshed
+        })
+
     # ─────────────────────────────────────────────────────────
     # 4) Bulk Delete Streams
     # ─────────────────────────────────────────────────────────
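For reviewers, a quick way to exercise the new endpoint. This is a minimal sketch, not part of the patch; the host/port and bearer-token auth are assumptions, while the URL path, request body, and response fields come from the view above.

```python
# Minimal sketch of calling the new batch endpoint.
# Host, port and auth scheme are assumptions; adjust for your deployment.
import requests

payload = {
    "associations": [
        {"channel_id": 12, "epg_data_id": 345},  # example IDs
        {"channel_id": 13, "epg_data_id": 346},
    ]
}

resp = requests.post(
    "http://localhost:8000/api/channels/channels/batch-set-epg/",
    json=payload,
    headers={"Authorization": "Bearer <token>"},  # assumed auth scheme
    timeout=10,
)
# Expected shape per the view: {"success": true, "channels_updated": 2, "programs_refreshed": 2}
print(resp.json())
```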
diff --git a/apps/channels/tasks.py b/apps/channels/tasks.py
index 94bb8ca9..88d040e8 100755
--- a/apps/channels/tasks.py
+++ b/apps/channels/tasks.py
@@ -75,21 +75,42 @@ def match_epg_channels():
         matched_channels = []
         channels_to_update = []
 
-        channels_json = [{
-            "id": channel.id,
-            "name": channel.name,
-            "tvg_id": channel.tvg_id,
-            "fallback_name": channel.tvg_id.strip() if channel.tvg_id else channel.name,
-            "norm_chan": normalize_name(channel.tvg_id.strip() if channel.tvg_id else channel.name)
-        } for channel in Channel.objects.all() if not channel.epg_data]
+        # Get channels that don't have EPG data assigned
+        channels_without_epg = Channel.objects.filter(epg_data__isnull=True)
+        logger.info(f"Found {channels_without_epg.count()} channels without EPG data")
 
-        epg_json = [{
-            'id': epg.id,
-            'tvg_id': epg.tvg_id,
-            'name': epg.name,
-            'norm_name': normalize_name(epg.name),
-            'epg_source_id': epg.epg_source.id,
-        } for epg in EPGData.objects.all()]
+        channels_json = []
+        for channel in channels_without_epg:
+            # Normalize TVG ID - strip whitespace and convert to lowercase
+            normalized_tvg_id = channel.tvg_id.strip().lower() if channel.tvg_id else ""
+            if normalized_tvg_id:
+                logger.info(f"Processing channel {channel.id} '{channel.name}' with TVG ID='{normalized_tvg_id}'")
+
+            channels_json.append({
+                "id": channel.id,
+                "name": channel.name,
+                "tvg_id": normalized_tvg_id,  # Use normalized TVG ID
+                "original_tvg_id": channel.tvg_id,  # Keep original for reference
+                "fallback_name": normalized_tvg_id if normalized_tvg_id else channel.name,
+                "norm_chan": normalize_name(normalized_tvg_id if normalized_tvg_id else channel.name)
+            })
+
+        # Similarly normalize EPG data TVG IDs
+        epg_json = []
+        for epg in EPGData.objects.all():
+            normalized_tvg_id = epg.tvg_id.strip().lower() if epg.tvg_id else ""
+            epg_json.append({
+                'id': epg.id,
+                'tvg_id': normalized_tvg_id,  # Use normalized TVG ID
+                'original_tvg_id': epg.tvg_id,  # Keep original for reference
+                'name': epg.name,
+                'norm_name': normalize_name(epg.name),
+                'epg_source_id': epg.epg_source.id if epg.epg_source else None,
+            })
+
+        # Log available EPG data TVG IDs for debugging
+        unique_epg_tvg_ids = set(e['tvg_id'] for e in epg_json if e['tvg_id'])
+        logger.info(f"Available EPG TVG IDs: {', '.join(sorted(unique_epg_tvg_ids))}")
 
         payload = {
             "channels": channels_json,
@@ -159,12 +180,25 @@ def match_epg_channels():
 
         logger.info("Finished EPG matching logic.")
 
+        # Send update with additional information for refreshing UI
         channel_layer = get_channel_layer()
+        associations = [
+            {"channel_id": chan["id"], "epg_data_id": chan["epg_data_id"]}
+            for chan in channels_to_update_dicts
+        ]
+
        async_to_sync(channel_layer.group_send)(
             'updates',
             {
                 'type': 'update',
-                "data": {"success": True, "type": "epg_match"}
+                "data": {
+                    "success": True,
+                    "type": "epg_match",
+                    "refresh_channels": True,  # Flag to tell frontend to refresh channels
+                    "matches_count": total_matched,
+                    "message": f"EPG matching complete: {total_matched} channel(s) matched",
+                    "associations": associations  # Add the associations data
+                }
             }
         )
diff --git a/frontend/src/WebSocket.jsx b/frontend/src/WebSocket.jsx
index cd4bca6f..0f5c4404 100644
--- a/frontend/src/WebSocket.jsx
+++ b/frontend/src/WebSocket.jsx
@@ -14,7 +14,7 @@ import useEPGsStore from './store/epgs';
 import { Box, Button, Stack } from '@mantine/core';
 import API from './api';
 
-export const WebsocketContext = createContext([false, () => {}, null]);
+export const WebsocketContext = createContext([false, () => { }, null]);
 
 export const WebsocketProvider = ({ children }) => {
   const [isReady, setIsReady] = useState(false);
@@ -121,11 +121,17 @@ export const WebsocketProvider = ({ children }) => {
 
       case 'epg_match':
         notifications.show({
-          message: 'EPG match is complete!',
+          message: event.data.message || 'EPG match is complete!',
           color: 'green.5',
         });
-        // fetchChannels();
-        fetchEPGData();
+
+        // Check if we have associations data and use the more efficient batch API
+        if (event.data.associations && event.data.associations.length > 0) {
+          API.batchSetEPG(event.data.associations);
+        } else {
+          // Fall back to legacy full refresh method
+          API.requeryChannels();
+        }
         break;
 
       case 'm3u_profile_test':
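For reference, this is the shape of the `epg_match` websocket event the task now emits and the frontend consumes; a representative payload with made-up values:

```python
# Illustrative payload for the "update" group_send above; IDs and counts are examples only.
example_event = {
    "type": "update",
    "data": {
        "success": True,
        "type": "epg_match",
        "refresh_channels": True,
        "matches_count": 2,
        "message": "EPG matching complete: 2 channel(s) matched",
        "associations": [
            {"channel_id": 12, "epg_data_id": 345},
            {"channel_id": 13, "epg_data_id": 346},
        ],
    },
}
```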
diff --git a/frontend/src/api.js b/frontend/src/api.js
index e54e628a..8e1fe46c 100644
--- a/frontend/src/api.js
+++ b/frontend/src/api.js
@@ -1130,7 +1130,7 @@ export default class API {
 
       return response;
     } catch (e) {
-      errorNotification('Failed to create channle profile', e);
+      errorNotification('Failed to create channel profile', e);
     }
   }
 
@@ -1271,4 +1271,31 @@ export default class API {
       throw e;
     }
   }
+
+  static async batchSetEPG(associations) {
+    try {
+      const response = await request(
+        `${host}/api/channels/channels/batch-set-epg/`,
+        {
+          method: 'POST',
+          body: { associations },
+        }
+      );
+
+      // If successful, requery channels to update UI
+      if (response.success) {
+        notifications.show({
+          title: 'EPG Association',
+          message: `Updated ${response.channels_updated} channels, refreshing ${response.programs_refreshed} EPG sources.`,
+          color: 'blue',
+        });
+
+        this.requeryChannels();
+      }
+
+      return response;
+    } catch (e) {
+      errorNotification('Failed to update channel EPGs', e);
+    }
+  }
 }
diff --git a/scripts/epg_match.py b/scripts/epg_match.py
index e5d17466..ed86d865 100644
--- a/scripts/epg_match.py
+++ b/scripts/epg_match.py
@@ -34,7 +34,7 @@ def process_data(input_data):
     channels = input_data["channels"]
     epg_data = input_data["epg_data"]
-    region_code = input_data["region_code"]
+    region_code = input_data.get("region_code", None)
 
     epg_embeddings = None
     if any(row["norm_name"] for row in epg_data):
@@ -47,6 +47,21 @@ def process_data(input_data):
     matched_channels = []
 
     for chan in channels:
+        normalized_tvg_id = chan.get("tvg_id", "")
+        fallback_name = chan["tvg_id"].strip() if chan["tvg_id"] else chan["name"]
+
+        # Exact TVG ID match (direct match)
+        epg_by_tvg_id = next((epg for epg in epg_data if epg["tvg_id"] == normalized_tvg_id), None)
+        if normalized_tvg_id and epg_by_tvg_id:
+            chan["epg_data_id"] = epg_by_tvg_id["id"]
+            channels_to_update.append(chan)
+
+            # Add to matched_channels list so it's counted in the total
+            matched_channels.append((chan['id'], fallback_name, epg_by_tvg_id["tvg_id"]))
+
+            eprint(f"Channel {chan['id']} '{fallback_name}' => EPG found by tvg_id={epg_by_tvg_id['tvg_id']}")
+            continue
+
         # If channel has a tvg_id that doesn't exist in EPGData, do direct check.
         # I don't THINK this should happen now that we assign EPG on channel creation.
         if chan["tvg_id"]:
@@ -59,7 +74,6 @@ def process_data(input_data):
             continue
 
         # C) Perform name-based fuzzy matching
-        fallback_name = chan["tvg_id"].strip() if chan["tvg_id"] else chan["name"]
         if not chan["norm_chan"]:
             eprint(f"Channel {chan['id']} '{chan['name']}' => empty after normalization, skipping")
             continue
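The exact-match step above depends on both sides having been normalized the same way (stripped, lowercased) upstream in `tasks.py`. A self-contained sketch of the same idea, using a dict index instead of the linear `next()` scan (an illustration of the technique, not what the patch ships):

```python
# Standalone sketch of exact TVG-ID matching with an index; illustrative only.
def match_by_tvg_id(channels, epg_data):
    # Index EPG rows by their (already normalized) tvg_id for O(1) lookups.
    epg_by_tvg_id = {epg["tvg_id"]: epg for epg in epg_data if epg["tvg_id"]}
    matches = []
    for chan in channels:
        tvg_id = (chan.get("tvg_id") or "").strip().lower()
        epg = epg_by_tvg_id.get(tvg_id)
        if tvg_id and epg:
            matches.append({"channel_id": chan["id"], "epg_data_id": epg["id"]})
    return matches


print(match_by_tvg_id(
    [{"id": 12, "tvg_id": " CNN.us "}],
    [{"id": 345, "tvg_id": "cnn.us"}],
))  # [{'channel_id': 12, 'epg_data_id': 345}]
```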