From 7a7cd0711d8ad3aa6f7e88e846cb6dcf5f0e2ad8 Mon Sep 17 00:00:00 2001
From: dekzter
Date: Mon, 17 Mar 2025 12:42:48 -0400
Subject: [PATCH] live stats dashboard

---
 apps/m3u/tasks.py                           |   4 +-
 apps/proxy/tasks.py                         |  51 +++
 apps/proxy/ts_proxy/channel_status.py       |   6 +-
 apps/proxy/ts_proxy/client_manager.py       |   5 +-
 apps/proxy/ts_proxy/server.py               |  31 +-
 apps/proxy/ts_proxy/views.py                |  11 +-
 dispatcharr/consumers.py                    |   2 +-
 dispatcharr/settings.py                     |   8 +
 docker/uwsgi.dev.ini                        |   2 +
 docker/uwsgi.ini                            |   4 +-
 frontend/src/App.jsx                        |   2 +
 frontend/src/WebSocket.jsx                  |  13 +-
 frontend/src/api.js                         |  39 +++
 frontend/src/components/Sidebar.jsx         |   1 +
 .../src/components/tables/ChannelsTable.jsx |   1 +
 frontend/src/pages/Stats.jsx                | 302 ++++++++++++++++++
 frontend/src/store/channels.jsx             |   6 +
 frontend/vite.config.js                     |  10 +-
 requirements.txt                            |   1 +
 19 files changed, 469 insertions(+), 30 deletions(-)
 create mode 100644 apps/proxy/tasks.py
 create mode 100644 frontend/src/pages/Stats.jsx

diff --git a/apps/m3u/tasks.py b/apps/m3u/tasks.py
index cbb7bd94..b3de8567 100644
--- a/apps/m3u/tasks.py
+++ b/apps/m3u/tasks.py
@@ -206,8 +206,8 @@ def refresh_single_m3u_account(account_id):
     async_to_sync(channel_layer.group_send)(
         "updates",
         {
-            "type": "m3u_refresh",
-            "message": {"success": True, "message": "M3U refresh completed successfully"}
+            "type": "update",
+            "data": {"success": True, "type": "m3u_refresh", "message": "M3U refresh completed successfully"}
         },
     )
     return f"Account {account_id} => Created {created_count}, updated {updated_count}, excluded {excluded_count} Streams."

diff --git a/apps/proxy/tasks.py b/apps/proxy/tasks.py
new file mode 100644
index 00000000..37a1f8f9
--- /dev/null
+++ b/apps/proxy/tasks.py
@@ -0,0 +1,51 @@
+# apps/proxy/tasks.py
+from celery import shared_task
+from channels.layers import get_channel_layer
+from asgiref.sync import async_to_sync
+import json
+import logging
+import re
+from core.utils import redis_client
+from apps.proxy.ts_proxy.channel_status import ChannelStatus
+
+logger = logging.getLogger(__name__)
+
+@shared_task
+def fetch_channel_stats():
+    try:
+        # Basic info for all channels
+        channel_pattern = "ts_proxy:channel:*:metadata"
+        all_channels = []
+
+        # Extract channel IDs from keys, iterating with SCAN so Redis is never blocked
+        cursor = 0
+        while True:
+            cursor, keys = redis_client.scan(cursor, match=channel_pattern)
+            for key in keys:
+                channel_id_match = re.search(r"ts_proxy:channel:(.*):metadata", key.decode('utf-8'))
+                if channel_id_match:
+                    ch_id = channel_id_match.group(1)
+                    channel_info = ChannelStatus.get_basic_channel_info(ch_id)
+                    if channel_info:
+                        all_channels.append(channel_info)
+
+            if cursor == 0:
+                break
+
+    except Exception as e:
+        logger.error(f"Error in fetch_channel_stats: {e}", exc_info=True)
+        return
+
+    channel_layer = get_channel_layer()
+    async_to_sync(channel_layer.group_send)(
+        "updates",
+        {
+            "type": "update",
+            "data": {"success": True, "type": "channel_stats", "stats": json.dumps({'channels': all_channels, 'count': len(all_channels)})}
+        },
+    )

diff --git a/apps/proxy/ts_proxy/channel_status.py b/apps/proxy/ts_proxy/channel_status.py
index 3c36b8cd..c9c896e0 100644
--- a/apps/proxy/ts_proxy/channel_status.py
+++ b/apps/proxy/ts_proxy/channel_status.py
@@ -6,7 +6,7 @@ from . import proxy_server
 logger = logging.getLogger("ts_proxy")
 
 class ChannelStatus:
-    
+
     def get_detailed_channel_info(channel_id):
         # Get channel metadata
         metadata_key = f"ts_proxy:channel:{channel_id}:metadata"
@@ -231,7 +231,8 @@ class ChannelStatus:
                     # Efficient way - just retrieve the essentials
                     client_info = {
                         'client_id': client_id_str,
-                        'user_agent': proxy_server.redis_client.hget(client_key, 'user_agent')
+                        'user_agent': proxy_server.redis_client.hget(client_key, 'user_agent'),
+                        'ip_address': ip.decode('utf-8') if (ip := proxy_server.redis_client.hget(client_key, 'ip_address')) else None,
                     }
 
                     if client_info['user_agent']:
@@ -251,4 +252,3 @@ class ChannelStatus:
         info['clients'] = clients
 
         return info
-

diff --git a/apps/proxy/ts_proxy/client_manager.py b/apps/proxy/ts_proxy/client_manager.py
index 7285e7fe..178b7a9d 100644
--- a/apps/proxy/ts_proxy/client_manager.py
+++ b/apps/proxy/ts_proxy/client_manager.py
@@ -135,7 +135,7 @@ class ClientManager:
         except Exception as e:
             logger.error(f"Error notifying owner of client activity: {e}")
 
-    def add_client(self, client_id, user_agent=None):
+    def add_client(self, client_id, client_ip, user_agent=None):
         """Add a client with duplicate prevention"""
         if client_id in self._registered_clients:
             logger.debug(f"Client {client_id} already registered, skipping")
@@ -150,6 +150,7 @@ class ClientManager:
         current_time = str(time.time())
         client_data = {
             "user_agent": user_agent or "unknown",
+            "ip_address": client_ip or "unknown",
             "connected_at": current_time,
             "last_active": current_time,
             "worker_id": self.worker_id or "unknown"
@@ -285,4 +286,4 @@ class ClientManager:
             # Refresh TTL on the set itself
             self.redis_client.expire(self.client_set_key, self.client_ttl)
         except Exception as e:
-            logger.error(f"Error refreshing client TTL: {e}")
\ No newline at end of file
+            logger.error(f"Error refreshing client TTL: {e}")

diff --git a/apps/proxy/ts_proxy/server.py b/apps/proxy/ts_proxy/server.py
index 5dc4a6b5..72bd93dd 100644
--- a/apps/proxy/ts_proxy/server.py
+++ b/apps/proxy/ts_proxy/server.py
@@ -190,16 +190,16 @@
                     metadata_key = f"ts_proxy:channel:{channel_id}:metadata"
                     if self.redis_client.exists(metadata_key):
                         self.redis_client.hset(metadata_key, mapping={
-                            "state": "stopping", 
+                            "state": "stopping",
                             "state_changed_at": str(time.time())
                         })
-                    
+
                     # If we have local resources for this channel, clean them up
                     if channel_id in self.stream_buffers or channel_id in self.client_managers:
                         # Use existing stop_channel method
                         logger.info(f"Stopping local resources for channel {channel_id}")
                         self.stop_channel(channel_id)
-                    
+
                     # Acknowledge stop by publishing a response
                     stop_response = {
                         "event": "channel_stopped",
@@ -215,14 +215,14 @@
                     client_id = data.get("client_id")
                     if client_id and channel_id:
                         logger.info(f"Received request to stop client {client_id} on channel {channel_id}")
-                        
+
                         # Both remove from client manager AND set a key for the generator to detect
                         if channel_id in self.client_managers:
                             client_manager = self.client_managers[channel_id]
                             if client_id in client_manager.clients:
                                 client_manager.remove_client(client_id)
                                 logger.info(f"Removed client {client_id} from client manager")
-                        
+
                         # Set a Redis key for the generator to detect
                         if self.redis_client:
                             stop_key = f"ts_proxy:channel:{channel_id}:client:{client_id}:stop"
@@ -497,13 +497,13 @@
         """Stop a channel with proper ownership handling"""
         try:
             logger.info(f"Stopping channel {channel_id}")
-            
+
             # First set a stopping key that clients will check
             if self.redis_client:
                 stop_key = f"ts_proxy:channel:{channel_id}:stopping"
f"ts_proxy:channel:{channel_id}:stopping" # Set with 60 second TTL - enough time for clients to notice self.redis_client.setex(stop_key, 10, "true") - + # Only stop the actual stream manager if we're the owner if self.am_i_owner(channel_id): logger.info(f"This worker ({self.worker_id}) is the owner - closing provider connection") @@ -592,17 +592,17 @@ class ProxyServer: try: # Refresh channel registry self.refresh_channel_registry() - + # Create a unified list of all channels we have locally all_local_channels = set(self.stream_managers.keys()) | set(self.client_managers.keys()) - + # Single loop through all channels - process each exactly once for channel_id in list(all_local_channels): if self.am_i_owner(channel_id): # === OWNER CHANNEL HANDLING === # Extend ownership lease self.extend_ownership(channel_id) - + # Get channel state from metadata hash channel_state = "unknown" if self.redis_client: @@ -701,7 +701,7 @@ class ProxyServer: self.redis_client.delete(f"ts_proxy:channel:{channel_id}:last_client_disconnect_time") else: - # === NON-OWNER CHANNEL HANDLING === + # === NON-OWNER CHANNEL HANDLING === # For channels we don't own, check if they've been stopped/cleaned up in Redis if self.redis_client: # Method 1: Check for stopping key @@ -710,21 +710,21 @@ class ProxyServer: logger.debug(f"Non-owner cleanup: Channel {channel_id} has stopping flag in Redis, cleaning up local resources") self._cleanup_local_resources(channel_id) continue - + # Method 2: Check if owner still exists owner_key = f"ts_proxy:channel:{channel_id}:owner" if not self.redis_client.exists(owner_key): logger.debug(f"Non-owner cleanup: Channel {channel_id} has no owner in Redis, cleaning up local resources") self._cleanup_local_resources(channel_id) continue - + # Method 3: Check if metadata still exists metadata_key = f"ts_proxy:channel:{channel_id}:metadata" if not self.redis_client.exists(metadata_key): logger.debug(f"Non-owner cleanup: Channel {channel_id} has no metadata in Redis, cleaning up local resources") self._cleanup_local_resources(channel_id) continue - + # Check for local client count - if zero, clean up our local resources if self.client_managers[channel_id].get_client_count() == 0: # We're not the owner, and we have no local clients - clean up our resources @@ -831,6 +831,7 @@ class ProxyServer: self.redis_client.hset(metadata_key, "last_active", str(time.time())) self.redis_client.expire(metadata_key, 30) # Reset TTL on metadata hash logger.debug(f"Refreshed metadata TTL for channel {channel_id}") + def update_channel_state(self, channel_id, new_state, additional_fields=None): """Update channel state with proper history tracking and logging""" if not self.redis_client: @@ -887,7 +888,7 @@ class ProxyServer: if channel_id in self.client_managers: del self.client_managers[channel_id] logger.info(f"Non-owner cleanup: Removed client manager for channel {channel_id}") - + return True except Exception as e: logger.error(f"Error cleaning up local resources: {e}", exc_info=True) diff --git a/apps/proxy/ts_proxy/views.py b/apps/proxy/ts_proxy/views.py index 32496c55..f7424f38 100644 --- a/apps/proxy/ts_proxy/views.py +++ b/apps/proxy/ts_proxy/views.py @@ -21,6 +21,14 @@ from rest_framework.permissions import IsAuthenticated logger = logging.getLogger("ts_proxy") +def get_client_ip(request): + x_forwarded_for = request.META.get('HTTP_X_FORWARDED_FOR') + if x_forwarded_for: + ip = x_forwarded_for.split(',')[0] + else: + ip = request.META.get('REMOTE_ADDR') + return ip + @api_view(['GET']) def 
 def stream_ts(request, channel_id):
     """Stream TS data to client with immediate response and keep-alive packets during initialization"""
@@ -31,6 +39,7 @@ def stream_ts(request, channel_id):
     try:
         # Generate a unique client ID
         client_id = f"client_{int(time.time() * 1000)}_{random.randint(1000, 9999)}"
+        client_ip = get_client_ip(request)
         logger.info(f"[{client_id}] Requested stream for channel {channel_id}")
 
         # Extract client user agent early
@@ -156,7 +165,7 @@ def stream_ts(request, channel_id):
         # Register client
         buffer = proxy_server.stream_buffers[channel_id]
         client_manager = proxy_server.client_managers[channel_id]
-        client_manager.add_client(client_id, client_user_agent)
+        client_manager.add_client(client_id, client_ip, client_user_agent)
         logger.info(f"[{client_id}] Client registered with channel {channel_id}")
 
         # Define a single generate function

diff --git a/dispatcharr/consumers.py b/dispatcharr/consumers.py
index 9c56605d..356422d7 100644
--- a/dispatcharr/consumers.py
+++ b/dispatcharr/consumers.py
@@ -14,5 +14,5 @@ class MyWebSocketConsumer(AsyncWebsocketConsumer):
         data = json.loads(text_data)
         print("Received:", data)
 
-    async def m3u_refresh(self, event):
+    async def update(self, event):
         await self.send(text_data=json.dumps(event))

diff --git a/dispatcharr/settings.py b/dispatcharr/settings.py
index 5c3ee116..5cd21169 100644
--- a/dispatcharr/settings.py
+++ b/dispatcharr/settings.py
@@ -150,6 +151,13 @@ AUTH_USER_MODEL = 'accounts.User'
 CELERY_BROKER_URL = os.environ.get('CELERY_BROKER_URL', 'redis://localhost:6379/0')
 CELERY_RESULT_BACKEND = CELERY_BROKER_URL
 
+CELERY_BEAT_SCHEDULE = {
+    'fetch-channel-statuses': {
+        'task': 'apps.proxy.tasks.fetch_channel_stats',
+        # Plain float interval: push fresh stats every 2 seconds
+        'schedule': 2.0,
+    },
+}
+
 MEDIA_ROOT = BASE_DIR / 'media'
 MEDIA_URL = '/media/'

diff --git a/docker/uwsgi.dev.ini b/docker/uwsgi.dev.ini
index 535897a5..93ef9fa0 100644
--- a/docker/uwsgi.dev.ini
+++ b/docker/uwsgi.dev.ini
@@ -3,6 +3,7 @@
 ; exec-before = python manage.py migrate --noinput
 
 attach-daemon = celery -A dispatcharr worker -l info
+attach-daemon = celery -A dispatcharr beat -l info
 attach-daemon = redis-server
 attach-daemon = daphne -b 0.0.0.0 -p 8001 dispatcharr.asgi:application
 attach-daemon = cd /app/frontend && npm run dev
@@ -17,6 +18,7 @@ socket = /app/uwsgi.sock
 chmod-socket = 777
 vacuum = true
 die-on-term = true
+static-map = /static=/app/static
 
 # Worker management (Optimize for I/O bound tasks)
 workers = 4

diff --git a/docker/uwsgi.ini b/docker/uwsgi.ini
index ace423af..adb5de33 100644
--- a/docker/uwsgi.ini
+++ b/docker/uwsgi.ini
@@ -2,7 +2,8 @@
 ; exec-before = python manage.py collectstatic --noinput
 ; exec-before = python manage.py migrate --noinput
 
-attach-daemon = celery -A dispatcharr worker -l info
+attach-daemon = celery -A dispatcharr worker -l error
+attach-daemon = celery -A dispatcharr beat -l error
 attach-daemon = redis-server
 attach-daemon = daphne -b 0.0.0.0 -p 8001 dispatcharr.asgi:application
 
@@ -16,6 +17,7 @@ socket = /app/uwsgi.sock
 chmod-socket = 777
 vacuum = true
 die-on-term = true
+static-map = /static=/app/static
 
 # Worker management (Optimize for I/O bound tasks)
 workers = 4

diff --git a/frontend/src/App.jsx b/frontend/src/App.jsx
index 25dcb8aa..ff8ae3b6 100644
--- a/frontend/src/App.jsx
+++ b/frontend/src/App.jsx
@@ -12,6 +12,7 @@ import Channels from './pages/Channels';
 import M3U from './pages/M3U';
 import EPG from './pages/EPG';
 import Guide from './pages/Guide';
+import Stats from './pages/Stats';
 import Settings from './pages/Settings';
 import StreamProfiles from './pages/StreamProfiles';
 import useAuthStore from './store/auth';
@@ -122,6 +123,7 @@ const App = () => {
                 element={}
               />
               } />
+              <Route path="/stats" element={<Stats />} />
               } />
             ) : (

diff --git a/frontend/src/WebSocket.jsx b/frontend/src/WebSocket.jsx
index ffc86673..edf45d7c 100644
--- a/frontend/src/WebSocket.jsx
+++ b/frontend/src/WebSocket.jsx
@@ -7,6 +7,7 @@ import React, {
 } from 'react';
 import useStreamsStore from './store/streams';
 import { notifications } from '@mantine/notifications';
+import useChannelsStore from './store/channels';
 
 export const WebsocketContext = createContext(false, null, () => {});
 
@@ -15,6 +16,7 @@ export const WebsocketProvider = ({ children }) => {
   const [val, setVal] = useState(null);
 
   const { fetchStreams } = useStreamsStore();
+  const { setChannelStats } = useChannelsStore();
 
   const ws = useRef(null);
 
@@ -48,17 +50,22 @@ export const WebsocketProvider = ({ children }) => {
     socket.onmessage = async (event) => {
       event = JSON.parse(event.data);
 
-      switch (event.type) {
+      switch (event.data.type) {
         case 'm3u_refresh':
-          if (event.message?.success) {
+          if (event.data.success) {
             fetchStreams();
             notifications.show({
-              message: event.message.message,
+              message: event.data.message,
               color: 'green.5',
             });
           }
           break;
 
+        case 'channel_stats':
+          setChannelStats(JSON.parse(event.data.stats));
+          break;
+
         default:
           console.error(`Unknown websocket event type: ${event.type}`);
           break;

diff --git a/frontend/src/api.js b/frontend/src/api.js
index 61d03d60..5b318d68 100644
--- a/frontend/src/api.js
+++ b/frontend/src/api.js
@@ -817,4 +817,43 @@ export default class API {
 
     return retval;
   }
+
+  static async getChannelStats() {
+    const response = await fetch(`${host}/proxy/ts/status`, {
+      headers: {
+        'Content-Type': 'application/json',
+        Authorization: `Bearer ${await API.getAuthToken()}`,
+      },
+    });
+
+    const retval = await response.json();
+    return retval;
+  }
+
+  static async stopChannel(id) {
+    const response = await fetch(`${host}/proxy/ts/stop/${id}`, {
+      method: 'POST',
+      headers: {
+        'Content-Type': 'application/json',
+        Authorization: `Bearer ${await API.getAuthToken()}`,
+      },
+    });
+
+    const retval = await response.json();
+    return retval;
+  }
+
+  static async stopClient(channelId, clientId) {
+    const response = await fetch(`${host}/proxy/ts/stop_client/${channelId}`, {
+      method: 'POST',
+      headers: {
+        'Content-Type': 'application/json',
+        Authorization: `Bearer ${await API.getAuthToken()}`,
+      },
+      body: JSON.stringify({ client_id: clientId }),
+    });
+
+    const retval = await response.json();
+    return retval;
+  }
 }

diff --git a/frontend/src/components/Sidebar.jsx b/frontend/src/components/Sidebar.jsx
index 34e830dd..f66d6c1b 100644
--- a/frontend/src/components/Sidebar.jsx
+++ b/frontend/src/components/Sidebar.jsx
@@ -79,6 +79,7 @@ const Sidebar = ({ collapsed, toggleDrawer, drawerWidth, miniDrawerWidth }) => {
       path: '/stream-profiles',
     },
     { label: 'TV Guide', icon: , path: '/guide' },
+    { label: 'Stats', icon: , path: '/stats' },
     { label: 'Settings',
       icon: ,

diff --git a/frontend/src/components/tables/ChannelsTable.jsx b/frontend/src/components/tables/ChannelsTable.jsx
index e096b6bb..d22b90ed 100644
--- a/frontend/src/components/tables/ChannelsTable.jsx
+++ b/frontend/src/components/tables/ChannelsTable.jsx
@@ -113,6 +113,7 @@ const ChannelStreams = ({ channel, isExpanded }) => {
     enableColumnFilters: false,
     enableBottomToolbar: false,
     enableTopToolbar: false,
+    enableTableHead: false,
     columnFilterDisplayMode: 'popover',
     enablePagination: false,
     enableRowVirtualization: true,

diff --git a/frontend/src/pages/Stats.jsx b/frontend/src/pages/Stats.jsx
new file mode 100644
index 00000000..73814089
--- /dev/null
+++ b/frontend/src/pages/Stats.jsx
@@ -0,0 +1,302 @@
+import React, { useMemo, useState, useEffect } from 'react';
+import { ActionIcon, Box, Center, Grid } from '@mantine/core';
+import { MantineReactTable, useMantineReactTable } from 'mantine-react-table';
+import { SquareX } from 'lucide-react';
+import { TableHelper } from '../helpers';
+import API from '../api';
+import useChannelsStore from '../store/channels';
+import logo from '../images/logo.png';
+
+const StatsPage = () => {
+  const { channels, stats: channelStats } = useChannelsStore();
+  const [activeChannels, setActiveChannels] = useState([]);
+  const [clients, setClients] = useState([]);
+
+  const channelsColumns = useMemo(
+    () => [
+      {
+        id: 'logo',
+        header: 'Logo',
+        accessorKey: 'logo_url',
+        size: 50,
+        Cell: ({ cell }) => (
+          <Center>
+            <img src={cell.getValue() || logo} alt="channel logo" />
+          </Center>
+        ),
+      },
+      {
+        id: 'name',
+        header: 'Name',
+        accessorKey: 'name',
+        Cell: ({ cell }) => <Box>{cell.getValue()}</Box>,
+      },
+      {
+        id: 'started',
+        header: 'Started',
+        accessorFn: (row) => {
+          // Derive the start time by subtracting uptime (in seconds) from now
+          const currentDate = new Date();
+          const startDate = new Date(currentDate.getTime() - row.uptime * 1000);
+          // Format as a locale string, e.g. "Fri, 03/14/2025, 02:00:00 PM"
+          return startDate.toLocaleString(undefined, {
+            weekday: 'short',
+            year: 'numeric',
+            month: '2-digit',
+            day: '2-digit',
+            hour: '2-digit',
+            minute: '2-digit',
+            second: '2-digit',
+            hour12: true,
+          });
+        },
+      },
+      {
+        id: 'uptime',
+        header: 'Uptime',
+        size: 50,
+        accessorFn: (row) => {
+          const days = Math.floor(row.uptime / (3600 * 24)); // Whole days
+          const hours = Math.floor((row.uptime % (3600 * 24)) / 3600); // Remaining hours
+          const minutes = Math.floor((row.uptime % 3600) / 60); // Remaining minutes
+          const seconds = Math.floor(row.uptime % 60); // Remaining seconds
+
+          // Format uptime as "d hh:mm:ss"
+          return `${days ? days : ''} ${String(hours).padStart(2, '0')}:${String(minutes).padStart(2, '0')}:${String(seconds).padStart(2, '0')}`;
+        },
+        mantineTableBodyCellProps: {
+          align: 'right',
+        },
+      },
+      {
+        id: 'num_clients',
+        header: 'Clients',
+        accessorKey: 'client_count',
+        size: 50,
+        mantineTableBodyCellProps: {
+          align: 'center',
+        },
+      },
+    ],
+    []
+  );
+
+  const stopChannel = async (id) => {
+    await API.stopChannel(id);
+  };
+
+  const stopClient = async (channelId, clientId) => {
+    await API.stopClient(channelId, clientId);
+  };
+
+  const channelsTable = useMantineReactTable({
+    ...TableHelper.defaultProperties,
+    renderTopToolbar: false,
+    columns: channelsColumns,
+    data: activeChannels,
+    enableRowActions: true,
+    mantineTableBodyCellProps: {
+      style: {
+        padding: 4,
+        borderColor: '#444',
+        color: '#E0E0E0',
+        fontSize: '0.85rem',
+      },
+    },
+    renderRowActions: ({ row }) => (
+      <Center>
+        <ActionIcon onClick={() => stopChannel(row.original.uuid)}>
+          <SquareX size="18" />
+        </ActionIcon>
+      </Center>
+    ),
+  });
+
+  const clientsTable = useMantineReactTable({
+    ...TableHelper.defaultProperties,
+    renderTopToolbar: false,
+    data: clients,
+    columns: useMemo(
+      () => [
+        {
+          id: 'logo',
+          header: 'Logo',
+          accessorKey: 'channel.logo_url',
+          size: 50,
+          Cell: ({ cell }) => (
+            <Center>
+              <img src={cell.getValue() || logo} alt="channel logo" />
+            </Center>
+          ),
+        },
+        {
+          header: 'Channel',
+          accessorKey: 'channel.name',
+          size: 100,
+        },
+        {
+          header: 'User-Agent',
+          accessorKey: 'user_agent',
+          size: 250,
+          mantineTableBodyCellProps: {
+            style: {
+              whiteSpace: 'nowrap',
+              maxWidth: 400,
+            },
+          },
+        },
+        {
+          header: 'IP Address',
+          accessorKey: 'ip_address',
+          size: 50,
+        },
+      ],
+      []
+    ),
+    mantineTableBodyCellProps: {
+      style: {
+        padding: 4,
+        borderColor: '#444',
+        color: '#E0E0E0',
+        fontSize: '0.85rem',
+      },
+    },
+    enableRowActions: true,
+    renderRowActions: ({ row }) => (
+      <Center>
+        <ActionIcon
+          onClick={() =>
+            stopClient(row.original.channel.uuid, row.original.client_id)
+          }
+        >
+          <SquareX size="18" />
+        </ActionIcon>
+      </Center>
+    ),
+  });
+
+  useEffect(() => {
+    if (!channelStats.channels) {
+      return;
+    }
+
+    // Join each live stats record with its channel record from the store
+    const stats = channelStats.channels.map((ch) => ({
+      ...ch,
+      ...Object.values(channels).find(
+        (channel) => channel.uuid === ch.channel_id
+      ),
+    }));
+    setActiveChannels(stats);
+
+    // Flatten every channel's client list into a single table
+    const clientStats = stats.reduce((acc, ch) => {
+      return acc.concat(
+        ch.clients.map((client) => ({
+          ...client,
+          channel: ch,
+        }))
+      );
+    }, []);
+    setClients(clientStats);
+  }, [channelStats, channels]);
+
+  return (
+    <Grid>
+      <Grid.Col span={6}>
+        <MantineReactTable table={channelsTable} />
+      </Grid.Col>
+      <Grid.Col span={6}>
+        <MantineReactTable table={clientsTable} />
+      </Grid.Col>
+    </Grid>
+  );
+};
+
+export default StatsPage;

diff --git a/frontend/src/store/channels.jsx b/frontend/src/store/channels.jsx
index 347838bd..47d36e8f 100644
--- a/frontend/src/store/channels.jsx
+++ b/frontend/src/store/channels.jsx
@@ -5,6 +5,7 @@ const useChannelsStore = create((set) => ({
   channels: [],
   channelGroups: [],
   channelsPageSelection: [],
+  stats: [],
   isLoading: false,
   error: null,
 
@@ -87,6 +88,11 @@ const useChannelsStore = create((set) => ({
   setChannelsPageSelection: (channelsPageSelection) =>
     set((state) => ({ channelsPageSelection })),
+
+  setChannelStats: (stats) =>
+    set((state) => ({
+      stats,
+    })),
 }));
 
 export default useChannelsStore;

diff --git a/frontend/vite.config.js b/frontend/vite.config.js
index 83f74384..d409838a 100644
--- a/frontend/vite.config.js
+++ b/frontend/vite.config.js
@@ -1,8 +1,14 @@
-import { defineConfig } from "vite";
-import react from "@vitejs/plugin-react-swc";
+import { defineConfig } from 'vite';
+import react from '@vitejs/plugin-react-swc';
 
 // https://vite.dev/config/
 export default defineConfig({
+  build: {
+    // Emit bundled assets under static/assets so uwsgi's static-map can serve them
+    assetsDir: 'static/assets',
+  },
+
   plugins: [react()],
 
   server: {

diff --git a/requirements.txt b/requirements.txt
index 0c1de6cc..53b6e023 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -2,6 +2,7 @@ Django==5.1.6
 psycopg2-binary==2.9.10
 redis==4.5.5
-celery
+celery[redis]
 djangorestframework==3.15.2
 requests==2.32.3
 psutil==7.0.0