mirror of https://github.com/Dispatcharr/Dispatcharr.git (synced 2026-01-23 02:35:14 +00:00)

Commit 7a7cd0711d: live stats dashboard
Parent: 8f3d175a90
19 changed files with 469 additions and 30 deletions
@@ -206,8 +206,8 @@ def refresh_single_m3u_account(account_id):
     async_to_sync(channel_layer.group_send)(
         "updates",
         {
-            "type": "m3u_refresh",
-            "message": {"success": True, "message": "M3U refresh completed successfully"}
+            "type": "update",
+            "data": {"success": True, "type": "m3u_refresh", "message": "M3U refresh completed successfully"}
         },
     )
     return f"Account {account_id} => Created {created_count}, updated {updated_count}, excluded {excluded_count} Streams."
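Every broadcast now uses one envelope: group messages carry type "update" so a single consumer handler fans them out, and the nested data.type field ("m3u_refresh", "channel_stats", ...) tells the frontend what arrived. A minimal sketch of a reusable sender built on this envelope; the helper name send_update is illustrative, not part of the commit:

from asgiref.sync import async_to_sync
from channels.layers import get_channel_layer

def send_update(event_type, **payload):
    """Broadcast to the "updates" group using the unified envelope."""
    async_to_sync(get_channel_layer().group_send)(
        "updates",
        {
            "type": "update",  # routed to the consumer's update() handler
            "data": {"success": True, "type": event_type, **payload},
        },
    )

# e.g. send_update("m3u_refresh", message="M3U refresh completed successfully")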
apps/proxy/tasks.py (new file, 51 lines)
@@ -0,0 +1,51 @@
# yourapp/tasks.py
from celery import shared_task
from channels.layers import get_channel_layer
from asgiref.sync import async_to_sync
import redis
import json
import logging
import re
from core.utils import redis_client
from apps.proxy.ts_proxy.channel_status import ChannelStatus

logger = logging.getLogger(__name__)

# Store the last known value to compare with new data
last_known_data = {}

@shared_task
def fetch_channel_stats():
    try:
        # Basic info for all channels
        channel_pattern = "ts_proxy:channel:*:metadata"
        all_channels = []

        # Extract channel IDs from keys
        cursor = 0
        while True:
            cursor, keys = redis_client.scan(cursor, match=channel_pattern)
            for key in keys:
                channel_id_match = re.search(r"ts_proxy:channel:(.*):metadata", key.decode('utf-8'))
                if channel_id_match:
                    ch_id = channel_id_match.group(1)
                    channel_info = ChannelStatus.get_basic_channel_info(ch_id)
                    if channel_info:
                        all_channels.append(channel_info)

            if cursor == 0:
                break

    except Exception as e:
        logger.error(f"Error in channel_status: {e}", exc_info=True)
        return
        # return JsonResponse({'error': str(e)}, status=500)

    channel_layer = get_channel_layer()
    async_to_sync(channel_layer.group_send)(
        "updates",
        {
            "type": "update",
            "data": {"success": True, "type": "channel_stats", "stats": json.dumps({'channels': all_channels, 'count': len(all_channels)})}
        },
    )
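The task walks Redis with an explicit SCAN cursor. redis-py's scan_iter() wraps the same cursor loop and is a little harder to get wrong; a sketch of an equivalent, assuming the same redis_client from core.utils:

import re

def iter_channel_ids(redis_client):
    """Yield channel IDs from ts_proxy metadata keys (scan_iter manages the cursor)."""
    for key in redis_client.scan_iter(match="ts_proxy:channel:*:metadata"):
        match = re.search(r"ts_proxy:channel:(.*):metadata", key.decode("utf-8"))
        if match:
            yield match.group(1)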
@@ -6,7 +6,7 @@ from . import proxy_server
 logger = logging.getLogger("ts_proxy")

 class ChannelStatus:

     def get_detailed_channel_info(channel_id):
         # Get channel metadata
         metadata_key = f"ts_proxy:channel:{channel_id}:metadata"
@@ -231,7 +231,8 @@ class ChannelStatus:
             # Efficient way - just retrieve the essentials
             client_info = {
                 'client_id': client_id_str,
-                'user_agent': proxy_server.redis_client.hget(client_key, 'user_agent')
+                'user_agent': proxy_server.redis_client.hget(client_key, 'user_agent'),
+                'ip_address': proxy_server.redis_client.hget(client_key, 'ip_address').decode('utf-8'),
             }

             if client_info['user_agent']:
@@ -251,4 +252,3 @@ class ChannelStatus:
         info['clients'] = clients

         return info
-
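Note the asymmetry in the hunk above: ip_address is decoded but user_agent is left as bytes, and hget returns None when a field is missing, which would make the chained .decode() raise. A defensive sketch; hget_str is a hypothetical helper, not part of the commit:

def hget_str(client, key, field, default=None):
    """hget that tolerates missing fields and decodes bytes (redis-py returns
    bytes unless the client was created with decode_responses=True)."""
    value = client.hget(key, field)
    if value is None:
        return default
    return value.decode("utf-8") if isinstance(value, bytes) else value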
@@ -135,7 +135,7 @@ class ClientManager:
         except Exception as e:
             logger.error(f"Error notifying owner of client activity: {e}")

-    def add_client(self, client_id, user_agent=None):
+    def add_client(self, client_id, client_ip, user_agent=None):
         """Add a client with duplicate prevention"""
         if client_id in self._registered_clients:
             logger.debug(f"Client {client_id} already registered, skipping")
@@ -150,6 +150,7 @@ class ClientManager:
         current_time = str(time.time())
         client_data = {
             "user_agent": user_agent or "unknown",
+            "ip_address": client_ip,
             "connected_at": current_time,
             "last_active": current_time,
             "worker_id": self.worker_id or "unknown"
@@ -285,4 +286,4 @@ class ClientManager:
                 # Refresh TTL on the set itself
                 self.redis_client.expire(self.client_set_key, self.client_ttl)
         except Exception as e:
-            logger.error(f"Error refreshing client TTL: {e}")
+            logger.error(f"Error refreshing client TTL: {e}")
@@ -190,16 +190,16 @@ class ProxyServer:
                 metadata_key = f"ts_proxy:channel:{channel_id}:metadata"
                 if self.redis_client.exists(metadata_key):
                     self.redis_client.hset(metadata_key, mapping={
-                        "state": "stopping",
+                        "state": "stopping",
                         "state_changed_at": str(time.time())
                     })

                 # If we have local resources for this channel, clean them up
                 if channel_id in self.stream_buffers or channel_id in self.client_managers:
                     # Use existing stop_channel method
                     logger.info(f"Stopping local resources for channel {channel_id}")
                     self.stop_channel(channel_id)

                 # Acknowledge stop by publishing a response
                 stop_response = {
                     "event": "channel_stopped",
@@ -215,14 +215,14 @@ class ProxyServer:
                 client_id = data.get("client_id")
                 if client_id and channel_id:
                     logger.info(f"Received request to stop client {client_id} on channel {channel_id}")

                     # Both remove from client manager AND set a key for the generator to detect
                     if channel_id in self.client_managers:
                         client_manager = self.client_managers[channel_id]
                         if client_id in client_manager.clients:
                             client_manager.remove_client(client_id)
                             logger.info(f"Removed client {client_id} from client manager")

                     # Set a Redis key for the generator to detect
                     if self.redis_client:
                         stop_key = f"ts_proxy:channel:{channel_id}:client:{client_id}:stop"
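The per-client stop key exists so the streaming generator can notice a remote stop request. A sketch of the generator-side check this implies; only the key layout comes from the diff, the loop body is illustrative:

def stream_generator(redis_client, channel_id, client_id, chunks):
    stop_key = f"ts_proxy:channel:{channel_id}:client:{client_id}:stop"
    for chunk in chunks:
        if redis_client and redis_client.exists(stop_key):
            break  # client was stopped remotely; end the response
        yield chunk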
@@ -497,13 +497,13 @@ class ProxyServer:
         """Stop a channel with proper ownership handling"""
         try:
             logger.info(f"Stopping channel {channel_id}")

             # First set a stopping key that clients will check
             if self.redis_client:
                 stop_key = f"ts_proxy:channel:{channel_id}:stopping"
                 # Set with 10 second TTL - enough time for clients to notice
                 self.redis_client.setex(stop_key, 10, "true")

             # Only stop the actual stream manager if we're the owner
             if self.am_i_owner(channel_id):
                 logger.info(f"This worker ({self.worker_id}) is the owner - closing provider connection")
@@ -592,17 +592,17 @@ class ProxyServer:
             try:
                 # Refresh channel registry
                 self.refresh_channel_registry()

                 # Create a unified list of all channels we have locally
                 all_local_channels = set(self.stream_managers.keys()) | set(self.client_managers.keys())

                 # Single loop through all channels - process each exactly once
                 for channel_id in list(all_local_channels):
                     if self.am_i_owner(channel_id):
                         # === OWNER CHANNEL HANDLING ===
                         # Extend ownership lease
                         self.extend_ownership(channel_id)

                         # Get channel state from metadata hash
                         channel_state = "unknown"
                         if self.redis_client:
@@ -701,7 +701,7 @@ class ProxyServer:
                             self.redis_client.delete(f"ts_proxy:channel:{channel_id}:last_client_disconnect_time")

                     else:
-                        # === NON-OWNER CHANNEL HANDLING ===
+                        # === NON-OWNER CHANNEL HANDLING ===
                         # For channels we don't own, check if they've been stopped/cleaned up in Redis
                         if self.redis_client:
                             # Method 1: Check for stopping key
@@ -710,21 +710,21 @@ class ProxyServer:
                                 logger.debug(f"Non-owner cleanup: Channel {channel_id} has stopping flag in Redis, cleaning up local resources")
                                 self._cleanup_local_resources(channel_id)
                                 continue

                             # Method 2: Check if owner still exists
                             owner_key = f"ts_proxy:channel:{channel_id}:owner"
                             if not self.redis_client.exists(owner_key):
                                 logger.debug(f"Non-owner cleanup: Channel {channel_id} has no owner in Redis, cleaning up local resources")
                                 self._cleanup_local_resources(channel_id)
                                 continue

                             # Method 3: Check if metadata still exists
                             metadata_key = f"ts_proxy:channel:{channel_id}:metadata"
                             if not self.redis_client.exists(metadata_key):
                                 logger.debug(f"Non-owner cleanup: Channel {channel_id} has no metadata in Redis, cleaning up local resources")
                                 self._cleanup_local_resources(channel_id)
                                 continue

                         # Check for local client count - if zero, clean up our local resources
                         if self.client_managers[channel_id].get_client_count() == 0:
                             # We're not the owner, and we have no local clients - clean up our resources
@@ -831,6 +831,7 @@ class ProxyServer:
                 self.redis_client.hset(metadata_key, "last_active", str(time.time()))
                 self.redis_client.expire(metadata_key, 30)  # Reset TTL on metadata hash
+                logger.debug(f"Refreshed metadata TTL for channel {channel_id}")

     def update_channel_state(self, channel_id, new_state, additional_fields=None):
         """Update channel state with proper history tracking and logging"""
         if not self.redis_client:
@@ -887,7 +888,7 @@ class ProxyServer:
             if channel_id in self.client_managers:
                 del self.client_managers[channel_id]
                 logger.info(f"Non-owner cleanup: Removed client manager for channel {channel_id}")

             return True
         except Exception as e:
             logger.error(f"Error cleaning up local resources: {e}", exc_info=True)
@@ -21,6 +21,14 @@ from rest_framework.permissions import IsAuthenticated
 logger = logging.getLogger("ts_proxy")

+def get_client_ip(request):
+    x_forwarded_for = request.META.get('HTTP_X_FORWARDED_FOR')
+    if x_forwarded_for:
+        ip = x_forwarded_for.split(',')[0]
+    else:
+        ip = request.META.get('REMOTE_ADDR')
+    return ip
+
 @api_view(['GET'])
 def stream_ts(request, channel_id):
     """Stream TS data to client with immediate response and keep-alive packets during initialization"""
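X-Forwarded-For is set by whatever sits in front of the app; when no trusted reverse proxy rewrites it, the header is client-controlled and trivially spoofable. A hedged variant that only honors it behind a known proxy (the trust_forwarded_for flag is an assumption, not in the commit):

def get_client_ip(request, trust_forwarded_for=False):
    if trust_forwarded_for:
        x_forwarded_for = request.META.get('HTTP_X_FORWARDED_FOR')
        if x_forwarded_for:
            # First hop is the original client when the proxy appends to the chain
            return x_forwarded_for.split(',')[0].strip()
    return request.META.get('REMOTE_ADDR')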
@@ -31,6 +39,7 @@ def stream_ts(request, channel_id):
     try:
         # Generate a unique client ID
         client_id = f"client_{int(time.time() * 1000)}_{random.randint(1000, 9999)}"
+        client_ip = get_client_ip(request)
         logger.info(f"[{client_id}] Requested stream for channel {channel_id}")

         # Extract client user agent early
@@ -156,7 +165,7 @@ def stream_ts(request, channel_id):
         # Register client
         buffer = proxy_server.stream_buffers[channel_id]
         client_manager = proxy_server.client_managers[channel_id]
-        client_manager.add_client(client_id, client_user_agent)
+        client_manager.add_client(client_id, client_ip, client_user_agent)
         logger.info(f"[{client_id}] Client registered with channel {channel_id}")

         # Define a single generate function
@@ -14,5 +14,5 @@ class MyWebSocketConsumer(AsyncWebsocketConsumer):
         data = json.loads(text_data)
         print("Received:", data)

-    async def m3u_refresh(self, event):
+    async def update(self, event):
         await self.send(text_data=json.dumps(event))
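Channels dispatches a group message to the consumer method named by its "type" key, so renaming the handler to update() is what lets every {"type": "update", ...} envelope land in one place. A sketch of the surrounding consumer; the connect/disconnect bodies are assumptions about how the "updates" group is joined, only the update() handler comes from the diff:

import json
from channels.generic.websocket import AsyncWebsocketConsumer

class MyWebSocketConsumer(AsyncWebsocketConsumer):
    async def connect(self):
        await self.channel_layer.group_add("updates", self.channel_name)
        await self.accept()

    async def disconnect(self, close_code):
        await self.channel_layer.group_discard("updates", self.channel_name)

    async def update(self, event):
        # Forward the whole envelope; the frontend switches on event.data.type
        await self.send(text_data=json.dumps(event))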
@@ -1,6 +1,7 @@
 import os
 from pathlib import Path
 from datetime import timedelta
+from celery.schedules import crontab

 BASE_DIR = Path(__file__).resolve().parent.parent
@@ -150,6 +151,13 @@ AUTH_USER_MODEL = 'accounts.User'
 CELERY_BROKER_URL = os.environ.get('CELERY_BROKER_URL', 'redis://localhost:6379/0')
 CELERY_RESULT_BACKEND = CELERY_BROKER_URL

+CELERY_BEAT_SCHEDULE = {
+    'fetch-channel-statuses': {
+        'task': 'apps.proxy.tasks.fetch_channel_stats',
+        'schedule': 2.0,
+    },
+}
+
 MEDIA_ROOT = BASE_DIR / 'media'
 MEDIA_URL = '/media/'
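Celery interprets a bare number as seconds, so 'schedule': 2.0 runs the stats task every two seconds. An equivalent sketch using timedelta (already imported at the top of settings.py), which reads less ambiguously:

from datetime import timedelta

CELERY_BEAT_SCHEDULE = {
    'fetch-channel-statuses': {
        'task': 'apps.proxy.tasks.fetch_channel_stats',
        'schedule': timedelta(seconds=2),  # same as 2.0 (seconds)
    },
}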
@@ -3,6 +3,7 @@
 ; exec-before = python manage.py migrate --noinput

 attach-daemon = celery -A dispatcharr worker -l info
+attach-daemon = celery -A dispatcharr beat -l info
 attach-daemon = redis-server
 attach-daemon = daphne -b 0.0.0.0 -p 8001 dispatcharr.asgi:application
 attach-daemon = cd /app/frontend && npm run dev
@@ -17,6 +18,7 @@ socket = /app/uwsgi.sock
 chmod-socket = 777
 vacuum = true
 die-on-term = true
 static-map = /static=/app/static

 # Worker management (Optimize for I/O bound tasks)
 workers = 4
@@ -2,7 +2,8 @@
 ; exec-before = python manage.py collectstatic --noinput
 ; exec-before = python manage.py migrate --noinput

-attach-daemon = celery -A dispatcharr worker -l info
+attach-daemon = celery -A dispatcharr worker -l error
+attach-daemon = celery -A dispatcharr beat -l error
 attach-daemon = redis-server
 attach-daemon = daphne -b 0.0.0.0 -p 8001 dispatcharr.asgi:application
@@ -16,6 +17,7 @@ socket = /app/uwsgi.sock
 chmod-socket = 777
 vacuum = true
 die-on-term = true
 static-map = /static=/app/static

 # Worker management (Optimize for I/O bound tasks)
 workers = 4
@@ -12,6 +12,7 @@ import Channels from './pages/Channels';
 import M3U from './pages/M3U';
 import EPG from './pages/EPG';
 import Guide from './pages/Guide';
+import Stats from './pages/Stats';
 import Settings from './pages/Settings';
 import StreamProfiles from './pages/StreamProfiles';
 import useAuthStore from './store/auth';
@@ -122,6 +123,7 @@ const App = () => {
               element={<StreamProfiles />}
             />
             <Route path="/guide" element={<Guide />} />
+            <Route path="/stats" element={<Stats />} />
             <Route path="/settings" element={<Settings />} />
           </>
         ) : (
@@ -7,6 +7,7 @@ import React, {
 } from 'react';
 import useStreamsStore from './store/streams';
 import { notifications } from '@mantine/notifications';
+import useChannelsStore from './store/channels';

 export const WebsocketContext = createContext(false, null, () => {});
@@ -15,6 +16,7 @@ export const WebsocketProvider = ({ children }) => {
   const [val, setVal] = useState(null);

   const { fetchStreams } = useStreamsStore();
+  const { setChannelStats } = useChannelsStore();

   const ws = useRef(null);
@@ -48,17 +50,22 @@ export const WebsocketProvider = ({ children }) => {

     socket.onmessage = async (event) => {
       event = JSON.parse(event.data);
-      switch (event.type) {
+      switch (event.data.type) {
         case 'm3u_refresh':
-          if (event.message?.success) {
+          if (event.data.success) {
             fetchStreams();
             notifications.show({
-              message: event.message.message,
+              message: event.data.message,
               color: 'green.5',
             });
           }
           break;

+        case 'channel_stats':
+          console.log(JSON.parse(event.data.stats));
+          setChannelStats(JSON.parse(event.data.stats));
+          break;
+
         default:
           console.error(`Unknown websocket event type: ${event.type}`);
           break;
@@ -817,4 +817,43 @@ export default class API {

     return retval;
   }
+
+  static async getChannelStats(uuid = null) {
+    const response = await fetch(`${host}/proxy/ts/status`, {
+      headers: {
+        'Content-Type': 'application/json',
+        Authorization: `Bearer ${await API.getAuthToken()}`,
+      },
+    });
+
+    const retval = await response.json();
+    return retval;
+  }
+
+  static async stopChannel(id) {
+    const response = await fetch(`${host}/proxy/ts/stop/${id}`, {
+      method: 'POST',
+      headers: {
+        'Content-Type': 'application/json',
+        Authorization: `Bearer ${await API.getAuthToken()}`,
+      },
+    });
+
+    const retval = await response.json();
+    return retval;
+  }
+
+  static async stopClient(channelId, clientId) {
+    const response = await fetch(`${host}/proxy/ts/stop_client/${channelId}`, {
+      method: 'POST',
+      headers: {
+        'Content-Type': 'application/json',
+        Authorization: `Bearer ${await API.getAuthToken()}`,
+      },
+      body: JSON.stringify({ client_id: clientId }),
+    });
+
+    const retval = await response.json();
+    return retval;
+  }
 }
@@ -79,6 +79,7 @@ const Sidebar = ({ collapsed, toggleDrawer, drawerWidth, miniDrawerWidth }) => {
       path: '/stream-profiles',
     },
     { label: 'TV Guide', icon: <LayoutGrid size={20} />, path: '/guide' },
+    { label: 'Stats', icon: <LayoutGrid size={20} />, path: '/stats' },
     {
       label: 'Settings',
       icon: <LucideSettings size={20} />,
@@ -113,6 +113,7 @@ const ChannelStreams = ({ channel, isExpanded }) => {
     enableColumnFilters: false,
     enableBottomToolbar: false,
     enableTopToolbar: false,
     enableTableHead: false,
     columnFilterDisplayMode: 'popover',
     enablePagination: false,
     enableRowVirtualization: true,
frontend/src/pages/Stats.jsx (new file, 302 lines)
@@ -0,0 +1,302 @@
import React, { useMemo, useState, useEffect, useCallback } from 'react';
import { ActionIcon, Box, Center, Grid } from '@mantine/core';
import { MantineReactTable, useMantineReactTable } from 'mantine-react-table';
import { TableHelper } from '../helpers';
import API from '../api';
import useChannelsStore from '../store/channels';
import logo from '../images/logo.png';
import {
  Tv2,
  ScreenShare,
  Scroll,
  SquareMinus,
  CirclePlay,
  SquarePen,
  Binary,
  ArrowDown01,
  SquareX,
} from 'lucide-react';

const ChannelsPage = () => {
  const { channels, stats: channelStats } = useChannelsStore();
  const [activeChannels, setActiveChannels] = useState([]);
  const [clients, setClients] = useState([]);

  const channelsColumns = useMemo(
    () => [
      {
        id: 'logo',
        header: 'Logo',
        accessorKey: 'logo_url',
        size: 50,
        Cell: ({ cell }) => (
          <Center>
            <img src={cell.getValue() || logo} width="20" alt="channel logo" />
          </Center>
        ),
      },
      {
        id: 'name',
        header: 'Name',
        accessorKey: 'name',
        Cell: ({ cell }) => (
          <div
            style={{
              whiteSpace: 'nowrap',
              overflow: 'hidden',
              textOverflow: 'ellipsis',
            }}
          >
            {cell.getValue()}
          </div>
        ),
      },
      {
        id: 'started',
        header: 'Started',
        accessorFn: (row) => {
          // Get the current date and time
          const currentDate = new Date();
          // Calculate the start date by subtracting uptime (in seconds)
          const startDate = new Date(currentDate.getTime() - row.uptime * 1000);
          // Format the start date as a locale string, e.g. "Fri, 03/14/2025, 02:00:00 PM"
          return startDate.toLocaleString(undefined, {
            weekday: 'short', // optional, adds day of the week
            year: 'numeric',
            month: '2-digit',
            day: '2-digit',
            hour: '2-digit',
            minute: '2-digit',
            second: '2-digit',
            hour12: true, // 12-hour format with AM/PM
          });
        },
      },
      {
        id: 'uptime',
        header: 'Uptime',
        size: 50,
        accessorFn: (row) => {
          const days = Math.floor(row.uptime / (3600 * 24)); // Number of whole days
          const hours = Math.floor((row.uptime % (3600 * 24)) / 3600); // Remaining hours
          const minutes = Math.floor((row.uptime % 3600) / 60); // Remaining minutes
          const seconds = Math.floor(row.uptime % 60); // Remaining seconds

          // Format uptime as "d hh:mm:ss"
          return `${days ? days : ''} ${String(hours).padStart(2, '0')}:${String(minutes).padStart(2, '0')}:${String(seconds).padStart(2, '0')}`;
        },
        mantineTableBodyCellProps: {
          align: 'right',
        },
      },
      {
        id: 'num_clients',
        header: 'Clients',
        accessorKey: 'client_count',
        size: 50,
        mantineTableBodyCellProps: {
          align: 'center',
        },
      },
    ],
    []
  );

  const stopChannel = async (id) => {
    await API.stopChannel(id);
  };

  const stopClient = async (channelId, clientId) => {
    await API.stopClient(channelId, clientId);
  };

  const channelsTable = useMantineReactTable({
    ...TableHelper.defaultProperties,
    renderTopToolbar: false,
    columns: channelsColumns,
    data: activeChannels,
    enableRowActions: true,
    mantineTableBodyCellProps: {
      style: {
        padding: 4,
        borderColor: '#444',
        color: '#E0E0E0',
        fontSize: '0.85rem',
      },
    },
    renderRowActions: ({ row }) => (
      <Box sx={{ justifyContent: 'right' }}>
        <Center>
          <ActionIcon
            size="sm"
            variant="transparent"
            color="red.9"
            onClick={() => stopChannel(row.original.uuid)}
          >
            <SquareX size="18" />
          </ActionIcon>
        </Center>
      </Box>
    ),
  });

  const clientsTable = useMantineReactTable({
    ...TableHelper.defaultProperties,
    renderTopToolbar: false,
    data: clients,
    columns: useMemo(
      () => [
        {
          id: 'logo',
          header: 'Logo',
          accessorKey: 'channel.logo_url',
          size: 50,
          Cell: ({ cell }) => (
            <Center>
              <img
                src={cell.getValue() || logo}
                width="20"
                alt="channel logo"
              />
            </Center>
          ),
        },
        {
          header: 'Channel',
          accessorKey: 'channel.name',
          size: 100,
        },
        {
          header: 'User-Agent',
          accessorKey: 'user_agent',
          size: 250,
          mantineTableBodyCellProps: {
            style: {
              whiteSpace: 'nowrap',
              maxWidth: 400,
            },
          },
        },
        {
          header: 'IP Address',
          accessorKey: 'ip_address',
          size: 50,
        },
      ],
      []
    ),
    mantineTableBodyCellProps: {
      style: {
        padding: 4,
        borderColor: '#444',
        color: '#E0E0E0',
        fontSize: '0.85rem',
      },
    },
    enableRowActions: true,
    renderRowActions: ({ row }) => (
      <Box sx={{ justifyContent: 'right' }}>
        <Center>
          <ActionIcon
            size="sm"
            variant="transparent"
            color="red.9"
            onClick={() =>
              stopClient(row.original.channel.uuid, row.original.client_id)
            }
          >
            <SquareX size="18" />
          </ActionIcon>
        </Center>
      </Box>
    ),
  });

  useEffect(() => {
    // Guard against the store's initial value, before any stats have arrived
    if (!channelStats.channels) {
      return;
    }

    const stats = channelStats.channels.map((ch) => ({
      ...ch,
      ...Object.values(channels).filter(
        (channel) => channel.uuid === ch.channel_id
      )[0],
    }));
    setActiveChannels(stats);

    console.log(stats);

    const clientStats = stats.reduce((acc, ch) => {
      return acc.concat(
        ch.clients.map((client) => ({
          ...client,
          channel: ch,
        }))
      );
    }, []);
    setClients(clientStats);
    console.log(clientStats);
  }, [channelStats]);

  // const fetchData = useCallback(async () => {
  //   const response = await API.getChannelStats();
  //   const channelStats = response.channels.map((ch) => ({
  //     ...ch,
  //     ...Object.values(channels).filter(
  //       (channel) => channel.uuid === response.channels[0].channel_id
  //     )[0],
  //   }));
  //   setActiveChannels(channelStats);

  //   console.log(channelStats);

  //   const clientStats = channelStats.reduce((acc, ch) => {
  //     return acc.concat(
  //       ch.clients.map((client) => ({
  //         ...client,
  //         channel: ch,
  //       }))
  //     );
  //   }, []);
  //   setClients(clientStats);
  //   console.log(clientStats);
  // }, [channels]);

  // useEffect(() => {
  //   fetchData();
  // }, [fetchData]);

  return (
    <Grid style={{ padding: 18 }}>
      <Grid.Col span={6}>
        <Box
          style={{
            height: 'calc(100vh - 20px)', // Full viewport height minus padding
            paddingTop: 0, // Top padding
            paddingBottom: 1, // Bottom padding
            paddingRight: 0.5,
            paddingLeft: 0,
            boxSizing: 'border-box', // Include padding in height calculation
            overflow: 'hidden', // Prevent parent scrolling
          }}
        >
          <MantineReactTable table={channelsTable} />
        </Box>
      </Grid.Col>
      <Grid.Col span={6}>
        <Box
          style={{
            height: 'calc(100vh - 20px)', // Full viewport height minus padding
            paddingTop: 0, // Top padding
            paddingBottom: 1, // Bottom padding
            paddingRight: 0,
            paddingLeft: 0.5,
            boxSizing: 'border-box', // Include padding in height calculation
            overflow: 'hidden', // Prevent parent scrolling
          }}
        >
          <MantineReactTable table={clientsTable} />
        </Box>
      </Grid.Col>
    </Grid>
  );
};

export default ChannelsPage;
@@ -5,6 +5,7 @@ const useChannelsStore = create((set) => ({
   channels: [],
   channelGroups: [],
   channelsPageSelection: [],
+  stats: [],
   isLoading: false,
   error: null,
@@ -87,6 +88,11 @@ const useChannelsStore = create((set) => ({

   setChannelsPageSelection: (channelsPageSelection) =>
     set((state) => ({ channelsPageSelection })),
+
+  setChannelStats: (stats) =>
+    set((state) => ({
+      stats,
+    })),
 }));

 export default useChannelsStore;
@@ -1,8 +1,14 @@
-import { defineConfig } from "vite";
-import react from "@vitejs/plugin-react-swc";
+import { defineConfig } from 'vite';
+import react from '@vitejs/plugin-react-swc';

 // https://vite.dev/config/
 export default defineConfig({
+  // The base URL for the build, adjust this to match your desired path
+  build: {
+    // Make sure assets are placed correctly in the build output
+    assetsDir: 'static/assets',
+  },
+
   plugins: [react()],

   server: {
@@ -2,6 +2,7 @@ Django==5.1.6
 psycopg2-binary==2.9.10
 redis==4.5.5
 celery
+celery[redis]
 djangorestframework==3.15.2
 requests==2.32.3
 psutil==7.0.0