diff --git a/apps/channels/admin.py b/apps/channels/admin.py
index d4eb7fcb..49ef04a9 100644
--- a/apps/channels/admin.py
+++ b/apps/channels/admin.py
@@ -4,23 +4,31 @@ from .models import Stream, Channel, ChannelGroup
 @admin.register(Stream)
 class StreamAdmin(admin.ModelAdmin):
     list_display = (
-        'id', 'name', 'group_name', 'custom_url',
-        'current_viewers', 'updated_at',
+        'id',  # Primary Key
+        'name',
+        'group_name',
+        'custom_url',
+        'current_viewers',
+        'updated_at',
     )
     list_filter = ('group_name',)
-    search_fields = ('name', 'custom_url', 'group_name')
+    search_fields = ('id', 'name', 'custom_url', 'group_name')  # Added 'id' for searching by ID
     ordering = ('-updated_at',)

 @admin.register(Channel)
 class ChannelAdmin(admin.ModelAdmin):
     list_display = (
-        'channel_number', 'channel_name', 'channel_group', 'tvg_name'
+        'id',  # Primary Key
+        'channel_number',
+        'channel_name',
+        'channel_group',
+        'tvg_name'
     )
     list_filter = ('channel_group',)
-    search_fields = ('channel_name', 'channel_group__name', 'tvg_name')
+    search_fields = ('id', 'channel_name', 'channel_group__name', 'tvg_name')  # Added 'id'
     ordering = ('channel_number',)

 @admin.register(ChannelGroup)
 class ChannelGroupAdmin(admin.ModelAdmin):
-    list_display = ('name',)
-    search_fields = ('name',)
+    list_display = ('id', 'name')  # Added 'id'
+    search_fields = ('id', 'name')  # Added 'id'
diff --git a/apps/channels/management/__init__.py b/apps/channels/management/__init__.py
deleted file mode 100644
index e69de29b..00000000
diff --git a/apps/channels/management/commands/__init__.py b/apps/channels/management/commands/__init__.py
deleted file mode 100644
index e69de29b..00000000
diff --git a/apps/channels/management/commands/remove_duplicates.py b/apps/channels/management/commands/remove_duplicates.py
deleted file mode 100644
index 58fa0a4d..00000000
--- a/apps/channels/management/commands/remove_duplicates.py
+++ /dev/null
@@ -1,24 +0,0 @@
-from django.core.management.base import BaseCommand
-from apps.channels.models import Stream, Channel, ChannelGroup
-from apps.m3u.models import M3UAccount
-
-class Command(BaseCommand):
-    help = "Delete all Channels, Streams, M3Us from the database (example)."
-
-    def handle(self, *args, **kwargs):
-        # Delete all Streams
-        stream_count = Stream.objects.count()
-        Stream.objects.all().delete()
-        self.stdout.write(self.style.SUCCESS(f"Deleted {stream_count} Streams."))
-
-        # Or delete Channels:
-        channel_count = Channel.objects.count()
-        Channel.objects.all().delete()
-        self.stdout.write(self.style.SUCCESS(f"Deleted {channel_count} Channels."))
-
-        # If you have M3UAccount:
-        m3u_count = M3UAccount.objects.count()
-        M3UAccount.objects.all().delete()
-        self.stdout.write(self.style.SUCCESS(f"Deleted {m3u_count} M3U accounts."))
-
-        self.stdout.write(self.style.SUCCESS("Successfully deleted the requested objects."))
diff --git a/apps/m3u/models.py b/apps/m3u/models.py
index 3e1fd228..36f49374 100644
--- a/apps/m3u/models.py
+++ b/apps/m3u/models.py
@@ -210,7 +210,7 @@ def create_profile_for_m3u_account(sender, instance, created, **kwargs):
         m3u_account=instance,
         is_default=True,
     )
-    console.log(profile)
+    profile.max_streams = instance.max_streams

     profile.save()
diff --git a/core/management/commands/kill_processes.py b/core/management/commands/kill_processes.py
new file mode 100644
index 00000000..f4b186ae
--- /dev/null
+++ b/core/management/commands/kill_processes.py
@@ -0,0 +1,27 @@
+# core/management/commands/kill_processes.py
+
+import psutil
+from django.core.management.base import BaseCommand
+
+class Command(BaseCommand):
+    help = "Kills all processes with 'ffmpeg' or 'streamlink' in their name or command line."
+
+    def handle(self, *args, **options):
+        kill_count = 0
+
+        for proc in psutil.process_iter(['pid', 'name', 'cmdline']):
+            try:
+                name = proc.info.get('name') or ''
+                cmdline = ' '.join(proc.info.get('cmdline') or [])
+                lower_name = name.lower()
+                lower_cmdline = cmdline.lower()
+
+                if ('ffmpeg' in lower_name or 'ffmpeg' in lower_cmdline or
+                        'streamlink' in lower_name or 'streamlink' in lower_cmdline):
+                    self.stdout.write(f"Killing PID {proc.pid}: {name} {cmdline}")
+                    proc.kill()
+                    kill_count += 1
+            except (psutil.NoSuchProcess, psutil.AccessDenied, psutil.ZombieProcess):
+                continue
+
+        self.stdout.write(self.style.SUCCESS(f"Killed {kill_count} processes."))
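Note on the new management command: since it lives under core/management/commands, it should be runnable as `python manage.py kill_processes`. It matches on process name or command line across everything psutil can enumerate, so on a shared host it will also terminate ffmpeg/streamlink processes that Dispatcharr did not start; AccessDenied and already-exited processes are silently skipped.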
diff --git a/core/views.py b/core/views.py
index 560096e0..91aeb600 100644
--- a/core/views.py
+++ b/core/views.py
@@ -1,22 +1,27 @@
+# core/views.py
 import os
 import sys
 import subprocess
 import logging
 import re
+import redis

 from django.conf import settings
 from django.http import StreamingHttpResponse, HttpResponseServerError
-from django.db.models import F
 from django.shortcuts import render

 from apps.channels.models import Channel, Stream
 from apps.m3u.models import M3UAccountProfile
 from core.models import StreamProfile

+# Import the persistent lock (the “real” lock)
+from dispatcharr.persistent_lock import PersistentLock
+
 # Configure logging to output to the console.
 logging.basicConfig(stream=sys.stdout, level=logging.DEBUG)
 logger = logging.getLogger(__name__)

+
 def settings_view(request):
     """
     Renders the settings page.
@@ -28,10 +33,11 @@ def stream_view(request, stream_id):
     """
     Streams the first available stream for the given channel.
     It uses the channel’s assigned StreamProfile.
+    A persistent Redis lock is used to prevent concurrent streaming on the same channel.
     """
     try:
         # Retrieve the channel by the provided stream_id.
-        channel = Channel.objects.get(id=stream_id)
+        channel = Channel.objects.get(channel_number=stream_id)
         logger.debug("Channel retrieved: ID=%s, Name=%s", channel.id, channel.channel_name)

         # Ensure the channel has at least one stream.
@@ -43,50 +49,44 @@ def stream_view(request, stream_id):
         stream = channel.streams.first()
         logger.debug("Using stream: ID=%s, Name=%s", stream.id, stream.name)

-        # Retrieve m3u account to determine number of streams and profiles
+        # Retrieve the M3U account associated with the stream.
         m3u_account = stream.m3u_account
-        logger.debug(f"Using M3U account ID={m3u_account.id}, Name={m3u_account.name}")
+        logger.debug("Using M3U account ID=%s, Name=%s", m3u_account.id, m3u_account.name)

         # Use the custom URL if available; otherwise, use the standard URL.
         input_url = stream.custom_url or stream.url
         logger.debug("Input URL: %s", input_url)

-        # Determine which profile we can use
+        # Determine which profile we can use.
         m3u_profiles = m3u_account.profiles.all()
         default_profile = next((obj for obj in m3u_profiles if obj.is_default), None)
-
-        # Get the remaining objects
         profiles = [obj for obj in m3u_profiles if not obj.is_default]

         active_profile = None
+        # -- Loop through profiles and pick the first active one --
         for profile in [default_profile] + profiles:
             logger.debug(f'Checking profile {profile.name}...')
             if not profile.is_active:
-                logger.debug(f'Profile is not active, skipping.')
-                continue
-            if profile.current_viewers < profile.max_streams:
-                logger.debug(f"Using M3U profile ID={profile.id}")
-                active_profile = M3UAccountProfile.objects.get(id=profile.id)
-                logger.debug("Executing the following pattern replacement:")
-                logger.debug(f"  search: {profile.search_pattern}")
-                # Convert $1 to \1 for Python regex
-                safe_replace_pattern = re.sub(r'\$(\d+)', r'\\\1', profile.replace_pattern)
-                logger.debug(f"  replace: {profile.replace_pattern}")
-                logger.debug(f"  safe replace: {safe_replace_pattern}")
-                stream_url = re.sub(profile.search_pattern, safe_replace_pattern, input_url)
-                logger.debug(f"Generated stream url: {stream_url}")
-                break
-            else:
-                logger.debug(f'Profile {profile.name} as exceeded its stream count: {profile.current_viewers} / {profile.max_streams}')
+                logger.debug('Profile is not active, skipping.')
                 continue
+            # *** DISABLE FAKE LOCKS: Ignore current_viewers/max_streams check ***
+            logger.debug(f"Using M3U profile ID={profile.id} (ignoring viewer count limits)")
+            active_profile = M3UAccountProfile.objects.get(id=profile.id)
+            # Prepare the pattern replacement.
+            logger.debug("Executing the following pattern replacement:")
+            logger.debug(f"  search: {profile.search_pattern}")
+            safe_replace_pattern = re.sub(r'\$(\d+)', r'\\\1', profile.replace_pattern)
+            logger.debug(f"  replace: {profile.replace_pattern}")
+            logger.debug(f"  safe replace: {safe_replace_pattern}")
+            stream_url = re.sub(profile.search_pattern, safe_replace_pattern, input_url)
+            logger.debug(f"Generated stream url: {stream_url}")
+            break

         if active_profile is None:
             logger.exception("No available profiles for the stream")
             return HttpResponseServerError("No available profiles for the stream")

-        # Get the stream profile set on the channel.
-        # (Ensure your Channel model has a 'stream_profile' field.)
         stream_profile = channel.stream_profile
         if not stream_profile:
             logger.error("No stream profile set for channel ID=%s", channel.id)
@@ -105,18 +105,29 @@ def stream_view(request, stream_id):
         cmd = [stream_profile.command] + parameters.split()
         logger.debug("Executing command: %s", cmd)

-        # Increment the viewer count.
-        active_profile.current_viewers += 1
-        active_profile.save()
-        logger.debug("Viewer count incremented for stream ID=%s", stream.id)
+        # Acquire the persistent Redis lock.
+        redis_host = getattr(settings, "REDIS_HOST", "localhost")
+        redis_client = redis.Redis(host=redis_host, port=6379, db=0)
+        lock_key = f"lock:channel:{channel.id}"
+        persistent_lock = PersistentLock(redis_client, lock_key, lock_timeout=120)
+
+        if not persistent_lock.acquire():
+            logger.error("Could not acquire persistent lock for channel %s", channel.id)
+            return HttpResponseServerError("Resource busy, please try again later.")
+
+        try:
+            # Start the streaming process.
+            process = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+        except Exception as e:
+            persistent_lock.release()  # Ensure the lock is released on error.
+            logger.exception("Error starting stream for channel ID=%s", stream_id)
+            return HttpResponseServerError(f"Error starting stream: {e}")

-        # Start the streaming process.
-        process = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
     except Exception as e:
-        logger.exception("Error starting stream for channel ID=%s", stream_id)
-        return HttpResponseServerError(f"Error starting stream: {e}")
+        logger.exception("Error preparing stream for channel ID=%s", stream_id)
+        return HttpResponseServerError(f"Error preparing stream: {e}")

-    def stream_generator(proc, s):
+    def stream_generator(proc, s, persistent_lock):
         try:
             while True:
                 chunk = proc.stdout.read(8192)
@@ -124,9 +135,15 @@ def stream_view(request, stream_id):
                     break
                 yield chunk
         finally:
-            # Decrement the viewer count once streaming ends.
-            active_profile.current_viewers -= 1
-            active_profile.save()
-            logger.debug("Viewer count decremented for stream ID=%s", s.id)
+            try:
+                proc.terminate()
+                logger.debug("Streaming process terminated for stream ID=%s", s.id)
+            except Exception as e:
+                logger.error("Error terminating process for stream ID=%s: %s", s.id, e)
+            persistent_lock.release()
+            logger.debug("Persistent lock released for channel ID=%s", channel.id)

-    return StreamingHttpResponse(stream_generator(process, stream), content_type="video/MP2T")
+    return StreamingHttpResponse(
+        stream_generator(process, stream, persistent_lock),
+        content_type="video/MP2T"
+    )
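One thing the view does not do yet is refresh the lock: PersistentLock is created with lock_timeout=120, so the Redis key expires after two minutes even though a viewer will usually stream far longer, at which point a second request can acquire the same channel. A minimal sketch of a generator that keeps the lock alive using the refresh() method this PR adds (the refresh_every interval and the standalone-function shape are illustrative assumptions, not part of the patch):

    import time

    def stream_generator(proc, s, persistent_lock, refresh_every=30):
        """Yield transport-stream chunks while periodically extending the Redis lock."""
        last_refresh = time.monotonic()
        try:
            while True:
                chunk = proc.stdout.read(8192)
                if not chunk:
                    break
                # Extend the lock's TTL so it outlives long viewing sessions.
                if time.monotonic() - last_refresh > refresh_every:
                    persistent_lock.refresh()
                    last_refresh = time.monotonic()
                yield chunk
        finally:
            proc.terminate()
            persistent_lock.release()

Because refresh() only extends the TTL when the stored token still matches, a lock that has already expired and been re-acquired by another request is not silently stolen back.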
+ """ + self.lock_token = str(uuid.uuid4()) + # Set the lock with NX (only if not exists) and EX (expire time) + result = self.redis_client.set(self.lock_key, self.lock_token, nx=True, ex=self.lock_timeout) + return result is not None + + def refresh(self) -> bool: + """ + Refresh the lock's expiration time if this instance owns the lock. + Returns True if the expiration was successfully extended. + """ + current_value = self.redis_client.get(self.lock_key) + if current_value and current_value.decode("utf-8") == self.lock_token: + self.redis_client.expire(self.lock_key, self.lock_timeout) + return True + return False + + def release(self) -> bool: + """ + Release the lock only if owned by this instance. + Returns True if the lock was successfully released. + """ + # Use a Lua script for atomicity: only delete if the token matches. + lua_script = """ + if redis.call("get", KEYS[1]) == ARGV[1] then + return redis.call("del", KEYS[1]) + else + return 0 + end + """ + release_lock = self.redis_client.register_script(lua_script) + result = release_lock(keys=[self.lock_key], args=[self.lock_token]) + return result == 1 + +# Example usage (for testing purposes only): +if __name__ == "__main__": + # Connect to Redis on localhost; adjust connection parameters as needed. + client = redis.Redis(host="localhost", port=6379, db=0) + lock = PersistentLock(client, "lock:example_account", lock_timeout=120) + + if lock.acquire(): + print("Lock acquired successfully!") + # Do work here... + # Optionally refresh the lock periodically: + if lock.refresh(): + print("Lock refreshed.") + # Finally, release the lock: + if lock.release(): + print("Lock released.") + else: + print("Failed to release lock.") + else: + print("Failed to acquire lock.") diff --git a/dispatcharr/settings.py b/dispatcharr/settings.py index 166e6c21..d4d60878 100644 --- a/dispatcharr/settings.py +++ b/dispatcharr/settings.py @@ -5,6 +5,7 @@ from datetime import timedelta BASE_DIR = Path(__file__).resolve().parent.parent SECRET_KEY = 'REPLACE_ME_WITH_A_REAL_SECRET' +REDIS_HOST = os.environ.get("REDIS_HOST", "localhost") DEBUG = True ALLOWED_HOSTS = ["*"] diff --git a/docker/docker-compose.yml b/docker/docker-compose.yml index abbfcdf3..a6d7d34c 100644 --- a/docker/docker-compose.yml +++ b/docker/docker-compose.yml @@ -45,7 +45,7 @@ services: extra_hosts: - "host.docker.internal:host-gateway" environment: - - POSTGRES_HOST=dispatcharr_db + - POSTGRES_HOST=db - POSTGRES_DB=dispatcharr - POSTGRES_USER=dispatch - POSTGRES_PASSWORD=secret diff --git a/frontend/src/api.js b/frontend/src/api.js index 671cda3e..fdd68e63 100644 --- a/frontend/src/api.js +++ b/frontend/src/api.js @@ -12,7 +12,7 @@ import useStreamProfilesStore from './store/streamProfiles'; const host = ''; -const getAuthToken = async () => { +export const getAuthToken = async () => { const token = await useAuthStore.getState().getToken(); // Assuming token is stored in Zustand store return token; }; @@ -189,24 +189,34 @@ export default class API { return retval; } - static async assignChannelNumbers(ids) { - const response = await fetch(`${host}/api/channels/channels/assign/`, { - method: 'POST', - headers: { - Authorization: `Bearer ${await getAuthToken()}`, - 'Content-Type': 'application/json', - }, - body: JSON.stringify({ channel_order: ids }), - }); +static async assignChannelNumbers(channelIds) { + // Make the request + const response = await fetch(`${host}/api/channels/channels/assign/`, { + method: 'POST', + headers: { + Authorization: `Bearer ${await getAuthToken()}`, + 
diff --git a/dispatcharr/settings.py b/dispatcharr/settings.py
index 166e6c21..d4d60878 100644
--- a/dispatcharr/settings.py
+++ b/dispatcharr/settings.py
@@ -5,6 +5,7 @@ from datetime import timedelta
 BASE_DIR = Path(__file__).resolve().parent.parent

 SECRET_KEY = 'REPLACE_ME_WITH_A_REAL_SECRET'
+REDIS_HOST = os.environ.get("REDIS_HOST", "localhost")
 DEBUG = True
 ALLOWED_HOSTS = ["*"]
diff --git a/docker/docker-compose.yml b/docker/docker-compose.yml
index abbfcdf3..a6d7d34c 100644
--- a/docker/docker-compose.yml
+++ b/docker/docker-compose.yml
@@ -45,7 +45,7 @@ services:
     extra_hosts:
       - "host.docker.internal:host-gateway"
     environment:
-      - POSTGRES_HOST=dispatcharr_db
+      - POSTGRES_HOST=db
       - POSTGRES_DB=dispatcharr
       - POSTGRES_USER=dispatch
       - POSTGRES_PASSWORD=secret
diff --git a/frontend/src/api.js b/frontend/src/api.js
index 671cda3e..fdd68e63 100644
--- a/frontend/src/api.js
+++ b/frontend/src/api.js
@@ -12,7 +12,7 @@ import useStreamProfilesStore from './store/streamProfiles';

 const host = '';

-const getAuthToken = async () => {
+export const getAuthToken = async () => {
   const token = await useAuthStore.getState().getToken(); // Assuming token is stored in Zustand store
   return token;
 };
@@ -189,24 +189,34 @@ export default class API {
     return retval;
   }

-  static async assignChannelNumbers(ids) {
-    const response = await fetch(`${host}/api/channels/channels/assign/`, {
-      method: 'POST',
-      headers: {
-        Authorization: `Bearer ${await getAuthToken()}`,
-        'Content-Type': 'application/json',
-      },
-      body: JSON.stringify({ channel_order: ids }),
-    });
+  static async assignChannelNumbers(channelIds) {
+    // Make the request
+    const response = await fetch(`${host}/api/channels/channels/assign/`, {
+      method: 'POST',
+      headers: {
+        Authorization: `Bearer ${await getAuthToken()}`,
+        'Content-Type': 'application/json',
+      },
+      body: JSON.stringify({ channel_order: channelIds }),
+    });

-    const retval = await response.json();
-    if (retval.id) {
-      useChannelsStore.getState().addChannel(retval);
-    }
-
-    return retval;
+    // The backend returns something like { "message": "Channels have been auto-assigned!" }
+    if (!response.ok) {
+      // Handle errors gracefully:
+      const text = await response.text();
+      throw new Error(`Assign channels failed: ${response.status} => ${text}`);
+    }
+
+    // Usually the payload is { message: "..." } or similar
+    const retval = await response.json();
+
+    // Automatically refresh the channel list in Zustand:
+    await useChannelsStore.getState().fetchChannels();
+
+    // Return the entire JSON result (so the caller can see the "message")
+    return retval;
   }

   static async createChannelFromStream(values) {
     const response = await fetch(`${host}/api/channels/channels/from-stream/`, {
       method: 'POST',
diff --git a/frontend/src/components/tables/ChannelsTable.js b/frontend/src/components/tables/ChannelsTable.js
index b151b941..e271511c 100644
--- a/frontend/src/components/tables/ChannelsTable.js
+++ b/frontend/src/components/tables/ChannelsTable.js
@@ -23,31 +23,30 @@ import {
   Add as AddIcon,
   SwapVert as SwapVertIcon,
   LiveTv as LiveTvIcon,
+  ContentCopy,
 } from '@mui/icons-material';
 import API from '../../api';
 import ChannelForm from '../forms/Channel';
 import { TableHelper } from '../../helpers';
 import utils from '../../utils';
-import { ContentCopy } from '@mui/icons-material';
 import logo from '../../images/logo.png';
 import useVideoStore from '../../store/useVideoStore'; // NEW import

-const Example = () => {
+const ChannelsTable = () => {
   const [channel, setChannel] = useState(null);
-  const [channelModelOpen, setChannelModalOpen] = useState(false);
+  const [channelModalOpen, setChannelModalOpen] = useState(false);
   const [rowSelection, setRowSelection] = useState([]);
   const [anchorEl, setAnchorEl] = useState(null);
   const [textToCopy, setTextToCopy] = useState('');
   const [snackbarMessage, setSnackbarMessage] = useState('');
   const [snackbarOpen, setSnackbarOpen] = useState(false);
   const { channels, isLoading: channelsLoading } = useChannelsStore();
   const { showVideo } = useVideoStore.getState(); // or useVideoStore()

+  // Configure columns
   const columns = useMemo(
-    //column definitions...
     () => [
       {
         header: '#',
@@ -60,7 +59,6 @@ const Example = () => {
       },
       {
         header: 'Group',
-
         accessorFn: (row) => row.channel_group?.name || '',
       },
       {
@@ -76,7 +74,7 @@ const Example = () => {
             alignItems: 'center',
           }}
         >
-
+          channel logo
         ),
         meta: {
@@ -87,18 +85,16 @@ const Example = () => {
     []
   );

-  //optionally access the underlying virtualizer instance
+  // Access the row virtualizer instance (optional)
   const rowVirtualizerInstanceRef = useRef(null);

   const [isLoading, setIsLoading] = useState(true);
   const [sorting, setSorting] = useState([]);

-  const closeSnackbar = () => {
-    setSnackbarOpen(false);
-  };
+  const closeSnackbar = () => setSnackbarOpen(false);

-  const editChannel = async (channel = null) => {
-    setChannel(channel);
+  const editChannel = async (ch = null) => {
+    setChannel(ch);
     setChannelModalOpen(true);
   };

@@ -110,7 +106,7 @@ const Example = () => {
     showVideo(`/output/stream/${channelNumber}/`);
   }

-  // @TODO: the bulk delete endpoint is currently broken
+  // (Optional) bulk delete; the bulk-delete endpoint is still @TODO
   const deleteChannels = async () => {
     setIsLoading(true);
     const selected = table
@@ -118,21 +114,34 @@ const Example = () => {
       .rows.filter((row) => row.getIsSelected());
     await utils.Limiter(
       4,
-      selected.map((chan) => () => {
-        return deleteChannel(chan.original.id);
-      })
+      selected.map((chan) => () => deleteChannel(chan.original.id))
     );
     // await API.deleteChannels(selected.map((sel) => sel.id));
     setIsLoading(false);
   };

+  // ─────────────────────────────────────────────────────────
+  // The "Assign Channels" button logic
+  // ─────────────────────────────────────────────────────────
   const assignChannels = async () => {
-    const selected = table
-      .getRowModel()
-      .rows.filter((row) => row.getIsSelected());
-    await API.assignChannelNumbers(selected.map((sel) => sel.original.id));
+    try {
+      // Get row order from the table
+      const rowOrder = table.getRowModel().rows.map((row) => row.original.id);

-    // @TODO: update the channels that were assigned
+      // Call our custom API endpoint
+      const result = await API.assignChannelNumbers(rowOrder);
+
+      // We might get { message: "Channels have been auto-assigned!" }
+      setSnackbarMessage(result.message || 'Channels assigned');
+      setSnackbarOpen(true);
+
+      // Refresh the channel list
+      await useChannelsStore.getState().fetchChannels();
+    } catch (err) {
+      console.error(err);
+      setSnackbarMessage('Failed to assign channels');
+      setSnackbarOpen(true);
+    }
   };

   const closeChannelForm = () => {
@@ -147,7 +156,7 @@ const Example = () => {
   }, []);

   useEffect(() => {
-    //scroll to the top of the table when the sorting changes
+    // Scroll to the top of the table when sorting changes
     try {
       rowVirtualizerInstanceRef.current?.scrollToIndex?.(0);
     } catch (error) {
@@ -159,6 +168,7 @@ const Example = () => {
     setAnchorEl(null);
     setSnackbarMessage('');
   };
+  const openPopover = Boolean(anchorEl);

   const handleCopy = async () => {
     try {
@@ -167,33 +177,30 @@ const Example = () => {
     } catch (err) {
       setSnackbarMessage('Failed to copy');
     }
-    setSnackbarOpen(true);
   };

-  const open = Boolean(anchorEl);
-
-  const copyM3UUrl = async (event) => {
+  // Example copy URLs
+  const copyM3UUrl = (event) => {
     setAnchorEl(event.currentTarget);
     setTextToCopy(
       `${window.location.protocol}//${window.location.host}/output/m3u`
     );
   };
-
-  const copyEPGUrl = async (event) => {
+  const copyEPGUrl = (event) => {
     setAnchorEl(event.currentTarget);
     setTextToCopy(
       `${window.location.protocol}//${window.location.host}/output/epg`
     );
   };
-
-  const copyHDHRUrl = async (event) => {
+  const copyHDHRUrl = (event) => {
     setAnchorEl(event.currentTarget);
     setTextToCopy(
       `${window.location.protocol}//${window.location.host}/output/hdhr`
     );
   };

+  // Configure the MaterialReactTable
   const table = useMaterialReactTable({
     ...TableHelper.defaultProperties,
     columns,
@@ -208,8 +215,8 @@ const Example = () => {
       sorting,
       rowSelection,
     },
-    rowVirtualizerInstanceRef, //optional
-    rowVirtualizerOptions: { overscan: 5 }, //optionally customize the row virtualizer
+    rowVirtualizerInstanceRef, // optional
+    rowVirtualizerOptions: { overscan: 5 },
     initialState: {
       density: 'compact',
     },
@@ -246,20 +253,15 @@ const Example = () => {
     ),
     muiTableContainerProps: {
       sx: {
-        height: 'calc(100vh - 75px)', // Subtract padding to avoid cutoff
-        overflowY: 'auto', // Internal scrolling for the table
+        height: 'calc(100vh - 75px)',
+        overflowY: 'auto',
       },
     },
     muiSearchTextFieldProps: {
      variant: 'standard',
     },
     renderTopToolbarCustomActions: ({ table }) => (
-
+
         Channels
         {
-
+
@@ -314,14 +312,17 @@ const Example = () => {
   return (
+
+      {/* Channel Form Modal */}
+
       {/* Popover for the "copy" URLs */}
       {
           horizontal: 'left',
         },
       }}
       >
-
+
       {
-        {/* {copySuccess && {copySuccess}} */}
+        {/* Snackbar for feedback */}
       {
   );
 };

-export default Example;
+export default ChannelsTable;
diff --git a/frontend/src/components/tables/StreamsTable.js b/frontend/src/components/tables/StreamsTable.js
index ba7766c1..a3591fcf 100644
--- a/frontend/src/components/tables/StreamsTable.js
+++ b/frontend/src/components/tables/StreamsTable.js
@@ -12,18 +12,21 @@ import {
   Button,
 } from '@mui/material';
 import useStreamsStore from '../../store/streams';
+import useChannelsStore from '../../store/channels'; // NEW: Import channels store
 import API from '../../api';
+// getAuthToken must be exported as a named export from api.js
+import { getAuthToken } from '../../api';
 import {
   Delete as DeleteIcon,
   Edit as EditIcon,
   Add as AddIcon,
 } from '@mui/icons-material';
 import { TableHelper } from '../../helpers';
-import utils from '../../utils';
 import StreamForm from '../forms/Stream';
 import usePlaylistsStore from '../../store/playlists';

-const Example = () => {
+const StreamsTable = () => {
   const [rowSelection, setRowSelection] = useState([]);
   const [stream, setStream] = useState(null);
   const [modalOpen, setModalOpen] = useState(false);
@@ -31,16 +34,9 @@ const Example = () => {
   const { playlists } = usePlaylistsStore();

   const columns = useMemo(
-    //column definitions...
     () => [
-      {
-        header: 'Name',
-        accessorKey: 'name',
-      },
-      {
-        header: 'Group',
-        accessorKey: 'group_name',
-      },
+      { header: 'Name', accessorKey: 'name' },
+      { header: 'Group', accessorKey: 'group_name' },
       {
         header: 'M3U',
         size: 100,
@@ -51,12 +47,11 @@ const Example = () => {
     [playlists]
   );

-  //optionally access the underlying virtualizer instance
   const rowVirtualizerInstanceRef = useRef(null);
-
   const [isLoading, setIsLoading] = useState(true);
   const [sorting, setSorting] = useState([]);

+  // Fallback: Individual creation (optional)
   const createChannelFromStream = async (stream) => {
     await API.createChannelFromStream({
       channel_name: stream.name,
@@ -65,9 +60,9 @@ const Example = () => {
     });
   };

-  // @TODO: bulk create is broken, returning a 404
+  // Bulk creation: create channels from selected streams in one API call
   const createChannelsFromStreams = async () => {
-    setIsLoading(true);
+    // Get all selected streams from the table
     const selected = table
       .getRowModel()
       .rows.filter((row) => row.getIsSelected());
@@ -109,7 +104,6 @@ const Example = () => {
   }, []);

   useEffect(() => {
-    //scroll to the top of the table when the sorting changes
     try {
       rowVirtualizerInstanceRef.current?.scrollToIndex?.(0);
     } catch (error) {
@@ -119,7 +113,6 @@ const Example = () => {
   const table = useMaterialReactTable({
     ...TableHelper.defaultProperties,
-
     columns,
     data: streams,
     enablePagination: false,
@@ -132,14 +125,14 @@ const Example = () => {
       sorting,
       rowSelection,
     },
-    rowVirtualizerInstanceRef, //optional
-    rowVirtualizerOptions: { overscan: 5 }, //optionally customize the row virtualizer
+    rowVirtualizerInstanceRef,
+    rowVirtualizerOptions: { overscan: 5 },
     enableRowActions: true,
     renderRowActions: ({ row }) => (
       <>
          editStream(row.original)}
          disabled={row.original.m3u_account}
          sx={{ p: 0 }}
        >
          deleteStream(row.original.id)}
          sx={{ p: 0 }}
        >
          createChannelFromStream(row.original)}
          sx={{ p: 0 }}
        >
@@ -166,46 +159,38 @@ const Example = () => {
     ),
     muiTableContainerProps: {
       sx: {
-        height: 'calc(100vh - 75px)', // Subtract padding to avoid cutoff
-        overflowY: 'auto', // Internal scrolling for the table
+        height: 'calc(100vh - 75px)',
+        overflowY: 'auto',
       },
     },
     renderTopToolbarCustomActions: ({ table }) => (
-
+
         Streams
          editStream()}
        >
-          {/* Small icon size */}
+
-          {/* Small icon size */}
+
@@ -214,16 +199,7 @@ const Example = () => {
   });

   return (
-
+
     {
   );
 };

-export default Example;
+export default StreamsTable;
diff --git a/frontend/src/pages/Guide.js b/frontend/src/pages/Guide.js
index c7de50b8..fca295c4 100644
--- a/frontend/src/pages/Guide.js
+++ b/frontend/src/pages/Guide.js
@@ -164,7 +164,7 @@ export default function TVChannelGuide({ startDate, endDate }) {
       return;
     }
     // Build a playable stream URL for that channel
-    const url = window.location.origin + '/output/stream/' + matched.id;
+    const url = window.location.origin + '/output/stream/' + matched.channel_number;
     showVideo(url);

     // Optionally close the modal