From 85835dbf085ed683e10ee1e03bffddaccd9997f9 Mon Sep 17 00:00:00 2001 From: dekzter Date: Mon, 14 Apr 2025 09:44:11 -0400 Subject: [PATCH 0001/1435] attempting to optimize virtualized rows --- .../src/components/tables/ChannelsTable.jsx | 327 ++++++++++-------- 1 file changed, 175 insertions(+), 152 deletions(-) diff --git a/frontend/src/components/tables/ChannelsTable.jsx b/frontend/src/components/tables/ChannelsTable.jsx index f75f9b53..6ba5ea9a 100644 --- a/frontend/src/components/tables/ChannelsTable.jsx +++ b/frontend/src/components/tables/ChannelsTable.jsx @@ -1,4 +1,10 @@ -import { useEffect, useMemo, useRef, useState, useCallback } from 'react'; +import React, { + useEffect, + useMemo, + useRef, + useState, + useCallback, +} from 'react'; import { MantineReactTable, useMantineReactTable } from 'mantine-react-table'; import useChannelsStore from '../../store/channels'; import { notifications } from '@mantine/notifications'; @@ -44,13 +50,12 @@ import { Group, useMantineTheme, Center, - Container, Switch, Menu, MultiSelect, } from '@mantine/core'; -const ChannelStreams = ({ channel, isExpanded }) => { +const ChannelStreams = React.memo(({ channel, isExpanded }) => { const channelStreams = useChannelsStore( (state) => state.channels[channel.id]?.streams ); @@ -180,13 +185,13 @@ const ChannelStreams = ({ channel, isExpanded }) => { ); -}; +}); const m3uUrlBase = `${window.location.protocol}//${window.location.host}/output/m3u`; const epgUrlBase = `${window.location.protocol}//${window.location.host}/output/epg`; const hdhrUrlBase = `${window.location.protocol}//${window.location.host}/hdhr`; -const CreateProfilePopover = ({}) => { +const CreateProfilePopover = React.memo(({}) => { const [opened, setOpened] = useState(false); const [name, setName] = useState(''); const theme = useMantineTheme(); @@ -241,9 +246,9 @@ const CreateProfilePopover = ({}) => { ); -}; +}); -const ChannelsTable = ({}) => { +const ChannelsTable = React.memo(({}) => { const { channels, isLoading: channelsLoading, @@ -326,39 +331,58 @@ const ChannelsTable = ({}) => { } }; + const enabledChannelSet = useMemo(() => { + return new Set( + selectedProfileChannels.filter((c) => c.enabled).map((c) => c.id) + ); + }, [selectedProfileChannels]); + + const EnabledHeaderSwitch = React.memo(({ isActive, toggle, disabled }) => ( + + )); + + const renderEnabledHeader = useCallback(() => { + if (Object.values(rowSelection).length === 0) { + return ; + } + + const handleToggle = () => { + toggleChannelEnabled( + channelsPageSelection.map((row) => row.id), + !channelsEnabledHeaderSwitch + ); + }; + + return ( + + ); + }, [ + rowSelection, + channelsPageSelection, + channelsEnabledHeaderSwitch, + selectedProfileId, + ]); + // Configure columns const columns = useMemo( () => [ { id: 'enabled', - Header: () => { - if (Object.values(rowSelection).length == 0) { - return ; - } - - return ( - { - console.log(channelsPageSelection); - toggleChannelEnabled( - channelsPageSelection.map((row) => row.id), - !channelsEnabledHeaderSwitch - ); - }} - disabled={selectedProfileId == '0'} - /> - ); - }, + Header: renderEnabledHeader, enableSorting: false, accessorFn: (row) => { - if (selectedProfileId == '0') { - return true; - } - - return selectedProfileChannels.find((channel) => row.id == channel.id) - .enabled; + return selectedProfileId == '0' + ? 
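+          // The default profile ('0') treats every channel as enabled;
+          // other profiles use the memoized enabledChannelSet above for an
+          // O(1) membership check instead of the removed per-row .find() scan.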
true + : enabledChannelSet.has(row.id); }, mantineTableHeadCellProps: { align: 'right', @@ -388,16 +412,21 @@ const ChannelsTable = ({}) => { padding: 0, }, }, - Cell: ({ row, cell }) => ( - { - toggleChannelEnabled([row.original.id], !cell.getValue()); - }} - disabled={selectedProfileId == '0'} - /> - ), + Cell: ({ row, cell }) => { + const memoizedCellValue = useMemo(() => cell.getValue(), [cell]); + const handleSwitchChange = useCallback(() => { + toggleChannelEnabled([row.original.id], !memoizedCellValue); + }, [memoizedCellValue, row.original.id, toggleChannelEnabled]); + + return ( + + ); + }, }, { header: '#', @@ -545,32 +574,6 @@ const ChannelsTable = ({}) => { { id: 'name', desc: false }, ]); - const editChannel = async (ch = null) => { - setChannel(ch); - setChannelModalOpen(true); - }; - - const deleteChannel = async (id) => { - setRowSelection([]); - if (channelsPageSelection.length > 0) { - return deleteChannels(); - } - await API.deleteChannel(id); - }; - - const createRecording = (channel) => { - setChannel(channel); - setRecordingModalOpen(true); - }; - - function handleWatchStream(channelNumber) { - let vidUrl = `/proxy/ts/stream/${channelNumber}`; - if (env_mode == 'dev') { - vidUrl = `${window.location.protocol}//${window.location.hostname}:5656${vidUrl}`; - } - showVideo(vidUrl); - } - // (Optional) bulk delete, but your endpoint is @TODO const deleteChannels = async () => { setIsLoading(true); @@ -744,6 +747,104 @@ const ChannelsTable = ({}) => { ); }; + const RowActions = React.memo(({ row }) => { + const editChannel = useCallback(() => { + setChannel(row.original); + setChannelModalOpen(true); + }, []); + + const deleteChannel = useCallback(async () => { + setRowSelection([]); + // if (channelsPageSelection.length > 0) { + // return deleteChannels(); + // } + await API.deleteChannel(row.id); + }, []); + + const createRecording = useCallback(() => { + setChannel(row); + setRecordingModalOpen(true); + }, []); + + const handleWatchStream = useCallback(() => { + let vidUrl = `/proxy/ts/stream/${row.uuid}`; + if (env_mode == 'dev') { + vidUrl = `${window.location.protocol}//${window.location.hostname}:5656${vidUrl}`; + } + showVideo(vidUrl); + }, []); + + return ( + +
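+      // The useCallback handlers above are created once per row, so this
+      // React.memo'd actions cluster only re-renders when its row changes.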
+ + + + + 0 && + !channelsPageSelection.map((row) => row.id).includes(row.id) + } + > + {channelsPageSelection.length === 0 ? ( + + ) : ( + + )} + + + + + + + {env_mode == 'dev' && ( + + + + + + + + + + } + > + Record + + + + )} +
+
+ ); + }); + const table = useMantineReactTable({ ...TableHelper.defaultProperties, columns, @@ -761,7 +862,7 @@ const ChannelsTable = ({}) => { rowSelection, }, rowVirtualizerInstanceRef, - rowVirtualizerOptions: { overscan: 5 }, + rowVirtualizerOptions: { overscan: 25 }, initialState: { density: 'compact', sorting: [ @@ -828,7 +929,7 @@ const ChannelsTable = ({}) => { style: { minWidth: '85px', maxWidth: '85px', - paddingRight: 40, + // paddingRight: 40, fontWeight: 'normal', color: 'rgb(207,207,207)', backgroundColor: '#3F3F46', @@ -839,7 +940,7 @@ const ChannelsTable = ({}) => { minWidth: '85px', maxWidth: '85px', paddingLeft: 0, - paddingRight: 10, + // paddingRight: 10, }, }, }, @@ -858,85 +959,7 @@ const ChannelsTable = ({}) => { renderDetailPanel: ({ row }) => ( ), - renderRowActions: ({ row }) => ( - -
- - { - editChannel(row.original); - }} - > - - - - - - deleteChannel(row.original.id)} - disabled={ - channelsPageSelection.length > 0 && - !channelsPageSelection - .map((row) => row.id) - .includes(row.original.id) - } - > - {channelsPageSelection.length === 0 ? ( - - ) : ( - - )} - - - - - handleWatchStream(row.original.uuid)} - > - - - - - {env_mode == 'dev' && ( - - - - - - - - - createRecording(row.original)} - leftSection={ -
- } - > - Record -
-
-
- )} -
-
- ), + renderRowActions: ({ row }) => , mantineTableContainerProps: { style: { height: 'calc(100vh - 110px)', @@ -1270,6 +1293,6 @@ const ChannelsTable = ({}) => { /> ); -}; +}); export default ChannelsTable; From eb48083ccec9fcd748e87f0532d0697c310f252d Mon Sep 17 00:00:00 2001 From: dekzter Date: Tue, 15 Apr 2025 09:03:24 -0400 Subject: [PATCH 0002/1435] channels pagination --- .../src/components/tables/ChannelsTable.jsx | 205 +++++++++++++++--- 1 file changed, 177 insertions(+), 28 deletions(-) diff --git a/frontend/src/components/tables/ChannelsTable.jsx b/frontend/src/components/tables/ChannelsTable.jsx index 6ba5ea9a..4a6bc014 100644 --- a/frontend/src/components/tables/ChannelsTable.jsx +++ b/frontend/src/components/tables/ChannelsTable.jsx @@ -12,7 +12,7 @@ import API from '../../api'; import ChannelForm from '../forms/Channel'; import RecordingForm from '../forms/Recording'; import { TableHelper } from '../../helpers'; -import { getDescendantProp } from '../../utils'; +import { getDescendantProp, useDebounce } from '../../utils'; import logo from '../../images/logo.png'; import useVideoStore from '../../store/useVideoStore'; import useSettingsStore from '../../store/settings'; @@ -53,6 +53,8 @@ import { Switch, Menu, MultiSelect, + Pagination, + NativeSelect, } from '@mantine/core'; const ChannelStreams = React.memo(({ channel, isExpanded }) => { @@ -191,7 +193,7 @@ const m3uUrlBase = `${window.location.protocol}//${window.location.host}/output/ const epgUrlBase = `${window.location.protocol}//${window.location.host}/output/epg`; const hdhrUrlBase = `${window.location.protocol}//${window.location.host}/hdhr`; -const CreateProfilePopover = React.memo(({}) => { +const CreateProfilePopover = React.memo(({ }) => { const [opened, setOpened] = useState(false); const [name, setName] = useState(''); const theme = useMantineTheme(); @@ -248,7 +250,7 @@ const CreateProfilePopover = React.memo(({}) => { ); }); -const ChannelsTable = React.memo(({}) => { +const ChannelsTable = React.memo(({ }) => { const { channels, isLoading: channelsLoading, @@ -275,6 +277,28 @@ const ChannelsTable = React.memo(({}) => { ); const [channelsEnabledHeaderSwitch, setChannelsEnabledHeaderSwitch] = useState(false); + const [initialDataCount, setInitialDataCount] = useState(null); + const [data, setData] = useState([]); + const [rowCount, setRowCount] = useState(0); + const [pageCount, setPageCount] = useState(0); + const [paginationString, setPaginationString] = useState(''); + const [selectedStreamIds, setSelectedStreamIds] = useState([]); + // const [allRowsSelected, setAllRowsSelected] = useState(false); + const [pagination, setPagination] = useState({ + pageIndex: 0, + pageSize: 250, + }); + const [filters, setFilters] = useState({ + name: '', + channel_group: '', + m3u_account: '', + }); + const debouncedFilters = useDebounce(filters, 500); + const [isLoading, setIsLoading] = useState(true); + const [sorting, setSorting] = useState([ + { id: 'channel_number', desc: false }, + { id: 'name', desc: false }, + ]); const [hdhrUrl, setHDHRUrl] = useState(hdhrUrlBase); const [epgUrl, setEPGUrl] = useState(epgUrlBase); @@ -322,6 +346,125 @@ const ChannelsTable = React.memo(({}) => { const m3uUrlRef = useRef(null); const epgUrlRef = useRef(null); + const fetchData = useCallback(async () => { + setIsLoading(true); + + const params = new URLSearchParams(); + params.append('page', pagination.pageIndex + 1); + params.append('page_size', pagination.pageSize); + + // Apply sorting + if (sorting.length > 0) { + const 
sortField = sorting[0].id; + const sortDirection = sorting[0].desc ? '-' : ''; + params.append('ordering', `${sortDirection}${sortField}`); + } + + // Apply debounced filters + Object.entries(debouncedFilters).forEach(([key, value]) => { + if (value) params.append(key, value); + }); + + try { + const result = await API.queryChannels(params); + setData(result.results); + setRowCount(result.count); + setPageCount(Math.ceil(result.count / pagination.pageSize)); + + // Calculate the starting and ending item indexes + const startItem = pagination.pageIndex * pagination.pageSize + 1; // +1 to start from 1, not 0 + const endItem = Math.min( + (pagination.pageIndex + 1) * pagination.pageSize, + result.count + ); + + if (initialDataCount === null) { + setInitialDataCount(result.count); + } + + // Generate the string + setPaginationString(`${startItem} to ${endItem} of ${result.count}`); + + const newSelection = {}; + result.results.forEach((item, index) => { + if (selectedStreamIds.includes(item.id)) { + newSelection[index] = true; + } + }); + + // ✅ Only update rowSelection if it's different + if (JSON.stringify(newSelection) !== JSON.stringify(rowSelection)) { + setRowSelection(newSelection); + } + } catch (error) { + console.error('Error fetching data:', error); + } + + setIsLoading(false); + }, [pagination, sorting, debouncedFilters]); + + useEffect(() => { + fetchData() + }, [fetchData]) + + const onRowSelectionChange = (updater) => { + setRowSelection((prevRowSelection) => { + const newRowSelection = + typeof updater === 'function' ? updater(prevRowSelection) : updater; + + const updatedSelected = new Set([...selectedStreamIds]); + table.getRowModel().rows.forEach((row) => { + if (newRowSelection[row.id] === undefined || !newRowSelection[row.id]) { + updatedSelected.delete(row.original.id); + } else { + updatedSelected.add(row.original.id); + } + }); + setSelectedStreamIds([...updatedSelected]); + + return newRowSelection; + }); + }; + + const onSelectAllChange = async (e) => { + const selectAll = e.target.checked; + if (selectAll) { + // Get all stream IDs for current view + const params = new URLSearchParams(); + Object.entries(debouncedFilters).forEach(([key, value]) => { + if (value) params.append(key, value); + }); + const ids = await API.getAllStreamIds(params); + setSelectedStreamIds(ids); + } else { + setSelectedStreamIds([]); + } + + const newSelection = {}; + table.getRowModel().rows.forEach((item, index) => { + newSelection[index] = selectAll; + }); + setRowSelection(newSelection); + }; + + const onPageSizeChange = (e) => { + setPagination({ + ...pagination, + pageSize: e.target.value, + }); + }; + + const onPageIndexChange = (pageIndex) => { + if (!pageIndex || pageIndex > pageCount) { + return; + } + + setPagination({ + ...pagination, + pageIndex: pageIndex - 1, + }); + }; + const toggleChannelEnabled = async (channelIds, enabled) => { if (channelIds.length == 1) { await API.updateProfileChannel(channelIds[0], selectedProfileId, enabled); @@ -565,15 +708,6 @@ const ChannelsTable = React.memo(({}) => { ] ); - // Access the row virtualizer instance (optional) - const rowVirtualizerInstanceRef = useRef(null); - - const [isLoading, setIsLoading] = useState(true); - const [sorting, setSorting] = useState([ - { id: 'channel_number', desc: false }, - { id: 'name', desc: false }, - ]); - // (Optional) bulk delete, but your endpoint is @TODO const deleteChannels = async () => { setIsLoading(true); @@ -645,15 +779,6 @@ const ChannelsTable = React.memo(({}) => { } }, []); - useEffect(() => 
{ - // Scroll to the top of the table when sorting changes - try { - rowVirtualizerInstanceRef.current?.scrollToIndex?.(0); - } catch (error) { - console.error(error); - } - }, [sorting]); - const handleCopy = async (textToCopy, ref) => { try { await navigator.clipboard.writeText(textToCopy); @@ -848,21 +973,45 @@ const ChannelsTable = React.memo(({}) => { const table = useMantineReactTable({ ...TableHelper.defaultProperties, columns, - data: filteredData, - enablePagination: false, + data, + enablePagination: true, + manualPagination: true, enableColumnActions: false, - enableRowVirtualization: true, enableRowSelection: true, renderTopToolbar: false, - onRowSelectionChange: setRowSelection, + onRowSelectionChange: onRowSelectionChange, onSortingChange: setSorting, state: { isLoading: isLoading || channelsLoading, sorting, rowSelection, }, - rowVirtualizerInstanceRef, - rowVirtualizerOptions: { overscan: 25 }, + enableBottomToolbar: true, + renderBottomToolbar: ({ table }) => ( + + Page Size + + + {paginationString} + + ), initialState: { density: 'compact', sorting: [ @@ -962,7 +1111,7 @@ const ChannelsTable = React.memo(({}) => { renderRowActions: ({ row }) => , mantineTableContainerProps: { style: { - height: 'calc(100vh - 110px)', + height: 'calc(100vh - 150px)', overflowY: 'auto', // margin: 5, }, From af638326e1cb35f4ff484b50f40fcdae9a94926e Mon Sep 17 00:00:00 2001 From: dekzter Date: Tue, 15 Apr 2025 13:48:06 -0400 Subject: [PATCH 0003/1435] attempt at table rewrite for efficient virtualized table --- frontend/package-lock.json | 59 +- frontend/package.json | 2 + frontend/src/WebSocket.jsx | 2 +- frontend/src/api.js | 4 +- .../src/components/tables/ChannelsTable.jsx | 1034 ++++++++--------- .../ChannelsTable/ChannelsTableBody.jsx | 86 ++ .../tables/ChannelsTable/ChannelsTableRow.jsx | 61 + .../ChannelsTable/EmptyChannelsTableGuide.jsx | 78 ++ frontend/src/store/auth.jsx | 2 +- frontend/src/store/channels.jsx | 124 +- 10 files changed, 814 insertions(+), 638 deletions(-) create mode 100644 frontend/src/components/tables/ChannelsTable/ChannelsTableBody.jsx create mode 100644 frontend/src/components/tables/ChannelsTable/ChannelsTableRow.jsx create mode 100644 frontend/src/components/tables/ChannelsTable/EmptyChannelsTableGuide.jsx diff --git a/frontend/package-lock.json b/frontend/package-lock.json index 4f1856f1..9f680d3e 100644 --- a/frontend/package-lock.json +++ b/frontend/package-lock.json @@ -16,6 +16,7 @@ "@mantine/hooks": "^7.17.2", "@mantine/notifications": "^7.17.2", "@tabler/icons-react": "^3.31.0", + "@tanstack/react-table": "^8.21.2", "allotment": "^1.20.3", "axios": "^1.8.2", "clsx": "^2.1.1", @@ -31,6 +32,7 @@ "react-draggable": "^4.4.6", "react-pro-sidebar": "^1.1.0", "react-router-dom": "^7.3.0", + "react-virtualized-auto-sizer": "^1.0.26", "react-window": "^1.8.11", "recharts": "^2.15.1", "video.js": "^8.21.0", @@ -1741,12 +1743,12 @@ } }, "node_modules/@tanstack/react-table": { - "version": "8.20.5", - "resolved": "https://registry.npmjs.org/@tanstack/react-table/-/react-table-8.20.5.tgz", - "integrity": "sha512-WEHopKw3znbUZ61s9i0+i9g8drmDo6asTWbrQh8Us63DAk/M0FkmIqERew6P71HI75ksZ2Pxyuf4vvKh9rAkiA==", + "version": "8.21.2", + "resolved": "https://registry.npmjs.org/@tanstack/react-table/-/react-table-8.21.2.tgz", + "integrity": "sha512-11tNlEDTdIhMJba2RBH+ecJ9l1zgS2kjmexDPAraulc8jeNA4xocSNeyzextT0XJyASil4XsCYlJmf5jEWAtYg==", "license": "MIT", "dependencies": { - "@tanstack/table-core": "8.20.5" + "@tanstack/table-core": "8.21.2" }, "engines": { "node": ">=12" @@ 
-1778,9 +1780,9 @@ } }, "node_modules/@tanstack/table-core": { - "version": "8.20.5", - "resolved": "https://registry.npmjs.org/@tanstack/table-core/-/table-core-8.20.5.tgz", - "integrity": "sha512-P9dF7XbibHph2PFRz8gfBKEXEY/HJPOhym8CHmjF8y3q5mWpKx9xtZapXQUWCgkqvsK0R46Azuz+VaxD4Xl+Tg==", + "version": "8.21.2", + "resolved": "https://registry.npmjs.org/@tanstack/table-core/-/table-core-8.21.2.tgz", + "integrity": "sha512-uvXk/U4cBiFMxt+p9/G7yUWI/UbHYbyghLCjlpWZ3mLeIZiUBSKcUnw9UnKkdRz7Z/N4UBuFLWQdJCjUe7HjvA==", "license": "MIT", "engines": { "node": ">=12" @@ -3452,6 +3454,39 @@ "react-dom": ">=18.0" } }, + "node_modules/mantine-react-table/node_modules/@tanstack/react-table": { + "version": "8.20.5", + "resolved": "https://registry.npmjs.org/@tanstack/react-table/-/react-table-8.20.5.tgz", + "integrity": "sha512-WEHopKw3znbUZ61s9i0+i9g8drmDo6asTWbrQh8Us63DAk/M0FkmIqERew6P71HI75ksZ2Pxyuf4vvKh9rAkiA==", + "license": "MIT", + "dependencies": { + "@tanstack/table-core": "8.20.5" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/tannerlinsley" + }, + "peerDependencies": { + "react": ">=16.8", + "react-dom": ">=16.8" + } + }, + "node_modules/mantine-react-table/node_modules/@tanstack/table-core": { + "version": "8.20.5", + "resolved": "https://registry.npmjs.org/@tanstack/table-core/-/table-core-8.20.5.tgz", + "integrity": "sha512-P9dF7XbibHph2PFRz8gfBKEXEY/HJPOhym8CHmjF8y3q5mWpKx9xtZapXQUWCgkqvsK0R46Azuz+VaxD4Xl+Tg==", + "license": "MIT", + "engines": { + "node": ">=12" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/tannerlinsley" + } + }, "node_modules/math-intrinsics": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/math-intrinsics/-/math-intrinsics-1.1.0.tgz", @@ -4081,6 +4116,16 @@ "react-dom": ">=16.6.0" } }, + "node_modules/react-virtualized-auto-sizer": { + "version": "1.0.26", + "resolved": "https://registry.npmjs.org/react-virtualized-auto-sizer/-/react-virtualized-auto-sizer-1.0.26.tgz", + "integrity": "sha512-CblNyiNVw2o+hsa5/49NH2ogGxZ+t+3aweRvNSq7TVjDIlwk7ir4lencEg5HxHeSzwNarSkNkiu0qJSOXtxm5A==", + "license": "MIT", + "peerDependencies": { + "react": "^15.3.0 || ^16.0.0-alpha || ^17.0.0 || ^18.0.0 || ^19.0.0", + "react-dom": "^15.3.0 || ^16.0.0-alpha || ^17.0.0 || ^18.0.0 || ^19.0.0" + } + }, "node_modules/react-window": { "version": "1.8.11", "resolved": "https://registry.npmjs.org/react-window/-/react-window-1.8.11.tgz", diff --git a/frontend/package.json b/frontend/package.json index 5677558b..c0d6ced3 100644 --- a/frontend/package.json +++ b/frontend/package.json @@ -18,6 +18,7 @@ "@mantine/hooks": "^7.17.2", "@mantine/notifications": "^7.17.2", "@tabler/icons-react": "^3.31.0", + "@tanstack/react-table": "^8.21.2", "allotment": "^1.20.3", "axios": "^1.8.2", "clsx": "^2.1.1", @@ -33,6 +34,7 @@ "react-draggable": "^4.4.6", "react-pro-sidebar": "^1.1.0", "react-router-dom": "^7.3.0", + "react-virtualized-auto-sizer": "^1.0.26", "react-window": "^1.8.11", "recharts": "^2.15.1", "video.js": "^8.21.0", diff --git a/frontend/src/WebSocket.jsx b/frontend/src/WebSocket.jsx index 1d33565f..f9ab1cc3 100644 --- a/frontend/src/WebSocket.jsx +++ b/frontend/src/WebSocket.jsx @@ -122,7 +122,7 @@ export const WebsocketProvider = ({ children }) => { message: 'EPG match is complete!', color: 'green.5', }); - fetchChannels(); + // fetchChannels(); fetchEPGData(); break; diff --git a/frontend/src/api.js b/frontend/src/api.js index 92dc84e8..411a7f26 100644 --- a/frontend/src/api.js +++ 
b/frontend/src/api.js @@ -320,7 +320,7 @@ export default class API { }); // Optionally refesh the channel list in Zustand - await useChannelsStore.getState().fetchChannels(); + // await useChannelsStore.getState().fetchChannels(); return response; } catch (e) { @@ -604,7 +604,7 @@ export default class API { usePlaylistsStore.getState().removePlaylists([id]); // @TODO: MIGHT need to optimize this later if someone has thousands of channels // but I'm feeling laze right now - useChannelsStore.getState().fetchChannels(); + // useChannelsStore.getState().fetchChannels(); } catch (e) { errorNotification(`Failed to delete playlist ${id}`, e); } diff --git a/frontend/src/components/tables/ChannelsTable.jsx b/frontend/src/components/tables/ChannelsTable.jsx index 4a6bc014..d3d4b868 100644 --- a/frontend/src/components/tables/ChannelsTable.jsx +++ b/frontend/src/components/tables/ChannelsTable.jsx @@ -5,7 +5,6 @@ import React, { useState, useCallback, } from 'react'; -import { MantineReactTable, useMantineReactTable } from 'mantine-react-table'; import useChannelsStore from '../../store/channels'; import { notifications } from '@mantine/notifications'; import API from '../../api'; @@ -55,7 +54,21 @@ import { MultiSelect, Pagination, NativeSelect, + Checkbox, + Table, } from '@mantine/core'; +import AutoSizer from 'react-virtualized-auto-sizer' +import { FixedSizeList as List } from 'react-window' +import ChannelsTableRow from './ChannelsTable/ChannelsTableRow'; +import ChannelsTableBody from './ChannelsTable/ChannelsTableBody'; +import { + flexRender, + useReactTable, + getCoreRowModel, + getSortedRowModel, + getPaginationRowModel, +} from '@tanstack/react-table' +import { notUndefined, useVirtualizer } from '@tanstack/react-virtual' const ChannelStreams = React.memo(({ channel, isExpanded }) => { const channelStreams = useChannelsStore( @@ -71,7 +84,7 @@ const ChannelStreams = React.memo(({ channel, isExpanded }) => { }); }; - const channelStreamsTable = useMantineReactTable({ + const channelStreamsTable = useReactTable({ ...TableHelper.defaultProperties, data: channelStreams, columns: useMemo( @@ -250,6 +263,160 @@ const CreateProfilePopover = React.memo(({ }) => { ); }); +const ChannelEnabledCell = ({ cell, row, toggleChannelEnabled, selectedProfileId }) => { + const handleSwitchChange = useCallback(() => { + toggleChannelEnabled([row.original.id], !cell.getValue()); + }, [cell.getValue(), row.original.id, toggleChannelEnabled]); + + return ( +
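+    // Profile-scoped enable toggle: locked while the default profile ('0')
+    // is selected, since that profile always includes every channel.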
+    <Switch
+      size="xs"
+      checked={cell.getValue()}
+      onChange={handleSwitchChange}
+      disabled={selectedProfileId == '0'}
+    />
+ ); +} + +const ChannelLogoCell = React.memo(({ cell }) => { + return ( +
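+    // Falls back to the bundled placeholder logo when the channel has no
+    // logo of its own.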
+      <img
+        src={cell.getValue() || logo}
+        alt="channel logo"
+      />
+ ) +}) + +const ChannelSelectCell = React.memo(({ + checked, + disabled, + indeterminate, + onChange, +}) => { + return ( +
+      <Center style={{ width: '100%' }}>
+        <Checkbox
+          size="xs"
+          checked={checked}
+          disabled={disabled}
+          indeterminate={indeterminate}
+          onChange={onChange}
+        />
+      </Center>
+ ) +}) + +const RowActions = React.memo(({ + row, + editChannel, + deleteChannel, + handleWatchStream, + createRecording, +}) => { + const theme = useMantineTheme() + const { channelsPageSelection } = useChannelsStore() + + const onEdit = useCallback(() => { + editChannel(row.original) + }, [row]); + + const onDelete = useCallback(async () => { + deleteChannel(row.original) + }, [row]); + + const onRecord = useCallback(() => { + createRecording(row.original) + }, [row]); + + const onPreview = useCallback(() => { + handleWatchStream(row.original) + }, [row]); + + return ( + +
+ + + + + 0 && + !channelsPageSelection.map((row) => row.id).includes(row.id) + } + > + {channelsPageSelection.length === 0 ? ( + + ) : ( + + )} + + + + + + + {/* {env_mode == 'dev' && ( + + + + + + + + + + } + > + Record + + + + )} */} +
+
+ ); +}); + const ChannelsTable = React.memo(({ }) => { const { channels, @@ -277,17 +444,17 @@ const ChannelsTable = React.memo(({ }) => { ); const [channelsEnabledHeaderSwitch, setChannelsEnabledHeaderSwitch] = useState(false); - const [initialDataCount, setInitialDataCount] = useState(null); - const [data, setData] = useState([]); + const [data, setData] = useState([]); // Holds fetched data + const [selectedRowIds, setSelectedRowIds] = useState([]); const [rowCount, setRowCount] = useState(0); const [pageCount, setPageCount] = useState(0); const [paginationString, setPaginationString] = useState(''); - const [selectedStreamIds, setSelectedStreamIds] = useState([]); - // const [allRowsSelected, setAllRowsSelected] = useState(false); const [pagination, setPagination] = useState({ pageIndex: 0, pageSize: 250, }); + const [groupOptions, setGroupOptions] = useState([]); + const [initialDataCount, setInitialDataCount] = useState(null); const [filters, setFilters] = useState({ name: '', channel_group: '', @@ -295,6 +462,7 @@ const ChannelsTable = React.memo(({ }) => { }); const debouncedFilters = useDebounce(filters, 500); const [isLoading, setIsLoading] = useState(true); + const [selectedChannelIds, setSelectedChannelIds] = useState([]); const [sorting, setSorting] = useState([ { id: 'channel_number', desc: false }, { id: 'name', desc: false }, @@ -326,19 +494,23 @@ const ChannelsTable = React.memo(({ }) => { useEffect(() => { setChannelGroupOptions([ ...new Set( - Object.values(channels).map((channel) => channel.channel_group?.name) + Object.values(data).map((channel) => channel.channel_group?.name) ), ]); - }, [channels]); + }, [data]); - const handleFilterChange = (columnId, value) => { - setFilterValues((prev) => ({ + const handleFilterChange = (e) => { + const { name, value } = e.target; + setFilters((prev) => ({ ...prev, - [columnId]: Array.isArray(value) - ? value - : value - ? value.toLowerCase() - : '', + [name]: value, + })); + }; + + const handleGroupChange = (value) => { + setFilters((prev) => ({ + ...prev, + channel_group: value ? value : '', })); }; @@ -387,7 +559,7 @@ const ChannelsTable = React.memo(({ }) => { const newSelection = {}; result.results.forEach((item, index) => { - if (selectedStreamIds.includes(item.id)) { + if (selectedChannelIds.includes(item.id)) { newSelection[index] = true; } }); @@ -412,7 +584,7 @@ const ChannelsTable = React.memo(({ }) => { const newRowSelection = typeof updater === 'function' ? 
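      // rowSelection is keyed by row index within the current page, while
      // selectedChannelIds tracks channel ids across pages; the block below
      // keeps the two in sync regardless of which form the updater takes.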
updater(prevRowSelection) : updater; - const updatedSelected = new Set([...selectedStreamIds]); + const updatedSelected = new Set([...selectedChannelIds]); table.getRowModel().rows.forEach((row) => { if (newRowSelection[row.id] === undefined || !newRowSelection[row.id]) { updatedSelected.delete(row.original.id); @@ -420,7 +592,7 @@ const ChannelsTable = React.memo(({ }) => { updatedSelected.add(row.original.id); } }); - setSelectedStreamIds([...updatedSelected]); + setSelectedChannelIds([...updatedSelected]); return newRowSelection; }); @@ -435,9 +607,9 @@ const ChannelsTable = React.memo(({ }) => { if (value) params.append(key, value); }); const ids = await API.getAllStreamIds(params); - setSelectedStreamIds(ids); + setSelectedChannelIds(ids); } else { - setSelectedStreamIds([]); + setSelectedChannelIds([]); } const newSelection = {}; @@ -489,9 +661,13 @@ const ChannelsTable = React.memo(({ }) => { /> )); - const renderEnabledHeader = useCallback(() => { + const renderEnabledHeader = useCallback(({ header }) => { if (Object.values(rowSelection).length === 0) { - return ; + return ( +
+ +
+ ); } const handleToggle = () => { @@ -518,109 +694,82 @@ const ChannelsTable = React.memo(({ }) => { // Configure columns const columns = useMemo( () => [ + { + id: 'select', + size: 20, + meta: { + minWidth: 20, + maxWidth: 20, + }, + header: ({ table }) => ( +
+ +
+ ), + cell: ({ row }) => ( + + ), + }, { id: 'enabled', - Header: renderEnabledHeader, - enableSorting: false, + size: 32, + meta: { + minWidth: 32, + maxWidth: 32, + }, + header: renderEnabledHeader, accessorFn: (row) => { return selectedProfileId == '0' ? true : enabledChannelSet.has(row.id); }, - mantineTableHeadCellProps: { - align: 'right', - style: { - backgroundColor: '#3F3F46', - width: '40px', - minWidth: '40px', - maxWidth: '40px', - // // minWidth: '20px', - // // width: '50px !important', - // // justifyContent: 'center', - padding: 0, - // // paddingLeft: 8, - // // paddingRight: 0, - }, - }, - mantineTableBodyCellProps: { - align: 'right', - style: { - width: '40px', - minWidth: '40px', - maxWidth: '40px', - // // minWidth: '20px', - // // justifyContent: 'center', - // // paddingLeft: 0, - // // paddingRight: 0, - padding: 0, - }, - }, - Cell: ({ row, cell }) => { - const memoizedCellValue = useMemo(() => cell.getValue(), [cell]); - const handleSwitchChange = useCallback(() => { - toggleChannelEnabled([row.original.id], !memoizedCellValue); - }, [memoizedCellValue, row.original.id, toggleChannelEnabled]); - - return ( - - ); - }, + cell: ({ row, cell }) => ( + + ), }, { - header: '#', - size: 50, - maxSize: 50, + size: 26, + maxSize: 26, accessorKey: 'channel_number', - sortingFn: (a, b, columnId) => { - return ( - parseInt(a.original.channel_number) - - parseInt(b.original.channel_number) - ); - }, - mantineTableHeadCellProps: { - align: 'right', - // // style: { - // // backgroundColor: '#3F3F46', - // // // minWidth: '20px', - // // // justifyContent: 'center', - // // // paddingLeft: 15, - // // paddingRight: 0, - // // }, - }, - mantineTableBodyCellProps: { - align: 'right', - // // style: { - // // minWidth: '20px', - // // // justifyContent: 'center', - // // paddingLeft: 0, - // // paddingRight: 0, - // // }, + header: ({ header }) => ( +
#
+ ), + meta: { + align: 'right' }, + // cell: ({ cell }) => ( + // + // {cell.getValue()} + // + // ) }, { - header: 'Name', accessorKey: 'name', - Header: ({ column }) => ( + header: ({ column }) => ( { - e.stopPropagation(); - handleFilterChange(column.id, e.target.value); - }} + onClick={(e) => e.stopPropagation()} + onChange={handleFilterChange} size="xs" variant="unstyled" className="table-input-header" - onClick={(e) => e.stopPropagation()} /> ), - Cell: ({ cell }) => ( + cell: ({ cell }) => (
{ ), }, { - header: 'Group', accessorKey: 'channel_group.name', accessorFn: (row) => row.channel_group?.name || '', - Cell: ({ cell }) => ( + cell: ({ cell }) => (
{ {cell.getValue()}
), - Header: ({ column }) => ( + header: ({ column }) => ( e.stopPropagation()}> { - handleFilterChange(column.id, value); - }} + onChange={handleGroupChange} data={channelGroupOptions} variant="unstyled" className="table-input-header custom-multiselect" @@ -675,27 +821,23 @@ const ChannelsTable = React.memo(({ }) => { maxWidth: '75px', }, }, - Cell: ({ cell }) => ( - - channel logo - + cell: ({ cell }) => ( + ), }, + { + header: 'Actions', + size: 40, + cell: ({ row }) => ( + + ) + } ], [ channelGroupOptions, @@ -703,8 +845,7 @@ const ChannelsTable = React.memo(({ }) => { selectedProfile, selectedProfileChannels, rowSelection, - channelsPageSelection, - channelsEnabledHeaderSwitch, + // channelsEnabledHeaderSwitch, ] ); @@ -716,6 +857,7 @@ const ChannelsTable = React.memo(({ }) => { .rows.filter((row) => row.getIsSelected()); await API.deleteChannels(selected.map((row) => row.original.id)); + fetchData(); setIsLoading(false); }; @@ -739,7 +881,7 @@ const ChannelsTable = React.memo(({ }) => { }); // Refresh the channel list - await fetchChannels(); + // await fetchChannels(); } catch (err) { console.error(err); notifications.show({ @@ -811,41 +953,26 @@ const ChannelsTable = React.memo(({ }) => { handleCopy(hdhrUrl, hdhrUrlRef); }; + // useEffect(() => { + // const selectedRows = table + // .getSelectedRowModel() + // .rows.map((row) => row.original); + // setChannelsPageSelection(selectedRows); + + // if (selectedProfileId != '0') { + // setChannelsEnabledHeaderSwitch( + // selectedRows.filter( + // (row) => + // selectedProfileChannels.find((channel) => row.id == channel.id) + // .enabled + // ).length == selectedRows.length + // ); + // } + // }, [rowSelection]) + useEffect(() => { - const selectedRows = table - .getSelectedRowModel() - .rows.map((row) => row.original); - setChannelsPageSelection(selectedRows); - - if (selectedProfileId != '0') { - setChannelsEnabledHeaderSwitch( - selectedRows.filter( - (row) => - selectedProfileChannels.find((channel) => row.id == channel.id) - .enabled - ).length == selectedRows.length - ); - } - }, [rowSelection]); - - const filteredData = Object.values(channels).filter((row) => - columns.every(({ accessorKey }) => { - if (!accessorKey) { - return true; - } - - const filterValue = filterValues[accessorKey]; - const rowValue = getDescendantProp(row, accessorKey); - - if (Array.isArray(filterValue) && filterValue.length != 0) { - return filterValue.includes(rowValue); - } else if (filterValue) { - return rowValue?.toLowerCase().includes(filterValues[accessorKey]); - } - - return true; - }) - ); + fetchData(); + }, [fetchData]); const deleteProfile = async (id) => { await API.deleteChannelProfile(id); @@ -872,257 +999,71 @@ const ChannelsTable = React.memo(({ }) => { ); }; - const RowActions = React.memo(({ row }) => { - const editChannel = useCallback(() => { - setChannel(row.original); - setChannelModalOpen(true); - }, []); + const editChannel = useCallback((row) => { + setChannel(row.original); + setChannelModalOpen(true); + }, []); - const deleteChannel = useCallback(async () => { - setRowSelection([]); - // if (channelsPageSelection.length > 0) { - // return deleteChannels(); - // } - await API.deleteChannel(row.id); - }, []); + const deleteChannel = useCallback(async (row) => { + console.log(row) + setRowSelection([]); + // if (channelsPageSelection.length > 0) { + // return deleteChannels(); + // } + await API.deleteChannel(row.id); + }, []); - const createRecording = useCallback(() => { - setChannel(row); - setRecordingModalOpen(true); - }, 
[]); + const createRecording = useCallback((row) => { + setChannel(row); + setRecordingModalOpen(true); + }, []); - const handleWatchStream = useCallback(() => { - let vidUrl = `/proxy/ts/stream/${row.uuid}`; - if (env_mode == 'dev') { - vidUrl = `${window.location.protocol}//${window.location.hostname}:5656${vidUrl}`; - } - showVideo(vidUrl); - }, []); + const handleWatchStream = useCallback((row) => { + let vidUrl = `/proxy/ts/stream/${row.uuid}`; + if (env_mode == 'dev') { + vidUrl = `${window.location.protocol}//${window.location.hostname}:5656${vidUrl}`; + } + showVideo(vidUrl); + }, []); - return ( - -
- - - - - 0 && - !channelsPageSelection.map((row) => row.id).includes(row.id) - } - > - {channelsPageSelection.length === 0 ? ( - - ) : ( - - )} - - - - - - - {env_mode == 'dev' && ( - - - - - - - - -
- } - > - Record - - - - )} - - - ); - }); - - const table = useMantineReactTable({ - ...TableHelper.defaultProperties, - columns, + const table = useReactTable({ data, - enablePagination: true, - manualPagination: true, - enableColumnActions: false, + columns, + // filterFns: {}, + getCoreRowModel: getCoreRowModel(), + getSortedRowModel: getSortedRowModel(), + // getPaginationRowModel: getPaginationRowModel(), + // manualPagination: true, enableRowSelection: true, - renderTopToolbar: false, - onRowSelectionChange: onRowSelectionChange, - onSortingChange: setSorting, - state: { - isLoading: isLoading || channelsLoading, - sorting, - rowSelection, - }, - enableBottomToolbar: true, - renderBottomToolbar: ({ table }) => ( - - Page Size - - - {paginationString} - - ), - initialState: { - density: 'compact', - sorting: [ - { - id: 'channel_number', - desc: false, - }, - ], - }, - enableRowActions: true, - enableExpandAll: false, - displayColumnDefOptions: { - 'mrt-row-select': { - size: 10, - maxSize: 10, - mantineTableHeadCellProps: { - align: 'right', - style: { - paddding: 0, - // paddingLeft: 7, - width: '20px', - minWidth: '20px', - backgroundColor: '#3F3F46', - }, - }, - mantineTableBodyCellProps: { - align: 'right', - style: { - paddingLeft: 0, - width: '20px', - minWidth: '20px', - }, - }, - }, - 'mrt-row-expand': { - size: 20, - maxSize: 20, - header: '', - mantineTableHeadCellProps: { - style: { - padding: 0, - paddingLeft: 2, - width: '20px', - minWidth: '20px', - maxWidth: '20px', - backgroundColor: '#3F3F46', - }, - }, - mantineTableBodyCellProps: { - style: { - padding: 0, - paddingLeft: 2, - width: '20px', - minWidth: '20px', - maxWidth: '20px', - }, - }, - }, - 'mrt-row-actions': { - size: 85, - maxWidth: 85, - mantineTableHeadCellProps: { - align: 'center', - style: { - minWidth: '85px', - maxWidth: '85px', - // paddingRight: 40, - fontWeight: 'normal', - color: 'rgb(207,207,207)', - backgroundColor: '#3F3F46', - }, - }, - mantineTableBodyCellProps: { - style: { - minWidth: '85px', - maxWidth: '85px', - paddingLeft: 0, - // paddingRight: 10, - }, - }, - }, - }, - mantineExpandButtonProps: ({ row, table }) => ({ - onClick: () => { - setRowSelection({ [row.index]: true }); - table.setExpanded({ [row.id]: !row.getIsExpanded() }); - }, - size: 'xs', - style: { - transform: row.getIsExpanded() ? 'rotate(180deg)' : 'rotate(-90deg)', - transition: 'transform 0.2s', - }, - }), - renderDetailPanel: ({ row }) => ( - - ), - renderRowActions: ({ row }) => , - mantineTableContainerProps: { - style: { - height: 'calc(100vh - 150px)', - overflowY: 'auto', - // margin: 5, - }, - }, - }); + // debugTable: true, + // debugHeaders: true, + // debugColumns: false, + }) + + const { rows } = table.getRowModel() + + const virtualizerRef = useRef(null) + const virtualizer = useVirtualizer({ + count: rows.length, + getScrollElement: () => virtualizerRef.current, + estimateSize: () => 21, + overscan: 20, + }) + const items = virtualizer.getVirtualItems() + + const [before, after] = + items.length > 0 + ? 
[ + notUndefined(items[0]).start - virtualizer.options.scrollMargin, + virtualizer.getTotalSize() - notUndefined(items[items.length - 1]).end + ] + : [0, 0]; return ( {/* Header Row: outside the Paper */} { {/* Paper container: contains top toolbar and table (or ghost state) */} - - {/* Top toolbar with Remove, Assign, Auto-match, and Add buttons */} - - - ({ + label: profile.name, + value: `${profile.id}`, + }))} + renderOption={renderProfileOption} + /> - - - - - - - - - - - - - - - + + + - {/* Table or ghost empty state inside Paper */} - - {Object.keys(channels).length === 0 && ( - + + + + + + + + + + + + - - - -
- -
-
- )} + Add + +
- {Object.keys(channels).length > 0 && ( - + + + {/* Table or ghost empty state inside Paper */} + + {initialDataCount === 0 && ( + )} - + + + + {initialDataCount > 0 && ( + + + + {table.getHeaderGroups().map(headerGroup => ( + + {headerGroup.headers.map(header => { + return ( + + {flexRender(header.column.columnDef.header, header.getContext())} + + ) + })} + + ))} + + +
+
+ )} { isOpen={recordingModalOpen} onClose={closeRecordingForm} /> - + ); }); diff --git a/frontend/src/components/tables/ChannelsTable/ChannelsTableBody.jsx b/frontend/src/components/tables/ChannelsTable/ChannelsTableBody.jsx new file mode 100644 index 00000000..2f50cb23 --- /dev/null +++ b/frontend/src/components/tables/ChannelsTable/ChannelsTableBody.jsx @@ -0,0 +1,86 @@ +// HeadlessChannelsTable.jsx +import React, { useMemo, useState, useCallback, useRef } from 'react'; +import { FixedSizeList as List } from 'react-window'; +import AutoSizer from 'react-virtualized-auto-sizer'; +import { + useReactTable, + getCoreRowModel, + getSortedRowModel, + flexRender, + getExpandedRowModel, +} from '@tanstack/react-table'; +import { + Table, + Box, + Checkbox, + ActionIcon, + ScrollArea, +} from '@mantine/core'; +import { ChevronRight, ChevronDown } from 'lucide-react'; +import ChannelsTableRow from './ChannelsTableRow'; +import { useVirtualizer } from '@tanstack/react-virtual' + +const ChannelsTableBody = ({ rows, height, onEdit, onDelete, onPreview, onRecord, virtualizedItems }) => { + const rowHeight = 48; + + // return ( + // + // + // {({ height }) => ( + // + // {({ index, style }) => { + // const row = rows[index]; + // return ( + // + // + // {row.getIsExpanded() && } + // + // ); + // }} + // + // )} + // + // + // ); + + return ( + + {virtualizedItems.map((virtualRow, index) => { + const row = rows[virtualRow.index] + return ( + + ); + })} + + ); +}; + +export default ChannelsTableBody; diff --git a/frontend/src/components/tables/ChannelsTable/ChannelsTableRow.jsx b/frontend/src/components/tables/ChannelsTable/ChannelsTableRow.jsx new file mode 100644 index 00000000..94466e13 --- /dev/null +++ b/frontend/src/components/tables/ChannelsTable/ChannelsTableRow.jsx @@ -0,0 +1,61 @@ +// HeadlessChannelsTable.jsx +import React, { useMemo, useState, useCallback } from 'react'; +import { FixedSizeList as List } from 'react-window'; +import AutoSizer from 'react-virtualized-auto-sizer'; +import { + useReactTable, + getCoreRowModel, + getSortedRowModel, + flexRender, + getExpandedRowModel, +} from '@tanstack/react-table'; +import { + Table, + Box, + Checkbox, + ActionIcon, + ScrollArea, + Center, + useMantineTheme, +} from '@mantine/core'; +import { ChevronRight, ChevronDown } from 'lucide-react'; +import useSettingsStore from '../../../store/settings'; +import useChannelsStore from '../../../store/channels'; + +const ExpandIcon = ({ row, toggle }) => ( + + {row.getIsExpanded() ? : } + +); + +const ChannelsTableRow = ({ row, virtualRow, index, style, onEdit, onDelete, onPreview, onRecord }) => { + return ( + + {row.getVisibleCells().map(cell => { + return ( + + {flexRender( + cell.column.columnDef.cell, + cell.getContext() + )} + + ) + })} + + ) +}; + +export default ChannelsTableRow diff --git a/frontend/src/components/tables/ChannelsTable/EmptyChannelsTableGuide.jsx b/frontend/src/components/tables/ChannelsTable/EmptyChannelsTableGuide.jsx new file mode 100644 index 00000000..9f719cda --- /dev/null +++ b/frontend/src/components/tables/ChannelsTable/EmptyChannelsTableGuide.jsx @@ -0,0 +1,78 @@ +export default () => { + return ( + +
+ + + It’s recommended to create channels after adding your M3U or + streams. + + + You can still create channels without streams if you’d like, + and map them later. + + + +
+ +
+ +
+
+ ) +} diff --git a/frontend/src/store/auth.jsx b/frontend/src/store/auth.jsx index d6eb8053..3e45d7d9 100644 --- a/frontend/src/store/auth.jsx +++ b/frontend/src/store/auth.jsx @@ -32,7 +32,7 @@ const useAuthStore = create((set, get) => ({ initData: async () => { await Promise.all([ - useChannelsStore.getState().fetchChannels(), + // useChannelsStore.getState().fetchChannels(), useChannelsStore.getState().fetchChannelGroups(), useChannelsStore.getState().fetchLogos(), useChannelsStore.getState().fetchChannelProfiles(), diff --git a/frontend/src/store/channels.jsx b/frontend/src/store/channels.jsx index 53cce7af..f0987f44 100644 --- a/frontend/src/store/channels.jsx +++ b/frontend/src/store/channels.jsx @@ -310,75 +310,75 @@ const useChannelsStore = create((set, get) => ({ })), setChannelStats: (stats) => { - return set((state) => { - const { - channels, - stats: currentStats, - activeChannels: oldChannels, - activeClients: oldClients, - channelsByUUID, - } = state; + // return set((state) => { + // const { + // channels, + // stats: currentStats, + // activeChannels: oldChannels, + // activeClients: oldClients, + // channelsByUUID, + // } = state; - const newClients = {}; - const newChannels = stats.channels.reduce((acc, ch) => { - acc[ch.channel_id] = ch; + // const newClients = {}; + // const newChannels = stats.channels.reduce((acc, ch) => { + // acc[ch.channel_id] = ch; - if (currentStats.channels) { - if (oldChannels[ch.channel_id] === undefined) { - notifications.show({ - title: 'New channel streaming', - message: channels[channelsByUUID[ch.channel_id]].name, - color: 'blue.5', - }); - } - } + // if (currentStats.channels) { + // if (oldChannels[ch.channel_id] === undefined) { + // notifications.show({ + // title: 'New channel streaming', + // message: channels[channelsByUUID[ch.channel_id]].name, + // color: 'blue.5', + // }); + // } + // } - ch.clients.map((client) => { - newClients[client.client_id] = client; - // This check prevents the notifications if streams are active on page load - if (currentStats.channels) { - if (oldClients[client.client_id] === undefined) { - notifications.show({ - title: 'New client started streaming', - message: `Client streaming from ${client.ip_address}`, - color: 'blue.5', - }); - } - } - }); + // ch.clients.map((client) => { + // newClients[client.client_id] = client; + // // This check prevents the notifications if streams are active on page load + // if (currentStats.channels) { + // if (oldClients[client.client_id] === undefined) { + // notifications.show({ + // title: 'New client started streaming', + // message: `Client streaming from ${client.ip_address}`, + // color: 'blue.5', + // }); + // } + // } + // }); - return acc; - }, {}); + // return acc; + // }, {}); - // This check prevents the notifications if streams are active on page load - if (currentStats.channels) { - for (const uuid in oldChannels) { - if (newChannels[uuid] === undefined) { - notifications.show({ - title: 'Channel streaming stopped', - message: channels[channelsByUUID[uuid]].name, - color: 'blue.5', - }); - } - } + // // This check prevents the notifications if streams are active on page load + // if (currentStats.channels) { + // for (const uuid in oldChannels) { + // if (newChannels[uuid] === undefined) { + // notifications.show({ + // title: 'Channel streaming stopped', + // message: channels[channelsByUUID[uuid]].name, + // color: 'blue.5', + // }); + // } + // } - for (const clientId in oldClients) { - if (newClients[clientId] === undefined) { - notifications.show({ 
- title: 'Client stopped streaming', - message: `Client stopped streaming from ${oldClients[clientId].ip_address}`, - color: 'blue.5', - }); - } - } - } + // for (const clientId in oldClients) { + // if (newClients[clientId] === undefined) { + // notifications.show({ + // title: 'Client stopped streaming', + // message: `Client stopped streaming from ${oldClients[clientId].ip_address}`, + // color: 'blue.5', + // }); + // } + // } + // } - return { - stats, - activeChannels: newChannels, - activeClients: newClients, - }; - }); + // return { + // stats, + // activeChannels: newChannels, + // activeClients: newClients, + // }; + // }); }, fetchRecordings: async () => { From 6a0ce574b0360190796daa3ecc999667cf7593fe Mon Sep 17 00:00:00 2001 From: dekzter Date: Wed, 16 Apr 2025 13:06:51 -0400 Subject: [PATCH 0004/1435] rewrite with tanstack table --- apps/channels/api_views.py | 12 +- apps/channels/serializers.py | 7 +- frontend/package-lock.json | 19 + frontend/package.json | 4 + .../src/components/tables/ChannelsTable.jsx | 848 +++++++++++++++++- frontend/src/store/channels.jsx | 44 +- 6 files changed, 884 insertions(+), 50 deletions(-) diff --git a/apps/channels/api_views.py b/apps/channels/api_views.py index 7ea7e3aa..15da6c14 100644 --- a/apps/channels/api_views.py +++ b/apps/channels/api_views.py @@ -139,13 +139,13 @@ class ChannelViewSet(viewsets.ModelViewSet): queryset = Channel.objects.all() serializer_class = ChannelSerializer permission_classes = [IsAuthenticated] - # pagination_class = ChannelPagination + pagination_class = ChannelPagination - # filter_backends = [DjangoFilterBackend, SearchFilter, OrderingFilter] - # filterset_class = ChannelFilter - # search_fields = ['name', 'channel_group__name'] - # ordering_fields = ['channel_number', 'name', 'channel_group__name'] - # ordering = ['-channel_number'] + filter_backends = [DjangoFilterBackend, SearchFilter, OrderingFilter] + filterset_class = ChannelFilter + search_fields = ['name', 'channel_group__name'] + ordering_fields = ['channel_number', 'name', 'channel_group__name'] + ordering = ['-channel_number'] def get_queryset(self): qs = super().get_queryset() diff --git a/apps/channels/serializers.py b/apps/channels/serializers.py index fcab5905..f157d4dd 100644 --- a/apps/channels/serializers.py +++ b/apps/channels/serializers.py @@ -83,12 +83,9 @@ class ChannelProfileSerializer(serializers.ModelSerializer): fields = ['id', 'name', 'channels'] def get_channels(self, obj): - memberships = ChannelProfileMembership.objects.filter(channel_profile=obj) + memberships = ChannelProfileMembership.objects.filter(channel_profile=obj, enabled=True) return [ - { - 'id': membership.channel.id, - 'enabled': membership.enabled - } + membership.channel.id for membership in memberships ] diff --git a/frontend/package-lock.json b/frontend/package-lock.json index 9f680d3e..edd99202 100644 --- a/frontend/package-lock.json +++ b/frontend/package-lock.json @@ -16,7 +16,11 @@ "@mantine/hooks": "^7.17.2", "@mantine/notifications": "^7.17.2", "@tabler/icons-react": "^3.31.0", +<<<<<<< Updated upstream "@tanstack/react-table": "^8.21.2", +======= + "@tanstack/react-table": "^8.21.3", +>>>>>>> Stashed changes "allotment": "^1.20.3", "axios": "^1.8.2", "clsx": "^2.1.1", @@ -1743,12 +1747,21 @@ } }, "node_modules/@tanstack/react-table": { +<<<<<<< Updated upstream "version": "8.21.2", "resolved": "https://registry.npmjs.org/@tanstack/react-table/-/react-table-8.21.2.tgz", "integrity": 
"sha512-11tNlEDTdIhMJba2RBH+ecJ9l1zgS2kjmexDPAraulc8jeNA4xocSNeyzextT0XJyASil4XsCYlJmf5jEWAtYg==", "license": "MIT", "dependencies": { "@tanstack/table-core": "8.21.2" +======= + "version": "8.21.3", + "resolved": "https://registry.npmjs.org/@tanstack/react-table/-/react-table-8.21.3.tgz", + "integrity": "sha512-5nNMTSETP4ykGegmVkhjcS8tTLW6Vl4axfEGQN3v0zdHYbK4UfoqfPChclTrJ4EoK9QynqAu9oUf8VEmrpZ5Ww==", + "license": "MIT", + "dependencies": { + "@tanstack/table-core": "8.21.3" +>>>>>>> Stashed changes }, "engines": { "node": ">=12" @@ -1780,9 +1793,15 @@ } }, "node_modules/@tanstack/table-core": { +<<<<<<< Updated upstream "version": "8.21.2", "resolved": "https://registry.npmjs.org/@tanstack/table-core/-/table-core-8.21.2.tgz", "integrity": "sha512-uvXk/U4cBiFMxt+p9/G7yUWI/UbHYbyghLCjlpWZ3mLeIZiUBSKcUnw9UnKkdRz7Z/N4UBuFLWQdJCjUe7HjvA==", +======= + "version": "8.21.3", + "resolved": "https://registry.npmjs.org/@tanstack/table-core/-/table-core-8.21.3.tgz", + "integrity": "sha512-ldZXEhOBb8Is7xLs01fR3YEc3DERiz5silj8tnGkFZytt1abEvl/GhUmCE0PMLaMPTa3Jk4HbKmRlHmu+gCftg==", +>>>>>>> Stashed changes "license": "MIT", "engines": { "node": ">=12" diff --git a/frontend/package.json b/frontend/package.json index c0d6ced3..76a72114 100644 --- a/frontend/package.json +++ b/frontend/package.json @@ -18,7 +18,11 @@ "@mantine/hooks": "^7.17.2", "@mantine/notifications": "^7.17.2", "@tabler/icons-react": "^3.31.0", +<<<<<<< Updated upstream "@tanstack/react-table": "^8.21.2", +======= + "@tanstack/react-table": "^8.21.3", +>>>>>>> Stashed changes "allotment": "^1.20.3", "axios": "^1.8.2", "clsx": "^2.1.1", diff --git a/frontend/src/components/tables/ChannelsTable.jsx b/frontend/src/components/tables/ChannelsTable.jsx index d3d4b868..d75a7274 100644 --- a/frontend/src/components/tables/ChannelsTable.jsx +++ b/frontend/src/components/tables/ChannelsTable.jsx @@ -54,6 +54,7 @@ import { MultiSelect, Pagination, NativeSelect, +<<<<<<< Updated upstream Checkbox, Table, } from '@mantine/core'; @@ -69,6 +70,19 @@ import { getPaginationRowModel, } from '@tanstack/react-table' import { notUndefined, useVirtualizer } from '@tanstack/react-virtual' +======= + Table, + Checkbox, +} from '@mantine/core'; +import { + useReactTable, + getCoreRowModel, + getPaginationRowModel, + getSortedRowModel, + getFilteredRowModel, + flexRender, +} from '@tanstack/react-table'; +>>>>>>> Stashed changes const ChannelStreams = React.memo(({ channel, isExpanded }) => { const channelStreams = useChannelsStore( @@ -206,7 +220,7 @@ const m3uUrlBase = `${window.location.protocol}//${window.location.host}/output/ const epgUrlBase = `${window.location.protocol}//${window.location.host}/output/epg`; const hdhrUrlBase = `${window.location.protocol}//${window.location.host}/hdhr`; -const CreateProfilePopover = React.memo(({ }) => { +const CreateProfilePopover = React.memo(({}) => { const [opened, setOpened] = useState(false); const [name, setName] = useState(''); const theme = useMantineTheme(); @@ -263,6 +277,7 @@ const CreateProfilePopover = React.memo(({ }) => { ); }); +<<<<<<< Updated upstream const ChannelEnabledCell = ({ cell, row, toggleChannelEnabled, selectedProfileId }) => { const handleSwitchChange = useCallback(() => { toggleChannelEnabled([row.original.id], !cell.getValue()); @@ -385,6 +400,88 @@ const RowActions = React.memo(({ {/* {env_mode == 'dev' && ( +======= +const ChannelEnabledSwitch = React.memo( + ({ row, selectedProfileId, toggleChannelEnabled, enabled }) => { + const isEnabled = selectedProfileId === '0' || enabled; + + 
const handleToggle = useCallback(() => { + toggleChannelEnabled([row.original.id], !isEnabled); + }, []); + + return ( + + ); + } +); + +const ChannelRowActions = React.memo( + ({ + theme, + row, + editChannel, + deleteChannel, + handleWatchStream, + createRecording, + }) => { + const onEdit = useCallback(() => { + editChannel(row.original); + }, []); + + const onDelete = useCallback(() => { + deleteChannel(row.original.id); + }, []); + + const onPreview = useCallback(() => { + handleWatchStream(row.original.uuid); + }, []); + + const onRecord = useCallback(() => { + createRecording(row.original); + }, []); + + return ( + +
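+      // Always rendered as enabled for the default profile ('0'); for any
+      // other profile the switch mirrors per-profile membership via isEnabled.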
+ + + + + + + + + + + + + + + + + + +>>>>>>> Stashed changes @@ -394,7 +491,11 @@ const RowActions = React.memo(({ >>>>>> Stashed changes leftSection={
+<<<<<<< Updated upstream )} */}
@@ -418,6 +520,15 @@ const RowActions = React.memo(({ }); const ChannelsTable = React.memo(({ }) => { +======= + + + ); + } +); + +const ChannelsTable = React.memo(({}) => { +>>>>>>> Stashed changes const { channels, isLoading: channelsLoading, @@ -426,7 +537,7 @@ const ChannelsTable = React.memo(({ }) => { profiles, selectedProfileId, setSelectedProfileId, - selectedProfileChannels, + selectedProfileChannelIds, channelsPageSelection, } = useChannelsStore(); @@ -451,14 +562,13 @@ const ChannelsTable = React.memo(({ }) => { const [paginationString, setPaginationString] = useState(''); const [pagination, setPagination] = useState({ pageIndex: 0, - pageSize: 250, + pageSize: 50, }); const [groupOptions, setGroupOptions] = useState([]); const [initialDataCount, setInitialDataCount] = useState(null); const [filters, setFilters] = useState({ name: '', channel_group: '', - m3u_account: '', }); const debouncedFilters = useDebounce(filters, 500); const [isLoading, setIsLoading] = useState(true); @@ -499,6 +609,13 @@ const ChannelsTable = React.memo(({ }) => { ]); }, [data]); +<<<<<<< Updated upstream +======= + const stopPropagation = (e) => { + e.stopPropagation(); + }; + +>>>>>>> Stashed changes const handleFilterChange = (e) => { const { name, value } = e.target; setFilters((prev) => ({ @@ -518,6 +635,32 @@ const ChannelsTable = React.memo(({ }) => { const m3uUrlRef = useRef(null); const epgUrlRef = useRef(null); + const editChannel = async (ch = null) => { + setChannel(ch); + setChannelModalOpen(true); + }; + + const deleteChannel = async (id) => { + setRowSelection([]); + if (channelsPageSelection.length > 0) { + return deleteChannels(); + } + await API.deleteChannel(id); + }; + + const createRecording = (channel) => { + setChannel(channel); + setRecordingModalOpen(true); + }; + + function handleWatchStream(channelNumber) { + let vidUrl = `/proxy/ts/stream/${channelNumber}`; + if (env_mode == 'dev') { + vidUrl = `${window.location.protocol}//${window.location.hostname}:5656${vidUrl}`; + } + showVideo(vidUrl); + } + const fetchData = useCallback(async () => { setIsLoading(true); @@ -576,8 +719,8 @@ const ChannelsTable = React.memo(({ }) => { }, [pagination, sorting, debouncedFilters]); useEffect(() => { - fetchData() - }, [fetchData]) + fetchData(); + }, [fetchData]); const onRowSelectionChange = (updater) => { setRowSelection((prevRowSelection) => { @@ -637,20 +780,21 @@ const ChannelsTable = React.memo(({ }) => { }); }; - const toggleChannelEnabled = async (channelIds, enabled) => { - if (channelIds.length == 1) { - await API.updateProfileChannel(channelIds[0], selectedProfileId, enabled); - } else { - await API.updateProfileChannels(channelIds, selectedProfileId, enabled); - setChannelsEnabledHeaderSwitch(enabled); - } - }; - - const enabledChannelSet = useMemo(() => { - return new Set( - selectedProfileChannels.filter((c) => c.enabled).map((c) => c.id) - ); - }, [selectedProfileChannels]); + const toggleChannelEnabled = useCallback( + async (channelIds, enabled) => { + if (channelIds.length == 1) { + await API.updateProfileChannel( + channelIds[0], + selectedProfileId, + enabled + ); + } else { + await API.updateProfileChannels(channelIds, selectedProfileId, enabled); + setChannelsEnabledHeaderSwitch(enabled); + } + }, + [selectedProfileId] + ); const EnabledHeaderSwitch = React.memo(({ isActive, toggle, disabled }) => ( { selectedProfileId, ]); +<<<<<<< Updated upstream // Configure columns const columns = useMemo( () => [ @@ -848,6 +993,201 @@ const ChannelsTable = React.memo(({ }) => { 
// channelsEnabledHeaderSwitch, ] ); +======= + // // Configure columns + // const columns = useMemo( + // () => [ + // { + // id: 'enabled', + // // Header: renderEnabledHeader, + // enableSorting: false, + // accessorFn: (row) => { + // return selectedProfileId == '0' + // ? true + // : enabledChannelSet.has(row.id); + // }, + // mantineTableHeadCellProps: { + // align: 'right', + // style: { + // backgroundColor: '#3F3F46', + // width: '40px', + // minWidth: '40px', + // maxWidth: '40px', + // // // minWidth: '20px', + // // // width: '50px !important', + // // // justifyContent: 'center', + // padding: 0, + // // // paddingLeft: 8, + // // // paddingRight: 0, + // }, + // }, + // mantineTableBodyCellProps: { + // align: 'right', + // style: { + // width: '40px', + // minWidth: '40px', + // maxWidth: '40px', + // // // minWidth: '20px', + // // // justifyContent: 'center', + // // // paddingLeft: 0, + // // // paddingRight: 0, + // padding: 0, + // }, + // }, + // Cell: ({ row, cell }) => { + // const memoizedCellValue = useMemo(() => cell.getValue(), [cell]); + // const handleSwitchChange = useCallback(() => { + // toggleChannelEnabled([row.original.id], !memoizedCellValue); + // }, [memoizedCellValue, row.original.id, toggleChannelEnabled]); + + // return ( + // + // ); + // }, + // }, + // { + // header: '#', + // size: 50, + // maxSize: 50, + // accessorKey: 'channel_number', + // sortingFn: (a, b, columnId) => { + // return ( + // parseInt(a.original.channel_number) - + // parseInt(b.original.channel_number) + // ); + // }, + // mantineTableHeadCellProps: { + // align: 'right', + // // // style: { + // // // backgroundColor: '#3F3F46', + // // // // minWidth: '20px', + // // // // justifyContent: 'center', + // // // // paddingLeft: 15, + // // // paddingRight: 0, + // // // }, + // }, + // mantineTableBodyCellProps: { + // align: 'right', + // // // style: { + // // // minWidth: '20px', + // // // // justifyContent: 'center', + // // // paddingLeft: 0, + // // // paddingRight: 0, + // // // }, + // }, + // }, + // { + // id: 'name', + // header: 'Name', + // accessorKey: 'name', + // Header: ({ column }) => ( + // { + // e.stopPropagation(); + // handleFilterChange(column.id, e.target.value); + // }} + // size="xs" + // variant="unstyled" + // className="table-input-header" + // onClick={(e) => e.stopPropagation()} + // /> + // ), + // Cell: ({ cell }) => ( + //
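// The memoized switch Cell earlier in this commented-out block is the core of
// the optimization being attempted: with stable props, React.memo lets a
// row's Switch skip re-rendering when unrelated state changes. A standalone
// sketch of the same idea (names illustrative; assumes toggleChannelEnabled
// is itself wrapped in useCallback so the prop stays referentially stable):
//
//   const EnabledSwitch = React.memo(({ id, checked, disabled, onToggle }) => (
//     <Switch
//       size="xs"
//       checked={checked}
//       disabled={disabled}
//       onChange={() => onToggle([id], !checked)}
//     />
//   ));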
+ // {cell.getValue()} + //
+ // ), + // }, + // { + // header: 'Group', + // accessorKey: 'channel_group.name', + // accessorFn: (row) => row.channel_group?.name || '', + // Cell: ({ cell }) => ( + //
+ // {cell.getValue()} + //
+ // ), + // Header: ({ column }) => ( + // e.stopPropagation()}> + // { + // handleFilterChange(column.id, value); + // }} + // data={channelGroupOptions} + // variant="unstyled" + // className="table-input-header custom-multiselect" + // /> + // + // ), + // }, + // { + // header: '', + // accessorKey: 'logo', + // enableSorting: false, + // size: 75, + // mantineTableBodyCellProps: { + // align: 'center', + // style: { + // maxWidth: '75px', + // }, + // }, + // Cell: ({ cell }) => ( + // + // channel logo + // + // ), + // }, + // ], + // [ + // channelGroupOptions, + // filterValues, + // selectedProfile, + // selectedProfileChannels, + // rowSelection, + // channelsPageSelection, + // channelsEnabledHeaderSwitch, + // ] + // ); +>>>>>>> Stashed changes // (Optional) bulk delete, but your endpoint is @TODO const deleteChannels = async () => { @@ -971,8 +1311,42 @@ const ChannelsTable = React.memo(({ }) => { // }, [rowSelection]) useEffect(() => { +<<<<<<< Updated upstream fetchData(); }, [fetchData]); +======= + const selectedRows = table + .getSelectedRowModel() + .rows.map((row) => row.original); + setChannelsPageSelection(selectedRows); + + if (selectedProfileId != '0') { + setChannelsEnabledHeaderSwitch( + selectedRows.filter((row) => selectedProfileChannelIds.has(row.id)) + .length == selectedRows.length + ); + } + }, [rowSelection]); + + const filteredData = Object.values(channels).filter((row) => + columns.every(({ accessorKey }) => { + if (!accessorKey) { + return true; + } + + const filterValue = filterValues[accessorKey]; + const rowValue = getDescendantProp(row, accessorKey); + + if (Array.isArray(filterValue) && filterValue.length != 0) { + return filterValue.includes(rowValue); + } else if (filterValue) { + return rowValue?.toLowerCase().includes(filterValues[accessorKey]); + } + + return true; + }) + ); +>>>>>>> Stashed changes const deleteProfile = async (id) => { await API.deleteChannelProfile(id); @@ -999,6 +1373,7 @@ const ChannelsTable = React.memo(({ }) => { ); }; +<<<<<<< Updated upstream const editChannel = useCallback((row) => { setChannel(row.original); setChannelModalOpen(true); @@ -1058,6 +1433,334 @@ const ChannelsTable = React.memo(({ }) => { virtualizer.getTotalSize() - notUndefined(items[items.length - 1]).end ] : [0, 0]; +======= + const newColumns = useMemo( + () => [ + { + id: 'select', + size: 30, + header: ({ table }) => ( + + ), + cell: ({ row }) => ( + + ), + enableSorting: false, + enableColumnFilter: false, + }, + { + id: 'enabled', + size: 45, + header: () => ( +
+ +
+ ), + cell: ({ row }) => ( + + ), + enableSorting: false, + }, + { + accessorKey: 'channel_number', + size: 30, + header: () => #, + cell: ({ getValue }) => ( + + {getValue()} + + ), + }, + { + id: 'name', + accessorKey: 'name', + header: ({ column }) => ( + + ), + cell: ({ getValue }) => ( + + {getValue()} + + ), + }, + { + accessorFn: (row) => row.channel_group?.name || '', + id: 'channel_group', + header: () => ( + d.channel_group?.name || '')) + )} + size="xs" + searchable + onClick={stopPropagation} + onChange={handleGroupChange} + /> + ), + cell: ({ getValue }) => ( + + {getValue()} + + ), + }, + { + accessorKey: 'logo', + size: 75, + header: '', + cell: ({ getValue }) => { + const value = getValue(); + const src = value?.cache_url || logo; + return ( + + logo + + ); + }, + enableSorting: false, + }, + { + id: 'actions', + size: 75, + header: '', + cell: ({ row }) => ( + + ), + enableSorting: false, + }, + ], + [selectedProfileId, selectedProfileChannelIds, data] + ); + + const table = useReactTable({ + data, + columns: newColumns, + pageCount, + state: { + sorting, + filters, + pagination, + rowSelection, + }, + manualPagination: true, + manualSorting: true, + manualFiltering: true, + enableRowSelection: true, + onPaginationChange: setPagination, + onSortingChange: setSorting, + onColumnFiltersChange: setFilters, + onRowSelectionChange: setRowSelection, + getCoreRowModel: getCoreRowModel(), + getFilteredRowModel: getFilteredRowModel(), + getSortedRowModel: getSortedRowModel(), + getPaginationRowModel: getPaginationRowModel(), + debugTable: true, + }); +>>>>>>> Stashed changes + + // const oldtable = useMantineReactTable({ + // ...TableHelper.defaultProperties, + // columns, + // data, + // enablePagination: true, + // manualPagination: true, + // enableColumnActions: false, + // enableRowSelection: true, + // renderTopToolbar: false, + // onRowSelectionChange: onRowSelectionChange, + // onSortingChange: setSorting, + // state: { + // isLoading: isLoading || channelsLoading, + // sorting, + // rowSelection, + // }, + // enableBottomToolbar: true, + // renderBottomToolbar: ({ table }) => ( + // + // Page Size + // + // + // {paginationString} + // + // ), + // initialState: { + // density: 'compact', + // sorting: [ + // { + // id: 'channel_number', + // desc: false, + // }, + // ], + // }, + // enableRowActions: true, + // enableExpandAll: false, + // displayColumnDefOptions: { + // 'mrt-row-select': { + // size: 10, + // maxSize: 10, + // mantineTableHeadCellProps: { + // align: 'right', + // style: { + // paddding: 0, + // // paddingLeft: 7, + // width: '20px', + // minWidth: '20px', + // backgroundColor: '#3F3F46', + // }, + // }, + // mantineTableBodyCellProps: { + // align: 'right', + // style: { + // paddingLeft: 0, + // width: '20px', + // minWidth: '20px', + // }, + // }, + // }, + // 'mrt-row-expand': { + // size: 20, + // maxSize: 20, + // header: '', + // mantineTableHeadCellProps: { + // style: { + // padding: 0, + // paddingLeft: 2, + // width: '20px', + // minWidth: '20px', + // maxWidth: '20px', + // backgroundColor: '#3F3F46', + // }, + // }, + // mantineTableBodyCellProps: { + // style: { + // padding: 0, + // paddingLeft: 2, + // width: '20px', + // minWidth: '20px', + // maxWidth: '20px', + // }, + // }, + // }, + // 'mrt-row-actions': { + // size: 85, + // maxWidth: 85, + // mantineTableHeadCellProps: { + // align: 'center', + // style: { + // minWidth: '85px', + // maxWidth: '85px', + // // paddingRight: 40, + // fontWeight: 'normal', + // color: 
'rgb(207,207,207)', + // backgroundColor: '#3F3F46', + // }, + // }, + // mantineTableBodyCellProps: { + // style: { + // minWidth: '85px', + // maxWidth: '85px', + // paddingLeft: 0, + // // paddingRight: 10, + // }, + // }, + // }, + // }, + // mantineExpandButtonProps: ({ row, table }) => ({ + // onClick: () => { + // setRowSelection({ [row.index]: true }); + // table.setExpanded({ [row.id]: !row.getIsExpanded() }); + // }, + // size: 'xs', + // style: { + // transform: row.getIsExpanded() ? 'rotate(180deg)' : 'rotate(-90deg)', + // transition: 'transform 0.2s', + // }, + // }), + // renderDetailPanel: ({ row }) => ( + // + // ), + // renderRowActions: ({ row }) => ( + // + // ), + // mantineTableContainerProps: { + // style: { + // height: 'calc(100vh - 150px)', + // overflowY: 'auto', + // // margin: 5, + // }, + // }, + // }); return ( @@ -1200,6 +1903,30 @@ const ChannelsTable = React.memo(({ }) => { {/* Paper container: contains top toolbar and table (or ghost state) */} +<<<<<<< Updated upstream +======= + + {/* Top toolbar with Remove, Assign, Auto-match, and Add buttons */} + + + ({ - label: profile.name, - value: `${profile.id}`, - }))} - renderOption={renderProfileOption} - /> + + + + - - - + + + + + + + + + + + + + + + -<<<<<<< Updated upstream - - - - - - - - - - - - - - - -<<<<<<< Updated upstream - +
+ + + It’s recommended to create channels after adding your M3U or + streams. + + + You can still create channels without streams if you’d like, + and map them later. + + + +
+ +
+ +
+
+ )} + - {/* Table or ghost empty state inside Paper */} - - {initialDataCount === 0 && ( - -======= {initialDataCount > 0 && ( - - - - {table.getHeaderGroups().map((headerGroup) => ( - - {headerGroup.headers.map((header) => ( - - {flexRender( - header.column.columnDef.header, - header.getContext() - )} - - ))} - - ))} - - - {table.getRowModel().rows.map((row) => ( - - {row.getVisibleCells().map((cell) => ( - - {flexRender( - cell.column.columnDef.cell, - cell.getContext() - )} - - ))} - - ))} - -
+ + + + + {getHeaderGroups().map((headerGroup) => ( + + {headerGroup.headers.map((header) => { + const width = header.getSize(); + return ( + + + {renderHeaderCell(header)} + + + ); + })} + + ))} + + + {getRowModel().rows.map((row) => ( + + {row.getVisibleCells().map((cell) => { + const width = cell.column.getSize(); + return ( + + + {cell.column.id === 'select' + ? ChannelRowSelectCell({ row: cell.row }) + : flexRender( + cell.column.columnDef.cell, + cell.getContext() + )} + + + ); + })} + + ))} + + + @@ -2076,7 +1448,7 @@ const ChannelsTable = React.memo(({}) => { @@ -2092,54 +1464,8 @@ const ChannelsTable = React.memo(({}) => { ->>>>>>> Stashed changes )} -
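The replacement render loop above is plain headless @tanstack/react-table: header groups and the row model are mapped onto divs, with flexRender resolving each column's header and cell definitions. A condensed sketch of that loop, using the divTable/tr/th/td classes added in table.css later in this series and assuming `table` is the instance returned by useReactTable:

import { flexRender } from '@tanstack/react-table';

const TableSketch = ({ table }) => (
  <div className="divTable">
    {table.getHeaderGroups().map((hg) => (
      <div className="tr" key={hg.id}>
        {hg.headers.map((h) => (
          // getSize() supplies the column width, so rows keep a fixed layout
          // and can later be virtualized without layout thrash.
          <div className="th" key={h.id} style={{ width: h.getSize() }}>
            {flexRender(h.column.columnDef.header, h.getContext())}
          </div>
        ))}
      </div>
    ))}
    {table.getRowModel().rows.map((row) => (
      <div className="tr" key={row.id}>
        {row.getVisibleCells().map((cell) => (
          <div
            className="td"
            key={cell.id}
            style={{ width: cell.column.getSize() }}
          >
            {flexRender(cell.column.columnDef.cell, cell.getContext())}
          </div>
        ))}
      </div>
    ))}
  </div>
);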
- - - {initialDataCount > 0 && ( - - - - {table.getHeaderGroups().map(headerGroup => ( - - {headerGroup.headers.map(header => { - return ( - - {flexRender(header.column.columnDef.header, header.getContext())} - - ) - })} - - ))} - - -
-
- )} + { isOpen={recordingModalOpen} onClose={closeRecordingForm} /> -
+ ); }); diff --git a/frontend/src/components/tables/ChannelsTable/EmptyChannelsTableGuide.jsx b/frontend/src/components/tables/ChannelsTable/EmptyChannelsTableGuide.jsx deleted file mode 100644 index 9f719cda..00000000 --- a/frontend/src/components/tables/ChannelsTable/EmptyChannelsTableGuide.jsx +++ /dev/null @@ -1,78 +0,0 @@ -export default () => { - return ( - -
- - - It’s recommended to create channels after adding your M3U or - streams. - - - You can still create channels without streams if you’d like, - and map them later. - - - -
- -
- -
-
- ) -} diff --git a/frontend/src/components/tables/table.css b/frontend/src/components/tables/table.css new file mode 100644 index 00000000..92afaaaa --- /dev/null +++ b/frontend/src/components/tables/table.css @@ -0,0 +1,90 @@ +* { + /* box-sizing: border-box; */ + } + + html { + font-family: sans-serif; + /* font-size: 14px; */ + } + + .divTable { + /* border: 1px solid lightgray; */ + /* width: fit-content; */ + /* display: flex; + flex-direction: column; */ + } + + .tr { + display: flex; + } + + .tr { + /* width: fit-content; + width: 100%; */ + /* height: 30px; */ + } + + .th, + .td { + /* box-shadow: inset 0 0 0 1px lightgray; */ + /* padding: 0.25rem; */ + padding-left: 4px; + padding-right: 4px; + } + + .th { + /* padding: 2px 4px; */ + position: relative; + font-weight: bold; + text-align: center; + /* height: 30px; */ + } + + .td { + height: 21px; + border-bottom: solid 1px rgb(68,68,68); + } + + .resizer { + position: absolute; + top: 0; + height: 100%; + width: 5px; + background: rgba(0, 0, 0, 0.5); + cursor: col-resize; + user-select: none; + touch-action: none; + } + + .resizer.ltr { + right: 0; + } + + .resizer.rtl { + left: 0; + } + + .resizer.isResizing { + background: blue; + opacity: 1; + } + + @media (hover: hover) { + .resizer { + opacity: 0; + } + + *:hover > .resizer { + opacity: 1; + } + } + + .table-striped .tbody .tr:nth-child(odd), + .table-striped .tbody .tr-odd { + background-color: #18181b; + } + + .table-striped .tbody .tr:nth-child(even), + .table-striped .tbody .tr-even { + /* background-color: #ffffff; */ + } diff --git a/frontend/src/store/channels.jsx b/frontend/src/store/channels.jsx index fe00f655..282ece4c 100644 --- a/frontend/src/store/channels.jsx +++ b/frontend/src/store/channels.jsx @@ -2,7 +2,7 @@ import { create } from 'zustand'; import api from '../api'; import { notifications } from '@mantine/notifications'; -const defaultProfiles = { 0: { id: '0', name: 'All', channels: [] } }; +const defaultProfiles = { 0: { id: '0', name: 'All', channels: new Set() } }; const useChannelsStore = create((set, get) => ({ channels: [], @@ -10,7 +10,6 @@ const useChannelsStore = create((set, get) => ({ channelGroups: {}, profiles: {}, selectedProfileId: '0', - selectedProfileChannelIds: new Set(), channelsPageSelection: [], stats: {}, activeChannels: {}, @@ -64,7 +63,10 @@ const useChannelsStore = create((set, get) => ({ const profiles = await api.getChannelProfiles(); set({ profiles: profiles.reduce((acc, profile) => { - acc[profile.id] = profile; + acc[profile.id] = { + ...profile, + channels: new Set(profile.channels), + }; return acc; }, defaultProfiles), isLoading: false, @@ -80,10 +82,7 @@ const useChannelsStore = create((set, get) => ({ set((state) => { const profiles = { ...state.profiles }; Object.values(profiles).forEach((item) => { - item.channels.push({ - id: newChannel.id, - enabled: true, - }); + item.channels.add(newChannel.id); }); return { @@ -96,11 +95,6 @@ const useChannelsStore = create((set, get) => ({ [newChannel.uuid]: newChannel.id, }, profiles, - selectedProfile: profiles[state.selectedProfileId], - selectedProfileChannelIds: - state.selectedProfileId == '0' - ? 
[] - : new Set(profiles[state.selectedProfileId].channels), }; }); }, @@ -109,7 +103,7 @@ const useChannelsStore = create((set, get) => ({ set((state) => { const channelsByUUID = {}; const logos = {}; - const profileChannels = []; + const profileChannels = new Set(); const channelsByID = newChannels.reduce((acc, channel) => { acc[channel.id] = channel; @@ -118,22 +112,19 @@ const useChannelsStore = create((set, get) => ({ logos[channel.logo.id] = channel.logo; } - profileChannels.push({ - id: channel.id, - enabled: true, - }); + profileChannels.add(channel.id); return acc; }, {}); - const profiles = { ...state.profiles }; - Object.values(profiles).forEach((item) => { - item.channels = item.channels.concat(profileChannels); // Append a new channel object + const newProfiles = {}; + Object.entries(state.profiles).forEach(([id, profile]) => { + newProfiles[id] = { + ...profile, + channels: new Set([...profile.channels, ...profileChannels]), + }; }); - console.log(profileChannels); - console.log(profiles); - return { channels: { ...state.channels, @@ -147,11 +138,7 @@ const useChannelsStore = create((set, get) => ({ ...state.logos, ...logos, }, - profiles, - selectedProfile: profiles[state.selectedProfileId], - selectedProfileChannelIds: new Set( - profiles[state.selectedProfileId].channels - ), + profiles: newProfiles, }; }), @@ -235,15 +222,21 @@ const useChannelsStore = create((set, get) => ({ set((state) => ({ profiles: { ...state.profiles, - [profile.id]: profile, + [profile.id]: { + ...profile, + channels: new Set(profile.channels), + }, }, })), updateProfile: (profile) => set((state) => ({ - channels: { + profiles: { ...state.profiles, - [profile.id]: profile, + [profile.id]: { + ...profile, + channels: new Set(profile.channels), + }, }, })), @@ -258,8 +251,6 @@ const useChannelsStore = create((set, get) => ({ if (profileIds.includes(state.selectedProfileId)) { additionalUpdates = { selectedProfileId: '0', - selectedProfileChannelIds: new Set(), - selectedProfile: {}, }; } @@ -274,35 +265,42 @@ const useChannelsStore = create((set, get) => ({ updateProfileChannels: (channelIds, profileId, enabled) => set((state) => { - // Get the specific profile const profile = state.profiles[profileId]; - if (!profile) return state; // Profile doesn't exist, no update needed + if (!profile) return {}; - const profileChannels = new Set(state.profiles[profileId].channels); + const currentChannelsSet = profile.channels; + let hasChanged = false; if (enabled) { - channelIds.forEach((id) => { - profileChannels.add(id); - }); + for (const id of channelIds) { + if (!currentChannelsSet.has(id)) { + currentChannelsSet.add(id); + hasChanged = true; + } + } } else { - channelIds.forEach((id) => { - profileChannels.delete(id); - }); + for (const id of channelIds) { + if (currentChannelsSet.has(id)) { + currentChannelsSet.delete(id); + hasChanged = true; + } + } } - // Efficiently update only the specific channel - return { - profiles: { - ...state.profiles, - [profileId]: { - ...profile, - channels: profileChannels, - }, - }, - ...(state.selectedProfileId == profileId && { - selectedProfileChannelIds: profileChannels, - }), + if (!hasChanged) return {}; // No need to update anything + + const updatedProfile = { + ...profile, + channels: currentChannelsSet, }; + + const updates = { + profiles: { + [profileId]: updatedProfile, + }, + }; + + return updates; }), setChannelsPageSelection: (channelsPageSelection) => @@ -311,8 +309,6 @@ const useChannelsStore = create((set, get) => ({ setSelectedProfileId: (id) => 
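// Each profile's `channels` is now a Set of channel ids rather than an array
// of { id, enabled } objects, so the table's per-row enabled check becomes an
// O(1) lookup, roughly profiles[selectedProfileId].channels.has(row.id),
// instead of an Array.find scan on every render. (Illustrative expression;
// the table reads it through its selectedProfileChannelIds selector.)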
set((state) => ({ selectedProfileId: id, - selectedProfileChannelIds: - id == '0' ? new Set() : new Set(state.profiles[id].channels), })), setChannelStats: (stats) => { @@ -324,11 +320,9 @@ const useChannelsStore = create((set, get) => ({ // activeClients: oldClients, // channelsByUUID, // } = state; - // const newClients = {}; // const newChannels = stats.channels.reduce((acc, ch) => { // acc[ch.channel_id] = ch; - // if (currentStats.channels) { // if (oldChannels[ch.channel_id] === undefined) { // notifications.show({ @@ -338,7 +332,6 @@ const useChannelsStore = create((set, get) => ({ // }); // } // } - // ch.clients.map((client) => { // newClients[client.client_id] = client; // // This check prevents the notifications if streams are active on page load @@ -352,10 +345,8 @@ const useChannelsStore = create((set, get) => ({ // } // } // }); - // return acc; // }, {}); - // // This check prevents the notifications if streams are active on page load // if (currentStats.channels) { // for (const uuid in oldChannels) { @@ -367,7 +358,6 @@ const useChannelsStore = create((set, get) => ({ // }); // } // } - // for (const clientId in oldClients) { // if (newClients[clientId] === undefined) { // notifications.show({ @@ -378,7 +368,6 @@ const useChannelsStore = create((set, get) => ({ // } // } // } - // return { // stats, // activeChannels: newChannels, From b74b388f7d92c0912f9e4474b09a976bf22db9b8 Mon Sep 17 00:00:00 2001 From: dekzter Date: Thu, 17 Apr 2025 17:57:33 -0400 Subject: [PATCH 0006/1435] finishing up table refactor for channels --- frontend/package-lock.json | 816 ------------------ frontend/package.json | 4 - .../src/components/tables/ChannelsTable.jsx | 354 +++----- .../src/components/tables/StreamsTable.jsx | 10 +- frontend/src/store/channels.jsx | 5 + frontend/src/store/channelsTable | 59 ++ 6 files changed, 187 insertions(+), 1061 deletions(-) create mode 100644 frontend/src/store/channelsTable diff --git a/frontend/package-lock.json b/frontend/package-lock.json index edd99202..5184cceb 100644 --- a/frontend/package-lock.json +++ b/frontend/package-lock.json @@ -16,11 +16,7 @@ "@mantine/hooks": "^7.17.2", "@mantine/notifications": "^7.17.2", "@tabler/icons-react": "^3.31.0", -<<<<<<< Updated upstream "@tanstack/react-table": "^8.21.2", -======= - "@tanstack/react-table": "^8.21.3", ->>>>>>> Stashed changes "allotment": "^1.20.3", "axios": "^1.8.2", "clsx": "^2.1.1", @@ -343,278 +339,6 @@ "integrity": "sha512-snKqtPW01tN0ui7yu9rGv69aJXr/a/Ywvl11sUjNtEcRc+ng/mQriFL0wLXMef74iHa/EkftbDzU9F8iFbH+zg==", "license": "MIT" }, - "node_modules/@esbuild/aix-ppc64": { - "version": "0.25.1", - "resolved": "https://registry.npmjs.org/@esbuild/aix-ppc64/-/aix-ppc64-0.25.1.tgz", - "integrity": "sha512-kfYGy8IdzTGy+z0vFGvExZtxkFlA4zAxgKEahG9KE1ScBjpQnFsNOX8KTU5ojNru5ed5CVoJYXFtoxaq5nFbjQ==", - "cpu": [ - "ppc64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "aix" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/android-arm": { - "version": "0.25.1", - "resolved": "https://registry.npmjs.org/@esbuild/android-arm/-/android-arm-0.25.1.tgz", - "integrity": "sha512-dp+MshLYux6j/JjdqVLnMglQlFu+MuVeNrmT5nk6q07wNhCdSnB7QZj+7G8VMUGh1q+vj2Bq8kRsuyA00I/k+Q==", - "cpu": [ - "arm" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "android" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/android-arm64": { - "version": "0.25.1", - "resolved": 
"https://registry.npmjs.org/@esbuild/android-arm64/-/android-arm64-0.25.1.tgz", - "integrity": "sha512-50tM0zCJW5kGqgG7fQ7IHvQOcAn9TKiVRuQ/lN0xR+T2lzEFvAi1ZcS8DiksFcEpf1t/GYOeOfCAgDHFpkiSmA==", - "cpu": [ - "arm64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "android" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/android-x64": { - "version": "0.25.1", - "resolved": "https://registry.npmjs.org/@esbuild/android-x64/-/android-x64-0.25.1.tgz", - "integrity": "sha512-GCj6WfUtNldqUzYkN/ITtlhwQqGWu9S45vUXs7EIYf+7rCiiqH9bCloatO9VhxsL0Pji+PF4Lz2XXCES+Q8hDw==", - "cpu": [ - "x64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "android" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/darwin-arm64": { - "version": "0.25.1", - "resolved": "https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.25.1.tgz", - "integrity": "sha512-5hEZKPf+nQjYoSr/elb62U19/l1mZDdqidGfmFutVUjjUZrOazAtwK+Kr+3y0C/oeJfLlxo9fXb1w7L+P7E4FQ==", - "cpu": [ - "arm64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "darwin" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/darwin-x64": { - "version": "0.25.1", - "resolved": "https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.25.1.tgz", - "integrity": "sha512-hxVnwL2Dqs3fM1IWq8Iezh0cX7ZGdVhbTfnOy5uURtao5OIVCEyj9xIzemDi7sRvKsuSdtCAhMKarxqtlyVyfA==", - "cpu": [ - "x64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "darwin" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/freebsd-arm64": { - "version": "0.25.1", - "resolved": "https://registry.npmjs.org/@esbuild/freebsd-arm64/-/freebsd-arm64-0.25.1.tgz", - "integrity": "sha512-1MrCZs0fZa2g8E+FUo2ipw6jw5qqQiH+tERoS5fAfKnRx6NXH31tXBKI3VpmLijLH6yriMZsxJtaXUyFt/8Y4A==", - "cpu": [ - "arm64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "freebsd" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/freebsd-x64": { - "version": "0.25.1", - "resolved": "https://registry.npmjs.org/@esbuild/freebsd-x64/-/freebsd-x64-0.25.1.tgz", - "integrity": "sha512-0IZWLiTyz7nm0xuIs0q1Y3QWJC52R8aSXxe40VUxm6BB1RNmkODtW6LHvWRrGiICulcX7ZvyH6h5fqdLu4gkww==", - "cpu": [ - "x64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "freebsd" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/linux-arm": { - "version": "0.25.1", - "resolved": "https://registry.npmjs.org/@esbuild/linux-arm/-/linux-arm-0.25.1.tgz", - "integrity": "sha512-NdKOhS4u7JhDKw9G3cY6sWqFcnLITn6SqivVArbzIaf3cemShqfLGHYMx8Xlm/lBit3/5d7kXvriTUGa5YViuQ==", - "cpu": [ - "arm" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/linux-arm64": { - "version": "0.25.1", - "resolved": "https://registry.npmjs.org/@esbuild/linux-arm64/-/linux-arm64-0.25.1.tgz", - "integrity": "sha512-jaN3dHi0/DDPelk0nLcXRm1q7DNJpjXy7yWaWvbfkPvI+7XNSc/lDOnCLN7gzsyzgu6qSAmgSvP9oXAhP973uQ==", - "cpu": [ - "arm64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/linux-ia32": { - "version": "0.25.1", - "resolved": "https://registry.npmjs.org/@esbuild/linux-ia32/-/linux-ia32-0.25.1.tgz", - "integrity": "sha512-OJykPaF4v8JidKNGz8c/q1lBO44sQNUQtq1KktJXdBLn1hPod5rE/Hko5ugKKZd+D2+o1a9MFGUEIUwO2YfgkQ==", - "cpu": [ - "ia32" - ], - "dev": true, - 
"license": "MIT", - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/linux-loong64": { - "version": "0.25.1", - "resolved": "https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.25.1.tgz", - "integrity": "sha512-nGfornQj4dzcq5Vp835oM/o21UMlXzn79KobKlcs3Wz9smwiifknLy4xDCLUU0BWp7b/houtdrgUz7nOGnfIYg==", - "cpu": [ - "loong64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/linux-mips64el": { - "version": "0.25.1", - "resolved": "https://registry.npmjs.org/@esbuild/linux-mips64el/-/linux-mips64el-0.25.1.tgz", - "integrity": "sha512-1osBbPEFYwIE5IVB/0g2X6i1qInZa1aIoj1TdL4AaAb55xIIgbg8Doq6a5BzYWgr+tEcDzYH67XVnTmUzL+nXg==", - "cpu": [ - "mips64el" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/linux-ppc64": { - "version": "0.25.1", - "resolved": "https://registry.npmjs.org/@esbuild/linux-ppc64/-/linux-ppc64-0.25.1.tgz", - "integrity": "sha512-/6VBJOwUf3TdTvJZ82qF3tbLuWsscd7/1w+D9LH0W/SqUgM5/JJD0lrJ1fVIfZsqB6RFmLCe0Xz3fmZc3WtyVg==", - "cpu": [ - "ppc64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/linux-riscv64": { - "version": "0.25.1", - "resolved": "https://registry.npmjs.org/@esbuild/linux-riscv64/-/linux-riscv64-0.25.1.tgz", - "integrity": "sha512-nSut/Mx5gnilhcq2yIMLMe3Wl4FK5wx/o0QuuCLMtmJn+WeWYoEGDN1ipcN72g1WHsnIbxGXd4i/MF0gTcuAjQ==", - "cpu": [ - "riscv64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/linux-s390x": { - "version": "0.25.1", - "resolved": "https://registry.npmjs.org/@esbuild/linux-s390x/-/linux-s390x-0.25.1.tgz", - "integrity": "sha512-cEECeLlJNfT8kZHqLarDBQso9a27o2Zd2AQ8USAEoGtejOrCYHNtKP8XQhMDJMtthdF4GBmjR2au3x1udADQQQ==", - "cpu": [ - "s390x" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">=18" - } - }, "node_modules/@esbuild/linux-x64": { "version": "0.25.1", "resolved": "https://registry.npmjs.org/@esbuild/linux-x64/-/linux-x64-0.25.1.tgz", @@ -632,142 +356,6 @@ "node": ">=18" } }, - "node_modules/@esbuild/netbsd-arm64": { - "version": "0.25.1", - "resolved": "https://registry.npmjs.org/@esbuild/netbsd-arm64/-/netbsd-arm64-0.25.1.tgz", - "integrity": "sha512-O96poM2XGhLtpTh+s4+nP7YCCAfb4tJNRVZHfIE7dgmax+yMP2WgMd2OecBuaATHKTHsLWHQeuaxMRnCsH8+5g==", - "cpu": [ - "arm64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "netbsd" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/netbsd-x64": { - "version": "0.25.1", - "resolved": "https://registry.npmjs.org/@esbuild/netbsd-x64/-/netbsd-x64-0.25.1.tgz", - "integrity": "sha512-X53z6uXip6KFXBQ+Krbx25XHV/NCbzryM6ehOAeAil7X7oa4XIq+394PWGnwaSQ2WRA0KI6PUO6hTO5zeF5ijA==", - "cpu": [ - "x64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "netbsd" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/openbsd-arm64": { - "version": "0.25.1", - "resolved": "https://registry.npmjs.org/@esbuild/openbsd-arm64/-/openbsd-arm64-0.25.1.tgz", - "integrity": "sha512-Na9T3szbXezdzM/Kfs3GcRQNjHzM6GzFBeU1/6IV/npKP5ORtp9zbQjvkDJ47s6BCgaAZnnnu/cY1x342+MvZg==", - "cpu": [ - "arm64" - ], - "dev": true, - "license": 
"MIT", - "optional": true, - "os": [ - "openbsd" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/openbsd-x64": { - "version": "0.25.1", - "resolved": "https://registry.npmjs.org/@esbuild/openbsd-x64/-/openbsd-x64-0.25.1.tgz", - "integrity": "sha512-T3H78X2h1tszfRSf+txbt5aOp/e7TAz3ptVKu9Oyir3IAOFPGV6O9c2naym5TOriy1l0nNf6a4X5UXRZSGX/dw==", - "cpu": [ - "x64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "openbsd" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/sunos-x64": { - "version": "0.25.1", - "resolved": "https://registry.npmjs.org/@esbuild/sunos-x64/-/sunos-x64-0.25.1.tgz", - "integrity": "sha512-2H3RUvcmULO7dIE5EWJH8eubZAI4xw54H1ilJnRNZdeo8dTADEZ21w6J22XBkXqGJbe0+wnNJtw3UXRoLJnFEg==", - "cpu": [ - "x64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "sunos" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/win32-arm64": { - "version": "0.25.1", - "resolved": "https://registry.npmjs.org/@esbuild/win32-arm64/-/win32-arm64-0.25.1.tgz", - "integrity": "sha512-GE7XvrdOzrb+yVKB9KsRMq+7a2U/K5Cf/8grVFRAGJmfADr/e/ODQ134RK2/eeHqYV5eQRFxb1hY7Nr15fv1NQ==", - "cpu": [ - "arm64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "win32" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/win32-ia32": { - "version": "0.25.1", - "resolved": "https://registry.npmjs.org/@esbuild/win32-ia32/-/win32-ia32-0.25.1.tgz", - "integrity": "sha512-uOxSJCIcavSiT6UnBhBzE8wy3n0hOkJsBOzy7HDAuTDE++1DJMRRVCPGisULScHL+a/ZwdXPpXD3IyFKjA7K8A==", - "cpu": [ - "ia32" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "win32" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/win32-x64": { - "version": "0.25.1", - "resolved": "https://registry.npmjs.org/@esbuild/win32-x64/-/win32-x64-0.25.1.tgz", - "integrity": "sha512-Y1EQdcfwMSeQN/ujR5VayLOJ1BHaK+ssyk0AEzPjC+t1lITgsnccPqFjb6V+LsTp/9Iov4ysfjxLaGJ9RPtkVg==", - "cpu": [ - "x64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "win32" - ], - "engines": { - "node": ">=18" - } - }, "node_modules/@eslint-community/eslint-utils": { "version": "4.5.0", "resolved": "https://registry.npmjs.org/@eslint-community/eslint-utils/-/eslint-utils-4.5.0.tgz", @@ -1212,202 +800,6 @@ "url": "https://opencollective.com/popperjs" } }, - "node_modules/@rollup/rollup-android-arm-eabi": { - "version": "4.35.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm-eabi/-/rollup-android-arm-eabi-4.35.0.tgz", - "integrity": "sha512-uYQ2WfPaqz5QtVgMxfN6NpLD+no0MYHDBywl7itPYd3K5TjjSghNKmX8ic9S8NU8w81NVhJv/XojcHptRly7qQ==", - "cpu": [ - "arm" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "android" - ] - }, - "node_modules/@rollup/rollup-android-arm64": { - "version": "4.35.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm64/-/rollup-android-arm64-4.35.0.tgz", - "integrity": "sha512-FtKddj9XZudurLhdJnBl9fl6BwCJ3ky8riCXjEw3/UIbjmIY58ppWwPEvU3fNu+W7FUsAsB1CdH+7EQE6CXAPA==", - "cpu": [ - "arm64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "android" - ] - }, - "node_modules/@rollup/rollup-darwin-arm64": { - "version": "4.35.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-arm64/-/rollup-darwin-arm64-4.35.0.tgz", - "integrity": "sha512-Uk+GjOJR6CY844/q6r5DR/6lkPFOw0hjfOIzVx22THJXMxktXG6CbejseJFznU8vHcEBLpiXKY3/6xc+cBm65Q==", - "cpu": [ - "arm64" - ], - "dev": true, - "license": 
"MIT", - "optional": true, - "os": [ - "darwin" - ] - }, - "node_modules/@rollup/rollup-darwin-x64": { - "version": "4.35.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-x64/-/rollup-darwin-x64-4.35.0.tgz", - "integrity": "sha512-3IrHjfAS6Vkp+5bISNQnPogRAW5GAV1n+bNCrDwXmfMHbPl5EhTmWtfmwlJxFRUCBZ+tZ/OxDyU08aF6NI/N5Q==", - "cpu": [ - "x64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "darwin" - ] - }, - "node_modules/@rollup/rollup-freebsd-arm64": { - "version": "4.35.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-freebsd-arm64/-/rollup-freebsd-arm64-4.35.0.tgz", - "integrity": "sha512-sxjoD/6F9cDLSELuLNnY0fOrM9WA0KrM0vWm57XhrIMf5FGiN8D0l7fn+bpUeBSU7dCgPV2oX4zHAsAXyHFGcQ==", - "cpu": [ - "arm64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "freebsd" - ] - }, - "node_modules/@rollup/rollup-freebsd-x64": { - "version": "4.35.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-freebsd-x64/-/rollup-freebsd-x64-4.35.0.tgz", - "integrity": "sha512-2mpHCeRuD1u/2kruUiHSsnjWtHjqVbzhBkNVQ1aVD63CcexKVcQGwJ2g5VphOd84GvxfSvnnlEyBtQCE5hxVVw==", - "cpu": [ - "x64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "freebsd" - ] - }, - "node_modules/@rollup/rollup-linux-arm-gnueabihf": { - "version": "4.35.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-gnueabihf/-/rollup-linux-arm-gnueabihf-4.35.0.tgz", - "integrity": "sha512-mrA0v3QMy6ZSvEuLs0dMxcO2LnaCONs1Z73GUDBHWbY8tFFocM6yl7YyMu7rz4zS81NDSqhrUuolyZXGi8TEqg==", - "cpu": [ - "arm" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "linux" - ] - }, - "node_modules/@rollup/rollup-linux-arm-musleabihf": { - "version": "4.35.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-musleabihf/-/rollup-linux-arm-musleabihf-4.35.0.tgz", - "integrity": "sha512-DnYhhzcvTAKNexIql8pFajr0PiDGrIsBYPRvCKlA5ixSS3uwo/CWNZxB09jhIapEIg945KOzcYEAGGSmTSpk7A==", - "cpu": [ - "arm" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "linux" - ] - }, - "node_modules/@rollup/rollup-linux-arm64-gnu": { - "version": "4.35.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-gnu/-/rollup-linux-arm64-gnu-4.35.0.tgz", - "integrity": "sha512-uagpnH2M2g2b5iLsCTZ35CL1FgyuzzJQ8L9VtlJ+FckBXroTwNOaD0z0/UF+k5K3aNQjbm8LIVpxykUOQt1m/A==", - "cpu": [ - "arm64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "linux" - ] - }, - "node_modules/@rollup/rollup-linux-arm64-musl": { - "version": "4.35.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-musl/-/rollup-linux-arm64-musl-4.35.0.tgz", - "integrity": "sha512-XQxVOCd6VJeHQA/7YcqyV0/88N6ysSVzRjJ9I9UA/xXpEsjvAgDTgH3wQYz5bmr7SPtVK2TsP2fQ2N9L4ukoUg==", - "cpu": [ - "arm64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "linux" - ] - }, - "node_modules/@rollup/rollup-linux-loongarch64-gnu": { - "version": "4.35.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-loongarch64-gnu/-/rollup-linux-loongarch64-gnu-4.35.0.tgz", - "integrity": "sha512-5pMT5PzfgwcXEwOaSrqVsz/LvjDZt+vQ8RT/70yhPU06PTuq8WaHhfT1LW+cdD7mW6i/J5/XIkX/1tCAkh1W6g==", - "cpu": [ - "loong64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "linux" - ] - }, - "node_modules/@rollup/rollup-linux-powerpc64le-gnu": { - "version": "4.35.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-powerpc64le-gnu/-/rollup-linux-powerpc64le-gnu-4.35.0.tgz", - 
"integrity": "sha512-c+zkcvbhbXF98f4CtEIP1EBA/lCic5xB0lToneZYvMeKu5Kamq3O8gqrxiYYLzlZH6E3Aq+TSW86E4ay8iD8EA==", - "cpu": [ - "ppc64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "linux" - ] - }, - "node_modules/@rollup/rollup-linux-riscv64-gnu": { - "version": "4.35.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-gnu/-/rollup-linux-riscv64-gnu-4.35.0.tgz", - "integrity": "sha512-s91fuAHdOwH/Tad2tzTtPX7UZyytHIRR6V4+2IGlV0Cej5rkG0R61SX4l4y9sh0JBibMiploZx3oHKPnQBKe4g==", - "cpu": [ - "riscv64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "linux" - ] - }, - "node_modules/@rollup/rollup-linux-s390x-gnu": { - "version": "4.35.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-s390x-gnu/-/rollup-linux-s390x-gnu-4.35.0.tgz", - "integrity": "sha512-hQRkPQPLYJZYGP+Hj4fR9dDBMIM7zrzJDWFEMPdTnTy95Ljnv0/4w/ixFw3pTBMEuuEuoqtBINYND4M7ujcuQw==", - "cpu": [ - "s390x" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "linux" - ] - }, "node_modules/@rollup/rollup-linux-x64-gnu": { "version": "4.35.0", "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-gnu/-/rollup-linux-x64-gnu-4.35.0.tgz", @@ -1436,48 +828,6 @@ "linux" ] }, - "node_modules/@rollup/rollup-win32-arm64-msvc": { - "version": "4.35.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-arm64-msvc/-/rollup-win32-arm64-msvc-4.35.0.tgz", - "integrity": "sha512-OUOlGqPkVJCdJETKOCEf1mw848ZyJ5w50/rZ/3IBQVdLfR5jk/6Sr5m3iO2tdPgwo0x7VcncYuOvMhBWZq8ayg==", - "cpu": [ - "arm64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "win32" - ] - }, - "node_modules/@rollup/rollup-win32-ia32-msvc": { - "version": "4.35.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-ia32-msvc/-/rollup-win32-ia32-msvc-4.35.0.tgz", - "integrity": "sha512-2/lsgejMrtwQe44glq7AFFHLfJBPafpsTa6JvP2NGef/ifOa4KBoglVf7AKN7EV9o32evBPRqfg96fEHzWo5kw==", - "cpu": [ - "ia32" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "win32" - ] - }, - "node_modules/@rollup/rollup-win32-x64-msvc": { - "version": "4.35.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-x64-msvc/-/rollup-win32-x64-msvc-4.35.0.tgz", - "integrity": "sha512-PIQeY5XDkrOysbQblSW7v3l1MDZzkTEzAfTPkj5VAu3FW8fS4ynyLg2sINp0fp3SjZ8xkRYpLqoKcYqAkhU1dw==", - "cpu": [ - "x64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "win32" - ] - }, "node_modules/@swc/core": { "version": "1.11.8", "resolved": "https://registry.npmjs.org/@swc/core/-/core-1.11.8.tgz", @@ -1517,91 +867,6 @@ } } }, - "node_modules/@swc/core-darwin-arm64": { - "version": "1.11.8", - "resolved": "https://registry.npmjs.org/@swc/core-darwin-arm64/-/core-darwin-arm64-1.11.8.tgz", - "integrity": "sha512-rrSsunyJWpHN+5V1zumndwSSifmIeFQBK9i2RMQQp15PgbgUNxHK5qoET1n20pcUrmZeT6jmJaEWlQchkV//Og==", - "cpu": [ - "arm64" - ], - "dev": true, - "license": "Apache-2.0 AND MIT", - "optional": true, - "os": [ - "darwin" - ], - "engines": { - "node": ">=10" - } - }, - "node_modules/@swc/core-darwin-x64": { - "version": "1.11.8", - "resolved": "https://registry.npmjs.org/@swc/core-darwin-x64/-/core-darwin-x64-1.11.8.tgz", - "integrity": "sha512-44goLqQuuo0HgWnG8qC+ZFw/qnjCVVeqffhzFr9WAXXotogVaxM8ze6egE58VWrfEc8me8yCcxOYL9RbtjhS/Q==", - "cpu": [ - "x64" - ], - "dev": true, - "license": "Apache-2.0 AND MIT", - "optional": true, - "os": [ - "darwin" - ], - "engines": { - "node": ">=10" - } - }, - "node_modules/@swc/core-linux-arm-gnueabihf": { - 
"version": "1.11.8", - "resolved": "https://registry.npmjs.org/@swc/core-linux-arm-gnueabihf/-/core-linux-arm-gnueabihf-1.11.8.tgz", - "integrity": "sha512-Mzo8umKlhTWwF1v8SLuTM1z2A+P43UVhf4R8RZDhzIRBuB2NkeyE+c0gexIOJBuGSIATryuAF4O4luDu727D1w==", - "cpu": [ - "arm" - ], - "dev": true, - "license": "Apache-2.0", - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">=10" - } - }, - "node_modules/@swc/core-linux-arm64-gnu": { - "version": "1.11.8", - "resolved": "https://registry.npmjs.org/@swc/core-linux-arm64-gnu/-/core-linux-arm64-gnu-1.11.8.tgz", - "integrity": "sha512-EyhO6U+QdoGYC1MeHOR0pyaaSaKYyNuT4FQNZ1eZIbnuueXpuICC7iNmLIOfr3LE5bVWcZ7NKGVPlM2StJEcgA==", - "cpu": [ - "arm64" - ], - "dev": true, - "license": "Apache-2.0 AND MIT", - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">=10" - } - }, - "node_modules/@swc/core-linux-arm64-musl": { - "version": "1.11.8", - "resolved": "https://registry.npmjs.org/@swc/core-linux-arm64-musl/-/core-linux-arm64-musl-1.11.8.tgz", - "integrity": "sha512-QU6wOkZnS6/QuBN1MHD6G2BgFxB0AclvTVGbqYkRA7MsVkcC29PffESqzTXnypzB252/XkhQjoB2JIt9rPYf6A==", - "cpu": [ - "arm64" - ], - "dev": true, - "license": "Apache-2.0 AND MIT", - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">=10" - } - }, "node_modules/@swc/core-linux-x64-gnu": { "version": "1.11.8", "resolved": "https://registry.npmjs.org/@swc/core-linux-x64-gnu/-/core-linux-x64-gnu-1.11.8.tgz", @@ -1636,57 +901,6 @@ "node": ">=10" } }, - "node_modules/@swc/core-win32-arm64-msvc": { - "version": "1.11.8", - "resolved": "https://registry.npmjs.org/@swc/core-win32-arm64-msvc/-/core-win32-arm64-msvc-1.11.8.tgz", - "integrity": "sha512-EbjOzQ+B85rumHyeesBYxZ+hq3ZQn+YAAT1ZNE9xW1/8SuLoBmHy/K9YniRGVDq/2NRmp5kI5+5h5TX0asIS9A==", - "cpu": [ - "arm64" - ], - "dev": true, - "license": "Apache-2.0 AND MIT", - "optional": true, - "os": [ - "win32" - ], - "engines": { - "node": ">=10" - } - }, - "node_modules/@swc/core-win32-ia32-msvc": { - "version": "1.11.8", - "resolved": "https://registry.npmjs.org/@swc/core-win32-ia32-msvc/-/core-win32-ia32-msvc-1.11.8.tgz", - "integrity": "sha512-Z+FF5kgLHfQWIZ1KPdeInToXLzbY0sMAashjd/igKeP1Lz0qKXVAK+rpn6ASJi85Fn8wTftCGCyQUkRVn0bTDg==", - "cpu": [ - "ia32" - ], - "dev": true, - "license": "Apache-2.0 AND MIT", - "optional": true, - "os": [ - "win32" - ], - "engines": { - "node": ">=10" - } - }, - "node_modules/@swc/core-win32-x64-msvc": { - "version": "1.11.8", - "resolved": "https://registry.npmjs.org/@swc/core-win32-x64-msvc/-/core-win32-x64-msvc-1.11.8.tgz", - "integrity": "sha512-j6B6N0hChCeAISS6xp/hh6zR5CSCr037BAjCxNLsT8TGe5D+gYZ57heswUWXRH8eMKiRDGiLCYpPB2pkTqxCSw==", - "cpu": [ - "x64" - ], - "dev": true, - "license": "Apache-2.0 AND MIT", - "optional": true, - "os": [ - "win32" - ], - "engines": { - "node": ">=10" - } - }, "node_modules/@swc/counter": { "version": "0.1.3", "resolved": "https://registry.npmjs.org/@swc/counter/-/counter-0.1.3.tgz", @@ -1747,21 +961,12 @@ } }, "node_modules/@tanstack/react-table": { -<<<<<<< Updated upstream - "version": "8.21.2", - "resolved": "https://registry.npmjs.org/@tanstack/react-table/-/react-table-8.21.2.tgz", - "integrity": "sha512-11tNlEDTdIhMJba2RBH+ecJ9l1zgS2kjmexDPAraulc8jeNA4xocSNeyzextT0XJyASil4XsCYlJmf5jEWAtYg==", - "license": "MIT", - "dependencies": { - "@tanstack/table-core": "8.21.2" -======= "version": "8.21.3", "resolved": "https://registry.npmjs.org/@tanstack/react-table/-/react-table-8.21.3.tgz", "integrity": 
"sha512-5nNMTSETP4ykGegmVkhjcS8tTLW6Vl4axfEGQN3v0zdHYbK4UfoqfPChclTrJ4EoK9QynqAu9oUf8VEmrpZ5Ww==", "license": "MIT", "dependencies": { "@tanstack/table-core": "8.21.3" ->>>>>>> Stashed changes }, "engines": { "node": ">=12" @@ -1793,15 +998,9 @@ } }, "node_modules/@tanstack/table-core": { -<<<<<<< Updated upstream - "version": "8.21.2", - "resolved": "https://registry.npmjs.org/@tanstack/table-core/-/table-core-8.21.2.tgz", - "integrity": "sha512-uvXk/U4cBiFMxt+p9/G7yUWI/UbHYbyghLCjlpWZ3mLeIZiUBSKcUnw9UnKkdRz7Z/N4UBuFLWQdJCjUe7HjvA==", -======= "version": "8.21.3", "resolved": "https://registry.npmjs.org/@tanstack/table-core/-/table-core-8.21.3.tgz", "integrity": "sha512-ldZXEhOBb8Is7xLs01fR3YEc3DERiz5silj8tnGkFZytt1abEvl/GhUmCE0PMLaMPTa3Jk4HbKmRlHmu+gCftg==", ->>>>>>> Stashed changes "license": "MIT", "engines": { "node": ">=12" @@ -2972,21 +2171,6 @@ "react": ">=16.8.0" } }, - "node_modules/fsevents": { - "version": "2.3.3", - "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.3.tgz", - "integrity": "sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw==", - "dev": true, - "hasInstallScript": true, - "license": "MIT", - "optional": true, - "os": [ - "darwin" - ], - "engines": { - "node": "^8.16.0 || ^10.6.0 || >=11.0.0" - } - }, "node_modules/function-bind": { "version": "1.1.2", "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.2.tgz", diff --git a/frontend/package.json b/frontend/package.json index 76a72114..c0d6ced3 100644 --- a/frontend/package.json +++ b/frontend/package.json @@ -18,11 +18,7 @@ "@mantine/hooks": "^7.17.2", "@mantine/notifications": "^7.17.2", "@tabler/icons-react": "^3.31.0", -<<<<<<< Updated upstream "@tanstack/react-table": "^8.21.2", -======= - "@tanstack/react-table": "^8.21.3", ->>>>>>> Stashed changes "allotment": "^1.20.3", "axios": "^1.8.2", "clsx": "^2.1.1", diff --git a/frontend/src/components/tables/ChannelsTable.jsx b/frontend/src/components/tables/ChannelsTable.jsx index 69df12a1..b79aae85 100644 --- a/frontend/src/components/tables/ChannelsTable.jsx +++ b/frontend/src/components/tables/ChannelsTable.jsx @@ -32,6 +32,9 @@ import { EllipsisVertical, CircleEllipsis, CopyMinus, + ArrowUpNarrowWide, + ArrowUpDown, + ArrowDownWideNarrow, } from 'lucide-react'; import ghostImage from '../../images/ghost.svg'; import { @@ -66,144 +69,13 @@ import { flexRender, } from '@tanstack/react-table'; import './table.css'; - -const ChannelStreams = React.memo(({ channel, isExpanded }) => { - const channelStreams = useChannelsStore( - (state) => state.channels[channel.id]?.streams - ); - const { playlists } = usePlaylistsStore(); - - const removeStream = async (stream) => { - const newStreamList = channelStreams.filter((s) => s.id !== stream.id); - await API.updateChannel({ - ...channel, - stream_ids: newStreamList.map((s) => s.id), - }); - }; - - const channelStreamsTable = useReactTable({ - ...TableHelper.defaultProperties, - data: channelStreams, - columns: useMemo( - () => [ - { - size: 400, - header: 'Name', - accessorKey: 'name', - Cell: ({ cell }) => ( -
- {cell.getValue()} -
- ), - }, - { - size: 100, - header: 'M3U', - accessorFn: (row) => - playlists.find((playlist) => playlist.id === row.m3u_account)?.name, - Cell: ({ cell }) => ( -
- {cell.getValue()} -
- ), - }, - ], - [playlists] - ), - displayColumnDefOptions: { - 'mrt-row-actions': { - size: 10, - }, - }, - enableKeyboardShortcuts: false, - enableColumnFilters: false, - enableBottomToolbar: false, - enableTopToolbar: false, - enableTableHead: false, - columnFilterDisplayMode: 'popover', - enablePagination: false, - enableRowVirtualization: true, - enableColumnHeaders: false, - rowVirtualizerOptions: { overscan: 5 }, //optionally customize the row virtualizer - initialState: { - density: 'compact', - }, - enableRowActions: true, - enableRowOrdering: true, - mantineTableHeadRowProps: { - style: { display: 'none' }, - }, - mantineTableBodyCellProps: { - style: { - // py: 0, - padding: 4, - borderColor: '#444', - color: '#E0E0E0', - fontSize: '0.85rem', - }, - }, - mantineRowDragHandleProps: ({ table }) => ({ - onDragEnd: async () => { - const { draggingRow, hoveredRow } = table.getState(); - - if (hoveredRow && draggingRow) { - channelStreams.splice( - hoveredRow.index, - 0, - channelStreams.splice(draggingRow.index, 1)[0] - ); - - const { streams: _, ...channelUpdate } = channel; - - API.updateChannel({ - ...channelUpdate, - stream_ids: channelStreams.map((stream) => stream.id), - }); - } - }, - }), - renderRowActions: ({ row }) => ( - - removeStream(row.original)} - > - - - - ), - }); - - if (!isExpanded) { - return <>; - } - - return ( - - - - ); -}); +import useChannelsTableStore from '../../store/channelsTable'; const m3uUrlBase = `${window.location.protocol}//${window.location.host}/output/m3u`; const epgUrlBase = `${window.location.protocol}//${window.location.host}/output/epg`; const hdhrUrlBase = `${window.location.protocol}//${window.location.host}/hdhr`; -const CreateProfilePopover = React.memo(({}) => { +const CreateProfilePopover = React.memo(() => { const [opened, setOpened] = useState(false); const [name, setName] = useState(''); const theme = useMantineTheme(); @@ -382,18 +254,17 @@ const ChannelRowActions = React.memo( } ); -const ChannelsTable = React.memo(({}) => { - const { - channels, - isLoading: channelsLoading, - fetchChannels, - setChannelsPageSelection, - profiles, - selectedProfileId, - setSelectedProfileId, - channelsPageSelection, - channelGroups, - } = useChannelsStore(); +const ChannelsTable = ({}) => { + const profiles = useChannelsStore((s) => s.profiles); + const selectedProfileId = useChannelsStore((s) => s.selectedProfileId); + const setSelectedProfileId = useChannelsStore((s) => s.setSelectedProfileId); + const channelGroups = useChannelsStore((s) => s.channelGroups); + + const queryChannels = useChannelsTableStore((s) => s.queryChannels); + const requeryChannels = useChannelsTableStore((s) => s.requeryChannels); + const data = useChannelsTableStore((s) => s.channels); + const rowCount = useChannelsTableStore((s) => s.count); + const pageCount = useChannelsTableStore((s) => s.pageCount); const selectedProfileChannels = useChannelsStore( (s) => s.profiles[selectedProfileId]?.channels @@ -416,10 +287,6 @@ const ChannelsTable = React.memo(({}) => { const [selectedProfile, setSelectedProfile] = useState( profiles[selectedProfileId] ); - const [data, setData] = useState([]); // Holds fetched data - const [selectedRowIds, setSelectedRowIds] = useState([]); - const [rowCount, setRowCount] = useState(0); - const [pageCount, setPageCount] = useState(0); const [paginationString, setPaginationString] = useState(''); const [pagination, setPagination] = useState({ pageIndex: 0, @@ -435,14 +302,34 @@ const ChannelsTable = React.memo(({}) => { const 
[selectedChannelIds, setSelectedChannelIds] = useState([]); const [sorting, setSorting] = useState([ { id: 'channel_number', desc: false }, - { id: 'name', desc: false }, ]); const [hdhrUrl, setHDHRUrl] = useState(hdhrUrlBase); const [epgUrl, setEPGUrl] = useState(epgUrlBase); const [m3uUrl, setM3UUrl] = useState(m3uUrlBase); - const [textToCopy, setTextToCopy] = useState(''); + useEffect(() => { + const startItem = pagination.pageIndex * pagination.pageSize + 1; // +1 to start from 1, not 0 + const endItem = Math.min( + (pagination.pageIndex + 1) * pagination.pageSize, + rowCount + ); + + if (initialDataCount === null) { + setInitialDataCount(rowCount); + } + + // Generate the string + setPaginationString(`${startItem} to ${endItem} of ${rowCount}`); + }, [data]); + + useEffect(() => { + queryChannels({ pagination, sorting, filters }); + }, []); + + useEffect(() => { + queryChannels({ pagination, sorting, filters }); + }, [pagination, sorting, debouncedFilters]); // const theme = useTheme(); const theme = useMantineTheme(); @@ -508,67 +395,6 @@ const ChannelsTable = React.memo(({}) => { showVideo(vidUrl); } - const fetchData = useCallback(async () => { - setIsLoading(true); - - const params = new URLSearchParams(); - params.append('page', pagination.pageIndex + 1); - params.append('page_size', pagination.pageSize); - - // Apply sorting - if (sorting.length > 0) { - const sortField = sorting[0].id; - const sortDirection = sorting[0].desc ? '-' : ''; - params.append('ordering', `${sortDirection}${sortField}`); - } - - // Apply debounced filters - Object.entries(debouncedFilters).forEach(([key, value]) => { - if (value) params.append(key, value); - }); - - try { - const result = await API.queryChannels(params); - setData(result.results); - setRowCount(result.count); - setPageCount(Math.ceil(result.count / pagination.pageSize)); - - // Calculate the starting and ending item indexes - const startItem = pagination.pageIndex * pagination.pageSize + 1; // +1 to start from 1, not 0 - const endItem = Math.min( - (pagination.pageIndex + 1) * pagination.pageSize, - result.count - ); - - if (initialDataCount === null) { - setInitialDataCount(result.count); - } - - // Generate the string - setPaginationString(`${startItem} to ${endItem} of ${result.count}`); - - const newSelection = {}; - result.results.forEach((item, index) => { - if (selectedChannelIds.includes(item.id)) { - newSelection[index] = true; - } - }); - - // ✅ Only update rowSelection if it's different - if (JSON.stringify(newSelection) !== JSON.stringify(rowSelection)) { - setRowSelection(newSelection); - } - } catch (error) { - console.error('Error fetching data:', error); - } - - setIsLoading(false); - }, [pagination, sorting, debouncedFilters]); - - useEffect(() => { - fetchData(); - }, [fetchData]); - const onRowSelectionChange = (updater) => { setRowSelection((prevRowSelection) => { const newRowSelection = @@ -656,13 +482,13 @@ const ChannelsTable = React.memo(({}) => { }; return ; - }, [selectedChannelIds, selectedProfileChannelIds, fetchData]); + }, [selectedChannelIds, selectedProfileChannelIds, data]); // (Optional) bulk delete, but your endpoint is @TODO const deleteChannels = async () => { setIsLoading(true); await API.deleteChannels(selectedChannelIds); - fetchData(); + requeryChannels(); setIsLoading(false); }; @@ -687,7 +513,7 @@ const ChannelsTable = React.memo(({}) => { // Refresh the channel list // await fetchChannels(); - fetchData(); + requeryChannels(); } catch (err) { console.error(err); notifications.show({ @@ 
-762,6 +588,32 @@ const ChannelsTable = React.memo(({}) => { await API.deleteChannelProfile(id); }; + const onSortingChange = (column) => { + console.log(sorting); + const sortField = sorting[0]?.id; + const sortDirection = sorting[0]?.desc; + + if (sortField == column) { + if (sortDirection == false) { + setSorting([ + { + id: column, + desc: true, + }, + ]); + } else { + setSorting([]); + } + } else { + setSorting([ + { + id: column, + desc: false, + }, + ]); + } + }; + const renderProfileOption = ({ option, checked }) => { return ( @@ -859,7 +711,7 @@ const ChannelsTable = React.memo(({}) => { const value = getValue(); const src = value?.cache_url || logo; return ( -
+
logo { enableSorting: false, }, ], - [selectedProfileId] + [selectedProfileId, data] ); const { getHeaderGroups, getRowModel } = useReactTable({ @@ -899,6 +751,8 @@ const ChannelsTable = React.memo(({}) => { }, pageCount, state: { + data, + rowCount, sorting, filters, pagination, @@ -908,9 +762,6 @@ const ChannelsTable = React.memo(({}) => { manualSorting: true, manualFiltering: true, enableRowSelection: true, - // onPaginationChange: setPagination, - // onSortingChange: setSorting, - // onColumnFiltersChange: setFilters, onRowSelectionChange: onRowSelectionChange, getCoreRowModel: getCoreRowModel(), getFilteredRowModel: getFilteredRowModel(), @@ -922,6 +773,15 @@ const ChannelsTable = React.memo(({}) => { const rows = getRowModel().rows; const renderHeaderCell = (header) => { + let sortingIcon = ArrowUpDown; + if (sorting[0]?.id == header.id) { + if (sorting[0].desc === false) { + sortingIcon = ArrowUpNarrowWide; + } else { + sortingIcon = ArrowDownWideNarrow; + } + } + switch (header.id) { case 'select': return ChannelRowSelectHeader({ @@ -938,19 +798,39 @@ const ChannelsTable = React.memo(({}) => {
); + case 'channel_number': + return ( + + # + {/*
+ {React.createElement(sortingIcon, {
+ onClick: () => onSortingChange('channel_number'),
+ size: 14,
+ })} +
*/} +
+ ); + case 'name': return ( - e.stopPropagation()} - onChange={handleFilterChange} - size="xs" - variant="unstyled" - className="table-input-header" - style={{ width: '100%' }} - /> + + e.stopPropagation()} + onChange={handleFilterChange} + size="xs" + variant="unstyled" + className="table-input-header" + /> +
+ {React.createElement(sortingIcon, { + onClick: () => onSortingChange('name'), + size: 14, + })} +
+
); case 'channel_group': @@ -1006,7 +886,7 @@ const ChannelsTable = React.memo(({}) => {
); }, - [rows] + [rows, rowCount] ); return ( @@ -1320,8 +1200,7 @@ const ChannelsTable = React.memo(({}) => { style={{ display: 'flex', flexDirection: 'column', - height: '100%', - paddingBottom: '56px', + height: 'calc(100vh - 120px)', }} > { > { }} > { /> ); -}); +}; export default ChannelsTable; diff --git a/frontend/src/components/tables/StreamsTable.jsx b/frontend/src/components/tables/StreamsTable.jsx index bcc81a37..777d2a32 100644 --- a/frontend/src/components/tables/StreamsTable.jsx +++ b/frontend/src/components/tables/StreamsTable.jsx @@ -39,6 +39,7 @@ import { IconSquarePlus } from '@tabler/icons-react'; import { useNavigate } from 'react-router-dom'; import useSettingsStore from '../../store/settings'; import useVideoStore from '../../store/useVideoStore'; +import useChannelsTableStore from '../../store/channelsTable'; const StreamsTable = ({}) => { const theme = useMantineTheme(); @@ -77,11 +78,15 @@ const StreamsTable = ({}) => { * Stores */ const { playlists } = usePlaylistsStore(); - const { channelGroups, channelsPageSelection, fetchLogos } = - useChannelsStore(); + const channelGroups = useChannelsStore((s) => s.channelGroups); + const channelsPageSelection = useChannelsStore( + (s) => s.channelsPageSelection + ); + const fetchLogos = useChannelsStore((s) => s.fetchLogos); const channelSelectionStreams = useChannelsStore( (state) => state.channels[state.channelsPageSelection[0]?.id]?.streams ); + const requeryChannels = useChannelsTableStore((s) => s.requeryChannels); const { environment: { env_mode }, } = useSettingsStore(); @@ -293,6 +298,7 @@ const StreamsTable = ({}) => { stream_id, })) ); + requeryChannels(); fetchLogos(); setIsLoading(false); }; diff --git a/frontend/src/store/channels.jsx b/frontend/src/store/channels.jsx index 282ece4c..5e5049be 100644 --- a/frontend/src/store/channels.jsx +++ b/frontend/src/store/channels.jsx @@ -18,6 +18,11 @@ const useChannelsStore = create((set, get) => ({ recordings: [], isLoading: false, error: null, + forceUpdate: 0, + + triggerUpdate: () => { + set({ forecUpdate: new Date() }); + }, fetchChannels: async () => { set({ isLoading: true, error: null }); diff --git a/frontend/src/store/channelsTable b/frontend/src/store/channelsTable new file mode 100644 index 00000000..a5146363 --- /dev/null +++ b/frontend/src/store/channelsTable @@ -0,0 +1,59 @@ +import { create } from 'zustand'; +import api from '../api'; +import { notifications } from '@mantine/notifications'; +import API from '../api'; + +const defaultProfiles = { 0: { id: '0', name: 'All', channels: new Set() } }; + +const useChannelsTableStore = create((set, get) => ({ + channels: [], + count: 0, + pageCount: 0, + lastParams: new URLSearchParams(), + + requeryChannels: async () => { + const lastParams = get().lastParams; + console.log(lastParams); + const result = await API.queryChannels(lastParams); + const pageSize = parseInt(lastParams.get?.('page_size') || '25'); + + set({ + channels: result.results, + count: result.count, + pageCount: Math.ceil(result.count / pageSize), + }); + }, + + queryChannels: async ({ pagination, sorting, filters }) => { + const params = new URLSearchParams(); + params.append('page', pagination.pageIndex + 1); + params.append('page_size', pagination.pageSize); + + // Apply sorting + if (sorting.length > 0) { + const sortField = sorting[0].id; + const sortDirection = sorting[0].desc ? 
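// A leading '-' is DRF's descending-order convention, so this builds e.g.
// ordering=-channel_number for the backend. queryChannels also saves the
// finished params into lastParams (set below), which lets any mutation site
// refresh the list without knowing the table's page/sort/filter state:
//   useChannelsTableStore.getState().requeryChannels();
// (illustrative call site; getState() is the standard zustand accessor)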
'-' : ''; + params.append('ordering', `${sortDirection}${sortField}`); + } + + // Apply debounced filters + Object.entries(filters).forEach(([key, value]) => { + if (value) params.append(key, value); + }); + + try { + const result = await API.queryChannels(params); + + set((state) => ({ + channels: result.results, + count: result.count, + pageCount: Math.ceil(result.count / pagination.pageSize), + lastParams: params, + })); + } catch (error) { + console.error('Error fetching data:', error); + } + }, +})); + +export default useChannelsTableStore; From 641a543e78ef19f59abc4193e5de622164bd9be7 Mon Sep 17 00:00:00 2001 From: GitHub Actions Date: Fri, 18 Apr 2025 14:22:25 +0000 Subject: [PATCH 0007/1435] Increment build number to 1 [skip ci] --- version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/version.py b/version.py index 15dedace..b44e11da 100644 --- a/version.py +++ b/version.py @@ -2,4 +2,4 @@ Dispatcharr version information. """ __version__ = '0.3.3' # Follow semantic versioning (MAJOR.MINOR.PATCH) -__build__ = '0' # Auto-incremented on builds +__build__ = '1' # Auto-incremented on builds From 5e0f81522c62acf24198b376be222256bb3c2349 Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Fri, 18 Apr 2025 17:08:47 -0500 Subject: [PATCH 0008/1435] Enhance tuner count calculation to include only active M3U accounts --- apps/hdhr/api_views.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/apps/hdhr/api_views.py b/apps/hdhr/api_views.py index 7dcf9254..676c0fb9 100644 --- a/apps/hdhr/api_views.py +++ b/apps/hdhr/api_views.py @@ -51,8 +51,11 @@ class DiscoverAPIView(APIView): base_url = request.build_absolute_uri(f'/{"/".join(uri_parts)}/').rstrip('/') device = HDHRDevice.objects.first() - # Calculate tuner count from active profiles (excluding default "custom Default" profile) - profiles = M3UAccountProfile.objects.filter(is_active=True).exclude(id=1) + # Calculate tuner count from active profiles from active M3U accounts (excluding default "custom Default" profile) + profiles = M3UAccountProfile.objects.filter( + is_active=True, + m3u_account__is_active=True # Only include profiles from enabled M3U accounts + ).exclude(id=1) # 1. Check if any profile has unlimited streams (max_streams=0) has_unlimited = profiles.filter(max_streams=0).exists() From ca96921d24db3919825b207603e42548caa0e259 Mon Sep 17 00:00:00 2001 From: GitHub Actions Date: Fri, 18 Apr 2025 22:09:17 +0000 Subject: [PATCH 0009/1435] Increment build number to 2 [skip ci] --- version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/version.py b/version.py index b44e11da..ccb3ebea 100644 --- a/version.py +++ b/version.py @@ -2,4 +2,4 @@ Dispatcharr version information. 
""" __version__ = '0.3.3' # Follow semantic versioning (MAJOR.MINOR.PATCH) -__build__ = '1' # Auto-incremented on builds +__build__ = '2' # Auto-incremented on builds From ccdb8ab00d32972e0d300366aec73ff6b2942998 Mon Sep 17 00:00:00 2001 From: dekzter Date: Sat, 19 Apr 2025 08:37:43 -0400 Subject: [PATCH 0010/1435] more table bug fixes, query optimizations, re-added channel expansion stream table with reworked drag-and-drop --- apps/channels/api_views.py | 14 +- ...016_channelstream_unique_channel_stream.py | 38 +++ ..._channel_number_alter_channelgroup_name.py | 23 ++ apps/channels/models.py | 7 +- apps/channels/serializers.py | 58 ++-- frontend/package-lock.json | 71 +++++ frontend/package.json | 4 + frontend/src/api.js | 48 ++- frontend/src/components/forms/Channel.jsx | 26 +- .../components/tables/ChannelTableStreams.jsx | 252 +++++++++++++++ .../src/components/tables/ChannelsTable.jsx | 287 ++++++++++++------ .../ChannelsTable/ChannelsTableBody.jsx | 86 ------ .../tables/ChannelsTable/ChannelsTableRow.jsx | 61 ---- .../tables/CustomTable/CustomTable.jsx | 134 ++++++++ .../tables/CustomTable/CustomTableHeader.jsx | 171 +++++++++++ .../src/components/tables/StreamsTable.jsx | 24 +- frontend/src/components/tables/table.css | 2 +- frontend/src/pages/Guide.jsx | 36 ++- frontend/src/store/auth.jsx | 2 +- frontend/src/store/channels.jsx | 10 +- frontend/src/store/channelsTable | 55 +--- 21 files changed, 1054 insertions(+), 355 deletions(-) create mode 100644 apps/channels/migrations/0016_channelstream_unique_channel_stream.py create mode 100644 apps/channels/migrations/0017_alter_channel_channel_number_alter_channelgroup_name.py create mode 100644 frontend/src/components/tables/ChannelTableStreams.jsx delete mode 100644 frontend/src/components/tables/ChannelsTable/ChannelsTableBody.jsx delete mode 100644 frontend/src/components/tables/ChannelsTable/ChannelsTableRow.jsx create mode 100644 frontend/src/components/tables/CustomTable/CustomTable.jsx create mode 100644 frontend/src/components/tables/CustomTable/CustomTableHeader.jsx diff --git a/apps/channels/api_views.py b/apps/channels/api_views.py index b46384ad..b406fa07 100644 --- a/apps/channels/api_views.py +++ b/apps/channels/api_views.py @@ -127,6 +127,13 @@ class ChannelPagination(PageNumberPagination): page_size_query_param = 'page_size' # Allow clients to specify page size max_page_size = 10000 # Prevent excessive page sizes + + def paginate_queryset(self, queryset, request, view=None): + if not request.query_params.get(self.page_query_param): + return None # disables pagination, returns full queryset + + return super().paginate_queryset(queryset, request, view) + class ChannelFilter(django_filters.FilterSet): name = django_filters.CharFilter(lookup_expr='icontains') channel_group_name = OrInFilter(field_name="channel_group__name", lookup_expr="icontains") @@ -148,7 +155,12 @@ class ChannelViewSet(viewsets.ModelViewSet): ordering = ['-channel_number'] def get_queryset(self): - qs = super().get_queryset() + qs = super().get_queryset().select_related( + 'channel_group', + 'logo', + 'epg_data', + 'stream_profile', + ).prefetch_related('streams') channel_group = self.request.query_params.get('channel_group') if channel_group: diff --git a/apps/channels/migrations/0016_channelstream_unique_channel_stream.py b/apps/channels/migrations/0016_channelstream_unique_channel_stream.py new file mode 100644 index 00000000..5301530a --- /dev/null +++ b/apps/channels/migrations/0016_channelstream_unique_channel_stream.py @@ -0,0 +1,38 @@ +# Generated 
by Django 5.1.6 on 2025-04-18 16:21 + +from django.db import migrations, models +from django.db.models import Count + +def remove_duplicate_channel_streams(apps, schema_editor): + ChannelStream = apps.get_model('dispatcharr_channels', 'ChannelStream') + # Find duplicates by (channel, stream) + duplicates = ( + ChannelStream.objects + .values('channel', 'stream') + .annotate(count=Count('id')) + .filter(count__gt=1) + ) + + for dupe in duplicates: + # Get all duplicates for this pair + dups = ChannelStream.objects.filter( + channel=dupe['channel'], + stream=dupe['stream'] + ).order_by('id') + + # Keep the first one, delete the rest + dups.exclude(id=dups.first().id).delete() + +class Migration(migrations.Migration): + + dependencies = [ + ('dispatcharr_channels', '0015_recording_custom_properties'), + ] + + operations = [ + migrations.RunPython(remove_duplicate_channel_streams), + migrations.AddConstraint( + model_name='channelstream', + constraint=models.UniqueConstraint(fields=('channel', 'stream'), name='unique_channel_stream'), + ), + ] diff --git a/apps/channels/migrations/0017_alter_channel_channel_number_alter_channelgroup_name.py b/apps/channels/migrations/0017_alter_channel_channel_number_alter_channelgroup_name.py new file mode 100644 index 00000000..1bb7d2e7 --- /dev/null +++ b/apps/channels/migrations/0017_alter_channel_channel_number_alter_channelgroup_name.py @@ -0,0 +1,23 @@ +# Generated by Django 5.1.6 on 2025-04-19 12:08 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('dispatcharr_channels', '0016_channelstream_unique_channel_stream'), + ] + + operations = [ + migrations.AlterField( + model_name='channel', + name='channel_number', + field=models.IntegerField(db_index=True), + ), + migrations.AlterField( + model_name='channelgroup', + name='name', + field=models.CharField(db_index=True, max_length=100, unique=True), + ), + ] diff --git a/apps/channels/models.py b/apps/channels/models.py index deb66ae1..249343e9 100644 --- a/apps/channels/models.py +++ b/apps/channels/models.py @@ -27,7 +27,7 @@ def get_total_viewers(channel_id): return 0 class ChannelGroup(models.Model): - name = models.CharField(max_length=100, unique=True) + name = models.CharField(max_length=100, unique=True, db_index=True) def related_channels(self): # local import if needed to avoid cyc. 
Usually fine in a single file though @@ -210,7 +210,7 @@ class ChannelManager(models.Manager): class Channel(models.Model): - channel_number = models.IntegerField() + channel_number = models.IntegerField(db_index=True) name = models.CharField(max_length=255) logo = models.ForeignKey( 'Logo', @@ -426,6 +426,9 @@ class ChannelStream(models.Model): class Meta: ordering = ['order'] # Ensure streams are retrieved in order + constraints = [ + models.UniqueConstraint(fields=['channel', 'stream'], name='unique_channel_stream') + ] class ChannelGroupM3UAccount(models.Model): channel_group = models.ForeignKey( diff --git a/apps/channels/serializers.py b/apps/channels/serializers.py index 505020ff..67386d4e 100644 --- a/apps/channels/serializers.py +++ b/apps/channels/serializers.py @@ -115,18 +115,14 @@ class BulkChannelProfileMembershipSerializer(serializers.Serializer): class ChannelSerializer(serializers.ModelSerializer): # Show nested group data, or ID channel_number = serializers.IntegerField(allow_null=True, required=False) - channel_group = ChannelGroupSerializer(read_only=True) channel_group_id = serializers.PrimaryKeyRelatedField( queryset=ChannelGroup.objects.all(), source="channel_group", - write_only=True, required=False ) - epg_data = EPGDataSerializer(read_only=True) epg_data_id = serializers.PrimaryKeyRelatedField( queryset=EPGData.objects.all(), source="epg_data", - write_only=True, required=False, allow_null=True, ) @@ -143,13 +139,11 @@ class ChannelSerializer(serializers.ModelSerializer): queryset=Stream.objects.all(), many=True, write_only=True, required=False ) - logo = LogoSerializer(read_only=True) logo_id = serializers.PrimaryKeyRelatedField( queryset=Logo.objects.all(), source='logo', allow_null=True, required=False, - write_only=True, ) class Meta: @@ -158,16 +152,13 @@ class ChannelSerializer(serializers.ModelSerializer): 'id', 'channel_number', 'name', - 'channel_group', 'channel_group_id', 'tvg_id', - 'epg_data', 'epg_data_id', 'streams', 'stream_ids', 'stream_profile_id', 'uuid', - 'logo', 'logo_id', ] @@ -194,34 +185,53 @@ class ChannelSerializer(serializers.ModelSerializer): ChannelStream.objects.create(channel=channel, stream_id=stream_id, order=index) return channel - def update(self, instance, validated_data): stream_ids = validated_data.pop('stream_ids', None) - # Update all fields from validated_data + # Update standard fields for attr, value in validated_data.items(): setattr(instance, attr, value) instance.save() - # Handle streams if provided if stream_ids is not None: - # Clear existing associations - instance.channelstream_set.all().delete() + # Normalize stream IDs + normalized_ids = [ + stream.id if hasattr(stream, "id") else stream + for stream in stream_ids + ] - # Create new associations with proper ordering - for index, stream in enumerate(stream_ids): - # Extract the ID from the Stream object - actual_stream_id = stream.id if hasattr(stream, "id") else stream - print(f'Setting stream {actual_stream_id} to index {index}') - ChannelStream.objects.create( - channel=instance, - stream_id=actual_stream_id, - order=index - ) + # Get current mapping of stream_id -> ChannelStream + current_links = { + cs.stream_id: cs for cs in instance.channelstream_set.all() + } + + # Track existing stream IDs + existing_ids = set(current_links.keys()) + new_ids = set(normalized_ids) + + # Delete any links not in the new list + to_remove = existing_ids - new_ids + if to_remove: + instance.channelstream_set.filter(stream_id__in=to_remove).delete() + + # Update or create with 
new order + for order, stream_id in enumerate(normalized_ids): + if stream_id in current_links: + cs = current_links[stream_id] + if cs.order != order: + cs.order = order + cs.save(update_fields=["order"]) + else: + ChannelStream.objects.create( + channel=instance, + stream_id=stream_id, + order=order + ) return instance + def validate_stream_profile(self, value): """Handle special case where empty/0 values mean 'use default' (null)""" if value == '0' or value == 0 or value == '' or value is None: diff --git a/frontend/package-lock.json b/frontend/package-lock.json index 5184cceb..f1caa9f6 100644 --- a/frontend/package-lock.json +++ b/frontend/package-lock.json @@ -8,6 +8,10 @@ "name": "vite", "version": "0.0.0", "dependencies": { + "@dnd-kit/core": "^6.3.1", + "@dnd-kit/modifiers": "^9.0.0", + "@dnd-kit/sortable": "^10.0.0", + "@dnd-kit/utilities": "^3.2.2", "@mantine/charts": "^7.17.2", "@mantine/core": "^7.17.2", "@mantine/dates": "^7.17.2", @@ -193,6 +197,73 @@ "node": ">=6.9.0" } }, + "node_modules/@dnd-kit/accessibility": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/@dnd-kit/accessibility/-/accessibility-3.1.1.tgz", + "integrity": "sha512-2P+YgaXF+gRsIihwwY1gCsQSYnu9Zyj2py8kY5fFvUM1qm2WA2u639R6YNVfU4GWr+ZM5mqEsfHZZLoRONbemw==", + "license": "MIT", + "dependencies": { + "tslib": "^2.0.0" + }, + "peerDependencies": { + "react": ">=16.8.0" + } + }, + "node_modules/@dnd-kit/core": { + "version": "6.3.1", + "resolved": "https://registry.npmjs.org/@dnd-kit/core/-/core-6.3.1.tgz", + "integrity": "sha512-xkGBRQQab4RLwgXxoqETICr6S5JlogafbhNsidmrkVv2YRs5MLwpjoF2qpiGjQt8S9AoxtIV603s0GIUpY5eYQ==", + "license": "MIT", + "dependencies": { + "@dnd-kit/accessibility": "^3.1.1", + "@dnd-kit/utilities": "^3.2.2", + "tslib": "^2.0.0" + }, + "peerDependencies": { + "react": ">=16.8.0", + "react-dom": ">=16.8.0" + } + }, + "node_modules/@dnd-kit/modifiers": { + "version": "9.0.0", + "resolved": "https://registry.npmjs.org/@dnd-kit/modifiers/-/modifiers-9.0.0.tgz", + "integrity": "sha512-ybiLc66qRGuZoC20wdSSG6pDXFikui/dCNGthxv4Ndy8ylErY0N3KVxY2bgo7AWwIbxDmXDg3ylAFmnrjcbVvw==", + "license": "MIT", + "dependencies": { + "@dnd-kit/utilities": "^3.2.2", + "tslib": "^2.0.0" + }, + "peerDependencies": { + "@dnd-kit/core": "^6.3.0", + "react": ">=16.8.0" + } + }, + "node_modules/@dnd-kit/sortable": { + "version": "10.0.0", + "resolved": "https://registry.npmjs.org/@dnd-kit/sortable/-/sortable-10.0.0.tgz", + "integrity": "sha512-+xqhmIIzvAYMGfBYYnbKuNicfSsk4RksY2XdmJhT+HAC01nix6fHCztU68jooFiMUB01Ky3F0FyOvhG/BZrWkg==", + "license": "MIT", + "dependencies": { + "@dnd-kit/utilities": "^3.2.2", + "tslib": "^2.0.0" + }, + "peerDependencies": { + "@dnd-kit/core": "^6.3.0", + "react": ">=16.8.0" + } + }, + "node_modules/@dnd-kit/utilities": { + "version": "3.2.2", + "resolved": "https://registry.npmjs.org/@dnd-kit/utilities/-/utilities-3.2.2.tgz", + "integrity": "sha512-+MKAJEOfaBe5SmV6t34p80MMKhjvUz0vRrvVJbPT0WElzaOJ/1xs+D+KDv+tD/NE5ujfrChEcshd4fLn0wpiqg==", + "license": "MIT", + "dependencies": { + "tslib": "^2.0.0" + }, + "peerDependencies": { + "react": ">=16.8.0" + } + }, "node_modules/@emotion/babel-plugin": { "version": "11.13.5", "resolved": "https://registry.npmjs.org/@emotion/babel-plugin/-/babel-plugin-11.13.5.tgz", diff --git a/frontend/package.json b/frontend/package.json index c0d6ced3..7af7ff89 100644 --- a/frontend/package.json +++ b/frontend/package.json @@ -10,6 +10,10 @@ "preview": "vite preview" }, "dependencies": { + "@dnd-kit/core": "^6.3.1", + "@dnd-kit/modifiers": "^9.0.0", + 
"@dnd-kit/sortable": "^10.0.0", + "@dnd-kit/utilities": "^3.2.2", "@mantine/charts": "^7.17.2", "@mantine/core": "^7.17.2", "@mantine/dates": "^7.17.2", diff --git a/frontend/src/api.js b/frontend/src/api.js index 7e025d6f..f30d43dc 100644 --- a/frontend/src/api.js +++ b/frontend/src/api.js @@ -8,6 +8,7 @@ import useStreamsStore from './store/streams'; import useStreamProfilesStore from './store/streamProfiles'; import useSettingsStore from './store/settings'; import { notifications } from '@mantine/notifications'; +import useChannelsTableStore from './store/channelsTable'; // If needed, you can set a base host or keep it empty if relative requests const host = import.meta.env.DEV @@ -91,6 +92,8 @@ const request = async (url, options = {}) => { }; export default class API { + static lastQueryParams = new URLSearchParams(); + /** * A static method so we can do: await API.getAuthToken() */ @@ -172,10 +175,30 @@ export default class API { static async queryChannels(params) { try { + API.lastQueryParams = params; + const response = await request( `${host}/api/channels/channels/?${params.toString()}` ); + useChannelsTableStore.getState().queryChannels(response, params); + + return response; + } catch (e) { + errorNotification('Failed to fetch channels', e); + } + } + + static async requeryChannels() { + try { + const response = await request( + `${host}/api/channels/channels/?${API.lastQueryParams.toString()}` + ); + + useChannelsTableStore + .getState() + .queryChannels(response, API.lastQueryParams); + return response; } catch (e) { errorNotification('Failed to fetch channels', e); @@ -258,6 +281,8 @@ export default class API { body: body, }); + API.getLogos(); + if (response.id) { useChannelsStore.getState().addChannel(response); } @@ -300,7 +325,10 @@ export default class API { const payload = { ...values }; // Handle special values - if (payload.stream_profile_id === '0' || payload.stream_profile_id === 0) { + if ( + payload.stream_profile_id === '0' || + payload.stream_profile_id === 0 + ) { payload.stream_profile_id = null; } @@ -312,15 +340,21 @@ export default class API { // Handle channel_number properly if (payload.channel_number === '') { payload.channel_number = null; - } else if (payload.channel_number !== null && payload.channel_number !== undefined) { + } else if ( + payload.channel_number !== null && + payload.channel_number !== undefined + ) { const parsedNumber = parseInt(payload.channel_number, 10); payload.channel_number = isNaN(parsedNumber) ? 
null : parsedNumber; } - const response = await request(`${host}/api/channels/channels/${payload.id}/`, { - method: 'PATCH', - body: payload, - }); + const response = await request( + `${host}/api/channels/channels/${payload.id}/`, + { + method: 'PATCH', + body: payload, + } + ); useChannelsStore.getState().updateChannel(response); return response; @@ -349,7 +383,7 @@ export default class API { notifications.show({ title: 'EPG Status', message: response.task_status, - color: 'blue' + color: 'blue', }); } diff --git a/frontend/src/components/forms/Channel.jsx b/frontend/src/components/forms/Channel.jsx index 36efafb7..696143a2 100644 --- a/frontend/src/components/forms/Channel.jsx +++ b/frontend/src/components/forms/Channel.jsx @@ -102,7 +102,10 @@ const Channel = ({ channel = null, isOpen, onClose }) => { const formattedValues = { ...values }; // Convert empty or "0" stream_profile_id to null for the API - if (!formattedValues.stream_profile_id || formattedValues.stream_profile_id === '0') { + if ( + !formattedValues.stream_profile_id || + formattedValues.stream_profile_id === '0' + ) { formattedValues.stream_profile_id = null; } @@ -111,9 +114,12 @@ const Channel = ({ channel = null, isOpen, onClose }) => { if (channel) { // If there's an EPG to set, use our enhanced endpoint - if (values.epg_data_id !== (channel.epg_data ? `${channel.epg_data.id}` : '')) { + if (values.epg_data_id !== (channel.epg_data_id ?? '')) { // Use the special endpoint to set EPG and trigger refresh - const epgResponse = await API.setChannelEPG(channel.id, values.epg_data_id); + const epgResponse = await API.setChannelEPG( + channel.id, + values.epg_data_id + ); // Remove epg_data_id from values since we've handled it separately const { epg_data_id, ...otherValues } = formattedValues; @@ -142,7 +148,7 @@ const Channel = ({ channel = null, isOpen, onClose }) => { }); } } catch (error) { - console.error("Error saving channel:", error); + console.error('Error saving channel:', error); } setSubmitting(false); @@ -154,8 +160,8 @@ const Channel = ({ channel = null, isOpen, onClose }) => { useEffect(() => { if (channel) { - if (channel.epg_data) { - const epgSource = epgs[channel.epg_data.epg_source]; + if (channel.epg_data_id) { + const epgSource = epgs[tvgsById[channel.epg_data_id].epg_source]; setSelectedEPG(`${epgSource.id}`); } @@ -167,8 +173,8 @@ const Channel = ({ channel = null, isOpen, onClose }) => { ? `${channel.stream_profile_id}` : '0', tvg_id: channel.tvg_id, - epg_data_id: channel.epg_data ? `${channel.epg_data?.id}` : '', - logo_id: `${channel.logo?.id}`, + epg_data_id: channel.epg_data_id ?? '', + logo_id: `${channel.logo_id}`, }); setChannelStreams(channel.streams); @@ -535,7 +541,9 @@ const Channel = ({ channel = null, isOpen, onClose }) => { name="channel_number" label="Channel # (blank to auto-assign)" value={formik.values.channel_number} - onChange={(value) => formik.setFieldValue('channel_number', value)} + onChange={(value) => + formik.setFieldValue('channel_number', value) + } error={ formik.errors.channel_number ? 
formik.touched.channel_number diff --git a/frontend/src/components/tables/ChannelTableStreams.jsx b/frontend/src/components/tables/ChannelTableStreams.jsx new file mode 100644 index 00000000..8c29c228 --- /dev/null +++ b/frontend/src/components/tables/ChannelTableStreams.jsx @@ -0,0 +1,252 @@ +import React, { useMemo, useState, useEffect } from 'react'; +import API from '../../api'; +import { GripHorizontal, SquareMinus } from 'lucide-react'; +import { + Box, + ActionIcon, + Flex, + Text, + useMantineTheme, + Center, +} from '@mantine/core'; +import { + useReactTable, + getCoreRowModel, + flexRender, +} from '@tanstack/react-table'; +import './table.css'; +import useChannelsTableStore from '../../store/channelsTable'; +import usePlaylistsStore from '../../store/playlists'; +import { + DndContext, + KeyboardSensor, + MouseSensor, + TouchSensor, + closestCenter, + useSensor, + useSensors, +} from '@dnd-kit/core'; +import { restrictToVerticalAxis } from '@dnd-kit/modifiers'; +import { + arrayMove, + SortableContext, + verticalListSortingStrategy, +} from '@dnd-kit/sortable'; +import { useSortable } from '@dnd-kit/sortable'; +import { CSS } from '@dnd-kit/utilities'; +import { shallow } from 'zustand/shallow'; + +// Cell Component +const RowDragHandleCell = ({ rowId }) => { + const { attributes, listeners } = useSortable({ + id: rowId, + }); + return ( + // Alternatively, you could set these attributes on the rows themselves + + + + ); +}; + +// Row Component +const DraggableRow = ({ row }) => { + const { transform, transition, setNodeRef, isDragging } = useSortable({ + id: row.original.id, + }); + + const style = { + transform: CSS.Transform.toString(transform), //let dnd-kit do its thing + transition: transition, + opacity: isDragging ? 0.8 : 1, + zIndex: isDragging ? 1 : 0, + position: 'relative', + }; + return ( + + {row.getVisibleCells().map((cell) => { + return ( + + + + {flexRender(cell.column.columnDef.cell, cell.getContext())} + + + + ); + })} + + ); +}; + +const ChannelStreams = ({ channel, isExpanded }) => { + const theme = useMantineTheme(); + + const channelStreams = useChannelsTableStore( + (state) => state.getChannelStreams(channel.id), + shallow + ); + + useEffect(() => { + setData(channelStreams); + }, [channelStreams]); + + const [data, setData] = useState(channelStreams || []); + + const dataIds = data?.map(({ id }) => id); + + const { playlists } = usePlaylistsStore(); + + const removeStream = async (stream) => { + const newStreamList = channelStreams.filter((s) => s.id !== stream.id); + await API.updateChannel({ + ...channel, + stream_ids: newStreamList.map((s) => s.id), + }); + await API.requeryChannels(); + }; + + const table = useReactTable({ + columns: useMemo( + () => [ + { + id: 'drag-handle', + header: 'Move', + cell: ({ row }) => , + size: 30, + }, + { + id: 'name', + header: 'Name', + accessorKey: 'name', + }, + { + id: 'm3u', + header: 'M3U', + accessorFn: (row) => + playlists.find((playlist) => playlist.id === row.m3u_account)?.name, + }, + { + id: 'actions', + header: '', + size: 30, + cell: ({ row }) => ( +
+ + removeStream(row.original)} + /> + +
+ ), + }, + ], + [playlists] + ), + data: data, + state: { + data, + }, + defaultColumn: { + size: undefined, + minSize: 0, + }, + manualPagination: true, + manualSorting: true, + manualFiltering: true, + enableRowSelection: true, + getRowId: (row) => row.id, + getCoreRowModel: getCoreRowModel(), + // getFilteredRowModel: getFilteredRowModel(), + // getSortedRowModel: getSortedRowModel(), + // getPaginationRowModel: getPaginationRowModel(), + }); + + function handleDragEnd(event) { + const { active, over } = event; + if (active && over && active.id !== over.id) { + setData((data) => { + const oldIndex = dataIds.indexOf(active.id); + const newIndex = dataIds.indexOf(over.id); + const retval = arrayMove(data, oldIndex, newIndex); + + const { streams: _, ...channelUpdate } = channel; + API.updateChannel({ + ...channelUpdate, + stream_ids: retval.map((row) => row.id), + }).then(() => { + API.requeryChannels(); + }); + + return retval; //this is just a splice util + }); + } + } + + const sensors = useSensors( + useSensor(MouseSensor, {}), + useSensor(TouchSensor, {}), + useSensor(KeyboardSensor, {}) + ); + + if (!isExpanded) { + return <>; + } + + return ( + + + {' '} + + + + {table.getRowModel().rows.map((row) => ( + + ))} + + + + + + ); +}; + +export default ChannelStreams; diff --git a/frontend/src/components/tables/ChannelsTable.jsx b/frontend/src/components/tables/ChannelsTable.jsx index b79aae85..c6272ef1 100644 --- a/frontend/src/components/tables/ChannelsTable.jsx +++ b/frontend/src/components/tables/ChannelsTable.jsx @@ -15,7 +15,6 @@ import { useDebounce } from '../../utils'; import logo from '../../images/logo.png'; import useVideoStore from '../../store/useVideoStore'; import useSettingsStore from '../../store/settings'; -import usePlaylistsStore from '../../store/playlists'; import { Tv2, ScreenShare, @@ -35,6 +34,8 @@ import { ArrowUpNarrowWide, ArrowUpDown, ArrowDownWideNarrow, + ChevronDown, + ChevronRight, } from 'lucide-react'; import ghostImage from '../../images/ghost.svg'; import { @@ -70,6 +71,26 @@ import { } from '@tanstack/react-table'; import './table.css'; import useChannelsTableStore from '../../store/channelsTable'; +import usePlaylistsStore from '../../store/playlists'; +import { MantineReactTable, useMantineReactTable } from 'mantine-react-table'; +import { + DndContext, + KeyboardSensor, + MouseSensor, + TouchSensor, + closestCenter, + useSensor, + useSensors, +} from '@dnd-kit/core'; +import { restrictToVerticalAxis } from '@dnd-kit/modifiers'; +import { + arrayMove, + SortableContext, + verticalListSortingStrategy, +} from '@dnd-kit/sortable'; +import { useSortable } from '@dnd-kit/sortable'; +import { CSS } from '@dnd-kit/utilities'; +import ChannelTableStreams from './ChannelTableStreams'; const m3uUrlBase = `${window.location.protocol}//${window.location.host}/output/m3u`; const epgUrlBase = `${window.location.protocol}//${window.location.host}/output/epg`; @@ -189,38 +210,32 @@ const ChannelRowActions = React.memo( return (
- - - - - + + + - - - - - + + + - - - - - + + + @@ -255,16 +270,17 @@ const ChannelRowActions = React.memo( ); const ChannelsTable = ({}) => { + const data = useChannelsTableStore((s) => s.channels); + const rowCount = useChannelsTableStore((s) => s.count); + const pageCount = useChannelsTableStore((s) => s.pageCount); + const setSelectedTableIds = useChannelsTableStore( + (s) => s.setSelectedChannelIds + ); const profiles = useChannelsStore((s) => s.profiles); const selectedProfileId = useChannelsStore((s) => s.selectedProfileId); const setSelectedProfileId = useChannelsStore((s) => s.setSelectedProfileId); const channelGroups = useChannelsStore((s) => s.channelGroups); - - const queryChannels = useChannelsTableStore((s) => s.queryChannels); - const requeryChannels = useChannelsTableStore((s) => s.requeryChannels); - const data = useChannelsTableStore((s) => s.channels); - const rowCount = useChannelsTableStore((s) => s.count); - const pageCount = useChannelsTableStore((s) => s.pageCount); + const logos = useChannelsStore((s) => s.logos); const selectedProfileChannels = useChannelsStore( (s) => s.profiles[selectedProfileId]?.channels @@ -303,34 +319,49 @@ const ChannelsTable = ({}) => { const [sorting, setSorting] = useState([ { id: 'channel_number', desc: false }, ]); + const [expandedRowId, setExpandedRowId] = useState(null); const [hdhrUrl, setHDHRUrl] = useState(hdhrUrlBase); const [epgUrl, setEPGUrl] = useState(epgUrlBase); const [m3uUrl, setM3UUrl] = useState(m3uUrlBase); - useEffect(() => { + const fetchData = useCallback(async () => { + const params = new URLSearchParams(); + params.append('page', pagination.pageIndex + 1); + params.append('page_size', pagination.pageSize); + + // Apply sorting + if (sorting.length > 0) { + const sortField = sorting[0].id; + const sortDirection = sorting[0].desc ? 
'-' : ''; + params.append('ordering', `${sortDirection}${sortField}`); + } + + // Apply debounced filters + Object.entries(filters).forEach(([key, value]) => { + if (value) params.append(key, value); + }); + + const results = await API.queryChannels(params); + const startItem = pagination.pageIndex * pagination.pageSize + 1; // +1 to start from 1, not 0 const endItem = Math.min( (pagination.pageIndex + 1) * pagination.pageSize, - rowCount + results.count ); if (initialDataCount === null) { - setInitialDataCount(rowCount); + setInitialDataCount(results.count); } // Generate the string - setPaginationString(`${startItem} to ${endItem} of ${rowCount}`); - }, [data]); - - useEffect(() => { - queryChannels({ pagination, sorting, filters }); - }, []); - - useEffect(() => { - queryChannels({ pagination, sorting, filters }); + setPaginationString(`${startItem} to ${endItem} of ${results.count}`); }, [pagination, sorting, debouncedFilters]); + useEffect(() => { + fetchData(); + }, [fetchData]); + // const theme = useTheme(); const theme = useMantineTheme(); @@ -408,7 +439,9 @@ const ChannelsTable = ({}) => { updatedSelected.add(row.original.id); } }); - setSelectedChannelIds([...updatedSelected]); + const newSelection = [...updatedSelected]; + setSelectedChannelIds(newSelection); + setSelectedTableIds(newSelection); return newRowSelection; }); @@ -423,8 +456,10 @@ const ChannelsTable = ({}) => { if (value) params.append(key, value); }); const ids = await API.getAllChannelIds(params); + setSelectedTableIds(ids); setSelectedChannelIds(ids); } else { + setSelectedTableIds([]); setSelectedChannelIds([]); } @@ -488,7 +523,9 @@ const ChannelsTable = ({}) => { const deleteChannels = async () => { setIsLoading(true); await API.deleteChannels(selectedChannelIds); - requeryChannels(); + await API.requeryChannels(); + setSelectedChannelIds([]); + setRowSelection([]); setIsLoading(false); }; @@ -513,7 +550,7 @@ const ChannelsTable = ({}) => { // Refresh the channel list // await fetchChannels(); - requeryChannels(); + API.requeryChannels(); } catch (err) { console.error(err); notifications.show({ @@ -589,7 +626,6 @@ const ChannelsTable = ({}) => { }; const onSortingChange = (column) => { - console.log(sorting); const sortField = sorting[0]?.id; const sortDirection = sorting[0]?.desc; @@ -637,6 +673,12 @@ const ChannelsTable = ({}) => { const columns = useMemo( () => [ + { + id: 'expand', + size: 20, + enableSorting: false, + enableColumnFilter: false, + }, { id: 'select', size: 30, @@ -704,7 +746,8 @@ const ChannelsTable = ({}) => { ), }, { - accessorKey: 'logo', + id: 'logo', + accessorFn: (row) => logos[row.logo_id] ?? logo, size: 75, header: '', cell: ({ getValue }) => { @@ -772,6 +815,17 @@ const ChannelsTable = ({}) => { const rows = getRowModel().rows; + const onRowExpansion = (row) => { + let isExpanded = false; + setExpandedRowId((prev) => { + isExpanded = prev === row.original.id ? null : row.original.id; + return isExpanded; + }); + setRowSelection({ [row.index]: true }); + setSelectedChannelIds([row.original.id]); + setSelectedTableIds([row.original.id]); + }; + const renderHeaderCell = (header) => { let sortingIcon = ArrowUpDown; if (sorting[0]?.id == header.id) { @@ -802,12 +856,12 @@ const ChannelsTable = ({}) => { return ( # - {/*
+          <Center>
            {React.createElement(sortingIcon, {
-              onClick: () => onSortingChange('name'),
+              onClick: () => onSortingChange('channel_number'),
               size: 14,
             })}
-          </Center> */}
+          </Center>
); @@ -853,6 +907,37 @@ const ChannelsTable = ({}) => { } }; + const renderBodyCell = (cell) => { + switch (cell.column.id) { + case 'select': + return ChannelRowSelectCell({ row: cell.row }); + + case 'expand': + return ChannelExpandCell({ row: cell.row }); + + default: + return flexRender(cell.column.columnDef.cell, cell.getContext()); + } + }; + + const ChannelExpandCell = useCallback( + ({ row }) => { + const isExpanded = expandedRowId === row.original.id; + + return ( +
{ + onRowExpansion(row); + }} + > + {isExpanded ? : } +
+ ); + }, + [expandedRowId] + ); + const ChannelRowSelectCell = useCallback( ({ row }) => { return ( @@ -1031,7 +1116,7 @@ const ChannelsTable = ({}) => { style={{ display: 'flex', flexDirection: 'column', - height: 'calc(100vh - 60px)', + height: 'calc(100vh - 58px)', backgroundColor: '#27272A', }} > @@ -1200,7 +1285,7 @@ const ChannelsTable = ({}) => { style={{ display: 'flex', flexDirection: 'column', - height: 'calc(100vh - 120px)', + height: 'calc(100vh - 110px)', }} > { {getRowModel().rows.map((row) => ( - - {row.getVisibleCells().map((cell) => { - const width = cell.column.getSize(); - return ( - - - {cell.column.id === 'select' - ? ChannelRowSelectCell({ row: cell.row }) - : flexRender( - cell.column.columnDef.cell, - cell.getContext() - )} - - - ); - })} + + + {row.getVisibleCells().map((cell) => { + const width = cell.column.getSize(); + return ( + + + {renderBodyCell(cell)} + + + ); + })} + + {row.original.id === expandedRowId && ( + + + + )} ))} @@ -1318,7 +1418,10 @@ const ChannelsTable = ({}) => { Page Size { - const rowHeight = 48; - - // return ( - // - // - // {({ height }) => ( - // - // {({ index, style }) => { - // const row = rows[index]; - // return ( - // - // - // {row.getIsExpanded() && } - // - // ); - // }} - // - // )} - // - // - // ); - - return ( - - {virtualizedItems.map((virtualRow, index) => { - const row = rows[virtualRow.index] - return ( - - ); - })} - - ); -}; - -export default ChannelsTableBody; diff --git a/frontend/src/components/tables/ChannelsTable/ChannelsTableRow.jsx b/frontend/src/components/tables/ChannelsTable/ChannelsTableRow.jsx deleted file mode 100644 index 94466e13..00000000 --- a/frontend/src/components/tables/ChannelsTable/ChannelsTableRow.jsx +++ /dev/null @@ -1,61 +0,0 @@ -// HeadlessChannelsTable.jsx -import React, { useMemo, useState, useCallback } from 'react'; -import { FixedSizeList as List } from 'react-window'; -import AutoSizer from 'react-virtualized-auto-sizer'; -import { - useReactTable, - getCoreRowModel, - getSortedRowModel, - flexRender, - getExpandedRowModel, -} from '@tanstack/react-table'; -import { - Table, - Box, - Checkbox, - ActionIcon, - ScrollArea, - Center, - useMantineTheme, -} from '@mantine/core'; -import { ChevronRight, ChevronDown } from 'lucide-react'; -import useSettingsStore from '../../../store/settings'; -import useChannelsStore from '../../../store/channels'; - -const ExpandIcon = ({ row, toggle }) => ( - - {row.getIsExpanded() ? 
: } - -); - -const ChannelsTableRow = ({ row, virtualRow, index, style, onEdit, onDelete, onPreview, onRecord }) => { - return ( - - {row.getVisibleCells().map(cell => { - return ( - - {flexRender( - cell.column.columnDef.cell, - cell.getContext() - )} - - ) - })} - - ) -}; - -export default ChannelsTableRow diff --git a/frontend/src/components/tables/CustomTable/CustomTable.jsx b/frontend/src/components/tables/CustomTable/CustomTable.jsx new file mode 100644 index 00000000..c35b5bf8 --- /dev/null +++ b/frontend/src/components/tables/CustomTable/CustomTable.jsx @@ -0,0 +1,134 @@ +import { Box, Flex } from '@mantine/core'; +import CustomTableHeader from './CustomTableHeader'; +import { useCallback, useState } from 'react'; +import { flexRender } from '@tanstack/react-table'; + +const CustomTable = ({ + table, + headerCellRenderer, + rowDetailRenderer, + bodyCellRenderFns, + rowCount, +}) => { + const [expandedRowId, setExpandedRowId] = useState(null); + + const rows = table.getRowModel().rows; + + const ChannelExpandCell = useCallback( + ({ row }) => { + const isExpanded = expandedRowId === row.original.id; + + return ( +
{ + setExpandedRowId((prev) => + prev === row.original.id ? null : row.original.id + ); + }} + > + {isExpanded ? : } +
+ ); + }, + [expandedRowId] + ); + + const ChannelRowSelectCell = useCallback( + ({ row }) => { + return ( +
+ +
+ ); + }, + [rows] + ); + + const bodyCellRenderer = (cell) => { + if (bodyCellRenderFns[cell.column.id]) { + return bodyCellRenderFns(cell); + } + + switch (cell.column.id) { + case 'select': + return ChannelRowSelectCell({ row: cell.row }); + + case 'expand': + return ChannelExpandCell({ row: cell.row }); + + default: + return flexRender(cell.column.columnDef.cell, cell.getContext()); + } + }; + + return ( + + + + {table.getRowModel().rows.map((row) => ( + + + {row.getVisibleCells().map((cell) => { + return ( + + + {bodyCellRenderer(cell)} + + + ); + })} + + {row.original.id === expandedRowId && ( + + + + )} + + ))} + + + ); +}; + +export default CustomTable; diff --git a/frontend/src/components/tables/CustomTable/CustomTableHeader.jsx b/frontend/src/components/tables/CustomTable/CustomTableHeader.jsx new file mode 100644 index 00000000..50a173d2 --- /dev/null +++ b/frontend/src/components/tables/CustomTable/CustomTableHeader.jsx @@ -0,0 +1,171 @@ +import { Box, Flex } from '@mantine/core'; +import { + ArrowDownWideNarrow, + ArrowUpDown, + ArrowUpNarrowWide, +} from 'lucide-react'; +import { useCallback } from 'react'; + +const CustomTableHeader = ({ + table, + headerCellRenderFns, + rowCount, + onSelectAllChange, +}) => { + const ChannelRowSelectHeader = useCallback( + ({ selectedChannelIds }) => { + return ( +
+ 0 && + selectedChannelIds.length !== rowCount + } + onChange={onSelectAllChange} + /> +
+ ); + }, + [rows, rowCount] + ); + + const onSelectAll = (e) => { + if (onSelectAllChange) { + onSelectAllChange(e); + } + }; + + const headerCellRenderer = (header) => { + let sortingIcon = ArrowUpDown; + if (sorting[0]?.id == header.id) { + if (sorting[0].desc === false) { + sortingIcon = ArrowUpNarrowWide; + } else { + sortingIcon = ArrowDownWideNarrow; + } + } + + switch (header.id) { + case 'select': + return ChannelRowSelectHeader({ + selectedChannelIds, + }); + + case 'enabled': + if (selectedProfileId !== '0' && selectedChannelIds.length > 0) { + // return EnabledHeaderSwitch(); + } + return ( +
+ +
+ ); + + // case 'channel_number': + // return ( + // + // # + // {/*
+ // {React.createElement(sortingIcon, { + // onClick: () => onSortingChange('name'), + // size: 14, + // })} + //
*/} + //
+ // ); + + // case 'name': + // return ( + // + // e.stopPropagation()} + // onChange={handleFilterChange} + // size="xs" + // variant="unstyled" + // className="table-input-header" + // /> + //
+ // {React.createElement(sortingIcon, { + // onClick: () => onSortingChange('name'), + // size: 14, + // })} + //
+ //
+ // ); + + // case 'channel_group': + // return ( + // + // ); + + default: + return flexRender(header.column.columnDef.header, header.getContext()); + } + }; + + return ( + + {table.getHeaderGroups().map((headerGroup) => ( + + {headerGroup.headers.map((header) => { + return ( + + + {headerCellRenderer(header)} + + + ); + })} + + ))} + + ); +}; + +export default CustomTableHeader; diff --git a/frontend/src/components/tables/StreamsTable.jsx b/frontend/src/components/tables/StreamsTable.jsx index 777d2a32..e12bb125 100644 --- a/frontend/src/components/tables/StreamsTable.jsx +++ b/frontend/src/components/tables/StreamsTable.jsx @@ -78,15 +78,14 @@ const StreamsTable = ({}) => { * Stores */ const { playlists } = usePlaylistsStore(); + const channelGroups = useChannelsStore((s) => s.channelGroups); - const channelsPageSelection = useChannelsStore( - (s) => s.channelsPageSelection - ); + const selectedChannelIds = useChannelsTableStore((s) => s.selectedChannelIds); const fetchLogos = useChannelsStore((s) => s.fetchLogos); - const channelSelectionStreams = useChannelsStore( - (state) => state.channels[state.channelsPageSelection[0]?.id]?.streams + const channelSelectionStreams = useChannelsTableStore( + (state) => + state.channels.find((chan) => chan.id === selectedChannelIds[0])?.streams ); - const requeryChannels = useChannelsTableStore((s) => s.requeryChannels); const { environment: { env_mode }, } = useSettingsStore(); @@ -287,6 +286,7 @@ const StreamsTable = ({}) => { channel_number: null, stream_id: stream.id, }); + await API.requeryChannels(); fetchLogos(); }; @@ -298,7 +298,7 @@ const StreamsTable = ({}) => { stream_id, })) ); - requeryChannels(); + await API.requeryChannels(); fetchLogos(); setIsLoading(false); }; @@ -325,9 +325,8 @@ const StreamsTable = ({}) => { }; const addStreamsToChannel = async () => { - const { streams, ...channel } = { ...channelsPageSelection[0] }; await API.updateChannel({ - ...channel, + id: selectedChannelIds[0], stream_ids: [ ...new Set( channelSelectionStreams @@ -336,18 +335,19 @@ const StreamsTable = ({}) => { ), ], }); + await API.requeryChannels(); }; const addStreamToChannel = async (streamId) => { - const { streams, ...channel } = { ...channelsPageSelection[0] }; await API.updateChannel({ - ...channel, + id: selectedChannelIds[0], stream_ids: [ ...new Set( channelSelectionStreams.map((stream) => stream.id).concat([streamId]) ), ], }); + await API.requeryChannels(); }; const onRowSelectionChange = (updater) => { @@ -512,7 +512,7 @@ const StreamsTable = ({}) => { onClick={() => addStreamToChannel(row.original.id)} style={{ background: 'none' }} disabled={ - channelsPageSelection.length !== 1 || + selectedChannelIds.length !== 1 || (channelSelectionStreams && channelSelectionStreams .map((stream) => stream.id) diff --git a/frontend/src/components/tables/table.css b/frontend/src/components/tables/table.css index 92afaaaa..c1c43f20 100644 --- a/frontend/src/components/tables/table.css +++ b/frontend/src/components/tables/table.css @@ -86,5 +86,5 @@ .table-striped .tbody .tr:nth-child(even), .table-striped .tbody .tr-even { - /* background-color: #ffffff; */ + background-color: #27272A; } diff --git a/frontend/src/pages/Guide.jsx b/frontend/src/pages/Guide.jsx index 0b03954f..2e4de29b 100644 --- a/frontend/src/pages/Guide.jsx +++ b/frontend/src/pages/Guide.jsx @@ -23,6 +23,7 @@ import { } from '@mantine/core'; import { Search, X, Clock, Video, Calendar, Play } from 'lucide-react'; import './guide.css'; +import useEPGsStore from '../store/epgs'; /** 
Layout constants */ const CHANNEL_WIDTH = 120; // Width of the channel/logo column @@ -33,7 +34,13 @@ const MINUTE_INCREMENT = 15; // For positioning programs every 15 min const MINUTE_BLOCK_WIDTH = HOUR_WIDTH / (60 / MINUTE_INCREMENT); export default function TVChannelGuide({ startDate, endDate }) { - const { channels, recordings, channelGroups, profiles } = useChannelsStore(); + const channels = useChannelsStore((s) => s.channels); + const recordings = useChannelsStore((s) => s.recordings); + const channelGroups = useChannelsStore((s) => s.channelGroups); + const profiles = useChannelsStore((s) => s.profiles); + const logos = useChannelsStore((s) => s.logos); + + const tvgsById = useEPGsStore((s) => s.tvgsById); const [programs, setPrograms] = useState([]); const [guideChannels, setGuideChannels] = useState([]); @@ -79,7 +86,12 @@ export default function TVChannelGuide({ startDate, endDate }) { // Filter your Redux/Zustand channels by matching tvg_id const filteredChannels = Object.values(channels) // Include channels with matching tvg_ids OR channels with null epg_data - .filter((ch) => programIds.includes(ch.epg_data?.tvg_id) || programIds.includes(ch.uuid) || ch.epg_data === null) + .filter( + (ch) => + programIds.includes(tvgsById[ch.epg_data_id]?.tvg_id) || + programIds.includes(ch.uuid) || + ch.epg_data_id === null + ) // Add sorting by channel_number .sort( (a, b) => @@ -276,7 +288,9 @@ export default function TVChannelGuide({ startDate, endDate }) { // Helper: find channel by tvg_id function findChannelByTvgId(tvgId) { return guideChannels.find( - (ch) => ch.epg_data?.tvg_id === tvgId || (!ch.epg_data && ch.uuid === tvgId) + (ch) => + tvgsById[ch.epg_data_id]?.tvg_id === tvgId || + (!ch.epg_data_id && ch.uuid === tvgId) ); } @@ -839,10 +853,10 @@ export default function TVChannelGuide({ startDate, endDate }) { {(searchQuery !== '' || selectedGroupId !== 'all' || selectedProfileId !== 'all') && ( - - )} + + )} {filteredChannels.length}{' '} @@ -1049,8 +1063,10 @@ export default function TVChannelGuide({ startDate, endDate }) { {filteredChannels.length > 0 ? 
( filteredChannels.map((channel) => { const channelPrograms = programs.filter( - (p) => (channel.epg_data && p.tvg_id === channel.epg_data.tvg_id) || - (!channel.epg_data && p.tvg_id === channel.uuid) + (p) => + (channel.epg_data_id && + p.tvg_id === tvgsById[channel.epg_data_id].tvg_id) || + (!channel.epg_data_id && p.tvg_id === channel.uuid) ); // Check if any program in this channel is expanded const hasExpandedProgram = channelPrograms.some( @@ -1149,7 +1165,7 @@ export default function TVChannelGuide({ startDate, endDate }) { }} > {channel.name} ({ initData: async () => { await Promise.all([ - // useChannelsStore.getState().fetchChannels(), + useChannelsStore.getState().fetchChannels(), useChannelsStore.getState().fetchChannelGroups(), useChannelsStore.getState().fetchLogos(), useChannelsStore.getState().fetchChannelProfiles(), diff --git a/frontend/src/store/channels.jsx b/frontend/src/store/channels.jsx index 5e5049be..29ba3bfc 100644 --- a/frontend/src/store/channels.jsx +++ b/frontend/src/store/channels.jsx @@ -113,16 +113,12 @@ const useChannelsStore = create((set, get) => ({ const channelsByID = newChannels.reduce((acc, channel) => { acc[channel.id] = channel; channelsByUUID[channel.uuid] = channel.id; - if (channel.logo) { - logos[channel.logo.id] = channel.logo; - } - profileChannels.add(channel.id); return acc; }, {}); - const newProfiles = {}; + const newProfiles = { ...defaultProfiles }; Object.entries(state.profiles).forEach(([id, profile]) => { newProfiles[id] = { ...profile, @@ -139,10 +135,6 @@ const useChannelsStore = create((set, get) => ({ ...state.channelsByUUID, ...channelsByUUID, }, - logos: { - ...state.logos, - ...logos, - }, profiles: newProfiles, }; }), diff --git a/frontend/src/store/channelsTable b/frontend/src/store/channelsTable index a5146363..2a230e84 100644 --- a/frontend/src/store/channelsTable +++ b/frontend/src/store/channelsTable @@ -3,56 +3,31 @@ import api from '../api'; import { notifications } from '@mantine/notifications'; import API from '../api'; -const defaultProfiles = { 0: { id: '0', name: 'All', channels: new Set() } }; - const useChannelsTableStore = create((set, get) => ({ channels: [], count: 0, pageCount: 0, - lastParams: new URLSearchParams(), + selectedChannelIds: [], - requeryChannels: async () => { - const lastParams = get().lastParams; - console.log(lastParams); - const result = await API.queryChannels(lastParams); - const pageSize = parseInt(lastParams.get?.('page_size') || '25'); - - set({ - channels: result.results, - count: result.count, - pageCount: Math.ceil(result.count / pageSize), + queryChannels: ({ results, count }, params) => { + set((state) => { + return { + channels: results, + count: count, + pageCount: Math.ceil(count / params.page_size), + }; }); }, - queryChannels: async ({ pagination, sorting, filters }) => { - const params = new URLSearchParams(); - params.append('page', pagination.pageIndex + 1); - params.append('page_size', pagination.pageSize); - - // Apply sorting - if (sorting.length > 0) { - const sortField = sorting[0].id; - const sortDirection = sorting[0].desc ? 
'-' : ''; - params.append('ordering', `${sortDirection}${sortField}`); - } - - // Apply debounced filters - Object.entries(filters).forEach(([key, value]) => { - if (value) params.append(key, value); + setSelectedChannelIds: (selectedChannelIds) => { + set({ + selectedChannelIds, }); + }, - try { - const result = await API.queryChannels(params); - - set((state) => ({ - channels: result.results, - count: result.count, - pageCount: Math.ceil(result.count / pagination.pageSize), - lastParams: params, - })); - } catch (error) { - console.error('Error fetching data:', error); - } + getChannelStreams: (id) => { + const channel = get().channels.find((c) => c.id === id); + return channel?.streams ?? []; }, })); From eb9419ddd232f93763538dd1849b23b27e9a92f8 Mon Sep 17 00:00:00 2001 From: dekzter Date: Sat, 19 Apr 2025 08:49:04 -0400 Subject: [PATCH 0011/1435] proper handling of store variables so we now aren't listening on any change from the state of a store --- frontend/src/App.jsx | 14 ++++++-------- frontend/src/WebSocket.jsx | 12 +++++++----- frontend/src/components/FloatingVideo.jsx | 4 +++- frontend/src/components/M3URefreshNotification.jsx | 11 ++++++----- frontend/src/components/Sidebar.jsx | 6 +++--- frontend/src/components/forms/Channel.jsx | 12 ++++++++---- frontend/src/components/forms/LoginForm.jsx | 5 ++++- frontend/src/components/forms/M3U.jsx | 4 ++-- frontend/src/components/forms/M3UGroupFilter.jsx | 2 +- frontend/src/components/forms/M3UProfile.jsx | 4 +++- frontend/src/components/forms/M3UProfiles.jsx | 3 ++- frontend/src/components/forms/Recording.jsx | 2 +- frontend/src/components/forms/Stream.jsx | 2 +- frontend/src/components/forms/SuperuserForm.jsx | 2 +- .../src/components/tables/ChannelTableStreams.jsx | 2 +- frontend/src/components/tables/ChannelsTable.jsx | 6 ++---- frontend/src/components/tables/EPGsTable.jsx | 2 +- frontend/src/components/tables/M3UsTable.jsx | 5 +++-- .../src/components/tables/StreamProfilesTable.jsx | 2 +- frontend/src/components/tables/StreamsTable.jsx | 8 +++----- frontend/src/components/tables/UserAgentsTable.jsx | 2 +- frontend/src/pages/DVR.jsx | 4 ++-- frontend/src/pages/Guide.jsx | 6 ++---- frontend/src/pages/Login.jsx | 2 +- frontend/src/pages/Settings.jsx | 6 +++--- frontend/src/pages/Stats.jsx | 6 ++++-- 26 files changed, 72 insertions(+), 62 deletions(-) diff --git a/frontend/src/App.jsx b/frontend/src/App.jsx index 5aa5df23..e13c5af8 100644 --- a/frontend/src/App.jsx +++ b/frontend/src/App.jsx @@ -36,14 +36,12 @@ const defaultRoute = '/channels'; const App = () => { const [open, setOpen] = useState(true); - const { - isAuthenticated, - setIsAuthenticated, - logout, - initData, - initializeAuth, - setSuperuserExists, - } = useAuthStore(); + const isAuthenticated = useAuthStore((s) => s.isAuthenticated); + const setIsAuthenticated = useAuthStore((s) => s.setIsAuthenticated); + const logout = useAuthStore((s) => s.logout); + const initData = useAuthStore((s) => s.initData); + const initializeAuth = useAuthStore((s) => s.initializeAuth); + const setSuperuserExists = useAuthStore((s) => s.setSuperuserExists); const toggleDrawer = () => { setOpen(!open); diff --git a/frontend/src/WebSocket.jsx b/frontend/src/WebSocket.jsx index f9ab1cc3..cd4bca6f 100644 --- a/frontend/src/WebSocket.jsx +++ b/frontend/src/WebSocket.jsx @@ -20,11 +20,13 @@ export const WebsocketProvider = ({ children }) => { const [isReady, setIsReady] = useState(false); const [val, setVal] = useState(null); - const { fetchChannels, setChannelStats, fetchChannelGroups } = - 
useChannelsStore(); - const { fetchPlaylists, setRefreshProgress, setProfilePreview } = - usePlaylistsStore(); - const { fetchEPGData, fetchEPGs } = useEPGsStore(); + const setChannelStats = useChannelsStore((s) => s.setChannelStats); + const fetchChannelGroups = useChannelsStore((s) => s.fetchChannelGroups); + const fetchPlaylists = usePlaylistsStore((s) => s.fetchPlaylists); + const setRefreshProgress = usePlaylistsStore((s) => s.setRefreshProgress); + const setProfilePreview = usePlaylistsStore((s) => s.setProfilePreview); + const fetchEPGData = useEPGsStore((s) => s.fetchEPGData); + const fetchEPGs = useEPGsStore((s) => s.fetchEPGs); const ws = useRef(null); diff --git a/frontend/src/components/FloatingVideo.jsx b/frontend/src/components/FloatingVideo.jsx index 451df5cd..0be42029 100644 --- a/frontend/src/components/FloatingVideo.jsx +++ b/frontend/src/components/FloatingVideo.jsx @@ -6,7 +6,9 @@ import mpegts from 'mpegts.js'; import { CloseButton, Flex } from '@mantine/core'; export default function FloatingVideo() { - const { isVisible, streamUrl, hideVideo } = useVideoStore(); + const isVisible = useVideoStore((s) => s.isVisible); + const streamUrl = useVideoStore((s) => s.streamUrl); + const hideVideo = useVideoStore((s) => s.hideVideo); const videoRef = useRef(null); const playerRef = useRef(null); const videoContainerRef = useRef(null); diff --git a/frontend/src/components/M3URefreshNotification.jsx b/frontend/src/components/M3URefreshNotification.jsx index 90123dc4..9e469f43 100644 --- a/frontend/src/components/M3URefreshNotification.jsx +++ b/frontend/src/components/M3URefreshNotification.jsx @@ -8,11 +8,12 @@ import useChannelsStore from '../store/channels'; import useEPGsStore from '../store/epgs'; export default function M3URefreshNotification() { - const { playlists, refreshProgress } = usePlaylistsStore(); - const { fetchStreams } = useStreamsStore(); - const { fetchChannelGroups } = useChannelsStore(); - const { fetchPlaylists } = usePlaylistsStore(); - const { fetchEPGData } = useEPGsStore(); + const playlists = usePlaylistsStore((s) => s.playlists); + const refreshProgress = usePlaylistsStore((s) => s.refreshProgress); + const fetchStreams = useStreamsStore((s) => s.fetchStreams); + const fetchChannelGroups = useChannelsStore((s) => s.fetchChannelGroups); + const fetchPlaylists = usePlaylistsStore((s) => s.fetchPlaylists); + const fetchEPGData = useEPGsStore((s) => s.fetchEPGData); const [notificationStatus, setNotificationStatus] = useState({}); diff --git a/frontend/src/components/Sidebar.jsx b/frontend/src/components/Sidebar.jsx index 3286f0d6..b5dc15b2 100644 --- a/frontend/src/components/Sidebar.jsx +++ b/frontend/src/components/Sidebar.jsx @@ -63,9 +63,9 @@ const NavLink = ({ item, isActive, collapsed }) => { const Sidebar = ({ collapsed, toggleDrawer, drawerWidth, miniDrawerWidth }) => { const location = useLocation(); - const { channels } = useChannelsStore(); - const { environment } = useSettingsStore(); - const { isAuthenticated } = useAuthStore(); // Add this line to get authentication state + const channels = useChannelsStore((s) => s.channels); + const environment = useSettingsStore((s) => s.environment); + const isAuthenticated = useAuthStore((s) => s.isAuthenticated); const publicIPRef = useRef(null); const [appVersion, setAppVersion] = useState({ version: '', build: '' }); diff --git a/frontend/src/components/forms/Channel.jsx b/frontend/src/components/forms/Channel.jsx index 696143a2..9d184d5c 100644 --- a/frontend/src/components/forms/Channel.jsx +++ 
b/frontend/src/components/forms/Channel.jsx @@ -42,11 +42,15 @@ const Channel = ({ channel = null, isOpen, onClose }) => { const listRef = useRef(null); const logoListRef = useRef(null); - const { channelGroups, logos, fetchLogos } = useChannelsStore(); + const channelGroups = useChannelsStore((s) => s.channelGroups); + const logos = useChannelsStore((s) => s.logos); + const fetchLogos = useChannelsStore((s) => s.fetchLogos); const streams = useStreamsStore((state) => state.streams); - const { profiles: streamProfiles } = useStreamProfilesStore(); - const { playlists } = usePlaylistsStore(); - const { epgs, tvgs, tvgsById } = useEPGsStore(); + const streamProfiles = useStreamProfilesStore((s) => s.profiles); + const playlists = usePlaylistsStore((s) => s.playlists); + const epgs = useEPGsStore((s) => s.epgs); + const tvgs = useEPGsStore((s) => s.tvgs); + const tvgsById = useEPGsStore((s) => s.tvgsById); const [logoPreview, setLogoPreview] = useState(null); const [channelStreams, setChannelStreams] = useState([]); diff --git a/frontend/src/components/forms/LoginForm.jsx b/frontend/src/components/forms/LoginForm.jsx index 2cc40988..28387868 100644 --- a/frontend/src/components/forms/LoginForm.jsx +++ b/frontend/src/components/forms/LoginForm.jsx @@ -4,7 +4,10 @@ import useAuthStore from '../../store/auth'; import { Paper, Title, TextInput, Button, Center, Stack } from '@mantine/core'; const LoginForm = () => { - const { login, isAuthenticated, initData } = useAuthStore(); // Get login function from AuthContext + const login = useAuthStore((s) => s.login); + const isAuthenticated = useAuthStore((s) => s.isAuthenticated); + const initData = useAuthStore((s) => s.initData); + const navigate = useNavigate(); // Hook to navigate to other routes const [formData, setFormData] = useState({ username: '', password: '' }); diff --git a/frontend/src/components/forms/M3U.jsx b/frontend/src/components/forms/M3U.jsx index ccef1944..bd9cf3c8 100644 --- a/frontend/src/components/forms/M3U.jsx +++ b/frontend/src/components/forms/M3U.jsx @@ -28,8 +28,8 @@ import { isNotEmpty, useForm } from '@mantine/form'; const M3U = ({ playlist = null, isOpen, onClose, playlistCreated = false }) => { const theme = useMantineTheme(); - const { userAgents } = useUserAgentsStore(); - const { fetchChannelGroups } = useChannelsStore(); + const userAgents = useUserAgentsStore((s) => s.userAgents); + const fetchChannelGroups = useChannelsStore((s) => s.fetchChannelGroups); const [file, setFile] = useState(null); const [profileModalOpen, setProfileModalOpen] = useState(false); diff --git a/frontend/src/components/forms/M3UGroupFilter.jsx b/frontend/src/components/forms/M3UGroupFilter.jsx index c6795916..fd82c906 100644 --- a/frontend/src/components/forms/M3UGroupFilter.jsx +++ b/frontend/src/components/forms/M3UGroupFilter.jsx @@ -26,7 +26,7 @@ import useChannelsStore from '../../store/channels'; import { CircleCheck, CircleX } from 'lucide-react'; const M3UGroupFilter = ({ playlist = null, isOpen, onClose }) => { - const { channelGroups } = useChannelsStore(); + const channelGroups = useChannelsStore((s) => s.channelGroups); const [groupStates, setGroupStates] = useState([]); const [isLoading, setIsLoading] = useState(false); const [groupFilter, setGroupFilter] = useState(''); diff --git a/frontend/src/components/forms/M3UProfile.jsx b/frontend/src/components/forms/M3UProfile.jsx index 4c34d58f..2de99750 100644 --- a/frontend/src/components/forms/M3UProfile.jsx +++ b/frontend/src/components/forms/M3UProfile.jsx @@ -17,7 +17,9 @@ 
import { useDebounce } from '../../utils'; const RegexFormAndView = ({ profile = null, m3u, isOpen, onClose }) => { const [websocketReady, sendMessage] = useWebSocket(); - const { profileSearchPreview, profileResult } = usePlaylistsStore(); + + const profileSearchPreview = usePlaylistsStore((s) => s.profileSearchPreview); + const profileResult = usePlaylistsStore((s) => s.profileResult); const [searchPattern, setSearchPattern] = useState(''); const [replacePattern, setReplacePattern] = useState(''); diff --git a/frontend/src/components/forms/M3UProfiles.jsx b/frontend/src/components/forms/M3UProfiles.jsx index 609e2251..12860f22 100644 --- a/frontend/src/components/forms/M3UProfiles.jsx +++ b/frontend/src/components/forms/M3UProfiles.jsx @@ -22,7 +22,8 @@ import { SquareMinus, SquarePen } from 'lucide-react'; const M3UProfiles = ({ playlist = null, isOpen, onClose }) => { const theme = useMantineTheme(); - const { profiles: allProfiles } = usePlaylistsStore(); + const allProfiles = usePlaylistsStore((s) => s.profiles); + const [profileEditorOpen, setProfileEditorOpen] = useState(false); const [profile, setProfile] = useState(null); const [profiles, setProfiles] = useState([]); diff --git a/frontend/src/components/forms/Recording.jsx b/frontend/src/components/forms/Recording.jsx index a4aaf266..4b3f79f5 100644 --- a/frontend/src/components/forms/Recording.jsx +++ b/frontend/src/components/forms/Recording.jsx @@ -8,7 +8,7 @@ import { CircleAlert } from 'lucide-react'; import { isNotEmpty, useForm } from '@mantine/form'; const DVR = ({ recording = null, channel = null, isOpen, onClose }) => { - const { channels } = useChannelsStore(); + const channels = useChannelsStore((s) => s.channels); let startTime = new Date(); startTime.setMinutes(Math.ceil(startTime.getMinutes() / 30) * 30); diff --git a/frontend/src/components/forms/Stream.jsx b/frontend/src/components/forms/Stream.jsx index aee3438b..3cca4551 100644 --- a/frontend/src/components/forms/Stream.jsx +++ b/frontend/src/components/forms/Stream.jsx @@ -9,7 +9,7 @@ import useChannelsStore from '../../store/channels'; const Stream = ({ stream = null, isOpen, onClose }) => { const streamProfiles = useStreamProfilesStore((state) => state.profiles); - const { channelGroups } = useChannelsStore(); + const channelGroups = useChannelsStore((s) => s.channelGroups); const formik = useFormik({ initialValues: { diff --git a/frontend/src/components/forms/SuperuserForm.jsx b/frontend/src/components/forms/SuperuserForm.jsx index 66aa5ced..1af4e8d4 100644 --- a/frontend/src/components/forms/SuperuserForm.jsx +++ b/frontend/src/components/forms/SuperuserForm.jsx @@ -11,7 +11,7 @@ function SuperuserForm({}) { email: '', }); const [error, setError] = useState(''); - const { setSuperuserExists } = useAuthStore(); + const setSuperuserExists = useAuthStore((s) => s.setSuperuserExists); const handleChange = (e) => { setFormData((prev) => ({ diff --git a/frontend/src/components/tables/ChannelTableStreams.jsx b/frontend/src/components/tables/ChannelTableStreams.jsx index 8c29c228..ca7d23b6 100644 --- a/frontend/src/components/tables/ChannelTableStreams.jsx +++ b/frontend/src/components/tables/ChannelTableStreams.jsx @@ -117,7 +117,7 @@ const ChannelStreams = ({ channel, isExpanded }) => { const dataIds = data?.map(({ id }) => id); - const { playlists } = usePlaylistsStore(); + const playlists = usePlaylistsStore((s) => s.playlists); const removeStream = async (stream) => { const newStreamList = channelStreams.filter((s) => s.id !== stream.id); diff --git 
a/frontend/src/components/tables/ChannelsTable.jsx b/frontend/src/components/tables/ChannelsTable.jsx index c6272ef1..f9396022 100644 --- a/frontend/src/components/tables/ChannelsTable.jsx +++ b/frontend/src/components/tables/ChannelsTable.jsx @@ -292,9 +292,7 @@ const ChannelsTable = ({}) => { const groupOptions = Object.values(channelGroups).map((group) => group.name); - const { - environment: { env_mode }, - } = useSettingsStore(); + const env_mode = useSettingsStore((s) => s.environment.env_mode); const [channel, setChannel] = useState(null); const [channelModalOpen, setChannelModalOpen] = useState(false); @@ -365,7 +363,7 @@ const ChannelsTable = ({}) => { // const theme = useTheme(); const theme = useMantineTheme(); - const { showVideo } = useVideoStore(); + const showVideo = useVideoStore((s) => s.showVideo); useEffect(() => { setSelectedProfile(profiles[selectedProfileId]); diff --git a/frontend/src/components/tables/EPGsTable.jsx b/frontend/src/components/tables/EPGsTable.jsx index 9da2b114..07f4128d 100644 --- a/frontend/src/components/tables/EPGsTable.jsx +++ b/frontend/src/components/tables/EPGsTable.jsx @@ -25,7 +25,7 @@ const EPGsTable = () => { const [epgModalOpen, setEPGModalOpen] = useState(false); const [rowSelection, setRowSelection] = useState([]); - const { epgs } = useEPGsStore(); + const epgs = useEPGsStore((s) => s.epgs); const theme = useMantineTheme(); diff --git a/frontend/src/components/tables/M3UsTable.jsx b/frontend/src/components/tables/M3UsTable.jsx index 0fccfc82..9765ca66 100644 --- a/frontend/src/components/tables/M3UsTable.jsx +++ b/frontend/src/components/tables/M3UsTable.jsx @@ -27,8 +27,9 @@ const M3UTable = () => { const [activeFilterValue, setActiveFilterValue] = useState('all'); const [playlistCreated, setPlaylistCreated] = useState(false); - const { playlists, refreshProgress, setRefreshProgress } = - usePlaylistsStore(); + const playlists = usePlaylistsStore((s) => s.playlists); + const refreshProgress = usePlaylistsStore((s) => s.refreshProgress); + const setRefreshProgress = usePlaylistsStore((s) => s.setRefreshProgress); const theme = useMantineTheme(); diff --git a/frontend/src/components/tables/StreamProfilesTable.jsx b/frontend/src/components/tables/StreamProfilesTable.jsx index 2f5a6f9b..04c6bbe2 100644 --- a/frontend/src/components/tables/StreamProfilesTable.jsx +++ b/frontend/src/components/tables/StreamProfilesTable.jsx @@ -30,7 +30,7 @@ const StreamProfiles = () => { const [hideInactive, setHideInactive] = useState(false); const streamProfiles = useStreamProfilesStore((state) => state.profiles); - const { settings } = useSettingsStore(); + const settings = useSettingsStore((s) => s.settings); const theme = useMantineTheme(); diff --git a/frontend/src/components/tables/StreamsTable.jsx b/frontend/src/components/tables/StreamsTable.jsx index e12bb125..b7962dad 100644 --- a/frontend/src/components/tables/StreamsTable.jsx +++ b/frontend/src/components/tables/StreamsTable.jsx @@ -77,7 +77,7 @@ const StreamsTable = ({}) => { /** * Stores */ - const { playlists } = usePlaylistsStore(); + const playlists = usePlaylistsStore((s) => s.playlists); const channelGroups = useChannelsStore((s) => s.channelGroups); const selectedChannelIds = useChannelsTableStore((s) => s.selectedChannelIds); @@ -86,10 +86,8 @@ const StreamsTable = ({}) => { (state) => state.channels.find((chan) => chan.id === selectedChannelIds[0])?.streams ); - const { - environment: { env_mode }, - } = useSettingsStore(); - const { showVideo } = useVideoStore(); + const env_mode = 
useSettingsStore((s) => s.environment.env_mode); + const showVideo = useVideoStore((s) => s.showVideo); // Access the row virtualizer instance (optional) const rowVirtualizerInstanceRef = useRef(null); diff --git a/frontend/src/components/tables/UserAgentsTable.jsx b/frontend/src/components/tables/UserAgentsTable.jsx index 2ea2d3b7..3d69cec5 100644 --- a/frontend/src/components/tables/UserAgentsTable.jsx +++ b/frontend/src/components/tables/UserAgentsTable.jsx @@ -28,7 +28,7 @@ const UserAgentsTable = () => { const [activeFilterValue, setActiveFilterValue] = useState('all'); const userAgents = useUserAgentsStore((state) => state.userAgents); - const { settings } = useSettingsStore(); + const settings = useSettingsStore((s) => s.settings); const columns = useMemo( //column definitions... diff --git a/frontend/src/pages/DVR.jsx b/frontend/src/pages/DVR.jsx index e7a26186..9180a229 100644 --- a/frontend/src/pages/DVR.jsx +++ b/frontend/src/pages/DVR.jsx @@ -36,7 +36,7 @@ dayjs.extend(duration); dayjs.extend(relativeTime); const RecordingCard = ({ recording }) => { - const { channels } = useChannelsStore(); + const channels = useChannelsStore((s) => s.channels); const deleteRecording = (id) => { API.deleteRecording(id); @@ -103,7 +103,7 @@ const RecordingCard = ({ recording }) => { const DVRPage = () => { const theme = useMantineTheme(); - const { recordings } = useChannelsStore(); + const recordings = useChannelsStore((s) => s.recordings); const [recordingModalOpen, setRecordingModalOpen] = useState(false); diff --git a/frontend/src/pages/Guide.jsx b/frontend/src/pages/Guide.jsx index 2e4de29b..f01254bf 100644 --- a/frontend/src/pages/Guide.jsx +++ b/frontend/src/pages/Guide.jsx @@ -56,9 +56,7 @@ export default function TVChannelGuide({ startDate, endDate }) { const [selectedGroupId, setSelectedGroupId] = useState('all'); const [selectedProfileId, setSelectedProfileId] = useState('all'); - const { - environment: { env_mode }, - } = useSettingsStore(); + const env_mode = useSettingsStore((s) => s.environment.env_mode); const guideRef = useRef(null); const timelineRef = useRef(null); // New ref for timeline scrolling @@ -308,7 +306,7 @@ export default function TVChannelGuide({ startDate, endDate }) { }; // The “Watch Now” click => show floating video - const { showVideo } = useVideoStore(); // or useVideoStore() + const showVideo = useVideoStore((s) => s.showVideo); function handleWatchStream(program) { const matched = findChannelByTvgId(program.tvg_id); if (!matched) { diff --git a/frontend/src/pages/Login.jsx b/frontend/src/pages/Login.jsx index 3e9cdecc..262d4c35 100644 --- a/frontend/src/pages/Login.jsx +++ b/frontend/src/pages/Login.jsx @@ -4,7 +4,7 @@ import SuperuserForm from '../components/forms/SuperuserForm'; import useAuthStore from '../store/auth'; const Login = ({}) => { - const { superuserExists } = useAuthStore(); + const superuserExists = useAuthStore((s) => s.superuserExists); if (!superuserExists) { return ; diff --git a/frontend/src/pages/Settings.jsx b/frontend/src/pages/Settings.jsx index 19bec97e..74329880 100644 --- a/frontend/src/pages/Settings.jsx +++ b/frontend/src/pages/Settings.jsx @@ -21,9 +21,9 @@ import UserAgentsTable from '../components/tables/UserAgentsTable'; import StreamProfilesTable from '../components/tables/StreamProfilesTable'; const SettingsPage = () => { - const { settings } = useSettingsStore(); - const { userAgents } = useUserAgentsStore(); - const { profiles: streamProfiles } = useStreamProfilesStore(); + const settings = useSettingsStore((s) => 
s.settings); + const userAgents = useUserAgentsStore((s) => s.userAgents); + const streamProfiles = useStreamProfilesStore((s) => s.profiles); const regionChoices = [ { value: 'ad', label: 'AD' }, diff --git a/frontend/src/pages/Stats.jsx b/frontend/src/pages/Stats.jsx index 4c34be06..018e64ae 100644 --- a/frontend/src/pages/Stats.jsx +++ b/frontend/src/pages/Stats.jsx @@ -227,8 +227,10 @@ const ChannelCard = ({ channel, clients, stopClient, stopChannel }) => { const ChannelsPage = () => { const theme = useMantineTheme(); - const { channels, channelsByUUID, stats: channelStats } = useChannelsStore(); - const { profiles: streamProfiles } = useStreamProfilesStore(); + const channels = useChannelsStore((s) => s.channels); + const channelsByUUID = useChannelsStore((s) => s.channelsByUUID); + const channelStats = useChannelsStore((s) => s.stats); + const streamProfiles = useStreamProfilesStore((s) => s.profiles); const [activeChannels, setActiveChannels] = useState({}); const [clients, setClients] = useState([]); From a199eeab921a79bcfe0e0b694982dddd6961ac24 Mon Sep 17 00:00:00 2001 From: dekzter Date: Sat, 19 Apr 2025 18:04:48 -0400 Subject: [PATCH 0012/1435] bug fixes, display 'no data' on empty streams table --- .../components/tables/ChannelTableStreams.jsx | 60 +++++++++++++++---- .../src/components/tables/ChannelsTable.jsx | 11 +++- 2 files changed, 57 insertions(+), 14 deletions(-) diff --git a/frontend/src/components/tables/ChannelTableStreams.jsx b/frontend/src/components/tables/ChannelTableStreams.jsx index ca7d23b6..c682705a 100644 --- a/frontend/src/components/tables/ChannelTableStreams.jsx +++ b/frontend/src/components/tables/ChannelTableStreams.jsx @@ -43,9 +43,16 @@ const RowDragHandleCell = ({ rowId }) => { }); return ( // Alternatively, you could set these attributes on the rows themselves - - - +
+ + + +
); }; @@ -108,19 +115,18 @@ const ChannelStreams = ({ channel, isExpanded }) => { (state) => state.getChannelStreams(channel.id), shallow ); + const playlists = usePlaylistsStore((s) => s.playlists); + + const [data, setData] = useState(channelStreams || []); useEffect(() => { setData(channelStreams); }, [channelStreams]); - const [data, setData] = useState(channelStreams || []); - const dataIds = data?.map(({ id }) => id); - const playlists = usePlaylistsStore((s) => s.playlists); - const removeStream = async (stream) => { - const newStreamList = channelStreams.filter((s) => s.id !== stream.id); + const newStreamList = data.filter((s) => s.id !== stream.id); await API.updateChannel({ ...channel, stream_ids: newStreamList.map((s) => s.id), @@ -164,9 +170,9 @@ const ChannelStreams = ({ channel, isExpanded }) => { ), }, ], - [playlists] + [data, playlists] ), - data: data, + data, state: { data, }, @@ -216,6 +222,8 @@ const ChannelStreams = ({ channel, isExpanded }) => { return <>; } + const rows = table.getRowModel().rows; + return ( { items={dataIds} strategy={verticalListSortingStrategy} > - {table.getRowModel().rows.map((row) => ( - - ))} + {rows.length === 0 && ( + + + + No Data + + + + )} + {rows.length > 0 && + table + .getRowModel() + .rows.map((row) => )}
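Note on the hunk above: a minimal sketch of the empty-state guard it introduces, assuming TanStack's table instance; `Row` stands in for the component's own draggable row and is not part of the diff:

    const StreamRows = ({ table }) => {
      const rows = table.getRowModel().rows;

      if (rows.length === 0) {
        // One placeholder row spanning all columns, so "No Data" shows up
        // instead of the sortable body collapsing to nothing.
        return (
          <tr>
            <td colSpan={table.getAllColumns().length}>No Data</td>
          </tr>
        );
      }

      return rows.map((row) => <Row key={row.id} row={row} />);
    };

The same hunk also hoists the `useState` for the local copy above the `useEffect` that syncs it from the store, and points `removeStream` at that local copy.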
diff --git a/frontend/src/components/tables/ChannelsTable.jsx b/frontend/src/components/tables/ChannelsTable.jsx index f9396022..f32f6a8b 100644 --- a/frontend/src/components/tables/ChannelsTable.jsx +++ b/frontend/src/components/tables/ChannelsTable.jsx @@ -385,6 +385,10 @@ const ChannelsTable = ({}) => { ...prev, [name]: value, })); + setPagination({ + pageIndex: 0, + pageSize: pagination.pageSize, + }); }, []); const handleGroupChange = (value) => { @@ -392,6 +396,10 @@ const ChannelsTable = ({}) => { ...prev, channel_group: value ? value : '', })); + setPagination({ + pageIndex: 0, + pageSize: pagination.pageSize, + }); }; const hdhrUrlRef = useRef(null); @@ -729,7 +737,8 @@ const ChannelsTable = ({}) => { }, }, { - accessorFn: (row) => row.channel_group?.name || '', + accessorFn: (row) => + row.channel_group_id ? channelGroups[row.channel_group_id].name : '', id: 'channel_group', cell: ({ getValue }) => ( Date: Sun, 20 Apr 2025 09:21:53 -0400 Subject: [PATCH 0013/1435] Fixed refresh token bug --- frontend/src/api.js | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/frontend/src/api.js b/frontend/src/api.js index f30d43dc..e3a1fb8f 100644 --- a/frontend/src/api.js +++ b/frontend/src/api.js @@ -151,8 +151,9 @@ export default class API { static async refreshToken(refresh) { return await request(`${host}/api/accounts/token/refresh/`, { + auth: false, method: 'POST', - body: { auth: false, refresh }, + body: { refresh }, }); } From c4f470e8f70d1f12072ffbc9331b3c6582568ea7 Mon Sep 17 00:00:00 2001 From: dekzter Date: Sun, 20 Apr 2025 09:54:16 -0400 Subject: [PATCH 0014/1435] Fixed bug in deleting stream profiles --- core/apps.py | 4 ++++ core/models.py | 6 ------ core/signals.py | 9 +++++++++ 3 files changed, 13 insertions(+), 6 deletions(-) create mode 100644 core/signals.py diff --git a/core/apps.py b/core/apps.py index 3a01f0bd..63c883ca 100644 --- a/core/apps.py +++ b/core/apps.py @@ -5,3 +5,7 @@ import os, logging class CoreConfig(AppConfig): default_auto_field = 'django.db.models.BigAutoField' name = 'core' + + def ready(self): + # Import signals to ensure they get registered + import core.signals diff --git a/core/models.py b/core/models.py index a8571b5c..fe7e9eb5 100644 --- a/core/models.py +++ b/core/models.py @@ -58,12 +58,6 @@ class StreamProfile(models.Model): def __str__(self): return self.name - def delete(self): - if self.locked(): - raise ValueError("This profile is locked and cannot be deleted.") - - self.delete() - def save(self, *args, **kwargs): if self.pk: # Only check existing records orig = StreamProfile.objects.get(pk=self.pk) diff --git a/core/signals.py b/core/signals.py new file mode 100644 index 00000000..6844a890 --- /dev/null +++ b/core/signals.py @@ -0,0 +1,9 @@ +from django.db.models.signals import pre_delete +from django.dispatch import receiver +from django.core.exceptions import ValidationError +from .models import StreamProfile + +@receiver(pre_delete, sender=StreamProfile) +def prevent_deletion_if_locked(sender, instance, **kwargs): + if instance.locked: + raise ValidationError("This profile is locked and cannot be deleted.") From 3fc37f8f4f6e6e363f7ac0fbc88c9ce3a848db76 Mon Sep 17 00:00:00 2001 From: dekzter Date: Sun, 20 Apr 2025 10:06:08 -0400 Subject: [PATCH 0015/1435] cache url for logos in m3u and epg --- apps/output/views.py | 16 ++++++---------- 1 file changed, 6 insertions(+), 10 deletions(-) diff --git a/apps/output/views.py b/apps/output/views.py index 3d268e10..3bcd19b2 100644 --- a/apps/output/views.py +++ 
b/apps/output/views.py @@ -26,7 +26,11 @@ def generate_m3u(request, profile_name=None): group_title = channel.channel_group.name if channel.channel_group else "Default" tvg_id = channel.channel_number or channel.id tvg_name = channel.name - tvg_logo = channel.logo.url if channel.logo else "" + + tvg_logo = "" + if channel.logo: + tvg_logo = request.build_absolute_uri(reverse('api:channels:logo-cache', args=[channel.logo.id])) + channel_number = channel.channel_number extinf_line = ( @@ -96,15 +100,7 @@ def generate_epg(request, profile_name=None): # Add channel logo if available if channel.logo: - logo_url = channel.logo.url - - # Convert to absolute URL if it's relative - if logo_url.startswith('/data'): - # Use the full URL for the logo - logo_uri = re.sub(r"^\/data", '', logo_url) - base_url = request.build_absolute_uri('/')[:-1] - logo_url = f"{base_url}{logo_uri}" - + logo_url = request.build_absolute_uri(reverse('api:channels:logo-cache', args=[channel.logo.id])) xml_lines.append(f' ') xml_lines.append(' ') From fb56e4d3f511e233b7a2254a9e4f9d9eb66a3651 Mon Sep 17 00:00:00 2001 From: dekzter Date: Sun, 20 Apr 2025 10:44:34 -0400 Subject: [PATCH 0016/1435] Requery on form submission --- frontend/src/components/forms/Channel.jsx | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/frontend/src/components/forms/Channel.jsx b/frontend/src/components/forms/Channel.jsx index 9d184d5c..2e0b702b 100644 --- a/frontend/src/components/forms/Channel.jsx +++ b/frontend/src/components/forms/Channel.jsx @@ -155,6 +155,7 @@ const Channel = ({ channel = null, isOpen, onClose }) => { console.error('Error saving channel:', error); } + API.requeryChannels(); setSubmitting(false); setTvgFilter(''); setLogoFilter(''); @@ -172,7 +173,7 @@ const Channel = ({ channel = null, isOpen, onClose }) => { formik.setValues({ name: channel.name, channel_number: channel.channel_number, - channel_group_id: `${channel.channel_group?.id}`, + channel_group_id: channel.channel_group_id ?? '', stream_profile_id: channel.stream_profile_id ? `${channel.stream_profile_id}` : '0', From ebf514cbede5f8c92c8ea197585d79b62bc08a65 Mon Sep 17 00:00:00 2001 From: dekzter Date: Mon, 21 Apr 2025 08:21:21 -0400 Subject: [PATCH 0017/1435] fixed channel group saving --- frontend/src/components/forms/Channel.jsx | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/frontend/src/components/forms/Channel.jsx b/frontend/src/components/forms/Channel.jsx index 2e0b702b..6e53884e 100644 --- a/frontend/src/components/forms/Channel.jsx +++ b/frontend/src/components/forms/Channel.jsx @@ -173,7 +173,9 @@ const Channel = ({ channel = null, isOpen, onClose }) => { formik.setValues({ name: channel.name, channel_number: channel.channel_number, - channel_group_id: channel.channel_group_id ?? '', + channel_group_id: channel.channel_group_id + ? `${channel.channel_group_id}` + : '', stream_profile_id: channel.stream_profile_id ? 
`${channel.stream_profile_id}` : '0', From 4df796ac7f778cef3ea42314c5d5613cc64fa1fe Mon Sep 17 00:00:00 2001 From: dekzter Date: Mon, 21 Apr 2025 15:59:29 -0400 Subject: [PATCH 0018/1435] requery on channel delete --- frontend/src/components/tables/ChannelsTable.jsx | 1 + 1 file changed, 1 insertion(+) diff --git a/frontend/src/components/tables/ChannelsTable.jsx b/frontend/src/components/tables/ChannelsTable.jsx index f32f6a8b..9cb5aa34 100644 --- a/frontend/src/components/tables/ChannelsTable.jsx +++ b/frontend/src/components/tables/ChannelsTable.jsx @@ -417,6 +417,7 @@ const ChannelsTable = ({}) => { return deleteChannels(); } await API.deleteChannel(id); + API.requeryChannels(); }; const createRecording = (channel) => { From 21c67b999df7e0e7fe89388b3112e8b325564b84 Mon Sep 17 00:00:00 2001 From: dekzter Date: Mon, 21 Apr 2025 16:23:55 -0400 Subject: [PATCH 0019/1435] fixing when to render onboarding channel section --- frontend/src/components/tables/ChannelsTable.jsx | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/frontend/src/components/tables/ChannelsTable.jsx b/frontend/src/components/tables/ChannelsTable.jsx index 9cb5aa34..ca100ac4 100644 --- a/frontend/src/components/tables/ChannelsTable.jsx +++ b/frontend/src/components/tables/ChannelsTable.jsx @@ -1210,7 +1210,7 @@ const ChannelsTable = ({}) => { {/* Table or ghost empty state inside Paper */} - {initialDataCount === 0 && ( + {initialDataCount === 0 && data.length === 0 && ( { )} - {initialDataCount > 0 && ( + {data.length > 0 && ( Date: Mon, 21 Apr 2025 18:33:01 -0400 Subject: [PATCH 0020/1435] copy buttons for stream and channel urls --- .../src/components/tables/ChannelsTable.jsx | 31 +++++++++++++++---- .../src/components/tables/StreamsTable.jsx | 18 +++++++++-- 2 files changed, 40 insertions(+), 9 deletions(-) diff --git a/frontend/src/components/tables/ChannelsTable.jsx b/frontend/src/components/tables/ChannelsTable.jsx index ca100ac4..62886be5 100644 --- a/frontend/src/components/tables/ChannelsTable.jsx +++ b/frontend/src/components/tables/ChannelsTable.jsx @@ -60,6 +60,8 @@ import { NativeSelect, Table, Checkbox, + UnstyledButton, + CopyButton, } from '@mantine/core'; import { useReactTable, @@ -190,6 +192,7 @@ const ChannelRowActions = React.memo( deleteChannel, handleWatchStream, createRecording, + getChannelURL, }) => { const onEdit = useCallback(() => { editChannel(row.original); @@ -200,7 +203,7 @@ const ChannelRowActions = React.memo( }, []); const onPreview = useCallback(() => { - handleWatchStream(row.original.uuid); + handleWatchStream(row.original); }, []); const onRecord = useCallback(() => { @@ -245,6 +248,15 @@ const ChannelRowActions = React.memo(
+ }> + + {({ copied, copy }) => ( + + {copied ? 'Copied!' : 'Copy URL'} + + )} + + } > - Record + Record
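The Copy URL entries above follow Mantine's CopyButton render-prop pattern; a condensed sketch, with `getChannelURL` being the helper added in the next hunk:

    <CopyButton value={getChannelURL(row.original)}>
      {({ copied, copy }) => (
        // copy() writes the URL to the clipboard; `copied` flips to true for a
        // moment afterwards, driving the temporary label swap.
        <Menu.Item onClick={copy}>
          {copied ? 'Copied!' : 'Copy URL'}
        </Menu.Item>
      )}
    </CopyButton>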
@@ -425,12 +437,18 @@ const ChannelsTable = ({}) => { setRecordingModalOpen(true); }; - function handleWatchStream(channelNumber) { - let vidUrl = `/proxy/ts/stream/${channelNumber}`; + const getChannelURL = (channel) => { + console.log(window.location); + let channelUrl = `${window.location.protocol}//${window.location.host}/proxy/ts/stream/${channel.uuid}`; if (env_mode == 'dev') { - vidUrl = `${window.location.protocol}//${window.location.hostname}:5656${vidUrl}`; + channelUrl = `${window.location.protocol}//${window.location.hostname}:5656/proxy/ts/stream/${channel.uuid}`; } - showVideo(vidUrl); + + return channelUrl; + }; + + function handleWatchStream(channel) { + showVideo(getChannelURL(channel)); } const onRowSelectionChange = (updater) => { @@ -785,6 +803,7 @@ const ChannelsTable = ({}) => { deleteChannel={deleteChannel} handleWatchStream={handleWatchStream} createRecording={createRecording} + getChannelURL={getChannelURL} /> ), enableSorting: false, diff --git a/frontend/src/components/tables/StreamsTable.jsx b/frontend/src/components/tables/StreamsTable.jsx index b7962dad..4a27baa1 100644 --- a/frontend/src/components/tables/StreamsTable.jsx +++ b/frontend/src/components/tables/StreamsTable.jsx @@ -11,6 +11,7 @@ import { ListPlus, SquareMinus, EllipsisVertical, + Copy, } from 'lucide-react'; import { TextInput, @@ -34,6 +35,8 @@ import { NativeSelect, MultiSelect, useMantineTheme, + CopyButton, + UnstyledButton, } from '@mantine/core'; import { IconSquarePlus } from '@tabler/icons-react'; import { useNavigate } from 'react-router-dom'; @@ -540,22 +543,31 @@ const StreamsTable = ({}) => { + }> + + {({ copied, copy }) => ( + + {copied ? 'Copied!' : 'Copy URL'} + + )} + + editStream(row.original)} disabled={!row.original.is_custom} > - Edit + Edit deleteStream(row.original.id)} disabled={!row.original.is_custom} > - Delete Stream + Delete Stream handleWatchStream(row.original.stream_hash)} > - Preview Stream + Preview Stream From 8f29707a0b3145356a56d3eb072d25f15ca9e0b3 Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Wed, 23 Apr 2025 08:38:29 -0500 Subject: [PATCH 0021/1435] Fix logo URL handling in M3U generation to convert filesystem paths to web URLs --- apps/output/views.py | 13 ++++++++++++- 1 file changed, 12 insertions(+), 1 deletion(-) diff --git a/apps/output/views.py b/apps/output/views.py index 3d268e10..debd1369 100644 --- a/apps/output/views.py +++ b/apps/output/views.py @@ -26,7 +26,18 @@ def generate_m3u(request, profile_name=None): group_title = channel.channel_group.name if channel.channel_group else "Default" tvg_id = channel.channel_number or channel.id tvg_name = channel.name - tvg_logo = channel.logo.url if channel.logo else "" + + # Fix for logo URLs - convert filesystem paths to web URLs + tvg_logo = "" + if channel.logo: + tvg_logo = channel.logo.url + + # Convert to absolute URL if it's a filesystem path + if tvg_logo.startswith('/data'): + logo_uri = re.sub(r"^\/data", '', tvg_logo) + base_url = request.build_absolute_uri('/')[:-1] + tvg_logo = f"{base_url}{logo_uri}" + channel_number = channel.channel_number extinf_line = ( From 0846b9b42cb13a3ee52cac2a1e2ef03ab8e6ea19 Mon Sep 17 00:00:00 2001 From: GitHub Actions Date: Wed, 23 Apr 2025 13:39:06 +0000 Subject: [PATCH 0022/1435] Increment build number to 3 [skip ci] --- version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/version.py b/version.py index ccb3ebea..704d023e 100644 --- a/version.py +++ b/version.py @@ -2,4 +2,4 @@ Dispatcharr version information. 
""" __version__ = '0.3.3' # Follow semantic versioning (MAJOR.MINOR.PATCH) -__build__ = '2' # Auto-incremented on builds +__build__ = '3' # Auto-incremented on builds From 5eae8bd603aa925ef5eb0322c94a30375f198099 Mon Sep 17 00:00:00 2001 From: dekzter Date: Wed, 23 Apr 2025 10:50:23 -0400 Subject: [PATCH 0023/1435] attempt to use localstorage for saving preferences --- .../src/components/tables/ChannelsTable.jsx | 49 +++++++------------ 1 file changed, 19 insertions(+), 30 deletions(-) diff --git a/frontend/src/components/tables/ChannelsTable.jsx b/frontend/src/components/tables/ChannelsTable.jsx index 62886be5..1bb276d3 100644 --- a/frontend/src/components/tables/ChannelsTable.jsx +++ b/frontend/src/components/tables/ChannelsTable.jsx @@ -10,7 +10,6 @@ import { notifications } from '@mantine/notifications'; import API from '../../api'; import ChannelForm from '../forms/Channel'; import RecordingForm from '../forms/Recording'; -import { TableHelper } from '../../helpers'; import { useDebounce } from '../../utils'; import logo from '../../images/logo.png'; import useVideoStore from '../../store/useVideoStore'; @@ -29,8 +28,6 @@ import { CircleCheck, ScanEye, EllipsisVertical, - CircleEllipsis, - CopyMinus, ArrowUpNarrowWide, ArrowUpDown, ArrowDownWideNarrow, @@ -49,7 +46,6 @@ import { Flex, Text, Tooltip, - Grid, Group, useMantineTheme, Center, @@ -58,7 +54,6 @@ import { MultiSelect, Pagination, NativeSelect, - Table, Checkbox, UnstyledButton, CopyButton, @@ -73,26 +68,8 @@ import { } from '@tanstack/react-table'; import './table.css'; import useChannelsTableStore from '../../store/channelsTable'; -import usePlaylistsStore from '../../store/playlists'; -import { MantineReactTable, useMantineReactTable } from 'mantine-react-table'; -import { - DndContext, - KeyboardSensor, - MouseSensor, - TouchSensor, - closestCenter, - useSensor, - useSensors, -} from '@dnd-kit/core'; -import { restrictToVerticalAxis } from '@dnd-kit/modifiers'; -import { - arrayMove, - SortableContext, - verticalListSortingStrategy, -} from '@dnd-kit/sortable'; -import { useSortable } from '@dnd-kit/sortable'; -import { CSS } from '@dnd-kit/utilities'; import ChannelTableStreams from './ChannelTableStreams'; +import useLocalStorage from '../../hooks/useLocalStorage'; const m3uUrlBase = `${window.location.protocol}//${window.location.host}/output/m3u`; const epgUrlBase = `${window.location.protocol}//${window.location.host}/output/epg`; @@ -288,11 +265,15 @@ const ChannelsTable = ({}) => { const setSelectedTableIds = useChannelsTableStore( (s) => s.setSelectedChannelIds ); + const channels = useChannelsStore((s) => s.channels); const profiles = useChannelsStore((s) => s.profiles); const selectedProfileId = useChannelsStore((s) => s.selectedProfileId); const setSelectedProfileId = useChannelsStore((s) => s.setSelectedProfileId); const channelGroups = useChannelsStore((s) => s.channelGroups); const logos = useChannelsStore((s) => s.logos); + const [tablePrefs, setTablePrefs] = useLocalStorage('channel-table-prefs', { + pageSize: 50, + }); const selectedProfileChannels = useChannelsStore( (s) => s.profiles[selectedProfileId]?.channels @@ -302,7 +283,12 @@ const ChannelsTable = ({}) => { [selectedProfileChannels] ); - const groupOptions = Object.values(channelGroups).map((group) => group.name); + const activeGroupIds = new Set( + Object.values(channels).map((channel) => channel.channel_group_id) + ); + const groupOptions = Object.values(channelGroups) + .filter((group) => activeGroupIds.has(group.id)) + .map((group) => 
group.name); const env_mode = useSettingsStore((s) => s.environment.env_mode); @@ -316,7 +302,7 @@ const ChannelsTable = ({}) => { const [paginationString, setPaginationString] = useState(''); const [pagination, setPagination] = useState({ pageIndex: 0, - pageSize: 50, + pageSize: tablePrefs.pageSize, }); const [initialDataCount, setInitialDataCount] = useState(null); const [filters, setFilters] = useState({ @@ -366,6 +352,9 @@ const ChannelsTable = ({}) => { // Generate the string setPaginationString(`${startItem} to ${endItem} of ${results.count}`); + setTablePrefs({ + pageSize: pagination.pageSize, + }); }, [pagination, sorting, debouncedFilters]); useEffect(() => { @@ -438,10 +427,10 @@ const ChannelsTable = ({}) => { }; const getChannelURL = (channel) => { - console.log(window.location); - let channelUrl = `${window.location.protocol}//${window.location.host}/proxy/ts/stream/${channel.uuid}`; + const uri = `/proxy/ts/stream/${channel.uuid}`; + let channelUrl = `${window.location.protocol}//${window.location.host}${uri}`; if (env_mode == 'dev') { - channelUrl = `${window.location.protocol}//${window.location.hostname}:5656/proxy/ts/stream/${channel.uuid}`; + channelUrl = `${window.location.protocol}//${window.location.hostname}:5656${uri}`; } return channelUrl; @@ -998,7 +987,7 @@ const ChannelsTable = ({}) => {
); }, - [rows, rowCount] + [rows] ); return ( From 3e2f91abf88675795b71a3fa2fa013079be6f548 Mon Sep 17 00:00:00 2001 From: dekzter Date: Wed, 23 Apr 2025 11:02:00 -0400 Subject: [PATCH 0024/1435] hopefully finalizing table rewrite --- ...ame.py => 0017_alter_channelgroup_name.py} | 9 +- apps/channels/models.py | 2 +- .../src/components/tables/ChannelsTable.jsx | 433 +++++++----------- .../tables/CustomTable/CustomTable.jsx | 130 +----- .../tables/CustomTable/CustomTableBody.jsx | 65 +++ .../tables/CustomTable/CustomTableHeader.jsx | 127 +---- .../components/tables/CustomTable/index.jsx | 202 ++++++++ frontend/src/components/tables/table.css | 2 +- .../{channelsTable => channelsTable.jsx} | 18 +- 9 files changed, 490 insertions(+), 498 deletions(-) rename apps/channels/migrations/{0017_alter_channel_channel_number_alter_channelgroup_name.py => 0017_alter_channelgroup_name.py} (52%) create mode 100644 frontend/src/components/tables/CustomTable/CustomTableBody.jsx create mode 100644 frontend/src/components/tables/CustomTable/index.jsx rename frontend/src/store/{channelsTable => channelsTable.jsx} (72%) diff --git a/apps/channels/migrations/0017_alter_channel_channel_number_alter_channelgroup_name.py b/apps/channels/migrations/0017_alter_channelgroup_name.py similarity index 52% rename from apps/channels/migrations/0017_alter_channel_channel_number_alter_channelgroup_name.py rename to apps/channels/migrations/0017_alter_channelgroup_name.py index 1bb7d2e7..03043d65 100644 --- a/apps/channels/migrations/0017_alter_channel_channel_number_alter_channelgroup_name.py +++ b/apps/channels/migrations/0017_alter_channelgroup_name.py @@ -1,4 +1,4 @@ -# Generated by Django 5.1.6 on 2025-04-19 12:08 +# Generated by Django 5.1.6 on 2025-04-21 20:47 from django.db import migrations, models @@ -10,14 +10,9 @@ class Migration(migrations.Migration): ] operations = [ - migrations.AlterField( - model_name='channel', - name='channel_number', - field=models.IntegerField(db_index=True), - ), migrations.AlterField( model_name='channelgroup', name='name', - field=models.CharField(db_index=True, max_length=100, unique=True), + field=models.TextField(db_index=True, unique=True), ), ] diff --git a/apps/channels/models.py b/apps/channels/models.py index 249343e9..0b66c468 100644 --- a/apps/channels/models.py +++ b/apps/channels/models.py @@ -27,7 +27,7 @@ def get_total_viewers(channel_id): return 0 class ChannelGroup(models.Model): - name = models.CharField(max_length=100, unique=True, db_index=True) + name = models.TextField(unique=True, db_index=True) def related_channels(self): # local import if needed to avoid cyc. 
Usually fine in a single file though diff --git a/frontend/src/components/tables/ChannelsTable.jsx b/frontend/src/components/tables/ChannelsTable.jsx index 1bb276d3..8568b52c 100644 --- a/frontend/src/components/tables/ChannelsTable.jsx +++ b/frontend/src/components/tables/ChannelsTable.jsx @@ -58,18 +58,12 @@ import { UnstyledButton, CopyButton, } from '@mantine/core'; -import { - useReactTable, - getCoreRowModel, - getPaginationRowModel, - getSortedRowModel, - getFilteredRowModel, - flexRender, -} from '@tanstack/react-table'; +import { getCoreRowModel, flexRender } from '@tanstack/react-table'; import './table.css'; import useChannelsTableStore from '../../store/channelsTable'; import ChannelTableStreams from './ChannelTableStreams'; import useLocalStorage from '../../hooks/useLocalStorage'; +import { CustomTable, useTable } from './CustomTable'; const m3uUrlBase = `${window.location.protocol}//${window.location.host}/output/m3u`; const epgUrlBase = `${window.location.protocol}//${window.location.host}/output/epg`; @@ -292,6 +286,7 @@ const ChannelsTable = ({}) => { const env_mode = useSettingsStore((s) => s.environment.env_mode); + const [allRowIds, setAllRowIds] = useState([]); const [channel, setChannel] = useState(null); const [channelModalOpen, setChannelModalOpen] = useState(false); const [recordingModalOpen, setRecordingModalOpen] = useState(false); @@ -299,12 +294,9 @@ const ChannelsTable = ({}) => { const [selectedProfile, setSelectedProfile] = useState( profiles[selectedProfileId] ); + const pagination = useChannelsTableStore((s) => s.pagination); + const setPagination = useChannelsTableStore((s) => s.setPagination); const [paginationString, setPaginationString] = useState(''); - const [pagination, setPagination] = useState({ - pageIndex: 0, - pageSize: tablePrefs.pageSize, - }); - const [initialDataCount, setInitialDataCount] = useState(null); const [filters, setFilters] = useState({ name: '', channel_group: '', @@ -312,10 +304,9 @@ const ChannelsTable = ({}) => { const debouncedFilters = useDebounce(filters, 500); const [isLoading, setIsLoading] = useState(true); const [selectedChannelIds, setSelectedChannelIds] = useState([]); - const [sorting, setSorting] = useState([ - { id: 'channel_number', desc: false }, - ]); - const [expandedRowId, setExpandedRowId] = useState(null); + const sorting = useChannelsTableStore((s) => s.sorting); + const setSorting = useChannelsTableStore((s) => s.setSorting); + const [expandedRowIds, setExpandedRowIds] = useState([]); const [hdhrUrl, setHDHRUrl] = useState(hdhrUrlBase); const [epgUrl, setEPGUrl] = useState(epgUrlBase); @@ -339,6 +330,7 @@ const ChannelsTable = ({}) => { }); const results = await API.queryChannels(params); + const ids = await API.getAllChannelIds(params); const startItem = pagination.pageIndex * pagination.pageSize + 1; // +1 to start from 1, not 0 const endItem = Math.min( @@ -346,15 +338,12 @@ const ChannelsTable = ({}) => { results.count ); - if (initialDataCount === null) { - setInitialDataCount(results.count); - } - // Generate the string setPaginationString(`${startItem} to ${endItem} of ${results.count}`); setTablePrefs({ pageSize: pagination.pageSize, }); + setAllRowIds(ids); }, [pagination, sorting, debouncedFilters]); useEffect(() => { @@ -386,10 +375,6 @@ const ChannelsTable = ({}) => { ...prev, [name]: value, })); - setPagination({ - pageIndex: 0, - pageSize: pagination.pageSize, - }); }, []); const handleGroupChange = (value) => { @@ -397,10 +382,6 @@ const ChannelsTable = ({}) => { ...prev, channel_group: 
value ? value : '', })); - setPagination({ - pageIndex: 0, - pageSize: pagination.pageSize, - }); }; const hdhrUrlRef = useRef(null); @@ -440,49 +421,49 @@ const ChannelsTable = ({}) => { showVideo(getChannelURL(channel)); } - const onRowSelectionChange = (updater) => { - setRowSelection((prevRowSelection) => { - const newRowSelection = - typeof updater === 'function' ? updater(prevRowSelection) : updater; + // const onRowSelectionChange = (updater) => { + // setRowSelection((prevRowSelection) => { + // const newRowSelection = + // typeof updater === 'function' ? updater(prevRowSelection) : updater; - const updatedSelected = new Set([...selectedChannelIds]); - getRowModel().rows.forEach((row) => { - if (newRowSelection[row.id] === undefined || !newRowSelection[row.id]) { - updatedSelected.delete(row.original.id); - } else { - updatedSelected.add(row.original.id); - } - }); - const newSelection = [...updatedSelected]; - setSelectedChannelIds(newSelection); - setSelectedTableIds(newSelection); + // const updatedSelected = new Set([...selectedChannelIds]); + // getRowModel().rows.forEach((row) => { + // if (newRowSelection[row.id] === undefined || !newRowSelection[row.id]) { + // updatedSelected.delete(row.original.id); + // } else { + // updatedSelected.add(row.original.id); + // } + // }); + // const newSelection = [...updatedSelected]; + // setSelectedChannelIds(newSelection); + // setSelectedTableIds(newSelection); - return newRowSelection; - }); - }; + // return newRowSelection; + // }); + // }; - const onSelectAllChange = async (e) => { - const selectAll = e.target.checked; - if (selectAll) { - // Get all channel IDs for current view - const params = new URLSearchParams(); - Object.entries(debouncedFilters).forEach(([key, value]) => { - if (value) params.append(key, value); - }); - const ids = await API.getAllChannelIds(params); - setSelectedTableIds(ids); - setSelectedChannelIds(ids); - } else { - setSelectedTableIds([]); - setSelectedChannelIds([]); - } + // const onSelectAllChange = async (e) => { + // const selectAll = e.target.checked; + // if (selectAll) { + // // Get all channel IDs for current view + // const params = new URLSearchParams(); + // Object.entries(debouncedFilters).forEach(([key, value]) => { + // if (value) params.append(key, value); + // }); + // const ids = await API.getAllChannelIds(params); + // setSelectedTableIds(ids); + // setSelectedChannelIds(ids); + // } else { + // setSelectedTableIds([]); + // setSelectedChannelIds([]); + // } - const newSelection = {}; - getRowModel().rows.forEach((item, index) => { - newSelection[index] = selectAll; - }); - setRowSelection(newSelection); - }; + // const newSelection = {}; + // getRowModel().rows.forEach((item, index) => { + // newSelection[index] = selectAll; + // }); + // setRowSelection(newSelection); + // }; const onPageSizeChange = (e) => { setPagination({ @@ -798,50 +779,9 @@ const ChannelsTable = ({}) => { enableSorting: false, }, ], - [selectedProfileId, data] + [selectedProfileId, data, channelGroups] ); - const { getHeaderGroups, getRowModel } = useReactTable({ - data, - columns: columns, - defaultColumn: { - size: undefined, - minSize: 0, - }, - pageCount, - state: { - data, - rowCount, - sorting, - filters, - pagination, - rowSelection, - }, - manualPagination: true, - manualSorting: true, - manualFiltering: true, - enableRowSelection: true, - onRowSelectionChange: onRowSelectionChange, - getCoreRowModel: getCoreRowModel(), - getFilteredRowModel: getFilteredRowModel(), - getSortedRowModel: 
getSortedRowModel(), - getPaginationRowModel: getPaginationRowModel(), - // debugTable: true, - }); - - const rows = getRowModel().rows; - - const onRowExpansion = (row) => { - let isExpanded = false; - setExpandedRowId((prev) => { - isExpanded = prev === row.original.id ? null : row.original.id; - return isExpanded; - }); - setRowSelection({ [row.index]: true }); - setSelectedChannelIds([row.original.id]); - setSelectedTableIds([row.original.id]); - }; - const renderHeaderCell = (header) => { let sortingIcon = ArrowUpDown; if (sorting[0]?.id == header.id) { @@ -853,11 +793,6 @@ const ChannelsTable = ({}) => { } switch (header.id) { - case 'select': - return ChannelRowSelectHeader({ - selectedChannelIds, - }); - case 'enabled': if (selectedProfileId !== '0' && selectedChannelIds.length > 0) { // return EnabledHeaderSwitch(); @@ -923,22 +858,85 @@ const ChannelsTable = ({}) => { } }; - const renderBodyCell = (cell) => { - switch (cell.column.id) { - case 'select': - return ChannelRowSelectCell({ row: cell.row }); + const table = useTable({ + data, + columns, + allRowIds, + defaultColumn: { + size: undefined, + minSize: 0, + }, + pageCount, + // state: { + // data, + // rowCount, + // sorting, + // filters, + // pagination, + // rowSelection, + // }, + filters, + pagination, + sorting, + expandedRowIds, + manualPagination: true, + manualSorting: true, + manualFiltering: true, + enableRowSelection: true, + // onRowSelectionChange: onRowSelectionChange, + getCoreRowModel: getCoreRowModel(), + // getFilteredRowModel: getFilteredRowModel(), + // getSortedRowModel: getSortedRowModel(), + // getPaginationRowModel: getPaginationRowModel(), + // debugTable: true, + expandedRowRenderer: ({ row }) => { + return ( + + + + ); + }, + headerCellRenderFns: { + name: renderHeaderCell, + enabled: () => ( +
+ +
+ ), + }, + }); - case 'expand': - return ChannelExpandCell({ row: cell.row }); - - default: - return flexRender(cell.column.columnDef.cell, cell.getContext()); - } + const onRowExpansion = (row) => { + let isExpanded = false; + setExpandedRowIds((prev) => { + isExpanded = prev === row.original.id ? null : row.original.id; + return isExpanded; + }); + setRowSelection({ [row.index]: true }); + setSelectedChannelIds([row.original.id]); + setSelectedTableIds([row.original.id]); }; + // const renderBodyCell = (cell) => { + // switch (cell.column.id) { + // case 'select': + // return ChannelRowSelectCell({ row: cell.row }); + + // case 'expand': + // return ChannelExpandCell({ row: cell.row }); + + // default: + // return flexRender(cell.column.columnDef.cell, cell.getContext()); + // } + // }; + const ChannelExpandCell = useCallback( ({ row }) => { - const isExpanded = expandedRowId === row.original.id; + const isExpanded = expandedRowIds === row.original.id; return (
{
); }, - [expandedRowId] + [expandedRowIds] ); - const ChannelRowSelectCell = useCallback( - ({ row }) => { - return ( -
- -
- ); - }, - [rows] - ); + // const ChannelRowSelectCell = useCallback( + // ({ row }) => { + // return ( + //
+ // + //
+ // ); + // }, + // [rows] + // ); - const ChannelRowSelectHeader = useCallback( - ({ selectedChannelIds }) => { - return ( -
- 0 && - selectedChannelIds.length !== rowCount - } - onChange={onSelectAllChange} - /> -
- ); - }, - [rows] - ); + // const ChannelRowSelectHeader = useCallback( + // ({ selectedChannelIds }) => { + // return ( + //
+ // 0 && + // selectedChannelIds.length !== rowCount + // } + // onChange={onSelectAllChange} + // /> + //
+ // ); + // }, + // [rows] + // ); return ( @@ -1218,7 +1216,7 @@ const ChannelsTable = ({}) => { {/* Table or ghost empty state inside Paper */} - {initialDataCount === 0 && data.length === 0 && ( + {Object.keys(channels).length === 0 && ( { )} - {data.length > 0 && ( + {Object.keys(channels).length > 0 && ( { borderRadius: 'var(--mantine-radius-default)', }} > - - - {getHeaderGroups().map((headerGroup) => ( - - {headerGroup.headers.map((header) => { - const width = header.getSize(); - return ( - - - {renderHeaderCell(header)} - - - ); - })} - - ))} - - - {getRowModel().rows.map((row) => ( - - - {row.getVisibleCells().map((cell) => { - const width = cell.column.getSize(); - return ( - - - {renderBodyCell(cell)} - - - ); - })} - - {row.original.id === expandedRowId && ( - - - - )} - - ))} - - + { - const [expandedRowId, setExpandedRowId] = useState(null); - - const rows = table.getRowModel().rows; - - const ChannelExpandCell = useCallback( - ({ row }) => { - const isExpanded = expandedRowId === row.original.id; - - return ( -
{ - setExpandedRowId((prev) => - prev === row.original.id ? null : row.original.id - ); - }} - > - {isExpanded ? : } -
- ); - }, - [expandedRowId] - ); - - const ChannelRowSelectCell = useCallback( - ({ row }) => { - return ( -
- -
- ); - }, - [rows] - ); - - const bodyCellRenderer = (cell) => { - if (bodyCellRenderFns[cell.column.id]) { - return bodyCellRenderFns(cell); - } - - switch (cell.column.id) { - case 'select': - return ChannelRowSelectCell({ row: cell.row }); - - case 'expand': - return ChannelExpandCell({ row: cell.row }); - - default: - return flexRender(cell.column.columnDef.cell, cell.getContext()); - } - }; - +const CustomTable = ({ table }) => { return ( + - - {table.getRowModel().rows.map((row) => ( - - - {row.getVisibleCells().map((cell) => { - return ( - - - {bodyCellRenderer(cell)} - - - ); - })} - - {row.original.id === expandedRowId && ( - - - - )} - - ))} - ); }; diff --git a/frontend/src/components/tables/CustomTable/CustomTableBody.jsx b/frontend/src/components/tables/CustomTable/CustomTableBody.jsx new file mode 100644 index 00000000..c2a26f2d --- /dev/null +++ b/frontend/src/components/tables/CustomTable/CustomTableBody.jsx @@ -0,0 +1,65 @@ +import { Box, Flex } from '@mantine/core'; +import { flexRender } from '@tanstack/react-table'; + +const CustomTableBody = ({ + getRowModel, + bodyCellRenderFns, + expandedRowIds, + expandedRowRenderer, +}) => { + const renderExpandedRow = (row) => { + if (expandedRowRenderer) { + return expandedRowRenderer({ row }); + } + + return <>; + }; + + return ( + + {getRowModel().rows.map((row) => ( + + + {row.getVisibleCells().map((cell) => { + return ( + + + {bodyCellRenderFns[cell.column.id] + ? bodyCellRenderFns[cell.column.id](cell) + : flexRender( + cell.column.columnDef.cell, + cell.getContext() + )} + + + ); + })} + + {expandedRowIds.includes(row.original.id) && renderExpandedRow(row)} + + ))} + + ); +}; + +export default CustomTableBody; diff --git a/frontend/src/components/tables/CustomTable/CustomTableHeader.jsx b/frontend/src/components/tables/CustomTable/CustomTableHeader.jsx index 50a173d2..7f71e04d 100644 --- a/frontend/src/components/tables/CustomTable/CustomTableHeader.jsx +++ b/frontend/src/components/tables/CustomTable/CustomTableHeader.jsx @@ -1,120 +1,39 @@ -import { Box, Flex } from '@mantine/core'; -import { - ArrowDownWideNarrow, - ArrowUpDown, - ArrowUpNarrowWide, -} from 'lucide-react'; +import { Box, Center, Checkbox, Flex } from '@mantine/core'; +import { flexRender } from '@tanstack/react-table'; import { useCallback } from 'react'; const CustomTableHeader = ({ - table, + getHeaderGroups, + allRowIds, + selectedTableIds, headerCellRenderFns, - rowCount, onSelectAllChange, }) => { - const ChannelRowSelectHeader = useCallback( - ({ selectedChannelIds }) => { - return ( -
- 0 && - selectedChannelIds.length !== rowCount - } - onChange={onSelectAllChange} - /> -
- ); - }, - [rows, rowCount] - ); - - const onSelectAll = (e) => { - if (onSelectAllChange) { - onSelectAllChange(e); - } - }; - - const headerCellRenderer = (header) => { - let sortingIcon = ArrowUpDown; - if (sorting[0]?.id == header.id) { - if (sorting[0].desc === false) { - sortingIcon = ArrowUpNarrowWide; - } else { - sortingIcon = ArrowDownWideNarrow; - } + const renderHeaderCell = (header) => { + if (headerCellRenderFns[header.id]) { + return headerCellRenderFns[header.id](header); } switch (header.id) { case 'select': - return ChannelRowSelectHeader({ - selectedChannelIds, - }); - - case 'enabled': - if (selectedProfileId !== '0' && selectedChannelIds.length > 0) { - // return EnabledHeaderSwitch(); - } return (
- + 0 && + selectedTableIds.length !== allRowIds.length + } + onChange={onSelectAllChange} + />
); - // case 'channel_number': - // return ( - // - // # - // {/*
- // {React.createElement(sortingIcon, { - // onClick: () => onSortingChange('name'), - // size: 14, - // })} - //
*/} - //
- // ); - - // case 'name': - // return ( - // - // e.stopPropagation()} - // onChange={handleFilterChange} - // size="xs" - // variant="unstyled" - // className="table-input-header" - // /> - //
- // {React.createElement(sortingIcon, { - // onClick: () => onSortingChange('name'), - // size: 14, - // })} - //
- //
- // ); - - // case 'channel_group': - // return ( - // - // ); - default: return flexRender(header.column.columnDef.header, header.getContext()); } @@ -130,7 +49,7 @@ const CustomTableHeader = ({ zIndex: 10, }} > - {table.getHeaderGroups().map((headerGroup) => ( + {getHeaderGroups().map((headerGroup) => ( - {headerCellRenderer(header)} + {renderHeaderCell(header)} ); diff --git a/frontend/src/components/tables/CustomTable/index.jsx b/frontend/src/components/tables/CustomTable/index.jsx new file mode 100644 index 00000000..514938cd --- /dev/null +++ b/frontend/src/components/tables/CustomTable/index.jsx @@ -0,0 +1,202 @@ +import { Center, Checkbox } from '@mantine/core'; +import CustomTable from './CustomTable'; +import CustomTableHeader from './CustomTableHeader'; + +import { + useReactTable, + getCoreRowModel, + flexRender, +} from '@tanstack/react-table'; +import { useCallback, useMemo, useState } from 'react'; +import { ChevronDown, ChevronRight } from 'lucide-react'; + +const useTable = ({ + allRowIds, + headerCellRenderFns = {}, + filters = {}, + pagination = {}, + sorting = [], + expandedRowRenderer = () => <>, + ...options +}) => { + const [selectedTableIds, setSelectedTableIds] = useState([]); + const [expandedRowIds, setExpandedRowIds] = useState([]); + + const rowCount = allRowIds.length; + + const onRowSelectionChange = (updater) => { + const newRowSelection = + typeof updater === 'function' ? updater(rowSelection) : updater; + + const updatedSelected = new Set(selectedTableIds); + + const allChangedRowIds = new Set([ + ...Object.keys(rowSelection), + ...Object.keys(newRowSelection), + ]); + + for (const rowId of allChangedRowIds) { + const wasSelected = !!rowSelection[rowId]; + const isSelected = !!newRowSelection[rowId]; + + if (wasSelected !== isSelected) { + const row = table.getRow(rowId); + if (!row) continue; + + const originalId = row.original.id; + if (isSelected) { + updatedSelected.add(originalId); + } else { + updatedSelected.delete(originalId); + } + } + } + + setSelectedTableIds([...updatedSelected]); + }; + + const table = useReactTable({ + ...options, + state: { + data: options.data, + selectedTableIds, + }, + onRowSelectionChange, + getCoreRowModel: options.getCoreRowModel ?? getCoreRowModel(), + }); + + const selectedTableIdsSet = useMemo( + () => new Set(selectedTableIds), + [selectedTableIds] + ); + + const rowSelection = useMemo(() => { + const selection = {}; + table.getRowModel().rows.forEach((row) => { + if (selectedTableIdsSet.has(row.original.id)) { + selection[row.id] = true; + } + }); + return selection; + }, [selectedTableIdsSet, table.getRowModel().rows]); + + const onSelectAllChange = async (e) => { + const selectAll = e.target.checked; + if (selectAll) { + setSelectedTableIds(allRowIds); + } else { + setSelectedTableIds([]); + } + }; + + const rows = table.getRowModel().rows; + + const onRowExpansion = (row) => { + let isExpanded = false; + setExpandedRowIds((prev) => { + isExpanded = prev.includes(row.original.id) ? [] : [row.original.id]; + return isExpanded; + }); + setSelectedTableIds([row.original.id]); + }; + + const renderHeaderCell = useCallback( + (header) => { + if (table.headerCellRenderFns && table.headerCellRenderFns[header.id]) { + return table.headerCellRenderFns[header.id](header); + } + + switch (header.id) { + case 'select': + return ( +
+ 0 && + selectedTableIds.length !== rowCount + } + onChange={onSelectAllChange} + /> +
+ ); + + default: + return flexRender( + header.column.columnDef.header, + header.getContext() + ); + } + }, + [filters, selectedTableIds, rowCount, onSelectAllChange, sorting] + ); + + const bodyCellRenderFns = { + select: useCallback( + ({ row }) => { + return ( +
+ { + const newSet = new Set(selectedTableIds); + if (e.target.checked) { + newSet.add(row.original.id); + } else { + newSet.delete(row.original.id); + } + setSelectedTableIds([...newSet]); + }} + /> +
+ ); + }, + [rows, selectedTableIdsSet] + ), + expand: useCallback(({ row }) => { + const isExpanded = expandedRowIds.includes(row.original.id); + + return ( +
{ + onRowExpansion(row); + }} + > + {isExpanded ? : } +
+ ); + }), + }; + + // Return both the table instance and your custom methods + const tableInstance = useMemo( + () => ({ + ...table, + ...options, + sorting, + selectedTableIds, + setSelectedTableIds, + rowSelection, + allRowIds, + onSelectAllChange, + selectedTableIdsSet, + expandedRowIds, + expandedRowRenderer, + }), + [selectedTableIdsSet, expandedRowIds] + ); + + return { + ...tableInstance, + headerCellRenderFns, + renderHeaderCell, + bodyCellRenderFns, + }; +}; + +export { useTable, CustomTable, CustomTableHeader }; diff --git a/frontend/src/components/tables/table.css b/frontend/src/components/tables/table.css index c1c43f20..044c5c87 100644 --- a/frontend/src/components/tables/table.css +++ b/frontend/src/components/tables/table.css @@ -41,7 +41,7 @@ } .td { - height: 21px; + height: 28px; border-bottom: solid 1px rgb(68,68,68); } diff --git a/frontend/src/store/channelsTable b/frontend/src/store/channelsTable.jsx similarity index 72% rename from frontend/src/store/channelsTable rename to frontend/src/store/channelsTable.jsx index 2a230e84..76941e4f 100644 --- a/frontend/src/store/channelsTable +++ b/frontend/src/store/channelsTable.jsx @@ -6,7 +6,11 @@ import API from '../api'; const useChannelsTableStore = create((set, get) => ({ channels: [], count: 0, - pageCount: 0, + sorting: [{ id: 'channel_number', desc: false }], + pagination: { + pageIndex: 0, + pageCount: 50, + }, selectedChannelIds: [], queryChannels: ({ results, count }, params) => { @@ -29,6 +33,18 @@ const useChannelsTableStore = create((set, get) => ({ const channel = get().channels.find((c) => c.id === id); return channel?.streams ?? []; }, + + setPagination: (pagination) => { + set((state) => ({ + pagination, + })); + }, + + setSorting: (sorting) => { + set((state) => ({ + sorting, + })); + }, })); export default useChannelsTableStore; From 9f15c99c01056dd02b478e543fb64c7d0376ed78 Mon Sep 17 00:00:00 2001 From: dekzter Date: Wed, 23 Apr 2025 11:02:06 -0400 Subject: [PATCH 0025/1435] better error handling --- apps/m3u/tasks.py | 40 ++++++++++++++++++++-------------------- 1 file changed, 20 insertions(+), 20 deletions(-) diff --git a/apps/m3u/tasks.py b/apps/m3u/tasks.py index 36de852b..beacaaa2 100644 --- a/apps/m3u/tasks.py +++ b/apps/m3u/tasks.py @@ -217,27 +217,27 @@ def process_m3u_batch(account_id, batch, groups, hash_keys): # compiled_filters = [(f.filter_type, re.compile(f.regex_pattern, re.IGNORECASE)) for f in filters] logger.debug(f"Processing batch of {len(batch)}") for stream_info in batch: - name, url = stream_info["name"], stream_info["url"] - tvg_id, tvg_logo = stream_info["attributes"].get("tvg-id", ""), stream_info["attributes"].get("tvg-logo", "") - group_title = stream_info["attributes"].get("group-title", "Default Group") - - # Filter out disabled groups for this account - if group_title not in groups: - logger.debug(f"Skipping stream in disabled group: {group_title}") - continue - - # if any(url.lower().endswith(ext) for ext in SKIP_EXTS) or len(url) > 2000: - # continue - - # if _matches_filters(name, group_title, account.filters.all()): - # continue - - # if any(compiled_pattern.search(current_info['name']) for ftype, compiled_pattern in compiled_filters if ftype == 'name'): - # excluded_count += 1 - # current_info = None - # continue - try: + name, url = stream_info["name"], stream_info["url"] + tvg_id, tvg_logo = stream_info["attributes"].get("tvg-id", ""), stream_info["attributes"].get("tvg-logo", "") + group_title = stream_info["attributes"].get("group-title", "Default Group") + + # 
Filter out disabled groups for this account + if group_title not in groups: + logger.debug(f"Skipping stream in disabled group: {group_title}") + continue + + # if any(url.lower().endswith(ext) for ext in SKIP_EXTS) or len(url) > 2000: + # continue + + # if _matches_filters(name, group_title, account.filters.all()): + # continue + + # if any(compiled_pattern.search(current_info['name']) for ftype, compiled_pattern in compiled_filters if ftype == 'name'): + # excluded_count += 1 + # current_info = None + # continue + stream_hash = Stream.generate_hash_key(name, url, tvg_id, hash_keys) # if redis_client.exists(f"m3u_refresh:{stream_hash}"): # # duplicate already processed by another batch From 260b57576a78bcddd02eb0c2aae7cfdb201b477c Mon Sep 17 00:00:00 2001 From: dekzter Date: Wed, 23 Apr 2025 11:02:28 -0400 Subject: [PATCH 0026/1435] initial commit --- frontend/src/hooks/useLocalStorage.jsx | 28 ++++++++++++++++++++++++++ 1 file changed, 28 insertions(+) create mode 100644 frontend/src/hooks/useLocalStorage.jsx diff --git a/frontend/src/hooks/useLocalStorage.jsx b/frontend/src/hooks/useLocalStorage.jsx new file mode 100644 index 00000000..450c55df --- /dev/null +++ b/frontend/src/hooks/useLocalStorage.jsx @@ -0,0 +1,28 @@ +import React, { useEffect, useState } from 'react'; + +const useLocalStorage = (key, defaultValue) => { + const localKey = key; + + const [value, setValue] = useState(() => { + try { + const item = localStorage.getItem(localKey); + return item ? JSON.parse(item) : defaultValue; + } catch (error) { + console.error(`Error reading key "${localKey}":`, error); + } + + return defaultValue; + }); + + useEffect(() => { + try { + localStorage.setItem(localKey, JSON.stringify(value)); + } catch (error) { + console.error(`Error saving setting: ${localKey}:`, error); + } + }, [localKey, value]); + + return [value, setValue]; +}; + +export default useLocalStorage; From 450628040076f5ab268e6a3acc79047f4388b03b Mon Sep 17 00:00:00 2001 From: dekzter Date: Wed, 23 Apr 2025 11:20:00 -0400 Subject: [PATCH 0027/1435] restored support for adding streams from streams table --- .../src/components/tables/ChannelsTable.jsx | 25 ++------- .../components/tables/CustomTable/index.jsx | 51 +++++-------------- 2 files changed, 17 insertions(+), 59 deletions(-) diff --git a/frontend/src/components/tables/ChannelsTable.jsx b/frontend/src/components/tables/ChannelsTable.jsx index 8568b52c..1d70800f 100644 --- a/frontend/src/components/tables/ChannelsTable.jsx +++ b/frontend/src/components/tables/ChannelsTable.jsx @@ -421,26 +421,9 @@ const ChannelsTable = ({}) => { showVideo(getChannelURL(channel)); } - // const onRowSelectionChange = (updater) => { - // setRowSelection((prevRowSelection) => { - // const newRowSelection = - // typeof updater === 'function' ? 
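// The useLocalStorage hook introduced in the previous patch persists any
// JSON-serializable value across reloads and swallows storage errors. A
// minimal usage sketch (the 'channels-page-size' key is illustrative, not
// part of this changeset):
//
//   const [pageSize, setPageSize] = useLocalStorage('channels-page-size', 50);
//   // first render: reads localStorage (or falls back to 50);
//   // each setPageSize(...) call is written back by the hook's useEffect.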
updater(prevRowSelection) : updater; - - // const updatedSelected = new Set([...selectedChannelIds]); - // getRowModel().rows.forEach((row) => { - // if (newRowSelection[row.id] === undefined || !newRowSelection[row.id]) { - // updatedSelected.delete(row.original.id); - // } else { - // updatedSelected.add(row.original.id); - // } - // }); - // const newSelection = [...updatedSelected]; - // setSelectedChannelIds(newSelection); - // setSelectedTableIds(newSelection); - - // return newRowSelection; - // }); - // }; + const onRowSelectionChange = (newSelection) => { + setSelectedTableIds(newSelection); + }; // const onSelectAllChange = async (e) => { // const selectAll = e.target.checked; @@ -883,7 +866,7 @@ const ChannelsTable = ({}) => { manualSorting: true, manualFiltering: true, enableRowSelection: true, - // onRowSelectionChange: onRowSelectionChange, + onRowSelectionChange: onRowSelectionChange, getCoreRowModel: getCoreRowModel(), // getFilteredRowModel: getFilteredRowModel(), // getSortedRowModel: getSortedRowModel(), diff --git a/frontend/src/components/tables/CustomTable/index.jsx b/frontend/src/components/tables/CustomTable/index.jsx index 514938cd..115e0ff6 100644 --- a/frontend/src/components/tables/CustomTable/index.jsx +++ b/frontend/src/components/tables/CustomTable/index.jsx @@ -14,9 +14,9 @@ const useTable = ({ allRowIds, headerCellRenderFns = {}, filters = {}, - pagination = {}, sorting = [], expandedRowRenderer = () => <>, + onRowSelectionChange = null, ...options }) => { const [selectedTableIds, setSelectedTableIds] = useState([]); @@ -24,44 +24,12 @@ const useTable = ({ const rowCount = allRowIds.length; - const onRowSelectionChange = (updater) => { - const newRowSelection = - typeof updater === 'function' ? updater(rowSelection) : updater; - - const updatedSelected = new Set(selectedTableIds); - - const allChangedRowIds = new Set([ - ...Object.keys(rowSelection), - ...Object.keys(newRowSelection), - ]); - - for (const rowId of allChangedRowIds) { - const wasSelected = !!rowSelection[rowId]; - const isSelected = !!newRowSelection[rowId]; - - if (wasSelected !== isSelected) { - const row = table.getRow(rowId); - if (!row) continue; - - const originalId = row.original.id; - if (isSelected) { - updatedSelected.add(originalId); - } else { - updatedSelected.delete(originalId); - } - } - } - - setSelectedTableIds([...updatedSelected]); - }; - const table = useReactTable({ ...options, state: { data: options.data, selectedTableIds, }, - onRowSelectionChange, getCoreRowModel: options.getCoreRowModel ?? getCoreRowModel(), }); @@ -70,6 +38,13 @@ const useTable = ({ [selectedTableIds] ); + const updateSelectedTableIds = (ids) => { + setSelectedTableIds(ids); + if (onRowSelectionChange) { + onRowSelectionChange(ids); + } + }; + const rowSelection = useMemo(() => { const selection = {}; table.getRowModel().rows.forEach((row) => { @@ -83,9 +58,9 @@ const useTable = ({ const onSelectAllChange = async (e) => { const selectAll = e.target.checked; if (selectAll) { - setSelectedTableIds(allRowIds); + updateSelectedTableIds(allRowIds); } else { - setSelectedTableIds([]); + updateSelectedTableIds([]); } }; @@ -97,7 +72,7 @@ const useTable = ({ isExpanded = prev.includes(row.original.id) ? 
[] : [row.original.id]; return isExpanded; }); - setSelectedTableIds([row.original.id]); + updateSelectedTableIds([row.original.id]); }; const renderHeaderCell = useCallback( @@ -149,7 +124,7 @@ const useTable = ({ } else { newSet.delete(row.original.id); } - setSelectedTableIds([...newSet]); + updateSelectedTableIds([...newSet]); }} /> @@ -180,7 +155,7 @@ const useTable = ({ ...options, sorting, selectedTableIds, - setSelectedTableIds, + updateSelectedTableIds, rowSelection, allRowIds, onSelectAllChange, From bdc36bf5a00815ebffc71f87798d06d5436eefb0 Mon Sep 17 00:00:00 2001 From: dekzter Date: Wed, 23 Apr 2025 11:22:54 -0400 Subject: [PATCH 0028/1435] restored channel group filter --- frontend/src/components/tables/ChannelsTable.jsx | 1 + 1 file changed, 1 insertion(+) diff --git a/frontend/src/components/tables/ChannelsTable.jsx b/frontend/src/components/tables/ChannelsTable.jsx index 1d70800f..15dc34c9 100644 --- a/frontend/src/components/tables/ChannelsTable.jsx +++ b/frontend/src/components/tables/ChannelsTable.jsx @@ -885,6 +885,7 @@ const ChannelsTable = ({}) => { }, headerCellRenderFns: { name: renderHeaderCell, + channel_group: renderHeaderCell, enabled: () => (
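// headerCellRenderFns maps a column id to a custom header renderer; columns
// without an entry fall back to the table's default header rendering. A
// sketch of wiring one more filterable column (the 'epg' id below is
// hypothetical, not from this changeset):
//
//   headerCellRenderFns: {
//     name: renderHeaderCell,
//     channel_group: renderHeaderCell,
//     epg: renderHeaderCell, // reuses the same filter-aware renderer
//   },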
From 03f6c77391647882bdb01ff840ab3ec9a4666fe5 Mon Sep 17 00:00:00 2001 From: dekzter Date: Wed, 23 Apr 2025 13:02:01 -0400 Subject: [PATCH 0029/1435] alternating row colors are restored, row hover restored, fixed drag and drop functionality in streams table --- .../components/tables/ChannelTableStreams.jsx | 19 ++-- .../src/components/tables/ChannelsTable.jsx | 87 +------------------ .../tables/CustomTable/CustomTableBody.jsx | 8 +- frontend/src/components/tables/table.css | 8 +- frontend/src/store/channels.jsx | 1 + frontend/src/store/channelsTable.jsx | 7 +- 6 files changed, 28 insertions(+), 102 deletions(-) diff --git a/frontend/src/components/tables/ChannelTableStreams.jsx b/frontend/src/components/tables/ChannelTableStreams.jsx index c682705a..dae9d2da 100644 --- a/frontend/src/components/tables/ChannelTableStreams.jsx +++ b/frontend/src/components/tables/ChannelTableStreams.jsx @@ -23,6 +23,7 @@ import { MouseSensor, TouchSensor, closestCenter, + useDraggable, useSensor, useSensors, } from '@dnd-kit/core'; @@ -36,19 +37,22 @@ import { useSortable } from '@dnd-kit/sortable'; import { CSS } from '@dnd-kit/utilities'; import { shallow } from 'zustand/shallow'; -// Cell Component const RowDragHandleCell = ({ rowId }) => { - const { attributes, listeners } = useSortable({ + const { attributes, listeners, setNodeRef } = useDraggable({ id: rowId, }); + return ( - // Alternatively, you could set these attributes on the rows themselves
@@ -57,7 +61,7 @@ const RowDragHandleCell = ({ rowId }) => { }; // Row Component -const DraggableRow = ({ row }) => { +const DraggableRow = ({ row, index }) => { const { transform, transition, setNodeRef, isDragging } = useSortable({ id: row.original.id, }); @@ -73,7 +77,7 @@ const DraggableRow = ({ row }) => { { enableRowSelection: true, getRowId: (row) => row.id, getCoreRowModel: getCoreRowModel(), - // getFilteredRowModel: getFilteredRowModel(), - // getSortedRowModel: getSortedRowModel(), - // getPaginationRowModel: getPaginationRowModel(), }); function handleDragEnd(event) { diff --git a/frontend/src/components/tables/ChannelsTable.jsx b/frontend/src/components/tables/ChannelsTable.jsx index 15dc34c9..d395adee 100644 --- a/frontend/src/components/tables/ChannelsTable.jsx +++ b/frontend/src/components/tables/ChannelsTable.jsx @@ -31,8 +31,6 @@ import { ArrowUpNarrowWide, ArrowUpDown, ArrowDownWideNarrow, - ChevronDown, - ChevronRight, } from 'lucide-react'; import ghostImage from '../../images/ghost.svg'; import { @@ -54,7 +52,6 @@ import { MultiSelect, Pagination, NativeSelect, - Checkbox, UnstyledButton, CopyButton, } from '@mantine/core'; @@ -254,7 +251,6 @@ const ChannelRowActions = React.memo( const ChannelsTable = ({}) => { const data = useChannelsTableStore((s) => s.channels); - const rowCount = useChannelsTableStore((s) => s.count); const pageCount = useChannelsTableStore((s) => s.pageCount); const setSelectedTableIds = useChannelsTableStore( (s) => s.setSelectedChannelIds @@ -710,7 +706,9 @@ const ChannelsTable = ({}) => { }, { accessorFn: (row) => - row.channel_group_id ? channelGroups[row.channel_group_id].name : '', + row.channel_group_id && channelGroups + ? channelGroups[row.channel_group_id].name + : '', id: 'channel_group', cell: ({ getValue }) => ( { }, }); - const onRowExpansion = (row) => { - let isExpanded = false; - setExpandedRowIds((prev) => { - isExpanded = prev === row.original.id ? null : row.original.id; - return isExpanded; - }); - setRowSelection({ [row.index]: true }); - setSelectedChannelIds([row.original.id]); - setSelectedTableIds([row.original.id]); - }; - - // const renderBodyCell = (cell) => { - // switch (cell.column.id) { - // case 'select': - // return ChannelRowSelectCell({ row: cell.row }); - - // case 'expand': - // return ChannelExpandCell({ row: cell.row }); - - // default: - // return flexRender(cell.column.columnDef.cell, cell.getContext()); - // } - // }; - - const ChannelExpandCell = useCallback( - ({ row }) => { - const isExpanded = expandedRowIds === row.original.id; - - return ( -
{ - onRowExpansion(row); - }} - > - {isExpanded ? : } -
- ); - }, - [expandedRowIds] - ); - - // const ChannelRowSelectCell = useCallback( - // ({ row }) => { - // return ( - //
- // - //
- // ); - // }, - // [rows] - // ); - - // const ChannelRowSelectHeader = useCallback( - // ({ selectedChannelIds }) => { - // return ( - //
- // 0 && - // selectedChannelIds.length !== rowCount - // } - // onChange={onSelectAllChange} - // /> - //
- // ); - // }, - // [rows] - // ); - return ( {/* Header Row: outside the Paper */} @@ -1123,6 +1043,7 @@ const ChannelsTable = ({}) => { + + setGroupPopoverOpened(true)} + size="xs" + /> + + + e.stopPropagation()}> + + + setGroupFilter(event.currentTarget.value) + } + mb="xs" + size="xs" + /> + + + + + {({ index, style }) => ( + + + { + formik.setFieldValue( + 'channel_group_id', + filteredGroups[index].id + ); + setGroupPopoverOpened(false); + }} + > + + {filteredGroups[index].name} + + + + + )} + + + + + + {/* ({ - label: profile.name, - value: `${profile.id}`, - }))} - renderOption={renderProfileOption} - /> - - - - - - - - - - - - - - - - - - - - - - + {/* Table or ghost empty state inside Paper */} {Object.keys(channels).length === 0 && ( - -
- - - It’s recommended to create channels after adding your M3U or - streams. - - - You can still create channels without streams if you’d like, - and map them later. - - - -
- -
- -
-
+ )}
diff --git a/frontend/src/components/tables/ChannelsTable/ChannelTableHeader.jsx b/frontend/src/components/tables/ChannelsTable/ChannelTableHeader.jsx new file mode 100644 index 00000000..fd510902 --- /dev/null +++ b/frontend/src/components/tables/ChannelsTable/ChannelTableHeader.jsx @@ -0,0 +1,241 @@ +import React, { useState } from 'react'; +import { + ActionIcon, + Box, + Button, + Flex, + Group, + Popover, + Select, + TextInput, + Tooltip, + useMantineTheme, +} from '@mantine/core'; +import { + ArrowDown01, + Binary, + CircleCheck, + SquareMinus, + SquarePlus, +} from 'lucide-react'; +import API from '../../../api'; +import { notifications } from '@mantine/notifications'; +import useChannelsStore from '../../../store/channels'; + +const CreateProfilePopover = React.memo(() => { + const [opened, setOpened] = useState(false); + const [name, setName] = useState(''); + const theme = useMantineTheme(); + + const setOpen = () => { + setName(''); + setOpened(!opened); + }; + + const submit = async () => { + await API.addChannelProfile({ name }); + setName(''); + setOpened(false); + }; + + return ( + + + + + + + + + + setName(event.currentTarget.value)} + size="xs" + /> + + + + + + + + ); +}); + +const ChannelTableHeader = ({ + rows, + editChannel, + deleteChannels, + selectedTableIds, +}) => { + const theme = useMantineTheme(); + + const profiles = useChannelsStore((s) => s.profiles); + const selectedProfileId = useChannelsStore((s) => s.selectedProfileId); + const setSelectedProfileId = useChannelsStore((s) => s.setSelectedProfileId); + + const deleteProfile = async (id) => { + await API.deleteChannelProfile(id); + }; + + const matchEpg = async () => { + try { + // Hit our new endpoint that triggers the fuzzy matching Celery task + await API.matchEpg(); + + notifications.show({ + title: 'EPG matching task started!', + }); + } catch (err) { + notifications.show(`Error: ${err.message}`); + } + }; + + const assignChannels = async () => { + try { + // Get row order from the table + const rowOrder = rows.map((row) => row.original.id); + + // Call our custom API endpoint + const result = await API.assignChannelNumbers(rowOrder); + + // We might get { message: "Channels have been auto-assigned!" } + notifications.show({ + title: result.message || 'Channels assigned', + color: 'green.5', + }); + + // Refresh the channel list + // await fetchChannels(); + API.requeryChannels(); + } catch (err) { + console.error(err); + notifications.show({ + title: 'Failed to assign channels', + color: 'red.5', + }); + } + }; + + const renderProfileOption = ({ option, checked }) => { + return ( + + {option.label} + {option.value != '0' && ( + { + e.stopPropagation(); + deleteProfile(option.value); + }} + > + + + )} + + ); + }; + + return ( + + + ({ - label: playlist.name, - value: `${playlist.id}`, - }))} - variant="unstyled" - className="table-input-header" - /> -
- ), }, ], [playlists, groupOptions, filters, channelGroups] @@ -221,8 +306,6 @@ const StreamsTable = ({}) => { }; const fetchData = useCallback(async () => { - setIsLoading(true); - const params = new URLSearchParams(); params.append('page', pagination.pageIndex + 1); params.append('page_size', pagination.pageSize); @@ -241,6 +324,8 @@ const StreamsTable = ({}) => { try { const result = await API.queryStreams(params); + const ids = await API.getAllStreamIds(params); + setAllRowIds(ids); setData(result.results); setRowCount(result.count); setPageCount(Math.ceil(result.count / pagination.pageSize)); @@ -258,18 +343,6 @@ const StreamsTable = ({}) => { // Generate the string setPaginationString(`${startItem} to ${endItem} of ${result.count}`); - - const newSelection = {}; - result.results.forEach((item, index) => { - if (selectedStreamIds.includes(item.id)) { - newSelection[index] = true; - } - }); - - // ✅ Only update rowSelection if it's different - if (JSON.stringify(newSelection) !== JSON.stringify(rowSelection)) { - setRowSelection(newSelection); - } } catch (error) { console.error('Error fetching data:', error); } @@ -280,17 +353,6 @@ const StreamsTable = ({}) => { setIsLoading(false); }, [pagination, sorting, debouncedFilters]); - // Fallback: Individual creation (optional) - const createChannelFromStream = async (stream) => { - await API.createChannelFromStream({ - name: stream.name, - channel_number: null, - stream_id: stream.id, - }); - await API.requeryChannels(); - fetchLogos(); - }; - // Bulk creation: create channels from selected streams in one API call const createChannelsFromStreams = async () => { setIsLoading(true); @@ -339,56 +401,8 @@ const StreamsTable = ({}) => { await API.requeryChannels(); }; - const addStreamToChannel = async (streamId) => { - await API.updateChannel({ - id: selectedChannelIds[0], - stream_ids: [ - ...new Set( - channelSelectionStreams.map((stream) => stream.id).concat([streamId]) - ), - ], - }); - await API.requeryChannels(); - }; - - const onRowSelectionChange = (updater) => { - setRowSelection((prevRowSelection) => { - const newRowSelection = - typeof updater === 'function' ? 
updater(prevRowSelection) : updater; - - const updatedSelected = new Set([...selectedStreamIds]); - table.getRowModel().rows.forEach((row) => { - if (newRowSelection[row.id] === undefined || !newRowSelection[row.id]) { - updatedSelected.delete(row.original.id); - } else { - updatedSelected.add(row.original.id); - } - }); - setSelectedStreamIds([...updatedSelected]); - - return newRowSelection; - }); - }; - - const onSelectAllChange = async (e) => { - const selectAll = e.target.checked; - if (selectAll) { - // Get all stream IDs for current view - const params = new URLSearchParams(); - Object.entries(debouncedFilters).forEach(([key, value]) => { - if (value) params.append(key, value); - }); - const ids = await API.getAllStreamIds(params); - setSelectedStreamIds(ids); - } else { - setSelectedStreamIds([]); - } - - const newSelection = {}; - table.getRowModel().rows.forEach((item, index) => { - newSelection[index] = selectAll; - }); - setRowSelection(newSelection); + const onRowSelectionChange = (updatedIds) => { + setSelectedStreamIds(updatedIds); }; const onPageSizeChange = (e) => { @@ -409,16 +423,6 @@ const StreamsTable = ({}) => { }); }; - const onPaginationChange = (updater) => { - const newPagination = updater(pagination); - if (JSON.stringify(newPagination) === JSON.stringify(pagination)) { - // Prevent infinite re-render when there are no results - return; - } - - setPagination(updater); - }; - function handleWatchStream(streamHash) { let vidUrl = `/proxy/ts/stream/${streamHash}`; if (env_mode == 'dev') { @@ -427,202 +431,144 @@ const StreamsTable = ({}) => { showVideo(vidUrl); } - const table = useMantineReactTable({ - ...TableHelper.defaultProperties, + const onSortingChange = (column) => { + const sortField = sorting[0]?.id; + const sortDirection = sorting[0]?.desc; + + if (sortField == column) { + if (sortDirection == false) { + setSorting([ + { + id: column, + desc: true, + }, + ]); + } else { + setSorting([]); + } + } else { + setSorting([ + { + id: column, + desc: false, + }, + ]); + } + }; + + const renderHeaderCell = (header) => { + let sortingIcon = ArrowUpDown; + if (sorting[0]?.id == header.id) { + if (sorting[0].desc === false) { + sortingIcon = ArrowUpNarrowWide; + } else { + sortingIcon = ArrowDownWideNarrow; + } + } + + switch (header.id) { + case 'name': + return ( + + e.stopPropagation()} + onChange={handleFilterChange} + size="xs" + variant="unstyled" + className="table-input-header" + /> +
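{/* onSortingChange (above) cycles each column through three states per
    click: unsorted -> ascending -> descending -> unsorted. With
    s = sorting[0]:
      s?.id !== column   => [{ id: column, desc: false }]
      s.desc === false   => [{ id: column, desc: true }]
      s.desc === true    => []
    The icon chosen above (ArrowUpDown / ArrowUpNarrowWide /
    ArrowDownWideNarrow) tracks the same three states. */}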
+ {React.createElement(sortingIcon, { + onClick: () => onSortingChange('name'), + size: 14, + })} +
+
+ ); + + case 'group': + return ( + + + + ); + + case 'm3u': + return ( + + + )} + @@ -301,7 +364,7 @@ const ChannelsPage = () => { minute: '2-digit', second: '2-digit', hour12: true, // 12-hour format with AM/PM - }); // This will give you a string like: "2025-03-14T14:00:00.000Z" + }); }, }, { @@ -439,6 +502,7 @@ const ChannelsPage = () => { stopClient={stopClient} stopChannel={stopChannel} logos={logos} // Pass logos to the component + channelsByUUID={channelsByUUID} // Pass channelsByUUID to fix the error /> )) )} From c61c7573bd1a6fdfca007efbf18c02196f3f4263 Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Thu, 24 Apr 2025 19:35:34 -0500 Subject: [PATCH 0064/1435] Show the correct currently connect stream and display the available streams to switch to in the correct order. --- frontend/src/pages/Stats.jsx | 37 +++++++++++++++++++++++++++++++++--- 1 file changed, 34 insertions(+), 3 deletions(-) diff --git a/frontend/src/pages/Stats.jsx b/frontend/src/pages/Stats.jsx index 0709712e..7d346916 100644 --- a/frontend/src/pages/Stats.jsx +++ b/frontend/src/pages/Stats.jsx @@ -81,6 +81,7 @@ const ChannelCard = ({ channel, clients, stopClient, stopChannel, logos, channel const location = useLocation(); const [availableStreams, setAvailableStreams] = useState([]); const [isLoadingStreams, setIsLoadingStreams] = useState(false); + const [activeStreamId, setActiveStreamId] = useState(null); // Safety check - if channel doesn't have required data, don't render if (!channel || !channel.channel_id) { @@ -96,7 +97,25 @@ const ChannelCard = ({ channel, clients, stopClient, stopChannel, logos, channel const channelId = channelsByUUID[channel.channel_id]; if (channelId) { const streamData = await API.getChannelStreams(channelId); - setAvailableStreams(streamData); + + // Sort streams by ID to match the order in the channels view + const sortedStreamData = [...streamData].sort((a, b) => a.id - b.id); + setAvailableStreams(sortedStreamData); + + // If we have a channel URL, try to find the matching stream + if (channel.url && sortedStreamData.length > 0) { + // Try to find matching stream based on URL + const matchingStream = sortedStreamData.find(stream => + channel.url.includes(stream.url) || stream.url.includes(channel.url) + ); + + if (matchingStream) { + setActiveStreamId(matchingStream.id.toString()); + console.log("Found matching stream:", matchingStream.id, matchingStream.name); + } else { + console.log("No matching stream found for URL:", channel.url); + } + } } } catch (error) { console.error("Error fetching streams:", error); @@ -106,7 +125,7 @@ const ChannelCard = ({ channel, clients, stopClient, stopChannel, logos, channel }; fetchStreams(); - }, [channel.channel_id, channelsByUUID]); + }, [channel.channel_id, channel.url, channelsByUUID]); // Handle stream switching const handleStreamChange = async (streamId) => { @@ -216,6 +235,16 @@ const ChannelCard = ({ channel, clients, stopClient, stopChannel, logos, channel label: stream.name || `Stream #${stream.id}` })); + // Debug logging to see what stream_id values we have + useEffect(() => { + if (availableStreams.length > 0) { + console.log("Available streams:", availableStreams); + console.log("Current channel data:", channel); + console.log("Active stream_id from channel:", channel.stream_id); + console.log("Matched active stream ID:", activeStreamId); + } + }, [availableStreams, channel, activeStreamId]); + return ( { ...(channelData || {}), // Safely merge channel data if available bitrates, stream_profile: streamProfile || { name: 
'Unknown' }, + // Make sure stream_id is set from the active stream info + stream_id: ch.stream_id || null, }; }); From 68731644788093656d7bf5a4be321615044e6bf9 Mon Sep 17 00:00:00 2001 From: GitHub Actions Date: Fri, 25 Apr 2025 00:36:34 +0000 Subject: [PATCH 0065/1435] Increment build number to 19 [skip ci] --- version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/version.py b/version.py index 7c1cadcc..83757bef 100644 --- a/version.py +++ b/version.py @@ -2,4 +2,4 @@ Dispatcharr version information. """ __version__ = '0.3.3' # Follow semantic versioning (MAJOR.MINOR.PATCH) -__build__ = '18' # Auto-incremented on builds +__build__ = '19' # Auto-incremented on builds From 1a1c5dea9eea33553a3727db4e69f537759235e6 Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Thu, 24 Apr 2025 20:07:36 -0500 Subject: [PATCH 0066/1435] Uses correct api to slected specific stream id. --- apps/proxy/ts_proxy/views.py | 54 +++++++++++++++++++++++------------- frontend/src/api.js | 14 ++++++++++ frontend/src/pages/Stats.jsx | 33 ++++++++++++++++++++-- 3 files changed, 78 insertions(+), 23 deletions(-) diff --git a/apps/proxy/ts_proxy/views.py b/apps/proxy/ts_proxy/views.py index a924cd75..d71da8d4 100644 --- a/apps/proxy/ts_proxy/views.py +++ b/apps/proxy/ts_proxy/views.py @@ -335,14 +335,31 @@ def change_stream(request, channel_id): data = json.loads(request.body) new_url = data.get('url') user_agent = data.get('user_agent') + stream_id = data.get('stream_id') - if not new_url: - return JsonResponse({'error': 'No URL provided'}, status=400) + # If stream_id is provided, get the URL and user_agent from it + if stream_id: + logger.info(f"Stream ID {stream_id} provided, looking up stream info for channel {channel_id}") + stream_info = get_stream_info_for_switch(channel_id, stream_id) - logger.info(f"Attempting to change stream URL for channel {channel_id} to {new_url}") + if 'error' in stream_info: + return JsonResponse({ + 'error': stream_info['error'], + 'stream_id': stream_id + }, status=404) + + # Use the info from the stream + new_url = stream_info['url'] + user_agent = stream_info['user_agent'] + # Stream ID will be passed to change_stream_url later + elif not new_url: + return JsonResponse({'error': 'Either url or stream_id must be provided'}, status=400) + + logger.info(f"Attempting to change stream for channel {channel_id} to {new_url}") # Use the service layer instead of direct implementation - result = ChannelService.change_stream_url(channel_id, new_url, user_agent) + # Pass stream_id to ensure proper connection tracking + result = ChannelService.change_stream_url(channel_id, new_url, user_agent, stream_id) # Get the stream manager before updating URL stream_manager = proxy_server.stream_managers.get(channel_id) @@ -360,22 +377,19 @@ def change_stream(request, channel_id): }, status=404) # Format response based on whether it was a direct update or event-based - if result.get('direct_update'): - return JsonResponse({ - 'message': 'Stream URL updated', - 'channel': channel_id, - 'url': new_url, - 'owner': True, - 'worker_id': proxy_server.worker_id - }) - else: - return JsonResponse({ - 'message': 'Stream URL change requested', - 'channel': channel_id, - 'url': new_url, - 'owner': False, - 'worker_id': proxy_server.worker_id - }) + response_data = { + 'message': 'Stream changed successfully', + 'channel': channel_id, + 'url': new_url, + 'owner': result.get('direct_update', False), + 'worker_id': proxy_server.worker_id + } + + # Include stream_id in response if it was used + if 
stream_id: + response_data['stream_id'] = stream_id + + return JsonResponse(response_data) except json.JSONDecodeError: return JsonResponse({'error': 'Invalid JSON'}, status=400) diff --git a/frontend/src/api.js b/frontend/src/api.js index b0e2c5d9..38aac846 100644 --- a/frontend/src/api.js +++ b/frontend/src/api.js @@ -1240,6 +1240,20 @@ export default class API { } static async switchStream(channelId, streamId) { + try { + const response = await request(`${host}/proxy/ts/change_stream/${channelId}`, { + method: 'POST', + body: { stream_id: streamId }, + }); + + return response; + } catch (e) { + errorNotification('Failed to switch stream', e); + throw e; + } + } + + static async nextStream(channelId, streamId) { try { const response = await request(`${host}/proxy/ts/next_stream/${channelId}`, { method: 'POST', diff --git a/frontend/src/pages/Stats.jsx b/frontend/src/pages/Stats.jsx index 7d346916..e8f1b7e8 100644 --- a/frontend/src/pages/Stats.jsx +++ b/frontend/src/pages/Stats.jsx @@ -130,13 +130,40 @@ const ChannelCard = ({ channel, clients, stopClient, stopChannel, logos, channel // Handle stream switching const handleStreamChange = async (streamId) => { try { - await API.switchStream(channel.channel_id, streamId); + console.log("Switching to stream ID:", streamId); + // Find the selected stream in availableStreams for debugging + const selectedStream = availableStreams.find(s => s.id.toString() === streamId); + console.log("Selected stream details:", selectedStream); + + // Make sure we're passing the correct ID to the API + const response = await API.switchStream(channel.channel_id, streamId); + console.log("Stream switch API response:", response); + + // Update the local active stream ID immediately + setActiveStreamId(streamId); + + // Show detailed notification with stream name notifications.show({ title: 'Stream switching', - message: `Switching stream for ${channel.name}`, + message: `Switching to "${selectedStream?.name}" for ${channel.name}`, color: 'blue.5', }); + + // After a short delay, fetch streams again to confirm the switch + setTimeout(async () => { + try { + const channelId = channelsByUUID[channel.channel_id]; + if (channelId) { + const updatedStreamData = await API.getChannelStreams(channelId); + console.log("Channel streams after switch:", updatedStreamData); + } + } catch (error) { + console.error("Error checking streams after switch:", error); + } + }, 2000); + } catch (error) { + console.error("Stream switch error:", error); notifications.show({ title: 'Error switching stream', message: error.toString(), @@ -232,7 +259,7 @@ const ChannelCard = ({ channel, clients, stopClient, stopChannel, logos, channel // Create select options for available streams const streamOptions = availableStreams.map(stream => ({ value: stream.id.toString(), - label: stream.name || `Stream #${stream.id}` + label: `${stream.name || `Stream #${stream.id}`}`, // Make sure stream name is clear })); // Debug logging to see what stream_id values we have From 6f82eb42742927f0b85a7bff7ae39a23b63f7c12 Mon Sep 17 00:00:00 2001 From: GitHub Actions Date: Fri, 25 Apr 2025 01:08:22 +0000 Subject: [PATCH 0067/1435] Increment build number to 20 [skip ci] --- version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/version.py b/version.py index 83757bef..92b89212 100644 --- a/version.py +++ b/version.py @@ -2,4 +2,4 @@ Dispatcharr version information. 
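The change_stream endpoint above now accepts either a raw url or a
stream_id; given a stream_id it resolves the URL and user agent
server-side and echoes the id back in the response. A minimal client
sketch against that endpoint (host and ids are illustrative, auth
omitted):

    import requests

    # Switch channel 42 over to stream 7 via the TS proxy.
    resp = requests.post(
        "http://dispatcharr.local/proxy/ts/change_stream/42",
        json={"stream_id": 7},
        timeout=10,
    )
    resp.raise_for_status()
    data = resp.json()  # 'message', 'channel', 'url', 'owner', 'worker_id', 'stream_id'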
""" __version__ = '0.3.3' # Follow semantic versioning (MAJOR.MINOR.PATCH) -__build__ = '19' # Auto-incremented on builds +__build__ = '20' # Auto-incremented on builds From 44ea86e59aa640066c79bc20ce8a71262b3a955c Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Thu, 24 Apr 2025 20:30:04 -0500 Subject: [PATCH 0068/1435] Show connected time for each client. --- frontend/src/pages/Stats.jsx | 24 ++++++++++++++++++++++++ 1 file changed, 24 insertions(+) diff --git a/frontend/src/pages/Stats.jsx b/frontend/src/pages/Stats.jsx index e8f1b7e8..dd2c013b 100644 --- a/frontend/src/pages/Stats.jsx +++ b/frontend/src/pages/Stats.jsx @@ -179,6 +179,30 @@ const ChannelCard = ({ channel, clients, stopClient, stopChannel, logos, channel accessorKey: 'ip_address', size: 50, }, + { + header: 'Connected', + accessorFn: (row) => { + // Calculate based on connected_since (time elapsed since connection) + if (row.connected_since) { + // Current time minus the elapsed time gives us the connection timestamp + const connectedTime = dayjs().subtract(row.connected_since, 'second'); + return connectedTime.format('MM/DD HH:mm:ss'); + } + return 'Unknown'; + }, + size: 50, + }, + { + header: 'Duration', + accessorFn: (row) => { + // Use connected_since directly as it's already the duration + if (row.connected_since) { + return dayjs.duration(row.connected_since, 'seconds').humanize(); + } + return '-'; + }, + size: 50, + } ], [] ); From d15ff0d7c5cdf7387e5d24a13a8686c0144e1396 Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Thu, 24 Apr 2025 20:55:28 -0500 Subject: [PATCH 0069/1435] Added tooltips. --- frontend/src/pages/Stats.jsx | 133 +++++++++++++++++++++-------------- 1 file changed, 82 insertions(+), 51 deletions(-) diff --git a/frontend/src/pages/Stats.jsx b/frontend/src/pages/Stats.jsx index dd2c013b..740f963a 100644 --- a/frontend/src/pages/Stats.jsx +++ b/frontend/src/pages/Stats.jsx @@ -111,9 +111,6 @@ const ChannelCard = ({ channel, clients, stopClient, stopChannel, logos, channel if (matchingStream) { setActiveStreamId(matchingStream.id.toString()); - console.log("Found matching stream:", matchingStream.id, matchingStream.name); - } else { - console.log("No matching stream found for URL:", channel.url); } } } @@ -179,28 +176,55 @@ const ChannelCard = ({ channel, clients, stopClient, stopChannel, logos, channel accessorKey: 'ip_address', size: 50, }, + // Updated Connected column with tooltip { header: 'Connected', accessorFn: (row) => { - // Calculate based on connected_since (time elapsed since connection) + // Check for connected_since (which is seconds since connection) if (row.connected_since) { - // Current time minus the elapsed time gives us the connection timestamp - const connectedTime = dayjs().subtract(row.connected_since, 'second'); + // Calculate the actual connection time by subtracting the seconds from current time + const currentTime = dayjs(); + const connectedTime = currentTime.subtract(row.connected_since, 'second'); return connectedTime.format('MM/DD HH:mm:ss'); } + + // Fallback to connected_at if it exists + if (row.connected_at) { + const connectedTime = dayjs(row.connected_at * 1000); + return connectedTime.format('MM/DD HH:mm:ss'); + } + return 'Unknown'; }, + Cell: ({ cell }) => ( + + {cell.getValue()} + + ), size: 50, }, + // Update Duration column with tooltip showing exact seconds { header: 'Duration', accessorFn: (row) => { - // Use connected_since directly as it's already the duration if (row.connected_since) { return dayjs.duration(row.connected_since, 
'seconds').humanize(); } + + if (row.connection_duration) { + return dayjs.duration(row.connection_duration, 'seconds').humanize(); + } + return '-'; }, + Cell: ({ cell, row }) => { + const exactDuration = row.original.connected_since || row.original.connection_duration; + return ( + + {cell.getValue()} + + ); + }, size: 50, } ], @@ -231,16 +255,18 @@ const ChannelCard = ({ channel, clients, stopClient, stopChannel, logos, channel renderRowActions: ({ row }) => (
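{/* Both time columns above are derived from connected_since, which the
    backend reports as elapsed seconds. For connected_since = 3700:
      dayjs().subtract(3700, 'second')            // wall-clock connect time
      dayjs.duration(3700, 'seconds').humanize()  // "an hour"
    The tooltips keep the exact second count so the rounded, humanized
    value stays verifiable. */}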
- - stopClient(row.original.channel.uuid, row.original.client_id) - } - > - - + + + stopClient(row.original.channel.uuid, row.original.client_id) + } + > + + +
), @@ -286,16 +312,6 @@ const ChannelCard = ({ channel, clients, stopClient, stopChannel, logos, channel label: `${stream.name || `Stream #${stream.id}`}`, // Make sure stream name is clear })); - // Debug logging to see what stream_id values we have - useEffect(() => { - if (availableStreams.length > 0) { - console.log("Available streams:", availableStreams); - console.log("Current channel data:", channel); - console.log("Active stream_id from channel:", channel.stream_id); - console.log("Matched active stream ID:", activeStreamId); - } - }, [availableStreams, channel, activeStreamId]); - return ( - channel logo + channel logo @@ -348,34 +369,50 @@ const ChannelCard = ({ channel, clients, stopClient, stopChannel, logos, channel {/* Add stream selection dropdown */} {availableStreams.length > 0 && ( - + )} - - {formatSpeed(bitrates.at(-1) || 0)} + + + + {formatSpeed(bitrates.at(-1) || 0)} + + - Avg: {avgBitrate} + + Avg: {avgBitrate} + - - {formatBytes(totalBytes)} + + + + {formatBytes(totalBytes)} + + - - {clientCount} + + + + {clientCount} + + @@ -563,12 +600,6 @@ const ChannelsPage = () => { setClients(clientStats); }, [channelStats, channels, channelsByUUID, streamProfiles]); - // Add debug output - useEffect(() => { - console.log("Channel stats from store:", channelStats); - console.log("Active channels state:", activeChannels); - }, [channelStats, activeChannels]); - return ( {Object.keys(activeChannels).length === 0 ? ( From 8bdc027c937406107af81e697c08aca9f9af9a82 Mon Sep 17 00:00:00 2001 From: GitHub Actions Date: Fri, 25 Apr 2025 01:55:55 +0000 Subject: [PATCH 0070/1435] Increment build number to 21 [skip ci] --- version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/version.py b/version.py index 92b89212..79efab47 100644 --- a/version.py +++ b/version.py @@ -2,4 +2,4 @@ Dispatcharr version information. """ __version__ = '0.3.3' # Follow semantic versioning (MAJOR.MINOR.PATCH) -__build__ = '20' # Auto-incremented on builds +__build__ = '21' # Auto-incremented on builds From 51e3c7cc51477415702780af92809dc8bbe55c67 Mon Sep 17 00:00:00 2001 From: dekzter Date: Fri, 25 Apr 2025 08:19:39 -0400 Subject: [PATCH 0071/1435] fixed channelgroups value --- frontend/src/components/forms/Channel.jsx | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/frontend/src/components/forms/Channel.jsx b/frontend/src/components/forms/Channel.jsx index b2882815..167e3424 100644 --- a/frontend/src/components/forms/Channel.jsx +++ b/frontend/src/components/forms/Channel.jsx @@ -398,7 +398,11 @@ const Channel = ({ channel = null, isOpen, onClose }) => { name="channel_group_id" label="Channel Group" readOnly - value={channelGroups[formik.values.channel_group_id].name} + value={ + channelGroups + ? channelGroups[formik.values.channel_group_id].name + : '' + } onClick={() => setGroupPopoverOpened(true)} size="xs" /> From 64b7b8ab95edb29e7ff76abde5eab051e287f953 Mon Sep 17 00:00:00 2001 From: GitHub Actions Date: Fri, 25 Apr 2025 12:20:04 +0000 Subject: [PATCH 0072/1435] Increment build number to 22 [skip ci] --- version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/version.py b/version.py index 79efab47..a75411f8 100644 --- a/version.py +++ b/version.py @@ -2,4 +2,4 @@ Dispatcharr version information. 
""" __version__ = '0.3.3' # Follow semantic versioning (MAJOR.MINOR.PATCH) -__build__ = '21' # Auto-incremented on builds +__build__ = '22' # Auto-incremented on builds From d64215b5a6be6dce4a170ba57b6714198982133d Mon Sep 17 00:00:00 2001 From: dekzter Date: Fri, 25 Apr 2025 08:51:58 -0400 Subject: [PATCH 0073/1435] another attempt --- frontend/src/components/forms/Channel.jsx | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/frontend/src/components/forms/Channel.jsx b/frontend/src/components/forms/Channel.jsx index 167e3424..33eca5ea 100644 --- a/frontend/src/components/forms/Channel.jsx +++ b/frontend/src/components/forms/Channel.jsx @@ -399,7 +399,7 @@ const Channel = ({ channel = null, isOpen, onClose }) => { label="Channel Group" readOnly value={ - channelGroups + channelGroups[formik.values.channel_group_id] ? channelGroups[formik.values.channel_group_id].name : '' } From 1924b71dea3365d899e78d3c27a60c7407128904 Mon Sep 17 00:00:00 2001 From: GitHub Actions Date: Fri, 25 Apr 2025 13:13:23 +0000 Subject: [PATCH 0074/1435] Increment build number to 23 [skip ci] --- version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/version.py b/version.py index a75411f8..2bb907c2 100644 --- a/version.py +++ b/version.py @@ -2,4 +2,4 @@ Dispatcharr version information. """ __version__ = '0.3.3' # Follow semantic versioning (MAJOR.MINOR.PATCH) -__build__ = '22' # Auto-incremented on builds +__build__ = '23' # Auto-incremented on builds From 5ac27043e410714ee64cf5aa142c2cf89ca2776a Mon Sep 17 00:00:00 2001 From: dekzter Date: Fri, 25 Apr 2025 09:42:07 -0400 Subject: [PATCH 0075/1435] fixed mimetypes --- apps/channels/api_views.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/apps/channels/api_views.py b/apps/channels/api_views.py index 95ad05a8..cfe0b1e4 100644 --- a/apps/channels/api_views.py +++ b/apps/channels/api_views.py @@ -19,7 +19,7 @@ from rest_framework.filters import SearchFilter, OrderingFilter from apps.epg.models import EPGData from django.db.models import Q from django.http import StreamingHttpResponse, FileResponse, Http404 - +import mimetypes from rest_framework.pagination import PageNumberPagination @@ -629,13 +629,14 @@ class LogoViewSet(viewsets.ModelViewSet): if logo_url.startswith("/data"): # Local file if not os.path.exists(logo_url): raise Http404("Image not found") - return FileResponse(open(logo_url, "rb"), content_type="image/*") + mimetype = mimetype.guess_type(logo_url) + return FileResponse(open(logo_url, "rb"), content_type=mimetype) else: # Remote image try: remote_response = requests.get(logo_url, stream=True) if remote_response.status_code == 200: - return StreamingHttpResponse(remote_response.iter_content(chunk_size=8192), content_type="image/*") + return StreamingHttpResponse(remote_response.iter_content(chunk_size=8192), content_type=remote_response.headers['Content-Type']) raise Http404("Remote image not found") except requests.RequestException: raise Http404("Error fetching remote image") From cafb588e4bc3930f41c243f5e3bbab17f4002e6c Mon Sep 17 00:00:00 2001 From: GitHub Actions Date: Fri, 25 Apr 2025 13:42:26 +0000 Subject: [PATCH 0076/1435] Increment build number to 24 [skip ci] --- version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/version.py b/version.py index 2bb907c2..6f04d406 100644 --- a/version.py +++ b/version.py @@ -2,4 +2,4 @@ Dispatcharr version information. 
""" __version__ = '0.3.3' # Follow semantic versioning (MAJOR.MINOR.PATCH) -__build__ = '23' # Auto-incremented on builds +__build__ = '24' # Auto-incremented on builds From e3100e2751551b408167e11c3440fbfe6db0c47c Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Fri, 25 Apr 2025 08:46:21 -0500 Subject: [PATCH 0077/1435] Use correct content type for m3u file instead of m3u8 --- apps/output/views.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/apps/output/views.py b/apps/output/views.py index 3bcd19b2..9429d9ca 100644 --- a/apps/output/views.py +++ b/apps/output/views.py @@ -44,7 +44,7 @@ def generate_m3u(request, profile_name=None): #stream_url = request.build_absolute_uri(reverse('output:stream', args=[channel.id])) m3u_content += extinf_line + stream_url + "\n" - response = HttpResponse(m3u_content, content_type="application/x-mpegURL") + response = HttpResponse(m3u_content, content_type="audio/x-mpegurl") response['Content-Disposition'] = 'attachment; filename="channels.m3u"' return response From 133e2c6787d0b8747399be187bb092d94dc0a760 Mon Sep 17 00:00:00 2001 From: GitHub Actions Date: Fri, 25 Apr 2025 13:46:57 +0000 Subject: [PATCH 0078/1435] Increment build number to 25 [skip ci] --- version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/version.py b/version.py index 6f04d406..c26ae7a9 100644 --- a/version.py +++ b/version.py @@ -2,4 +2,4 @@ Dispatcharr version information. """ __version__ = '0.3.3' # Follow semantic versioning (MAJOR.MINOR.PATCH) -__build__ = '24' # Auto-incremented on builds +__build__ = '25' # Auto-incremented on builds From d7b7a32396c03652e3049145d9e447cdcbf63599 Mon Sep 17 00:00:00 2001 From: dekzter Date: Fri, 25 Apr 2025 10:46:49 -0400 Subject: [PATCH 0079/1435] fixed bad library ref --- apps/channels/api_views.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/apps/channels/api_views.py b/apps/channels/api_views.py index cfe0b1e4..cec05ba1 100644 --- a/apps/channels/api_views.py +++ b/apps/channels/api_views.py @@ -629,7 +629,7 @@ class LogoViewSet(viewsets.ModelViewSet): if logo_url.startswith("/data"): # Local file if not os.path.exists(logo_url): raise Http404("Image not found") - mimetype = mimetype.guess_type(logo_url) + mimetype = mimetypes.guess_type(logo_url) return FileResponse(open(logo_url, "rb"), content_type=mimetype) else: # Remote image From 7d0ea15924e826768bbf3f86ada4976495a5e94b Mon Sep 17 00:00:00 2001 From: GitHub Actions Date: Fri, 25 Apr 2025 14:47:10 +0000 Subject: [PATCH 0080/1435] Increment build number to 26 [skip ci] --- version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/version.py b/version.py index c26ae7a9..1091568a 100644 --- a/version.py +++ b/version.py @@ -2,4 +2,4 @@ Dispatcharr version information. 
""" __version__ = '0.3.3' # Follow semantic versioning (MAJOR.MINOR.PATCH) -__build__ = '25' # Auto-incremented on builds +__build__ = '26' # Auto-incremented on builds From 4c3ff1cdbdd91ac88063cc5c89a6fa6c81fbd05c Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Fri, 25 Apr 2025 10:59:56 -0500 Subject: [PATCH 0081/1435] Fixes streams being in incorrect order (for real this time) --- apps/channels/api_views.py | 3 ++- frontend/src/pages/Stats.jsx | 9 ++++----- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/apps/channels/api_views.py b/apps/channels/api_views.py index cec05ba1..78907f8f 100644 --- a/apps/channels/api_views.py +++ b/apps/channels/api_views.py @@ -649,7 +649,8 @@ class ChannelProfileViewSet(viewsets.ModelViewSet): class GetChannelStreamsAPIView(APIView): def get(self, request, channel_id): channel = get_object_or_404(Channel, id=channel_id) - streams = channel.streams + # Order the streams by channelstream__order to match the order in the channel view + streams = channel.streams.all().order_by('channelstream__order') serializer = StreamSerializer(streams, many=True) return Response(serializer.data) diff --git a/frontend/src/pages/Stats.jsx b/frontend/src/pages/Stats.jsx index 740f963a..4754f68a 100644 --- a/frontend/src/pages/Stats.jsx +++ b/frontend/src/pages/Stats.jsx @@ -98,14 +98,13 @@ const ChannelCard = ({ channel, clients, stopClient, stopChannel, logos, channel if (channelId) { const streamData = await API.getChannelStreams(channelId); - // Sort streams by ID to match the order in the channels view - const sortedStreamData = [...streamData].sort((a, b) => a.id - b.id); - setAvailableStreams(sortedStreamData); + // Use streams in the order returned by the API without sorting + setAvailableStreams(streamData); // If we have a channel URL, try to find the matching stream - if (channel.url && sortedStreamData.length > 0) { + if (channel.url && streamData.length > 0) { // Try to find matching stream based on URL - const matchingStream = sortedStreamData.find(stream => + const matchingStream = streamData.find(stream => channel.url.includes(stream.url) || stream.url.includes(channel.url) ); From 530ac8727dd7f3e7527d08ea9aec614575a47b70 Mon Sep 17 00:00:00 2001 From: GitHub Actions Date: Fri, 25 Apr 2025 16:00:24 +0000 Subject: [PATCH 0082/1435] Increment build number to 27 [skip ci] --- version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/version.py b/version.py index 1091568a..869719f9 100644 --- a/version.py +++ b/version.py @@ -2,4 +2,4 @@ Dispatcharr version information. 
""" __version__ = '0.3.3' # Follow semantic versioning (MAJOR.MINOR.PATCH) -__build__ = '26' # Auto-incremented on builds +__build__ = '27' # Auto-incremented on builds From 3dcc4902faea068c9fd529aa2679f85cb90a9244 Mon Sep 17 00:00:00 2001 From: dekzter Date: Fri, 25 Apr 2025 13:44:54 -0400 Subject: [PATCH 0083/1435] fixed epg cache --- apps/epg/models.py | 10 ++++++++++ apps/epg/tasks.py | 32 +++++++++++++++----------------- 2 files changed, 25 insertions(+), 17 deletions(-) diff --git a/apps/epg/models.py b/apps/epg/models.py index 09986bfe..f026c558 100644 --- a/apps/epg/models.py +++ b/apps/epg/models.py @@ -1,6 +1,7 @@ from django.db import models from django.utils import timezone from django_celery_beat.models import PeriodicTask +from django.conf import settings class EPGSource(models.Model): SOURCE_TYPE_CHOICES = [ @@ -29,6 +30,15 @@ class EPGSource(models.Model): def __str__(self): return self.name + def get_cache_file(self): + # Decide on file extension + file_ext = ".gz" if self.url.lower().endswith('.gz') else ".xml" + filename = f"{self.id}{file_ext}" + + # Build full path in MEDIA_ROOT/cached_epg + cache_dir = os.path.join(settings.MEDIA_ROOT, "cached_epg") + cache = os.path.join(cache_dir, filename) + class EPGData(models.Model): # Removed the Channel foreign key. We now just store the original tvg_id # and a name (which might simply be the tvg_id if no real channel exists). diff --git a/apps/epg/tasks.py b/apps/epg/tasks.py index 2b6852ac..a28db282 100644 --- a/apps/epg/tasks.py +++ b/apps/epg/tasks.py @@ -68,23 +68,12 @@ def fetch_xmltv(source): response.raise_for_status() logger.debug("XMLTV data fetched successfully.") - # Decide on file extension - file_ext = ".gz" if source.url.lower().endswith('.gz') else ".xml" - filename = f"{source.name}_{uuid.uuid4().hex[:8]}{file_ext}" - - # Build full path in MEDIA_ROOT/cached_epg - epg_dir = os.path.join(settings.MEDIA_ROOT, "cached_epg") - os.makedirs(epg_dir, exist_ok=True) - file_path = os.path.join(epg_dir, filename) + cache_file = source.get_cache_file() # Save raw data - with open(file_path, 'wb') as f: + with open(cache_file, 'wb') as f: f.write(response.content) - logger.info(f"Cached EPG file saved to {file_path}") - - # Save the file_path on the EPGSource instance so it can be retrieved later. 
- source.file_path = file_path - source.save(update_fields=['file_path']) + logger.info(f"Cached EPG file saved to {cache_file}") except Exception as e: logger.error(f"Error fetching XMLTV from {source.name}: {e}", exc_info=True) @@ -92,6 +81,9 @@ def fetch_xmltv(source): def parse_channels_only(source): file_path = source.file_path + if not file_path: + file_path = source.get_cache_file() + logger.info(f"Parsing channels from EPG file: {file_path}") existing_epgs = {e.tvg_id: e for e in EPGData.objects.filter(epg_source=source)} @@ -165,13 +157,19 @@ def parse_programs_for_tvg_id(epg_id): # First, remove all existing programs ProgramData.objects.filter(epg=epg).delete() + file_path = epg_source.file_path + if not file_path: + file_path = epg_source.get_cache_file() + if not os.exists(file_path): + fetch_xmltv(epg_source) + # Read entire file (decompress if .gz) - if epg_source.file_path.endswith('.gz'): - with open(epg_source.file_path, 'rb') as gz_file: + if file_path.endswith('.gz'): + with open(file_path, 'rb') as gz_file: decompressed = gzip.decompress(gz_file.read()) xml_data = decompressed.decode('utf-8') else: - with open(epg_source.file_path, 'r', encoding='utf-8') as xml_file: + with open(file_path, 'r', encoding='utf-8') as xml_file: xml_data = xml_file.read() root = ET.fromstring(xml_data) From 9c6e19fb3bf231df26f59a6dd004d2147fac329e Mon Sep 17 00:00:00 2001 From: dekzter Date: Fri, 25 Apr 2025 13:46:58 -0400 Subject: [PATCH 0084/1435] clean up old cache files when we refresh epg from remote source --- apps/epg/tasks.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/apps/epg/tasks.py b/apps/epg/tasks.py index a28db282..74411bdb 100644 --- a/apps/epg/tasks.py +++ b/apps/epg/tasks.py @@ -62,6 +62,9 @@ def fetch_xmltv(source): if not source.url: return + if os.path.exists(source.get_cache_file()): + os.remove(source.get_cache_file()) + logger.info(f"Fetching XMLTV data from source: {source.name}") try: response = requests.get(source.url, timeout=30) @@ -160,7 +163,7 @@ def parse_programs_for_tvg_id(epg_id): file_path = epg_source.file_path if not file_path: file_path = epg_source.get_cache_file() - if not os.exists(file_path): + if not os.path.exists(file_path): fetch_xmltv(epg_source) # Read entire file (decompress if .gz) From e4cb4bd1d2013a1790d83639fbbf9b00a2b0e9e3 Mon Sep 17 00:00:00 2001 From: GitHub Actions Date: Fri, 25 Apr 2025 17:47:25 +0000 Subject: [PATCH 0085/1435] Increment build number to 28 [skip ci] --- version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/version.py b/version.py index 869719f9..a8aae639 100644 --- a/version.py +++ b/version.py @@ -2,4 +2,4 @@ Dispatcharr version information. """ __version__ = '0.3.3' # Follow semantic versioning (MAJOR.MINOR.PATCH) -__build__ = '27' # Auto-incremented on builds +__build__ = '28' # Auto-incremented on builds From 575d696c35303ed90c0e0a7973b8d1a4b62220a4 Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Fri, 25 Apr 2025 15:32:34 -0500 Subject: [PATCH 0086/1435] Changed proxy to use uwsgi socket and increased client_max_body_size to 128MB. 
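With this patch the cached EPG file is addressed purely by source id:
refresh deletes any stale cache before writing, and the parsers fall back
to get_cache_file() (re-fetching when the file is missing) instead of
relying on a stored file_path. Note that the models.py hunk above builds
the cache path but never returns it; a sketch of the presumably intended
helper:

    def get_cache_file(self):
        # One cache file per source id; extension follows the source URL.
        file_ext = ".gz" if self.url.lower().endswith(".gz") else ".xml"
        cache_dir = os.path.join(settings.MEDIA_ROOT, "cached_epg")
        os.makedirs(cache_dir, exist_ok=True)  # assumption: dir may not exist yet
        return os.path.join(cache_dir, f"{self.id}{file_ext}")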
--- docker/nginx.conf | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/docker/nginx.conf b/docker/nginx.conf index 06fa742a..8382b73f 100644 --- a/docker/nginx.conf +++ b/docker/nginx.conf @@ -7,6 +7,7 @@ server { proxy_connect_timeout 75; proxy_send_timeout 300; proxy_read_timeout 300; + client_max_body_size 128M; # Allow file uploads up to 128MB # Serve Django via uWSGI location / { @@ -84,13 +85,15 @@ server { # Route TS proxy requests to the dedicated instance location /proxy/ { - proxy_pass http://127.0.0.1:5656; + include uwsgi_params; + uwsgi_pass unix:/app/uwsgi.sock; + uwsgi_buffering off; # Explicitly disable uwsgi buffering for streaming proxy_http_version 1.1; proxy_set_header Connection ""; proxy_buffering off; proxy_cache off; - proxy_read_timeout 3600s; - proxy_send_timeout 3600s; + proxy_read_timeout 300s; + proxy_send_timeout 300s; client_max_body_size 0; proxy_set_header X-Real-IP $remote_addr; proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; From 00a8609b352eb909915adab025097ab352b27cc1 Mon Sep 17 00:00:00 2001 From: GitHub Actions Date: Fri, 25 Apr 2025 20:32:59 +0000 Subject: [PATCH 0087/1435] Increment build number to 29 [skip ci] --- version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/version.py b/version.py index a8aae639..8ad97b43 100644 --- a/version.py +++ b/version.py @@ -2,4 +2,4 @@ Dispatcharr version information. """ __version__ = '0.3.3' # Follow semantic versioning (MAJOR.MINOR.PATCH) -__build__ = '28' # Auto-incremented on builds +__build__ = '29' # Auto-incremented on builds From b6fe53ba2b3570abb3fcd8c0e8e64921824b5e27 Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Fri, 25 Apr 2025 15:49:42 -0500 Subject: [PATCH 0088/1435] Removed no buffer for uwsgi. Caused issues. --- docker/nginx.conf | 1 - 1 file changed, 1 deletion(-) diff --git a/docker/nginx.conf b/docker/nginx.conf index 8382b73f..b440f773 100644 --- a/docker/nginx.conf +++ b/docker/nginx.conf @@ -87,7 +87,6 @@ server { location /proxy/ { include uwsgi_params; uwsgi_pass unix:/app/uwsgi.sock; - uwsgi_buffering off; # Explicitly disable uwsgi buffering for streaming proxy_http_version 1.1; proxy_set_header Connection ""; proxy_buffering off; From a2b499d45373cba7f272d99a9a839c12b9b1735f Mon Sep 17 00:00:00 2001 From: GitHub Actions Date: Fri, 25 Apr 2025 20:50:04 +0000 Subject: [PATCH 0089/1435] Increment build number to 30 [skip ci] --- version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/version.py b/version.py index 8ad97b43..a0bbdd9e 100644 --- a/version.py +++ b/version.py @@ -2,4 +2,4 @@ Dispatcharr version information. """ __version__ = '0.3.3' # Follow semantic versioning (MAJOR.MINOR.PATCH) -__build__ = '29' # Auto-incremented on builds +__build__ = '30' # Auto-incremented on builds From 8b057818b74d95d40562c7bf557fd0b21d525187 Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Fri, 25 Apr 2025 16:55:30 -0500 Subject: [PATCH 0090/1435] Adjusted logo box so data didn't shift with different size logos. 
--- frontend/src/pages/Stats.jsx | 23 +++++++++++++++++------ 1 file changed, 17 insertions(+), 6 deletions(-) diff --git a/frontend/src/pages/Stats.jsx b/frontend/src/pages/Stats.jsx index 4754f68a..e2e90a7d 100644 --- a/frontend/src/pages/Stats.jsx +++ b/frontend/src/pages/Stats.jsx @@ -325,12 +325,23 @@ const ChannelCard = ({ channel, clients, stopClient, stopChannel, logos, channel > - channel logo + + channel logo + From ff0deffe3672a74792ca1371ae14ca9b52d9b4c5 Mon Sep 17 00:00:00 2001 From: GitHub Actions Date: Fri, 25 Apr 2025 21:56:53 +0000 Subject: [PATCH 0091/1435] Increment build number to 31 [skip ci] --- version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/version.py b/version.py index a0bbdd9e..360dc0ae 100644 --- a/version.py +++ b/version.py @@ -2,4 +2,4 @@ Dispatcharr version information. """ __version__ = '0.3.3' # Follow semantic versioning (MAJOR.MINOR.PATCH) -__build__ = '30' # Auto-incremented on builds +__build__ = '31' # Auto-incremented on builds From 81fecde3b56f59bf55ec231754247c695a6a8497 Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Sat, 26 Apr 2025 08:29:18 -0500 Subject: [PATCH 0092/1435] Add stream name to channel status. --- apps/proxy/ts_proxy/channel_status.py | 39 +++++++++++++++++++++++++++ 1 file changed, 39 insertions(+) diff --git a/apps/proxy/ts_proxy/channel_status.py b/apps/proxy/ts_proxy/channel_status.py index d4e33f02..fe5b1f3b 100644 --- a/apps/proxy/ts_proxy/channel_status.py +++ b/apps/proxy/ts_proxy/channel_status.py @@ -6,6 +6,7 @@ from .redis_keys import RedisKeys from .constants import TS_PACKET_SIZE, ChannelMetadataField from redis.exceptions import ConnectionError, TimeoutError from .utils import get_logger +from django.db import DatabaseError # Add import for error handling logger = get_logger() @@ -45,6 +46,25 @@ class ChannelStatus: 'buffer_index': int(buffer_index_value.decode('utf-8')) if buffer_index_value else 0, } + # Add stream ID and name information + stream_id_bytes = metadata.get(ChannelMetadataField.STREAM_ID.encode('utf-8')) + if stream_id_bytes: + try: + stream_id = int(stream_id_bytes.decode('utf-8')) + info['stream_id'] = stream_id + + # Look up stream name from database + try: + from apps.channels.models import Stream + stream = Stream.objects.filter(id=stream_id).first() + if stream: + info['stream_name'] = stream.name + logger.debug(f"Added stream name '{stream.name}' for stream ID {stream_id}") + except (ImportError, DatabaseError) as e: + logger.warning(f"Failed to get stream name for ID {stream_id}: {e}") + except ValueError: + logger.warning(f"Invalid stream_id format in Redis: {stream_id_bytes}") + # Add timing information state_changed_field = ChannelMetadataField.STATE_CHANGED_AT.encode('utf-8') if state_changed_field in metadata: @@ -285,6 +305,25 @@ class ChannelStatus: 'uptime': uptime } + # Add stream ID and name information + stream_id_bytes = metadata.get(ChannelMetadataField.STREAM_ID.encode('utf-8')) + if stream_id_bytes: + try: + stream_id = int(stream_id_bytes.decode('utf-8')) + info['stream_id'] = stream_id + + # Look up stream name from database + try: + from apps.channels.models import Stream + stream = Stream.objects.filter(id=stream_id).first() + if stream: + info['stream_name'] = stream.name + logger.debug(f"Added stream name '{stream.name}' for stream ID {stream_id}") + except (ImportError, DatabaseError) as e: + logger.warning(f"Failed to get stream name for ID {stream_id}: {e}") + except ValueError: + logger.warning(f"Invalid stream_id format in Redis: 
{stream_id_bytes}") + # Add data throughput information to basic info total_bytes_bytes = proxy_server.redis_client.hget(metadata_key, ChannelMetadataField.TOTAL_BYTES.encode('utf-8')) if total_bytes_bytes: From 7cfe7c2998ab101d0815fe25c57afcf60892b381 Mon Sep 17 00:00:00 2001 From: GitHub Actions Date: Sat, 26 Apr 2025 13:29:51 +0000 Subject: [PATCH 0093/1435] Increment build number to 32 [skip ci] --- version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/version.py b/version.py index 360dc0ae..0fedf3fc 100644 --- a/version.py +++ b/version.py @@ -2,4 +2,4 @@ Dispatcharr version information. """ __version__ = '0.3.3' # Follow semantic versioning (MAJOR.MINOR.PATCH) -__build__ = '31' # Auto-incremented on builds +__build__ = '32' # Auto-incremented on builds From d3a7dbca1074058a77cbc4e1a43a1d07c5166680 Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Sat, 26 Apr 2025 08:36:46 -0500 Subject: [PATCH 0094/1435] Imported missing os --- apps/epg/models.py | 1 + 1 file changed, 1 insertion(+) diff --git a/apps/epg/models.py b/apps/epg/models.py index f026c558..2f7d5990 100644 --- a/apps/epg/models.py +++ b/apps/epg/models.py @@ -2,6 +2,7 @@ from django.db import models from django.utils import timezone from django_celery_beat.models import PeriodicTask from django.conf import settings +import os class EPGSource(models.Model): SOURCE_TYPE_CHOICES = [ From e02e1458fa74459f3a950c153e44002d6fb9ceac Mon Sep 17 00:00:00 2001 From: GitHub Actions Date: Sat, 26 Apr 2025 13:37:06 +0000 Subject: [PATCH 0095/1435] Increment build number to 33 [skip ci] --- version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/version.py b/version.py index 0fedf3fc..7bb9e07f 100644 --- a/version.py +++ b/version.py @@ -2,4 +2,4 @@ Dispatcharr version information. 
""" __version__ = '0.3.3' # Follow semantic versioning (MAJOR.MINOR.PATCH) -__build__ = '32' # Auto-incremented on builds +__build__ = '33' # Auto-incremented on builds From 3054cf2ae9fa9ed5bea21cde7401374e45faef3f Mon Sep 17 00:00:00 2001 From: dekzter Date: Sun, 27 Apr 2025 10:32:29 -0400 Subject: [PATCH 0096/1435] initial xtreamcodes support --- ...upm3uaccount_custom_properties_and_more.py | 23 +++ apps/channels/models.py | 1 + ...count_type_m3uaccount_password_and_more.py | 28 +++ apps/m3u/models.py | 7 + apps/m3u/serializers.py | 8 +- apps/m3u/tasks.py | 164 +++++++++++++----- core/xtream_codes.py | 26 +++ frontend/src/api.js | 31 +++- frontend/src/components/forms/M3U.jsx | 84 +++++++-- 9 files changed, 305 insertions(+), 67 deletions(-) create mode 100644 apps/channels/migrations/0018_channelgroupm3uaccount_custom_properties_and_more.py create mode 100644 apps/m3u/migrations/0008_m3uaccount_account_type_m3uaccount_password_and_more.py create mode 100644 core/xtream_codes.py diff --git a/apps/channels/migrations/0018_channelgroupm3uaccount_custom_properties_and_more.py b/apps/channels/migrations/0018_channelgroupm3uaccount_custom_properties_and_more.py new file mode 100644 index 00000000..7d2dafb4 --- /dev/null +++ b/apps/channels/migrations/0018_channelgroupm3uaccount_custom_properties_and_more.py @@ -0,0 +1,23 @@ +# Generated by Django 5.1.6 on 2025-04-27 14:12 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('dispatcharr_channels', '0017_alter_channelgroup_name'), + ] + + operations = [ + migrations.AddField( + model_name='channelgroupm3uaccount', + name='custom_properties', + field=models.TextField(blank=True, null=True), + ), + migrations.AlterField( + model_name='channel', + name='channel_number', + field=models.IntegerField(db_index=True), + ), + ] diff --git a/apps/channels/models.py b/apps/channels/models.py index 0b66c468..13172e36 100644 --- a/apps/channels/models.py +++ b/apps/channels/models.py @@ -441,6 +441,7 @@ class ChannelGroupM3UAccount(models.Model): on_delete=models.CASCADE, related_name='channel_group' ) + custom_properties = models.TextField(null=True, blank=True) enabled = models.BooleanField(default=True) class Meta: diff --git a/apps/m3u/migrations/0008_m3uaccount_account_type_m3uaccount_password_and_more.py b/apps/m3u/migrations/0008_m3uaccount_account_type_m3uaccount_password_and_more.py new file mode 100644 index 00000000..02d2937f --- /dev/null +++ b/apps/m3u/migrations/0008_m3uaccount_account_type_m3uaccount_password_and_more.py @@ -0,0 +1,28 @@ +# Generated by Django 5.1.6 on 2025-04-27 12:56 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('m3u', '0007_remove_m3uaccount_uploaded_file_m3uaccount_file_path'), + ] + + operations = [ + migrations.AddField( + model_name='m3uaccount', + name='account_type', + field=models.CharField(choices=[('STD', 'Standard'), ('XC', 'Xtream Codes')], default='STD'), + ), + migrations.AddField( + model_name='m3uaccount', + name='password', + field=models.CharField(blank=True, max_length=255, null=True), + ), + migrations.AddField( + model_name='m3uaccount', + name='username', + field=models.CharField(blank=True, max_length=255, null=True), + ), + ] diff --git a/apps/m3u/models.py b/apps/m3u/models.py index 25a332c6..99ead627 100644 --- a/apps/m3u/models.py +++ b/apps/m3u/models.py @@ -10,6 +10,10 @@ from core.models import CoreSettings, UserAgent CUSTOM_M3U_ACCOUNT_NAME="custom" class 
M3UAccount(models.Model): + class Types(models.TextChoices): + STADNARD = "STD", "Standard" + XC = "XC", "Xtream Codes" + """Represents an M3U Account for IPTV streams.""" name = models.CharField( max_length=255, @@ -69,6 +73,9 @@ class M3UAccount(models.Model): blank=True, related_name='m3u_accounts' ) + account_type = models.CharField(choices=Types.choices, default=Types.STADNARD) + username = models.CharField(max_length=255, null=True, blank=True) + password = models.CharField(max_length=255, null=True, blank=True) custom_properties = models.TextField(null=True, blank=True) refresh_interval = models.IntegerField(default=24) refresh_task = models.ForeignKey( diff --git a/apps/m3u/serializers.py b/apps/m3u/serializers.py index d79b0117..dd9b0e7a 100644 --- a/apps/m3u/serializers.py +++ b/apps/m3u/serializers.py @@ -66,8 +66,14 @@ class M3UAccountSerializer(serializers.ModelSerializer): fields = [ 'id', 'name', 'server_url', 'file_path', 'server_group', 'max_streams', 'is_active', 'created_at', 'updated_at', 'filters', 'user_agent', 'profiles', 'locked', - 'channel_groups', 'refresh_interval' + 'channel_groups', 'refresh_interval', 'custom_properties', 'account_type', 'username', 'password' ] + extra_kwargs = { + 'password': { + 'required': False, + 'allow_blank': True, + }, + } def update(self, instance, validated_data): # Pop out channel group memberships so we can handle them manually diff --git a/apps/m3u/tasks.py b/apps/m3u/tasks.py index beacaaa2..978f9763 100644 --- a/apps/m3u/tasks.py +++ b/apps/m3u/tasks.py @@ -21,6 +21,7 @@ import json from core.utils import RedisClient, acquire_task_lock, release_task_lock from core.models import CoreSettings from asgiref.sync import async_to_sync +from core.xtream_codes import Client as XCClient logger = logging.getLogger(__name__) @@ -172,32 +173,33 @@ def check_field_lengths(streams_to_create): print("") @shared_task -def process_groups(account, group_names): - existing_groups = {group.name: group for group in ChannelGroup.objects.filter(name__in=group_names)} +def process_groups(account, groups): + existing_groups = {group.name: group for group in ChannelGroup.objects.filter(name__in=groups.keys())} logger.info(f"Currently {len(existing_groups)} existing groups") - groups = [] + group_objs = [] groups_to_create = [] - for group_name in group_names: + for group_name, custom_props in groups.items(): logger.info(f"Handling group: {group_name}") - if group_name in existing_groups: - groups.append(existing_groups[group_name]) - else: + if group_name not in existing_groups: groups_to_create.append(ChannelGroup( name=group_name, )) + else: + group_objs.append(existing_groups[group_name]) if groups_to_create: logger.info(f"Creating {len(groups_to_create)} groups") created = ChannelGroup.bulk_create_and_fetch(groups_to_create) logger.info(f"Created {len(created)} groups") - groups.extend(created) + group_objs.extend(created) relations = [] - for group in groups: + for group in group_objs: relations.append(ChannelGroupM3UAccount( channel_group=group, m3u_account=account, + custom_properties=json.dumps(groups[group.name]), )) ChannelGroupM3UAccount.objects.bulk_create( @@ -205,6 +207,78 @@ def process_groups(account, group_names): ignore_conflicts=True ) +@shared_task +def process_xc_category(account_id, batch, groups, hash_keys): + account = M3UAccount.objects.get(id=account_id) + + streams_to_create = [] + streams_to_update = [] + stream_hashes = {} + + xc_client = XCClient(account.server_url, account.username, account.password) + for group_name, 
props in batch.items(): + streams = xc_client.get_live_category_streams(props['xc_id']) + for stream in streams: + name = stream["name"] + url = xc_client.get_stream_url(stream["stream_id"]) + tvg_id = stream["epg_channel_id"] + tvg_logo = stream["stream_icon"] + group_title = group_name + + stream_hash = Stream.generate_hash_key(name, url, tvg_id, hash_keys) + stream_props = { + "name": name, + "url": url, + "logo_url": tvg_logo, + "tvg_id": tvg_id, + "m3u_account": account, + "channel_group_id": int(groups.get(group_title)), + "stream_hash": stream_hash, + "custom_properties": json.dumps(stream), + } + + if stream_hash not in stream_hashes: + stream_hashes[stream_hash] = stream_props + + existing_streams = {s.stream_hash: s for s in Stream.objects.filter(stream_hash__in=stream_hashes.keys())} + + for stream_hash, stream_props in stream_hashes.items(): + if stream_hash in existing_streams: + obj = existing_streams[stream_hash] + existing_attr = {field.name: getattr(obj, field.name) for field in Stream._meta.fields if field != 'channel_group_id'} + changed = any(existing_attr[key] != value for key, value in stream_props.items() if key != 'channel_group_id') + + if changed: + for key, value in stream_props.items(): + setattr(obj, key, value) + obj.last_seen = timezone.now() + streams_to_update.append(obj) + del existing_streams[stream_hash] + else: + existing_streams[stream_hash] = obj + else: + stream_props["last_seen"] = timezone.now() + streams_to_create.append(Stream(**stream_props)) + + try: + with transaction.atomic(): + if streams_to_create: + Stream.objects.bulk_create(streams_to_create, ignore_conflicts=True) + if streams_to_update: + Stream.objects.bulk_update(streams_to_update, { key for key in stream_props.keys() if key not in ["m3u_account", "stream_hash"] and key not in hash_keys}) + # if len(existing_streams.keys()) > 0: + # Stream.objects.bulk_update(existing_streams.values(), ["last_seen"]) + except Exception as e: + logger.error(f"Bulk create failed: {str(e)}") + + retval = f"Batch processed: {len(streams_to_create)} created, {len(streams_to_update)} updated." 
+ + # Aggressive garbage collection + del streams_to_create, streams_to_update, stream_hashes, existing_streams + gc.collect() + + return retval + @shared_task def process_m3u_batch(account_id, batch, groups, hash_keys): """Processes a batch of M3U streams using bulk operations.""" @@ -227,23 +301,7 @@ def process_m3u_batch(account_id, batch, groups, hash_keys): logger.debug(f"Skipping stream in disabled group: {group_title}") continue - # if any(url.lower().endswith(ext) for ext in SKIP_EXTS) or len(url) > 2000: - # continue - - # if _matches_filters(name, group_title, account.filters.all()): - # continue - - # if any(compiled_pattern.search(current_info['name']) for ftype, compiled_pattern in compiled_filters if ftype == 'name'): - # excluded_count += 1 - # current_info = None - # continue - stream_hash = Stream.generate_hash_key(name, url, tvg_id, hash_keys) - # if redis_client.exists(f"m3u_refresh:{stream_hash}"): - # # duplicate already processed by another batch - # continue - - # redis_client.set(f"m3u_refresh:{stream_hash}", "true") stream_props = { "name": name, "url": url, @@ -332,24 +390,38 @@ def refresh_m3u_groups(account_id, use_cache=False, full_refresh=False): return f"M3UAccount with ID={account_id} not found or inactive.", None extinf_data = [] - groups = set(["Default Group"]) + groups = {"Default Group": {}} - for line in fetch_m3u_lines(account, use_cache): - line = line.strip() - if line.startswith("#EXTINF"): - parsed = parse_extinf_line(line) - if parsed: - if "group-title" in parsed["attributes"]: - groups.add(parsed["attributes"]["group-title"]) + xc_client = None + if account.account_type == M3UAccount.Types.XC: + xc_client = XCClient(account.server_url, account.username, account.password) + try: + xc_client.authenticate() + except Exception as e: + release_task_lock('refresh_m3u_account_groups', account_id) + return f"M3UAccount with ID={account_id} failed to authenticate with XC server.", None - extinf_data.append(parsed) - elif extinf_data and line.startswith("http"): - # Associate URL with the last EXTINF line - extinf_data[-1]["url"] = line + xc_categories = xc_client.get_live_categories() + for category in xc_categories: + groups[category["category_name"]] = { + "xc_id": category["category_id"], + } + else: + for line in fetch_m3u_lines(account, use_cache): + line = line.strip() + if line.startswith("#EXTINF"): + parsed = parse_extinf_line(line) + if parsed: + if "group-title" in parsed["attributes"]: + groups[parsed["attributes"]["group-title"]] = {} + + extinf_data.append(parsed) + elif extinf_data and line.startswith("http"): + # Associate URL with the last EXTINF line + extinf_data[-1]["url"] = line send_m3u_update(account_id, "processing_groups", 0) - groups = list(groups) cache_path = os.path.join(m3u_dir, f"{account_id}.json") with open(cache_path, 'w', encoding='utf-8') as f: json.dump({ @@ -412,7 +484,7 @@ def refresh_single_m3u_account(account_id): if not extinf_data: try: extinf_data, groups = refresh_m3u_groups(account_id, full_refresh=True) - if not extinf_data or not groups: + if not groups: release_task_lock('refresh_single_m3u_account', account_id) return "Failed to update m3u account, task may already be running" except: @@ -426,9 +498,17 @@ def refresh_single_m3u_account(account_id): m3u_account__enabled=True # Filter by the enabled flag in the join table )} - # Break into batches and process in parallel - batches = [extinf_data[i:i + BATCH_SIZE] for i in range(0, len(extinf_data), BATCH_SIZE)] - task_group = 
group(process_m3u_batch.s(account_id, batch, existing_groups, hash_keys) for batch in batches) + if account.account_type == M3UAccount.Types.STADNARD: + # Break into batches and process in parallel + batches = [extinf_data[i:i + BATCH_SIZE] for i in range(0, len(extinf_data), BATCH_SIZE)] + task_group = group(process_m3u_batch.s(account_id, batch, existing_groups, hash_keys) for batch in batches) + else: + filtered_groups = [(k, v) for k, v in groups.items() if k in existing_groups] + batches = [ + dict(filtered_groups[i:i + 2]) + for i in range(0, len(filtered_groups), 2) + ] + task_group = group(process_xc_category.s(account_id, batch, existing_groups, hash_keys) for batch in batches) total_batches = len(batches) completed_batches = 0 diff --git a/core/xtream_codes.py b/core/xtream_codes.py new file mode 100644 index 00000000..e79ec5e9 --- /dev/null +++ b/core/xtream_codes.py @@ -0,0 +1,26 @@ +import requests + +class Client: + host = "" + username = "" + password = "" + + def __init__(self, host, username, password): + self.host = host + self.username = username + self.password = password + + def authenticate(self): + response = requests.get(f"{self.host}/player_api.php?username={self.username}&password={self.password}") + return response.json() + + def get_live_categories(self): + response = requests.get(f"{self.host}/player_api.php?username={self.username}&password={self.password}&action=get_live_categories") + return response.json() + + def get_live_category_streams(self, category_id): + response = requests.get(f"{self.host}/player_api.php?username={self.username}&password={self.password}&action=get_live_streams&category_id={category_id}") + return response.json() + + def get_stream_url(self, stream_id): + return f"{self.host}/{self.username}/{self.password}/{stream_id}" diff --git a/frontend/src/api.js b/frontend/src/api.js index 38aac846..2c7f3a05 100644 --- a/frontend/src/api.js +++ b/frontend/src/api.js @@ -650,6 +650,10 @@ export default class API { } static async addPlaylist(values) { + if (values.custom_properties) { + values.custom_properties = JSON.stringify(values.custom_properties); + } + try { let body = null; if (values.file) { @@ -717,6 +721,10 @@ export default class API { static async updatePlaylist(values) { const { id, ...payload } = values; + if (payload.custom_properties) { + payload.custom_properties = JSON.stringify(payload.custom_properties); + } + try { let body = null; if (payload.file) { @@ -735,6 +743,7 @@ export default class API { body = { ...payload }; delete body.file; } + console.log(body); const response = await request(`${host}/api/m3u/accounts/${id}/`, { method: 'PATCH', @@ -1241,10 +1250,13 @@ export default class API { static async switchStream(channelId, streamId) { try { - const response = await request(`${host}/proxy/ts/change_stream/${channelId}`, { - method: 'POST', - body: { stream_id: streamId }, - }); + const response = await request( + `${host}/proxy/ts/change_stream/${channelId}`, + { + method: 'POST', + body: { stream_id: streamId }, + } + ); return response; } catch (e) { @@ -1255,10 +1267,13 @@ export default class API { static async nextStream(channelId, streamId) { try { - const response = await request(`${host}/proxy/ts/next_stream/${channelId}`, { - method: 'POST', - body: { stream_id: streamId }, - }); + const response = await request( + `${host}/proxy/ts/next_stream/${channelId}`, + { + method: 'POST', + body: { stream_id: streamId }, + } + ); return response; } catch (e) { diff --git a/frontend/src/components/forms/M3U.jsx 
b/frontend/src/components/forms/M3U.jsx index bd9cf3c8..2d0f29ba 100644 --- a/frontend/src/components/forms/M3U.jsx +++ b/frontend/src/components/forms/M3U.jsx @@ -18,6 +18,8 @@ import { Stack, Group, Switch, + Box, + PasswordInput, } from '@mantine/core'; import M3UGroupFilter from './M3UGroupFilter'; import useChannelsStore from '../../store/channels'; @@ -35,13 +37,7 @@ const M3U = ({ playlist = null, isOpen, onClose, playlistCreated = false }) => { const [profileModalOpen, setProfileModalOpen] = useState(false); const [groupFilterModalOpen, setGroupFilterModalOpen] = useState(false); const [loadingText, setLoadingText] = useState(''); - - const handleFileChange = (file) => { - console.log(file); - if (file) { - setFile(file); - } - }; + const [showCredentialFields, setShowCredentialFields] = useState(false); const form = useForm({ mode: 'uncontrolled', @@ -52,6 +48,9 @@ const M3U = ({ playlist = null, isOpen, onClose, playlistCreated = false }) => { is_active: true, max_streams: 0, refresh_interval: 24, + is_xc: false, + username: '', + password: '', }, validate: { @@ -63,6 +62,7 @@ const M3U = ({ playlist = null, isOpen, onClose, playlistCreated = false }) => { useEffect(() => { if (playlist) { + const customProperties = JSON.parse(playlist.custom_properties || '{}'); form.setValues({ name: playlist.name, server_url: playlist.server_url, @@ -70,14 +70,43 @@ const M3U = ({ playlist = null, isOpen, onClose, playlistCreated = false }) => { user_agent: playlist.user_agent ? `${playlist.user_agent}` : '0', is_active: playlist.is_active, refresh_interval: playlist.refresh_interval, + is_xc: playlist.account_type == 'XC', + username: customProperties.username ?? '', + password: '', }); + + if (customProperties.is_xc) { + setShowCredentialFields(true); + } else { + setShowCredentialFields(false); + } } else { form.reset(); } }, [playlist]); + useEffect(() => { + if (form.values.is_xc) { + setShowCredentialFields(true); + } + }, [form.values.is_xc]); + const onSubmit = async () => { - const values = form.getValues(); + const { ...values } = form.getValues(); + + if (values.is_xc && values.password == '') { + // If account XC and no password input, assuming no password change + // from previously stored value. 
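+      // (The serializer marks password as not required and allows blank,
+      // so omitting the field leaves the stored credential unchanged.)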
+ delete values.password; + } + + if (values.is_xc) { + values.account_type = 'XC'; + } else { + values.account_type = 'STD'; + } + + delete values.is_xc; if (values.user_agent == '0') { values.user_agent = null; @@ -150,7 +179,6 @@ const M3U = ({ playlist = null, isOpen, onClose, playlistCreated = false }) => { {...form.getInputProps('name')} key={form.key('name')} /> - { {...form.getInputProps('server_url')} key={form.key('server_url')} /> - - + + {form.getValues().is_xc && ( + + + + + )} + + {!form.getValues().is_xc && ( + + )} From bfaa52ea133bf67554a75b62e8a141848130d821 Mon Sep 17 00:00:00 2001 From: dekzter Date: Sun, 27 Apr 2025 10:38:30 -0400 Subject: [PATCH 0097/1435] fixed username field --- frontend/src/components/forms/M3U.jsx | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/frontend/src/components/forms/M3U.jsx b/frontend/src/components/forms/M3U.jsx index 2d0f29ba..51370f27 100644 --- a/frontend/src/components/forms/M3U.jsx +++ b/frontend/src/components/forms/M3U.jsx @@ -62,7 +62,6 @@ const M3U = ({ playlist = null, isOpen, onClose, playlistCreated = false }) => { useEffect(() => { if (playlist) { - const customProperties = JSON.parse(playlist.custom_properties || '{}'); form.setValues({ name: playlist.name, server_url: playlist.server_url, @@ -71,11 +70,11 @@ const M3U = ({ playlist = null, isOpen, onClose, playlistCreated = false }) => { is_active: playlist.is_active, refresh_interval: playlist.refresh_interval, is_xc: playlist.account_type == 'XC', - username: customProperties.username ?? '', + username: playlist.username ?? '', password: '', }); - if (customProperties.is_xc) { + if (playlist.account_type == 'XC') { setShowCredentialFields(true); } else { setShowCredentialFields(false); From f295ee219cc5a7524cdb86a4f80e232dc19d39f8 Mon Sep 17 00:00:00 2001 From: dekzter Date: Sun, 27 Apr 2025 10:54:39 -0400 Subject: [PATCH 0098/1435] removed db index here, I don't think it's needed --- ...0018_channelgroupm3uaccount_custom_properties_and_more.py | 5 ----- apps/channels/models.py | 2 +- 2 files changed, 1 insertion(+), 6 deletions(-) diff --git a/apps/channels/migrations/0018_channelgroupm3uaccount_custom_properties_and_more.py b/apps/channels/migrations/0018_channelgroupm3uaccount_custom_properties_and_more.py index 7d2dafb4..51507843 100644 --- a/apps/channels/migrations/0018_channelgroupm3uaccount_custom_properties_and_more.py +++ b/apps/channels/migrations/0018_channelgroupm3uaccount_custom_properties_and_more.py @@ -15,9 +15,4 @@ class Migration(migrations.Migration): name='custom_properties', field=models.TextField(blank=True, null=True), ), - migrations.AlterField( - model_name='channel', - name='channel_number', - field=models.IntegerField(db_index=True), - ), ] diff --git a/apps/channels/models.py b/apps/channels/models.py index 13172e36..ee92a43b 100644 --- a/apps/channels/models.py +++ b/apps/channels/models.py @@ -210,7 +210,7 @@ class ChannelManager(models.Manager): class Channel(models.Model): - channel_number = models.IntegerField(db_index=True) + channel_number = models.IntegerField() name = models.CharField(max_length=255) logo = models.ForeignKey( 'Logo', From 1ccf24fe5f936ccdd3dacb72125408c1f33cecb8 Mon Sep 17 00:00:00 2001 From: dekzter Date: Sun, 27 Apr 2025 11:08:12 -0400 Subject: [PATCH 0099/1435] Fixed caching path for non-xc only --- apps/m3u/tasks.py | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/apps/m3u/tasks.py b/apps/m3u/tasks.py index 978f9763..ce536234 100644 --- a/apps/m3u/tasks.py +++ 
b/apps/m3u/tasks.py @@ -420,14 +420,14 @@ def refresh_m3u_groups(account_id, use_cache=False, full_refresh=False): # Associate URL with the last EXTINF line extinf_data[-1]["url"] = line - send_m3u_update(account_id, "processing_groups", 0) + cache_path = os.path.join(m3u_dir, f"{account_id}.json") + with open(cache_path, 'w', encoding='utf-8') as f: + json.dump({ + "extinf_data": extinf_data, + "groups": groups, + }, f) - cache_path = os.path.join(m3u_dir, f"{account_id}.json") - with open(cache_path, 'w', encoding='utf-8') as f: - json.dump({ - "extinf_data": extinf_data, - "groups": groups, - }, f) + send_m3u_update(account_id, "processing_groups", 0) process_groups(account, groups) From 88f27d62f1e72d26b11173bd449fdca03bdac452 Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Sun, 27 Apr 2025 14:43:09 -0500 Subject: [PATCH 0100/1435] Adds current m3u profile to stats. --- apps/proxy/ts_proxy/channel_status.py | 38 +++++++++++++++ .../ts_proxy/services/channel_service.py | 25 ++++++---- apps/proxy/ts_proxy/views.py | 3 +- frontend/src/pages/Stats.jsx | 46 +++++++++++++++++++ 4 files changed, 102 insertions(+), 10 deletions(-) diff --git a/apps/proxy/ts_proxy/channel_status.py b/apps/proxy/ts_proxy/channel_status.py index fe5b1f3b..43e01df8 100644 --- a/apps/proxy/ts_proxy/channel_status.py +++ b/apps/proxy/ts_proxy/channel_status.py @@ -65,6 +65,25 @@ class ChannelStatus: except ValueError: logger.warning(f"Invalid stream_id format in Redis: {stream_id_bytes}") + # Add M3U profile information + m3u_profile_id_bytes = metadata.get(ChannelMetadataField.M3U_PROFILE.encode('utf-8')) + if m3u_profile_id_bytes: + try: + m3u_profile_id = int(m3u_profile_id_bytes.decode('utf-8')) + info['m3u_profile_id'] = m3u_profile_id + + # Look up M3U profile name from database + try: + from apps.m3u.models import M3UAccountProfile + m3u_profile = M3UAccountProfile.objects.filter(id=m3u_profile_id).first() + if m3u_profile: + info['m3u_profile_name'] = m3u_profile.name + logger.debug(f"Added M3U profile name '{m3u_profile.name}' for profile ID {m3u_profile_id}") + except (ImportError, DatabaseError) as e: + logger.warning(f"Failed to get M3U profile name for ID {m3u_profile_id}: {e}") + except ValueError: + logger.warning(f"Invalid m3u_profile_id format in Redis: {m3u_profile_id_bytes}") + # Add timing information state_changed_field = ChannelMetadataField.STATE_CHANGED_AT.encode('utf-8') if state_changed_field in metadata: @@ -380,6 +399,25 @@ class ChannelStatus: # Add clients to info info['clients'] = clients + # Add M3U profile information + m3u_profile_id_bytes = metadata.get(ChannelMetadataField.M3U_PROFILE.encode('utf-8')) + if m3u_profile_id_bytes: + try: + m3u_profile_id = int(m3u_profile_id_bytes.decode('utf-8')) + info['m3u_profile_id'] = m3u_profile_id + + # Look up M3U profile name from database + try: + from apps.m3u.models import M3UAccountProfile + m3u_profile = M3UAccountProfile.objects.filter(id=m3u_profile_id).first() + if m3u_profile: + info['m3u_profile_name'] = m3u_profile.name + logger.debug(f"Added M3U profile name '{m3u_profile.name}' for profile ID {m3u_profile_id}") + except (ImportError, DatabaseError) as e: + logger.warning(f"Failed to get M3U profile name for ID {m3u_profile_id}: {e}") + except ValueError: + logger.warning(f"Invalid m3u_profile_id format in Redis: {m3u_profile_id_bytes}") + return info except Exception as e: logger.error(f"Error getting channel info: {e}") diff --git a/apps/proxy/ts_proxy/services/channel_service.py 
b/apps/proxy/ts_proxy/services/channel_service.py index 3ac62af4..9aa8c66b 100644 --- a/apps/proxy/ts_proxy/services/channel_service.py +++ b/apps/proxy/ts_proxy/services/channel_service.py @@ -7,7 +7,7 @@ import logging import time import json from django.shortcuts import get_object_or_404 -from apps.channels.models import Channel +from apps.channels.models import Channel, Stream from apps.proxy.config import TSConfig as Config from ..server import ProxyServer from ..redis_keys import RedisKeys @@ -58,7 +58,7 @@ class ChannelService: # Verify the stream_id was set stream_id_value = proxy_server.redis_client.hget(metadata_key, ChannelMetadataField.STREAM_ID) if stream_id_value: - logger.info(f"Verified stream_id {stream_id_value.decode('utf-8')} is now set in Redis") + logger.debug(f"Verified stream_id {stream_id_value.decode('utf-8')} is now set in Redis") else: logger.error(f"Failed to set stream_id {stream_id} in Redis before initialization") @@ -82,7 +82,7 @@ class ChannelService: return success @staticmethod - def change_stream_url(channel_id, new_url=None, user_agent=None, target_stream_id=None): + def change_stream_url(channel_id, new_url=None, user_agent=None, target_stream_id=None, m3u_profile_id=None): """ Change the URL of an existing stream. @@ -91,6 +91,7 @@ class ChannelService: new_url: New stream URL (optional if target_stream_id is provided) user_agent: Optional user agent to update target_stream_id: Optional target stream ID to switch to + m3u_profile_id: Optional M3U profile ID to update Returns: dict: Result information including success status and diagnostics @@ -109,6 +110,10 @@ class ChannelService: new_url = stream_info['url'] user_agent = stream_info['user_agent'] stream_id = target_stream_id + # Extract M3U profile ID from stream info if available + if 'm3u_profile_id' in stream_info: + m3u_profile_id = stream_info['m3u_profile_id'] + logger.info(f"Found M3U profile ID {m3u_profile_id} for stream ID {stream_id}") elif target_stream_id: # If we have both URL and target_stream_id, use the target_stream_id stream_id = target_stream_id @@ -163,7 +168,7 @@ class ChannelService: # Update metadata in Redis regardless of ownership if proxy_server.redis_client: try: - ChannelService._update_channel_metadata(channel_id, new_url, user_agent, stream_id) + ChannelService._update_channel_metadata(channel_id, new_url, user_agent, stream_id, m3u_profile_id) result['metadata_updated'] = True except Exception as e: logger.error(f"Error updating Redis metadata: {e}", exc_info=True) @@ -188,7 +193,7 @@ class ChannelService: # If we're not the owner, publish an event for the owner to pick up logger.info(f"Not the owner, requesting URL change via Redis PubSub") if proxy_server.redis_client: - ChannelService._publish_stream_switch_event(channel_id, new_url, user_agent, stream_id) + ChannelService._publish_stream_switch_event(channel_id, new_url, user_agent, stream_id, m3u_profile_id) result.update({ 'direct_update': False, 'event_published': True, @@ -413,7 +418,7 @@ class ChannelService: # Helper methods for Redis operations @staticmethod - def _update_channel_metadata(channel_id, url, user_agent=None, stream_id=None): + def _update_channel_metadata(channel_id, url, user_agent=None, stream_id=None, m3u_profile_id=None): """Update channel metadata in Redis""" proxy_server = ProxyServer.get_instance() @@ -432,7 +437,8 @@ class ChannelService: metadata[ChannelMetadataField.USER_AGENT] = user_agent if stream_id: metadata[ChannelMetadataField.STREAM_ID] = str(stream_id) - 
logger.info(f"Updating stream ID to {stream_id} in Redis for channel {channel_id}") + if m3u_profile_id: + metadata[ChannelMetadataField.M3U_PROFILE] = str(m3u_profile_id) # Use the appropriate method based on the key type if key_type == 'hash': @@ -448,11 +454,11 @@ class ChannelService: switch_key = RedisKeys.switch_request(channel_id) proxy_server.redis_client.setex(switch_key, 30, url) # 30 second TTL - logger.info(f"Updated metadata for channel {channel_id} in Redis") + logger.debug(f"Updated metadata for channel {channel_id} in Redis") return True @staticmethod - def _publish_stream_switch_event(channel_id, new_url, user_agent=None, stream_id=None): + def _publish_stream_switch_event(channel_id, new_url, user_agent=None, stream_id=None, m3u_profile_id=None): """Publish a stream switch event to Redis pubsub""" proxy_server = ProxyServer.get_instance() @@ -465,6 +471,7 @@ class ChannelService: "url": new_url, "user_agent": user_agent, "stream_id": stream_id, + "m3u_profile_id": m3u_profile_id, "requester": proxy_server.worker_id, "timestamp": time.time() } diff --git a/apps/proxy/ts_proxy/views.py b/apps/proxy/ts_proxy/views.py index d71da8d4..87a8e51b 100644 --- a/apps/proxy/ts_proxy/views.py +++ b/apps/proxy/ts_proxy/views.py @@ -351,6 +351,7 @@ def change_stream(request, channel_id): # Use the info from the stream new_url = stream_info['url'] user_agent = stream_info['user_agent'] + m3u_profile_id = stream_info.get('m3u_profile_id') # Stream ID will be passed to change_stream_url later elif not new_url: return JsonResponse({'error': 'Either url or stream_id must be provided'}, status=400) @@ -359,7 +360,7 @@ def change_stream(request, channel_id): # Use the service layer instead of direct implementation # Pass stream_id to ensure proper connection tracking - result = ChannelService.change_stream_url(channel_id, new_url, user_agent, stream_id) + result = ChannelService.change_stream_url(channel_id, new_url, user_agent, stream_id, m3u_profile_id) # Get the stream manager before updating URL stream_manager = proxy_server.stream_managers.get(channel_id) diff --git a/frontend/src/pages/Stats.jsx b/frontend/src/pages/Stats.jsx index e2e90a7d..ef15d605 100644 --- a/frontend/src/pages/Stats.jsx +++ b/frontend/src/pages/Stats.jsx @@ -82,12 +82,23 @@ const ChannelCard = ({ channel, clients, stopClient, stopChannel, logos, channel const [availableStreams, setAvailableStreams] = useState([]); const [isLoadingStreams, setIsLoadingStreams] = useState(false); const [activeStreamId, setActiveStreamId] = useState(null); + const [currentM3UProfile, setCurrentM3UProfile] = useState(null); // Add state for current M3U profile // Safety check - if channel doesn't have required data, don't render if (!channel || !channel.channel_id) { return null; } + // Update M3U profile information when channel data changes + useEffect(() => { + // If the channel data includes M3U profile information, update our state + if (channel.m3u_profile || channel.m3u_profile_name) { + setCurrentM3UProfile({ + name: channel.m3u_profile?.name || channel.m3u_profile_name || 'Default M3U' + }); + } + }, [channel.m3u_profile, channel.m3u_profile_name, channel.stream_id]); + // Fetch available streams for this channel useEffect(() => { const fetchStreams = async () => { @@ -110,6 +121,11 @@ const ChannelCard = ({ channel, clients, stopClient, stopChannel, logos, channel if (matchingStream) { setActiveStreamId(matchingStream.id.toString()); + + // If the stream has M3U profile info, save it + if (matchingStream.m3u_profile) { + 
setCurrentM3UProfile(matchingStream.m3u_profile); + } } } } @@ -138,6 +154,14 @@ const ChannelCard = ({ channel, clients, stopClient, stopChannel, logos, channel // Update the local active stream ID immediately setActiveStreamId(streamId); + // Update M3U profile information if available in the response + if (response && response.m3u_profile) { + setCurrentM3UProfile(response.m3u_profile); + } else if (selectedStream && selectedStream.m3u_profile) { + // Fallback to the profile from the selected stream + setCurrentM3UProfile(selectedStream.m3u_profile); + } + // Show detailed notification with stream name notifications.show({ title: 'Stream switching', @@ -152,6 +176,12 @@ const ChannelCard = ({ channel, clients, stopClient, stopChannel, logos, channel if (channelId) { const updatedStreamData = await API.getChannelStreams(channelId); console.log("Channel streams after switch:", updatedStreamData); + + // Update current stream information with fresh data + const updatedStream = updatedStreamData.find(s => s.id.toString() === streamId); + if (updatedStream && updatedStream.m3u_profile) { + setCurrentM3UProfile(updatedStream.m3u_profile); + } } } catch (error) { console.error("Error checking streams after switch:", error); @@ -305,6 +335,12 @@ const ChannelCard = ({ channel, clients, stopClient, stopChannel, logos, channel const avgBitrate = channel.avg_bitrate || '0 Kbps'; const streamProfileName = channel.stream_profile?.name || 'Unknown Profile'; + // Use currentM3UProfile if available, otherwise fall back to channel data + const m3uProfileName = currentM3UProfile?.name || + channel.m3u_profile?.name || + channel.m3u_profile_name || + 'Default M3U'; + // Create select options for available streams const streamOptions = availableStreams.map(stream => ({ value: stream.id.toString(), @@ -377,6 +413,16 @@ const ChannelCard = ({ channel, clients, stopClient, stopChannel, logos, channel + {/* Display M3U profile information */} + + + + + {m3uProfileName} + + + + {/* Add stream selection dropdown */} {availableStreams.length > 0 && ( From 77c92d52bdfbd0665087a1d6dbff9f6841c26558 Mon Sep 17 00:00:00 2001 From: GitHub Actions Date: Sun, 27 Apr 2025 19:43:43 +0000 Subject: [PATCH 0101/1435] Increment build number to 34 [skip ci] --- version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/version.py b/version.py index 7bb9e07f..5f76a589 100644 --- a/version.py +++ b/version.py @@ -2,4 +2,4 @@ Dispatcharr version information. """ __version__ = '0.3.3' # Follow semantic versioning (MAJOR.MINOR.PATCH) -__build__ = '33' # Auto-incremented on builds +__build__ = '34' # Auto-incremented on builds From c51f3136ddad6b3cc689f397cffec13bead3cf3f Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Sun, 27 Apr 2025 14:51:13 -0500 Subject: [PATCH 0102/1435] Add tooltip for stream profile and add clarity to user-agent. --- frontend/src/pages/Stats.jsx | 19 ++++++++++++++----- 1 file changed, 14 insertions(+), 5 deletions(-) diff --git a/frontend/src/pages/Stats.jsx b/frontend/src/pages/Stats.jsx index ef15d605..3fb818e3 100644 --- a/frontend/src/pages/Stats.jsx +++ b/frontend/src/pages/Stats.jsx @@ -299,7 +299,14 @@ const ChannelCard = ({ channel, clients, stopClient, stopChannel, logos, channel
), - renderDetailPanel: ({ row }) => {row.original.user_agent}, + renderDetailPanel: ({ row }) => ( + + + User Agent: + {row.original.user_agent || "Unknown"} + + + ), mantineExpandButtonProps: ({ row, table }) => ({ size: 'xs', style: { @@ -407,10 +414,12 @@ const ChannelCard = ({ channel, clients, stopClient, stopChannel, logos, channel {channelName} - - + + + + {/* Display M3U profile information */} From 9e99de77ec2cfdb27e8cd268d71b11b72f978597 Mon Sep 17 00:00:00 2001 From: GitHub Actions Date: Sun, 27 Apr 2025 19:51:37 +0000 Subject: [PATCH 0103/1435] Increment build number to 35 [skip ci] --- version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/version.py b/version.py index 5f76a589..a05aeda2 100644 --- a/version.py +++ b/version.py @@ -2,4 +2,4 @@ Dispatcharr version information. """ __version__ = '0.3.3' # Follow semantic versioning (MAJOR.MINOR.PATCH) -__build__ = '34' # Auto-incremented on builds +__build__ = '35' # Auto-incremented on builds From d59c8a9e3369ff38a1f2daa24bd2e325283d836d Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Sun, 27 Apr 2025 15:52:10 -0500 Subject: [PATCH 0104/1435] Properly track current stream id during stream switches. --- apps/proxy/ts_proxy/services/channel_service.py | 5 ++++- apps/proxy/ts_proxy/stream_manager.py | 14 +++++++++++--- 2 files changed, 15 insertions(+), 4 deletions(-) diff --git a/apps/proxy/ts_proxy/services/channel_service.py b/apps/proxy/ts_proxy/services/channel_service.py index 9aa8c66b..bd1f2f81 100644 --- a/apps/proxy/ts_proxy/services/channel_service.py +++ b/apps/proxy/ts_proxy/services/channel_service.py @@ -181,7 +181,7 @@ class ChannelService: old_url = manager.url # Update the stream - success = manager.update_url(new_url) + success = manager.update_url(new_url, stream_id) logger.info(f"Stream URL changed from {old_url} to {new_url}, result: {success}") result.update({ @@ -440,6 +440,9 @@ class ChannelService: if m3u_profile_id: metadata[ChannelMetadataField.M3U_PROFILE] = str(m3u_profile_id) + # Also update the stream switch time field + metadata[ChannelMetadataField.STREAM_SWITCH_TIME] = str(time.time()) + # Use the appropriate method based on the key type if key_type == 'hash': proxy_server.redis_client.hset(metadata_key, mapping=metadata) diff --git a/apps/proxy/ts_proxy/stream_manager.py b/apps/proxy/ts_proxy/stream_manager.py index 9d2847c1..2615758f 100644 --- a/apps/proxy/ts_proxy/stream_manager.py +++ b/apps/proxy/ts_proxy/stream_manager.py @@ -544,7 +544,7 @@ class StreamManager: # Set running to false to ensure thread exits self.running = False - def update_url(self, new_url): + def update_url(self, new_url, stream_id=None): """Update stream URL and reconnect with proper cleanup for both HTTP and transcode sessions""" if new_url == self.url: logger.info(f"URL unchanged: {new_url}") @@ -568,6 +568,14 @@ class StreamManager: self.url = new_url self.connected = False + # Update stream ID if provided + if stream_id: + old_stream_id = self.current_stream_id + self.current_stream_id = stream_id + # Add stream ID to tried streams for proper tracking + self.tried_stream_ids.add(stream_id) + logger.info(f"Updated stream ID from {old_stream_id} to {stream_id} for channel {self.buffer.channel_id}") + # Reset retry counter to allow immediate reconnect self.retry_count = 0 @@ -1005,7 +1013,7 @@ class StreamManager: logger.info(f"Stream metadata updated for channel {self.channel_id} to stream ID {stream_id}") # IMPORTANT: Just update the URL, don't stop the channel or release resources - 
switch_result = self.update_url(new_url) + switch_result = self.update_url(new_url, stream_id) if not switch_result: logger.error(f"Failed to update URL for stream ID {stream_id}") return False @@ -1015,4 +1023,4 @@ class StreamManager: except Exception as e: logger.error(f"Error trying next stream for channel {self.channel_id}: {e}", exc_info=True) - return False + return False \ No newline at end of file From 768ca0e353375460decdfc19aa6834c3a6cbaa95 Mon Sep 17 00:00:00 2001 From: GitHub Actions Date: Sun, 27 Apr 2025 20:53:27 +0000 Subject: [PATCH 0105/1435] Increment build number to 36 [skip ci] --- version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/version.py b/version.py index a05aeda2..4b1e4acc 100644 --- a/version.py +++ b/version.py @@ -2,4 +2,4 @@ Dispatcharr version information. """ __version__ = '0.3.3' # Follow semantic versioning (MAJOR.MINOR.PATCH) -__build__ = '35' # Auto-incremented on builds +__build__ = '36' # Auto-incremented on builds From c049e48c0877f40fcfeb42a2ec248d50afea6179 Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Sun, 27 Apr 2025 17:46:27 -0500 Subject: [PATCH 0106/1435] Use timestamp instead of build number increase. --- .github/workflows/ci.yml | 23 +++++++---------------- core/api_views.py | 4 ++-- docker/build-dev.sh | 6 ++---- frontend/src/components/Sidebar.jsx | 6 +++--- version.py | 2 +- 5 files changed, 15 insertions(+), 26 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 8aa06d0d..62f01629 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -44,28 +44,20 @@ jobs: username: ${{ github.actor }} password: ${{ secrets.GITHUB_TOKEN }} - - name: Increment Build Number - if: steps.check_actor.outputs.is_bot != 'true' - id: increment_build + - name: Generate timestamp for build + id: timestamp run: | - python scripts/increment_build.py - BUILD=$(python -c "import version; print(version.__build__)") - echo "build=${BUILD}" >> $GITHUB_OUTPUT + TIMESTAMP=$(date -u +'%Y%m%d%H%M%S') + echo "timestamp=${TIMESTAMP}" >> $GITHUB_OUTPUT - - name: Commit Build Number Update - if: steps.check_actor.outputs.is_bot != 'true' - run: | - git add version.py - git commit -m "Increment build number to ${{ steps.increment_build.outputs.build }} [skip ci]" - git push + # Update the timestamp in version.py + sed -i "s/__timestamp__ = None/__timestamp__ = '${TIMESTAMP}'/" version.py - name: Extract version info id: version run: | VERSION=$(python -c "import version; print(version.__version__)") - BUILD=$(python -c "import version; print(version.__build__)") echo "version=${VERSION}" >> $GITHUB_OUTPUT - echo "build=${BUILD}" >> $GITHUB_OUTPUT echo "sha_short=${GITHUB_SHA::7}" >> $GITHUB_OUTPUT - name: Set repository and image metadata @@ -98,7 +90,6 @@ jobs: echo "is_fork=true" >> $GITHUB_OUTPUT else echo "is_fork=false" >> $GITHUB_OUTPUT - fi - name: Build and push Docker image uses: docker/build-push-action@v4 @@ -108,7 +99,7 @@ jobs: platforms: linux/amd64 # Fast build - amd64 only tags: | ghcr.io/${{ steps.meta.outputs.repo_owner }}/${{ steps.meta.outputs.repo_name }}:${{ steps.meta.outputs.branch_tag }} - ghcr.io/${{ steps.meta.outputs.repo_owner }}/${{ steps.meta.outputs.repo_name }}:${{ steps.version.outputs.version }}-${{ steps.version.outputs.build }} + ghcr.io/${{ steps.meta.outputs.repo_owner }}/${{ steps.meta.outputs.repo_name }}:${{ steps.version.outputs.version }}-${{ steps.timestamp.outputs.timestamp }} ghcr.io/${{ steps.meta.outputs.repo_owner }}/${{ steps.meta.outputs.repo_name 
}}:${{ steps.version.outputs.sha_short }} build-args: | BRANCH=${{ github.ref_name }} diff --git a/core/api_views.py b/core/api_views.py index d9c0aba4..7f3ecf57 100644 --- a/core/api_views.py +++ b/core/api_views.py @@ -95,8 +95,8 @@ def environment(request): @api_view(['GET']) def version(request): # Import version information - from version import __version__, __build__ + from version import __version__, __timestamp__ return Response({ 'version': __version__, - 'build': __build__, + 'timestamp': __timestamp__, }) diff --git a/docker/build-dev.sh b/docker/build-dev.sh index 65d643a7..b02c314e 100755 --- a/docker/build-dev.sh +++ b/docker/build-dev.sh @@ -3,11 +3,9 @@ docker build --build-arg BRANCH=dev -t dispatcharr/dispatcharr:dev -f Dockerfile # Get version information VERSION=$(python -c "import sys; sys.path.append('..'); import version; print(version.__version__)") -BUILD=$(python -c "import sys; sys.path.append('..'); import version; print(version.__build__)") -# Build with version tags +# Build with version tag docker build --build-arg BRANCH=dev \ -t dispatcharr/dispatcharr:dev \ - -t dispatcharr/dispatcharr:${VERSION}-${BUILD} \ + -t dispatcharr/dispatcharr:${VERSION} \ -f Dockerfile .. -. diff --git a/frontend/src/components/Sidebar.jsx b/frontend/src/components/Sidebar.jsx index b5dc15b2..eb5a2226 100644 --- a/frontend/src/components/Sidebar.jsx +++ b/frontend/src/components/Sidebar.jsx @@ -67,7 +67,7 @@ const Sidebar = ({ collapsed, toggleDrawer, drawerWidth, miniDrawerWidth }) => { const environment = useSettingsStore((s) => s.environment); const isAuthenticated = useAuthStore((s) => s.isAuthenticated); const publicIPRef = useRef(null); - const [appVersion, setAppVersion] = useState({ version: '', build: '' }); + const [appVersion, setAppVersion] = useState({ version: '', timestamp: null }); // Fetch environment settings including version on component mount useEffect(() => { @@ -89,7 +89,7 @@ const Sidebar = ({ collapsed, toggleDrawer, drawerWidth, miniDrawerWidth }) => { const versionData = await API.getVersion(); setAppVersion({ version: versionData.version || '', - build: versionData.build || '', + timestamp: versionData.timestamp || null, }); } catch (error) { console.error('Failed to fetch version information:', error); @@ -266,7 +266,7 @@ const Sidebar = ({ collapsed, toggleDrawer, drawerWidth, miniDrawerWidth }) => { {!collapsed && ( v{appVersion?.version || '0.0.0'} - {appVersion?.build !== '0' ? `-${appVersion?.build}` : ''} + {appVersion?.timestamp ? `-${appVersion.timestamp}` : ''} )} diff --git a/version.py b/version.py index 4b1e4acc..9339fcbd 100644 --- a/version.py +++ b/version.py @@ -2,4 +2,4 @@ Dispatcharr version information. """ __version__ = '0.3.3' # Follow semantic versioning (MAJOR.MINOR.PATCH) -__build__ = '36' # Auto-incremented on builds +__timestamp__ = None # Set during CI/CD build process From a8a6322e3057fe60e4f5d9fb43a69c1287d21652 Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Sun, 27 Apr 2025 17:50:25 -0500 Subject: [PATCH 0107/1435] Missed closing if statement. 
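The previous commit's rework of the metadata step accidentally dropped the "fi" that closes the fork-detection branch, leaving that step's embedded shell with an unterminated if/else (the block that writes is_fork=true or is_fork=false to $GITHUB_OUTPUT), so the workflow failed to parse. This restores the closing keyword.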
--- .github/workflows/ci.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 62f01629..20d3f150 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -90,6 +90,7 @@ jobs: echo "is_fork=true" >> $GITHUB_OUTPUT else echo "is_fork=false" >> $GITHUB_OUTPUT + fi - name: Build and push Docker image uses: docker/build-push-action@v4 From cb62a13c40b41d8f6201e64fcc893eee2d0a2c1c Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Sun, 27 Apr 2025 18:27:33 -0500 Subject: [PATCH 0108/1435] Attempt at fixing timestamp not being added to version. --- .github/workflows/ci.yml | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 20d3f150..4ee8db93 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -51,7 +51,13 @@ jobs: echo "timestamp=${TIMESTAMP}" >> $GITHUB_OUTPUT # Update the timestamp in version.py - sed -i "s/__timestamp__ = None/__timestamp__ = '${TIMESTAMP}'/" version.py + echo "Updating timestamp to ${TIMESTAMP} in version.py" + sed -i "s/__timestamp__ = None.*/__timestamp__ = '${TIMESTAMP}' # Set during CI/CD build process/" version.py + cat version.py # Verify the file was updated correctly + + # Make the version.py change part of the Docker build context + git add version.py + git commit -m "Update build timestamp [skip ci]" || echo "No changes to commit" - name: Extract version info id: version @@ -90,7 +96,6 @@ jobs: echo "is_fork=true" >> $GITHUB_OUTPUT else echo "is_fork=false" >> $GITHUB_OUTPUT - fi - name: Build and push Docker image uses: docker/build-push-action@v4 From 202ef265dea0bfa9293e831d5865dab753147a03 Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Sun, 27 Apr 2025 18:30:15 -0500 Subject: [PATCH 0109/1435] Fix sed command delimiter for updating timestamp in version.py --- .github/workflows/ci.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 4ee8db93..6a98d50f 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -52,7 +52,7 @@ jobs: # Update the timestamp in version.py echo "Updating timestamp to ${TIMESTAMP} in version.py" - sed -i "s/__timestamp__ = None.*/__timestamp__ = '${TIMESTAMP}' # Set during CI/CD build process/" version.py + sed -i "s|__timestamp__ = None.*|__timestamp__ = '${TIMESTAMP}' # Set during CI/CD build process|" version.py cat version.py # Verify the file was updated correctly # Make the version.py change part of the Docker build context From a81daaea44bf46e472561512258f82ce3faf0746 Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Sun, 27 Apr 2025 18:32:01 -0500 Subject: [PATCH 0110/1435] IDK WHY THAT KEEPS GETTING DELETED --- .github/workflows/ci.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 6a98d50f..5964f614 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -96,6 +96,7 @@ jobs: echo "is_fork=true" >> $GITHUB_OUTPUT else echo "is_fork=false" >> $GITHUB_OUTPUT + fi - name: Build and push Docker image uses: docker/build-push-action@v4 From 07edf270fb373c1439ddcedc1b1aa459d27e0b67 Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Sun, 27 Apr 2025 18:53:39 -0500 Subject: [PATCH 0111/1435] Refactor CI workflow to update version.py with build timestamp in Dockerfile --- .github/workflows/ci.yml | 10 +--------- docker/Dockerfile | 14 ++++++++++++-- 2 files changed, 13 insertions(+), 11 deletions(-) 
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 5964f614..1418cbf5 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -50,15 +50,6 @@ jobs: TIMESTAMP=$(date -u +'%Y%m%d%H%M%S') echo "timestamp=${TIMESTAMP}" >> $GITHUB_OUTPUT - # Update the timestamp in version.py - echo "Updating timestamp to ${TIMESTAMP} in version.py" - sed -i "s|__timestamp__ = None.*|__timestamp__ = '${TIMESTAMP}' # Set during CI/CD build process|" version.py - cat version.py # Verify the file was updated correctly - - # Make the version.py change part of the Docker build context - git add version.py - git commit -m "Update build timestamp [skip ci]" || echo "No changes to commit" - - name: Extract version info id: version run: | @@ -111,4 +102,5 @@ jobs: build-args: | BRANCH=${{ github.ref_name }} REPO_URL=https://github.com/${{ github.repository }} + TIMESTAMP=${{ steps.timestamp.outputs.timestamp }} file: ./docker/Dockerfile diff --git a/docker/Dockerfile b/docker/Dockerfile index e3f8a165..4d313e2c 100644 --- a/docker/Dockerfile +++ b/docker/Dockerfile @@ -5,6 +5,8 @@ ARG BRANCH=main # This will be overridden by the GitHub Actions workflow # when building the Docker image for production. ARG REPO_URL=https://github.com/Dispatcharr/Dispatcharr +# Add timestamp argument +ARG TIMESTAMP ENV PATH="/dispatcharrpy/bin:$PATH" \ VIRTUAL_ENV=/dispatcharrpy \ @@ -26,8 +28,16 @@ RUN apt-get update && \ virtualenv /dispatcharrpy && \ git clone -b ${BRANCH} ${REPO_URL} /app && \ cd /app && \ - rm -rf .git && \ - cd /app && \ + rm -rf .git + +# Update version.py with build timestamp if provided +RUN if [ -n "$TIMESTAMP" ]; then \ + echo "Updating timestamp to ${TIMESTAMP} in version.py" && \ + sed -i "s|__timestamp__ = None.*|__timestamp__ = '${TIMESTAMP}' # Set during CI/CD build process|" /app/version.py && \ + cat /app/version.py; \ + fi + +RUN cd /app && \ pip install --no-cache-dir -r requirements.txt # Use a dedicated Node.js stage for frontend building From 164f0cdbb510e05f2cb86040634d057196ecb97a Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Sun, 27 Apr 2025 19:15:00 -0500 Subject: [PATCH 0112/1435] Increase thread stack size in uwsgi configuration for improved performance --- docker/uwsgi.ini | 1 + 1 file changed, 1 insertion(+) diff --git a/docker/uwsgi.ini b/docker/uwsgi.ini index 9db61495..326f4b5d 100644 --- a/docker/uwsgi.ini +++ b/docker/uwsgi.ini @@ -28,6 +28,7 @@ static-map = /static=/app/static workers = 4 threads = 4 enable-threads = true +thread-stacksize=512 # Optimize for streaming http = 0.0.0.0:5656 From 0b8f20dc22ae86606a128235237a0b7f62906b3c Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Sun, 27 Apr 2025 19:19:48 -0500 Subject: [PATCH 0113/1435] Fixes not being able to set logo to default. 
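The channel form uses the string '0' as its "no logo" sentinel, but the backend expects null to clear the field; the raw '0' was being sent through, so resetting a channel to the default logo never stuck. The fix normalizes the sentinel before the PATCH, mirroring the existing stream_profile_id handling just above it. A hedged sketch of the shared pattern (the helper name is illustrative; the diff below inlines the check instead):

    // Map the front-end "unset" sentinel ('0' or 0) to null so the backend clears the field.
    const normalizeUnset = (value) => (value === '0' || value === 0 ? null : value);

    payload.logo_id = normalizeUnset(payload.logo_id);
    payload.stream_profile_id = normalizeUnset(payload.stream_profile_id);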
--- frontend/src/api.js | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/frontend/src/api.js b/frontend/src/api.js index 38aac846..e54e628a 100644 --- a/frontend/src/api.js +++ b/frontend/src/api.js @@ -337,6 +337,11 @@ export default class API { payload.stream_profile_id = null; } + // Handle logo_id properly (0 means "no logo") + if (payload.logo_id === '0' || payload.logo_id === 0) { + payload.logo_id = null; + } + // Ensure tvg_id is included properly (not as empty string) if (payload.tvg_id === '') { payload.tvg_id = null; From 248ef90629b0a96740f33a14ebb11533078e3415 Mon Sep 17 00:00:00 2001 From: Nicolas Znamenski Date: Mon, 28 Apr 2025 14:08:04 -0400 Subject: [PATCH 0114/1435] Fixed minor typo --- frontend/src/components/tables/UserAgentsTable.jsx | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/frontend/src/components/tables/UserAgentsTable.jsx b/frontend/src/components/tables/UserAgentsTable.jsx index 2ea2d3b7..4ab1eb34 100644 --- a/frontend/src/components/tables/UserAgentsTable.jsx +++ b/frontend/src/components/tables/UserAgentsTable.jsx @@ -55,7 +55,7 @@ const UserAgentsTable = () => { ), }, { - header: 'Desecription', + header: 'Description', accessorKey: 'description', enableSorting: false, Cell: ({ cell }) => ( From b439eb810c38244aa688e03babc940dd8bfc81c9 Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Mon, 28 Apr 2025 15:05:58 -0500 Subject: [PATCH 0115/1435] Cleanup channel lock instead of stream lock. --- apps/proxy/ts_proxy/stream_generator.py | 11 ++++++----- apps/proxy/ts_proxy/stream_manager.py | 19 ++++++++++--------- apps/proxy/ts_proxy/views.py | 1 - 3 files changed, 16 insertions(+), 15 deletions(-) diff --git a/apps/proxy/ts_proxy/stream_generator.py b/apps/proxy/ts_proxy/stream_generator.py index 9377a079..17e53b9d 100644 --- a/apps/proxy/ts_proxy/stream_generator.py +++ b/apps/proxy/ts_proxy/stream_generator.py @@ -380,14 +380,15 @@ class StreamGenerator: client_count = proxy_server.client_managers[self.channel_id].get_total_client_count() # Only the last client or owner should release the stream if client_count <= 1 and proxy_server.am_i_owner(self.channel_id): - from apps.channels.models import Stream + from apps.channels.models import Channel try: - stream = Stream.objects.get(pk=stream_id) - stream.release_stream() + # Get the channel by UUID + channel = Channel.objects.get(uuid=self.channel_id) + channel.release_stream() stream_released = True - logger.debug(f"[{self.client_id}] Released stream {stream_id} for channel {self.channel_id}") + logger.debug(f"[{self.client_id}] Released stream for channel {self.channel_id}") except Exception as e: - logger.error(f"[{self.client_id}] Error releasing stream {stream_id}: {e}") + logger.error(f"[{self.client_id}] Error releasing stream for channel {self.channel_id}: {e}") except Exception as e: logger.error(f"[{self.client_id}] Error checking stream data for release: {e}") diff --git a/apps/proxy/ts_proxy/stream_manager.py b/apps/proxy/ts_proxy/stream_manager.py index 2615758f..8f7b1817 100644 --- a/apps/proxy/ts_proxy/stream_manager.py +++ b/apps/proxy/ts_proxy/stream_manager.py @@ -502,15 +502,6 @@ class StreamManager: owner_key = RedisKeys.channel_owner(self.channel_id) current_owner = self.buffer.redis_client.get(owner_key) - if current_owner and current_owner.decode('utf-8') == self.worker_id: - try: - from apps.channels.models import Stream - stream = Stream.objects.get(pk=self.current_stream_id) - stream.release_stream() - logger.info(f"Released stream {self.current_stream_id} for 
channel {self.channel_id}") - except Exception as e: - logger.error(f"Error releasing stream {self.current_stream_id}: {e}") - # Cancel all buffer check timers for timer in list(self._buffer_check_timers): try: @@ -552,6 +543,16 @@ class StreamManager: logger.info(f"Switching stream URL from {self.url} to {new_url}") + # Release old stream resources if we have a current stream ID + if self.current_stream_id: + try: + from apps.channels.models import Stream + stream = Stream.objects.get(pk=self.current_stream_id) + stream.release_stream() + logger.info(f"Released stream {self.current_stream_id} for channel {self.channel_id}") + except Exception as e: + logger.error(f"Error releasing stream {self.current_stream_id}: {e}") + # CRITICAL: Set a flag to prevent immediate reconnection with old URL self.url_switching = True diff --git a/apps/proxy/ts_proxy/views.py b/apps/proxy/ts_proxy/views.py index 87a8e51b..222da4e3 100644 --- a/apps/proxy/ts_proxy/views.py +++ b/apps/proxy/ts_proxy/views.py @@ -261,7 +261,6 @@ def stream_ts(request, channel_id): logger.info(f"[{client_id}] Successfully initialized channel {channel_id}") channel_initializing = True - logger.info(f"[{client_id}] Channel {channel_id} initialization started") # Register client - can do this regardless of initialization state # Create local resources if needed From cd1da5a61c1fb8b4dc8115c5b39b3a2a02bb95d2 Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Mon, 28 Apr 2025 17:25:03 -0500 Subject: [PATCH 0116/1435] Added a new channel model to update m3u profile counts and utilize it during stream switches. --- apps/channels/models.py | 47 +++++++++++++++++++++++++++ apps/proxy/ts_proxy/stream_manager.py | 26 +++++++++++---- 2 files changed, 66 insertions(+), 7 deletions(-) diff --git a/apps/channels/models.py b/apps/channels/models.py index 0b66c468..4485936e 100644 --- a/apps/channels/models.py +++ b/apps/channels/models.py @@ -407,6 +407,53 @@ class Channel(models.Model): if current_count > 0: redis_client.decr(profile_connections_key) + def update_stream_profile(self, new_profile_id): + """ + Updates the profile for the current stream and adjusts connection counts. 
+ + Args: + new_profile_id: The ID of the new stream profile to use + + Returns: + bool: True if successful, False otherwise + """ + redis_client = RedisClient.get_client() + + # Get current stream ID + stream_id_bytes = redis_client.get(f"channel_stream:{self.id}") + if not stream_id_bytes: + logger.debug("No active stream found for channel") + return False + + stream_id = int(stream_id_bytes) + + # Get current profile ID + current_profile_id_bytes = redis_client.get(f"stream_profile:{stream_id}") + if not current_profile_id_bytes: + logger.debug("No profile found for current stream") + return False + + current_profile_id = int(current_profile_id_bytes) + + # Don't do anything if the profile is already set to the requested one + if current_profile_id == new_profile_id: + return True + + # Decrement connection count for old profile + old_profile_connections_key = f"profile_connections:{current_profile_id}" + old_count = int(redis_client.get(old_profile_connections_key) or 0) + if old_count > 0: + redis_client.decr(old_profile_connections_key) + + # Update the profile mapping + redis_client.set(f"stream_profile:{stream_id}", new_profile_id) + + # Increment connection count for new profile + new_profile_connections_key = f"profile_connections:{new_profile_id}" + redis_client.incr(new_profile_connections_key) + logger.info(f"Updated stream {stream_id} profile from {current_profile_id} to {new_profile_id}") + return True + class ChannelProfile(models.Model): name = models.CharField(max_length=100, unique=True) diff --git a/apps/proxy/ts_proxy/stream_manager.py b/apps/proxy/ts_proxy/stream_manager.py index 8f7b1817..6fd2b4b8 100644 --- a/apps/proxy/ts_proxy/stream_manager.py +++ b/apps/proxy/ts_proxy/stream_manager.py @@ -543,15 +543,27 @@ class StreamManager: logger.info(f"Switching stream URL from {self.url} to {new_url}") - # Release old stream resources if we have a current stream ID - if self.current_stream_id: + # Import both models for proper resource management + from apps.channels.models import Stream, Channel + + # Update stream profile if we're switching streams + if self.current_stream_id and stream_id and self.current_stream_id != stream_id: try: - from apps.channels.models import Stream - stream = Stream.objects.get(pk=self.current_stream_id) - stream.release_stream() - logger.info(f"Released stream {self.current_stream_id} for channel {self.channel_id}") + # Get the channel by UUID + channel = Channel.objects.get(uuid=self.channel_id) + + # Get stream to find its profile + new_stream = Stream.objects.get(pk=stream_id) + + # Use the new method to update the profile and manage connection counts + if new_stream.m3u_account_id: + success = channel.update_stream_profile(new_stream.m3u_account_id) + if success: + logger.debug(f"Updated stream profile for channel {self.channel_id} to use profile from stream {stream_id}") + else: + logger.warning(f"Failed to update stream profile for channel {self.channel_id}") except Exception as e: - logger.error(f"Error releasing stream {self.current_stream_id}: {e}") + logger.error(f"Error updating stream profile for channel {self.channel_id}: {e}") # CRITICAL: Set a flag to prevent immediate reconnection with old URL self.url_switching = True From 482803b24143569a0377e165fefcc22fc2d9d754 Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Mon, 28 Apr 2025 17:29:27 -0500 Subject: [PATCH 0117/1435] Removed unnecessary logs. 
--- apps/proxy/ts_proxy/channel_status.py | 4 ---- 1 file changed, 4 deletions(-) diff --git a/apps/proxy/ts_proxy/channel_status.py b/apps/proxy/ts_proxy/channel_status.py index 43e01df8..dd18d922 100644 --- a/apps/proxy/ts_proxy/channel_status.py +++ b/apps/proxy/ts_proxy/channel_status.py @@ -59,7 +59,6 @@ class ChannelStatus: stream = Stream.objects.filter(id=stream_id).first() if stream: info['stream_name'] = stream.name - logger.debug(f"Added stream name '{stream.name}' for stream ID {stream_id}") except (ImportError, DatabaseError) as e: logger.warning(f"Failed to get stream name for ID {stream_id}: {e}") except ValueError: @@ -78,7 +77,6 @@ class ChannelStatus: m3u_profile = M3UAccountProfile.objects.filter(id=m3u_profile_id).first() if m3u_profile: info['m3u_profile_name'] = m3u_profile.name - logger.debug(f"Added M3U profile name '{m3u_profile.name}' for profile ID {m3u_profile_id}") except (ImportError, DatabaseError) as e: logger.warning(f"Failed to get M3U profile name for ID {m3u_profile_id}: {e}") except ValueError: @@ -337,7 +335,6 @@ class ChannelStatus: stream = Stream.objects.filter(id=stream_id).first() if stream: info['stream_name'] = stream.name - logger.debug(f"Added stream name '{stream.name}' for stream ID {stream_id}") except (ImportError, DatabaseError) as e: logger.warning(f"Failed to get stream name for ID {stream_id}: {e}") except ValueError: @@ -412,7 +409,6 @@ class ChannelStatus: m3u_profile = M3UAccountProfile.objects.filter(id=m3u_profile_id).first() if m3u_profile: info['m3u_profile_name'] = m3u_profile.name - logger.debug(f"Added M3U profile name '{m3u_profile.name}' for profile ID {m3u_profile_id}") except (ImportError, DatabaseError) as e: logger.warning(f"Failed to get M3U profile name for ID {m3u_profile_id}: {e}") except ValueError: From 06d30667830320342fec139cf944b34f06219808 Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Mon, 28 Apr 2025 20:22:44 -0500 Subject: [PATCH 0118/1435] Improve logo handling in LogoViewSet: set default content type and add Content-Disposition for inline display. 
--- apps/channels/api_views.py | 18 ++++++++++++++---- 1 file changed, 14 insertions(+), 4 deletions(-) diff --git a/apps/channels/api_views.py b/apps/channels/api_views.py index 78907f8f..f3fb800a 100644 --- a/apps/channels/api_views.py +++ b/apps/channels/api_views.py @@ -629,14 +629,24 @@ class LogoViewSet(viewsets.ModelViewSet): if logo_url.startswith("/data"): # Local file if not os.path.exists(logo_url): raise Http404("Image not found") - mimetype = mimetypes.guess_type(logo_url) - return FileResponse(open(logo_url, "rb"), content_type=mimetype) + + # Get proper mime type (first item of the tuple) + content_type, _ = mimetypes.guess_type(logo_url) + if not content_type: + content_type = 'image/jpeg' # Default to a common image type + + # Use context manager and set Content-Disposition to inline + response = StreamingHttpResponse(open(logo_url, "rb"), content_type=content_type) + response['Content-Disposition'] = 'inline; filename="{}"'.format(os.path.basename(logo_url)) + return response else: # Remote image try: remote_response = requests.get(logo_url, stream=True) if remote_response.status_code == 200: - return StreamingHttpResponse(remote_response.iter_content(chunk_size=8192), content_type=remote_response.headers['Content-Type']) + response = StreamingHttpResponse(remote_response.iter_content(chunk_size=8192), content_type=remote_response.headers['Content-Type']) + response['Content-Disposition'] = 'inline; filename="{}"'.format(os.path.basename(logo_url)) + return response raise Http404("Remote image not found") except requests.RequestException: raise Http404("Error fetching remote image") @@ -679,7 +689,7 @@ class BulkUpdateChannelMembershipAPIView(APIView): if serializer.is_valid(): updates = serializer.validated_data['channels'] - channel_ids = [entry['channel_id'] for entry in updates] + channel_ids = [entry['channel_id'] for entry['channel_id'] in updates] memberships = ChannelProfileMembership.objects.filter( channel_profile=channel_profile, From 4cf4a0d68dbcb6057ba0a167dcc1aec8f1499045 Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Mon, 28 Apr 2025 20:26:54 -0500 Subject: [PATCH 0119/1435] Reverted unintended change. --- apps/channels/api_views.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/apps/channels/api_views.py b/apps/channels/api_views.py index f3fb800a..821b0ef4 100644 --- a/apps/channels/api_views.py +++ b/apps/channels/api_views.py @@ -689,7 +689,8 @@ class BulkUpdateChannelMembershipAPIView(APIView): if serializer.is_valid(): updates = serializer.validated_data['channels'] - channel_ids = [entry['channel_id'] for entry['channel_id'] in updates] + channel_ids = [entry['channel_id'] for entry in updates] + memberships = ChannelProfileMembership.objects.filter( channel_profile=channel_profile, From ee2c2194f81c26be4ec315fe1fa64f1bc8667a81 Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Mon, 28 Apr 2025 20:36:09 -0500 Subject: [PATCH 0120/1435] Mimetype guessing as a fallback for remote images. 
--- apps/channels/api_views.py | 13 ++++++++++++- 1 file changed, 12 insertions(+), 1 deletion(-) diff --git a/apps/channels/api_views.py b/apps/channels/api_views.py index 821b0ef4..51952541 100644 --- a/apps/channels/api_views.py +++ b/apps/channels/api_views.py @@ -644,7 +644,18 @@ class LogoViewSet(viewsets.ModelViewSet): try: remote_response = requests.get(logo_url, stream=True) if remote_response.status_code == 200: - response = StreamingHttpResponse(remote_response.iter_content(chunk_size=8192), content_type=remote_response.headers['Content-Type']) + # Try to get content type from response headers first + content_type = remote_response.headers.get('Content-Type') + + # If no content type in headers or it's empty, guess based on URL + if not content_type: + content_type, _ = mimetypes.guess_type(logo_url) + + # If still no content type, default to common image type + if not content_type: + content_type = 'image/jpeg' + + response = StreamingHttpResponse(remote_response.iter_content(chunk_size=8192), content_type=content_type) response['Content-Disposition'] = 'inline; filename="{}"'.format(os.path.basename(logo_url)) return response raise Http404("Remote image not found") From 9b443a0a3ed8408902a401f3fc8cc30665f83196 Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Tue, 29 Apr 2025 09:50:57 -0500 Subject: [PATCH 0121/1435] Adds m3u profile name to stream name. --- frontend/src/pages/Stats.jsx | 36 +++++++++++++++++++++++++++++++----- 1 file changed, 31 insertions(+), 5 deletions(-) diff --git a/frontend/src/pages/Stats.jsx b/frontend/src/pages/Stats.jsx index 3fb818e3..fc6705b0 100644 --- a/frontend/src/pages/Stats.jsx +++ b/frontend/src/pages/Stats.jsx @@ -34,6 +34,7 @@ import duration from 'dayjs/plugin/duration'; import relativeTime from 'dayjs/plugin/relativeTime'; import { Sparkline } from '@mantine/charts'; import useStreamProfilesStore from '../store/streamProfiles'; +import usePlaylistsStore from '../store/playlists'; // Add this import import { useLocation } from 'react-router-dom'; import { notifications } from '@mantine/notifications'; @@ -84,6 +85,22 @@ const ChannelCard = ({ channel, clients, stopClient, stopChannel, logos, channel const [activeStreamId, setActiveStreamId] = useState(null); const [currentM3UProfile, setCurrentM3UProfile] = useState(null); // Add state for current M3U profile + // Get M3U account data from the playlists store + const m3uAccounts = usePlaylistsStore((s) => s.playlists); + + // Create a map of M3U account IDs to names for quick lookup + const m3uAccountsMap = useMemo(() => { + const map = {}; + if (m3uAccounts && Array.isArray(m3uAccounts)) { + m3uAccounts.forEach(account => { + if (account.id) { + map[account.id] = account.name; + } + }); + } + return map; + }, [m3uAccounts]); + // Safety check - if channel doesn't have required data, don't render if (!channel || !channel.channel_id) { return null; @@ -346,13 +363,22 @@ const ChannelCard = ({ channel, clients, stopClient, stopChannel, logos, channel const m3uProfileName = currentM3UProfile?.name || channel.m3u_profile?.name || channel.m3u_profile_name || - 'Default M3U'; + 'Unknown M3U Profile'; // Create select options for available streams - const streamOptions = availableStreams.map(stream => ({ - value: stream.id.toString(), - label: `${stream.name || `Stream #${stream.id}`}`, // Make sure stream name is clear - })); + const streamOptions = availableStreams.map(stream => { + // Get account name from our mapping if it exists + const accountName = stream.m3u_account && 
m3uAccountsMap[stream.m3u_account] + ? m3uAccountsMap[stream.m3u_account] + : stream.m3u_account + ? `M3U #${stream.m3u_account}` + : 'Unknown M3U'; + + return { + value: stream.id.toString(), + label: `${stream.name || `Stream #${stream.id}`} [${accountName}]`, + }; + }); return ( Date: Tue, 29 Apr 2025 11:10:26 -0500 Subject: [PATCH 0122/1435] Add key prop to row Box in CustomTableBody for improved rendering --- frontend/src/components/tables/CustomTable/CustomTableBody.jsx | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/frontend/src/components/tables/CustomTable/CustomTableBody.jsx b/frontend/src/components/tables/CustomTable/CustomTableBody.jsx index ac7000d3..f3351541 100644 --- a/frontend/src/components/tables/CustomTable/CustomTableBody.jsx +++ b/frontend/src/components/tables/CustomTable/CustomTableBody.jsx @@ -73,7 +73,7 @@ const CustomTableBody = ({ const renderTableBodyRow = (row, index, style = {}) => { return ( - + Date: Tue, 29 Apr 2025 12:20:26 -0500 Subject: [PATCH 0123/1435] Allows holding shift and selecting rows. --- .../tables/CustomTable/CustomTable.jsx | 3 +- .../components/tables/CustomTable/index.jsx | 50 ++++++++++++++++--- 2 files changed, 45 insertions(+), 8 deletions(-) diff --git a/frontend/src/components/tables/CustomTable/CustomTable.jsx b/frontend/src/components/tables/CustomTable/CustomTable.jsx index 75e2445e..e1c05ff4 100644 --- a/frontend/src/components/tables/CustomTable/CustomTable.jsx +++ b/frontend/src/components/tables/CustomTable/CustomTable.jsx @@ -1,6 +1,6 @@ import { Box, Flex } from '@mantine/core'; import CustomTableHeader from './CustomTableHeader'; -import { useCallback, useState } from 'react'; +import { useCallback, useState, useRef } from 'react'; import { flexRender } from '@tanstack/react-table'; import table from '../../../helpers/table'; import CustomTableBody from './CustomTableBody'; @@ -11,7 +11,6 @@ const CustomTable = ({ table }) => { className="divTable table-striped" style={{ width: '100%', - // height: '100%', // ONLY required when using virtual tables display: 'flex', flexDirection: 'column', }} diff --git a/frontend/src/components/tables/CustomTable/index.jsx b/frontend/src/components/tables/CustomTable/index.jsx index 7be3a8e7..3303dffe 100644 --- a/frontend/src/components/tables/CustomTable/index.jsx +++ b/frontend/src/components/tables/CustomTable/index.jsx @@ -21,6 +21,7 @@ const useTable = ({ }) => { const [selectedTableIds, setSelectedTableIds] = useState([]); const [expandedRowIds, setExpandedRowIds] = useState([]); + const [lastClickedId, setLastClickedId] = useState(null); const rowCount = allRowIds.length; @@ -77,6 +78,34 @@ const useTable = ({ updateSelectedTableIds([row.original.id]); }; + // Handle the shift+click selection + const handleShiftSelect = (rowId, isShiftKey) => { + if (!isShiftKey || lastClickedId === null) { + // Normal selection behavior + setLastClickedId(rowId); + return false; // Return false to indicate we're not handling it + } + + // Handle shift-click range selection + const currentIndex = allRowIds.indexOf(rowId); + const lastIndex = allRowIds.indexOf(lastClickedId); + + if (currentIndex === -1 || lastIndex === -1) return false; + + // Determine range + const startIndex = Math.min(currentIndex, lastIndex); + const endIndex = Math.max(currentIndex, lastIndex); + const rangeIds = allRowIds.slice(startIndex, endIndex + 1); + + // Preserve existing selections outside the range + const idsOutsideRange = selectedTableIds.filter(id => !rangeIds.includes(id)); + const 
newSelection = [...new Set([...rangeIds, ...idsOutsideRange])]; + updateSelectedTableIds(newSelection); + + setLastClickedId(rowId); + return true; // Return true to indicate we've handled it + }; + const renderBodyCell = ({ row, cell }) => { if (bodyCellRenderFns[cell.column.id]) { return bodyCellRenderFns[cell.column.id]({ row, cell }); @@ -91,13 +120,22 @@ const useTable = ({ size="xs" checked={selectedTableIdsSet.has(row.original.id)} onChange={(e) => { - const newSet = new Set(selectedTableIds); - if (e.target.checked) { - newSet.add(row.original.id); - } else { - newSet.delete(row.original.id); + const rowId = row.original.id; + + // Get shift key state from the event + const isShiftKey = e.nativeEvent.shiftKey; + + // Try to handle with shift-select logic first + if (!handleShiftSelect(rowId, isShiftKey)) { + // If not handled by shift-select, do regular toggle + const newSet = new Set(selectedTableIds); + if (e.target.checked) { + newSet.add(rowId); + } else { + newSet.delete(rowId); + } + updateSelectedTableIds([...newSet]); } - updateSelectedTableIds([...newSet]); }} />
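Distilled from the patch above, the shift+click behavior comes down to: select every
row between the last-clicked row and the current one, and keep any prior selections
that fall outside that range. A minimal Python restatement of the same logic (a
hypothetical helper for illustration, not code from this repository):

    def shift_select(all_row_ids, selected, last_clicked, clicked):
        # No usable anchor: the caller falls back to a plain single-row toggle
        if last_clicked not in all_row_ids or clicked not in all_row_ids:
            return None
        i, j = sorted((all_row_ids.index(last_clicked), all_row_ids.index(clicked)))
        range_ids = all_row_ids[i:j + 1]
        # Preserve selections outside the range, deduplicating like the Set above
        outside = [rid for rid in selected if rid not in range_ids]
        return list(dict.fromkeys(range_ids + outside))

    # Rows 2..5 become selected and the earlier pick of row 9 survives
    assert shift_select(list(range(12)), [9], last_clicked=2, clicked=5) == [2, 3, 4, 5, 9]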
From 9be42ce53254cc3da265541e27393910d8a756ac Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Tue, 29 Apr 2025 12:47:16 -0500 Subject: [PATCH 0124/1435] Don't select text when shift is held --- .../components/tables/CustomTable/index.jsx | 62 +++++- frontend/src/components/tables/table.css | 207 ++++++++++++------ 2 files changed, 194 insertions(+), 75 deletions(-) diff --git a/frontend/src/components/tables/CustomTable/index.jsx b/frontend/src/components/tables/CustomTable/index.jsx index 3303dffe..0b1d824f 100644 --- a/frontend/src/components/tables/CustomTable/index.jsx +++ b/frontend/src/components/tables/CustomTable/index.jsx @@ -7,7 +7,7 @@ import { getCoreRowModel, flexRender, } from '@tanstack/react-table'; -import { useCallback, useMemo, useState } from 'react'; +import { useCallback, useMemo, useState, useEffect } from 'react'; import { ChevronDown, ChevronRight } from 'lucide-react'; const useTable = ({ @@ -22,6 +22,63 @@ const useTable = ({ const [selectedTableIds, setSelectedTableIds] = useState([]); const [expandedRowIds, setExpandedRowIds] = useState([]); const [lastClickedId, setLastClickedId] = useState(null); + const [isShiftKeyDown, setIsShiftKeyDown] = useState(false); + + // Event handlers for shift key detection with improved handling + const handleKeyDown = useCallback((e) => { + if (e.key === 'Shift') { + setIsShiftKeyDown(true); + // Apply the class to disable text selection immediately + document.body.classList.add('shift-key-active'); + // Set a style attribute directly on body for extra assurance + document.body.style.userSelect = 'none'; + document.body.style.webkitUserSelect = 'none'; + document.body.style.msUserSelect = 'none'; + document.body.style.cursor = 'pointer'; + } + }, []); + + const handleKeyUp = useCallback((e) => { + if (e.key === 'Shift') { + setIsShiftKeyDown(false); + // Remove the class when shift is released + document.body.classList.remove('shift-key-active'); + // Reset the style attributes + document.body.style.removeProperty('user-select'); + document.body.style.removeProperty('-webkit-user-select'); + document.body.style.removeProperty('-ms-user-select'); + document.body.style.removeProperty('cursor'); + } + }, []); + + // Add global event listeners for shift key detection with improved cleanup + useEffect(() => { + window.addEventListener('keydown', handleKeyDown); + window.addEventListener('keyup', handleKeyUp); + + // Also detect blur/focus events to handle cases where shift is held and window loses focus + window.addEventListener('blur', () => { + setIsShiftKeyDown(false); + document.body.classList.remove('shift-key-active'); + document.body.style.removeProperty('user-select'); + document.body.style.removeProperty('-webkit-user-select'); + document.body.style.removeProperty('-ms-user-select'); + document.body.style.removeProperty('cursor'); + }); + + return () => { + window.removeEventListener('keydown', handleKeyDown); + window.removeEventListener('keyup', handleKeyUp); + window.removeEventListener('blur', () => { + setIsShiftKeyDown(false); + document.body.classList.remove('shift-key-active'); + document.body.style.removeProperty('user-select'); + document.body.style.removeProperty('-webkit-user-select'); + document.body.style.removeProperty('-ms-user-select'); + document.body.style.removeProperty('cursor'); + }); + }; + }, [handleKeyDown, handleKeyUp]); const rowCount = allRowIds.length; @@ -175,8 +232,9 @@ const useTable = ({ expandedRowIds, expandedRowRenderer, setSelectedTableIds, + isShiftKeyDown, // Include shift key state 
in the table instance }), - [selectedTableIdsSet, expandedRowIds, allRowIds] + [selectedTableIdsSet, expandedRowIds, allRowIds, isShiftKeyDown] ); return { diff --git a/frontend/src/components/tables/table.css b/frontend/src/components/tables/table.css index 00198499..c3651246 100644 --- a/frontend/src/components/tables/table.css +++ b/frontend/src/components/tables/table.css @@ -1,94 +1,155 @@ * { - /* box-sizing: border-box; */ - } + /* box-sizing: border-box; */ +} - html { - font-family: sans-serif; - /* font-size: 14px; */ - } +html { + font-family: sans-serif; + /* font-size: 14px; */ +} - .divTable { - /* border: 1px solid lightgray; */ - /* width: fit-content; */ - /* display: flex; +.divTable { + /* border: 1px solid lightgray; */ + /* width: fit-content; */ + /* display: flex; flex-direction: column; */ - } +} - .tr { - display: flex; - } +.tr { + display: flex; +} - .table-striped .tbody .tr:hover { - background-color: rgb(68,68,68); - } +.table-striped .tbody .tr:hover { + background-color: rgb(68, 68, 68); +} - .tr { - /* width: fit-content; +.tr { + /* width: fit-content; width: 100%; */ - /* height: 30px; */ - } + /* height: 30px; */ +} - .th, - .td { - /* box-shadow: inset 0 0 0 1px lightgray; */ - /* padding: 0.25rem; */ - padding-left: 4px; - padding-right: 4px; - } +.th, +.td { + /* box-shadow: inset 0 0 0 1px lightgray; */ + /* padding: 0.25rem; */ + padding-left: 4px; + padding-right: 4px; +} - .th { - /* padding: 2px 4px; */ - position: relative; - font-weight: bold; - text-align: center; - /* height: 30px; */ - } +.th { + /* padding: 2px 4px; */ + position: relative; + font-weight: bold; + text-align: center; + /* height: 30px; */ +} - .td { - height: 28px; - border-bottom: solid 1px rgb(68,68,68); - } +.td { + height: 28px; + border-bottom: solid 1px rgb(68, 68, 68); +} +.resizer { + position: absolute; + top: 0; + height: 100%; + width: 5px; + background: rgba(0, 0, 0, 0.5); + cursor: col-resize; + user-select: none; + touch-action: none; +} + +.resizer.ltr { + right: 0; +} + +.resizer.rtl { + left: 0; +} + +.resizer.isResizing { + background: blue; + opacity: 1; +} + +@media (hover: hover) { .resizer { - position: absolute; - top: 0; - height: 100%; - width: 5px; - background: rgba(0, 0, 0, 0.5); - cursor: col-resize; - user-select: none; - touch-action: none; + opacity: 0; } - .resizer.ltr { - right: 0; - } - - .resizer.rtl { - left: 0; - } - - .resizer.isResizing { - background: blue; + *:hover>.resizer { opacity: 1; } +} - @media (hover: hover) { - .resizer { - opacity: 0; - } +/* .table-striped .tbody .tr:nth-child(odd), */ +.table-striped .tbody .tr-odd { + background-color: #18181b; +} - *:hover > .resizer { - opacity: 1; - } - } +/* .table-striped .tbody .tr:nth-child(even), */ +.table-striped .tbody .tr-even { + background-color: #27272A; +} - /* .table-striped .tbody .tr:nth-child(odd), */ - .table-striped .tbody .tr-odd { - background-color: #18181b; - } +/* Prevent text selection when shift key is pressed */ +.shift-key-active { + cursor: pointer !important; +} - /* .table-striped .tbody .tr:nth-child(even), */ - .table-striped .tbody .tr-even { - background-color: #27272A; - } +.shift-key-active *, +.shift-key-active .tr, +.shift-key-active .td, +.shift-key-active .tbody { + user-select: none !important; + -webkit-user-select: none !important; + -moz-user-select: none !important; + -ms-user-select: none !important; +} + +/* Always allow text selection in editable elements */ +.shift-key-active input, +.shift-key-active textarea, +.shift-key-active 
[contenteditable="true"], +.shift-key-active .table-input-header input { + user-select: text !important; + -webkit-user-select: text !important; + -moz-user-select: text !important; + -ms-user-select: text !important; + cursor: text !important; +} + +/* Improve specificity and ensure text selection is disabled when shift is pressed */ +.shift-key-active, +.shift-key-active * { + user-select: none !important; + -webkit-user-select: none !important; + -moz-user-select: none !important; + -ms-user-select: none !important; + cursor: pointer !important; +} + +/* Add a visual indicator when shift is pressed */ +.shift-key-active .tbody .tr { + transition: background-color 0.1s; +} + +.shift-key-active .tbody .tr:hover { + background-color: rgba(68, 68, 68, 0.7) !important; +} + +/* Always allow text selection in inputs even when shift is pressed */ +.shift-key-active input, +.shift-key-active textarea, +.shift-key-active [contenteditable="true"], +.shift-key-active select, +.shift-key-active .mantine-Select-input, +.shift-key-active .mantine-MultiSelect-input, +.shift-key-active .table-input-header input { + user-select: text !important; + -webkit-user-select: text !important; + -moz-user-select: text !important; + -ms-user-select: text !important; + cursor: text !important; +} \ No newline at end of file From d27e4b7e8abbec92152fa156721fefb3af7c0402 Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Tue, 29 Apr 2025 14:14:40 -0500 Subject: [PATCH 0125/1435] Release stream lock before returning url if using redirect profile. --- apps/proxy/ts_proxy/views.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/apps/proxy/ts_proxy/views.py b/apps/proxy/ts_proxy/views.py index 222da4e3..35ca3648 100644 --- a/apps/proxy/ts_proxy/views.py +++ b/apps/proxy/ts_proxy/views.py @@ -193,7 +193,8 @@ def stream_ts(request, channel_id): break else: logger.warning(f"[{client_id}] Alternate stream #{alt['stream_id']} failed validation: {message}") - + # Release stream lock before redirecting + channel.release_stream() # Final decision based on validation results if is_valid: logger.info(f"[{client_id}] Redirecting to validated URL: {final_url} ({message})") From 2f23909bed50b940476fb12844331460ffd9ffa3 Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Tue, 29 Apr 2025 15:13:15 -0500 Subject: [PATCH 0126/1435] Fixed bug overwriting tvg-id when loading TV Guide. --- apps/epg/api_views.py | 4 ---- 1 file changed, 4 deletions(-) diff --git a/apps/epg/api_views.py b/apps/epg/api_views.py index 48eb680d..526172f1 100644 --- a/apps/epg/api_views.py +++ b/apps/epg/api_views.py @@ -140,10 +140,6 @@ class EPGGridAPIView(APIView): } dummy_programs.append(dummy_program) - # Also update the channel to use this dummy tvg_id - channel.tvg_id = dummy_tvg_id - channel.save(update_fields=['tvg_id']) - except Exception as e: logger.error(f"Error creating dummy programs for channel {channel.name} (ID: {channel.id}): {str(e)}") From 418bf01449b42e9add51a4288308f833ad1e2715 Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Tue, 29 Apr 2025 18:13:42 -0500 Subject: [PATCH 0127/1435] Notify user of how many matches auto-match found. Add batch EPG association endpoint and improve EPG matching logic - Implemented a new API endpoint to associate multiple channels with EPG data in a single request. - Enhanced the EPG matching process to normalize TVG IDs and log relevant information. - Updated frontend to handle batch EPG associations efficiently, falling back to legacy methods when necessary. 
--- apps/channels/api_views.py | 65 ++++++++++++++++++++++++++++++++++++++ apps/channels/tasks.py | 64 ++++++++++++++++++++++++++++--------- frontend/src/WebSocket.jsx | 14 +++++--- frontend/src/api.js | 29 ++++++++++++++++- scripts/epg_match.py | 18 +++++++++-- 5 files changed, 168 insertions(+), 22 deletions(-) diff --git a/apps/channels/api_views.py b/apps/channels/api_views.py index 51952541..ccd942d6 100644 --- a/apps/channels/api_views.py +++ b/apps/channels/api_views.py @@ -541,6 +541,71 @@ class ChannelViewSet(viewsets.ModelViewSet): except Exception as e: return Response({"error": str(e)}, status=400) + @swagger_auto_schema( + method='post', + operation_description="Associate multiple channels with EPG data without triggering a full refresh", + request_body=openapi.Schema( + type=openapi.TYPE_OBJECT, + properties={ + 'associations': openapi.Schema( + type=openapi.TYPE_ARRAY, + items=openapi.Schema( + type=openapi.TYPE_OBJECT, + properties={ + 'channel_id': openapi.Schema(type=openapi.TYPE_INTEGER), + 'epg_data_id': openapi.Schema(type=openapi.TYPE_INTEGER) + } + ) + ) + } + ), + responses={200: "EPG data linked for multiple channels"} + ) + @action(detail=False, methods=['post'], url_path='batch-set-epg') + def batch_set_epg(self, request): + """Efficiently associate multiple channels with EPG data at once.""" + associations = request.data.get('associations', []) + channels_updated = 0 + programs_refreshed = 0 + unique_epg_ids = set() + + for assoc in associations: + channel_id = assoc.get('channel_id') + epg_data_id = assoc.get('epg_data_id') + + if not channel_id: + continue + + try: + # Get the channel + channel = Channel.objects.get(id=channel_id) + + # Set the EPG data + channel.epg_data_id = epg_data_id + channel.save(update_fields=['epg_data']) + channels_updated += 1 + + # Track unique EPG data IDs + if epg_data_id: + unique_epg_ids.add(epg_data_id) + + except Channel.DoesNotExist: + logger.error(f"Channel with ID {channel_id} not found") + except Exception as e: + logger.error(f"Error setting EPG data for channel {channel_id}: {str(e)}") + + # Trigger program refresh for unique EPG data IDs + from apps.epg.tasks import parse_programs_for_tvg_id + for epg_id in unique_epg_ids: + parse_programs_for_tvg_id.delay(epg_id) + programs_refreshed += 1 + + return Response({ + 'success': True, + 'channels_updated': channels_updated, + 'programs_refreshed': programs_refreshed + }) + # ───────────────────────────────────────────────────────── # 4) Bulk Delete Streams # ───────────────────────────────────────────────────────── diff --git a/apps/channels/tasks.py b/apps/channels/tasks.py index 94bb8ca9..88d040e8 100755 --- a/apps/channels/tasks.py +++ b/apps/channels/tasks.py @@ -75,21 +75,42 @@ def match_epg_channels(): matched_channels = [] channels_to_update = [] - channels_json = [{ - "id": channel.id, - "name": channel.name, - "tvg_id": channel.tvg_id, - "fallback_name": channel.tvg_id.strip() if channel.tvg_id else channel.name, - "norm_chan": normalize_name(channel.tvg_id.strip() if channel.tvg_id else channel.name) - } for channel in Channel.objects.all() if not channel.epg_data] + # Get channels that don't have EPG data assigned + channels_without_epg = Channel.objects.filter(epg_data__isnull=True) + logger.info(f"Found {channels_without_epg.count()} channels without EPG data") - epg_json = [{ - 'id': epg.id, - 'tvg_id': epg.tvg_id, - 'name': epg.name, - 'norm_name': normalize_name(epg.name), - 'epg_source_id': epg.epg_source.id, - } for epg in EPGData.objects.all()] + 
channels_json = [] + for channel in channels_without_epg: + # Normalize TVG ID - strip whitespace and convert to lowercase + normalized_tvg_id = channel.tvg_id.strip().lower() if channel.tvg_id else "" + if normalized_tvg_id: + logger.info(f"Processing channel {channel.id} '{channel.name}' with TVG ID='{normalized_tvg_id}'") + + channels_json.append({ + "id": channel.id, + "name": channel.name, + "tvg_id": normalized_tvg_id, # Use normalized TVG ID + "original_tvg_id": channel.tvg_id, # Keep original for reference + "fallback_name": normalized_tvg_id if normalized_tvg_id else channel.name, + "norm_chan": normalize_name(normalized_tvg_id if normalized_tvg_id else channel.name) + }) + + # Similarly normalize EPG data TVG IDs + epg_json = [] + for epg in EPGData.objects.all(): + normalized_tvg_id = epg.tvg_id.strip().lower() if epg.tvg_id else "" + epg_json.append({ + 'id': epg.id, + 'tvg_id': normalized_tvg_id, # Use normalized TVG ID + 'original_tvg_id': epg.tvg_id, # Keep original for reference + 'name': epg.name, + 'norm_name': normalize_name(epg.name), + 'epg_source_id': epg.epg_source.id if epg.epg_source else None, + }) + + # Log available EPG data TVG IDs for debugging + unique_epg_tvg_ids = set(e['tvg_id'] for e in epg_json if e['tvg_id']) + logger.info(f"Available EPG TVG IDs: {', '.join(sorted(unique_epg_tvg_ids))}") payload = { "channels": channels_json, @@ -159,12 +180,25 @@ def match_epg_channels(): logger.info("Finished EPG matching logic.") + # Send update with additional information for refreshing UI channel_layer = get_channel_layer() + associations = [ + {"channel_id": chan["id"], "epg_data_id": chan["epg_data_id"]} + for chan in channels_to_update_dicts + ] + async_to_sync(channel_layer.group_send)( 'updates', { 'type': 'update', - "data": {"success": True, "type": "epg_match"} + "data": { + "success": True, + "type": "epg_match", + "refresh_channels": True, # Flag to tell frontend to refresh channels + "matches_count": total_matched, + "message": f"EPG matching complete: {total_matched} channel(s) matched", + "associations": associations # Add the associations data + } } ) diff --git a/frontend/src/WebSocket.jsx b/frontend/src/WebSocket.jsx index cd4bca6f..0f5c4404 100644 --- a/frontend/src/WebSocket.jsx +++ b/frontend/src/WebSocket.jsx @@ -14,7 +14,7 @@ import useEPGsStore from './store/epgs'; import { Box, Button, Stack } from '@mantine/core'; import API from './api'; -export const WebsocketContext = createContext([false, () => {}, null]); +export const WebsocketContext = createContext([false, () => { }, null]); export const WebsocketProvider = ({ children }) => { const [isReady, setIsReady] = useState(false); @@ -121,11 +121,17 @@ export const WebsocketProvider = ({ children }) => { case 'epg_match': notifications.show({ - message: 'EPG match is complete!', + message: event.data.message || 'EPG match is complete!', color: 'green.5', }); - // fetchChannels(); - fetchEPGData(); + + // Check if we have associations data and use the more efficient batch API + if (event.data.associations && event.data.associations.length > 0) { + API.batchSetEPG(event.data.associations); + } else { + // Fall back to legacy full refresh method + API.requeryChannels(); + } break; case 'm3u_profile_test': diff --git a/frontend/src/api.js b/frontend/src/api.js index e54e628a..8e1fe46c 100644 --- a/frontend/src/api.js +++ b/frontend/src/api.js @@ -1130,7 +1130,7 @@ export default class API { return response; } catch (e) { - errorNotification('Failed to create channle profile', e); + 
errorNotification('Failed to create channel profile', e);
     }
   }
 
@@ -1271,4 +1271,31 @@ export default class API {
       throw e;
     }
   }
+
+  static async batchSetEPG(associations) {
+    try {
+      const response = await request(
+        `${host}/api/channels/channels/batch-set-epg/`,
+        {
+          method: 'POST',
+          body: { associations },
+        }
+      );
+
+      // If successful, requery channels to update UI
+      if (response.success) {
+        notifications.show({
+          title: 'EPG Association',
+          message: `Updated ${response.channels_updated} channels, refreshing ${response.programs_refreshed} EPG sources.`,
+          color: 'blue',
+        });
+
+        this.requeryChannels();
+      }
+
+      return response;
+    } catch (e) {
+      errorNotification('Failed to update channel EPGs', e);
+    }
+  }
 }
diff --git a/scripts/epg_match.py b/scripts/epg_match.py
index e5d17466..ed86d865 100644
--- a/scripts/epg_match.py
+++ b/scripts/epg_match.py
@@ -34,7 +34,7 @@ def process_data(input_data):
 
     channels = input_data["channels"]
     epg_data = input_data["epg_data"]
-    region_code = input_data["region_code"]
+    region_code = input_data.get("region_code", None)
 
     epg_embeddings = None
     if any(row["norm_name"] for row in epg_data):
@@ -47,6 +47,21 @@ def process_data(input_data):
     matched_channels = []
 
     for chan in channels:
+        normalized_tvg_id = chan.get("tvg_id", "")
+        fallback_name = chan["tvg_id"].strip() if chan["tvg_id"] else chan["name"]
+
+        # Exact TVG ID match (direct match)
+        epg_by_tvg_id = next((epg for epg in epg_data if epg["tvg_id"] == normalized_tvg_id), None)
+        if normalized_tvg_id and epg_by_tvg_id:
+            chan["epg_data_id"] = epg_by_tvg_id["id"]
+            channels_to_update.append(chan)
+
+            # Add to matched_channels list so it's counted in the total
+            matched_channels.append((chan['id'], fallback_name, epg_by_tvg_id["tvg_id"]))
+
+            eprint(f"Channel {chan['id']} '{fallback_name}' => EPG found by tvg_id={epg_by_tvg_id['tvg_id']}")
+            continue
+
         # If channel has a tvg_id that doesn't exist in EPGData, do direct check.
         # I don't THINK this should happen now that we assign EPG on channel creation.
         if chan["tvg_id"]:
@@ -59,7 +74,6 @@ def process_data(input_data):
             continue
 
         # C) Perform name-based fuzzy matching
-        fallback_name = chan["tvg_id"].strip() if chan["tvg_id"] else chan["name"]
         if not chan["norm_chan"]:
            eprint(f"Channel {chan['id']} '{chan['name']}' => empty after normalization, skipping")
            continue

From d61de87fffed000b5d30a410c567b274be19648f Mon Sep 17 00:00:00 2001
From: Jean-Paul Acneaux
Date: Wed, 30 Apr 2025 05:10:31 +0200
Subject: [PATCH 0128/1435] Fix channel issues that I have with M3U files coming from TVHeadend, and add the tvg-chno tag for finding channel numbers.
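With this change the lookup order for a channel number becomes tv-chno, then
tvg-chno, then channel-number. A standalone sketch of that precedence (illustration
only; the int() guard is an extra safety added here, not part of the diff below):

    def extract_channel_number(custom_props):
        # The first matching attribute wins, mirroring the elif chain below
        for key in ('tv-chno', 'tvg-chno', 'channel-number'):
            if key in custom_props:
                try:
                    return int(custom_props[key])
                except (TypeError, ValueError):
                    return None  # malformed value; leave numbering to auto-assignment
        return None

    assert extract_channel_number({'tvg-chno': '102'}) == 102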
--- apps/channels/api_views.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/apps/channels/api_views.py b/apps/channels/api_views.py index ccd942d6..36a5b0e1 100644 --- a/apps/channels/api_views.py +++ b/apps/channels/api_views.py @@ -267,6 +267,8 @@ class ChannelViewSet(viewsets.ModelViewSet): channel_number = None if 'tv-chno' in stream_custom_props: channel_number = int(stream_custom_props['tv-chno']) + elif 'tvg-chno' in stream_custom_props: + channel_number = int(stream_custom_props['tvg-chno']) elif 'channel-number' in stream_custom_props: channel_number = int(stream_custom_props['channel-number']) @@ -388,6 +390,8 @@ class ChannelViewSet(viewsets.ModelViewSet): channel_number = None if 'tv-chno' in stream_custom_props: channel_number = int(stream_custom_props['tv-chno']) + elif 'tvg-chno' in stream_custom_props: + channel_number = int(stream_custom_props['tvg-chno']) elif 'channel-number' in stream_custom_props: channel_number = int(stream_custom_props['channel-number']) From bdb8d326a59f1c259cd8c584bcb44affa70a9565 Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Wed, 30 Apr 2025 12:17:11 -0500 Subject: [PATCH 0129/1435] Add better logging for which channel clients are getting chunks from. --- apps/proxy/ts_proxy/stream_generator.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/apps/proxy/ts_proxy/stream_generator.py b/apps/proxy/ts_proxy/stream_generator.py index 17e53b9d..26ed3a6b 100644 --- a/apps/proxy/ts_proxy/stream_generator.py +++ b/apps/proxy/ts_proxy/stream_generator.py @@ -208,7 +208,7 @@ class StreamGenerator: # Log empty reads periodically if self.empty_reads % 50 == 0: stream_status = "healthy" if (self.stream_manager and self.stream_manager.healthy) else "unknown" - logger.debug(f"[{self.client_id}] Waiting for chunks beyond {self.local_index} (buffer at {self.buffer.index}, stream: {stream_status})") + logger.debug(f"[{self.client_id}] Waiting for chunks beyond {self.local_index} for channel: {self.channel_id} (buffer at {self.buffer.index}, stream: {stream_status})") # Check for ghost clients if self._is_ghost_client(self.local_index): @@ -277,7 +277,7 @@ class StreamGenerator: yield chunk self.bytes_sent += len(chunk) self.chunks_sent += 1 - logger.debug(f"[{self.client_id}] Sent chunk {self.chunks_sent} ({len(chunk)} bytes) to client") + logger.debug(f"[{self.client_id}] Sent chunk {self.chunks_sent} ({len(chunk)} bytes) for channel {self.channel_id} to client") current_time = time.time() From b7c543b5f5da004739622d40a9028c425177a4d9 Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Wed, 30 Apr 2025 12:48:50 -0500 Subject: [PATCH 0130/1435] Use gevent sleep instead of sleep. 
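The motivation: time.sleep blocks the whole OS thread, so one waiting client can
stall every greenlet that shares it, while gevent.sleep yields to the event loop and
lets other greenlets keep running. A tiny self-contained demonstration (assumes
gevent is installed; not code from this repository):

    import gevent

    def worker(name):
        for i in range(2):
            print(name, 'tick', i)
            gevent.sleep(0.1)  # cooperative: control passes to the other greenlet

    # Output interleaves a and b; with a blocking time.sleep (no monkey patching)
    # the two workers would run one after the other instead
    gevent.joinall([gevent.spawn(worker, 'a'), gevent.spawn(worker, 'b')])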
--- apps/proxy/ts_proxy/stream_buffer.py | 5 +++++ apps/proxy/ts_proxy/stream_generator.py | 9 +++++---- 2 files changed, 10 insertions(+), 4 deletions(-) diff --git a/apps/proxy/ts_proxy/stream_buffer.py b/apps/proxy/ts_proxy/stream_buffer.py index 4d73bdc2..f0be1c52 100644 --- a/apps/proxy/ts_proxy/stream_buffer.py +++ b/apps/proxy/ts_proxy/stream_buffer.py @@ -11,6 +11,7 @@ from .redis_keys import RedisKeys from .config_helper import ConfigHelper from .constants import TS_PACKET_SIZE from .utils import get_logger +import gevent.event logger = get_logger() @@ -46,6 +47,7 @@ class StreamBuffer: # Track timers for proper cleanup self.stopping = False self.fill_timers = [] + self.chunk_available = gevent.event.Event() def add_chunk(self, chunk): """Add data with optimized Redis storage and TS packet alignment""" @@ -96,6 +98,9 @@ class StreamBuffer: if writes_done > 0: logger.debug(f"Added {writes_done} chunks ({self.target_chunk_size} bytes each) to Redis for channel {self.channel_id} at index {self.index}") + self.chunk_available.set() # Signal that new data is available + self.chunk_available.clear() # Reset for next notification + return True except Exception as e: diff --git a/apps/proxy/ts_proxy/stream_generator.py b/apps/proxy/ts_proxy/stream_generator.py index 26ed3a6b..bdd20874 100644 --- a/apps/proxy/ts_proxy/stream_generator.py +++ b/apps/proxy/ts_proxy/stream_generator.py @@ -6,6 +6,7 @@ This module handles generating and delivering video streams to clients. import time import logging import threading +import gevent # Add this import at the top of your file from apps.proxy.config import TSConfig as Config from .server import ProxyServer from .utils import create_ts_packet, get_logger @@ -135,7 +136,7 @@ class StreamGenerator: return False # Wait a bit before checking again - time.sleep(0.1) + gevent.sleep(0.1) # Timed out waiting logger.warning(f"[{self.client_id}] Timed out waiting for initialization") @@ -199,11 +200,11 @@ class StreamGenerator: self.bytes_sent += len(keepalive_packet) self.last_yield_time = time.time() self.consecutive_empty = 0 # Reset consecutive counter but keep total empty_reads - time.sleep(Config.KEEPALIVE_INTERVAL) + gevent.sleep(Config.KEEPALIVE_INTERVAL) # Replace time.sleep else: # Standard wait with backoff sleep_time = min(0.1 * self.consecutive_empty, 1.0) - time.sleep(sleep_time) + gevent.sleep(sleep_time) # Replace time.sleep # Log empty reads periodically if self.empty_reads % 50 == 0: @@ -416,7 +417,7 @@ class StreamGenerator: # Use the config setting instead of hardcoded value shutdown_delay = getattr(Config, 'CHANNEL_SHUTDOWN_DELAY', 5) logger.info(f"Waiting {shutdown_delay}s before checking if channel should be stopped") - time.sleep(shutdown_delay) + gevent.sleep(shutdown_delay) # After delay, check global client count if self.channel_id in proxy_server.client_managers: From 423020861c2179759931e49cc7bef4dbc242864c Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Wed, 30 Apr 2025 13:32:16 -0500 Subject: [PATCH 0131/1435] Replace time.sleep with gevent.sleep for improved concurrency --- apps/proxy/ts_proxy/server.py | 9 +++++---- apps/proxy/ts_proxy/stream_generator.py | 6 ++---- apps/proxy/ts_proxy/stream_manager.py | 11 ++++++----- 3 files changed, 13 insertions(+), 13 deletions(-) diff --git a/apps/proxy/ts_proxy/server.py b/apps/proxy/ts_proxy/server.py index 1a04ffb5..cebcc545 100644 --- a/apps/proxy/ts_proxy/server.py +++ b/apps/proxy/ts_proxy/server.py @@ -15,6 +15,7 @@ import time import sys import os import json +import gevent # 
Add gevent import from typing import Dict, Optional, Set from apps.proxy.config import TSConfig as Config from apps.channels.models import Channel, Stream @@ -209,7 +210,7 @@ class ProxyServer: if shutdown_delay > 0: logger.info(f"Waiting {shutdown_delay}s before stopping channel...") - time.sleep(shutdown_delay) + gevent.sleep(shutdown_delay) # REPLACE: time.sleep(shutdown_delay) # Re-check client count before stopping total = self.redis_client.scard(client_set_key) or 0 @@ -336,7 +337,7 @@ class ProxyServer: final_delay = delay + jitter logger.error(f"Error in event listener: {e}. Retrying in {final_delay:.1f}s (attempt {retry_count})") - time.sleep(final_delay) + gevent.sleep(final_delay) # REPLACE: time.sleep(final_delay) # Try to clean up the old connection try: @@ -350,7 +351,7 @@ class ProxyServer: except Exception as e: logger.error(f"Error in event listener: {e}") # Add a short delay to prevent rapid retries on persistent errors - time.sleep(5) + gevent.sleep(5) # REPLACE: time.sleep(5) thread = threading.Thread(target=event_listener, daemon=True) thread.name = "redis-event-listener" @@ -1000,7 +1001,7 @@ class ProxyServer: except Exception as e: logger.error(f"Error in cleanup thread: {e}", exc_info=True) - time.sleep(ConfigHelper.cleanup_check_interval()) + gevent.sleep(ConfigHelper.cleanup_check_interval()) # REPLACE: time.sleep(ConfigHelper.cleanup_check_interval()) thread = threading.Thread(target=cleanup_task, daemon=True) thread.name = "ts-proxy-cleanup" diff --git a/apps/proxy/ts_proxy/stream_generator.py b/apps/proxy/ts_proxy/stream_generator.py index bdd20874..82060f2f 100644 --- a/apps/proxy/ts_proxy/stream_generator.py +++ b/apps/proxy/ts_proxy/stream_generator.py @@ -417,7 +417,7 @@ class StreamGenerator: # Use the config setting instead of hardcoded value shutdown_delay = getattr(Config, 'CHANNEL_SHUTDOWN_DELAY', 5) logger.info(f"Waiting {shutdown_delay}s before checking if channel should be stopped") - gevent.sleep(shutdown_delay) + gevent.sleep(shutdown_delay) # Replace time.sleep # After delay, check global client count if self.channel_id in proxy_server.client_managers: @@ -428,9 +428,7 @@ class StreamGenerator: else: logger.info(f"Not shutting down channel {self.channel_id}, {total} clients still connected") - shutdown_thread = threading.Thread(target=delayed_shutdown) - shutdown_thread.daemon = True - shutdown_thread.start() + gevent.spawn(delayed_shutdown) def create_stream_generator(channel_id, client_id, client_ip, client_user_agent, channel_initializing=False): """ diff --git a/apps/proxy/ts_proxy/stream_manager.py b/apps/proxy/ts_proxy/stream_manager.py index 6fd2b4b8..e9a531d8 100644 --- a/apps/proxy/ts_proxy/stream_manager.py +++ b/apps/proxy/ts_proxy/stream_manager.py @@ -6,6 +6,7 @@ import time import socket import requests import subprocess +import gevent # Add this import from typing import Optional, List from django.shortcuts import get_object_or_404 from apps.proxy.config import TSConfig as Config @@ -157,7 +158,7 @@ class StreamManager: url_failed = False if self.url_switching: logger.debug("Skipping connection attempt during URL switch") - time.sleep(0.1) + gevent.sleep(0.1) # REPLACE time.sleep(0.1) continue # Connection retry loop for current URL while self.running and self.retry_count < self.max_retries and not url_failed: @@ -205,7 +206,7 @@ class StreamManager: # Wait with exponential backoff before retrying timeout = min(.25 * self.retry_count, 3) # Cap at 3 seconds logger.info(f"Reconnecting in {timeout} seconds... 
(attempt {self.retry_count}/{self.max_retries})") - time.sleep(timeout) + gevent.sleep(timeout) # REPLACE time.sleep(timeout) except Exception as e: logger.error(f"Connection error: {e}", exc_info=True) @@ -218,7 +219,7 @@ class StreamManager: # Wait with exponential backoff before retrying timeout = min(.25 * self.retry_count, 3) # Cap at 3 seconds logger.info(f"Reconnecting in {timeout} seconds after error... (attempt {self.retry_count}/{self.max_retries})") - time.sleep(timeout) + gevent.sleep(timeout) # REPLACE time.sleep(timeout) # If URL failed and we're still running, try switching to another stream if url_failed and self.running: @@ -425,7 +426,7 @@ class StreamManager: else: if not self.running: break - time.sleep(0.1) + gevent.sleep(0.1) # REPLACE time.sleep(0.1) else: # Handle direct HTTP connection chunk_count = 0 @@ -674,7 +675,7 @@ class StreamManager: except Exception as e: logger.error(f"Error in health monitor: {e}") - time.sleep(self.health_check_interval) + gevent.sleep(self.health_check_interval) # REPLACE time.sleep(self.health_check_interval) def _attempt_reconnect(self): """Attempt to reconnect to the current stream""" From 80fe7e02f84d67b7ddb8ab527d9b21d6f433f53b Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Wed, 30 Apr 2025 13:43:01 -0500 Subject: [PATCH 0132/1435] Added missing _attempt_health_recovery. --- apps/proxy/ts_proxy/stream_manager.py | 23 +++++++++++++++++++++++ 1 file changed, 23 insertions(+) diff --git a/apps/proxy/ts_proxy/stream_manager.py b/apps/proxy/ts_proxy/stream_manager.py index e9a531d8..771ffba8 100644 --- a/apps/proxy/ts_proxy/stream_manager.py +++ b/apps/proxy/ts_proxy/stream_manager.py @@ -719,6 +719,29 @@ class StreamManager: logger.error(f"Error in reconnect attempt: {e}", exc_info=True) return False + def _attempt_health_recovery(self): + """Attempt to recover stream health by switching to another stream""" + try: + logger.info(f"Attempting health recovery for channel {self.channel_id}") + + # Don't try to switch if we're already in the process of switching URLs + if self.url_switching: + logger.info("URL switching already in progress, skipping health recovery") + return + + # Try to switch to next stream + switch_result = self._try_next_stream() + if switch_result: + logger.info(f"Health recovery successful - switched to new stream for channel {self.channel_id}") + return True + else: + logger.warning(f"Health recovery failed - no alternative streams available for channel {self.channel_id}") + return False + + except Exception as e: + logger.error(f"Error in health recovery attempt: {e}", exc_info=True) + return False + def _close_connection(self): """Close HTTP connection resources""" # Close response if it exists From 4f0c8333c6196c1246550518753ace3fb18b755f Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Wed, 30 Apr 2025 14:42:32 -0500 Subject: [PATCH 0133/1435] Add return statement in get_cache_file method of EPGSource model --- apps/epg/models.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/apps/epg/models.py b/apps/epg/models.py index 2f7d5990..e020821a 100644 --- a/apps/epg/models.py +++ b/apps/epg/models.py @@ -40,6 +40,8 @@ class EPGSource(models.Model): cache_dir = os.path.join(settings.MEDIA_ROOT, "cached_epg") cache = os.path.join(cache_dir, filename) + return cache + class EPGData(models.Model): # Removed the Channel foreign key. We now just store the original tvg_id # and a name (which might simply be the tvg_id if no real channel exists). 
From 91f5e2ad Mon Sep 17 00:00:00 2001
From: dekzter
Date: Wed, 30 Apr 2025 16:15:00 -0400
Subject: [PATCH 0134/1435] fixed stream URL sample for search / replace patterns

---
 frontend/src/components/forms/M3UProfile.jsx | 21 ++++++++++++++----
 1 file changed, 17 insertions(+), 4 deletions(-)

diff --git a/frontend/src/components/forms/M3UProfile.jsx b/frontend/src/components/forms/M3UProfile.jsx
index 2de99750..d1052c07 100644
--- a/frontend/src/components/forms/M3UProfile.jsx
+++ b/frontend/src/components/forms/M3UProfile.jsx
@@ -21,20 +21,33 @@ const RegexFormAndView = ({ profile = null, m3u, isOpen, onClose }) => {
   const profileSearchPreview = usePlaylistsStore((s) => s.profileSearchPreview);
   const profileResult = usePlaylistsStore((s) => s.profileResult);
 
+  const [streamUrl, setStreamUrl] = useState('');
   const [searchPattern, setSearchPattern] = useState('');
   const [replacePattern, setReplacePattern] = useState('');
   const [debouncedPatterns, setDebouncedPatterns] = useState({});
 
+  useEffect(() => {
+    async function fetchStreamUrl() {
+      const params = new URLSearchParams();
+      params.append('page', 1);
+      params.append('page_size', 1);
+      params.append('m3u_account', m3u.id);
+      const response = await API.queryStreams(params);
+      setStreamUrl(response.results[0].url);
+    }
+    fetchStreamUrl();
+  }, []);
+
   useEffect(() => {
     sendMessage(
       JSON.stringify({
         type: 'm3u_profile_test',
-        url: m3u.server_url,
+        url: streamUrl,
         search: debouncedPatterns['search'] || '',
         replace: debouncedPatterns['replace'] || '',
       })
     );
-  }, [m3u, debouncedPatterns]);
+  }, [m3u, debouncedPatterns, streamUrl]);
 
   useEffect(() => {
     const handler = setTimeout(() => {
@@ -155,7 +168,7 @@ const RegexFormAndView = ({ profile = null, m3u, isOpen, onClose }) => {
            Search
 
@@ -163,7 +176,7 @@ const RegexFormAndView = ({ profile = null, m3u, isOpen, onClose }) => {
            Replace
 
-          {profileResult || m3u.server_url}
+          {profileResult || streamUrl}
 
       );
 
From 
79392bb129f69d8f3fca27c76e90fc95f1f5a47c Mon Sep 17 00:00:00 2001 From: dekzter Date: Wed, 30 Apr 2025 16:58:16 -0400 Subject: [PATCH 0136/1435] fixed channel form not updating some properties after saving --- frontend/src/components/forms/Channel.jsx | 1 + frontend/src/components/tables/ChannelsTable.jsx | 8 ++++---- 2 files changed, 5 insertions(+), 4 deletions(-) diff --git a/frontend/src/components/forms/Channel.jsx b/frontend/src/components/forms/Channel.jsx index 33eca5ea..3253d67b 100644 --- a/frontend/src/components/forms/Channel.jsx +++ b/frontend/src/components/forms/Channel.jsx @@ -161,6 +161,7 @@ const Channel = ({ channel = null, isOpen, onClose }) => { console.error('Error saving channel:', error); } + formik.resetForm(); API.requeryChannels(); setSubmitting(false); setTvgFilter(''); diff --git a/frontend/src/components/tables/ChannelsTable.jsx b/frontend/src/components/tables/ChannelsTable.jsx index 7982817c..bdca0722 100644 --- a/frontend/src/components/tables/ChannelsTable.jsx +++ b/frontend/src/components/tables/ChannelsTable.jsx @@ -95,19 +95,19 @@ const ChannelRowActions = React.memo( }) => { const onEdit = useCallback(() => { editChannel(row.original); - }, []); + }, [row.original]); const onDelete = useCallback(() => { deleteChannel(row.original.id); - }, []); + }, [row.original]); const onPreview = useCallback(() => { handleWatchStream(row.original); - }, []); + }, [row.original]); const onRecord = useCallback(() => { createRecording(row.original); - }, []); + }, [row.original]); return ( From e8355a78c670b46700c9ec54129edb7c0ed1bc94 Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Wed, 30 Apr 2025 16:51:48 -0500 Subject: [PATCH 0137/1435] Fetch channels when auto-match is complete. --- apps/channels/api_views.py | 2 ++ frontend/src/WebSocket.jsx | 3 --- frontend/src/api.js | 3 +++ 3 files changed, 5 insertions(+), 3 deletions(-) diff --git a/apps/channels/api_views.py b/apps/channels/api_views.py index ccd942d6..65b2a39c 100644 --- a/apps/channels/api_views.py +++ b/apps/channels/api_views.py @@ -600,6 +600,8 @@ class ChannelViewSet(viewsets.ModelViewSet): parse_programs_for_tvg_id.delay(epg_id) programs_refreshed += 1 + + return Response({ 'success': True, 'channels_updated': channels_updated, diff --git a/frontend/src/WebSocket.jsx b/frontend/src/WebSocket.jsx index 0f5c4404..f538ee29 100644 --- a/frontend/src/WebSocket.jsx +++ b/frontend/src/WebSocket.jsx @@ -128,9 +128,6 @@ export const WebsocketProvider = ({ children }) => { // Check if we have associations data and use the more efficient batch API if (event.data.associations && event.data.associations.length > 0) { API.batchSetEPG(event.data.associations); - } else { - // Fall back to legacy full refresh method - API.requeryChannels(); } break; diff --git a/frontend/src/api.js b/frontend/src/api.js index 8e1fe46c..3cec6e38 100644 --- a/frontend/src/api.js +++ b/frontend/src/api.js @@ -1290,6 +1290,9 @@ export default class API { color: 'blue', }); + // First fetch the complete channel data + await useChannelsStore.getState().fetchChannels(); + // Then refresh the current table view this.requeryChannels(); } From c058c4ed10b94bf0361cc0b0c7bf682a56a6cdfe Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Wed, 30 Apr 2025 17:03:36 -0500 Subject: [PATCH 0138/1435] Fixes spacing and padding in epg and m3u tables. 
--- frontend/src/components/tables/EPGsTable.jsx | 36 ++++++++++++++++---- frontend/src/components/tables/M3UsTable.jsx | 24 +++++++++++-- frontend/src/pages/ContentSources.jsx | 2 ++ 3 files changed, 53 insertions(+), 9 deletions(-) diff --git a/frontend/src/components/tables/EPGsTable.jsx b/frontend/src/components/tables/EPGsTable.jsx index 07f4128d..1f76eb33 100644 --- a/frontend/src/components/tables/EPGsTable.jsx +++ b/frontend/src/components/tables/EPGsTable.jsx @@ -42,20 +42,39 @@ const EPGsTable = () => { { header: 'Name', accessorKey: 'name', + size: 150, + minSize: 100, }, { header: 'Source Type', accessorKey: 'source_type', + size: 120, + minSize: 100, }, { header: 'URL / API Key', accessorKey: 'url', + size: 200, + minSize: 120, enableSorting: false, + Cell: ({ cell }) => ( +
+ {cell.getValue()} +
+ ), }, { header: 'Active', accessorKey: 'is_active', - size: 100, + size: 80, + minSize: 60, sortingFn: 'basic', mantineTableBodyCellProps: { align: 'left', @@ -73,6 +92,8 @@ const EPGsTable = () => { { header: 'Updated', accessorFn: (row) => dayjs(row.updated_at).format('MMMM D, YYYY h:mma'), + size: 180, + minSize: 100, enableSorting: false, }, ], @@ -144,6 +165,13 @@ const EPGsTable = () => { density: 'compact', }, enableRowActions: true, + positionActionsColumn: 'last', + displayColumnDefOptions: { + 'mrt-row-actions': { + size: 120, // Make action column wider + minSize: 120, // Ensure minimum width for action buttons + }, + }, renderRowActions: ({ row }) => ( <> { mantineTableContainerProps: { style: { height: 'calc(40vh - 10px)', - }, - }, - displayColumnDefOptions: { - 'mrt-row-actions': { - size: 10, + overflowX: 'auto', // Ensure horizontal scrolling works }, }, }); diff --git a/frontend/src/components/tables/M3UsTable.jsx b/frontend/src/components/tables/M3UsTable.jsx index 9765ca66..63a25118 100644 --- a/frontend/src/components/tables/M3UsTable.jsx +++ b/frontend/src/components/tables/M3UsTable.jsx @@ -99,16 +99,21 @@ const M3UTable = () => { { header: 'Name', accessorKey: 'name', + size: 150, + minSize: 100, // Minimum width }, { header: 'URL / File', accessorKey: 'server_url', + size: 200, + minSize: 120, Cell: ({ cell }) => (
{cell.getValue()} @@ -118,7 +123,8 @@ const M3UTable = () => { { header: 'Max Streams', accessorKey: 'max_streams', - size: 200, + size: 120, + minSize: 80, }, { header: 'Status', @@ -132,12 +138,14 @@ const M3UTable = () => { return generateStatusString(refreshProgress[row.id]); }, - size: 200, + size: 150, + minSize: 80, }, { header: 'Active', accessorKey: 'is_active', - size: 100, + size: 80, + minSize: 60, sortingFn: 'basic', mantineTableBodyCellProps: { align: 'left', @@ -155,6 +163,8 @@ const M3UTable = () => { { header: 'Updated', accessorFn: (row) => dayjs(row.updated_at).format('MMMM D, YYYY h:mma'), + size: 180, + minSize: 100, enableSorting: false, }, ], @@ -239,6 +249,13 @@ const M3UTable = () => { density: 'compact', }, enableRowActions: true, + positionActionsColumn: 'last', + displayColumnDefOptions: { + 'mrt-row-actions': { + size: 120, // Make action column wider + minSize: 120, // Ensure minimum width for action buttons + }, + }, renderRowActions: ({ row }) => ( <> { mantineTableContainerProps: { style: { height: 'calc(40vh - 10px)', + overflowX: 'auto', // Ensure horizontal scrolling works }, }, }); diff --git a/frontend/src/pages/ContentSources.jsx b/frontend/src/pages/ContentSources.jsx index eb62fe49..24e736d4 100644 --- a/frontend/src/pages/ContentSources.jsx +++ b/frontend/src/pages/ContentSources.jsx @@ -15,7 +15,9 @@ const M3UPage = () => { From 6adda8209f2cbc09930ef9ac459f972e08b6dcfe Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Wed, 30 Apr 2025 17:54:48 -0500 Subject: [PATCH 0139/1435] Ensure cache directory exists before saving EPG data --- apps/epg/models.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/apps/epg/models.py b/apps/epg/models.py index e020821a..a0e5343b 100644 --- a/apps/epg/models.py +++ b/apps/epg/models.py @@ -38,6 +38,10 @@ class EPGSource(models.Model): # Build full path in MEDIA_ROOT/cached_epg cache_dir = os.path.join(settings.MEDIA_ROOT, "cached_epg") + + # Create directory if it doesn't exist + os.makedirs(cache_dir, exist_ok=True) + cache = os.path.join(cache_dir, filename) return cache From a50a7372c1df3a4ca69ab14d47a025fe53729eb3 Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Wed, 30 Apr 2025 18:22:27 -0500 Subject: [PATCH 0140/1435] Removed unnecessary elif for invalid flag. 
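
With the invalid 'tv-chno' branch removed, the lookup order for a channel
number derived from a stream's custom properties reduces to 'tvg-chno',
then 'channel-number'. As a minimal standalone sketch of that precedence
(the helper name is illustrative, not project code):

    import json

    def derive_channel_number(custom_properties):
        """Return the first recognized channel-number hint, or None."""
        props = json.loads(custom_properties) if custom_properties else {}
        for key in ('tvg-chno', 'channel-number'):  # 'tv-chno' is no longer consulted
            if key in props:
                return int(props[key])
        return None
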
--- apps/channels/api_views.py | 8 ++------ 1 file changed, 2 insertions(+), 6 deletions(-) diff --git a/apps/channels/api_views.py b/apps/channels/api_views.py index 42334b44..ab206afb 100644 --- a/apps/channels/api_views.py +++ b/apps/channels/api_views.py @@ -265,9 +265,7 @@ class ChannelViewSet(viewsets.ModelViewSet): stream_custom_props = json.loads(stream.custom_properties) if stream.custom_properties else {} channel_number = None - if 'tv-chno' in stream_custom_props: - channel_number = int(stream_custom_props['tv-chno']) - elif 'tvg-chno' in stream_custom_props: + if 'tvg-chno' in stream_custom_props: channel_number = int(stream_custom_props['tvg-chno']) elif 'channel-number' in stream_custom_props: channel_number = int(stream_custom_props['channel-number']) @@ -388,9 +386,7 @@ class ChannelViewSet(viewsets.ModelViewSet): stream_custom_props = json.loads(stream.custom_properties) if stream.custom_properties else {} channel_number = None - if 'tv-chno' in stream_custom_props: - channel_number = int(stream_custom_props['tv-chno']) - elif 'tvg-chno' in stream_custom_props: + if 'tvg-chno' in stream_custom_props: channel_number = int(stream_custom_props['tvg-chno']) elif 'channel-number' in stream_custom_props: channel_number = int(stream_custom_props['channel-number']) From a6087d1010cebd49a3e0780c6fbb257c0ef257f8 Mon Sep 17 00:00:00 2001 From: GitHub Actions Date: Wed, 30 Apr 2025 23:27:49 +0000 Subject: [PATCH 0141/1435] Release v0.4.0 --- version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/version.py b/version.py index 9339fcbd..25e27d60 100644 --- a/version.py +++ b/version.py @@ -1,5 +1,5 @@ """ Dispatcharr version information. """ -__version__ = '0.3.3' # Follow semantic versioning (MAJOR.MINOR.PATCH) +__version__ = '0.4.0' # Follow semantic versioning (MAJOR.MINOR.PATCH) __timestamp__ = None # Set during CI/CD build process From 2b44c122e787116a9a0a8a54e71067ed3e17c26c Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Wed, 30 Apr 2025 18:46:10 -0500 Subject: [PATCH 0142/1435] Update version display to include timestamp instead of build --- docker/entrypoint.sh | 12 +++++++++--- 1 file changed, 9 insertions(+), 3 deletions(-) diff --git a/docker/entrypoint.sh b/docker/entrypoint.sh index 478d94d0..d2afb3a3 100755 --- a/docker/entrypoint.sh +++ b/docker/entrypoint.sh @@ -40,8 +40,14 @@ export DISPATCHARR_PORT=${DISPATCHARR_PORT:-9191} # Extract version information from version.py export DISPATCHARR_VERSION=$(python -c "import sys; sys.path.append('/app'); import version; print(version.__version__)") -export DISPATCHARR_BUILD=$(python -c "import sys; sys.path.append('/app'); import version; print(version.__build__)") -echo "📦 Dispatcharr version: ${DISPATCHARR_VERSION}-${DISPATCHARR_BUILD}" +export DISPATCHARR_TIMESTAMP=$(python -c "import sys; sys.path.append('/app'); import version; print(version.__timestamp__ or '')") + +# Display version information with timestamp if available +if [ -n "$DISPATCHARR_TIMESTAMP" ]; then + echo "📦 Dispatcharr version: ${DISPATCHARR_VERSION} (build: ${DISPATCHARR_TIMESTAMP})" +else + echo "📦 Dispatcharr version: ${DISPATCHARR_VERSION}" +fi # READ-ONLY - don't let users change these export POSTGRES_DIR=/data/db @@ -64,7 +70,7 @@ if [[ ! 
-f /etc/profile.d/dispatcharr.sh ]]; then echo "export POSTGRES_DIR=$POSTGRES_DIR" >> /etc/profile.d/dispatcharr.sh echo "export DISPATCHARR_PORT=$DISPATCHARR_PORT" >> /etc/profile.d/dispatcharr.sh echo "export DISPATCHARR_VERSION=$DISPATCHARR_VERSION" >> /etc/profile.d/dispatcharr.sh - echo "export DISPATCHARR_BUILD=$DISPATCHARR_BUILD" >> /etc/profile.d/dispatcharr.sh + echo "export DISPATCHARR_TIMESTAMP=$DISPATCHARR_TIMESTAMP" >> /etc/profile.d/dispatcharr.sh fi chmod +x /etc/profile.d/dispatcharr.sh From 3381cb8695708b66ccc8b18f60fe7e8e75997af4 Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Wed, 30 Apr 2025 19:14:18 -0500 Subject: [PATCH 0143/1435] Switch to yarn for building frontend. --- docker/Dockerfile | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/docker/Dockerfile b/docker/Dockerfile index 4d313e2c..f145fe2e 100644 --- a/docker/Dockerfile +++ b/docker/Dockerfile @@ -44,8 +44,9 @@ RUN cd /app && \ FROM node:20-slim AS frontend-builder WORKDIR /app/frontend COPY --from=builder /app /app -RUN npm install --legacy-peer-deps && \ - npm run build && \ +RUN corepack enable && corepack prepare yarn@stable --activate && \ + yarn install && \ + yarn build && \ find . -maxdepth 1 ! -name '.' ! -name 'dist' -exec rm -rf '{}' \; FROM python:3.13-slim From 7a67479e387fe39c07459ebb81677678dfbf4c3c Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Wed, 30 Apr 2025 19:21:22 -0500 Subject: [PATCH 0144/1435] Back to NPM but use ignore scripts and rebuild. --- docker/Dockerfile | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/docker/Dockerfile b/docker/Dockerfile index f145fe2e..f73cfb84 100644 --- a/docker/Dockerfile +++ b/docker/Dockerfile @@ -44,9 +44,9 @@ RUN cd /app && \ FROM node:20-slim AS frontend-builder WORKDIR /app/frontend COPY --from=builder /app /app -RUN corepack enable && corepack prepare yarn@stable --activate && \ - yarn install && \ - yarn build && \ +RUN npm install --ignore-scripts && \ + npm rebuild && \ + npm run build && \ find . -maxdepth 1 ! -name '.' ! -name 'dist' -exec rm -rf '{}' \; FROM python:3.13-slim From da1fae89a9092526216ab233fb53da01c40c9422 Mon Sep 17 00:00:00 2001 From: GitHub Actions Date: Thu, 1 May 2025 00:30:25 +0000 Subject: [PATCH 0145/1435] Release v0.5.0 --- version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/version.py b/version.py index 25e27d60..18a1ef5d 100644 --- a/version.py +++ b/version.py @@ -1,5 +1,5 @@ """ Dispatcharr version information. """ -__version__ = '0.4.0' # Follow semantic versioning (MAJOR.MINOR.PATCH) +__version__ = '0.5.0' # Follow semantic versioning (MAJOR.MINOR.PATCH) __timestamp__ = None # Set during CI/CD build process From e975a13c0f5cc9d9661417a3cdd1f74e7f7a6c71 Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Wed, 30 Apr 2025 19:30:53 -0500 Subject: [PATCH 0146/1435] Another attempt at using yarn. 
--- docker/Dockerfile | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/docker/Dockerfile b/docker/Dockerfile index f73cfb84..26b54975 100644 --- a/docker/Dockerfile +++ b/docker/Dockerfile @@ -41,12 +41,12 @@ RUN cd /app && \ pip install --no-cache-dir -r requirements.txt # Use a dedicated Node.js stage for frontend building -FROM node:20-slim AS frontend-builder +FROM node:20 AS frontend-builder WORKDIR /app/frontend COPY --from=builder /app /app -RUN npm install --ignore-scripts && \ - npm rebuild && \ - npm run build && \ +RUN corepack enable && corepack prepare yarn@stable --activate && \ + yarn install && \ + yarn build && \ find . -maxdepth 1 ! -name '.' ! -name 'dist' -exec rm -rf '{}' \; FROM python:3.13-slim From 8219773a68b24e34c8865f94292cf80851a11e72 Mon Sep 17 00:00:00 2001 From: GitHub Actions Date: Thu, 1 May 2025 00:54:35 +0000 Subject: [PATCH 0147/1435] Release v0.6.0 --- version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/version.py b/version.py index 18a1ef5d..6c008d2b 100644 --- a/version.py +++ b/version.py @@ -1,5 +1,5 @@ """ Dispatcharr version information. """ -__version__ = '0.5.0' # Follow semantic versioning (MAJOR.MINOR.PATCH) +__version__ = '0.6.0' # Follow semantic versioning (MAJOR.MINOR.PATCH) __timestamp__ = None # Set during CI/CD build process From c65b431ebaf97afd1faef6abad52e670cbf8c580 Mon Sep 17 00:00:00 2001 From: SergeantPanda <61642231+SergeantPanda@users.noreply.github.com> Date: Wed, 30 Apr 2025 20:05:37 -0500 Subject: [PATCH 0148/1435] Rolled back after failed release. --- version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/version.py b/version.py index 6c008d2b..772064dd 100644 --- a/version.py +++ b/version.py @@ -1,5 +1,5 @@ """ Dispatcharr version information. """ -__version__ = '0.6.0' # Follow semantic versioning (MAJOR.MINOR.PATCH) +__version__ = '0.3.0' # Follow semantic versioning (MAJOR.MINOR.PATCH) __timestamp__ = None # Set during CI/CD build process From c6c5662472771612e4db0720e214a0c0020f3864 Mon Sep 17 00:00:00 2001 From: GitHub Actions Date: Thu, 1 May 2025 01:09:08 +0000 Subject: [PATCH 0149/1435] Release v0.4.0 --- version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/version.py b/version.py index 772064dd..25e27d60 100644 --- a/version.py +++ b/version.py @@ -1,5 +1,5 @@ """ Dispatcharr version information. """ -__version__ = '0.3.0' # Follow semantic versioning (MAJOR.MINOR.PATCH) +__version__ = '0.4.0' # Follow semantic versioning (MAJOR.MINOR.PATCH) __timestamp__ = None # Set during CI/CD build process From a86ae715b9d41f0fe99dc783857c1342c1a2fb4b Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Wed, 30 Apr 2025 21:11:41 -0500 Subject: [PATCH 0150/1435] Fixes add streams to channel to follow correct logic of being disabled. 
--- .../src/components/tables/StreamsTable.jsx | 35 +++++++++---------- 1 file changed, 17 insertions(+), 18 deletions(-) diff --git a/frontend/src/components/tables/StreamsTable.jsx b/frontend/src/components/tables/StreamsTable.jsx index 276ce189..078e24d9 100644 --- a/frontend/src/components/tables/StreamsTable.jsx +++ b/frontend/src/components/tables/StreamsTable.jsx @@ -156,7 +156,7 @@ const StreamRowActions = ({ ); }; -const StreamsTable = ({}) => { +const StreamsTable = ({ }) => { const theme = useMantineTheme(); /** @@ -606,23 +606,22 @@ const StreamsTable = ({}) => { {/* Top toolbar with Remove, Assign, Auto-match, and Add buttons */} - {selectedStreamIds.length > 0 && ( - - )} + Date: Thu, 1 May 2025 09:05:51 -0500 Subject: [PATCH 0151/1435] More sleep events. --- apps/proxy/ts_proxy/views.py | 5 +++-- docker/uwsgi.ini | 1 + 2 files changed, 4 insertions(+), 2 deletions(-) diff --git a/apps/proxy/ts_proxy/views.py b/apps/proxy/ts_proxy/views.py index 35ca3648..ef232fd2 100644 --- a/apps/proxy/ts_proxy/views.py +++ b/apps/proxy/ts_proxy/views.py @@ -24,6 +24,7 @@ from .services.channel_service import ChannelService from .url_utils import generate_stream_url, transform_url, get_stream_info_for_switch, get_stream_object, get_alternate_streams from .utils import get_logger from uuid import UUID +import gevent logger = get_logger() @@ -119,7 +120,7 @@ def stream_ts(request, channel_id): # Wait before retrying (using exponential backoff with a cap) wait_time = min(0.5 * (2 ** attempt), 2.0) # Caps at 2 seconds logger.info(f"[{client_id}] Waiting {wait_time:.1f}s for a connection to become available (attempt {attempt+1}/{max_retries})") - time.sleep(wait_time) + gevent.sleep(wait_time) # FIXED: Using gevent.sleep instead of time.sleep if stream_url is None: # Make sure to release any stream locks that might have been acquired @@ -258,7 +259,7 @@ def stream_ts(request, channel_id): proxy_server.stop_channel(channel_id) return JsonResponse({'error': 'Failed to connect'}, status=502) - time.sleep(0.1) + gevent.sleep(0.1) # FIXED: Using gevent.sleep instead of time.sleep logger.info(f"[{client_id}] Successfully initialized channel {channel_id}") channel_initializing = True diff --git a/docker/uwsgi.ini b/docker/uwsgi.ini index 326f4b5d..b1ff362b 100644 --- a/docker/uwsgi.ini +++ b/docker/uwsgi.ini @@ -41,6 +41,7 @@ lazy-apps = true # Improve memory efficiency # Async mode (use gevent for high concurrency) gevent = 100 async = 100 +gevent-monkey-patch = true ; Ensure all blocking operations are patched (especially important for Ryzen CPUs) # Performance tuning thunder-lock = true From c11ce048c7fc60676a2e83cf8d5d149c36fb6e1c Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Thu, 1 May 2025 09:24:16 -0500 Subject: [PATCH 0152/1435] Disable monkey patching. 
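
Context for this rollback: under uWSGI's gevent loop, a plain time.sleep()
suspends the whole worker, while gevent.sleep() yields to the hub. The
previous commit already moved the hot paths to explicit gevent.sleep()
calls, so global monkey patching should not be required. A minimal
standalone illustration of the difference (not project code):

    import time
    import gevent

    def cooperative():
        gevent.sleep(1)  # yields to the hub; the ten greenlets overlap

    start = time.monotonic()
    gevent.joinall([gevent.spawn(cooperative) for _ in range(10)])
    print(f"gevent.sleep: {time.monotonic() - start:.1f}s")  # ~1s

    def blocking():
        time.sleep(1)  # unpatched: blocks the hub, greenlets run serially

    start = time.monotonic()
    gevent.joinall([gevent.spawn(blocking) for _ in range(10)])
    print(f"time.sleep:   {time.monotonic() - start:.1f}s")  # ~10s
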
--- docker/uwsgi.ini | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docker/uwsgi.ini b/docker/uwsgi.ini index b1ff362b..b40259c2 100644 --- a/docker/uwsgi.ini +++ b/docker/uwsgi.ini @@ -41,7 +41,7 @@ lazy-apps = true # Improve memory efficiency # Async mode (use gevent for high concurrency) gevent = 100 async = 100 -gevent-monkey-patch = true ; Ensure all blocking operations are patched (especially important for Ryzen CPUs) +#gevent-monkey-patch = true ; Ensure all blocking operations are patched (especially important for Ryzen CPUs) # Performance tuning thunder-lock = true From e8ee59cf00fd68e86f653efc59890a671cd31867 Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Thu, 1 May 2025 09:31:26 -0500 Subject: [PATCH 0153/1435] Not sure why it didn't push. --- docker/uwsgi.ini | 1 - 1 file changed, 1 deletion(-) diff --git a/docker/uwsgi.ini b/docker/uwsgi.ini index b40259c2..326f4b5d 100644 --- a/docker/uwsgi.ini +++ b/docker/uwsgi.ini @@ -41,7 +41,6 @@ lazy-apps = true # Improve memory efficiency # Async mode (use gevent for high concurrency) gevent = 100 async = 100 -#gevent-monkey-patch = true ; Ensure all blocking operations are patched (especially important for Ryzen CPUs) # Performance tuning thunder-lock = true From 091d9a6823e0b4c9fc35a0afcc37c12556249b51 Mon Sep 17 00:00:00 2001 From: dekzter Date: Thu, 1 May 2025 11:22:38 -0400 Subject: [PATCH 0154/1435] immediately prompt for group filtering when using an XC account --- apps/m3u/api_views.py | 5 ++ apps/m3u/signals.py | 2 +- .../src/components/M3URefreshNotification.jsx | 4 +- frontend/src/components/forms/M3U.jsx | 76 ++++++++++++------- .../src/components/forms/M3UGroupFilter.jsx | 2 +- frontend/src/components/tables/M3UsTable.jsx | 2 +- 6 files changed, 61 insertions(+), 30 deletions(-) diff --git a/apps/m3u/api_views.py b/apps/m3u/api_views.py index 054bdaa9..ddccbc11 100644 --- a/apps/m3u/api_views.py +++ b/apps/m3u/api_views.py @@ -10,6 +10,7 @@ from django.core.cache import cache import os from rest_framework.decorators import action from django.conf import settings +from .tasks import refresh_m3u_groups # Import all models, including UserAgent. from .models import M3UAccount, M3UFilter, ServerGroup, M3UAccountProfile @@ -56,6 +57,10 @@ class M3UAccountViewSet(viewsets.ModelViewSet): # Now call super().create() to create the instance response = super().create(request, *args, **kwargs) + print(response.data.get('account_type')) + if response.data.get('account_type') == M3UAccount.Types.XC: + refresh_m3u_groups(response.data.get('id')) + # After the instance is created, return the response return response diff --git a/apps/m3u/signals.py b/apps/m3u/signals.py index 6e46a0ff..dc96ed57 100644 --- a/apps/m3u/signals.py +++ b/apps/m3u/signals.py @@ -13,7 +13,7 @@ def refresh_account_on_save(sender, instance, created, **kwargs): call a Celery task that fetches & parses that single account if it is active or newly created. 
""" - if created: + if created and instance.account_type != M3UAccount.Types.XC: refresh_m3u_groups.delay(instance.id) @receiver(post_save, sender=M3UAccount) diff --git a/frontend/src/components/M3URefreshNotification.jsx b/frontend/src/components/M3URefreshNotification.jsx index 9e469f43..b1c1984f 100644 --- a/frontend/src/components/M3URefreshNotification.jsx +++ b/frontend/src/components/M3URefreshNotification.jsx @@ -24,8 +24,10 @@ export default function M3URefreshNotification() { return; } - console.log(data); const playlist = playlists.find((pl) => pl.id == data.account); + if (!playlist) { + return; + } setNotificationStatus({ ...notificationStatus, diff --git a/frontend/src/components/forms/M3U.jsx b/frontend/src/components/forms/M3U.jsx index 51370f27..28ad382b 100644 --- a/frontend/src/components/forms/M3U.jsx +++ b/frontend/src/components/forms/M3U.jsx @@ -27,12 +27,19 @@ import usePlaylistsStore from '../../store/playlists'; import { notifications } from '@mantine/notifications'; import { isNotEmpty, useForm } from '@mantine/form'; -const M3U = ({ playlist = null, isOpen, onClose, playlistCreated = false }) => { +const M3U = ({ + m3uAccount = null, + isOpen, + onClose, + playlistCreated = false, +}) => { const theme = useMantineTheme(); const userAgents = useUserAgentsStore((s) => s.userAgents); const fetchChannelGroups = useChannelsStore((s) => s.fetchChannelGroups); + const fetchPlaylists = usePlaylistsStore((s) => s.fetchPlaylists); + const [playlist, setPlaylist] = useState(null); const [file, setFile] = useState(null); const [profileModalOpen, setProfileModalOpen] = useState(false); const [groupFilterModalOpen, setGroupFilterModalOpen] = useState(false); @@ -61,28 +68,31 @@ const M3U = ({ playlist = null, isOpen, onClose, playlistCreated = false }) => { }); useEffect(() => { - if (playlist) { + console.log(m3uAccount); + if (m3uAccount) { + setPlaylist(m3uAccount); form.setValues({ - name: playlist.name, - server_url: playlist.server_url, - max_streams: playlist.max_streams, - user_agent: playlist.user_agent ? `${playlist.user_agent}` : '0', - is_active: playlist.is_active, - refresh_interval: playlist.refresh_interval, - is_xc: playlist.account_type == 'XC', - username: playlist.username ?? '', + name: m3uAccount.name, + server_url: m3uAccount.server_url, + max_streams: m3uAccount.max_streams, + user_agent: m3uAccount.user_agent ? `${m3uAccount.user_agent}` : '0', + is_active: m3uAccount.is_active, + refresh_interval: m3uAccount.refresh_interval, + is_xc: m3uAccount.account_type == 'XC', + username: m3uAccount.username ?? 
'', password: '', }); - if (playlist.account_type == 'XC') { + if (m3uAccount.account_type == 'XC') { setShowCredentialFields(true); } else { setShowCredentialFields(false); } } else { + setPlaylist(null); form.reset(); } - }, [playlist]); + }, [m3uAccount]); useEffect(() => { if (form.values.is_xc) { @@ -124,15 +134,26 @@ const M3U = ({ playlist = null, isOpen, onClose, playlistCreated = false }) => { file, }); - notifications.show({ - title: 'Fetching M3U Groups', - message: 'Filter out groups or refresh M3U once complete.', - // color: 'green.5', - }); + if (values.account_type != 'XC') { + notifications.show({ + title: 'Fetching M3U Groups', + message: 'Filter out groups or refresh M3U once complete.', + // color: 'green.5', + }); - // Don't prompt for group filters, but keeping this here - // in case we want to revive it - newPlaylist = null; + // Don't prompt for group filters, but keeping this here + // in case we want to revive it + newPlaylist = null; + close(); + return; + } + + const updatedPlaylist = await API.getPlaylist(newPlaylist.id); + await Promise.all([fetchChannelGroups(), fetchPlaylists()]); + console.log('opening group options'); + setPlaylist(updatedPlaylist); + setGroupFilterModalOpen(true); + return; } form.reset(); @@ -140,13 +161,16 @@ const M3U = ({ playlist = null, isOpen, onClose, playlistCreated = false }) => { onClose(newPlaylist); }; + const close = () => { + form.reset(); + setFile(null); + setPlaylist(null); + onClose(); + }; + const closeGroupFilter = () => { setGroupFilterModalOpen(false); - if (playlistCreated) { - form.reset(); - setFile(null); - onClose(); - } + close(); }; useEffect(() => { @@ -160,7 +184,7 @@ const M3U = ({ playlist = null, isOpen, onClose, playlistCreated = false }) => { } return ( - + { name: channelGroups[group.channel_group].name, })) ); - }, [channelGroups]); + }, [playlist, channelGroups]); const toggleGroupEnabled = (id) => { setGroupStates( diff --git a/frontend/src/components/tables/M3UsTable.jsx b/frontend/src/components/tables/M3UsTable.jsx index 63a25118..75edba17 100644 --- a/frontend/src/components/tables/M3UsTable.jsx +++ b/frontend/src/components/tables/M3UsTable.jsx @@ -358,7 +358,7 @@ const M3UTable = () => { Date: Thu, 1 May 2025 10:43:07 -0500 Subject: [PATCH 0155/1435] Finding more timers that can be converted to gevents. 
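
The diff below swaps threading.Timer for gevent primitives; the API
correspondence it relies on, as a standalone sketch (not project code):

    import gevent

    # threading.Timer(delay, cb).start()  ->  gevent.spawn_later(delay, cb)
    timer = gevent.spawn_later(2.0, print, "timer fired")

    # timer.is_alive()  ->  not timer.dead
    if not timer.dead:
        # timer.cancel()  ->  timer.kill()
        timer.kill(block=False)

    gevent.sleep(0)  # give the hub a chance to process the kill
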
--- apps/hdhr/ssdp.py | 3 ++- apps/proxy/ts_proxy/stream_buffer.py | 10 +++++----- 2 files changed, 7 insertions(+), 6 deletions(-) diff --git a/apps/hdhr/ssdp.py b/apps/hdhr/ssdp.py index 660d9c2f..d794799a 100644 --- a/apps/hdhr/ssdp.py +++ b/apps/hdhr/ssdp.py @@ -2,6 +2,7 @@ import os import socket import threading import time +import gevent # Add this import from django.conf import settings # SSDP Multicast Address and Port @@ -59,7 +60,7 @@ def ssdp_broadcaster(host_ip): sock.setsockopt(socket.IPPROTO_IP, socket.IP_MULTICAST_TTL, 2) while True: sock.sendto(notify.encode("utf-8"), (SSDP_MULTICAST, SSDP_PORT)) - time.sleep(30) + gevent.sleep(30) # Replace time.sleep with gevent.sleep def start_ssdp(): host_ip = get_host_ip() diff --git a/apps/proxy/ts_proxy/stream_buffer.py b/apps/proxy/ts_proxy/stream_buffer.py index f0be1c52..a5169c3a 100644 --- a/apps/proxy/ts_proxy/stream_buffer.py +++ b/apps/proxy/ts_proxy/stream_buffer.py @@ -12,6 +12,7 @@ from .config_helper import ConfigHelper from .constants import TS_PACKET_SIZE from .utils import get_logger import gevent.event +import gevent # Make sure this import is at the top logger = get_logger() @@ -236,8 +237,8 @@ class StreamBuffer: timers_cancelled = 0 for timer in list(self.fill_timers): try: - if timer and timer.is_alive(): - timer.cancel() + if timer and not timer.dead: # Changed from timer.is_alive() + timer.kill() # Changed from timer.cancel() timers_cancelled += 1 except Exception as e: logger.error(f"Error canceling timer: {e}") @@ -325,8 +326,7 @@ class StreamBuffer: if self.stopping: return None - timer = threading.Timer(delay, callback, args=args, kwargs=kwargs) - timer.daemon = True - timer.start() + # Replace threading.Timer with gevent.spawn_later for better compatibility + timer = gevent.spawn_later(delay, callback, *args, **kwargs) self.fill_timers.append(timer) return timer From 78fc7d9f2b1e4221c3a6d68459551b80d1284942 Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Thu, 1 May 2025 12:42:48 -0500 Subject: [PATCH 0156/1435] Use proper user agents when downloading epgs. And a little more robust user agent selection for m3u downloads. 
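
The settings lookup added below appears in both fetch paths; if it is ever
factored out, the shape would be roughly this (hypothetical helper name,
sketched against the models this patch already imports):

    from core.models import CoreSettings, UserAgent

    FALLBACK_UA = ("Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:138.0) "
                   "Gecko/20100101 Firefox/138.0")

    def resolve_default_user_agent():
        """Return the configured default user agent, else the fallback."""
        setting = CoreSettings.objects.filter(key='default-user-agent').first()
        if setting and setting.value:
            try:
                ua = UserAgent.objects.filter(id=int(setting.value)).first()
                if ua and ua.user_agent:
                    return ua.user_agent
            except (TypeError, ValueError):
                pass
        return FALLBACK_UA
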
--- apps/epg/tasks.py | 32 +++++++++++++++++++++++++++++++- apps/m3u/tasks.py | 13 +++++++++---- 2 files changed, 40 insertions(+), 5 deletions(-) diff --git a/apps/epg/tasks.py b/apps/epg/tasks.py index 74411bdb..cb985a51 100644 --- a/apps/epg/tasks.py +++ b/apps/epg/tasks.py @@ -13,6 +13,7 @@ from django.conf import settings from django.db import transaction from django.utils import timezone from apps.channels.models import Channel +from core.models import UserAgent, CoreSettings from asgiref.sync import async_to_sync from channels.layers import get_channel_layer @@ -67,7 +68,22 @@ def fetch_xmltv(source): logger.info(f"Fetching XMLTV data from source: {source.name}") try: - response = requests.get(source.url, timeout=30) + # Get default user agent from settings + default_user_agent_setting = CoreSettings.objects.filter(key='default-user-agent').first() + user_agent = "Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:138.0) Gecko/20100101 Firefox/138.0" # Fallback default + if default_user_agent_setting and default_user_agent_setting.value: + try: + user_agent_obj = UserAgent.objects.filter(id=int(default_user_agent_setting.value)).first() + if user_agent_obj and user_agent_obj.user_agent: + user_agent = user_agent_obj.user_agent + logger.debug(f"Using default user agent: {user_agent}") + except (ValueError, Exception) as e: + logger.warning(f"Error retrieving default user agent, using fallback: {e}") + headers = { + 'User-Agent': user_agent + } + + response = requests.get(source.url, headers=headers, timeout=30) response.raise_for_status() logger.debug("XMLTV data fetched successfully.") @@ -296,10 +312,24 @@ def parse_programs_for_source(epg_source, tvg_id=None): def fetch_schedules_direct(source): logger.info(f"Fetching Schedules Direct data from source: {source.name}") try: + # Get default user agent from settings + default_user_agent_setting = CoreSettings.objects.filter(key='default-user-agent').first() + user_agent = "Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:138.0) Gecko/20100101 Firefox/138.0" # Fallback default + + if default_user_agent_setting and default_user_agent_setting.value: + try: + user_agent_obj = UserAgent.objects.filter(id=int(default_user_agent_setting.value)).first() + if user_agent_obj and user_agent_obj.user_agent: + user_agent = user_agent_obj.user_agent + logger.debug(f"Using default user agent: {user_agent}") + except (ValueError, Exception) as e: + logger.warning(f"Error retrieving default user agent, using fallback: {e}") + api_url = '' headers = { 'Content-Type': 'application/json', 'Authorization': f'Bearer {source.api_key}', + 'User-Agent': user_agent } logger.debug(f"Requesting subscriptions from Schedules Direct using URL: {api_url}") response = requests.get(api_url, headers=headers, timeout=30) diff --git a/apps/m3u/tasks.py b/apps/m3u/tasks.py index beacaaa2..c6393123 100644 --- a/apps/m3u/tasks.py +++ b/apps/m3u/tasks.py @@ -35,10 +35,15 @@ def fetch_m3u_lines(account, use_cache=False): """Fetch M3U file lines efficiently.""" if account.server_url: if not use_cache or not os.path.exists(file_path): - user_agent = account.get_user_agent() - headers = {"User-Agent": user_agent.user_agent} - logger.info(f"Fetching from URL {account.server_url}") try: + # Try to get account-specific user agent first + user_agent_obj = account.get_user_agent() + user_agent = user_agent_obj.user_agent if user_agent_obj else "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36" + + logger.debug(f"Using user agent: {user_agent} for M3U account: {account.name}") + 
headers = {"User-Agent": user_agent} + logger.info(f"Fetching from URL {account.server_url}") + response = requests.get(account.server_url, headers=headers, stream=True) response.raise_for_status() @@ -74,7 +79,7 @@ def fetch_m3u_lines(account, use_cache=False): send_m3u_update(account.id, "downloading", progress, speed=speed, elapsed_time=elapsed_time, time_remaining=time_remaining) send_m3u_update(account.id, "downloading", 100) - except requests.exceptions.RequestException as e: + except Exception as e: logger.error(f"Error fetching M3U from URL {account.server_url}: {e}") return [] From 90c1c3d2eddce265922d5db31e7e55cc226b5f7b Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Thu, 1 May 2025 13:10:49 -0500 Subject: [PATCH 0157/1435] uwsgi config tuning. --- docker/uwsgi.ini | 10 ++++------ 1 file changed, 4 insertions(+), 6 deletions(-) diff --git a/docker/uwsgi.ini b/docker/uwsgi.ini index 326f4b5d..726730bf 100644 --- a/docker/uwsgi.ini +++ b/docker/uwsgi.ini @@ -24,11 +24,8 @@ vacuum = true die-on-term = true static-map = /static=/app/static -# Worker management (Optimize for I/O bound tasks) +# Worker management workers = 4 -threads = 4 -enable-threads = true -thread-stacksize=512 # Optimize for streaming http = 0.0.0.0:5656 @@ -39,8 +36,9 @@ http-timeout = 600 # Prevent disconnects from long streams lazy-apps = true # Improve memory efficiency # Async mode (use gevent for high concurrency) -gevent = 100 -async = 100 +gevent = 400 # Each unused greenlet costs ~2-4KB of memory +# Higher values have minimal performance impact when idle, but provide capacity for traffic spikes +# If memory usage becomes an issue, reduce this value # Performance tuning thunder-lock = true From 4cb2cb7b206ac0166493f46b97370c0d65aa3721 Mon Sep 17 00:00:00 2001 From: GitHub Actions Date: Thu, 1 May 2025 18:49:59 +0000 Subject: [PATCH 0158/1435] Release v0.4.1 --- version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/version.py b/version.py index 25e27d60..171eaa80 100644 --- a/version.py +++ b/version.py @@ -1,5 +1,5 @@ """ Dispatcharr version information. """ -__version__ = '0.4.0' # Follow semantic versioning (MAJOR.MINOR.PATCH) +__version__ = '0.4.1' # Follow semantic versioning (MAJOR.MINOR.PATCH) __timestamp__ = None # Set during CI/CD build process From d26944a7a51528a2db75abdebaec75c90e1f83bf Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Thu, 1 May 2025 16:01:08 -0500 Subject: [PATCH 0159/1435] Add stale_stream_days field to M3UAccount model and update related logic - Introduced stale_stream_days field to M3UAccount to specify the retention period for streams. - Updated cleanup_streams task to remove streams not seen within the specified stale_stream_days. - Enhanced M3U form to include stale_stream_days input for user configuration. 
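
The retention rule this introduces, sketched standalone (it mirrors the
cleanup_streams logic in the diff below; not a new API):

    from django.utils import timezone

    def stale_cutoff(account):
        # Streams whose last_seen predates this moment get deleted.
        return timezone.now() - timezone.timedelta(days=account.stale_stream_days)

    # With the default stale_stream_days of 7, a stream must reappear in the
    # M3U source at least weekly to survive cleanup:
    # Stream.objects.filter(m3u_account=account,
    #                       last_seen__lt=stale_cutoff(account)).delete()
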
--- .../0008_m3uaccount_stale_stream_days.py | 18 +++++++++++++++ apps/m3u/models.py | 4 ++++ apps/m3u/serializers.py | 2 +- apps/m3u/tasks.py | 23 +++++++++++++++---- frontend/src/components/forms/M3U.jsx | 12 +++++++++- 5 files changed, 52 insertions(+), 7 deletions(-) create mode 100644 apps/m3u/migrations/0008_m3uaccount_stale_stream_days.py diff --git a/apps/m3u/migrations/0008_m3uaccount_stale_stream_days.py b/apps/m3u/migrations/0008_m3uaccount_stale_stream_days.py new file mode 100644 index 00000000..69a1397d --- /dev/null +++ b/apps/m3u/migrations/0008_m3uaccount_stale_stream_days.py @@ -0,0 +1,18 @@ +# Generated by Django 5.1.6 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('m3u', '0007_remove_m3uaccount_uploaded_file_m3uaccount_file_path'), + ] + + operations = [ + migrations.AddField( + model_name='m3uaccount', + name='stale_stream_days', + field=models.PositiveIntegerField(default=7, help_text='Number of days after which a stream will be removed if not seen in the M3U source.'), + ), + ] diff --git a/apps/m3u/models.py b/apps/m3u/models.py index 25a332c6..06c206f6 100644 --- a/apps/m3u/models.py +++ b/apps/m3u/models.py @@ -74,6 +74,10 @@ class M3UAccount(models.Model): refresh_task = models.ForeignKey( PeriodicTask, on_delete=models.SET_NULL, null=True, blank=True ) + stale_stream_days = models.PositiveIntegerField( + default=7, + help_text="Number of days after which a stream will be removed if not seen in the M3U source." + ) def __str__(self): return self.name diff --git a/apps/m3u/serializers.py b/apps/m3u/serializers.py index d79b0117..43015713 100644 --- a/apps/m3u/serializers.py +++ b/apps/m3u/serializers.py @@ -65,7 +65,7 @@ class M3UAccountSerializer(serializers.ModelSerializer): model = M3UAccount fields = [ 'id', 'name', 'server_url', 'file_path', 'server_group', - 'max_streams', 'is_active', 'created_at', 'updated_at', 'filters', 'user_agent', 'profiles', 'locked', + 'max_streams', 'is_active', 'stale_stream_days', 'created_at', 'updated_at', 'filters', 'user_agent', 'profiles', 'locked', 'channel_groups', 'refresh_interval' ] diff --git a/apps/m3u/tasks.py b/apps/m3u/tasks.py index c6393123..20ed1acb 100644 --- a/apps/m3u/tasks.py +++ b/apps/m3u/tasks.py @@ -312,18 +312,31 @@ def cleanup_streams(account_id): m3u_account__enabled=True, ).values_list('id', flat=True) logger.info(f"Found {len(existing_groups)} active groups") - streams = Stream.objects.filter(m3u_account=account) + # Calculate cutoff date for stale streams + stale_cutoff = timezone.now() - timezone.timedelta(days=account.stale_stream_days) + logger.info(f"Removing streams not seen since {stale_cutoff}") + + # Delete streams that are not in active groups streams_to_delete = Stream.objects.filter( m3u_account=account ).exclude( - channel_group__in=existing_groups # Exclude products having any of the excluded tags + channel_group__in=existing_groups ) - # Delete the filtered products - streams_to_delete.delete() + # Also delete streams that haven't been seen for longer than stale_stream_days + stale_streams = Stream.objects.filter( + m3u_account=account, + last_seen__lt=stale_cutoff + ) - logger.info(f"Cleanup complete") + deleted_count = streams_to_delete.count() + stale_count = stale_streams.count() + + streams_to_delete.delete() + stale_streams.delete() + + logger.info(f"Cleanup complete: {deleted_count} streams removed due to group filter, {stale_count} removed as stale") @shared_task def refresh_m3u_groups(account_id, 
use_cache=False, full_refresh=False): diff --git a/frontend/src/components/forms/M3U.jsx b/frontend/src/components/forms/M3U.jsx index bd9cf3c8..8fee0d9b 100644 --- a/frontend/src/components/forms/M3U.jsx +++ b/frontend/src/components/forms/M3U.jsx @@ -52,6 +52,7 @@ const M3U = ({ playlist = null, isOpen, onClose, playlistCreated = false }) => { is_active: true, max_streams: 0, refresh_interval: 24, + stale_stream_days: 7, }, validate: { @@ -65,10 +66,11 @@ const M3U = ({ playlist = null, isOpen, onClose, playlistCreated = false }) => { if (playlist) { form.setValues({ name: playlist.name, - server_url: playlist.server_url, + server_url: playlist.server_url || '', max_streams: playlist.max_streams, user_agent: playlist.user_agent ? `${playlist.user_agent}` : '0', is_active: playlist.is_active, + stale_stream_days: playlist.stale_stream_days || 7, refresh_interval: playlist.refresh_interval, }); } else { @@ -202,6 +204,14 @@ const M3U = ({ playlist = null, isOpen, onClose, playlistCreated = false }) => { key={form.key('refresh_interval')} /> + + Date: Thu, 1 May 2025 17:30:19 -0400 Subject: [PATCH 0160/1435] updated last_seen of existing streams: --- apps/m3u/tasks.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/apps/m3u/tasks.py b/apps/m3u/tasks.py index 20ed1acb..90992dbc 100644 --- a/apps/m3u/tasks.py +++ b/apps/m3u/tasks.py @@ -292,8 +292,8 @@ def process_m3u_batch(account_id, batch, groups, hash_keys): Stream.objects.bulk_create(streams_to_create, ignore_conflicts=True) if streams_to_update: Stream.objects.bulk_update(streams_to_update, { key for key in stream_props.keys() if key not in ["m3u_account", "stream_hash"] and key not in hash_keys}) - # if len(existing_streams.keys()) > 0: - # Stream.objects.bulk_update(existing_streams.values(), ["last_seen"]) + if len(existing_streams.keys()) > 0: + Stream.objects.bulk_update(existing_streams.values(), ["last_seen"]) except Exception as e: logger.error(f"Bulk create failed: {str(e)}") From 0e54062c7358cf284c1394c853098b16aa9d8a12 Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Thu, 1 May 2025 17:42:23 -0500 Subject: [PATCH 0161/1435] Added drivers for hardware acceleration. --- docker/Dockerfile | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/docker/Dockerfile b/docker/Dockerfile index 26b54975..92705403 100644 --- a/docker/Dockerfile +++ b/docker/Dockerfile @@ -76,7 +76,15 @@ RUN apt-get update && \ streamlink \ wget \ gnupg2 \ - lsb-release && \ + lsb-release \ + libva-drm2 \ + libva-x11-2 \ + libva-dev \ + libva-wayland2 \ + vainfo \ + i965-va-driver \ + intel-media-va-driver \ + mesa-va-drivers && \ cp /app/docker/nginx.conf /etc/nginx/sites-enabled/default && \ apt-get clean && \ rm -rf /var/lib/apt/lists/* From 6f1bae819501de06d6084e65a0c607e068554ced Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Fri, 2 May 2025 08:23:11 -0500 Subject: [PATCH 0162/1435] Fixes stream preview in stream table. 
--- frontend/src/components/FloatingVideo.jsx | 64 ++++++++++++++----- .../src/components/tables/StreamsTable.jsx | 3 +- 2 files changed, 51 insertions(+), 16 deletions(-) diff --git a/frontend/src/components/FloatingVideo.jsx b/frontend/src/components/FloatingVideo.jsx index 0be42029..9d8fb09e 100644 --- a/frontend/src/components/FloatingVideo.jsx +++ b/frontend/src/components/FloatingVideo.jsx @@ -18,23 +18,57 @@ export default function FloatingVideo() { return; } - // If the browser supports MSE for live playback, initialize mpegts.js - if (mpegts.getFeatureList().mseLivePlayback) { - const player = mpegts.createPlayer({ - type: 'mpegts', - url: streamUrl, - isLive: true, - // You can include other custom MPEGTS.js config fields here, e.g.: - // cors: true, - // withCredentials: false, - }); + // Check if we have an existing player and clean it up + if (playerRef.current) { + playerRef.current.destroy(); + playerRef.current = null; + } - player.attachMediaElement(videoRef.current); - player.load(); - player.play(); + // Debug log to help diagnose stream issues + console.log("Attempting to play stream:", streamUrl); - // Store player instance so we can clean up later - playerRef.current = player; + try { + // If the browser supports MSE for live playback, initialize mpegts.js + if (mpegts.getFeatureList().mseLivePlayback) { + const player = mpegts.createPlayer({ + type: 'mpegts', // MPEG-TS format + url: streamUrl, + isLive: true, + enableWorker: true, + enableStashBuffer: false, // Try disabling stash buffer for live streams + liveBufferLatencyChasing: true, + liveSync: true, + cors: true, // Enable CORS for cross-domain requests + }); + + player.attachMediaElement(videoRef.current); + + // Add error event handler + player.on(mpegts.Events.ERROR, (errorType, errorDetail) => { + console.error('Player error:', errorType, errorDetail); + // If it's a format issue, show a helpful message + if (errorDetail.includes('Unsupported media type')) { + const message = document.createElement('div'); + message.textContent = "Unsupported stream format. 
Please try a different stream."; + message.style.position = 'absolute'; + message.style.top = '50%'; + message.style.left = '50%'; + message.style.transform = 'translate(-50%, -50%)'; + message.style.color = 'white'; + message.style.textAlign = 'center'; + message.style.width = '100%'; + videoRef.current.parentNode.appendChild(message); + } + }); + + player.load(); + player.play(); + + // Store player instance so we can clean up later + playerRef.current = player; + } + } catch (error) { + console.error("Error initializing player:", error); } // Cleanup when component unmounts or streamUrl changes diff --git a/frontend/src/components/tables/StreamsTable.jsx b/frontend/src/components/tables/StreamsTable.jsx index 078e24d9..00d09b67 100644 --- a/frontend/src/components/tables/StreamsTable.jsx +++ b/frontend/src/components/tables/StreamsTable.jsx @@ -89,8 +89,9 @@ const StreamRowActions = ({ }, []); const onPreview = useCallback(() => { + console.log('Previewing stream:', row.original.name, 'ID:', row.original.id, 'Hash:', row.original.stream_hash); handleWatchStream(row.original.stream_hash); - }, []); + }, [row.original.id]); // Add proper dependency to ensure correct stream return ( <> From e81b6e3189726b2e2efb6d4dfc36647077d22cf7 Mon Sep 17 00:00:00 2001 From: dekzter Date: Fri, 2 May 2025 09:30:24 -0400 Subject: [PATCH 0163/1435] option to add epg source with xc account --- frontend/src/components/forms/M3U.jsx | 73 +++++++++++++++++++-------- 1 file changed, 51 insertions(+), 22 deletions(-) diff --git a/frontend/src/components/forms/M3U.jsx b/frontend/src/components/forms/M3U.jsx index 28ad382b..a080b401 100644 --- a/frontend/src/components/forms/M3U.jsx +++ b/frontend/src/components/forms/M3U.jsx @@ -26,6 +26,7 @@ import useChannelsStore from '../../store/channels'; import usePlaylistsStore from '../../store/playlists'; import { notifications } from '@mantine/notifications'; import { isNotEmpty, useForm } from '@mantine/form'; +import useEPGsStore from '../../store/epgs'; const M3U = ({ m3uAccount = null, @@ -38,6 +39,7 @@ const M3U = ({ const userAgents = useUserAgentsStore((s) => s.userAgents); const fetchChannelGroups = useChannelsStore((s) => s.fetchChannelGroups); const fetchPlaylists = usePlaylistsStore((s) => s.fetchPlaylists); + const fetchEPGs = useEPGsStore((s) => s.fetchEPGs); const [playlist, setPlaylist] = useState(null); const [file, setFile] = useState(null); @@ -55,7 +57,8 @@ const M3U = ({ is_active: true, max_streams: 0, refresh_interval: 24, - is_xc: false, + account_type: 'STD', + create_epg: false, username: '', password: '', }, @@ -78,7 +81,7 @@ const M3U = ({ user_agent: m3uAccount.user_agent ? `${m3uAccount.user_agent}` : '0', is_active: m3uAccount.is_active, refresh_interval: m3uAccount.refresh_interval, - is_xc: m3uAccount.account_type == 'XC', + account_type: m3uAccount.account_type, username: m3uAccount.username ?? '', password: '', }); @@ -95,28 +98,20 @@ const M3U = ({ }, [m3uAccount]); useEffect(() => { - if (form.values.is_xc) { + if (form.values.account_type == 'XC') { setShowCredentialFields(true); } - }, [form.values.is_xc]); + }, [form.values.account_type]); const onSubmit = async () => { - const { ...values } = form.getValues(); + const { create_epg, ...values } = form.getValues(); - if (values.is_xc && values.password == '') { + if (values.account_type == 'XC' && values.password == '') { // If account XC and no password input, assuming no password change // from previously stored value. 
delete values.password; } - if (values.is_xc) { - values.account_type = 'XC'; - } else { - values.account_type = 'STD'; - } - - delete values.is_xc; - if (values.user_agent == '0') { values.user_agent = null; } @@ -134,6 +129,17 @@ const M3U = ({ file, }); + if (create_epg) { + API.addEPG({ + name: values.name, + source_type: 'xmltv', + url: `${values.server_url}/xmltv.php?username=${values.username}&password=${values.password}`, + api_key: '', + is_active: true, + refresh_interval: 24, + }); + } + if (values.account_type != 'XC') { notifications.show({ title: 'Fetching M3U Groups', @@ -149,7 +155,7 @@ const M3U = ({ } const updatedPlaylist = await API.getPlaylist(newPlaylist.id); - await Promise.all([fetchChannelGroups(), fetchPlaylists()]); + await Promise.all([fetchChannelGroups(), fetchPlaylists(), fetchEPGs()]); console.log('opening group options'); setPlaylist(updatedPlaylist); setGroupFilterModalOpen(true); @@ -210,21 +216,44 @@ const M3U = ({ {...form.getInputProps('server_url')} key={form.key('server_url')} /> - - {form.getValues().is_xc && ( + {form.getValues().account_type == 'XC' && ( + + Create EPG + + + + )} - {!form.getValues().is_xc && ( + {form.getValues().account_type != 'XC' && ( Date: Fri, 2 May 2025 10:02:35 -0500 Subject: [PATCH 0164/1435] Fixes console error when playing a stream preview due to channelID being null. --- frontend/src/store/channels.jsx | 16 +++++++++++----- 1 file changed, 11 insertions(+), 5 deletions(-) diff --git a/frontend/src/store/channels.jsx b/frontend/src/store/channels.jsx index 503cb1cc..6d946f94 100644 --- a/frontend/src/store/channels.jsx +++ b/frontend/src/store/channels.jsx @@ -323,11 +323,17 @@ const useChannelsStore = create((set, get) => ({ acc[ch.channel_id] = ch; if (currentStats.channels) { if (oldChannels[ch.channel_id] === undefined) { - notifications.show({ - title: 'New channel streaming', - message: channels[channelsByUUID[ch.channel_id]].name, - color: 'blue.5', - }); + // Add null checks to prevent accessing properties on undefined + const channelId = channelsByUUID[ch.channel_id]; + const channel = channelId ? channels[channelId] : null; + + if (channel) { + notifications.show({ + title: 'New channel streaming', + message: channel.name, + color: 'blue.5', + }); + } } } ch.clients.map((client) => { From e67f31465606415e1fa361b373670453e718833e Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Fri, 2 May 2025 10:22:19 -0500 Subject: [PATCH 0165/1435] Better error handling for web video player. 
--- frontend/src/components/FloatingVideo.jsx | 106 +++++++++++++++++++--- 1 file changed, 93 insertions(+), 13 deletions(-) diff --git a/frontend/src/components/FloatingVideo.jsx b/frontend/src/components/FloatingVideo.jsx index 9d8fb09e..cbb5e1a8 100644 --- a/frontend/src/components/FloatingVideo.jsx +++ b/frontend/src/components/FloatingVideo.jsx @@ -12,17 +12,55 @@ export default function FloatingVideo() { const videoRef = useRef(null); const playerRef = useRef(null); const videoContainerRef = useRef(null); + const isLoadingRef = useRef(false); + + // Safely destroy the player to prevent errors + const safeDestroyPlayer = () => { + try { + if (playerRef.current) { + // Set a flag to ignore abort errors + isLoadingRef.current = false; + + // First unload the source to stop any in-progress fetches + if (videoRef.current) { + // Remove src attribute and force a load to clear any pending requests + videoRef.current.removeAttribute('src'); + videoRef.current.load(); + } + + // Pause the player first + try { + playerRef.current.pause(); + } catch (e) { + // Ignore pause errors + } + + // Use a try-catch block specifically for the destroy call + try { + playerRef.current.destroy(); + } catch (error) { + // Ignore expected abort errors + if (error.name !== 'AbortError' && !error.message?.includes('aborted')) { + console.log("Error during player destruction:", error.message); + } + } finally { + playerRef.current = null; + } + } + } catch (error) { + console.log("Error during player cleanup:", error); + playerRef.current = null; + } + }; useEffect(() => { if (!isVisible || !streamUrl) { + safeDestroyPlayer(); return; } // Check if we have an existing player and clean it up - if (playerRef.current) { - playerRef.current.destroy(); - playerRef.current = null; - } + safeDestroyPlayer(); // Debug log to help diagnose stream issues console.log("Attempting to play stream:", streamUrl); @@ -30,6 +68,9 @@ export default function FloatingVideo() { try { // If the browser supports MSE for live playback, initialize mpegts.js if (mpegts.getFeatureList().mseLivePlayback) { + // Set loading flag + isLoadingRef.current = true; + const player = mpegts.createPlayer({ type: 'mpegts', // MPEG-TS format url: streamUrl, @@ -39,15 +80,35 @@ export default function FloatingVideo() { liveBufferLatencyChasing: true, liveSync: true, cors: true, // Enable CORS for cross-domain requests + // Add error recovery options + autoCleanupSourceBuffer: true, + autoCleanupMaxBackwardDuration: 10, + autoCleanupMinBackwardDuration: 5, + reuseRedirectedURL: true, }); player.attachMediaElement(videoRef.current); + // Add events to track loading state + player.on(mpegts.Events.LOADING_COMPLETE, () => { + isLoadingRef.current = false; + }); + + player.on(mpegts.Events.METADATA_ARRIVED, () => { + isLoadingRef.current = false; + }); + // Add error event handler player.on(mpegts.Events.ERROR, (errorType, errorDetail) => { - console.error('Player error:', errorType, errorDetail); + isLoadingRef.current = false; + + // Filter out aborted errors + if (errorType !== 'NetworkError' || !errorDetail?.includes('aborted')) { + console.error('Player error:', errorType, errorDetail); + } + // If it's a format issue, show a helpful message - if (errorDetail.includes('Unsupported media type')) { + if (errorDetail?.includes('Unsupported media type')) { const message = document.createElement('div'); message.textContent = "Unsupported stream format. 
Please try a different stream."; message.style.position = 'absolute'; @@ -57,29 +118,48 @@ export default function FloatingVideo() { message.style.color = 'white'; message.style.textAlign = 'center'; message.style.width = '100%'; - videoRef.current.parentNode.appendChild(message); + if (videoRef.current?.parentNode) { + videoRef.current.parentNode.appendChild(message); + } } }); player.load(); - player.play(); + + // Don't auto-play until we've loaded properly + player.on(mpegts.Events.MEDIA_INFO, () => { + try { + player.play().catch(e => { + console.log("Auto-play prevented:", e); + }); + } catch (e) { + console.log("Error during play:", e); + } + }); // Store player instance so we can clean up later playerRef.current = player; } } catch (error) { + isLoadingRef.current = false; console.error("Error initializing player:", error); } // Cleanup when component unmounts or streamUrl changes return () => { - if (playerRef.current) { - playerRef.current.destroy(); - playerRef.current = null; - } + safeDestroyPlayer(); }; }, [isVisible, streamUrl]); + // Modified hideVideo handler to clean up player first + const handleClose = () => { + safeDestroyPlayer(); + // Small delay before hiding the video component to ensure cleanup is complete + setTimeout(() => { + hideVideo(); + }, 50); + }; + // If the floating video is hidden or no URL is selected, do not render if (!isVisible || !streamUrl) { return null; @@ -103,7 +183,7 @@ export default function FloatingVideo() { > {/* Simple header row with a close button */} - + {/* The ); From 35579e79fbfe5c8cd533598090b532fa6446279a Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Fri, 2 May 2025 13:02:34 -0500 Subject: [PATCH 0170/1435] Enhance FloatingVideo component with loading and error handling overlays --- frontend/src/components/FloatingVideo.jsx | 111 +++++++++++++++------- 1 file changed, 79 insertions(+), 32 deletions(-) diff --git a/frontend/src/components/FloatingVideo.jsx b/frontend/src/components/FloatingVideo.jsx index cbb5e1a8..46c191eb 100644 --- a/frontend/src/components/FloatingVideo.jsx +++ b/frontend/src/components/FloatingVideo.jsx @@ -1,9 +1,9 @@ // frontend/src/components/FloatingVideo.js -import React, { useEffect, useRef } from 'react'; +import React, { useEffect, useRef, useState } from 'react'; import Draggable from 'react-draggable'; import useVideoStore from '../store/useVideoStore'; import mpegts from 'mpegts.js'; -import { CloseButton, Flex } from '@mantine/core'; +import { CloseButton, Flex, Loader, Text, Box } from '@mantine/core'; export default function FloatingVideo() { const isVisible = useVideoStore((s) => s.isVisible); @@ -12,14 +12,17 @@ export default function FloatingVideo() { const videoRef = useRef(null); const playerRef = useRef(null); const videoContainerRef = useRef(null); - const isLoadingRef = useRef(false); + // Convert ref to state so we can use it for rendering + const [isLoading, setIsLoading] = useState(false); + const [loadError, setLoadError] = useState(null); // Safely destroy the player to prevent errors const safeDestroyPlayer = () => { try { if (playerRef.current) { - // Set a flag to ignore abort errors - isLoadingRef.current = false; + // Set loading to false when destroying player + setIsLoading(false); + setLoadError(null); // First unload the source to stop any in-progress fetches if (videoRef.current) { @@ -62,6 +65,10 @@ export default function FloatingVideo() { // Check if we have an existing player and clean it up safeDestroyPlayer(); + // Set loading state to true when starting a new 
stream + setIsLoading(true); + setLoadError(null); + // Debug log to help diagnose stream issues console.log("Attempting to play stream:", streamUrl); @@ -69,7 +76,7 @@ export default function FloatingVideo() { // If the browser supports MSE for live playback, initialize mpegts.js if (mpegts.getFeatureList().mseLivePlayback) { // Set loading flag - isLoadingRef.current = true; + setIsLoading(true); const player = mpegts.createPlayer({ type: 'mpegts', // MPEG-TS format @@ -91,36 +98,21 @@ export default function FloatingVideo() { // Add events to track loading state player.on(mpegts.Events.LOADING_COMPLETE, () => { - isLoadingRef.current = false; + setIsLoading(false); }); player.on(mpegts.Events.METADATA_ARRIVED, () => { - isLoadingRef.current = false; + setIsLoading(false); }); // Add error event handler player.on(mpegts.Events.ERROR, (errorType, errorDetail) => { - isLoadingRef.current = false; + setIsLoading(false); // Filter out aborted errors if (errorType !== 'NetworkError' || !errorDetail?.includes('aborted')) { console.error('Player error:', errorType, errorDetail); - } - - // If it's a format issue, show a helpful message - if (errorDetail?.includes('Unsupported media type')) { - const message = document.createElement('div'); - message.textContent = "Unsupported stream format. Please try a different stream."; - message.style.position = 'absolute'; - message.style.top = '50%'; - message.style.left = '50%'; - message.style.transform = 'translate(-50%, -50%)'; - message.style.color = 'white'; - message.style.textAlign = 'center'; - message.style.width = '100%'; - if (videoRef.current?.parentNode) { - videoRef.current.parentNode.appendChild(message); - } + setLoadError(`Error: ${errorType}${errorDetail ? ` - ${errorDetail}` : ''}`); } }); @@ -128,12 +120,15 @@ export default function FloatingVideo() { // Don't auto-play until we've loaded properly player.on(mpegts.Events.MEDIA_INFO, () => { + setIsLoading(false); try { player.play().catch(e => { console.log("Auto-play prevented:", e); + setLoadError("Auto-play was prevented. Click play to start."); }); } catch (e) { console.log("Error during play:", e); + setLoadError(`Playback error: ${e.message}`); } }); @@ -141,7 +136,8 @@ export default function FloatingVideo() { playerRef.current = player; } } catch (error) { - isLoadingRef.current = false; + setIsLoading(false); + setLoadError(`Initialization error: ${error.message}`); console.error("Error initializing player:", error); } @@ -186,12 +182,63 @@ export default function FloatingVideo() { - {/* The
);

From 3de768d9541533a2325d19886bd60175803359cf Mon Sep 17 00:00:00 2001
From: dekzter
Date: Fri, 2 May 2025 15:38:26 -0400
Subject: [PATCH 0171/1435] fixed conflicting migration

---
 ...009_m3uaccount_account_type_m3uaccount_password_and_more.py} | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
 rename apps/m3u/migrations/{0008_m3uaccount_account_type_m3uaccount_password_and_more.py => 0009_m3uaccount_account_type_m3uaccount_password_and_more.py} (90%)

diff --git a/apps/m3u/migrations/0008_m3uaccount_account_type_m3uaccount_password_and_more.py b/apps/m3u/migrations/0009_m3uaccount_account_type_m3uaccount_password_and_more.py
similarity index 90%
rename from apps/m3u/migrations/0008_m3uaccount_account_type_m3uaccount_password_and_more.py
rename to apps/m3u/migrations/0009_m3uaccount_account_type_m3uaccount_password_and_more.py
index 02d2937f..d57f7ccd 100644
--- a/apps/m3u/migrations/0008_m3uaccount_account_type_m3uaccount_password_and_more.py
+++ b/apps/m3u/migrations/0009_m3uaccount_account_type_m3uaccount_password_and_more.py
@@ -6,7 +6,7 @@ from django.db import migrations, models
 class Migration(migrations.Migration):

     dependencies = [
-        ('m3u', '0007_remove_m3uaccount_uploaded_file_m3uaccount_file_path'),
+        ('m3u', '0008_m3uaccount_stale_stream_days'),
     ]

     operations = [

From 2fff015206edb89545e39a7b825595faf7e3e932 Mon Sep 17 00:00:00 2001
From: SergeantPanda
Date: Fri, 2 May 2025 17:33:51 -0500
Subject: [PATCH 0172/1435] Enhance refresh_single_m3u_account to track completed tasks and ensure DB transactions are committed before cleanup

---
 apps/m3u/tasks.py | 12 ++++++++++--
 1 file changed, 10 insertions(+), 2 deletions(-)

diff --git a/apps/m3u/tasks.py b/apps/m3u/tasks.py
index 90992dbc..7c2dd4a7 100644
--- a/apps/m3u/tasks.py
+++ b/apps/m3u/tasks.py
@@ -455,12 +455,15 @@ def refresh_single_m3u_account(account_id):
     # result = task_group.apply_async()
     result = task_group.apply_async()

+    # Wait for all tasks to complete and collect their result IDs
+    completed_task_ids = set()
     while completed_batches < total_batches:
         for async_result in result:
-            if async_result.ready():  # If the task has completed
+            if async_result.ready() and async_result.id not in completed_task_ids:  # If the task has completed and we haven't counted it
                 task_result = async_result.result  # The result of the task
                 logger.debug(f"Task completed with result: {task_result}")
                 completed_batches += 1
+                completed_task_ids.add(async_result.id)  # Mark this task as processed

                 # Calculate progress
                 progress = int((completed_batches / total_batches) * 100)
@@ -477,7 +480,12 @@ def refresh_single_m3u_account(account_id):
             else:
                 logger.debug(f"Task is still running.")

-    # Run cleanup
+    # Ensure all database transactions are committed before cleanup
+    logger.info(f"All {total_batches} tasks completed, ensuring DB transactions are committed before cleanup")
+    # Force a simple DB query to ensure connection sync
+    Stream.objects.filter(id=-1).exists()  # This will never find anything but ensures DB sync
+
+    # Now run cleanup
     cleanup_streams(account_id)

     send_m3u_update(account_id, "parsing", 100)

From 3b4edde90f0847ed7cb024291fe907a733025d6f Mon Sep 17 00:00:00 2001
From: SergeantPanda
Date: Fri, 2 May 2025 21:56:30 -0500
Subject: [PATCH 0173/1435] Fixes wrong edit and stream previews playing when filtering. 
Include dependencies in useCallback for onEdit and onDelete, and update dependency array in StreamsTable for useTable --- frontend/src/components/tables/StreamsTable.jsx | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/frontend/src/components/tables/StreamsTable.jsx b/frontend/src/components/tables/StreamsTable.jsx index 00d09b67..7e83ba0d 100644 --- a/frontend/src/components/tables/StreamsTable.jsx +++ b/frontend/src/components/tables/StreamsTable.jsx @@ -82,11 +82,11 @@ const StreamRowActions = ({ const onEdit = useCallback(() => { editStream(row.original); - }, []); + }, [row.original.id, editStream]); const onDelete = useCallback(() => { deleteStream(row.original.id); - }, []); + }, [row.original.id, deleteStream]); const onPreview = useCallback(() => { console.log('Previewing stream:', row.original.name, 'ID:', row.original.id, 'Hash:', row.original.stream_hash); @@ -543,7 +543,7 @@ const StreamsTable = ({ }) => { ); } }, - [selectedChannelIds, channelSelectionStreams] + [selectedChannelIds, channelSelectionStreams, theme, editStream, deleteStream, handleWatchStream] ); const table = useTable({ From 1a0d065ecab409a11d83a03511a3357607b3ecad Mon Sep 17 00:00:00 2001 From: dekzter Date: Sat, 3 May 2025 08:01:22 -0400 Subject: [PATCH 0174/1435] ui settings with configurable table size, added setting to set / rehash stream hashes --- apps/channels/models.py | 1 - core/api_views.py | 12 +- core/tasks.py | 34 +++ .../src/components/tables/ChannelsTable.jsx | 22 +- .../tables/CustomTable/CustomTable.jsx | 5 +- .../components/tables/StreamProfilesTable.jsx | 28 +- .../src/components/tables/StreamsTable.jsx | 64 +++-- .../src/components/tables/UserAgentsTable.jsx | 31 +- frontend/src/components/tables/table.css | 18 +- frontend/src/pages/Settings.jsx | 269 +++++++++++------- 10 files changed, 303 insertions(+), 181 deletions(-) diff --git a/apps/channels/models.py b/apps/channels/models.py index 56636347..f158c2d0 100644 --- a/apps/channels/models.py +++ b/apps/channels/models.py @@ -1,6 +1,5 @@ from django.db import models from django.core.exceptions import ValidationError -from core.models import StreamProfile from django.conf import settings from core.models import StreamProfile, CoreSettings from core.utils import RedisClient diff --git a/core/api_views.py b/core/api_views.py index 7f3ecf57..77473b5d 100644 --- a/core/api_views.py +++ b/core/api_views.py @@ -3,7 +3,7 @@ from rest_framework import viewsets, status from rest_framework.response import Response from django.shortcuts import get_object_or_404 -from .models import UserAgent, StreamProfile, CoreSettings +from .models import UserAgent, StreamProfile, CoreSettings, STREAM_HASH_KEY from .serializers import UserAgentSerializer, StreamProfileSerializer, CoreSettingsSerializer from rest_framework.permissions import IsAuthenticated from rest_framework.decorators import api_view, permission_classes @@ -11,6 +11,7 @@ from drf_yasg.utils import swagger_auto_schema import socket import requests import os +from core.tasks import rehash_streams class UserAgentViewSet(viewsets.ModelViewSet): """ @@ -34,6 +35,15 @@ class CoreSettingsViewSet(viewsets.ModelViewSet): queryset = CoreSettings.objects.all() serializer_class = CoreSettingsSerializer + def update(self, request, *args, **kwargs): + instance = self.get_object() + response = super().update(request, *args, **kwargs) + if instance.key == STREAM_HASH_KEY: + if instance.value != request.data['value']: + rehash_streams.delay(request.data['value'].split(',')) + + return 
response + @swagger_auto_schema( method='get', operation_description="Endpoint for environment details", diff --git a/core/tasks.py b/core/tasks.py index 83682a69..64a89c7a 100644 --- a/core/tasks.py +++ b/core/tasks.py @@ -15,6 +15,8 @@ from apps.epg.models import EPGSource from apps.m3u.tasks import refresh_single_m3u_account from apps.epg.tasks import refresh_epg_data from .models import CoreSettings +from apps.channels.models import Stream, ChannelStream +from django.db import transaction logger = logging.getLogger(__name__) @@ -249,3 +251,35 @@ def fetch_channel_stats(): "data": {"success": True, "type": "channel_stats", "stats": json.dumps({'channels': all_channels, 'count': len(all_channels)})} }, ) + +@shared_task +def rehash_streams(keys): + batch_size = 1000 + queryset = Stream.objects.all() + + hash_keys = {} + total_records = queryset.count() + for start in range(0, total_records, batch_size): + with transaction.atomic(): + batch = queryset[start:start + batch_size] + for obj in batch: + stream_hash = Stream.generate_hash_key(obj.name, obj.url, obj.tvg_id, keys) + if stream_hash in hash_keys: + # Handle duplicate keys and remove any without channels + stream_channels = ChannelStream.objects.filter(stream_id=obj.id).count() + if stream_channels == 0: + obj.delete() + continue + + + existing_stream_channels = ChannelStream.objects.filter(stream_id=hash_keys[stream_hash]).count() + if existing_stream_channels == 0: + Stream.objects.filter(id=hash_keys[stream_hash]).delete() + + obj.stream_hash = stream_hash + obj.save(update_fields=['stream_hash']) + hash_keys[stream_hash] = obj.id + + logger.debug(f"Re-hashed {batch_size} streams") + + logger.debug(f"Re-hashing complete") diff --git a/frontend/src/components/tables/ChannelsTable.jsx b/frontend/src/components/tables/ChannelsTable.jsx index bdca0722..1cf08e40 100644 --- a/frontend/src/components/tables/ChannelsTable.jsx +++ b/frontend/src/components/tables/ChannelsTable.jsx @@ -93,6 +93,8 @@ const ChannelRowActions = React.memo( createRecording, getChannelURL, }) => { + const [tableSize, _] = useLocalStorage('table-size', 'default'); + const onEdit = useCallback(() => { editChannel(row.original); }, [row.original]); @@ -109,11 +111,14 @@ const ChannelRowActions = React.memo( createRecording(row.original); }, [row.original]); + const iconSize = + tableSize == 'default' ? 'sm' : tableSize == 'compact' ? 'xs' : 'md'; + return (
- + @@ -218,6 +223,7 @@ const ChannelsTable = ({}) => { // store/settings const env_mode = useSettingsStore((s) => s.environment.env_mode); const showVideo = useVideoStore((s) => s.showVideo); + const [tableSize, _] = useLocalStorage('table-size', 'default'); /** * useMemo @@ -528,7 +534,7 @@ const ChannelsTable = ({}) => { size: 40, cell: ({ getValue }) => ( - {getValue()} + {getValue()} ), }, @@ -543,7 +549,7 @@ const ChannelsTable = ({}) => { textOverflow: 'ellipsis', }} > - {getValue()} + {getValue()} ), }, @@ -561,7 +567,7 @@ const ChannelsTable = ({}) => { textOverflow: 'ellipsis', }} > - {getValue()} + {getValue()} ), }, @@ -586,7 +592,7 @@ const ChannelsTable = ({}) => { }, { id: 'actions', - size: 75, + size: tableSize == 'compact' ? 75 : 100, header: '', cell: ({ row }) => ( { + const [tableSize, _] = useLocalStorage('table-size', 'default'); + return ( { ), mantineTableContainerProps: { style: { - height: 'calc(60vh - 100px)', + // height: 'calc(60vh - 100px)', overflowY: 'auto', }, }, }); return ( - - - - Stream Profiles - - - + { + const [tableSize, _] = useLocalStorage('table-size', 'default'); const channelSelectionStreams = useChannelsTableStore( (state) => state.channels.find((chan) => chan.id === selectedChannelIds[0])?.streams @@ -89,15 +91,25 @@ const StreamRowActions = ({ }, []); const onPreview = useCallback(() => { - console.log('Previewing stream:', row.original.name, 'ID:', row.original.id, 'Hash:', row.original.stream_hash); + console.log( + 'Previewing stream:', + row.original.name, + 'ID:', + row.original.id, + 'Hash:', + row.original.stream_hash + ); handleWatchStream(row.original.stream_hash); }, [row.original.id]); // Add proper dependency to ensure correct stream + const iconSize = + tableSize == 'default' ? 'sm' : tableSize == 'compact' ? 'xs' : 'md'; + return ( <> - + @@ -157,7 +169,7 @@ const StreamRowActions = ({ ); }; -const StreamsTable = ({ }) => { +const StreamsTable = ({}) => { const theme = useMantineTheme(); /** @@ -203,6 +215,7 @@ const StreamsTable = ({ }) => { ); const env_mode = useSettingsStore((s) => s.environment.env_mode); const showVideo = useVideoStore((s) => s.showVideo); + const [tableSize, _] = useLocalStorage('table-size', 'default'); const handleSelectClick = (e) => { e.stopPropagation(); @@ -216,7 +229,7 @@ const StreamsTable = ({ }) => { () => [ { id: 'actions', - size: 60, + size: tableSize == 'compact' ? 
60 : 80, }, { id: 'select', @@ -233,7 +246,7 @@ const StreamsTable = ({ }) => { textOverflow: 'ellipsis', }} > - {getValue()} + {getValue()} ), }, @@ -251,7 +264,7 @@ const StreamsTable = ({ }) => { textOverflow: 'ellipsis', }} > - {getValue()} + {getValue()} ), }, @@ -269,7 +282,7 @@ const StreamsTable = ({ }) => { }} > - {getValue()} + {getValue()} ), @@ -609,17 +622,34 @@ const StreamsTable = ({ }) => { diff --git a/frontend/src/components/tables/UserAgentsTable.jsx b/frontend/src/components/tables/UserAgentsTable.jsx index e0c9b504..6d649ba7 100644 --- a/frontend/src/components/tables/UserAgentsTable.jsx +++ b/frontend/src/components/tables/UserAgentsTable.jsx @@ -221,7 +221,7 @@ const UserAgentsTable = () => { ), mantineTableContainerProps: { style: { - height: 'calc(60vh - 100px)', + maxHeight: 300, overflowY: 'auto', // margin: 5, }, @@ -234,34 +234,7 @@ const UserAgentsTable = () => { }); return ( - - - - User-Agents - - - + { const settings = useSettingsStore((s) => s.settings); const userAgents = useUserAgentsStore((s) => s.userAgents); const streamProfiles = useStreamProfilesStore((s) => s.profiles); + // UI / local storage settings + const [tableSize, setTableSize] = useLocalStorage('table-size', 'default'); + const regionChoices = [ { value: 'ad', label: 'AD' }, { value: 'ae', label: 'AE' }, @@ -284,6 +286,7 @@ const SettingsPage = () => { 'default-stream-profile': '', 'preferred-region': '', 'auto-import-mapped-files': true, + 'm3u-hash-key': [], }, validate: { @@ -295,8 +298,9 @@ const SettingsPage = () => { useEffect(() => { if (settings) { - form.setValues( - Object.entries(settings).reduce((acc, [key, value]) => { + console.log(settings); + const formValues = Object.entries(settings).reduce( + (acc, [key, value]) => { // Modify each value based on its own properties switch (value.value) { case 'true': @@ -307,10 +311,23 @@ const SettingsPage = () => { break; } - acc[key] = value.value; + let val = null; + switch (key) { + case 'm3u-hash-key': + val = value.value.split(','); + break; + default: + val = value.value; + break; + } + + acc[key] = val; return acc; - }, {}) + }, + {} ); + console.log(formValues); + form.setValues(formValues); } }, [settings]); @@ -333,98 +350,158 @@ const SettingsPage = () => { } }; + const onUISettingsChange = (name, value) => { + switch (name) { + case 'table-size': + setTableSize(value); + break; + } + }; + return ( - -
- - - Settings - -
- ({ - value: `${option.id}`, - label: option.name, - }))} - /> - onUISettingsChange('table-size', val)} + data={[ + { + value: 'default', + label: 'Default', + }, + { + value: 'compact', + label: 'Compact', + }, + { + value: 'large', + label: 'Large', + }, + ]} /> - + + - - - -
-
-
+ + Stream Settings + +
+ ({ + value: `${option.id}`, + label: option.name, + }))} + /> + { + return { + label: USER_LEVEL_LABELS[value], + value: `${value}`, + }; + })} + value={formik.values.user_level} + onChange={(value) => { + formik.setFieldValue('user_level', value); + }} + error={ + formik.errors.user_level ? formik.touched.user_level : '' + } + /> @@ -667,9 +692,9 @@ const ChannelForm = ({ channel = null, isOpen, onClose }) => { : '' } size="xs" - step={0.1} // Add step prop to allow decimal inputs - precision={1} // Specify decimal precision - removeTrailingZeros // Optional: remove trailing zeros for cleaner display + step={0.1} // Add step prop to allow decimal inputs + precision={1} // Specify decimal precision + removeTrailingZeros // Optional: remove trailing zeros for cleaner display /> { label="Gracenote StationId" value={formik.values.tvc_guide_stationid} onChange={formik.handleChange} - error={formik.errors.tvc_guide_stationid ? formik.touched.tvc_guide_stationid : ''} + error={ + formik.errors.tvc_guide_stationid + ? formik.touched.tvc_guide_stationid + : '' + } size="xs" /> diff --git a/frontend/src/components/forms/Channels.jsx b/frontend/src/components/forms/Channels.jsx new file mode 100644 index 00000000..93be8012 --- /dev/null +++ b/frontend/src/components/forms/Channels.jsx @@ -0,0 +1,831 @@ +import React, { useState, useEffect, useRef } from 'react'; +import { useFormik } from 'formik'; +import * as Yup from 'yup'; +import useChannelsStore from '../../store/channels'; +import API from '../../api'; +import useStreamProfilesStore from '../../store/streamProfiles'; +import useStreamsStore from '../../store/streams'; +import ChannelGroupForm from './ChannelGroup'; +import usePlaylistsStore from '../../store/playlists'; +import logo from '../../images/logo.png'; +import { + Box, + Button, + Modal, + TextInput, + NativeSelect, + Text, + Group, + ActionIcon, + Center, + Grid, + Flex, + Select, + Divider, + Stack, + useMantineTheme, + Popover, + ScrollArea, + Tooltip, + NumberInput, + Image, + UnstyledButton, +} from '@mantine/core'; +import { ListOrdered, SquarePlus, SquareX, X } from 'lucide-react'; +import useEPGsStore from '../../store/epgs'; +import { Dropzone } from '@mantine/dropzone'; +import { FixedSizeList as List } from 'react-window'; + +const ChannelsForm = ({ channel = null, isOpen, onClose }) => { + const theme = useMantineTheme(); + + const listRef = useRef(null); + const logoListRef = useRef(null); + const groupListRef = useRef(null); + + const channelGroups = useChannelsStore((s) => s.channelGroups); + const logos = useChannelsStore((s) => s.logos); + const fetchLogos = useChannelsStore((s) => s.fetchLogos); + const streams = useStreamsStore((state) => state.streams); + const streamProfiles = useStreamProfilesStore((s) => s.profiles); + const playlists = usePlaylistsStore((s) => s.playlists); + const epgs = useEPGsStore((s) => s.epgs); + const tvgs = useEPGsStore((s) => s.tvgs); + const tvgsById = useEPGsStore((s) => s.tvgsById); + + const [logoPreview, setLogoPreview] = useState(null); + const [channelStreams, setChannelStreams] = useState([]); + const [channelGroupModelOpen, setChannelGroupModalOpen] = useState(false); + const [epgPopoverOpened, setEpgPopoverOpened] = useState(false); + const [logoPopoverOpened, setLogoPopoverOpened] = useState(false); + const [selectedEPG, setSelectedEPG] = useState(''); + const [tvgFilter, setTvgFilter] = useState(''); + const [logoFilter, setLogoFilter] = useState(''); + const [logoOptions, setLogoOptions] = useState([]); + + const 
[groupPopoverOpened, setGroupPopoverOpened] = useState(false); + const [groupFilter, setGroupFilter] = useState(''); + const groupOptions = Object.values(channelGroups); + + const addStream = (stream) => { + const streamSet = new Set(channelStreams); + streamSet.add(stream); + setChannelStreams(Array.from(streamSet)); + }; + + const removeStream = (stream) => { + const streamSet = new Set(channelStreams); + streamSet.delete(stream); + setChannelStreams(Array.from(streamSet)); + }; + + const handleLogoChange = async (files) => { + if (files.length === 1) { + const retval = await API.uploadLogo(files[0]); + await fetchLogos(); + setLogoPreview(retval.cache_url); + formik.setFieldValue('logo_id', retval.id); + } else { + setLogoPreview(null); + } + }; + + const formik = useFormik({ + initialValues: { + name: '', + channel_number: '', // Change from 0 to empty string for consistency + channel_group_id: + Object.keys(channelGroups).length > 0 + ? Object.keys(channelGroups)[0] + : '', + stream_profile_id: '0', + tvg_id: '', + tvc_guide_stationid: '', + epg_data_id: '', + logo_id: '', + }, + validationSchema: Yup.object({ + name: Yup.string().required('Name is required'), + channel_group_id: Yup.string().required('Channel group is required'), + }), + onSubmit: async (values, { setSubmitting }) => { + let response; + + try { + const formattedValues = { ...values }; + + // Convert empty or "0" stream_profile_id to null for the API + if ( + !formattedValues.stream_profile_id || + formattedValues.stream_profile_id === '0' + ) { + formattedValues.stream_profile_id = null; + } + + // Ensure tvg_id is properly included (no empty strings) + formattedValues.tvg_id = formattedValues.tvg_id || null; + + // Ensure tvc_guide_stationid is properly included (no empty strings) + formattedValues.tvc_guide_stationid = + formattedValues.tvc_guide_stationid || null; + + if (channel) { + // If there's an EPG to set, use our enhanced endpoint + if (values.epg_data_id !== (channel.epg_data_id ?? '')) { + // Use the special endpoint to set EPG and trigger refresh + const epgResponse = await API.setChannelEPG( + channel.id, + values.epg_data_id + ); + + // Remove epg_data_id from values since we've handled it separately + const { epg_data_id, ...otherValues } = formattedValues; + + // Update other channel fields if needed + if (Object.keys(otherValues).length > 0) { + response = await API.updateChannel({ + id: channel.id, + ...otherValues, + streams: channelStreams.map((stream) => stream.id), + }); + } + } else { + // No EPG change, regular update + response = await API.updateChannel({ + id: channel.id, + ...formattedValues, + streams: channelStreams.map((stream) => stream.id), + }); + } + } else { + // New channel creation - use the standard method + response = await API.addChannel({ + ...formattedValues, + streams: channelStreams.map((stream) => stream.id), + }); + } + } catch (error) { + console.error('Error saving channel:', error); + } + + formik.resetForm(); + API.requeryChannels(); + setSubmitting(false); + setTvgFilter(''); + setLogoFilter(''); + onClose(); + }, + }); + + useEffect(() => { + if (channel) { + if (channel.epg_data_id) { + const epgSource = epgs[tvgsById[channel.epg_data_id]?.epg_source]; + setSelectedEPG(epgSource ? `${epgSource.id}` : ''); + } + + formik.setValues({ + name: channel.name || '', + channel_number: + channel.channel_number !== null ? channel.channel_number : '', + channel_group_id: channel.channel_group_id + ? 
`${channel.channel_group_id}` + : '', + stream_profile_id: channel.stream_profile_id + ? `${channel.stream_profile_id}` + : '0', + tvg_id: channel.tvg_id || '', + tvc_guide_stationid: channel.tvc_guide_stationid || '', + epg_data_id: channel.epg_data_id ?? '', + logo_id: channel.logo_id ? `${channel.logo_id}` : '', + }); + + setChannelStreams(channel.streams || []); + } else { + formik.resetForm(); + setTvgFilter(''); + setLogoFilter(''); + } + }, [channel, tvgsById, channelGroups]); + + useEffect(() => { + setLogoOptions([{ id: '0', name: 'Default' }].concat(Object.values(logos))); + }, [logos]); + + const renderLogoOption = ({ option, checked }) => { + return ( +
+ +
+ ); + }; + + // const activeStreamsTable = useMantineReactTable({ + // data: channelStreams, + // columns: useMemo( + // () => [ + // { + // header: 'Name', + // accessorKey: 'name', + // Cell: ({ cell }) => ( + //
+ // {cell.getValue()} + //
+ // ), + // }, + // { + // header: 'M3U', + // accessorKey: 'group_name', + // Cell: ({ cell }) => ( + //
+ // {cell.getValue()} + //
+ // ), + // }, + // ], + // [] + // ), + // enableSorting: false, + // enableBottomToolbar: false, + // enableTopToolbar: false, + // columnFilterDisplayMode: 'popover', + // enablePagination: false, + // enableRowVirtualization: true, + // enableRowOrdering: true, + // rowVirtualizerOptions: { overscan: 5 }, //optionally customize the row virtualizer + // initialState: { + // density: 'compact', + // }, + // enableRowActions: true, + // positionActionsColumn: 'last', + // renderRowActions: ({ row }) => ( + // <> + // removeStream(row.original)} + // > + // {/* Small icon size */} + // + // + // ), + // mantineTableContainerProps: { + // style: { + // height: '200px', + // }, + // }, + // mantineRowDragHandleProps: ({ table }) => ({ + // onDragEnd: () => { + // const { draggingRow, hoveredRow } = table.getState(); + + // if (hoveredRow && draggingRow) { + // channelStreams.splice( + // hoveredRow.index, + // 0, + // channelStreams.splice(draggingRow.index, 1)[0] + // ); + + // setChannelStreams([...channelStreams]); + // } + // }, + // }), + // }); + + // const availableStreamsTable = useMantineReactTable({ + // data: streams, + // columns: useMemo( + // () => [ + // { + // header: 'Name', + // accessorKey: 'name', + // }, + // { + // header: 'M3U', + // accessorFn: (row) => + // playlists.find((playlist) => playlist.id === row.m3u_account)?.name, + // }, + // ], + // [] + // ), + // enableBottomToolbar: false, + // enableTopToolbar: false, + // columnFilterDisplayMode: 'popover', + // enablePagination: false, + // enableRowVirtualization: true, + // rowVirtualizerOptions: { overscan: 5 }, //optionally customize the row virtualizer + // initialState: { + // density: 'compact', + // }, + // enableRowActions: true, + // renderRowActions: ({ row }) => ( + // <> + // addStream(row.original)} + // > + // {/* Small icon size */} + // + // + // ), + // positionActionsColumn: 'last', + // mantineTableContainerProps: { + // style: { + // height: '200px', + // }, + // }, + // }); + + // Update the handler for when channel group modal is closed + const handleChannelGroupModalClose = (newGroup) => { + setChannelGroupModalOpen(false); + + // If a new group was created and returned, update the form with it + if (newGroup && newGroup.id) { + // Preserve all current form values while updating just the channel_group_id + formik.setValues({ + ...formik.values, + channel_group_id: `${newGroup.id}`, + }); + } + }; + + if (!isOpen) { + return <>; + } + + const filteredTvgs = tvgs + .filter((tvg) => tvg.epg_source == selectedEPG) + .filter( + (tvg) => + tvg.name.toLowerCase().includes(tvgFilter.toLowerCase()) || + tvg.tvg_id.toLowerCase().includes(tvgFilter.toLowerCase()) + ); + + const filteredLogos = logoOptions.filter((logo) => + logo.name.toLowerCase().includes(logoFilter.toLowerCase()) + ); + + const filteredGroups = groupOptions.filter((group) => + group.name.toLowerCase().includes(groupFilter.toLowerCase()) + ); + + return ( + + + Channels + + } + styles={{ content: { '--mantine-color-body': '#27272A' } }} + > + + + + + + + + + setGroupPopoverOpened(true)} + size="xs" + /> + + + e.stopPropagation()}> + + + setGroupFilter(event.currentTarget.value) + } + mb="xs" + size="xs" + /> + + + + + {({ index, style }) => ( + + + { + formik.setFieldValue( + 'channel_group_id', + filteredGroups[index].id + ); + setGroupPopoverOpened(false); + }} + > + + {filteredGroups[index].name} + + + + + )} + + + + + + {/* { + formik.setFieldValue('stream_profile_id', value); // Update Formik's state with the new value + }} + 
error={ + formik.errors.stream_profile_id + ? formik.touched.stream_profile_id + : '' + } + data={[{ value: '0', label: '(use default)' }].concat( + streamProfiles.map((option) => ({ + value: `${option.id}`, + label: option.name, + })) + )} + size="xs" + /> + + + + + + + + + setLogoPopoverOpened(true)} + size="xs" + /> + + + e.stopPropagation()}> + + + setLogoFilter(event.currentTarget.value) + } + mb="xs" + size="xs" + /> + + + + + {({ index, style }) => ( +
+
+ { + formik.setFieldValue( + 'logo_id', + filteredLogos[index].id + ); + }} + /> +
+
+ )} +
+
+
+
+ + +
+ + + + + OR + + + + + + Upload Logo + console.log('rejected files', files)} + maxSize={5 * 1024 ** 2} + > + + + Drag images here or click to select files + + + + +
+
+
+ + + + + + formik.setFieldValue('channel_number', value) + } + error={ + formik.errors.channel_number + ? formik.touched.channel_number + : '' + } + size="xs" + /> + + + + + + + + + EPG + +
+ } + readOnly + value={ + formik.values.epg_data_id + ? tvgsById[formik.values.epg_data_id].name + : 'Dummy' + } + onClick={() => setEpgPopoverOpened(true)} + size="xs" + rightSection={ + + { + e.stopPropagation(); + formik.setFieldValue('epg_data_id', null); + }} + title="Create new group" + size="small" + variant="transparent" + > + + + + } + /> + + + e.stopPropagation()}> + + { + return { + label: USER_LEVEL_LABELS[value], + value: `${value}`, + }; + })} + {...form.getInputProps('user_level')} + key={form.key('user_level')} + /> + + profile.id != 0) + .map((profile) => ({ + label: profile.name, + value: `${profile.id}`, + }))} + /> + + + + + + + +
+ ); +}; + +export default User; diff --git a/frontend/src/components/tables/ChannelTableStreams.jsx b/frontend/src/components/tables/ChannelTableStreams.jsx index 53542119..097a2ba8 100644 --- a/frontend/src/components/tables/ChannelTableStreams.jsx +++ b/frontend/src/components/tables/ChannelTableStreams.jsx @@ -36,6 +36,8 @@ import { import { useSortable } from '@dnd-kit/sortable'; import { CSS } from '@dnd-kit/utilities'; import { shallow } from 'zustand/shallow'; +import useAuthStore from '../../store/auth'; +import { USER_LEVELS } from '../../constants'; const RowDragHandleCell = ({ rowId }) => { const { attributes, listeners, setNodeRef } = useDraggable({ @@ -120,6 +122,7 @@ const ChannelStreams = ({ channel, isExpanded }) => { shallow ); const playlists = usePlaylistsStore((s) => s.playlists); + const authUser = useAuthStore((s) => s.user); const [data, setData] = useState(channelStreams || []); @@ -168,6 +171,7 @@ const ChannelStreams = ({ channel, isExpanded }) => { removeStream(row.original)} + disabled={authUser.user_level != USER_LEVELS.ADMIN} />
@@ -192,7 +196,11 @@ const ChannelStreams = ({ channel, isExpanded }) => { getCoreRowModel: getCoreRowModel(), }); - function handleDragEnd(event) { + const handleDragEnd = (event) => { + if (authUser.user_level != USER_LEVELS.ADMIN) { + return; + } + const { active, over } = event; if (active && over && active.id !== over.id) { setData((data) => { @@ -211,7 +219,7 @@ const ChannelStreams = ({ channel, isExpanded }) => { return retval; //this is just a splice util }); } - } + }; const sensors = useSensors( useSensor(MouseSensor, {}), diff --git a/frontend/src/components/tables/ChannelsTable.jsx b/frontend/src/components/tables/ChannelsTable.jsx index cb81e988..77c9d6a5 100644 --- a/frontend/src/components/tables/ChannelsTable.jsx +++ b/frontend/src/components/tables/ChannelsTable.jsx @@ -51,6 +51,8 @@ import ChannelsTableOnboarding from './ChannelsTable/ChannelsTableOnboarding'; import ChannelTableHeader from './ChannelsTable/ChannelTableHeader'; import useWarningsStore from '../../store/warnings'; import ConfirmationDialog from '../ConfirmationDialog'; +import useAuthStore from '../../store/auth'; +import { USER_LEVELS } from '../../constants'; const m3uUrlBase = `${window.location.protocol}//${window.location.host}/output/m3u`; const epgUrlBase = `${window.location.protocol}//${window.location.host}/output/epg`; @@ -108,6 +110,8 @@ const ChannelRowActions = React.memo( const channelUuid = row.original.uuid; const [tableSize, _] = useLocalStorage('table-size', 'default'); + const authUser = useAuthStore((s) => s.user); + const onEdit = useCallback(() => { // Use the ID directly to avoid issues with filtered tables console.log(`Editing channel ID: ${channelId}`); @@ -141,6 +145,7 @@ const ChannelRowActions = React.memo( variant="transparent" color={theme.tailwind.yellow[3]} onClick={onEdit} + disabled={authUser.user_level != USER_LEVELS.ADMIN} >
@@ -150,6 +155,7 @@ const ChannelRowActions = React.memo( variant="transparent" color={theme.tailwind.red[6]} onClick={onDelete} + disabled={authUser.user_level != USER_LEVELS.ADMIN} > @@ -181,6 +187,7 @@ const ChannelRowActions = React.memo( { +const ChannelsTable = ({}) => { const theme = useMantineTheme(); /** @@ -596,8 +603,12 @@ const ChannelsTable = ({ }) => { cell: ({ getValue }) => { const value = getValue(); // Format as integer if no decimal component - const formattedValue = value !== null && value !== undefined ? - (value === Math.floor(value) ? Math.floor(value) : value) : ''; + const formattedValue = + value !== null && value !== undefined + ? value === Math.floor(value) + ? Math.floor(value) + : value + : ''; return ( @@ -797,8 +808,8 @@ const ChannelsTable = ({ }) => { return hasStreams ? {} // Default style for channels with streams : { - className: 'no-streams-row', // Add a class instead of background color - }; + className: 'no-streams-row', // Add a class instead of background color + }; }, }); diff --git a/frontend/src/components/tables/ChannelsTable/ChannelTableHeader.jsx b/frontend/src/components/tables/ChannelsTable/ChannelTableHeader.jsx index 47b7a46c..354728bb 100644 --- a/frontend/src/components/tables/ChannelsTable/ChannelTableHeader.jsx +++ b/frontend/src/components/tables/ChannelsTable/ChannelTableHeader.jsx @@ -24,12 +24,16 @@ import { import API from '../../../api'; import { notifications } from '@mantine/notifications'; import useChannelsStore from '../../../store/channels'; +import useAuthStore from '../../../store/auth'; +import { USER_LEVELS } from '../../../constants'; const CreateProfilePopover = React.memo(() => { const [opened, setOpened] = useState(false); const [name, setName] = useState(''); const theme = useMantineTheme(); + const authUser = useAuthStore((s) => s.user); + const setOpen = () => { setName(''); setOpened(!opened); @@ -54,6 +58,7 @@ const CreateProfilePopover = React.memo(() => { variant="transparent" color={theme.tailwind.green[5]} onClick={setOpen} + disabled={authUser.user_level != USER_LEVELS.ADMIN} > @@ -95,6 +100,7 @@ const ChannelTableHeader = ({ const profiles = useChannelsStore((s) => s.profiles); const selectedProfileId = useChannelsStore((s) => s.selectedProfileId); const setSelectedProfileId = useChannelsStore((s) => s.setSelectedProfileId); + const authUser = useAuthStore((s) => s.user); const deleteProfile = async (id) => { await API.deleteChannelProfile(id); @@ -152,6 +158,7 @@ const ChannelTableHeader = ({ e.stopPropagation(); deleteProfile(option.value); }} + disabled={authUser.user_level != USER_LEVELS.ADMIN} > @@ -193,7 +200,10 @@ const ChannelTableHeader = ({ variant="default" size="xs" onClick={deleteChannels} - disabled={selectedTableIds.length == 0} + disabled={ + selectedTableIds.length == 0 || + authUser.user_level != USER_LEVELS.ADMIN + } > Remove @@ -206,7 +216,10 @@ const ChannelTableHeader = ({ variant="default" size="xs" p={5} - disabled={selectedTableIds.length == 0} + disabled={ + selectedTableIds.length == 0 || + authUser.user_level != USER_LEVELS.ADMIN + } > Assign @@ -240,6 +253,7 @@ const ChannelTableHeader = ({ size="xs" onClick={matchEpg} p={5} + disabled={authUser.user_level != USER_LEVELS.ADMIN} > Auto-Match @@ -250,12 +264,15 @@ const ChannelTableHeader = ({ variant="light" size="xs" onClick={() => editChannel()} + disabled={authUser.user_level != USER_LEVELS.ADMIN} p={5} color={theme.tailwind.green[5]} style={{ - borderWidth: '1px', - borderColor: theme.tailwind.green[5], - color: 'white', 
+ ...(authUser.user_level == USER_LEVELS.ADMIN && { + borderWidth: '1px', + borderColor: theme.tailwind.green[5], + color: 'white', + }), }} > Add diff --git a/frontend/src/constants.js b/frontend/src/constants.js new file mode 100644 index 00000000..04c62ecb --- /dev/null +++ b/frontend/src/constants.js @@ -0,0 +1,11 @@ +export const USER_LEVELS = { + STREAMER: 0, + READ_ONLY: 1, + ADMIN: 10, +}; + +export const USER_LEVEL_LABELS = { + [USER_LEVELS.STREAMER]: 'Streamer', + [USER_LEVELS.READ_ONLY]: 'Read Only', + [USER_LEVELS.ADMIN]: 'Admin', +}; diff --git a/frontend/src/pages/Channels-test.jsx b/frontend/src/pages/Channels-test.jsx deleted file mode 100644 index 14e3319c..00000000 --- a/frontend/src/pages/Channels-test.jsx +++ /dev/null @@ -1,15 +0,0 @@ -import React from 'react'; -import { Allotment } from 'allotment'; -import { Box, Container } from '@mantine/core'; -import 'allotment/dist/style.css'; - -const ChannelsPage = () => { - return ( - -
Pane 1
-
Pane 1
-
- ); -}; - -export default ChannelsPage; diff --git a/frontend/src/pages/Channels.jsx b/frontend/src/pages/Channels.jsx index 079e1eaa..a2f3c55b 100644 --- a/frontend/src/pages/Channels.jsx +++ b/frontend/src/pages/Channels.jsx @@ -3,8 +3,20 @@ import ChannelsTable from '../components/tables/ChannelsTable'; import StreamsTable from '../components/tables/StreamsTable'; import { Box } from '@mantine/core'; import { Allotment } from 'allotment'; +import { USER_LEVELS } from '../constants'; +import useAuthStore from '../store/auth'; const ChannelsPage = () => { + const authUser = useAuthStore((s) => s.user); + + if (authUser.user_level <= USER_LEVELS.READ_ONLY) { + return ( + + + + ); + } + return (
{ const settings = useSettingsStore((s) => s.settings); const userAgents = useUserAgentsStore((s) => s.userAgents); const streamProfiles = useStreamProfilesStore((s) => s.profiles); + const authUser = useAuthStore((s) => s.user); // UI / local storage settings const [tableSize, setTableSize] = useLocalStorage('table-size', 'default'); @@ -366,137 +369,178 @@ const SettingsPage = () => { > - - UI Settings - - ({ - value: `${option.id}`, - label: option.name, - }))} - /> - - ({ - label: r.label, - value: `${r.value}`, - }))} - /> - - - - Auto-Import Mapped Files - - - - - onUISettingsChange('table-size', val)} data={[ { - value: 'name', - label: 'Name', + value: 'default', + label: 'Default', }, { - value: 'url', - label: 'URL', + value: 'compact', + label: 'Compact', }, { - value: 'tvg_id', - label: 'TVG-ID', + value: 'large', + label: 'Large', }, ]} - {...form.getInputProps('m3u-hash-key')} - key={form.key('m3u-hash-key')} /> + + , + ].concat( + authUser.user_level == USER_LEVELS.ADMIN + ? [ + + Stream Settings + +
+ ({ + value: `${option.id}`, + label: option.name, + }))} + /> + ({ + value: `${option.id}`, + label: option.name, + })) + )} + size="xs" + /> + + { - return { - label: USER_LEVEL_LABELS[value], - value: `${value}`, - }; - })} - {...form.getInputProps('user_level')} - key={form.key('user_level')} - /> + {showPermissions && ( + + { @@ -143,7 +162,40 @@ const User = ({ user = null, isOpen, onClose }) => { {...form.getInputProps('user_level')} key={form.key('user_level')} /> + )} + + + + + + + + + } + /> + + + {showPermissions && ( { value: `${profile.id}`, }))} /> - - )} + )} + From 8302acd78a6640c79e571fee0082d2beca2bd65d Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Thu, 22 May 2025 15:34:20 -0500 Subject: [PATCH 0402/1435] Additional cleanup while processing batches. --- apps/m3u/tasks.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/apps/m3u/tasks.py b/apps/m3u/tasks.py index 39622024..3873af31 100644 --- a/apps/m3u/tasks.py +++ b/apps/m3u/tasks.py @@ -556,7 +556,7 @@ def process_m3u_batch(account_id, batch, groups, hash_keys): retval = f"Batch processed: {len(streams_to_create)} created, {len(streams_to_update)} updated." # Aggressive garbage collection - del streams_to_create, streams_to_update, stream_hashes, existing_streams + del streams_to_create, streams_to_update, stream_hashes, existing_streams, stream_props, invalid_streams, changed_streams, unchanged_streams from core.utils import cleanup_memory cleanup_memory(log_usage=True, force_collection=True) @@ -1028,6 +1028,7 @@ def refresh_single_m3u_account(account_id): account.save(update_fields=['status']) if account.account_type == M3UAccount.Types.STADNARD: + logger.debug(f"Processing Standard account with groups: {existing_groups}") # Break into batches and process in parallel batches = [extinf_data[i:i + BATCH_SIZE] for i in range(0, len(extinf_data), BATCH_SIZE)] task_group = group(process_m3u_batch.s(account_id, batch, existing_groups, hash_keys) for batch in batches) From 925850a01217e82180fad40f02af4b75f5d4848e Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Thu, 22 May 2025 16:51:20 -0500 Subject: [PATCH 0403/1435] Fix change_streams and unchanged_streams possibly not existing when trying to clean up. --- apps/m3u/tasks.py | 8 +++----- 1 file changed, 3 insertions(+), 5 deletions(-) diff --git a/apps/m3u/tasks.py b/apps/m3u/tasks.py index 3873af31..4fb6dfca 100644 --- a/apps/m3u/tasks.py +++ b/apps/m3u/tasks.py @@ -226,11 +226,6 @@ def parse_extinf_line(line: str) -> dict: logger.debug(f"EXTINF parsed result: {result}") return result -import re -import logging - -logger = logging.getLogger(__name__) - def _matches_filters(stream_name: str, group_name: str, filters): """Check if a stream or group name matches a precompiled regex filter.""" compiled_filters = [(re.compile(f.regex_pattern, re.IGNORECASE), f.exclude) for f in filters] @@ -437,6 +432,9 @@ def process_m3u_batch(account_id, batch, groups, hash_keys): streams_to_update = [] stream_hashes = {} invalid_streams = [] + # Initialize these variables to prevent UnboundLocalError during cleanup + changed_streams = [] + unchanged_streams = [] # Log hash key configuration logger.debug(f"Using hash keys: {hash_keys}") From f87ab4b07171fa098e0cbb17fd400b52b458cb43 Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Thu, 22 May 2025 21:52:28 -0500 Subject: [PATCH 0404/1435] Rolled back some earlier memory omptimizations that were causing issues with extremely large m3us. 
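[Editor's note] Part of what this patch reverts is the hard-coded Redis memory cap; the core/utils.py hunk below leaves an environment-driven variant commented out and adds an OOM fallback instead. A hedged sketch of that configuration pattern, assuming the stock redis-py client — the environment variable names mirror the commented-out block and are not active Dispatcharr settings:

    import os
    import redis

    def apply_memory_settings(client: redis.Redis) -> None:
        # Read limits from the environment, falling back to safe defaults
        max_memory = os.environ.get("REDIS_MAX_MEMORY", "512mb")
        policy = os.environ.get("REDIS_EVICTION_POLICY", "allkeys-lru")
        try:
            client.config_set("maxmemory-policy", policy)
            client.config_set("maxmemory", max_memory)
        except redis.exceptions.ResponseError as exc:
            if "OOM" in str(exc):
                # Emergency headroom so Redis keeps answering, as the patch below does
                client.config_set("maxmemory", "768mb")
            else:
                raise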
--- apps/m3u/tasks.py | 148 +++++++----------------------------------- core/utils.py | 29 +++++++-- dispatcharr/celery.py | 2 +- 3 files changed, 45 insertions(+), 134 deletions(-) diff --git a/apps/m3u/tasks.py b/apps/m3u/tasks.py index 4fb6dfca..b1b1170d 100644 --- a/apps/m3u/tasks.py +++ b/apps/m3u/tasks.py @@ -22,11 +22,11 @@ from core.utils import RedisClient, acquire_task_lock, release_task_lock from core.models import CoreSettings, UserAgent from asgiref.sync import async_to_sync from core.xtream_codes import Client as XCClient +from core.utils import send_websocket_update logger = logging.getLogger(__name__) BATCH_SIZE = 1000 -SKIP_EXTS = {} m3u_dir = os.path.join(settings.MEDIA_ROOT, "cached_m3u") def fetch_m3u_lines(account, use_cache=False): @@ -184,48 +184,22 @@ def parse_extinf_line(line: str) -> dict: - 'name': the value from tvg-name (if present) or the display name otherwise. """ if not line.startswith("#EXTINF:"): - logger.debug(f"Not an EXTINF line: {line[:50]}...") return None - content = line[len("#EXTINF:"):].strip() - logger.debug(f"Parsing EXTINF content: {content[:100]}...") - # Split on the first comma that is not inside quotes. parts = re.split(r',(?=(?:[^"]*"[^"]*")*[^"]*$)', content, maxsplit=1) if len(parts) != 2: - logger.warning(f"Invalid EXTINF format - couldn't split at comma: {content[:100]}...") return None - attributes_part, display_name = parts[0], parts[1].strip() - - # Debug raw attribute parsing - logger.debug(f"Attribute part: {attributes_part[:100]}...") - logger.debug(f"Display name: {display_name[:100]}...") - - # Extract attributes with more detailed logging - try: - attr_matches = re.findall(r'([^\s]+)=["\']([^"\']+)["\']', attributes_part) - if not attr_matches: - logger.warning(f"No attributes found in: {attributes_part[:100]}...") - - attrs = dict(attr_matches) - logger.debug(f"Extracted attributes: {attrs}") - except Exception as e: - logger.error(f"Error parsing attributes: {str(e)}", exc_info=True) - attrs = {} - + attrs = dict(re.findall(r'([^\s]+)=["\']([^"\']+)["\']', attributes_part)) # Use tvg-name attribute if available; otherwise, use the display name. 
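    # [Editor's note] Hedged illustration of the two regexes above; the sample
    # line is invented, not part of the original patch. The lookahead splits
    # only on commas that sit outside double quotes, so quoted values survive:
    #   line    = '#EXTINF:-1 tvg-id="a.b" tvg-name="A, B" group-title="News",A B'
    #   content = line[len("#EXTINF:"):].strip()
    #   re.split(r',(?=(?:[^"]*"[^"]*")*[^"]*$)', content, maxsplit=1)[1]
    #     -> 'A B'            (the comma inside "A, B" is not a split point)
    #   dict(re.findall(r'([^\s]+)=["\']([^"\']+)["\']', content))
    #     -> {'tvg-id': 'a.b', 'tvg-name': 'A, B', 'group-title': 'News'}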
name = attrs.get('tvg-name', display_name) - - result = { + return { 'attributes': attrs, 'display_name': display_name, 'name': name } - logger.debug(f"EXTINF parsed result: {result}") - return result - def _matches_filters(stream_name: str, group_name: str, filters): """Check if a stream or group name matches a precompiled regex filter.""" compiled_filters = [(re.compile(f.regex_pattern, re.IGNORECASE), f.exclude) for f in filters] @@ -266,7 +240,7 @@ def process_groups(account, groups): groups_to_create = [] for group_name, custom_props in groups.items(): logger.debug(f"Handling group: {group_name}") - if (group_name not in existing_groups) and (group_name not in SKIP_EXTS): + if (group_name not in existing_groups): groups_to_create.append(ChannelGroup( name=group_name, )) @@ -426,51 +400,24 @@ def process_m3u_batch(account_id, batch, groups, hash_keys): """Processes a batch of M3U streams using bulk operations.""" account = M3UAccount.objects.get(id=account_id) - logger.debug(f"Processing batch of {len(batch)} streams for account {account_id}") - streams_to_create = [] streams_to_update = [] stream_hashes = {} - invalid_streams = [] - # Initialize these variables to prevent UnboundLocalError during cleanup - changed_streams = [] - unchanged_streams = [] - - # Log hash key configuration - logger.debug(f"Using hash keys: {hash_keys}") # compiled_filters = [(f.filter_type, re.compile(f.regex_pattern, re.IGNORECASE)) for f in filters] logger.debug(f"Processing batch of {len(batch)}") - for stream_index, stream_info in enumerate(batch): + for stream_info in batch: try: - # Extract basic stream info with better error handling - try: - name = stream_info["name"] - url = stream_info["url"] - attrs = stream_info["attributes"] - tvg_id = attrs.get("tvg-id", "") - tvg_logo = attrs.get("tvg-logo", "") - group_title = attrs.get("group-title", "Default Group") - except KeyError as e: - logger.warning(f"Missing required field in stream {stream_index}: {e}") - logger.debug(f"Stream data: {stream_info}") - invalid_streams.append((stream_index, f"Missing field: {e}")) - continue + name, url = stream_info["name"], stream_info["url"] + tvg_id, tvg_logo = stream_info["attributes"].get("tvg-id", ""), stream_info["attributes"].get("tvg-logo", "") + group_title = stream_info["attributes"].get("group-title", "Default Group") # Filter out disabled groups for this account if group_title not in groups: - logger.debug(f"Skipping stream in disabled group: '{group_title}', name: '{name}'") - continue - - # Generate hash key with error handling - try: - stream_hash = Stream.generate_hash_key(name, url, tvg_id, hash_keys) - logger.debug(f"Generated hash {stream_hash} for stream '{name}'") - except Exception as e: - logger.error(f"Error generating hash for stream '{name}': {str(e)}") - invalid_streams.append((stream_index, f"Hash generation error: {str(e)}")) + logger.debug(f"Skipping stream in disabled group: {group_title}") continue + stream_hash = Stream.generate_hash_key(name, url, tvg_id, hash_keys) stream_props = { "name": name, "url": url, @@ -479,25 +426,14 @@ def process_m3u_batch(account_id, batch, groups, hash_keys): "m3u_account": account, "channel_group_id": int(groups.get(group_title)), "stream_hash": stream_hash, - "custom_properties": json.dumps(attrs), + "custom_properties": json.dumps(stream_info["attributes"]), } if stream_hash not in stream_hashes: stream_hashes[stream_hash] = stream_props except Exception as e: - logger.error(f"Failed to process stream at index {stream_index}: {e}", exc_info=True) - 
if "name" in stream_info: - logger.error(f"Stream name: {stream_info['name']}") - logger.error(f"Stream data: {json.dumps(stream_info)[:500]}") - invalid_streams.append((stream_index, f"Processing error: {str(e)}")) - - # Log invalid stream summary - if invalid_streams: - logger.warning(f"Found {len(invalid_streams)} invalid streams in batch") - for i, (idx, error) in enumerate(invalid_streams[:5]): # Log first 5 - logger.warning(f"Invalid stream #{i+1} at index {idx}: {error}") - if len(invalid_streams) > 5: - logger.warning(f"... and {len(invalid_streams) - 5} more invalid streams") + logger.error(f"Failed to process stream {name}: {e}") + logger.error(json.dumps(stream_info)) existing_streams = {s.stream_hash: s for s in Stream.objects.filter(stream_hash__in=stream_hashes.keys())} @@ -554,9 +490,9 @@ def process_m3u_batch(account_id, batch, groups, hash_keys): retval = f"Batch processed: {len(streams_to_create)} created, {len(streams_to_update)} updated." # Aggressive garbage collection - del streams_to_create, streams_to_update, stream_hashes, existing_streams, stream_props, invalid_streams, changed_streams, unchanged_streams - from core.utils import cleanup_memory - cleanup_memory(log_usage=True, force_collection=True) + #del streams_to_create, streams_to_update, stream_hashes, existing_streams + #from core.utils import cleanup_memory + #cleanup_memory(log_usage=True, force_collection=True) return retval @@ -1193,8 +1129,6 @@ def refresh_single_m3u_account(account_id): return f"Dispatched jobs complete." -from core.utils import send_websocket_update - def send_m3u_update(account_id, action, progress, **kwargs): # Start with the base data dictionary data = { @@ -1215,51 +1149,13 @@ def send_m3u_update(account_id, action, progress, **kwargs): except: pass # If account can't be retrieved, continue without these fields - # Add the additional key-value pairs from kwargs with size limiting - for key, value in kwargs.items(): - # Handle large arrays - limit to summary data - if isinstance(value, (list, tuple)) and len(value) > 100: - data[key] = f"{len(value)} items (truncated for performance)" - # Handle very large integers - use abbreviations for streams - elif key in ['streams_processed', 'streams_created', 'streams_updated', 'streams_deleted'] and isinstance(value, int) and value > 10000: - # Format as "226K" instead of 226154 - data[key] = f"{value//1000}K" if value >= 1000 else value - # Handle other large values that might be serialized to JSON - elif isinstance(value, (dict, object)) and key not in ['status', 'action', 'progress']: - try: - # Use a safer approach for complex objects - if hasattr(value, '__dict__'): - # Just store the class name and id if available - data[key] = f"{value.__class__.__name__}" - if hasattr(value, 'id'): - data[key] += f"(id={value.id})" - else: - # For dictionaries, limit based on size - data[key] = value - except: - # If we can't serialize, skip this value - data[key] = f"[Object of type {type(value).__name__}]" - else: - # Default case - add the value as is - data[key] = value + # Add the additional key-value pairs from kwargs + data.update(kwargs) - # Protect against message size limits in WebSocket protocol - # Most implementations limit to ~1MB, we'll be conservative - try: - # Use the standardized function with memory management - # Enable garbage collection for certain operations - collect_garbage = action == "parsing" and progress % 25 == 0 - - # Add extra garbage collection for large stream operations - if any(key in kwargs for key in 
['streams_processed', 'streams_created', 'streams_updated']) and \ - any(isinstance(kwargs.get(key), int) and kwargs.get(key, 0) > 10000 - for key in ['streams_processed', 'streams_created', 'streams_updated']): - collect_garbage = True - - send_websocket_update('updates', 'update', data, collect_garbage=collect_garbage) - except Exception as e: - # Log the error but don't crash the process - logger.warning(f"Error sending WebSocket update: {e}") + # Use the standardized function with memory management + # Enable garbage collection for certain operations + collect_garbage = action == "parsing" and progress % 25 == 0 + send_websocket_update('updates', 'update', data, collect_garbage=collect_garbage) # Explicitly clear data reference to help garbage collection data = None diff --git a/core/utils.py b/core/utils.py index fcff03e5..039b0695 100644 --- a/core/utils.py +++ b/core/utils.py @@ -59,9 +59,16 @@ class RedisClient: client.config_set('save', '') # Disable RDB snapshots client.config_set('appendonly', 'no') # Disable AOF logging - # Set optimal memory settings - client.config_set('maxmemory-policy', 'allkeys-lru') # Use LRU eviction - client.config_set('maxmemory', '256mb') # Set reasonable memory limit + # Set optimal memory settings with environment variable support + # Get max memory from environment or use a larger default (512MB instead of 256MB) + #max_memory = os.environ.get('REDIS_MAX_MEMORY', '512mb') + #eviction_policy = os.environ.get('REDIS_EVICTION_POLICY', 'allkeys-lru') + + # Apply memory settings + #client.config_set('maxmemory-policy', eviction_policy) + #client.config_set('maxmemory', max_memory) + + #logger.info(f"Redis configured with maxmemory={max_memory}, policy={eviction_policy}") # Disable protected mode when in debug mode if os.environ.get('DISPATCHARR_DEBUG', '').lower() == 'true': @@ -69,10 +76,18 @@ class RedisClient: logger.warning("Redis protected mode disabled for debug environment") logger.trace("Redis persistence disabled for better performance") - except redis.exceptions.ResponseError: - # This might fail if Redis is configured to prohibit CONFIG command - # or if running in protected mode - that's okay - logger.error("Could not modify Redis persistence settings (may be restricted)") + except redis.exceptions.ResponseError as e: + # Improve error handling for Redis configuration errors + if "OOM" in str(e): + logger.error(f"Redis OOM during configuration: {e}") + # Try to increase maxmemory as an emergency measure + try: + client.config_set('maxmemory', '768mb') + logger.warning("Applied emergency Redis memory increase to 768MB") + except: + pass + else: + logger.error(f"Redis configuration error: {e}") logger.info(f"Connected to Redis at {redis_host}:{redis_port}/{redis_db}") diff --git a/dispatcharr/celery.py b/dispatcharr/celery.py index 855acacd..8856d330 100644 --- a/dispatcharr/celery.py +++ b/dispatcharr/celery.py @@ -50,7 +50,7 @@ app.conf.update( ) # Add memory cleanup after task completion -@task_postrun.connect # Use the imported signal +#@task_postrun.connect # Use the imported signal def cleanup_task_memory(**kwargs): """Clean up memory after each task completes""" # Get task name from kwargs From 0b0373f4ee3a555534959c2b6dda08d9cb842b29 Mon Sep 17 00:00:00 2001 From: dekzter Date: Fri, 23 May 2025 08:21:31 -0400 Subject: [PATCH 0405/1435] allow 'all' for streamer class by relating no profiles --- apps/output/views.py | 46 +++++++++++++++++--------- apps/proxy/ts_proxy/views.py | 6 ++-- frontend/src/components/forms/User.jsx | 34 
++++++++++++++----- 3 files changed, 60 insertions(+), 26 deletions(-) diff --git a/apps/output/views.py b/apps/output/views.py index e666043f..f95e4861 100644 --- a/apps/output/views.py +++ b/apps/output/views.py @@ -22,13 +22,17 @@ def generate_m3u(request, profile_name=None, user=None): """ if user is not None: if user.user_level == 0: - channel_profiles = user.channel_profiles.all() filters = { - "channelprofilemembership__channel_profile__in": channel_profiles, "channelprofilemembership__enabled": True, "user_level__lte": user.user_level, } + if user.channel_profiles.count() != 0: + channel_profiles = user.channel_profiles.all() + filters["channelprofilemembership__channel_profile__in"] = ( + channel_profiles + ) + channels = Channel.objects.filter(**filters).order_by("channel_number") else: channels = Channel.objects.filter(user_level__lte=user.user_level).order_by( @@ -207,13 +211,17 @@ def generate_epg(request, profile_name=None, user=None): if user is not None: if user.user_level == 0: - channel_profiles = user.channel_profiles.all() filters = { - "channelprofilemembership__channel_profile__in": channel_profiles, "channelprofilemembership__enabled": True, "user_level__lte": user.user_level, } + if user.channel_profiles.count() != 0: + channel_profiles = user.channel_profiles.all() + filters["channelprofilemembership__channel_profile__in"] = ( + channel_profiles + ) + channels = Channel.objects.filter(**filters).order_by("channel_number") else: channels = Channel.objects.filter(user_level__lte=user.user_level).order_by( @@ -490,7 +498,7 @@ def xc_get(request): raise Http404() if not action: - return generate_m3u(request, user) + return generate_m3u(request, None, user) def xc_xmltv(request): @@ -506,15 +514,19 @@ def xc_get_live_categories(user): response = [] if user.user_level == 0: - # Only get data from active profile - channel_profiles = user.channel_profiles.all() - print(channel_profiles) + filters = { + "channels__channelprofilemembership__enabled": True, + "channels__user_level": 0, + } - channel_groups = ChannelGroup.objects.filter( - channels__channelprofilemembership__channel_profile__in=channel_profiles, - channels__channelprofilemembership__enabled=True, - channels__user_level=0, - ).distinct() + if user.channel_profiles.count() != 0: + # Only get data from active profile + channel_profiles = user.channel_profiles.all() + filters["channels__channelprofilemembership__channel_profile__in"] = ( + channel_profiles + ) + + channel_groups = ChannelGroup.objects.filter(**filters).distinct() else: channel_groups = ChannelGroup.objects.filter( channels__isnull=False, channels__user_level__lte=user.user_level @@ -536,14 +548,16 @@ def xc_get_live_streams(request, user, category_id=None): streams = [] if user.user_level == 0: - # Only get data from active profile - channel_profiles = user.channel_profiles.all() filters = { - "channelprofilemembership__channel_profile__in": channel_profiles, "channelprofilemembership__enabled": True, "user_level__lte": user.user_level, } + if user.channel_profiles.count() > 0: + # Only get data from active profile + channel_profiles = user.channel_profiles.all() + filters["channelprofilemembership__channel_profile__in"] = channel_profiles + if category_id is not None: filters["channel_group__id"] = category_id diff --git a/apps/proxy/ts_proxy/views.py b/apps/proxy/ts_proxy/views.py index ce363056..7949c33b 100644 --- a/apps/proxy/ts_proxy/views.py +++ b/apps/proxy/ts_proxy/views.py @@ -461,14 +461,16 @@ def stream_xc(request, username, password, 
channel_id): return Response({"error": "Invalid credentials"}, status=401) if user.user_level < 10: - channel_profiles = user.channel_profiles.all() filters = { "id": channel_id, - "channelprofilemembership__channel_profile__in": channel_profiles, "channelprofilemembership__enabled": True, "user_level__lte": user.user_level, } + if user.channel_profiles.count() > 0: + channel_profiles = user.channel_profiles.all() + filters["channelprofilemembership__channel_profile__in"] = channel_profiles + channel = get_object_or_404(Channel, **filters) else: channel = get_object_or_404(Channel, id=channel_id) diff --git a/frontend/src/components/forms/User.jsx b/frontend/src/components/forms/User.jsx index b99949be..3f087d0b 100644 --- a/frontend/src/components/forms/User.jsx +++ b/frontend/src/components/forms/User.jsx @@ -34,6 +34,7 @@ const User = ({ user = null, isOpen, onClose }) => { const authUser = useAuthStore((s) => s.user); const [enableXC, setEnableXC] = useState(false); + const [selectedProfiles, setSelectedProfiles] = useState(new Set()); const form = useForm({ mode: 'uncontrolled', @@ -68,21 +69,38 @@ const User = ({ user = null, isOpen, onClose }) => { }), }); + const onChannelProfilesChange = (values) => { + let newValues = new Set(values); + if (selectedProfiles.has('0')) { + newValues.delete('0'); + } else if (newValues.has('0')) { + newValues = new Set(['0']); + } + + setSelectedProfiles(newValues); + + form.setFieldValue('channel_profiles', [...newValues]); + }; + const onSubmit = async () => { const values = form.getValues(); const { xc_password, ...customProps } = JSON.parse( - user.custom_properties || '{}' + user?.custom_properties || '{}' ); if (values.xc_password) { customProps.xc_password = values.xc_password; } - delete values.xc_password; values.custom_properties = JSON.stringify(customProps); + // If 'All' is included, clear this and we assume access to all channels + if (values.channel_profiles.includes('0')) { + values.channel_profiles = []; + } + if (!user) { await API.createUser(values); } else { @@ -179,6 +197,7 @@ const User = ({ user = null, isOpen, onClose }) => { label="XC Password" description="Auto-generated - clear to disable XC API" {...form.getInputProps('xc_password')} + onChange={onChannelProfilesChange} key={form.key('xc_password')} style={{ flex: 1 }} rightSectionWidth={30} @@ -200,12 +219,11 @@ const User = ({ user = null, isOpen, onClose }) => { label="Channel Profiles" {...form.getInputProps('channel_profiles')} key={form.key('channel_profiles')} - data={Object.values(profiles) - .filter((profile) => profile.id != 0) - .map((profile) => ({ - label: profile.name, - value: `${profile.id}`, - }))} + onChange={onChannelProfilesChange} + data={Object.values(profiles).map((profile) => ({ + label: profile.name, + value: `${profile.id}`, + }))} /> )} From 9daa764fbb8d34504261a134cbe50a9708e493de Mon Sep 17 00:00:00 2001 From: dekzter Date: Fri, 23 May 2025 13:40:11 -0400 Subject: [PATCH 0406/1435] completely removed mantine-react-table, added empty / dummy output for VOD endpoints --- apps/output/views.py | 5 + frontend/package-lock.json | 364 +++++------ frontend/package.json | 17 +- frontend/src/App.jsx | 1 - .../src/components/ConfirmationDialog.jsx | 96 +-- frontend/src/components/forms/Channel.jsx | 141 ----- frontend/src/components/forms/Channels.jsx | 128 ---- frontend/src/components/forms/User.jsx | 17 +- .../tables/CustomTable/CustomTable.jsx | 3 + .../tables/CustomTable/CustomTableBody.jsx | 9 +- .../tables/CustomTable/CustomTableHeader.jsx | 2 + 
frontend/src/components/tables/EPGsTable.jsx | 421 ++++++++----- frontend/src/components/tables/M3UsTable.jsx | 563 ++++++++++++------ .../components/tables/StreamProfilesTable.jsx | 227 ++++--- .../src/components/tables/UserAgentsTable.jsx | 209 +++---- frontend/src/pages/Stats.jsx | 182 ++++-- frontend/src/store/auth.jsx | 2 + 17 files changed, 1224 insertions(+), 1163 deletions(-) diff --git a/apps/output/views.py b/apps/output/views.py index f95e4861..f6ee215d 100644 --- a/apps/output/views.py +++ b/apps/output/views.py @@ -489,6 +489,11 @@ def xc_player_api(request): if action == "get_live_streams": return xc_get_live_streams(request, user, request.GET.get("category_id")) + if action == "get_vod_categories": + return JsonResponse([], safe=False) + if action == "get_vod_streams": + return JsonResponse([], safe=False) + def xc_get(request): action = request.GET.get("action") diff --git a/frontend/package-lock.json b/frontend/package-lock.json index d8da7f76..291a265a 100644 --- a/frontend/package-lock.json +++ b/frontend/package-lock.json @@ -12,13 +12,13 @@ "@dnd-kit/modifiers": "^9.0.0", "@dnd-kit/sortable": "^10.0.0", "@dnd-kit/utilities": "^3.2.2", - "@mantine/charts": "^7.17.2", - "@mantine/core": "^7.17.2", - "@mantine/dates": "^7.17.2", - "@mantine/dropzone": "^7.17.2", - "@mantine/form": "^7.17.3", - "@mantine/hooks": "^7.17.2", - "@mantine/notifications": "^7.17.2", + "@mantine/charts": "~8.0.1", + "@mantine/core": "~8.0.1", + "@mantine/dates": "~8.0.1", + "@mantine/dropzone": "~8.0.1", + "@mantine/form": "~8.0.1", + "@mantine/hooks": "~8.0.1", + "@mantine/notifications": "~8.0.1", "@tabler/icons-react": "^3.31.0", "@tanstack/react-table": "^8.21.2", "allotment": "^1.20.3", @@ -27,8 +27,7 @@ "dayjs": "^1.11.13", "formik": "^2.4.6", "hls.js": "^1.5.20", - "lucide-react": "^0.479.0", - "mantine-react-table": "^2.0.0-beta.9", + "lucide-react": "^0.511.0", "mpegts.js": "^1.8.0", "prettier": "^3.5.3", "react": "^19.0.0", @@ -753,72 +752,72 @@ "license": "Apache-2.0" }, "node_modules/@mantine/charts": { - "version": "7.17.2", - "resolved": "https://registry.npmjs.org/@mantine/charts/-/charts-7.17.2.tgz", - "integrity": "sha512-ckB23pIqRjzysUz2EiWZD9AVyf7t0r7o7zfJbl01nzOezFgYq5RGeRoxvpcsfBC+YoSbB/43rjNcXtYhtA7QzA==", + "version": "8.0.1", + "resolved": "https://registry.npmjs.org/@mantine/charts/-/charts-8.0.1.tgz", + "integrity": "sha512-yntk4siXpQGSj83tDwftJw6fHTOBS6c/VWinjvTW29ptEdjBCxbKFfyyDc9UGVVuO7ovbdtpfCZBpuN2I7HPCA==", "license": "MIT", "peerDependencies": { - "@mantine/core": "7.17.2", - "@mantine/hooks": "7.17.2", + "@mantine/core": "8.0.1", + "@mantine/hooks": "8.0.1", "react": "^18.x || ^19.x", "react-dom": "^18.x || ^19.x", "recharts": "^2.13.3" } }, "node_modules/@mantine/core": { - "version": "7.17.2", - "resolved": "https://registry.npmjs.org/@mantine/core/-/core-7.17.2.tgz", - "integrity": "sha512-R6MYhitJ0JEgrhadd31Nw9FhRaQwDHjXUs5YIlitKH/fTOz9gKSxKjzmNng3bEBQCcbEDOkZj3FRcBgTUh/F0Q==", + "version": "8.0.1", + "resolved": "https://registry.npmjs.org/@mantine/core/-/core-8.0.1.tgz", + "integrity": "sha512-4ezaxKjChSPtawamQ3KrJq+x506uTouXlL0Z5fP+t105KnyxMrAJUENhbh2ivD4pq9Zh1BFiD9IWzyu3IXFR8w==", "license": "MIT", "dependencies": { "@floating-ui/react": "^0.26.28", "clsx": "^2.1.1", "react-number-format": "^5.4.3", "react-remove-scroll": "^2.6.2", - "react-textarea-autosize": "8.5.6", + "react-textarea-autosize": "8.5.9", "type-fest": "^4.27.0" }, "peerDependencies": { - "@mantine/hooks": "7.17.2", + "@mantine/hooks": "8.0.1", "react": "^18.x || ^19.x", "react-dom": 
"^18.x || ^19.x" } }, "node_modules/@mantine/dates": { - "version": "7.17.2", - "resolved": "https://registry.npmjs.org/@mantine/dates/-/dates-7.17.2.tgz", - "integrity": "sha512-7bB992j8f+uEi280jab0/8i5yfsN/3oSrMDFwatZ+7XSDUwiP0YFib/FVX0pNSSqdFpbXhUmsZEECX71QtHw+Q==", + "version": "8.0.1", + "resolved": "https://registry.npmjs.org/@mantine/dates/-/dates-8.0.1.tgz", + "integrity": "sha512-YCmV5jiGE9Ts2uhNS217IA1Hd5kAa8oaEtfnU0bS1sL36zKEf2s6elmzY718XdF8tFil0jJWAj0jiCrA3/udMg==", "license": "MIT", "dependencies": { "clsx": "^2.1.1" }, "peerDependencies": { - "@mantine/core": "7.17.2", - "@mantine/hooks": "7.17.2", + "@mantine/core": "8.0.1", + "@mantine/hooks": "8.0.1", "dayjs": ">=1.0.0", "react": "^18.x || ^19.x", "react-dom": "^18.x || ^19.x" } }, "node_modules/@mantine/dropzone": { - "version": "7.17.2", - "resolved": "https://registry.npmjs.org/@mantine/dropzone/-/dropzone-7.17.2.tgz", - "integrity": "sha512-NMQ1SDmnW0sf3GO6p1r/VIcg/xWqlRmfnWCr00/bGRbBEGbyaUwL3LSn+KYBJdY+3/jNGvGa+xflWDvnby5tzw==", + "version": "8.0.1", + "resolved": "https://registry.npmjs.org/@mantine/dropzone/-/dropzone-8.0.1.tgz", + "integrity": "sha512-8PH5yrtA/ebCIwjs0m4J9qOvEyS/P4XmNlHrw0E389/qq64Ol7+/ZH7Xtiq64IaY8kvsMW1XHaV0c+bdYrijiA==", "license": "MIT", "dependencies": { - "react-dropzone-esm": "15.2.0" + "react-dropzone": "14.3.8" }, "peerDependencies": { - "@mantine/core": "7.17.2", - "@mantine/hooks": "7.17.2", + "@mantine/core": "8.0.1", + "@mantine/hooks": "8.0.1", "react": "^18.x || ^19.x", "react-dom": "^18.x || ^19.x" } }, "node_modules/@mantine/form": { - "version": "7.17.3", - "resolved": "https://registry.npmjs.org/@mantine/form/-/form-7.17.3.tgz", - "integrity": "sha512-ktERldD8f9lrjjz6wIbwMnNbAZq8XEWPx4K5WuFyjXaK0PI8D+gsXIGKMtA5rVrAUFHCWCdbK3yLgtjJNki8ew==", + "version": "8.0.1", + "resolved": "https://registry.npmjs.org/@mantine/form/-/form-8.0.1.tgz", + "integrity": "sha512-lQ94gn/9p60C+tKEW7psQ1tZHod58Q0bXLbRDadRKMwnqBb2WFoIuaQWPDo7ox+PqyOv28dtflgS+Lm95EbBhg==", "license": "MIT", "dependencies": { "fast-deep-equal": "^3.1.3", @@ -829,34 +828,34 @@ } }, "node_modules/@mantine/hooks": { - "version": "7.17.2", - "resolved": "https://registry.npmjs.org/@mantine/hooks/-/hooks-7.17.2.tgz", - "integrity": "sha512-tbErVcGZu0E4dSmE6N0k6Tv1y9R3SQmmQgwqorcc+guEgKMdamc36lucZGlJnSGUmGj+WLUgELkEQ0asdfYBDA==", + "version": "8.0.1", + "resolved": "https://registry.npmjs.org/@mantine/hooks/-/hooks-8.0.1.tgz", + "integrity": "sha512-GvLdM4Ro3QcDyIgqrdXsUZmeeKye2TNL/k3mEr9JhM5KacHQjr83JPp0u9eLobn7kiyBqpLTYmVYAbmjJdCxHw==", "license": "MIT", "peerDependencies": { "react": "^18.x || ^19.x" } }, "node_modules/@mantine/notifications": { - "version": "7.17.2", - "resolved": "https://registry.npmjs.org/@mantine/notifications/-/notifications-7.17.2.tgz", - "integrity": "sha512-vg0L8cmihz0ODg4WJ9MAyK06WPt/6g67ksIUFxd4F8RfdJbIMLTsNG9yWoSfuhtXenUg717KaA917IWLjDSaqw==", + "version": "8.0.1", + "resolved": "https://registry.npmjs.org/@mantine/notifications/-/notifications-8.0.1.tgz", + "integrity": "sha512-7TX9OyAmUcok3qffnheS7gTAMKDczETy8XEYDr38Sy/XIoXLjM+3CwO+a/vfd1F9oW2LvkahkHT0Ey+vBOVd0Q==", "license": "MIT", "dependencies": { - "@mantine/store": "7.17.2", + "@mantine/store": "8.0.1", "react-transition-group": "4.4.5" }, "peerDependencies": { - "@mantine/core": "7.17.2", - "@mantine/hooks": "7.17.2", + "@mantine/core": "8.0.1", + "@mantine/hooks": "8.0.1", "react": "^18.x || ^19.x", "react-dom": "^18.x || ^19.x" } }, "node_modules/@mantine/store": { - "version": "7.17.2", - "resolved": 
"https://registry.npmjs.org/@mantine/store/-/store-7.17.2.tgz", - "integrity": "sha512-UoMUYQK/z58hMueCkpDIXc49gPgrVO/zcpb0k+B7MFU51EIUiFzHLxLFBmWrgCAM6rzJORqN8JjyCd/PB9j4aw==", + "version": "8.0.1", + "resolved": "https://registry.npmjs.org/@mantine/store/-/store-8.0.1.tgz", + "integrity": "sha512-3wfUDeiERXJEI+MGgRAbh+9aY35D9oE4UzquLqZh8cIiH5i5g64Y/eJx3PfjHgO5+Zeu6lbgTgL6k4lg4a2SBQ==", "license": "MIT", "peerDependencies": { "react": "^18.x || ^19.x" @@ -1016,22 +1015,6 @@ "react": ">= 16" } }, - "node_modules/@tanstack/match-sorter-utils": { - "version": "8.19.4", - "resolved": "https://registry.npmjs.org/@tanstack/match-sorter-utils/-/match-sorter-utils-8.19.4.tgz", - "integrity": "sha512-Wo1iKt2b9OT7d+YGhvEPD3DXvPv2etTusIMhMUoG7fbhmxcXCtIjJDEygy91Y2JFlwGyjqiBPRozme7UD8hoqg==", - "license": "MIT", - "dependencies": { - "remove-accents": "0.5.0" - }, - "engines": { - "node": ">=12" - }, - "funding": { - "type": "github", - "url": "https://github.com/sponsors/tannerlinsley" - } - }, "node_modules/@tanstack/react-table": { "version": "8.21.3", "resolved": "https://registry.npmjs.org/@tanstack/react-table/-/react-table-8.21.3.tgz", @@ -1052,23 +1035,6 @@ "react-dom": ">=16.8" } }, - "node_modules/@tanstack/react-virtual": { - "version": "3.11.2", - "resolved": "https://registry.npmjs.org/@tanstack/react-virtual/-/react-virtual-3.11.2.tgz", - "integrity": "sha512-OuFzMXPF4+xZgx8UzJha0AieuMihhhaWG0tCqpp6tDzlFwOmNBPYMuLOtMJ1Tr4pXLHmgjcWhG6RlknY2oNTdQ==", - "license": "MIT", - "dependencies": { - "@tanstack/virtual-core": "3.11.2" - }, - "funding": { - "type": "github", - "url": "https://github.com/sponsors/tannerlinsley" - }, - "peerDependencies": { - "react": "^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0", - "react-dom": "^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0" - } - }, "node_modules/@tanstack/table-core": { "version": "8.21.3", "resolved": "https://registry.npmjs.org/@tanstack/table-core/-/table-core-8.21.3.tgz", @@ -1082,22 +1048,6 @@ "url": "https://github.com/sponsors/tannerlinsley" } }, - "node_modules/@tanstack/virtual-core": { - "version": "3.11.2", - "resolved": "https://registry.npmjs.org/@tanstack/virtual-core/-/virtual-core-3.11.2.tgz", - "integrity": "sha512-vTtpNt7mKCiZ1pwU9hfKPhpdVO2sVzFQsxoVBGtOSHxlrRRzYr8iQ2TlwbAcRYCcEiZ9ECAM8kBzH0v2+VzfKw==", - "license": "MIT", - "funding": { - "type": "github", - "url": "https://github.com/sponsors/tannerlinsley" - } - }, - "node_modules/@types/cookie": { - "version": "0.6.0", - "resolved": "https://registry.npmjs.org/@types/cookie/-/cookie-0.6.0.tgz", - "integrity": "sha512-4Kh9a6B2bQciAhf7FSuMRRkUWecJgJu9nPnx3yzpsfXX/c50REIqpHY4C82bXP90qrLtXtkDxTZosYO3UpOwlA==", - "license": "MIT" - }, "node_modules/@types/d3-array": { "version": "3.2.1", "resolved": "https://registry.npmjs.org/@types/d3-array/-/d3-array-3.2.1.tgz", @@ -1392,6 +1342,15 @@ "integrity": "sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==", "license": "MIT" }, + "node_modules/attr-accept": { + "version": "2.2.5", + "resolved": "https://registry.npmjs.org/attr-accept/-/attr-accept-2.2.5.tgz", + "integrity": "sha512-0bDNnY/u6pPwHDMoF0FieU354oBi0a8rD9FcsLwzcGWbc8KS8KPIi7y+s13OlVY+gMWc/9xEMUgNE6Qm8ZllYQ==", + "license": "MIT", + "engines": { + "node": ">=4" + } + }, "node_modules/axios": { "version": "1.8.3", "resolved": "https://registry.npmjs.org/axios/-/axios-1.8.3.tgz", @@ -2126,6 +2085,21 @@ "dev": true, "license": "MIT" }, + "node_modules/fdir": { + "version": "6.4.4", + "resolved": 
"https://registry.npmjs.org/fdir/-/fdir-6.4.4.tgz", + "integrity": "sha512-1NZP+GK4GfuAv3PqKvxQRDMjdSRZjnkq7KfhlNrCNNlZ0ygQFpebfrnfnq/W7fpUnAv9aGWmY1zKx7FYL3gwhg==", + "dev": true, + "license": "MIT", + "peerDependencies": { + "picomatch": "^3 || ^4" + }, + "peerDependenciesMeta": { + "picomatch": { + "optional": true + } + } + }, "node_modules/file-entry-cache": { "version": "8.0.0", "resolved": "https://registry.npmjs.org/file-entry-cache/-/file-entry-cache-8.0.0.tgz", @@ -2139,6 +2113,18 @@ "node": ">=16.0.0" } }, + "node_modules/file-selector": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/file-selector/-/file-selector-2.1.2.tgz", + "integrity": "sha512-QgXo+mXTe8ljeqUFaX3QVHc5osSItJ/Km+xpocx0aSqWGMSCf6qYs/VnzZgS864Pjn5iceMRFigeAV7AfTlaig==", + "license": "MIT", + "dependencies": { + "tslib": "^2.7.0" + }, + "engines": { + "node": ">= 12" + } + }, "node_modules/find-root": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/find-root/-/find-root-1.1.0.tgz", @@ -2243,6 +2229,21 @@ "react": ">=16.8.0" } }, + "node_modules/fsevents": { + "version": "2.3.3", + "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.3.tgz", + "integrity": "sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw==", + "dev": true, + "hasInstallScript": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": "^8.16.0 || ^10.6.0 || >=11.0.0" + } + }, "node_modules/function-bind": { "version": "1.1.2", "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.2.tgz", @@ -2682,9 +2683,9 @@ } }, "node_modules/lucide-react": { - "version": "0.479.0", - "resolved": "https://registry.npmjs.org/lucide-react/-/lucide-react-0.479.0.tgz", - "integrity": "sha512-aBhNnveRhorBOK7uA4gDjgaf+YlHMdMhQ/3cupk6exM10hWlEU+2QtWYOfhXhjAsmdb6LeKR+NZnow4UxRRiTQ==", + "version": "0.511.0", + "resolved": "https://registry.npmjs.org/lucide-react/-/lucide-react-0.511.0.tgz", + "integrity": "sha512-VK5a2ydJ7xm8GvBeKLS9mu1pVK6ucef9780JVUjw6bAjJL/QXnd4Y0p7SPeOUMC27YhzNCZvm5d/QX0Tp3rc0w==", "license": "ISC", "peerDependencies": { "react": "^16.5.1 || ^17.0.0 || ^18.0.0 || ^19.0.0" @@ -2701,67 +2702,6 @@ "global": "^4.4.0" } }, - "node_modules/mantine-react-table": { - "version": "2.0.0-beta.9", - "resolved": "https://registry.npmjs.org/mantine-react-table/-/mantine-react-table-2.0.0-beta.9.tgz", - "integrity": "sha512-ZdfcwebWaPERoDvAuk43VYcBCzamohARVclnbuepT0PHZ0wRcDPMBR+zgaocL+pFy8EXUGwvWTOKNh25ITpjNQ==", - "license": "MIT", - "dependencies": { - "@tanstack/match-sorter-utils": "8.19.4", - "@tanstack/react-table": "8.20.5", - "@tanstack/react-virtual": "3.11.2" - }, - "engines": { - "node": ">=16" - }, - "funding": { - "type": "github", - "url": "https://github.com/sponsors/kevinvandy" - }, - "peerDependencies": { - "@mantine/core": "^7.9", - "@mantine/dates": "^7.9", - "@mantine/hooks": "^7.9", - "@tabler/icons-react": ">=2.23.0", - "clsx": ">=2", - "dayjs": ">=1.11", - "react": ">=18.0", - "react-dom": ">=18.0" - } - }, - "node_modules/mantine-react-table/node_modules/@tanstack/react-table": { - "version": "8.20.5", - "resolved": "https://registry.npmjs.org/@tanstack/react-table/-/react-table-8.20.5.tgz", - "integrity": "sha512-WEHopKw3znbUZ61s9i0+i9g8drmDo6asTWbrQh8Us63DAk/M0FkmIqERew6P71HI75ksZ2Pxyuf4vvKh9rAkiA==", - "license": "MIT", - "dependencies": { - "@tanstack/table-core": "8.20.5" - }, - "engines": { - "node": ">=12" - }, - "funding": { - "type": "github", - "url": 
"https://github.com/sponsors/tannerlinsley" - }, - "peerDependencies": { - "react": ">=16.8", - "react-dom": ">=16.8" - } - }, - "node_modules/mantine-react-table/node_modules/@tanstack/table-core": { - "version": "8.20.5", - "resolved": "https://registry.npmjs.org/@tanstack/table-core/-/table-core-8.20.5.tgz", - "integrity": "sha512-P9dF7XbibHph2PFRz8gfBKEXEY/HJPOhym8CHmjF8y3q5mWpKx9xtZapXQUWCgkqvsK0R46Azuz+VaxD4Xl+Tg==", - "license": "MIT", - "engines": { - "node": ">=12" - }, - "funding": { - "type": "github", - "url": "https://github.com/sponsors/tannerlinsley" - } - }, "node_modules/math-intrinsics": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/math-intrinsics/-/math-intrinsics-1.1.0.tgz", @@ -3023,6 +2963,19 @@ "integrity": "sha512-xceH2snhtb5M9liqDsmEw56le376mTZkEX/jEb/RxNFyegNul7eNslCXP9FDj/Lcu0X8KEyMceP2ntpaHrDEVA==", "license": "ISC" }, + "node_modules/picomatch": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-4.0.2.tgz", + "integrity": "sha512-M7BAV6Rlcy5u+m6oPhAPFgJTzAioX/6B0DxyvDlo9l8+T3nLKbrczg2WLUyzd45L8RqfUMyGPzekbMvX2Ldkwg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/jonschlinkert" + } + }, "node_modules/pkcs7": { "version": "1.0.4", "resolved": "https://registry.npmjs.org/pkcs7/-/pkcs7-1.0.4.tgz", @@ -3181,12 +3134,14 @@ "node": ">=6" } }, - "node_modules/react-dropzone-esm": { - "version": "15.2.0", - "resolved": "https://registry.npmjs.org/react-dropzone-esm/-/react-dropzone-esm-15.2.0.tgz", - "integrity": "sha512-pPwR8xWVL+tFLnbAb8KVH5f6Vtl397tck8dINkZ1cPMxHWH+l9dFmIgRWgbh7V7jbjIcuKXCsVrXbhQz68+dVA==", + "node_modules/react-dropzone": { + "version": "14.3.8", + "resolved": "https://registry.npmjs.org/react-dropzone/-/react-dropzone-14.3.8.tgz", + "integrity": "sha512-sBgODnq+lcA4P296DY4wacOZz3JFpD99fp+hb//iBO2HHnyeZU3FwWyXJ6salNpqQdsZrgMrotuko/BdJMV8Ug==", "license": "MIT", "dependencies": { + "attr-accept": "^2.2.4", + "file-selector": "^2.1.0", "prop-types": "^15.8.1" }, "engines": { @@ -3288,15 +3243,13 @@ } }, "node_modules/react-router": { - "version": "7.3.0", - "resolved": "https://registry.npmjs.org/react-router/-/react-router-7.3.0.tgz", - "integrity": "sha512-466f2W7HIWaNXTKM5nHTqNxLrHTyXybm7R0eBlVSt0k/u55tTCDO194OIx/NrYD4TS5SXKTNekXfT37kMKUjgw==", + "version": "7.6.0", + "resolved": "https://registry.npmjs.org/react-router/-/react-router-7.6.0.tgz", + "integrity": "sha512-GGufuHIVCJDbnIAXP3P9Sxzq3UUsddG3rrI3ut1q6m0FI6vxVBF3JoPQ38+W/blslLH4a5Yutp8drkEpXoddGQ==", "license": "MIT", "dependencies": { - "@types/cookie": "^0.6.0", "cookie": "^1.0.1", - "set-cookie-parser": "^2.6.0", - "turbo-stream": "2.4.0" + "set-cookie-parser": "^2.6.0" }, "engines": { "node": ">=20.0.0" @@ -3312,12 +3265,12 @@ } }, "node_modules/react-router-dom": { - "version": "7.3.0", - "resolved": "https://registry.npmjs.org/react-router-dom/-/react-router-dom-7.3.0.tgz", - "integrity": "sha512-z7Q5FTiHGgQfEurX/FBinkOXhWREJIAB2RiU24lvcBa82PxUpwqvs/PAXb9lJyPjTs2jrl6UkLvCZVGJPeNuuQ==", + "version": "7.6.0", + "resolved": "https://registry.npmjs.org/react-router-dom/-/react-router-dom-7.6.0.tgz", + "integrity": "sha512-DYgm6RDEuKdopSyGOWZGtDfSm7Aofb8CCzgkliTjtu/eDuB0gcsv6qdFhhi8HdtmA+KHkt5MfZ5K2PdzjugYsA==", "license": "MIT", "dependencies": { - "react-router": "7.3.0" + "react-router": "7.6.0" }, "engines": { "node": ">=20.0.0" @@ -3365,9 +3318,9 @@ } }, "node_modules/react-textarea-autosize": { - "version": "8.5.6", - "resolved": 
"https://registry.npmjs.org/react-textarea-autosize/-/react-textarea-autosize-8.5.6.tgz", - "integrity": "sha512-aT3ioKXMa8f6zHYGebhbdMD2L00tKeRX1zuVuDx9YQK/JLLRSaSxq3ugECEmUB9z2kvk6bFSIoRHLkkUv0RJiw==", + "version": "8.5.9", + "resolved": "https://registry.npmjs.org/react-textarea-autosize/-/react-textarea-autosize-8.5.9.tgz", + "integrity": "sha512-U1DGlIQN5AwgjTyOEnI1oCcMuEr1pv1qOtklB2l4nyMGbHzWrI0eFsYK0zos2YWqAolJyG0IWJaqWmWj5ETh0A==", "license": "MIT", "dependencies": { "@babel/runtime": "^7.20.13", @@ -3495,12 +3448,6 @@ "integrity": "sha512-dYnhHh0nJoMfnkZs6GmmhFknAGRrLznOu5nc9ML+EJxGvrx6H7teuevqVqCuPcPK//3eDrrjQhehXVx9cnkGdw==", "license": "MIT" }, - "node_modules/remove-accents": { - "version": "0.5.0", - "resolved": "https://registry.npmjs.org/remove-accents/-/remove-accents-0.5.0.tgz", - "integrity": "sha512-8g3/Otx1eJaVD12e31UbJj1YzdtVvzH85HV7t+9MJYk/u3XmkOUJ5Ys9wQrf9PCPK8+xn4ymzqYCiZl6QWKn+A==", - "license": "MIT" - }, "node_modules/resolve": { "version": "1.22.10", "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.22.10.tgz", @@ -3691,6 +3638,23 @@ "integrity": "sha512-lBN9zLN/oAf68o3zNXYrdCt1kP8WsiGW8Oo2ka41b2IM5JL/S1CTyX1rW0mb/zSuJun0ZUrDxx4sqvYS2FWzPA==", "license": "MIT" }, + "node_modules/tinyglobby": { + "version": "0.2.13", + "resolved": "https://registry.npmjs.org/tinyglobby/-/tinyglobby-0.2.13.tgz", + "integrity": "sha512-mEwzpUgrLySlveBwEVDMKk5B57bhLPYovRfPAXD5gA/98Opn0rCDj3GtLwFvCvH5RK9uPCExUROW5NjDwvqkxw==", + "dev": true, + "license": "MIT", + "dependencies": { + "fdir": "^6.4.4", + "picomatch": "^4.0.2" + }, + "engines": { + "node": ">=12.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/SuperchupuDev" + } + }, "node_modules/toposort": { "version": "2.0.2", "resolved": "https://registry.npmjs.org/toposort/-/toposort-2.0.2.tgz", @@ -3703,12 +3667,6 @@ "integrity": "sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w==", "license": "0BSD" }, - "node_modules/turbo-stream": { - "version": "2.4.0", - "resolved": "https://registry.npmjs.org/turbo-stream/-/turbo-stream-2.4.0.tgz", - "integrity": "sha512-FHncC10WpBd2eOmGwpmQsWLDoK4cqsA/UT/GqNoaKOQnT8uzhtCbg3EoUDMvqpOSAI0S26mr0rkjzbOO6S3v1g==", - "license": "ISC" - }, "node_modules/type-check": { "version": "0.4.0", "resolved": "https://registry.npmjs.org/type-check/-/type-check-0.4.0.tgz", @@ -3780,9 +3738,9 @@ } }, "node_modules/use-isomorphic-layout-effect": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/use-isomorphic-layout-effect/-/use-isomorphic-layout-effect-1.2.0.tgz", - "integrity": "sha512-q6ayo8DWoPZT0VdG4u3D3uxcgONP3Mevx2i2b0434cwWBoL+aelL1DzkXI6w3PhTZzUeR2kaVlZn70iCiseP6w==", + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/use-isomorphic-layout-effect/-/use-isomorphic-layout-effect-1.2.1.tgz", + "integrity": "sha512-tpZZ+EX0gaghDAiFR37hj5MgY6ZN55kLiPkJsKxBMZ6GZdOSPJXiOzPM984oPYZ5AnehYx5WQp1+ME8I/P/pRA==", "license": "MIT", "peerDependencies": { "react": "^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0" @@ -3906,15 +3864,18 @@ } }, "node_modules/vite": { - "version": "6.2.1", - "resolved": "https://registry.npmjs.org/vite/-/vite-6.2.1.tgz", - "integrity": "sha512-n2GnqDb6XPhlt9B8olZPrgMD/es/Nd1RdChF6CBD/fHW6pUyUTt2sQW2fPRX5GiD9XEa6+8A6A4f2vT6pSsE7Q==", + "version": "6.3.5", + "resolved": "https://registry.npmjs.org/vite/-/vite-6.3.5.tgz", + "integrity": "sha512-cZn6NDFE7wdTpINgs++ZJ4N49W2vRp8LCKrn3Ob1kYNtOo21vfDoaV5GzBfLU4MovSAB8uNRm4jgzVQZ+mBzPQ==", "dev": true, "license": "MIT", "dependencies": { "esbuild": 
"^0.25.0", + "fdir": "^6.4.4", + "picomatch": "^4.0.2", "postcss": "^8.5.3", - "rollup": "^4.30.1" + "rollup": "^4.34.9", + "tinyglobby": "^0.2.13" }, "bin": { "vite": "bin/vite.js" @@ -4008,21 +3969,6 @@ "node": ">=0.10.0" } }, - "node_modules/yaml": { - "version": "2.7.0", - "resolved": "https://registry.npmjs.org/yaml/-/yaml-2.7.0.tgz", - "integrity": "sha512-+hSoy/QHluxmC9kCIJyL/uyFmLmc+e5CFR5Wa+bpIhIj85LVb9ZH2nVnqrHoSvKogwODv0ClqZkmiSSaIH5LTA==", - "dev": true, - "license": "ISC", - "optional": true, - "peer": true, - "bin": { - "yaml": "bin.mjs" - }, - "engines": { - "node": ">= 14" - } - }, "node_modules/yocto-queue": { "version": "0.1.0", "resolved": "https://registry.npmjs.org/yocto-queue/-/yocto-queue-0.1.0.tgz", diff --git a/frontend/package.json b/frontend/package.json index 3b287d79..8802db2c 100644 --- a/frontend/package.json +++ b/frontend/package.json @@ -14,13 +14,13 @@ "@dnd-kit/modifiers": "^9.0.0", "@dnd-kit/sortable": "^10.0.0", "@dnd-kit/utilities": "^3.2.2", - "@mantine/charts": "^7.17.2", - "@mantine/core": "^7.17.2", - "@mantine/dates": "^7.17.2", - "@mantine/dropzone": "^7.17.2", - "@mantine/form": "^7.17.3", - "@mantine/hooks": "^7.17.2", - "@mantine/notifications": "^7.17.2", + "@mantine/charts": "~8.0.1", + "@mantine/core": "~8.0.1", + "@mantine/dates": "~8.0.1", + "@mantine/dropzone": "~8.0.1", + "@mantine/form": "~8.0.1", + "@mantine/hooks": "~8.0.1", + "@mantine/notifications": "~8.0.1", "@tabler/icons-react": "^3.31.0", "@tanstack/react-table": "^8.21.2", "allotment": "^1.20.3", @@ -29,8 +29,7 @@ "dayjs": "^1.11.13", "formik": "^2.4.6", "hls.js": "^1.5.20", - "lucide-react": "^0.479.0", - "mantine-react-table": "^2.0.0-beta.9", + "lucide-react": "^0.511.0", "mpegts.js": "^1.8.0", "prettier": "^3.5.3", "react": "^19.0.0", diff --git a/frontend/src/App.jsx b/frontend/src/App.jsx index a3ddfff7..a057be50 100644 --- a/frontend/src/App.jsx +++ b/frontend/src/App.jsx @@ -21,7 +21,6 @@ import { WebsocketProvider } from './WebSocket'; import { Box, AppShell, MantineProvider } from '@mantine/core'; import '@mantine/core/styles.css'; // Ensure Mantine global styles load import '@mantine/notifications/styles.css'; -import 'mantine-react-table/styles.css'; import '@mantine/dropzone/styles.css'; import '@mantine/dates/styles.css'; import './index.css'; diff --git a/frontend/src/components/ConfirmationDialog.jsx b/frontend/src/components/ConfirmationDialog.jsx index 3c0f15e7..822b46f1 100644 --- a/frontend/src/components/ConfirmationDialog.jsx +++ b/frontend/src/components/ConfirmationDialog.jsx @@ -18,60 +18,60 @@ import useWarningsStore from '../store/warnings'; * @param {string} [props.size='md'] - Size of the modal */ const ConfirmationDialog = ({ - opened, - onClose, - onConfirm, - title = 'Confirm Action', - message = 'Are you sure you want to proceed?', - confirmLabel = 'Confirm', - cancelLabel = 'Cancel', - actionKey, - onSuppressChange, - size = 'md', // Add default size parameter - md is a medium width + opened, + onClose, + onConfirm, + title = 'Confirm Action', + message = 'Are you sure you want to proceed?', + confirmLabel = 'Confirm', + cancelLabel = 'Cancel', + actionKey, + onSuppressChange, + size = 'md', // Add default size parameter - md is a medium width }) => { - const suppressWarning = useWarningsStore((s) => s.suppressWarning); - const isWarningSuppressed = useWarningsStore((s) => s.isWarningSuppressed); - const [suppressChecked, setSuppressChecked] = useState( - isWarningSuppressed(actionKey) - ); + const suppressWarning = useWarningsStore((s) => 
s.suppressWarning); + const isWarningSuppressed = useWarningsStore((s) => s.isWarningSuppressed); + const [suppressChecked, setSuppressChecked] = useState( + isWarningSuppressed(actionKey) + ); - const handleToggleSuppress = (e) => { - setSuppressChecked(e.currentTarget.checked); - if (onSuppressChange) { - onSuppressChange(e.currentTarget.checked); - } - }; + const handleToggleSuppress = (e) => { + setSuppressChecked(e.currentTarget.checked); + if (onSuppressChange) { + onSuppressChange(e.currentTarget.checked); + } + }; - const handleConfirm = () => { - if (suppressChecked) { - suppressWarning(actionKey); - } - onConfirm(); - }; + const handleConfirm = () => { + if (suppressChecked) { + suppressWarning(actionKey); + } + onConfirm(); + }; - return ( - - {message} + return ( + + {message} - {actionKey && ( - - )} + {actionKey && ( + + )} - - - - - - ); + + + + + + ); }; export default ConfirmationDialog; diff --git a/frontend/src/components/forms/Channel.jsx b/frontend/src/components/forms/Channel.jsx index 17abbc8d..452db052 100644 --- a/frontend/src/components/forms/Channel.jsx +++ b/frontend/src/components/forms/Channel.jsx @@ -5,7 +5,6 @@ import useChannelsStore from '../../store/channels'; import API from '../../api'; import useStreamProfilesStore from '../../store/streamProfiles'; import useStreamsStore from '../../store/streams'; -import { MantineReactTable, useMantineReactTable } from 'mantine-react-table'; import ChannelGroupForm from './ChannelGroup'; import usePlaylistsStore from '../../store/playlists'; import logo from '../../images/logo.png'; @@ -224,134 +223,6 @@ const ChannelForm = ({ channel = null, isOpen, onClose }) => { ); }; - // const activeStreamsTable = useMantineReactTable({ - // data: channelStreams, - // columns: useMemo( - // () => [ - // { - // header: 'Name', - // accessorKey: 'name', - // Cell: ({ cell }) => ( - //
- // {cell.getValue()} - //
- // ), - // }, - // { - // header: 'M3U', - // accessorKey: 'group_name', - // Cell: ({ cell }) => ( - //
- // {cell.getValue()} - //
- // ), - // }, - // ], - // [] - // ), - // enableSorting: false, - // enableBottomToolbar: false, - // enableTopToolbar: false, - // columnFilterDisplayMode: 'popover', - // enablePagination: false, - // enableRowVirtualization: true, - // enableRowOrdering: true, - // rowVirtualizerOptions: { overscan: 5 }, //optionally customize the row virtualizer - // initialState: { - // density: 'compact', - // }, - // enableRowActions: true, - // positionActionsColumn: 'last', - // renderRowActions: ({ row }) => ( - // <> - // removeStream(row.original)} - // > - // {/* Small icon size */} - // - // - // ), - // mantineTableContainerProps: { - // style: { - // height: '200px', - // }, - // }, - // mantineRowDragHandleProps: ({ table }) => ({ - // onDragEnd: () => { - // const { draggingRow, hoveredRow } = table.getState(); - - // if (hoveredRow && draggingRow) { - // channelStreams.splice( - // hoveredRow.index, - // 0, - // channelStreams.splice(draggingRow.index, 1)[0] - // ); - - // setChannelStreams([...channelStreams]); - // } - // }, - // }), - // }); - - // const availableStreamsTable = useMantineReactTable({ - // data: streams, - // columns: useMemo( - // () => [ - // { - // header: 'Name', - // accessorKey: 'name', - // }, - // { - // header: 'M3U', - // accessorFn: (row) => - // playlists.find((playlist) => playlist.id === row.m3u_account)?.name, - // }, - // ], - // [] - // ), - // enableBottomToolbar: false, - // enableTopToolbar: false, - // columnFilterDisplayMode: 'popover', - // enablePagination: false, - // enableRowVirtualization: true, - // rowVirtualizerOptions: { overscan: 5 }, //optionally customize the row virtualizer - // initialState: { - // density: 'compact', - // }, - // enableRowActions: true, - // renderRowActions: ({ row }) => ( - // <> - // addStream(row.original)} - // > - // {/* Small icon size */} - // - // - // ), - // positionActionsColumn: 'last', - // mantineTableContainerProps: { - // style: { - // height: '200px', - // }, - // }, - // }); - // Update the handler for when channel group modal is closed const handleChannelGroupModalClose = (newGroup) => { setChannelGroupModalOpen(false); @@ -839,18 +710,6 @@ const ChannelForm = ({ channel = null, isOpen, onClose }) => { - {/* - - Active Streams - - - - - Available Streams - - - */} - - - - - -
- ); -}; - -export default ProxyManager; \ No newline at end of file diff --git a/frontend/src/components/forms/EPG.jsx b/frontend/src/components/forms/EPG.jsx index 0c7f78c0..603d0c81 100644 --- a/frontend/src/components/forms/EPG.jsx +++ b/frontend/src/components/forms/EPG.jsx @@ -22,7 +22,6 @@ import { Box, } from '@mantine/core'; import { isNotEmpty, useForm } from '@mantine/form'; -import { IconUpload } from '@tabler/icons-react'; const EPG = ({ epg = null, isOpen, onClose }) => { const epgs = useEPGsStore((state) => state.epgs); @@ -123,7 +122,9 @@ const EPG = ({ epg = null, isOpen, onClose }) => { value: 'schedules_direct', }, ]} - onChange={(event) => handleSourceTypeChange(event.currentTarget.value)} + onChange={(event) => + handleSourceTypeChange(event.currentTarget.value) + } /> { {/* Put checkbox at the same level as Refresh Interval */} - Status - When enabled, this EPG source will auto update. - + + Status + + + When enabled, this EPG source will auto update. + + { - - + diff --git a/frontend/src/components/forms/M3UProfile.jsx b/frontend/src/components/forms/M3UProfile.jsx index ff3378a1..ac6adca2 100644 --- a/frontend/src/components/forms/M3UProfile.jsx +++ b/frontend/src/components/forms/M3UProfile.jsx @@ -15,7 +15,6 @@ import { } from '@mantine/core'; import { useWebSocket } from '../../WebSocket'; import usePlaylistsStore from '../../store/playlists'; -import { useDebounce } from '../../utils'; const RegexFormAndView = ({ profile = null, m3u, isOpen, onClose }) => { const [websocketReady, sendMessage] = useWebSocket(); @@ -139,7 +138,10 @@ const RegexFormAndView = ({ profile = null, m3u, isOpen, onClose }) => { if (!searchPattern || !sampleInput) return sampleInput; try { const regex = new RegExp(searchPattern, 'g'); - return sampleInput.replace(regex, match => `${match}`); + return sampleInput.replace( + regex, + (match) => `${match}` + ); } catch (e) { return sampleInput; } @@ -213,10 +215,14 @@ const RegexFormAndView = ({ profile = null, m3u, isOpen, onClose }) => {
- Live Regex Demonstration + + Live Regex Demonstration + - Sample Text + + Sample Text + { - Matched Text highlighted + + Matched Text{' '} + + highlighted + + { - Result After Replace - + + Result After Replace + + {getLocalReplaceResult()} diff --git a/frontend/src/components/forms/User.jsx b/frontend/src/components/forms/User.jsx index 77206725..00ea0537 100644 --- a/frontend/src/components/forms/User.jsx +++ b/frontend/src/components/forms/User.jsx @@ -205,7 +205,7 @@ const User = ({ user = null, isOpen, onClose }) => { { const formik = useFormik({ diff --git a/frontend/src/components/tables/ChannelsTable.jsx b/frontend/src/components/tables/ChannelsTable.jsx index 6e39de37..3bf71d00 100644 --- a/frontend/src/components/tables/ChannelsTable.jsx +++ b/frontend/src/components/tables/ChannelsTable.jsx @@ -300,7 +300,12 @@ const ChannelsTable = ({}) => { const groupOptions = Object.values(channelGroups) .filter((group) => activeGroupIds.has(group.id)) .map((group) => group.name); - const debouncedFilters = useDebounce(filters, 500); + const debouncedFilters = useDebounce(filters, 500, () => { + setPagination({ + ...pagination, + pageIndex: 0, + }); + }); /** * Functions @@ -338,14 +343,8 @@ const ChannelsTable = ({}) => { e.stopPropagation(); }, []); - // Remove useCallback to ensure we're using the latest setPagination function const handleFilterChange = (e) => { const { name, value } = e.target; - // First reset pagination to page 0 - setPagination({ - ...pagination, - pageIndex: 0, - }); // Then update filters setFilters((prev) => ({ ...prev, @@ -354,11 +353,6 @@ const ChannelsTable = ({}) => { }; const handleGroupChange = (value) => { - // First reset pagination to page 0 - setPagination({ - ...pagination, - pageIndex: 0, - }); // Then update filters setFilters((prev) => ({ ...prev, diff --git a/frontend/src/components/tables/EPGsTable.jsx b/frontend/src/components/tables/EPGsTable.jsx index e5fefc96..6978d005 100644 --- a/frontend/src/components/tables/EPGsTable.jsx +++ b/frontend/src/components/tables/EPGsTable.jsx @@ -19,7 +19,6 @@ import { Group, } from '@mantine/core'; import { notifications } from '@mantine/notifications'; -import { IconSquarePlus } from '@tabler/icons-react'; import { ArrowDownWideNarrow, ArrowUpDown, @@ -27,6 +26,7 @@ import { RefreshCcw, SquareMinus, SquarePen, + SquarePlus, } from 'lucide-react'; import dayjs from 'dayjs'; import useSettingsStore from '../../store/settings'; @@ -283,7 +283,7 @@ const EPGsTable = () => { { header: 'Updated', accessorKey: 'updated_at', - size: 150, + size: 175, enableSorting: false, cell: ({ cell }) => { const value = cell.getValue(); @@ -540,7 +540,7 @@ const EPGsTable = () => { diff --git a/frontend/src/components/tables/StreamProfilesTable.jsx b/frontend/src/components/tables/StreamProfilesTable.jsx index 9913eaa1..9dc82b5f 100644 --- a/frontend/src/components/tables/StreamProfilesTable.jsx +++ b/frontend/src/components/tables/StreamProfilesTable.jsx @@ -18,8 +18,15 @@ import { Switch, Stack, } from '@mantine/core'; -import { IconSquarePlus } from '@tabler/icons-react'; -import { SquareMinus, SquarePen, Check, X, Eye, EyeOff } from 'lucide-react'; +import { + SquareMinus, + SquarePen, + Check, + X, + Eye, + EyeOff, + SquarePlus, +} from 'lucide-react'; import { CustomTable, useTable } from './CustomTable'; import useLocalStorage from '../../hooks/useLocalStorage'; @@ -273,7 +280,7 @@ const StreamProfiles = () => { +
+ + + + , ] : [] )} diff --git a/frontend/src/routes.js b/frontend/src/routes.js deleted file mode 100644 index 93fadefe..00000000 --- a/frontend/src/routes.js +++ /dev/null @@ -1,14 +0,0 @@ -import ProxyManager from './components/ProxyManager'; - -// ...existing code... - -const routes = [ - ...existingRoutes, - { - path: '/proxy', - element: , - name: 'Proxy Manager', - }, -]; - -export default routes; diff --git a/frontend/src/utils.js b/frontend/src/utils.js index 619823c8..a488ce8d 100644 --- a/frontend/src/utils.js +++ b/frontend/src/utils.js @@ -38,12 +38,15 @@ export default { }; // Custom debounce hook -export function useDebounce(value, delay = 500) { +export function useDebounce(value, delay = 500, callback = null) { const [debouncedValue, setDebouncedValue] = useState(value); useEffect(() => { const handler = setTimeout(() => { setDebouncedValue(value); + if (callback) { + callback(); + } }, delay); return () => clearTimeout(handler); // Cleanup timeout on unmount or value change From 70e4e43d88cf8278999f7153d3b5bb3853a9d2ab Mon Sep 17 00:00:00 2001 From: Sam LaManna Date: Sun, 1 Jun 2025 12:53:17 -0400 Subject: [PATCH 0438/1435] Add Issue Forms --- .github/ISSUE_TEMPLATE/bug_report.yml | 63 ++++++++++++++++++++++ .github/ISSUE_TEMPLATE/config.yml | 1 + .github/ISSUE_TEMPLATE/feature_request.yml | 38 +++++++++++++ 3 files changed, 102 insertions(+) create mode 100644 .github/ISSUE_TEMPLATE/bug_report.yml create mode 100644 .github/ISSUE_TEMPLATE/config.yml create mode 100644 .github/ISSUE_TEMPLATE/feature_request.yml diff --git a/.github/ISSUE_TEMPLATE/bug_report.yml b/.github/ISSUE_TEMPLATE/bug_report.yml new file mode 100644 index 00000000..ed29c346 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/bug_report.yml @@ -0,0 +1,63 @@ +name: Bug Report +description: I have an issue with Dispatcharr +title: "[Bug]: " +labels: ["Bug", "Triage"] +projects: [] +assignees: [] +body: + - type: markdown + attributes: + value: | + Please make sure you search for similar issues before submitting. Thank you for your bug report! + - type: textarea + id: describe-the-bug + attributes: + label: Describe the bug + description: Make sure to attach screenshots if possible! + placeholder: Tell us what you see! + value: "A clear and concise description of what the bug is. What did you expect to happen?" + validations: + required: true + - type: textarea + id: reproduce + attributes: + label: How can we recreate this bug? + description: Be detailed! + placeholder: Tell us what you see! + value: "1. Go to '...' 2. Click on '....' 3. Scroll down to '....' 4. See error" + validations: + required: true + - type: input + id: dispatcharr-version + attributes: + label: Dispatcharr Version + description: What version of Dispatcharr are you running? + placeholder: Located bottom left of main screen + validations: + required: true + - type: input + id: docker-version + attributes: + label: Docker Version + description: What version of Docker are you running? + placeholder: docker --version + validations: + required: true + - type: textarea + id: docker-compose + attributes: + label: What's in your Docker Compose file? + description: Please share your docker-compose.yml file + placeholder: Tell us what you see! + value: "If not using Docker Compose just put not using." + validations: + required: true + - type: textarea + id: client-info + attributes: + label: Client Information + description: What are you using the view the streams from Dispatcharr + placeholder: Tell us what you see! 
+ value: "Device, App, Versions for both, etc..." + validations: + required: true \ No newline at end of file diff --git a/.github/ISSUE_TEMPLATE/config.yml b/.github/ISSUE_TEMPLATE/config.yml new file mode 100644 index 00000000..ec4bb386 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/config.yml @@ -0,0 +1 @@ +blank_issues_enabled: false \ No newline at end of file diff --git a/.github/ISSUE_TEMPLATE/feature_request.yml b/.github/ISSUE_TEMPLATE/feature_request.yml new file mode 100644 index 00000000..2de56f8c --- /dev/null +++ b/.github/ISSUE_TEMPLATE/feature_request.yml @@ -0,0 +1,38 @@ +name: Feature request +description: I want to suggest a new feature for Dispatcharr +title: "[Feature]: " +labels: ["Feature Request"] +projects: [] +assignees: [] +body: + - type: markdown + attributes: + value: | + Thank you for helping to make Dispatcharr better! + - type: textarea + id: describe-problem + attributes: + label: Is your feature request related to a problem? + description: Make sure to attach screenshots if possible! + placeholder: Tell us what you see! + value: "A clear and concise description of what the problem is. Ex. I'm always frustrated when [...]" + validations: + required: true + - type: textarea + id: describe-solution + attributes: + label: Describe the solution you'd like + description: A clear and concise description of what you want to happen. + placeholder: Tell us what you see! + value: "Describe here." + validations: + required: true + - type: textarea + id: extras + attributes: + label: Additional context + description: Anything else you want to add? + placeholder: Tell us what you see! + value: "Nothing Extra" + validations: + required: true \ No newline at end of file From 39a06f9ba2ee2e6577d7d965342af18f3be52781 Mon Sep 17 00:00:00 2001 From: Sam LaManna Date: Sun, 1 Jun 2025 14:10:50 -0400 Subject: [PATCH 0439/1435] Add Support for Github Organization Issue Type --- .github/ISSUE_TEMPLATE/bug_report.yml | 1 + .github/ISSUE_TEMPLATE/feature_request.yml | 1 + 2 files changed, 2 insertions(+) diff --git a/.github/ISSUE_TEMPLATE/bug_report.yml b/.github/ISSUE_TEMPLATE/bug_report.yml index ed29c346..d36be10c 100644 --- a/.github/ISSUE_TEMPLATE/bug_report.yml +++ b/.github/ISSUE_TEMPLATE/bug_report.yml @@ -2,6 +2,7 @@ name: Bug Report description: I have an issue with Dispatcharr title: "[Bug]: " labels: ["Bug", "Triage"] +type: "Bug" projects: [] assignees: [] body: diff --git a/.github/ISSUE_TEMPLATE/feature_request.yml b/.github/ISSUE_TEMPLATE/feature_request.yml index 2de56f8c..bf7db830 100644 --- a/.github/ISSUE_TEMPLATE/feature_request.yml +++ b/.github/ISSUE_TEMPLATE/feature_request.yml @@ -2,6 +2,7 @@ name: Feature request description: I want to suggest a new feature for Dispatcharr title: "[Feature]: " labels: ["Feature Request"] +type: "Feature" projects: [] assignees: [] body: From 58f5287a530fa51173e4c84a2043f2da1b88c87c Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Mon, 2 Jun 2025 10:44:30 -0500 Subject: [PATCH 0440/1435] Improved logging for M3U processing. 
--- apps/m3u/tasks.py | 20 +++++++++----------- 1 file changed, 9 insertions(+), 11 deletions(-) diff --git a/apps/m3u/tasks.py b/apps/m3u/tasks.py index b1b1170d..9756a1f2 100644 --- a/apps/m3u/tasks.py +++ b/apps/m3u/tasks.py @@ -239,7 +239,7 @@ def process_groups(account, groups): group_objs = [] groups_to_create = [] for group_name, custom_props in groups.items(): - logger.debug(f"Handling group: {group_name}") + logger.debug(f"Handling group for M3U account {account.id}: {group_name}") if (group_name not in existing_groups): groups_to_create.append(ChannelGroup( name=group_name, @@ -405,7 +405,7 @@ def process_m3u_batch(account_id, batch, groups, hash_keys): stream_hashes = {} # compiled_filters = [(f.filter_type, re.compile(f.regex_pattern, re.IGNORECASE)) for f in filters] - logger.debug(f"Processing batch of {len(batch)}") + logger.debug(f"Processing batch of {len(batch)} for M3U account {account_id}") for stream_info in batch: try: name, url = stream_info["name"], stream_info["url"] @@ -487,7 +487,7 @@ def process_m3u_batch(account_id, batch, groups, hash_keys): except Exception as e: logger.error(f"Bulk create failed: {str(e)}") - retval = f"Batch processed: {len(streams_to_create)} created, {len(streams_to_update)} updated." + retval = f"M3U account: {account_id}, Batch processed: {len(streams_to_create)} created, {len(streams_to_update)} updated." # Aggressive garbage collection #del streams_to_create, streams_to_update, stream_hashes, existing_streams @@ -502,11 +502,11 @@ def cleanup_streams(account_id): m3u_account__m3u_account=account, m3u_account__enabled=True, ).values_list('id', flat=True) - logger.info(f"Found {len(existing_groups)} active groups") + logger.info(f"Found {len(existing_groups)} active groups for M3U account {account_id}") # Calculate cutoff date for stale streams stale_cutoff = timezone.now() - timezone.timedelta(days=account.stale_stream_days) - logger.info(f"Removing streams not seen since {stale_cutoff}") + logger.info(f"Removing streams not seen since {stale_cutoff} for M3U account {account_id}") # Delete streams that are not in active groups streams_to_delete = Stream.objects.filter( @@ -527,7 +527,7 @@ def cleanup_streams(account_id): streams_to_delete.delete() stale_streams.delete() - logger.info(f"Cleanup complete: {deleted_count} streams removed due to group filter, {stale_count} removed as stale") + logger.info(f"Cleanup for M3U account {account_id} complete: {deleted_count} streams removed due to group filter, {stale_count} removed as stale") @shared_task def refresh_m3u_groups(account_id, use_cache=False, full_refresh=False): @@ -712,7 +712,7 @@ def refresh_m3u_groups(account_id, use_cache=False, full_refresh=False): group_name = parsed["attributes"]["group-title"] # Log new groups as they're discovered if group_name not in groups: - logger.debug(f"Found new group: '{group_name}'") + logger.debug(f"Found new group for M3U account {account_id}: '{group_name}'") groups[group_name] = {} extinf_data.append(parsed) @@ -729,7 +729,7 @@ def refresh_m3u_groups(account_id, use_cache=False, full_refresh=False): # Periodically log progress for large files if valid_stream_count % 1000 == 0: - logger.debug(f"Processed {valid_stream_count} valid streams so far...") + logger.debug(f"Processed {valid_stream_count} valid streams so far for M3U account: {account_id}") # Log summary statistics logger.info(f"M3U parsing complete - Lines: {line_count}, EXTINF: {extinf_count}, URLs: {url_count}, Valid streams: {valid_stream_count}") @@ -962,7 +962,7 @@ def 
refresh_single_m3u_account(account_id): account.save(update_fields=['status']) if account.account_type == M3UAccount.Types.STADNARD: - logger.debug(f"Processing Standard account with groups: {existing_groups}") + logger.debug(f"Processing Standard account ({account_id}) with groups: {existing_groups}") # Break into batches and process in parallel batches = [extinf_data[i:i + BATCH_SIZE] for i in range(0, len(extinf_data), BATCH_SIZE)] task_group = group(process_m3u_batch.s(account_id, batch, existing_groups, hash_keys) for batch in batches) @@ -1107,8 +1107,6 @@ def refresh_single_m3u_account(account_id): message=account.last_message ) - print(f"Function took {elapsed_time} seconds to execute.") - except Exception as e: logger.error(f"Error processing M3U for account {account_id}: {str(e)}") account.status = M3UAccount.Status.ERROR From 6ce387b0b07f02da54389abf37b407cee1e830cf Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Mon, 2 Jun 2025 18:03:32 -0500 Subject: [PATCH 0441/1435] Auto-scales Celery based on demand. Should lower overall memory and CPU usage while allowing for high cpu demand tasks to complete quickly. Closes #111 --- dispatcharr/settings.py | 9 --------- docker/uwsgi.debug.ini | 2 +- docker/uwsgi.dev.ini | 2 +- docker/uwsgi.ini | 2 +- 4 files changed, 3 insertions(+), 12 deletions(-) diff --git a/dispatcharr/settings.py b/dispatcharr/settings.py index 4e1e0d55..06084b49 100644 --- a/dispatcharr/settings.py +++ b/dispatcharr/settings.py @@ -199,15 +199,6 @@ CELERY_BROKER_TRANSPORT_OPTIONS = { CELERY_ACCEPT_CONTENT = ['json'] CELERY_TASK_SERIALIZER = 'json' -# Memory management settings -#CELERY_WORKER_MAX_TASKS_PER_CHILD = 10 # Restart worker after 10 tasks to free memory -#CELERY_WORKER_PREFETCH_MULTIPLIER = 1 # Don't prefetch tasks - process one at a time -#CELERY_TASK_ACKS_LATE = True # Only acknowledge tasks after they're processed -#CELERY_TASK_TIME_LIMIT = 3600 # 1 hour time limit per task -#CELERY_TASK_SOFT_TIME_LIMIT = 3540 # Soft limit 60 seconds before hard limit -#CELERY_WORKER_CANCEL_LONG_RUNNING_TASKS_ON_CONNECTION_LOSS = True # Cancel tasks if connection lost -#CELERY_TASK_IGNORE_RESULT = True # Don't store results unless explicitly needed - CELERY_BEAT_SCHEDULER = "django_celery_beat.schedulers.DatabaseScheduler" CELERY_BEAT_SCHEDULE = { 'fetch-channel-statuses': { diff --git a/docker/uwsgi.debug.ini b/docker/uwsgi.debug.ini index 6ca855f3..e049df87 100644 --- a/docker/uwsgi.debug.ini +++ b/docker/uwsgi.debug.ini @@ -8,7 +8,7 @@ exec-before = python /app/scripts/wait_for_redis.py ; Start Redis first attach-daemon = redis-server ; Then start other services -attach-daemon = celery -A dispatcharr worker --concurrency=4 +attach-daemon = celery -A dispatcharr worker --autoscale=6,1 attach-daemon = celery -A dispatcharr beat attach-daemon = daphne -b 0.0.0.0 -p 8001 dispatcharr.asgi:application attach-daemon = cd /app/frontend && npm run dev diff --git a/docker/uwsgi.dev.ini b/docker/uwsgi.dev.ini index f3e5238e..7e50f2ef 100644 --- a/docker/uwsgi.dev.ini +++ b/docker/uwsgi.dev.ini @@ -10,7 +10,7 @@ exec-pre = python /app/scripts/wait_for_redis.py ; Start Redis first attach-daemon = redis-server ; Then start other services -attach-daemon = celery -A dispatcharr worker --concurrency=4 +attach-daemon = celery -A dispatcharr worker --autoscale=6,1 attach-daemon = celery -A dispatcharr beat attach-daemon = daphne -b 0.0.0.0 -p 8001 dispatcharr.asgi:application attach-daemon = cd /app/frontend && npm run dev diff --git a/docker/uwsgi.ini b/docker/uwsgi.ini index 
32eb6e3c..b35ea5bf 100644 --- a/docker/uwsgi.ini +++ b/docker/uwsgi.ini @@ -10,7 +10,7 @@ exec-pre = python /app/scripts/wait_for_redis.py ; Start Redis first attach-daemon = redis-server ; Then start other services -attach-daemon = celery -A dispatcharr worker --concurrency=4 +attach-daemon = celery -A dispatcharr worker --autoscale=6,1 attach-daemon = celery -A dispatcharr beat attach-daemon = daphne -b 0.0.0.0 -p 8001 dispatcharr.asgi:application From a72eaf118ff1fe204f7e4967835d752004927cf1 Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Tue, 3 Jun 2025 10:59:53 -0500 Subject: [PATCH 0442/1435] Refactor channel info retrieval for safer decoding and improved error logging. Hopefully fixes stats not showing sometimes. --- apps/proxy/ts_proxy/channel_status.py | 32 +++++++++++++++++---------- 1 file changed, 20 insertions(+), 12 deletions(-) diff --git a/apps/proxy/ts_proxy/channel_status.py b/apps/proxy/ts_proxy/channel_status.py index dd18d922..50e84eec 100644 --- a/apps/proxy/ts_proxy/channel_status.py +++ b/apps/proxy/ts_proxy/channel_status.py @@ -307,16 +307,23 @@ class ChannelStatus: client_count = proxy_server.redis_client.scard(client_set_key) or 0 # Calculate uptime - created_at = float(metadata.get(ChannelMetadataField.INIT_TIME.encode('utf-8'), b'0').decode('utf-8')) + init_time_bytes = metadata.get(ChannelMetadataField.INIT_TIME.encode('utf-8'), b'0') + created_at = float(init_time_bytes.decode('utf-8')) uptime = time.time() - created_at if created_at > 0 else 0 + # Safely decode bytes or use defaults + def safe_decode(bytes_value, default="unknown"): + if bytes_value is None: + return default + return bytes_value.decode('utf-8') + # Simplified info info = { 'channel_id': channel_id, - 'state': metadata.get(ChannelMetadataField.STATE.encode('utf-8'), b'unknown').decode('utf-8'), - 'url': metadata.get(ChannelMetadataField.URL.encode('utf-8'), b'').decode('utf-8'), - 'stream_profile': metadata.get(ChannelMetadataField.STREAM_PROFILE.encode('utf-8'), b'').decode('utf-8'), - 'owner': metadata.get(ChannelMetadataField.OWNER.encode('utf-8'), b'unknown').decode('utf-8'), + 'state': safe_decode(metadata.get(ChannelMetadataField.STATE.encode('utf-8'))), + 'url': safe_decode(metadata.get(ChannelMetadataField.URL.encode('utf-8')), ""), + 'stream_profile': safe_decode(metadata.get(ChannelMetadataField.STREAM_PROFILE.encode('utf-8')), ""), + 'owner': safe_decode(metadata.get(ChannelMetadataField.OWNER.encode('utf-8'))), 'buffer_index': int(buffer_index_value.decode('utf-8')) if buffer_index_value else 0, 'client_count': client_count, 'uptime': uptime @@ -376,14 +383,15 @@ class ChannelStatus: # Efficient way - just retrieve the essentials client_info = { 'client_id': client_id_str, - 'user_agent': proxy_server.redis_client.hget(client_key, 'user_agent'), - 'ip_address': proxy_server.redis_client.hget(client_key, 'ip_address').decode('utf-8'), } - if client_info['user_agent']: - client_info['user_agent'] = client_info['user_agent'].decode('utf-8') - else: - client_info['user_agent'] = 'unknown' + # Safely get user_agent and ip_address + user_agent_bytes = proxy_server.redis_client.hget(client_key, 'user_agent') + client_info['user_agent'] = safe_decode(user_agent_bytes) + + ip_address_bytes = proxy_server.redis_client.hget(client_key, 'ip_address') + if ip_address_bytes: + client_info['ip_address'] = safe_decode(ip_address_bytes) # Just get connected_at for client age connected_at_bytes = proxy_server.redis_client.hget(client_key, 'connected_at') @@ -416,5 +424,5 @@ class ChannelStatus: 
             return info
 
         except Exception as e:
-            logger.error(f"Error getting channel info: {e}")
+            logger.error(f"Error getting channel info: {e}", exc_info=True)  # Added exc_info for better debugging
             return None

From e6c30f178f112f2671750c280f07a29d3ade40b4 Mon Sep 17 00:00:00 2001
From: GitHub Actions
Date: Tue, 3 Jun 2025 21:35:26 +0000
Subject: [PATCH 0443/1435] Release v0.5.2

---
 version.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/version.py b/version.py
index 62c02ebc..193a1ea5 100644
--- a/version.py
+++ b/version.py
@@ -1,5 +1,5 @@
 """
 Dispatcharr version information.
 """
-__version__ = '0.5.1'  # Follow semantic versioning (MAJOR.MINOR.PATCH)
+__version__ = '0.5.2'  # Follow semantic versioning (MAJOR.MINOR.PATCH)
 __timestamp__ = None  # Set during CI/CD build process

From 1f0e643954b801bae8a6e90a4a165c9f0662206c Mon Sep 17 00:00:00 2001
From: SergeantPanda
Date: Tue, 3 Jun 2025 19:10:14 -0500
Subject: [PATCH 0444/1435] Add support for dynamic tvg-id source selection in
 M3U generation

`tvg_id_source` accepts `tvg_id` or `gracenote`; if neither is selected it
defaults to `channel_number`.
---
 apps/output/views.py | 15 +++++++++++++--
 1 file changed, 13 insertions(+), 2 deletions(-)

diff --git a/apps/output/views.py b/apps/output/views.py
index d550ec8d..bbcc9269 100644
--- a/apps/output/views.py
+++ b/apps/output/views.py
@@ -24,6 +24,10 @@ def generate_m3u(request, profile_name=None):
     # Check if the request wants to use direct logo URLs instead of cache
     use_cached_logos = request.GET.get('cachedlogos', 'true').lower() != 'false'
 
+    # Get the source to use for tvg-id value
+    # Options: 'channel_number' (default), 'tvg_id', 'gracenote'
+    tvg_id_source = request.GET.get('tvg_id_source', 'channel_number').lower()
+
     m3u_content = "#EXTM3U\n"
     for channel in channels:
         group_title = channel.channel_group.name if channel.channel_group else "Default"
@@ -37,8 +41,15 @@ def generate_m3u(request, profile_name=None):
         else:
             formatted_channel_number = ""
 
-        # Use formatted channel number for tvg_id to ensure proper matching with EPG
-        tvg_id = str(formatted_channel_number) if formatted_channel_number != "" else str(channel.id)
+        # Determine the tvg-id based on the selected source
+        if tvg_id_source == 'tvg_id' and channel.tvg_id:
+            tvg_id = channel.tvg_id
+        elif tvg_id_source == 'gracenote' and channel.tvc_guide_stationid:
+            tvg_id = channel.tvc_guide_stationid
+        else:
+            # Default to channel number (original behavior)
+            tvg_id = str(formatted_channel_number) if formatted_channel_number != "" else str(channel.id)
+
         tvg_name = channel.name
         tvg_logo = ""
 

From 5cb2be7c9334af7cc28ed7dbbbd0b3371c302dab Mon Sep 17 00:00:00 2001
From: SergeantPanda
Date: Tue, 3 Jun 2025 19:37:57 -0500
Subject: [PATCH 0445/1435] Add support for dynamic tvg-id source selection in
 EPG generation

tvg_id_source accepts tvg_id or gracenote; if neither is selected it defaults
to channel_number.
---
 apps/output/views.py | 53 +++++++++++++++++++++++++++++++-------------
 1 file changed, 38 insertions(+), 15 deletions(-)

diff --git a/apps/output/views.py b/apps/output/views.py
index bbcc9269..eb00270d 100644
--- a/apps/output/views.py
+++ b/apps/output/views.py
@@ -199,18 +199,33 @@ def generate_epg(request, profile_name=None):
     else:
         channels = Channel.objects.all()
 
+    # Check if the request wants to use direct logo URLs instead of cache
+    use_cached_logos = request.GET.get('cachedlogos', 'true').lower() != 'false'
+
+    # Get the source to use for tvg-id value
+    # Options: 'channel_number' (default), 'tvg_id', 'gracenote'
+    
tvg_id_source = request.GET.get('tvg_id_source', 'channel_number').lower() + # Retrieve all active channels for channel in channels: # Format channel number as integer if it has no decimal component - same as M3U generation if channel.channel_number is not None: if channel.channel_number == int(channel.channel_number): - formatted_channel_number = str(int(channel.channel_number)) + formatted_channel_number = int(channel.channel_number) else: - formatted_channel_number = str(channel.channel_number) + formatted_channel_number = channel.channel_number else: - formatted_channel_number = str(channel.id) - # Check if the request wants to use direct logo URLs instead of cache - use_cached_logos = request.GET.get('cachedlogos', 'true').lower() != 'false' + formatted_channel_number = "" + + # Determine the channel ID based on the selected source + if tvg_id_source == 'tvg_id' and channel.tvg_id: + channel_id = channel.tvg_id + elif tvg_id_source == 'gracenote' and channel.tvc_guide_stationid: + channel_id = channel.tvc_guide_stationid + else: + # Default to channel number (original behavior) + channel_id = str(formatted_channel_number) if formatted_channel_number != "" else str(channel.id) + # Add channel logo if available tvg_logo = "" if channel.logo: @@ -226,21 +241,29 @@ def generate_epg(request, profile_name=None): else: tvg_logo = request.build_absolute_uri(reverse('api:channels:logo-cache', args=[channel.logo.id])) display_name = channel.epg_data.name if channel.epg_data else channel.name - xml_lines.append(f' ') + xml_lines.append(f' ') xml_lines.append(f' {html.escape(display_name)}') xml_lines.append(f' ') xml_lines.append(' ') for channel in channels: - # Use the same formatting for channel ID in program entries - if channel.channel_number is not None: - if channel.channel_number == int(channel.channel_number): - formatted_channel_number = str(int(channel.channel_number)) - else: - formatted_channel_number = str(channel.channel_number) + # Use the same channel ID determination for program entries + if tvg_id_source == 'tvg_id' and channel.tvg_id: + channel_id = channel.tvg_id + elif tvg_id_source == 'gracenote' and channel.tvc_guide_stationid: + channel_id = channel.tvc_guide_stationid else: - formatted_channel_number = str(channel.id) + # Get formatted channel number + if channel.channel_number is not None: + if channel.channel_number == int(channel.channel_number): + formatted_channel_number = int(channel.channel_number) + else: + formatted_channel_number = channel.channel_number + else: + formatted_channel_number = "" + # Default to channel number + channel_id = str(formatted_channel_number) if formatted_channel_number != "" else str(channel.id) display_name = channel.epg_data.name if channel.epg_data else channel.name if not channel.epg_data: @@ -249,7 +272,7 @@ def generate_epg(request, profile_name=None): num_days = 1 # Default to 1 days of dummy EPG data program_length_hours = 4 # Default to 4-hour program blocks generate_dummy_epg( - formatted_channel_number, + channel_id, display_name, xml_lines, num_days=num_days, @@ -260,7 +283,7 @@ def generate_epg(request, profile_name=None): for prog in programs: start_str = prog.start_time.strftime("%Y%m%d%H%M%S %z") stop_str = prog.end_time.strftime("%Y%m%d%H%M%S %z") - xml_lines.append(f' ') + xml_lines.append(f' ') xml_lines.append(f' {html.escape(prog.title)}') # Add subtitle if available From abef4620d06370ffbb1d316f0e07f368024d4d75 Mon Sep 17 00:00:00 2001 From: Sam LaManna Date: Tue, 3 Jun 2025 22:19:28 -0400 Subject: [PATCH 
0446/1435] add triage to feature requests too

---
 .github/ISSUE_TEMPLATE/feature_request.yml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.github/ISSUE_TEMPLATE/feature_request.yml b/.github/ISSUE_TEMPLATE/feature_request.yml
index bf7db830..5aa0c337 100644
--- a/.github/ISSUE_TEMPLATE/feature_request.yml
+++ b/.github/ISSUE_TEMPLATE/feature_request.yml
@@ -1,7 +1,7 @@
 name: Feature request
 description: I want to suggest a new feature for Dispatcharr
 title: "[Feature]: "
-labels: ["Feature Request"]
+labels: ["Feature Request", "Triage"]
 type: "Feature"
 projects: []
 assignees: []

From 2a9a98cad7101a91b9709e9b305c1f8550ce0fe4 Mon Sep 17 00:00:00 2001
From: Sam LaManna
Date: Tue, 3 Jun 2025 22:20:00 -0400
Subject: [PATCH 0447/1435] Remove redundant labels now that we have types
 fully setup

---
 .github/ISSUE_TEMPLATE/bug_report.yml      | 2 +-
 .github/ISSUE_TEMPLATE/feature_request.yml | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/.github/ISSUE_TEMPLATE/bug_report.yml b/.github/ISSUE_TEMPLATE/bug_report.yml
index d36be10c..47f12f7d 100644
--- a/.github/ISSUE_TEMPLATE/bug_report.yml
+++ b/.github/ISSUE_TEMPLATE/bug_report.yml
@@ -1,7 +1,7 @@
 name: Bug Report
 description: I have an issue with Dispatcharr
 title: "[Bug]: "
-labels: ["Bug", "Triage"]
+labels: ["Triage"]
 type: "Bug"
 projects: []
 assignees: []

diff --git a/.github/ISSUE_TEMPLATE/feature_request.yml b/.github/ISSUE_TEMPLATE/feature_request.yml
index 5aa0c337..77a03df7 100644
--- a/.github/ISSUE_TEMPLATE/feature_request.yml
+++ b/.github/ISSUE_TEMPLATE/feature_request.yml
@@ -1,7 +1,7 @@
 name: Feature request
 description: I want to suggest a new feature for Dispatcharr
 title: "[Feature]: "
-labels: ["Feature Request", "Triage"]
+labels: ["Triage"]
 type: "Feature"
 projects: []
 assignees: []

From 722965b987c498ba66ced2480b36bfb3bc51b539 Mon Sep 17 00:00:00 2001
From: SergeantPanda
Date: Tue, 3 Jun 2025 21:32:24 -0500
Subject: [PATCH 0448/1435] Replaced old images with ghcr images.

---
 docker/docker-compose.yml | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/docker/docker-compose.yml b/docker/docker-compose.yml
index 59e21010..d195fbdc 100644
--- a/docker/docker-compose.yml
+++ b/docker/docker-compose.yml
@@ -1,6 +1,6 @@
 services:
   web:
-    image: dispatcharr/dispatcharr:alpha-v1
+    image: ghcr.io/dispatcharr/dispatcharr:latest
     container_name: dispatcharr_web
     ports:
       - 9191:9191
@@ -32,7 +32,7 @@
 #          capabilities: [gpu]

   celery:
-    image: dispatcharr/dispatcharr:alpha-v1
+    image: ghcr.io/dispatcharr/dispatcharr:latest
     container_name: dispatcharr_celery
     depends_on:
       - db

From e7bf8cbedebb4d72c7edf867e1b198ea3cae653e Mon Sep 17 00:00:00 2001
From: SergeantPanda
Date: Tue, 3 Jun 2025 22:00:17 -0500
Subject: [PATCH 0449/1435] Added support for LIVE tag and dd_progid numbering
 systems for EPG.
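
For reference, the two XMLTV constructs this commit starts honoring can be
exercised with a minimal sketch like the following (sample values are
illustrative; the logic mirrors the extract_custom_properties() hunk below):

    import xml.etree.ElementTree as ET

    prog = ET.fromstring(
        '<programme channel="101">'
        '<episode-num system="dd_progid">EP00003026.0665</episode-num>'
        '<live />'
        '</programme>'
    )
    custom_props = {}
    for ep_num in prog.findall('episode-num'):
        if ep_num.get('system') == 'dd_progid' and ep_num.text:
            custom_props['dd_progid'] = ep_num.text.strip()
    if prog.find('live') is not None:
        custom_props['live'] = True
    print(custom_props)  # {'dd_progid': 'EP00003026.0665', 'live': True}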
---
 apps/epg/tasks.py    | 5 ++++-
 apps/output/views.py | 7 +++++++
 2 files changed, 11 insertions(+), 1 deletion(-)

diff --git a/apps/epg/tasks.py b/apps/epg/tasks.py
index 3c2df895..d3062171 100644
--- a/apps/epg/tasks.py
+++ b/apps/epg/tasks.py
@@ -1634,6 +1634,9 @@ def extract_custom_properties(prog):
             elif system == 'onscreen' and ep_num.text:
                 # Just store the raw onscreen format
                 custom_props['onscreen_episode'] = ep_num.text.strip()
+            elif system == 'dd_progid' and ep_num.text:
+                # Store the dd_progid format
+                custom_props['dd_progid'] = ep_num.text.strip()

     # Extract ratings more efficiently
     rating_elem = prog.find('rating')
@@ -1669,7 +1672,7 @@ def extract_custom_properties(prog):
             custom_props['icon'] = icon_elem.get('src')

     # Simpler approach for boolean flags
-    for kw in ['previously-shown', 'premiere', 'new']:
+    for kw in ['previously-shown', 'premiere', 'new', 'live']:
         if prog.find(kw) is not None:
             custom_props[kw.replace('-', '_')] = True

diff --git a/apps/output/views.py b/apps/output/views.py
index eb00270d..afe3927a 100644
--- a/apps/output/views.py
+++ b/apps/output/views.py
@@ -314,6 +314,10 @@ def generate_epg(request, profile_name=None):
                     if 'onscreen_episode' in custom_data:
                         xml_lines.append(f'      <episode-num system="onscreen">{html.escape(custom_data["onscreen_episode"])}</episode-num>')

+                    # Handle dd_progid format
+                    if 'dd_progid' in custom_data:
+                        xml_lines.append(f'      <episode-num system="dd_progid">{html.escape(custom_data["dd_progid"])}</episode-num>')
+
                     # Add season and episode numbers in xmltv_ns format if available
                     if 'season' in custom_data and 'episode' in custom_data:
                         season = int(custom_data['season']) - 1 if str(custom_data['season']).isdigit() else 0
@@ -360,6 +364,9 @@ def generate_epg(request, profile_name=None):
                     if custom_data.get('new', False):
                         xml_lines.append(f'      <new />')

+                    if custom_data.get('live', False):
+                        xml_lines.append(f'      <live />')
+
             except Exception as e:
                 xml_lines.append(f'    ')

From a767f28eb66745e0c713c360314d5e3cbb7bed20 Mon Sep 17 00:00:00 2001
From: SergeantPanda
Date: Wed, 4 Jun 2025 17:06:00 -0500
Subject: [PATCH 0450/1435] Allow stale stream days to be set to 0 to disable
 retention completely.

---
 frontend/src/components/forms/M3U.jsx | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/frontend/src/components/forms/M3U.jsx b/frontend/src/components/forms/M3U.jsx
index 9affa984..6a2a161a 100644
--- a/frontend/src/components/forms/M3U.jsx
+++ b/frontend/src/components/forms/M3U.jsx
@@ -225,7 +225,7 @@ const M3U = ({
           id="account_type"
           name="account_type"
           label="Account Type"
-          description="Standard for direct M3U URLs, Xtream Codes for panel-based services"
+          description={<>Standard for direct M3U URLs,
Xtream Codes for panel-based services} data={[ { value: 'STD', @@ -233,7 +233,7 @@ const M3U = ({ }, { value: 'XC', - label: 'XTream Codes', + label: 'Xtream Codes', }, ]} key={form.key('account_type')} @@ -324,7 +324,7 @@ const M3U = ({ /> Date: Wed, 4 Jun 2025 17:16:59 -0500 Subject: [PATCH 0451/1435] Don't auto populate stale field to 7 days if 0 is set. Closes #123 --- frontend/src/components/forms/M3U.jsx | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/frontend/src/components/forms/M3U.jsx b/frontend/src/components/forms/M3U.jsx index 6a2a161a..24ddd377 100644 --- a/frontend/src/components/forms/M3U.jsx +++ b/frontend/src/components/forms/M3U.jsx @@ -85,7 +85,7 @@ const M3U = ({ account_type: m3uAccount.account_type, username: m3uAccount.username ?? '', password: '', - stale_stream_days: m3uAccount.stale_stream_days || 7, + stale_stream_days: m3uAccount.stale_stream_days !== undefined && m3uAccount.stale_stream_days !== null ? m3uAccount.stale_stream_days : 7, }); if (m3uAccount.account_type == 'XC') { From 5e2757f578156346f9e9d1384393a9f5c106e8ce Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Wed, 4 Jun 2025 17:42:11 -0500 Subject: [PATCH 0452/1435] Set back to a minimum of 1. Task needs to be updated first or all streams get removed. --- frontend/src/components/forms/M3U.jsx | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/frontend/src/components/forms/M3U.jsx b/frontend/src/components/forms/M3U.jsx index 24ddd377..caa7ae31 100644 --- a/frontend/src/components/forms/M3U.jsx +++ b/frontend/src/components/forms/M3U.jsx @@ -324,7 +324,7 @@ const M3U = ({ /> Date: Thu, 5 Jun 2025 21:26:11 -0500 Subject: [PATCH 0453/1435] Calculate stale time from start of scan not start of stale cleanup function. --- apps/m3u/tasks.py | 9 +++++---- frontend/src/components/forms/M3U.jsx | 2 +- 2 files changed, 6 insertions(+), 5 deletions(-) diff --git a/apps/m3u/tasks.py b/apps/m3u/tasks.py index 9756a1f2..e5159605 100644 --- a/apps/m3u/tasks.py +++ b/apps/m3u/tasks.py @@ -496,7 +496,7 @@ def process_m3u_batch(account_id, batch, groups, hash_keys): return retval -def cleanup_streams(account_id): +def cleanup_streams(account_id, scan_start_time=timezone.now): account = M3UAccount.objects.get(id=account_id, is_active=True) existing_groups = ChannelGroup.objects.filter( m3u_account__m3u_account=account, @@ -505,7 +505,7 @@ def cleanup_streams(account_id): logger.info(f"Found {len(existing_groups)} active groups for M3U account {account_id}") # Calculate cutoff date for stale streams - stale_cutoff = timezone.now() - timezone.timedelta(days=account.stale_stream_days) + stale_cutoff = scan_start_time - timezone.timedelta(days=account.stale_stream_days) logger.info(f"Removing streams not seen since {stale_cutoff} for M3U account {account_id}") # Delete streams that are not in active groups @@ -833,7 +833,8 @@ def refresh_single_m3u_account(account_id): return f"Task already running for account_id={account_id}." 
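# (Editorial aside, not part of the diff: the timestamp captured just below is
# what cleanup_streams() now receives, so the stale window becomes
#     stale_cutoff = scan_start_time - timezone.timedelta(days=account.stale_stream_days)
# anchored to when the refresh began rather than when cleanup finally runs,
# meaning a slow scan no longer shifts the cutoff mid-run.)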
# Record start time - start_time = time.time() + refresh_start_timestamp = timezone.now() # For the cleanup function + start_time = time.time() # For tracking elapsed time as float streams_created = 0 streams_updated = 0 streams_deleted = 0 @@ -1077,7 +1078,7 @@ def refresh_single_m3u_account(account_id): Stream.objects.filter(id=-1).exists() # This will never find anything but ensures DB sync # Now run cleanup - cleanup_streams(account_id) + cleanup_streams(account_id, refresh_start_timestamp) # Calculate elapsed time elapsed_time = time.time() - start_time diff --git a/frontend/src/components/forms/M3U.jsx b/frontend/src/components/forms/M3U.jsx index caa7ae31..24ddd377 100644 --- a/frontend/src/components/forms/M3U.jsx +++ b/frontend/src/components/forms/M3U.jsx @@ -324,7 +324,7 @@ const M3U = ({ /> Date: Thu, 5 Jun 2025 21:34:14 -0500 Subject: [PATCH 0454/1435] Properly keep track of removed streams. --- apps/m3u/tasks.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/apps/m3u/tasks.py b/apps/m3u/tasks.py index e5159605..ce46a2ec 100644 --- a/apps/m3u/tasks.py +++ b/apps/m3u/tasks.py @@ -527,8 +527,12 @@ def cleanup_streams(account_id, scan_start_time=timezone.now): streams_to_delete.delete() stale_streams.delete() + total_deleted = deleted_count + stale_count logger.info(f"Cleanup for M3U account {account_id} complete: {deleted_count} streams removed due to group filter, {stale_count} removed as stale") + # Return the total count of deleted streams + return total_deleted + @shared_task def refresh_m3u_groups(account_id, use_cache=False, full_refresh=False): if not acquire_task_lock('refresh_m3u_account_groups', account_id): @@ -1078,7 +1082,7 @@ def refresh_single_m3u_account(account_id): Stream.objects.filter(id=-1).exists() # This will never find anything but ensures DB sync # Now run cleanup - cleanup_streams(account_id, refresh_start_timestamp) + streams_deleted = cleanup_streams(account_id, refresh_start_timestamp) # Calculate elapsed time elapsed_time = time.time() - start_time From 0ec5ffff33a4f732737bea885f865e4a3beffe6e Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Fri, 6 Jun 2025 08:00:19 -0500 Subject: [PATCH 0455/1435] Fix status message not updating the frontend during M3U refresh. --- frontend/src/WebSocket.jsx | 16 +++++++++++++--- 1 file changed, 13 insertions(+), 3 deletions(-) diff --git a/frontend/src/WebSocket.jsx b/frontend/src/WebSocket.jsx index 260ec468..aeed8826 100644 --- a/frontend/src/WebSocket.jsx +++ b/frontend/src/WebSocket.jsx @@ -192,9 +192,10 @@ export const WebsocketProvider = ({ children }) => { // Update the playlist status whenever we receive a status update // Not just when progress is 100% or status is pending_setup if (parsedEvent.data.status && parsedEvent.data.account) { - const playlist = playlists.find( - (p) => p.id === parsedEvent.data.account - ); + // Check if playlists is an object with IDs as keys or an array + const playlist = Array.isArray(playlists) + ? 
playlists.find((p) => p.id === parsedEvent.data.account) + : playlists[parsedEvent.data.account]; if (playlist) { // When we receive a "success" status with 100% progress, this is a completed refresh @@ -212,9 +213,18 @@ export const WebsocketProvider = ({ children }) => { parsedEvent.data.progress === 100 ) { updateData.updated_at = new Date().toISOString(); + // Log successful completion for debugging + console.log('M3U refresh completed successfully:', updateData); } updatePlaylist(updateData); + } else { + // Log when playlist can't be found for debugging purposes + console.warn( + `Received update for unknown playlist ID: ${parsedEvent.data.account}`, + Array.isArray(playlists) ? 'playlists is array' : 'playlists is object', + Object.keys(playlists).length + ); } } break; From 343ecfbca602fa255a4d95ed7c7d5972e0cd4bca Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Fri, 6 Jun 2025 12:00:08 -0500 Subject: [PATCH 0456/1435] Allow for 'days' to be set when using the EPG url which will limit the number of days to output epg data for. --- apps/output/views.py | 34 +++++++++++++++++++++++++++++----- 1 file changed, 29 insertions(+), 5 deletions(-) diff --git a/apps/output/views.py b/apps/output/views.py index afe3927a..f3237232 100644 --- a/apps/output/views.py +++ b/apps/output/views.py @@ -184,7 +184,7 @@ def generate_epg(request, profile_name=None): Dynamically generate an XMLTV (EPG) file using the new EPGData/ProgramData models. Since the EPG data is stored independently of Channels, we group programmes by their associated EPGData record. - This version does not filter by time, so it includes the entire EPG saved in the DB. + This version filters data based on the 'days' parameter. """ xml_lines = [] xml_lines.append('') @@ -206,6 +206,23 @@ def generate_epg(request, profile_name=None): # Options: 'channel_number' (default), 'tvg_id', 'gracenote' tvg_id_source = request.GET.get('tvg_id_source', 'channel_number').lower() + # Get the number of days for EPG data + try: + # Default to 0 days (everything) for real EPG if not specified + days_param = request.GET.get('days', '0') + num_days = int(days_param) + # Set reasonable limits + num_days = max(0, min(num_days, 365)) # Between 0 and 365 days + except ValueError: + num_days = 0 # Default to all data if invalid value + + # For dummy EPG, use either the specified value or default to 3 days + dummy_days = num_days if num_days > 0 else 3 + + # Calculate cutoff date for EPG data filtering (only if days > 0) + now = timezone.now() + cutoff_date = now + timedelta(days=num_days) if num_days > 0 else None + # Retrieve all active channels for channel in channels: # Format channel number as integer if it has no decimal component - same as M3U generation @@ -268,18 +285,25 @@ def generate_epg(request, profile_name=None): display_name = channel.epg_data.name if channel.epg_data else channel.name if not channel.epg_data: # Use the enhanced dummy EPG generation function with defaults - # These values could be made configurable via settings or request parameters - num_days = 1 # Default to 1 days of dummy EPG data program_length_hours = 4 # Default to 4-hour program blocks generate_dummy_epg( channel_id, display_name, xml_lines, - num_days=num_days, + num_days=dummy_days, # Use dummy_days (3 days by default) program_length_hours=program_length_hours ) else: - programs = channel.epg_data.programs.all() + # For real EPG data - filter only if days parameter was specified + if num_days > 0: + programs = channel.epg_data.programs.filter( + 
                        start_time__gte=now,
+                        start_time__lt=cutoff_date
+                    )
+                else:
+                    # Return all programs if days=0 or not specified
+                    programs = channel.epg_data.programs.all()
+
             for prog in programs:
                 start_str = prog.start_time.strftime("%Y%m%d%H%M%S %z")
                 stop_str = prog.end_time.strftime("%Y%m%d%H%M%S %z")

From 708a269ae58c2615844039b79a6dbe2da05d86a5 Mon Sep 17 00:00:00 2001
From: SergeantPanda
Date: Fri, 6 Jun 2025 12:21:08 -0500
Subject: [PATCH 0457/1435] Adds the ability to add 'direct=true' to m3u output
 url.

Doing so will use the URL from the provider instead of the Dispatcharr URL.

---
 apps/output/views.py | 21 ++++++++++++++++++---
 1 file changed, 18 insertions(+), 3 deletions(-)

diff --git a/apps/output/views.py b/apps/output/views.py
index f3237232..cb254d9a 100644
--- a/apps/output/views.py
+++ b/apps/output/views.py
@@ -24,6 +24,9 @@ def generate_m3u(request, profile_name=None):
     # Check if the request wants to use direct logo URLs instead of cache
     use_cached_logos = request.GET.get('cachedlogos', 'true').lower() != 'false'

+    # Check if direct stream URLs should be used instead of proxy
+    use_direct_urls = request.GET.get('direct', 'false').lower() == 'true'
+
     # Get the source to use for tvg-id value
     # Options: 'channel_number' (default), 'tvg_id', 'gracenote'
     tvg_id_source = request.GET.get('tvg_id_source', 'channel_number').lower()
@@ -76,10 +79,22 @@ def generate_m3u(request, profile_name=None):
             f'tvg-chno="{formatted_channel_number}" {tvc_guide_stationid}group-title="{group_title}",{channel.name}\n'
         )

-        base_url = request.build_absolute_uri('/')[:-1]
-        stream_url = f"{base_url}/proxy/ts/stream/{channel.uuid}"
+        # Determine the stream URL based on the direct parameter
+        if use_direct_urls:
+            # Try to get the first stream's direct URL
+            first_stream = channel.streams.first()
+            if first_stream and first_stream.url:
+                # Use the direct stream URL
+                stream_url = first_stream.url
+            else:
+                # Fall back to proxy URL if no direct URL available
+                base_url = request.build_absolute_uri('/')[:-1]
+                stream_url = f"{base_url}/proxy/ts/stream/{channel.uuid}"
+        else:
+            # Standard behavior - use proxy URL
+            base_url = request.build_absolute_uri('/')[:-1]
+            stream_url = f"{base_url}/proxy/ts/stream/{channel.uuid}"

-        #stream_url = request.build_absolute_uri(reverse('output:stream', args=[channel.id]))
         m3u_content += extinf_line + stream_url + "\n"

     response = HttpResponse(m3u_content, content_type="audio/x-mpegurl")

From 8ee68a2349830c425b3054de6177bd1b0cb6b32b Mon Sep 17 00:00:00 2001
From: SergeantPanda
Date: Fri, 6 Jun 2025 19:04:19 -0500
Subject: [PATCH 0458/1435] Deny POST if the body isn't empty.

---
 apps/output/views.py | 11 ++++++++++-
 1 file changed, 10 insertions(+), 1 deletion(-)

diff --git a/apps/output/views.py b/apps/output/views.py
index cb254d9a..68b32a7f 100644
--- a/apps/output/views.py
+++ b/apps/output/views.py
@@ -1,5 +1,7 @@
-from django.http import HttpResponse
+from django.http import HttpResponse, HttpResponseForbidden
 from django.urls import reverse
+from django.views.decorators.csrf import csrf_exempt
+from django.views.decorators.http import require_http_methods
 from apps.channels.models import Channel, ChannelProfile
 from apps.epg.models import ProgramData
 from django.utils import timezone
@@ -7,11 +9,18 @@
 from datetime import datetime, timedelta
 import re
 import html # Add this import for XML escaping

+@csrf_exempt
+@require_http_methods(["GET", "POST"])
 def generate_m3u(request, profile_name=None):
     """
     Dynamically generate an M3U file from channels.
The stream URL now points to the new stream_view that uses StreamProfile. + Supports both GET and POST methods for compatibility with IPTVSmarters. """ + # Check if this is a POST request with data (which we don't want to allow) + if request.method == "POST" and request.body: + return HttpResponseForbidden("POST requests with content are not allowed") + if profile_name is not None: channel_profile = ChannelProfile.objects.get(name=profile_name) channels = Channel.objects.filter( From 7acc31ec979240b16e31306474cd225b7f6febd0 Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Sat, 7 Jun 2025 19:27:59 -0500 Subject: [PATCH 0459/1435] Allow for tuners as low as 1 instead of 2. Closes #149 --- apps/hdhr/api_views.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/apps/hdhr/api_views.py b/apps/hdhr/api_views.py index 278efc36..0d7b77e0 100644 --- a/apps/hdhr/api_views.py +++ b/apps/hdhr/api_views.py @@ -79,8 +79,8 @@ class DiscoverAPIView(APIView): # Otherwise use the limited profile sum plus custom streams tuner_count = limited_tuners + custom_stream_count - # 5. Ensure minimum of 2 tuners - tuner_count = max(2, tuner_count) + # 5. Ensure minimum of 1 tuners + tuner_count = max(1, tuner_count) logger.debug(f"Calculated tuner count: {tuner_count} (limited profiles: {limited_tuners}, custom streams: {custom_stream_count}, unlimited: {has_unlimited})") From 789d29c97a60d6a7e8443dfb75de9951b4270321 Mon Sep 17 00:00:00 2001 From: dekzter Date: Sun, 8 Jun 2025 08:29:25 -0400 Subject: [PATCH 0460/1435] proper cidr validation server-side --- core/serializers.py | 56 ++++++++++++++++++++++++++++++--- dispatcharr/utils.py | 4 +-- frontend/src/pages/Settings.jsx | 29 ++++++++++++++--- requirements.txt | 1 - 4 files changed, 78 insertions(+), 12 deletions(-) diff --git a/core/serializers.py b/core/serializers.py index c80ad630..289a43ea 100644 --- a/core/serializers.py +++ b/core/serializers.py @@ -1,19 +1,67 @@ # core/serializers.py +import json +import ipaddress from rest_framework import serializers -from .models import UserAgent, StreamProfile, CoreSettings +from .models import UserAgent, StreamProfile, CoreSettings, NETWORK_ACCESS + class UserAgentSerializer(serializers.ModelSerializer): class Meta: model = UserAgent - fields = ['id', 'name', 'user_agent', 'description', 'is_active', 'created_at', 'updated_at'] + fields = [ + "id", + "name", + "user_agent", + "description", + "is_active", + "created_at", + "updated_at", + ] + class StreamProfileSerializer(serializers.ModelSerializer): class Meta: model = StreamProfile - fields = ['id', 'name', 'command', 'parameters', 'is_active', 'user_agent', 'locked'] + fields = [ + "id", + "name", + "command", + "parameters", + "is_active", + "user_agent", + "locked", + ] + class CoreSettingsSerializer(serializers.ModelSerializer): class Meta: model = CoreSettings - fields = '__all__' + fields = "__all__" + + def update(self, instance, validated_data): + if instance.key == NETWORK_ACCESS: + errors = False + invalid = {} + value = json.loads(validated_data.get("value")) + for key, val in value.items(): + cidrs = val.split(",") + for cidr in cidrs: + try: + ipaddress.ip_network(cidr) + except: + errors = True + if key not in invalid: + invalid[key] = [] + invalid[key].append(cidr) + + if errors: + # Perform CIDR validation + raise serializers.ValidationError( + { + "message": "Invalid CIDRs", + "value": invalid, + } + ) + + return super().update(instance, validated_data) diff --git a/dispatcharr/utils.py b/dispatcharr/utils.py index 
5f75121a..767913c6 100644 --- a/dispatcharr/utils.py +++ b/dispatcharr/utils.py @@ -29,7 +29,7 @@ def validate_logo_file(file): def get_client_ip(request): - x_forwarded_for = request.META.get("HTTP_X_FORWARDED_FOR") + x_forwarded_for = request.META.get("HTTP_X_REAL_IP") if x_forwarded_for: # X-Forwarded-For can be a comma-separated list of IPs ip = x_forwarded_for.split(",")[0].strip() @@ -44,7 +44,7 @@ def network_access_allowed(request, settings_key): cidrs = ( network_access[settings_key].split(",") if settings_key in network_access - else "0.0.0.0/0" + else ["0.0.0.0/0"] ) network_allowed = False diff --git a/frontend/src/pages/Settings.jsx b/frontend/src/pages/Settings.jsx index f61988e5..6606f977 100644 --- a/frontend/src/pages/Settings.jsx +++ b/frontend/src/pages/Settings.jsx @@ -5,6 +5,7 @@ import useUserAgentsStore from '../store/userAgents'; import useStreamProfilesStore from '../store/streamProfiles'; import { Accordion, + Alert, Box, Button, Center, @@ -31,6 +32,7 @@ const SettingsPage = () => { const authUser = useAuthStore((s) => s.user); const [accordianValue, setAccordianValue] = useState(null); + const [networkAccessSaved, setNetworkAccessSaved] = useState(false); // UI / local storage settings const [tableSize, setTableSize] = useLocalStorage('table-size', 'default'); @@ -376,11 +378,21 @@ const SettingsPage = () => { }; const onNetworkAccessSubmit = async () => { - console.log(networkAccessForm.getValues()); - API.updateSetting({ - ...settings['network-access'], - value: JSON.stringify(networkAccessForm.getValues()), - }); + let result = null; + setNetworkAccessSaved(false); + try { + await API.updateSetting({ + ...settings['network-access'], + value: JSON.stringify(networkAccessForm.getValues()), + }); + setNetworkAccessSaved(true); + } catch (e) { + const errors = {}; + for (const key in e.body.value) { + errors[key] = `Invalid CIDR(s): ${e.body.value[key]}`; + } + networkAccessForm.setErrors(errors); + } }; const onUISettingsChange = (name, value) => { @@ -589,6 +601,13 @@ const SettingsPage = () => { )} > + {networkAccessSaved && ( + + )} {Object.entries(NETWORK_ACCESS_OPTIONS).map( ([key, config]) => { return ( diff --git a/requirements.txt b/requirements.txt index d029bd1a..f1526ceb 100644 --- a/requirements.txt +++ b/requirements.txt @@ -23,7 +23,6 @@ tzlocal # PyTorch dependencies (CPU only) --extra-index-url https://download.pytorch.org/whl/cpu/ torch==2.6.0+cpu -tzlocal # ML/NLP dependencies sentence-transformers==3.4.1 From 7e5be6094f6fb9bc00ff3eee9e4e4b799c98ce3c Mon Sep 17 00:00:00 2001 From: Marlon Alkan Date: Sun, 8 Jun 2025 16:45:34 +0200 Subject: [PATCH 0461/1435] docker: init: 02-postgres.sh: allow DB user to create new DB (for tests) --- docker/init/02-postgres.sh | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/docker/init/02-postgres.sh b/docker/init/02-postgres.sh index 69a81dd4..7bb90671 100644 --- a/docker/init/02-postgres.sh +++ b/docker/init/02-postgres.sh @@ -57,13 +57,14 @@ if [ -z "$(ls -A $POSTGRES_DIR)" ]; then echo "Creating PostgreSQL database..." su - postgres -c "createdb -p ${POSTGRES_PORT} ${POSTGRES_DB}" - # Create user, set ownership, and grant privileges + # Create user, set ownership, and grant privileges, including privileges to create new databases echo "Creating PostgreSQL user..." 
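# (Editorial aside: Django's test runner creates and later drops a throwaway
# test database, which is why the commit above also grants the application
# user the right to create new databases.)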
su - postgres -c "psql -p ${POSTGRES_PORT} -d ${POSTGRES_DB}" < Date: Sun, 8 Jun 2025 16:47:00 +0200 Subject: [PATCH 0462/1435] apps: output: change body detection logic and add tests --- apps/output/tests.py | 23 +++++++++++++++++++++++ apps/output/views.py | 5 +++-- 2 files changed, 26 insertions(+), 2 deletions(-) diff --git a/apps/output/tests.py b/apps/output/tests.py index e1e857ee..f87c8340 100644 --- a/apps/output/tests.py +++ b/apps/output/tests.py @@ -14,3 +14,26 @@ class OutputM3UTest(TestCase): self.assertEqual(response.status_code, 200) content = response.content.decode() self.assertIn("#EXTM3U", content) + + def test_generate_m3u_response_post_empty_body(self): + """ + Test that a POST request with an empty body returns 200 OK. + """ + url = reverse('output:generate_m3u') + + response = self.client.post(url, data=None, content_type='application/x-www-form-urlencoded') + content = response.content.decode() + + self.assertEqual(response.status_code, 200, "POST with empty body should return 200 OK") + self.assertIn("#EXTM3U", content) + + def test_generate_m3u_response_post_with_body(self): + """ + Test that a POST request with a non-empty body returns 403 Forbidden. + """ + url = reverse('output:generate_m3u') + + response = self.client.post(url, data={'evilstring': 'muhahaha'}) + + self.assertEqual(response.status_code, 403, "POST with body should return 403 Forbidden") + self.assertIn("POST requests with body are not allowed, body is:", response.content.decode()) diff --git a/apps/output/views.py b/apps/output/views.py index 2b18d185..ff02560c 100644 --- a/apps/output/views.py +++ b/apps/output/views.py @@ -18,9 +18,10 @@ def generate_m3u(request, profile_name=None): The stream URL now points to the new stream_view that uses StreamProfile. Supports both GET and POST methods for compatibility with IPTVSmarters. """ - # Check if this is a POST request with data (which we don't want to allow) + # Check if this is a POST request and the body is not empty (which we don't want to allow) if request.method == "POST" and request.body: - return HttpResponseForbidden("POST requests with content are not allowed") + if request.body.decode() != '{}': + return HttpResponseForbidden("POST requests with body are not allowed, body is: {}".format(request.body.decode())) if profile_name is not None: channel_profile = ChannelProfile.objects.get(name=profile_name) From 18a6c428c1a92142317bcf367501a63b231ba650 Mon Sep 17 00:00:00 2001 From: Marlon Alkan Date: Sun, 8 Jun 2025 19:26:34 +0200 Subject: [PATCH 0463/1435] core: api_views.py: add fallback IP geo provider Fixes #127 - add ip-api.com as fallback geo provider - fix silent JSONException by parsing only if HTTP 200 OK - add logger and log error if IP geo can't be fetched --- core/api_views.py | 33 +++++++++++++++++++++++++++------ 1 file changed, 27 insertions(+), 6 deletions(-) diff --git a/core/api_views.py b/core/api_views.py index 77473b5d..9efefe12 100644 --- a/core/api_views.py +++ b/core/api_views.py @@ -1,5 +1,6 @@ # core/api_views.py +import logging from rest_framework import viewsets, status from rest_framework.response import Response from django.shortcuts import get_object_or_404 @@ -13,6 +14,8 @@ import requests import os from core.tasks import rehash_streams +logger = logging.getLogger(__name__) + class UserAgentViewSet(viewsets.ModelViewSet): """ API endpoint that allows user agents to be viewed, created, edited, or deleted. 
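
Condensed for review, the lookup flow the next hunk installs looks roughly
like this (an editor's sketch using only the endpoints and field names that
appear in the diff):

    import requests

    def lookup_country(public_ip, timeout=5):
        r = requests.get(f"https://ipapi.co/{public_ip}/json/", timeout=timeout)
        if r.status_code == requests.codes.ok:
            geo = r.json()
            return geo.get("country_code"), geo.get("country_name")
        # fallback; the free tier of ip-api.com only supports plain HTTP
        r = requests.get("http://ip-api.com/json/", timeout=timeout)
        if r.status_code == requests.codes.ok:
            geo = r.json()
            return geo.get("countryCode"), geo.get("country")
        raise Exception("Geo lookup failed with both services")

Note the differing field names between the two providers (country_code /
country_name vs. countryCode / country), which is why the two branches cannot
share a parser.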
@@ -77,14 +80,32 @@ def environment(request): except Exception as e: local_ip = f"Error: {e}" - # 3) If we got a valid public_ip, fetch geo info from ipapi.co + # 3) If we got a valid public_ip, fetch geo info from ipapi.co or ip-api.com if public_ip and "Error" not in public_ip: try: - geo = requests.get(f"https://ipapi.co/{public_ip}/json/", timeout=5).json() - # ipapi returns fields like country_code, country_name, etc. - country_code = geo.get("country_code", "") # e.g. "US" - country_name = geo.get("country_name", "") # e.g. "United States" - except requests.RequestException as e: + # Attempt to get geo information from ipapi.co first + r = requests.get(f"https://ipapi.co/{public_ip}/json/", timeout=5) + + if r.status_code == requests.codes.ok: + geo = r.json() + country_code = geo.get("country_code") # e.g. "US" + country_name = geo.get("country_name") # e.g. "United States" + + else: + # If ipapi.co fails, fallback to ip-api.com + # only supports http requests for free tier + r = requests.get("http://ip-api.com/json/", timeout=5) + + if r.status_code == requests.codes.ok: + geo = r.json() + country_code = geo.get("countryCode") # e.g. "US" + country_name = geo.get("country") # e.g. "United States" + + else: + raise Exception("Geo lookup failed with both services") + + except Exception as e: + logger.error(f"Error during geo lookup: {e}") country_code = None country_name = None From 9d8e011e2c9b61f6e75a61a7c50af3d5cfecdf4e Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Mon, 9 Jun 2025 17:33:59 -0500 Subject: [PATCH 0464/1435] Store FFmpeg in redis. --- apps/proxy/ts_proxy/constants.py | 7 + apps/proxy/ts_proxy/stream_manager.py | 212 ++++++++++++++++++++------ 2 files changed, 169 insertions(+), 50 deletions(-) diff --git a/apps/proxy/ts_proxy/constants.py b/apps/proxy/ts_proxy/constants.py index 4827b24b..91357bbe 100644 --- a/apps/proxy/ts_proxy/constants.py +++ b/apps/proxy/ts_proxy/constants.py @@ -63,6 +63,13 @@ class ChannelMetadataField: STREAM_SWITCH_TIME = "stream_switch_time" STREAM_SWITCH_REASON = "stream_switch_reason" + # FFmpeg performance metrics + FFMPEG_SPEED = "ffmpeg_speed" + FFMPEG_FPS = "ffmpeg_fps" + ACTUAL_FPS = "actual_fps" + FFMPEG_BITRATE = "ffmpeg_bitrate" + FFMPEG_STATS_UPDATED = "ffmpeg_stats_updated" + # Client metadata fields CONNECTED_AT = "connected_at" LAST_ACTIVE = "last_active" diff --git a/apps/proxy/ts_proxy/stream_manager.py b/apps/proxy/ts_proxy/stream_manager.py index 973f86b9..7ba5f9f8 100644 --- a/apps/proxy/ts_proxy/stream_manager.py +++ b/apps/proxy/ts_proxy/stream_manager.py @@ -7,6 +7,7 @@ import socket import requests import subprocess import gevent # Add this import +import re # Add this import at the top from typing import Optional, List from django.shortcuts import get_object_or_404 from apps.proxy.config import TSConfig as Config @@ -376,73 +377,184 @@ class StreamManager: logger.debug(f"Started stderr reader thread for channel {self.channel_id}") def _read_stderr(self): - """Read and log ffmpeg stderr output with stats lines combined""" + """Read and log ffmpeg stderr output with real-time stats parsing""" try: buffer = b"" - in_stats_line = False + stats_buffer = b"" # Common FFmpeg stats line prefixes for detection stats_prefixes = [b"frame=", b"size=", b"time=", b"bitrate=", b"speed="] # Read in small chunks while self.transcode_process and self.transcode_process.stderr: - chunk = self.transcode_process.stderr.read(1) - if not chunk: # EOF + try: + chunk = self.transcode_process.stderr.read(256) # Smaller chunks for 
real-time processing + if not chunk: + break + + buffer += chunk + + # Check for stats data in the buffer (stats usually start with "frame=") + if b"frame=" in buffer: + # Split buffer at frame= to isolate potential stats + parts = buffer.split(b"frame=") + + # Process any complete lines before the stats + if parts[0]: + line_buffer = parts[0] + while b'\n' in line_buffer or b'\r' in line_buffer: + if b'\n' in line_buffer: + line, line_buffer = line_buffer.split(b'\n', 1) + else: + line, line_buffer = line_buffer.split(b'\r', 1) + + if line.strip(): + self._log_stderr_content(line.decode('utf-8', errors='ignore')) + + # Handle stats data - combine with previous stats buffer + if len(parts) > 1: + stats_buffer = b"frame=" + parts[-1] + + # Look for common stats patterns to determine if we have a complete stats line + # Stats typically contain: frame=X fps=Y q=Z size=A time=B bitrate=C speed=D + if b"speed=" in stats_buffer: + # We likely have a complete or near-complete stats line + try: + stats_text = stats_buffer.decode('utf-8', errors='ignore') + self._parse_ffmpeg_stats(stats_text) + self._log_stderr_content(stats_text) + stats_buffer = b"" # Clear stats buffer after processing + except Exception as e: + logger.debug(f"Error parsing stats: {e}") + + # Keep any remaining parts for next iteration + buffer = b"" + else: + # No stats data, process as regular lines + while b'\n' in buffer or b'\r' in buffer: + if b'\n' in buffer: + line, buffer = buffer.split(b'\n', 1) + else: + line, buffer = buffer.split(b'\r', 1) + + if line.strip(): + self._log_stderr_content(line.decode('utf-8', errors='ignore')) + + # Prevent buffer from growing too large + if len(buffer) > 4096: + # Keep only the last 1024 bytes to preserve potential incomplete data + buffer = buffer[-1024:] + + if len(stats_buffer) > 2048: # Stats lines shouldn't be this long + stats_buffer = b"" + + except Exception as e: + logger.error(f"Error reading stderr: {e}") break - buffer += chunk - - # Check if we have a complete line - if chunk == b'\n' or chunk == b'\r': - # We have a complete line - if buffer.strip(): - line = buffer.decode('utf-8', errors='replace').strip() - # Check if this is a stats line - if any(stat_prefix in line for stat_prefix in [p.decode() for p in stats_prefixes]): - self._log_stderr_content(f"FFmpeg stats: {line}") - else: - self._log_stderr_content(line) - buffer = b"" - in_stats_line = False - continue - - # Check if this is the start of a new non-stats line after we were in a stats line - if in_stats_line: - # If we see two consecutive newlines or a non-stats-related line, - # consider the stats block complete - if chunk == b'\n' or chunk == b'\r': - in_stats_line = False - if buffer.strip(): - self._log_stderr_content(buffer.decode('utf-8', errors='replace').strip()) - buffer = b"" - # Check if this is the start of a new stats line - elif any(prefix in buffer for prefix in stats_prefixes): - # We're now in a stats line - in_stats_line = True except Exception as e: # Catch any other exceptions in the thread to prevent crashes try: - logger.error(f"Error in stderr reader thread: {e}") + logger.error(f"Error in stderr reader thread for channel {self.channel_id}: {e}") except: - # Again, if logging fails, continue silently pass def _log_stderr_content(self, content): - """Helper method to log stderr content with error handling""" + """Log stderr content from FFmpeg with appropriate log levels""" try: - # Wrap the logging call in a try-except to prevent crashes due to logging errors - 
logger.debug(f"Transcode stderr [{self.channel_id}]: {content}") - except OSError as e: - # If logging fails, try a simplified log message - if e.errno == 105: # No buffer space available - try: - # Try a much shorter message without the error content - logger.warning(f"Logging error (buffer full) in channel {self.channel_id}") - except: - # If even that fails, we have to silently continue - pass - except Exception: - # Ignore other logging errors to prevent thread crashes - pass + content = content.strip() + if not content: + return + + # Convert to lowercase for easier matching + content_lower = content.lower() + + # Determine log level based on content + if any(keyword in content_lower for keyword in ['error', 'failed', 'cannot', 'invalid', 'corrupt']): + logger.error(f"FFmpeg stderr: {content}") + elif any(keyword in content_lower for keyword in ['warning', 'deprecated', 'ignoring']): + logger.warning(f"FFmpeg stderr: {content}") + elif content.startswith('frame=') or 'fps=' in content or 'speed=' in content: + # Stats lines - log at debug level to avoid spam + logger.debug(f"FFmpeg stats: {content}") + elif any(keyword in content_lower for keyword in ['input', 'output', 'stream', 'video', 'audio']): + # Stream info - log at info level + logger.info(f"FFmpeg info: {content}") + else: + # Everything else at debug level + logger.debug(f"FFmpeg stderr: {content}") + + except Exception as e: + logger.error(f"Error logging stderr content: {e}") + + def _parse_ffmpeg_stats(self, stats_line): + """Parse FFmpeg stats line and extract speed, fps, and bitrate""" + try: + # Example FFmpeg stats line: + # frame= 1234 fps= 30 q=28.0 size= 2048kB time=00:00:41.33 bitrate= 406.1kbits/s speed=1.02x + + # Extract speed (e.g., "speed=1.02x") + speed_match = re.search(r'speed=\s*([0-9.]+)x?', stats_line) + ffmpeg_speed = float(speed_match.group(1)) if speed_match else None + + # Extract fps (e.g., "fps= 30") + fps_match = re.search(r'fps=\s*([0-9.]+)', stats_line) + ffmpeg_fps = float(fps_match.group(1)) if fps_match else None + + # Extract bitrate (e.g., "bitrate= 406.1kbits/s") + bitrate_match = re.search(r'bitrate=\s*([0-9.]+(?:\.[0-9]+)?)\s*([kmg]?)bits/s', stats_line, re.IGNORECASE) + ffmpeg_bitrate = None + if bitrate_match: + bitrate_value = float(bitrate_match.group(1)) + unit = bitrate_match.group(2).lower() + # Convert to kbps + if unit == 'm': + bitrate_value *= 1000 + elif unit == 'g': + bitrate_value *= 1000000 + # If no unit or 'k', it's already in kbps + ffmpeg_bitrate = bitrate_value + + # Calculate actual FPS + actual_fps = None + if ffmpeg_fps is not None and ffmpeg_speed is not None and ffmpeg_speed > 0: + actual_fps = ffmpeg_fps / ffmpeg_speed + + # Store in Redis if we have valid data + if any(x is not None for x in [ffmpeg_speed, ffmpeg_fps, actual_fps, ffmpeg_bitrate]): + self._update_ffmpeg_stats_in_redis(ffmpeg_speed, ffmpeg_fps, actual_fps, ffmpeg_bitrate) + + logger.debug(f"FFmpeg stats - Speed: {ffmpeg_speed}x, FFmpeg FPS: {ffmpeg_fps}, " + f"Actual FPS: {actual_fps:.1f if actual_fps else 'N/A'}, " + f"Bitrate: {ffmpeg_bitrate:.1f if ffmpeg_bitrate else 'N/A'} kbps") + + except Exception as e: + logger.debug(f"Error parsing FFmpeg stats: {e}") + + def _update_ffmpeg_stats_in_redis(self, speed, fps, actual_fps, bitrate): + """Update FFmpeg performance stats in Redis metadata""" + try: + if hasattr(self.buffer, 'redis_client') and self.buffer.redis_client: + metadata_key = RedisKeys.channel_metadata(self.channel_id) + update_data = { + ChannelMetadataField.FFMPEG_STATS_UPDATED: 
str(time.time()) + } + + if speed is not None: + update_data[ChannelMetadataField.FFMPEG_SPEED] = str(round(speed, 3)) + + if fps is not None: + update_data[ChannelMetadataField.FFMPEG_FPS] = str(round(fps, 1)) + + if actual_fps is not None: + update_data[ChannelMetadataField.ACTUAL_FPS] = str(round(actual_fps, 1)) + + if bitrate is not None: + update_data[ChannelMetadataField.FFMPEG_BITRATE] = str(round(bitrate, 1)) + + self.buffer.redis_client.hset(metadata_key, mapping=update_data) + + except Exception as e: + logger.error(f"Error updating FFmpeg stats in Redis: {e}") def _establish_http_connection(self): """Establish a direct HTTP connection to the stream""" From 7d0c32ef3fd0ab0e8417a8a9f80491f737059651 Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Mon, 9 Jun 2025 17:45:00 -0500 Subject: [PATCH 0465/1435] Break line breaking for stats. --- apps/proxy/ts_proxy/stream_manager.py | 109 +++++++++++++++----------- 1 file changed, 62 insertions(+), 47 deletions(-) diff --git a/apps/proxy/ts_proxy/stream_manager.py b/apps/proxy/ts_proxy/stream_manager.py index 7ba5f9f8..87eb2251 100644 --- a/apps/proxy/ts_proxy/stream_manager.py +++ b/apps/proxy/ts_proxy/stream_manager.py @@ -380,9 +380,7 @@ class StreamManager: """Read and log ffmpeg stderr output with real-time stats parsing""" try: buffer = b"" - stats_buffer = b"" - # Common FFmpeg stats line prefixes for detection - stats_prefixes = [b"frame=", b"size=", b"time=", b"bitrate=", b"speed="] + last_stats_line = b"" # Read in small chunks while self.transcode_process and self.transcode_process.stderr: @@ -393,59 +391,76 @@ class StreamManager: buffer += chunk - # Check for stats data in the buffer (stats usually start with "frame=") - if b"frame=" in buffer: - # Split buffer at frame= to isolate potential stats - parts = buffer.split(b"frame=") + # Look for stats updates (overwrite previous stats with \r) + if b'\r' in buffer and b"frame=" in buffer: + # Split on \r to handle overwriting stats + parts = buffer.split(b'\r') - # Process any complete lines before the stats - if parts[0]: - line_buffer = parts[0] - while b'\n' in line_buffer or b'\r' in line_buffer: - if b'\n' in line_buffer: - line, line_buffer = line_buffer.split(b'\n', 1) + # Process all parts except the last (which might be incomplete) + for i, part in enumerate(parts[:-1]): + if part.strip(): + if part.startswith(b"frame=") or b"frame=" in part: + # This is a stats line - keep it intact + try: + stats_text = part.decode('utf-8', errors='ignore').strip() + if stats_text and "frame=" in stats_text: + # Extract just the stats portion if there's other content + if "frame=" in stats_text: + frame_start = stats_text.find("frame=") + stats_text = stats_text[frame_start:] + + self._parse_ffmpeg_stats(stats_text) + self._log_stderr_content(stats_text) + last_stats_line = part + except Exception as e: + logger.debug(f"Error parsing stats line: {e}") else: - line, line_buffer = line_buffer.split(b'\r', 1) + # Regular content - process line by line + line_content = part + while b'\n' in line_content: + line, line_content = line_content.split(b'\n', 1) + if line.strip(): + self._log_stderr_content(line.decode('utf-8', errors='ignore')) - if line.strip(): - self._log_stderr_content(line.decode('utf-8', errors='ignore')) + # Handle remaining content without newline + if line_content.strip(): + self._log_stderr_content(line_content.decode('utf-8', errors='ignore')) - # Handle stats data - combine with previous stats buffer - if len(parts) > 1: - stats_buffer = b"frame=" + parts[-1] - - 
# Look for common stats patterns to determine if we have a complete stats line - # Stats typically contain: frame=X fps=Y q=Z size=A time=B bitrate=C speed=D - if b"speed=" in stats_buffer: - # We likely have a complete or near-complete stats line - try: - stats_text = stats_buffer.decode('utf-8', errors='ignore') - self._parse_ffmpeg_stats(stats_text) - self._log_stderr_content(stats_text) - stats_buffer = b"" # Clear stats buffer after processing - except Exception as e: - logger.debug(f"Error parsing stats: {e}") - - # Keep any remaining parts for next iteration - buffer = b"" - else: - # No stats data, process as regular lines - while b'\n' in buffer or b'\r' in buffer: - if b'\n' in buffer: - line, buffer = buffer.split(b'\n', 1) - else: - line, buffer = buffer.split(b'\r', 1) + # Keep the last part as it might be incomplete + buffer = parts[-1] + # Handle regular line breaks for non-stats content + elif b'\n' in buffer: + while b'\n' in buffer: + line, buffer = buffer.split(b'\n', 1) if line.strip(): - self._log_stderr_content(line.decode('utf-8', errors='ignore')) + line_text = line.decode('utf-8', errors='ignore').strip() + if line_text and not line_text.startswith("frame="): + self._log_stderr_content(line_text) + + # If we have a potential stats line in buffer without line breaks + elif b"frame=" in buffer and (b"speed=" in buffer or len(buffer) > 200): + # We likely have a complete or substantial stats line + try: + stats_text = buffer.decode('utf-8', errors='ignore').strip() + if "frame=" in stats_text: + # Extract just the stats portion + frame_start = stats_text.find("frame=") + stats_text = stats_text[frame_start:] + + self._parse_ffmpeg_stats(stats_text) + self._log_stderr_content(stats_text) + buffer = b"" # Clear buffer after processing + except Exception as e: + logger.debug(f"Error parsing buffered stats: {e}") # Prevent buffer from growing too large if len(buffer) > 4096: - # Keep only the last 1024 bytes to preserve potential incomplete data - buffer = buffer[-1024:] - - if len(stats_buffer) > 2048: # Stats lines shouldn't be this long - stats_buffer = b"" + # Try to preserve any potential stats line at the end + if b"frame=" in buffer[-1024:]: + buffer = buffer[-1024:] + else: + buffer = buffer[-512:] except Exception as e: logger.error(f"Error reading stderr: {e}") From 7e25be0717fae7459238744dd2957e79f2357dc1 Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Mon, 9 Jun 2025 18:57:36 -0500 Subject: [PATCH 0466/1435] Store video and audio information in redis. 
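
Once populated, these fields live in the existing per-channel metadata hash,
so a consumer can fetch them in a single round trip. A hypothetical reader
(key helper and field names taken from the diff below; redis_client and
channel_id are supplied by the caller):

    info = redis_client.hgetall(RedisKeys.channel_metadata(channel_id))
    stream_summary = {
        field: info.get(field.encode())
        for field in ("video_codec", "resolution", "source_fps",
                      "audio_codec", "sample_rate", "audio_channels")
    }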
--- apps/proxy/ts_proxy/constants.py | 18 ++++ apps/proxy/ts_proxy/stream_manager.py | 149 +++++++++++++++++++++++++- 2 files changed, 163 insertions(+), 4 deletions(-) diff --git a/apps/proxy/ts_proxy/constants.py b/apps/proxy/ts_proxy/constants.py index 91357bbe..daaf7bb3 100644 --- a/apps/proxy/ts_proxy/constants.py +++ b/apps/proxy/ts_proxy/constants.py @@ -70,6 +70,24 @@ class ChannelMetadataField: FFMPEG_BITRATE = "ffmpeg_bitrate" FFMPEG_STATS_UPDATED = "ffmpeg_stats_updated" + # Video stream info + VIDEO_CODEC = "video_codec" + RESOLUTION = "resolution" + WIDTH = "width" + HEIGHT = "height" + SOURCE_FPS = "source_fps" + PIXEL_FORMAT = "pixel_format" + VIDEO_BITRATE = "video_bitrate" + + # Audio stream info + AUDIO_CODEC = "audio_codec" + SAMPLE_RATE = "sample_rate" + AUDIO_CHANNELS = "audio_channels" + AUDIO_BITRATE = "audio_bitrate" + + # Stream info timestamp + STREAM_INFO_UPDATED = "stream_info_updated" + # Client metadata fields CONNECTED_AT = "connected_at" LAST_ACTIVE = "last_active" diff --git a/apps/proxy/ts_proxy/stream_manager.py b/apps/proxy/ts_proxy/stream_manager.py index 87eb2251..883d312e 100644 --- a/apps/proxy/ts_proxy/stream_manager.py +++ b/apps/proxy/ts_proxy/stream_manager.py @@ -483,14 +483,21 @@ class StreamManager: # Convert to lowercase for easier matching content_lower = content.lower() + # Check for stream info lines first + if "stream #" in content_lower and ("video:" in content_lower or "audio:" in content_lower): + if "video:" in content_lower: + self._parse_ffmpeg_stream_info(content, stream_type="video") + elif "audio:" in content_lower: + self._parse_ffmpeg_stream_info(content, stream_type="audio") + # Determine log level based on content if any(keyword in content_lower for keyword in ['error', 'failed', 'cannot', 'invalid', 'corrupt']): logger.error(f"FFmpeg stderr: {content}") elif any(keyword in content_lower for keyword in ['warning', 'deprecated', 'ignoring']): logger.warning(f"FFmpeg stderr: {content}") elif content.startswith('frame=') or 'fps=' in content or 'speed=' in content: - # Stats lines - log at debug level to avoid spam - logger.debug(f"FFmpeg stats: {content}") + # Stats lines - log at trace level to avoid spam + logger.trace(f"FFmpeg stats: {content}") elif any(keyword in content_lower for keyword in ['input', 'output', 'stream', 'video', 'audio']): # Stream info - log at info level logger.info(f"FFmpeg info: {content}") @@ -501,6 +508,136 @@ class StreamManager: except Exception as e: logger.error(f"Error logging stderr content: {e}") + def _parse_ffmpeg_stream_info(self, stream_info_line, stream_type="video"): + """Parse FFmpeg stream info line to extract video/audio codec, resolution, and FPS""" + try: + if stream_type == "video": + # Example line: + # Stream #0:0: Video: h264 (Main), yuv420p(tv, progressive), 1280x720 [SAR 1:1 DAR 16:9], q=2-31, 2000 kb/s, 29.97 fps, 90k tbn + + # Extract video codec (e.g., "h264", "mpeg2video", etc.) 
+ codec_match = re.search(r'Video:\s*([a-zA-Z0-9_]+)', stream_info_line) + video_codec = codec_match.group(1) if codec_match else None + + # Extract resolution (e.g., "1280x720") + resolution_match = re.search(r'(\d+)x(\d+)', stream_info_line) + if resolution_match: + width = int(resolution_match.group(1)) + height = int(resolution_match.group(2)) + resolution = f"{width}x{height}" + else: + width = height = resolution = None + + # Extract source FPS (e.g., "29.97 fps") + fps_match = re.search(r'(\d+(?:\.\d+)?)\s*fps', stream_info_line) + source_fps = float(fps_match.group(1)) if fps_match else None + + # Extract pixel format (e.g., "yuv420p") + pixel_format_match = re.search(r'Video:\s*[^,]+,\s*([^,(]+)', stream_info_line) + pixel_format = None + if pixel_format_match: + pf = pixel_format_match.group(1).strip() + # Clean up pixel format (remove extra info in parentheses) + if '(' in pf: + pf = pf.split('(')[0].strip() + pixel_format = pf + + # Extract bitrate if present (e.g., "2000 kb/s") + video_bitrate = None + bitrate_match = re.search(r'(\d+(?:\.\d+)?)\s*kb/s', stream_info_line) + if bitrate_match: + video_bitrate = float(bitrate_match.group(1)) + + # Store in Redis if we have valid data + if any(x is not None for x in [video_codec, resolution, source_fps, pixel_format, video_bitrate]): + self._update_stream_info_in_redis(video_codec, resolution, width, height, source_fps, pixel_format, video_bitrate, None, None, None) + + logger.info(f"Video stream info - Codec: {video_codec}, Resolution: {resolution}, " + f"Source FPS: {source_fps}, Pixel Format: {pixel_format}, " + f"Video Bitrate: {video_bitrate} kb/s") + + elif stream_type == "audio": + # Example line: + # Stream #0:1[0x101]: Audio: aac (LC) ([15][0][0][0] / 0x000F), 48000 Hz, stereo, fltp, 64 kb/s + + # Extract audio codec (e.g., "aac", "mp3", etc.) 
+ codec_match = re.search(r'Audio:\s*([a-zA-Z0-9_]+)', stream_info_line) + audio_codec = codec_match.group(1) if codec_match else None + + # Extract sample rate (e.g., "48000 Hz") + sample_rate_match = re.search(r'(\d+)\s*Hz', stream_info_line) + sample_rate = int(sample_rate_match.group(1)) if sample_rate_match else None + + # Extract channel layout (e.g., "stereo", "5.1", "mono") + # Look for common channel layouts + channel_match = re.search(r'\b(mono|stereo|5\.1|7\.1|quad|2\.1)\b', stream_info_line, re.IGNORECASE) + channels = channel_match.group(1) if channel_match else None + + # Extract audio bitrate if present (e.g., "64 kb/s") + audio_bitrate = None + bitrate_match = re.search(r'(\d+(?:\.\d+)?)\s*kb/s', stream_info_line) + if bitrate_match: + audio_bitrate = float(bitrate_match.group(1)) + + # Store in Redis if we have valid data + if any(x is not None for x in [audio_codec, sample_rate, channels, audio_bitrate]): + self._update_stream_info_in_redis(None, None, None, None, None, None, None, audio_codec, sample_rate, channels, audio_bitrate) + + logger.info(f"Audio stream info - Codec: {audio_codec}, Sample Rate: {sample_rate} Hz, " + f"Channels: {channels}, Audio Bitrate: {audio_bitrate} kb/s") + + except Exception as e: + logger.debug(f"Error parsing FFmpeg {stream_type} stream info: {e}") + + def _update_stream_info_in_redis(self, codec, resolution, width, height, fps, pixel_format, video_bitrate, audio_codec=None, sample_rate=None, channels=None, audio_bitrate=None): + """Update stream info in Redis metadata""" + try: + if hasattr(self.buffer, 'redis_client') and self.buffer.redis_client: + metadata_key = RedisKeys.channel_metadata(self.channel_id) + update_data = { + ChannelMetadataField.STREAM_INFO_UPDATED: str(time.time()) + } + + # Video info + if codec is not None: + update_data[ChannelMetadataField.VIDEO_CODEC] = str(codec) + + if resolution is not None: + update_data[ChannelMetadataField.RESOLUTION] = str(resolution) + + if width is not None: + update_data[ChannelMetadataField.WIDTH] = str(width) + + if height is not None: + update_data[ChannelMetadataField.HEIGHT] = str(height) + + if fps is not None: + update_data[ChannelMetadataField.SOURCE_FPS] = str(round(fps, 2)) + + if pixel_format is not None: + update_data[ChannelMetadataField.PIXEL_FORMAT] = str(pixel_format) + + if video_bitrate is not None: + update_data[ChannelMetadataField.VIDEO_BITRATE] = str(round(video_bitrate, 1)) + + # Audio info + if audio_codec is not None: + update_data[ChannelMetadataField.AUDIO_CODEC] = str(audio_codec) + + if sample_rate is not None: + update_data[ChannelMetadataField.SAMPLE_RATE] = str(sample_rate) + + if channels is not None: + update_data[ChannelMetadataField.AUDIO_CHANNELS] = str(channels) + + if audio_bitrate is not None: + update_data[ChannelMetadataField.AUDIO_BITRATE] = str(round(audio_bitrate, 1)) + + self.buffer.redis_client.hset(metadata_key, mapping=update_data) + + except Exception as e: + logger.error(f"Error updating stream info in Redis: {e}") + def _parse_ffmpeg_stats(self, stats_line): """Parse FFmpeg stats line and extract speed, fps, and bitrate""" try: @@ -538,9 +675,13 @@ class StreamManager: if any(x is not None for x in [ffmpeg_speed, ffmpeg_fps, actual_fps, ffmpeg_bitrate]): self._update_ffmpeg_stats_in_redis(ffmpeg_speed, ffmpeg_fps, actual_fps, ffmpeg_bitrate) + # Fix the f-string formatting + actual_fps_str = f"{actual_fps:.1f}" if actual_fps is not None else "N/A" + ffmpeg_bitrate_str = f"{ffmpeg_bitrate:.1f}" if ffmpeg_bitrate is not None else "N/A" 
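# (Editorial note: the removed one-liners below embedded a conditional inside
# the format spec -- f"{x:.1f if x else 'N/A'}" -- which is not a valid format
# specifier and fails at runtime; precomputing the *_str values above is the
# actual fix this hunk applies.)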
+ logger.debug(f"FFmpeg stats - Speed: {ffmpeg_speed}x, FFmpeg FPS: {ffmpeg_fps}, " - f"Actual FPS: {actual_fps:.1f if actual_fps else 'N/A'}, " - f"Bitrate: {ffmpeg_bitrate:.1f if ffmpeg_bitrate else 'N/A'} kbps") + f"Actual FPS: {actual_fps_str}, " + f"Bitrate: {ffmpeg_bitrate_str} kbps") except Exception as e: logger.debug(f"Error parsing FFmpeg stats: {e}") From 47500daafaf4e1d90a3afd32b5e6e1dd1eae421d Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Mon, 9 Jun 2025 19:10:52 -0500 Subject: [PATCH 0467/1435] Moved some functions to channel_service --- .../ts_proxy/services/channel_service.py | 138 ++++++++++++++++++ apps/proxy/ts_proxy/stream_manager.py | 138 +----------------- 2 files changed, 143 insertions(+), 133 deletions(-) diff --git a/apps/proxy/ts_proxy/services/channel_service.py b/apps/proxy/ts_proxy/services/channel_service.py index bd1f2f81..761d56ac 100644 --- a/apps/proxy/ts_proxy/services/channel_service.py +++ b/apps/proxy/ts_proxy/services/channel_service.py @@ -6,6 +6,7 @@ This separates business logic from HTTP handling in views. import logging import time import json +import re from django.shortcuts import get_object_or_404 from apps.channels.models import Channel, Stream from apps.proxy.config import TSConfig as Config @@ -415,6 +416,143 @@ class ChannelService: logger.error(f"Error validating channel state: {e}", exc_info=True) return False, None, None, {"error": f"Exception: {str(e)}"} + @staticmethod + def parse_and_store_stream_info(channel_id, stream_info_line, stream_type="video"): + """Parse FFmpeg stream info line and store in Redis metadata""" + try: + if stream_type == "video": + # Example line: + # Stream #0:0: Video: h264 (Main), yuv420p(tv, progressive), 1280x720 [SAR 1:1 DAR 16:9], q=2-31, 2000 kb/s, 29.97 fps, 90k tbn + + # Extract video codec (e.g., "h264", "mpeg2video", etc.) 
+ codec_match = re.search(r'Video:\s*([a-zA-Z0-9_]+)', stream_info_line) + video_codec = codec_match.group(1) if codec_match else None + + # Extract resolution (e.g., "1280x720") + resolution_match = re.search(r'(\d+)x(\d+)', stream_info_line) + if resolution_match: + width = int(resolution_match.group(1)) + height = int(resolution_match.group(2)) + resolution = f"{width}x{height}" + else: + width = height = resolution = None + + # Extract source FPS (e.g., "29.97 fps") + fps_match = re.search(r'(\d+(?:\.\d+)?)\s*fps', stream_info_line) + source_fps = float(fps_match.group(1)) if fps_match else None + + # Extract pixel format (e.g., "yuv420p") + pixel_format_match = re.search(r'Video:\s*[^,]+,\s*([^,(]+)', stream_info_line) + pixel_format = None + if pixel_format_match: + pf = pixel_format_match.group(1).strip() + # Clean up pixel format (remove extra info in parentheses) + if '(' in pf: + pf = pf.split('(')[0].strip() + pixel_format = pf + + # Extract bitrate if present (e.g., "2000 kb/s") + video_bitrate = None + bitrate_match = re.search(r'(\d+(?:\.\d+)?)\s*kb/s', stream_info_line) + if bitrate_match: + video_bitrate = float(bitrate_match.group(1)) + + # Store in Redis if we have valid data + if any(x is not None for x in [video_codec, resolution, source_fps, pixel_format, video_bitrate]): + ChannelService._update_stream_info_in_redis(channel_id, video_codec, resolution, width, height, source_fps, pixel_format, video_bitrate, None, None, None, None) + + logger.info(f"Video stream info - Codec: {video_codec}, Resolution: {resolution}, " + f"Source FPS: {source_fps}, Pixel Format: {pixel_format}, " + f"Video Bitrate: {video_bitrate} kb/s") + + elif stream_type == "audio": + # Example line: + # Stream #0:1[0x101]: Audio: aac (LC) ([15][0][0][0] / 0x000F), 48000 Hz, stereo, fltp, 64 kb/s + + # Extract audio codec (e.g., "aac", "mp3", etc.) 
+ codec_match = re.search(r'Audio:\s*([a-zA-Z0-9_]+)', stream_info_line) + audio_codec = codec_match.group(1) if codec_match else None + + # Extract sample rate (e.g., "48000 Hz") + sample_rate_match = re.search(r'(\d+)\s*Hz', stream_info_line) + sample_rate = int(sample_rate_match.group(1)) if sample_rate_match else None + + # Extract channel layout (e.g., "stereo", "5.1", "mono") + # Look for common channel layouts + channel_match = re.search(r'\b(mono|stereo|5\.1|7\.1|quad|2\.1)\b', stream_info_line, re.IGNORECASE) + channels = channel_match.group(1) if channel_match else None + + # Extract audio bitrate if present (e.g., "64 kb/s") + audio_bitrate = None + bitrate_match = re.search(r'(\d+(?:\.\d+)?)\s*kb/s', stream_info_line) + if bitrate_match: + audio_bitrate = float(bitrate_match.group(1)) + + # Store in Redis if we have valid data + if any(x is not None for x in [audio_codec, sample_rate, channels, audio_bitrate]): + ChannelService._update_stream_info_in_redis(channel_id, None, None, None, None, None, None, None, audio_codec, sample_rate, channels, audio_bitrate) + + logger.info(f"Audio stream info - Codec: {audio_codec}, Sample Rate: {sample_rate} Hz, " + f"Channels: {channels}, Audio Bitrate: {audio_bitrate} kb/s") + + except Exception as e: + logger.debug(f"Error parsing FFmpeg {stream_type} stream info: {e}") + + @staticmethod + def _update_stream_info_in_redis(channel_id, codec, resolution, width, height, fps, pixel_format, video_bitrate, audio_codec=None, sample_rate=None, channels=None, audio_bitrate=None): + """Update stream info in Redis metadata""" + try: + proxy_server = ProxyServer.get_instance() + if not proxy_server.redis_client: + return False + + metadata_key = RedisKeys.channel_metadata(channel_id) + update_data = { + ChannelMetadataField.STREAM_INFO_UPDATED: str(time.time()) + } + + # Video info + if codec is not None: + update_data[ChannelMetadataField.VIDEO_CODEC] = str(codec) + + if resolution is not None: + update_data[ChannelMetadataField.RESOLUTION] = str(resolution) + + if width is not None: + update_data[ChannelMetadataField.WIDTH] = str(width) + + if height is not None: + update_data[ChannelMetadataField.HEIGHT] = str(height) + + if fps is not None: + update_data[ChannelMetadataField.SOURCE_FPS] = str(round(fps, 2)) + + if pixel_format is not None: + update_data[ChannelMetadataField.PIXEL_FORMAT] = str(pixel_format) + + if video_bitrate is not None: + update_data[ChannelMetadataField.VIDEO_BITRATE] = str(round(video_bitrate, 1)) + + # Audio info + if audio_codec is not None: + update_data[ChannelMetadataField.AUDIO_CODEC] = str(audio_codec) + + if sample_rate is not None: + update_data[ChannelMetadataField.SAMPLE_RATE] = str(sample_rate) + + if channels is not None: + update_data[ChannelMetadataField.AUDIO_CHANNELS] = str(channels) + + if audio_bitrate is not None: + update_data[ChannelMetadataField.AUDIO_BITRATE] = str(round(audio_bitrate, 1)) + + proxy_server.redis_client.hset(metadata_key, mapping=update_data) + return True + + except Exception as e: + logger.error(f"Error updating stream info in Redis: {e}") + return False + # Helper methods for Redis operations @staticmethod diff --git a/apps/proxy/ts_proxy/stream_manager.py b/apps/proxy/ts_proxy/stream_manager.py index 883d312e..7f81e29e 100644 --- a/apps/proxy/ts_proxy/stream_manager.py +++ b/apps/proxy/ts_proxy/stream_manager.py @@ -483,12 +483,13 @@ class StreamManager: # Convert to lowercase for easier matching content_lower = content.lower() - # Check for stream info lines first + # Check for 
stream info lines first and delegate to ChannelService if "stream #" in content_lower and ("video:" in content_lower or "audio:" in content_lower): + from .services.channel_service import ChannelService if "video:" in content_lower: - self._parse_ffmpeg_stream_info(content, stream_type="video") + ChannelService.parse_and_store_stream_info(self.channel_id, content, "video") elif "audio:" in content_lower: - self._parse_ffmpeg_stream_info(content, stream_type="audio") + ChannelService.parse_and_store_stream_info(self.channel_id, content, "audio") # Determine log level based on content if any(keyword in content_lower for keyword in ['error', 'failed', 'cannot', 'invalid', 'corrupt']): @@ -508,136 +509,6 @@ class StreamManager: except Exception as e: logger.error(f"Error logging stderr content: {e}") - def _parse_ffmpeg_stream_info(self, stream_info_line, stream_type="video"): - """Parse FFmpeg stream info line to extract video/audio codec, resolution, and FPS""" - try: - if stream_type == "video": - # Example line: - # Stream #0:0: Video: h264 (Main), yuv420p(tv, progressive), 1280x720 [SAR 1:1 DAR 16:9], q=2-31, 2000 kb/s, 29.97 fps, 90k tbn - - # Extract video codec (e.g., "h264", "mpeg2video", etc.) - codec_match = re.search(r'Video:\s*([a-zA-Z0-9_]+)', stream_info_line) - video_codec = codec_match.group(1) if codec_match else None - - # Extract resolution (e.g., "1280x720") - resolution_match = re.search(r'(\d+)x(\d+)', stream_info_line) - if resolution_match: - width = int(resolution_match.group(1)) - height = int(resolution_match.group(2)) - resolution = f"{width}x{height}" - else: - width = height = resolution = None - - # Extract source FPS (e.g., "29.97 fps") - fps_match = re.search(r'(\d+(?:\.\d+)?)\s*fps', stream_info_line) - source_fps = float(fps_match.group(1)) if fps_match else None - - # Extract pixel format (e.g., "yuv420p") - pixel_format_match = re.search(r'Video:\s*[^,]+,\s*([^,(]+)', stream_info_line) - pixel_format = None - if pixel_format_match: - pf = pixel_format_match.group(1).strip() - # Clean up pixel format (remove extra info in parentheses) - if '(' in pf: - pf = pf.split('(')[0].strip() - pixel_format = pf - - # Extract bitrate if present (e.g., "2000 kb/s") - video_bitrate = None - bitrate_match = re.search(r'(\d+(?:\.\d+)?)\s*kb/s', stream_info_line) - if bitrate_match: - video_bitrate = float(bitrate_match.group(1)) - - # Store in Redis if we have valid data - if any(x is not None for x in [video_codec, resolution, source_fps, pixel_format, video_bitrate]): - self._update_stream_info_in_redis(video_codec, resolution, width, height, source_fps, pixel_format, video_bitrate, None, None, None) - - logger.info(f"Video stream info - Codec: {video_codec}, Resolution: {resolution}, " - f"Source FPS: {source_fps}, Pixel Format: {pixel_format}, " - f"Video Bitrate: {video_bitrate} kb/s") - - elif stream_type == "audio": - # Example line: - # Stream #0:1[0x101]: Audio: aac (LC) ([15][0][0][0] / 0x000F), 48000 Hz, stereo, fltp, 64 kb/s - - # Extract audio codec (e.g., "aac", "mp3", etc.) 
- codec_match = re.search(r'Audio:\s*([a-zA-Z0-9_]+)', stream_info_line) - audio_codec = codec_match.group(1) if codec_match else None - - # Extract sample rate (e.g., "48000 Hz") - sample_rate_match = re.search(r'(\d+)\s*Hz', stream_info_line) - sample_rate = int(sample_rate_match.group(1)) if sample_rate_match else None - - # Extract channel layout (e.g., "stereo", "5.1", "mono") - # Look for common channel layouts - channel_match = re.search(r'\b(mono|stereo|5\.1|7\.1|quad|2\.1)\b', stream_info_line, re.IGNORECASE) - channels = channel_match.group(1) if channel_match else None - - # Extract audio bitrate if present (e.g., "64 kb/s") - audio_bitrate = None - bitrate_match = re.search(r'(\d+(?:\.\d+)?)\s*kb/s', stream_info_line) - if bitrate_match: - audio_bitrate = float(bitrate_match.group(1)) - - # Store in Redis if we have valid data - if any(x is not None for x in [audio_codec, sample_rate, channels, audio_bitrate]): - self._update_stream_info_in_redis(None, None, None, None, None, None, None, audio_codec, sample_rate, channels, audio_bitrate) - - logger.info(f"Audio stream info - Codec: {audio_codec}, Sample Rate: {sample_rate} Hz, " - f"Channels: {channels}, Audio Bitrate: {audio_bitrate} kb/s") - - except Exception as e: - logger.debug(f"Error parsing FFmpeg {stream_type} stream info: {e}") - - def _update_stream_info_in_redis(self, codec, resolution, width, height, fps, pixel_format, video_bitrate, audio_codec=None, sample_rate=None, channels=None, audio_bitrate=None): - """Update stream info in Redis metadata""" - try: - if hasattr(self.buffer, 'redis_client') and self.buffer.redis_client: - metadata_key = RedisKeys.channel_metadata(self.channel_id) - update_data = { - ChannelMetadataField.STREAM_INFO_UPDATED: str(time.time()) - } - - # Video info - if codec is not None: - update_data[ChannelMetadataField.VIDEO_CODEC] = str(codec) - - if resolution is not None: - update_data[ChannelMetadataField.RESOLUTION] = str(resolution) - - if width is not None: - update_data[ChannelMetadataField.WIDTH] = str(width) - - if height is not None: - update_data[ChannelMetadataField.HEIGHT] = str(height) - - if fps is not None: - update_data[ChannelMetadataField.SOURCE_FPS] = str(round(fps, 2)) - - if pixel_format is not None: - update_data[ChannelMetadataField.PIXEL_FORMAT] = str(pixel_format) - - if video_bitrate is not None: - update_data[ChannelMetadataField.VIDEO_BITRATE] = str(round(video_bitrate, 1)) - - # Audio info - if audio_codec is not None: - update_data[ChannelMetadataField.AUDIO_CODEC] = str(audio_codec) - - if sample_rate is not None: - update_data[ChannelMetadataField.SAMPLE_RATE] = str(sample_rate) - - if channels is not None: - update_data[ChannelMetadataField.AUDIO_CHANNELS] = str(channels) - - if audio_bitrate is not None: - update_data[ChannelMetadataField.AUDIO_BITRATE] = str(round(audio_bitrate, 1)) - - self.buffer.redis_client.hset(metadata_key, mapping=update_data) - - except Exception as e: - logger.error(f"Error updating stream info in Redis: {e}") - def _parse_ffmpeg_stats(self, stats_line): """Parse FFmpeg stats line and extract speed, fps, and bitrate""" try: @@ -712,6 +583,7 @@ class StreamManager: except Exception as e: logger.error(f"Error updating FFmpeg stats in Redis: {e}") + def _establish_http_connection(self): """Establish a direct HTTP connection to the stream""" try: From 71079aead3f37a2a4e17379d3c93125576d97086 Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Mon, 9 Jun 2025 19:36:27 -0500 Subject: [PATCH 0468/1435] Add ffmpeg stats to channel status 
api. --- apps/proxy/ts_proxy/channel_status.py | 67 +++++++++++++++++++++++++++ 1 file changed, 67 insertions(+) diff --git a/apps/proxy/ts_proxy/channel_status.py b/apps/proxy/ts_proxy/channel_status.py index 50e84eec..edb1bf5a 100644 --- a/apps/proxy/ts_proxy/channel_status.py +++ b/apps/proxy/ts_proxy/channel_status.py @@ -264,6 +264,60 @@ class ChannelStatus: 'last_data_age': time.time() - manager.last_data_time } + # Add FFmpeg stream information + video_codec = metadata.get(ChannelMetadataField.VIDEO_CODEC.encode('utf-8')) + if video_codec: + info['video_codec'] = video_codec.decode('utf-8') + + resolution = metadata.get(ChannelMetadataField.RESOLUTION.encode('utf-8')) + if resolution: + info['resolution'] = resolution.decode('utf-8') + + source_fps = metadata.get(ChannelMetadataField.SOURCE_FPS.encode('utf-8')) + if source_fps: + info['source_fps'] = float(source_fps.decode('utf-8')) + + pixel_format = metadata.get(ChannelMetadataField.PIXEL_FORMAT.encode('utf-8')) + if pixel_format: + info['pixel_format'] = pixel_format.decode('utf-8') + + source_bitrate = metadata.get(ChannelMetadataField.SOURCE_BITRATE.encode('utf-8')) + if source_bitrate: + info['source_bitrate'] = float(source_bitrate.decode('utf-8')) + + audio_codec = metadata.get(ChannelMetadataField.AUDIO_CODEC.encode('utf-8')) + if audio_codec: + info['audio_codec'] = audio_codec.decode('utf-8') + + sample_rate = metadata.get(ChannelMetadataField.SAMPLE_RATE.encode('utf-8')) + if sample_rate: + info['sample_rate'] = int(sample_rate.decode('utf-8')) + + audio_channels = metadata.get(ChannelMetadataField.AUDIO_CHANNELS.encode('utf-8')) + if audio_channels: + info['audio_channels'] = audio_channels.decode('utf-8') + + audio_bitrate = metadata.get(ChannelMetadataField.AUDIO_BITRATE.encode('utf-8')) + if audio_bitrate: + info['audio_bitrate'] = float(audio_bitrate.decode('utf-8')) + + # Add FFmpeg performance stats + ffmpeg_speed = metadata.get(ChannelMetadataField.FFMPEG_SPEED.encode('utf-8')) + if ffmpeg_speed: + info['ffmpeg_speed'] = float(ffmpeg_speed.decode('utf-8')) + + ffmpeg_fps = metadata.get(ChannelMetadataField.FFMPEG_FPS.encode('utf-8')) + if ffmpeg_fps: + info['ffmpeg_fps'] = float(ffmpeg_fps.decode('utf-8')) + + actual_fps = metadata.get(ChannelMetadataField.ACTUAL_FPS.encode('utf-8')) + if actual_fps: + info['actual_fps'] = float(actual_fps.decode('utf-8')) + + ffmpeg_bitrate = metadata.get(ChannelMetadataField.FFMPEG_BITRATE.encode('utf-8')) + if ffmpeg_bitrate: + info['ffmpeg_bitrate'] = float(ffmpeg_bitrate.decode('utf-8')) + return info @staticmethod @@ -422,6 +476,19 @@ class ChannelStatus: except ValueError: logger.warning(f"Invalid m3u_profile_id format in Redis: {m3u_profile_id_bytes}") + # Add stream info to basic info as well + video_codec = metadata.get(ChannelMetadataField.VIDEO_CODEC.encode('utf-8')) + if video_codec: + info['video_codec'] = video_codec.decode('utf-8') + + resolution = metadata.get(ChannelMetadataField.RESOLUTION.encode('utf-8')) + if resolution: + info['resolution'] = resolution.decode('utf-8') + + source_fps = metadata.get(ChannelMetadataField.SOURCE_FPS.encode('utf-8')) + if source_fps: + info['source_fps'] = float(source_fps.decode('utf-8')) + return info except Exception as e: logger.error(f"Error getting channel info: {e}", exc_info=True) # Added exc_info for better debugging From 677fbba1ac9dcef940c709f9275ff9f8125705d3 Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Mon, 9 Jun 2025 19:42:58 -0500 Subject: [PATCH 0469/1435] FFmpeg stats added to channel card. 
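A note on PATCH 0468 above: redis-py returns hash keys and values as raw bytes, which is why every optional field is looked up with an encoded key, decoded, and cast before being added to the response. The repeated blocks all follow one pattern, sketched below with a hypothetical decode_metadata_fields helper (the helper name and table-driven form are illustrative, not part of the patch):

# Hypothetical helper condensing the decode-and-cast pattern used in
# channel_status.py; metadata is the raw hgetall()-style dict of bytes.
def decode_metadata_fields(metadata, fields):
    """fields maps output key -> (redis field name, cast); missing fields are skipped."""
    info = {}
    for out_key, (field, cast) in fields.items():
        raw = metadata.get(field.encode('utf-8'))
        if raw:
            info[out_key] = cast(raw.decode('utf-8'))
    return info

# Example mirroring a few of the additions above:
metadata = {b'video_codec': b'h264', b'source_fps': b'29.97', b'ffmpeg_speed': b'1.01'}
print(decode_metadata_fields(metadata, {
    'video_codec': ('video_codec', str),
    'source_fps': ('source_fps', float),
    'ffmpeg_speed': ('ffmpeg_speed', float),
}))  # -> {'video_codec': 'h264', 'source_fps': 29.97, 'ffmpeg_speed': 1.01}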
--- frontend/src/pages/Stats.jsx | 25 +++++++++++++++++++++++++ 1 file changed, 25 insertions(+) diff --git a/frontend/src/pages/Stats.jsx b/frontend/src/pages/Stats.jsx index fa3250ef..09163a5c 100644 --- a/frontend/src/pages/Stats.jsx +++ b/frontend/src/pages/Stats.jsx @@ -14,6 +14,7 @@ import { Tooltip, useMantineTheme, Select, + Badge, } from '@mantine/core'; import { MantineReactTable, useMantineReactTable } from 'mantine-react-table'; import { TableHelper } from '../helpers'; @@ -474,6 +475,30 @@ const ChannelCard = ({ channel, clients, stopClient, stopChannel, logos, channel )} + {/* Add stream information badges */} + + {channel.video_codec && ( + + {channel.video_codec.toUpperCase()} + + )} + {channel.resolution && ( + + {channel.resolution} + + )} + {channel.source_fps && ( + + {channel.source_fps} FPS + + )} + {channel.audio_codec && ( + + {channel.audio_codec.toUpperCase()} + + )} + + From 0fed65a4787b4ddf403d4ecf16d6eb0f577fb162 Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Mon, 9 Jun 2025 19:55:10 -0500 Subject: [PATCH 0470/1435] Add FFmpeg speed and audio codec information to channel details --- apps/proxy/ts_proxy/channel_status.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/apps/proxy/ts_proxy/channel_status.py b/apps/proxy/ts_proxy/channel_status.py index edb1bf5a..864ddac8 100644 --- a/apps/proxy/ts_proxy/channel_status.py +++ b/apps/proxy/ts_proxy/channel_status.py @@ -488,6 +488,12 @@ class ChannelStatus: source_fps = metadata.get(ChannelMetadataField.SOURCE_FPS.encode('utf-8')) if source_fps: info['source_fps'] = float(source_fps.decode('utf-8')) + ffmpeg_speed = metadata.get(ChannelMetadataField.FFMPEG_SPEED.encode('utf-8')) + if ffmpeg_speed: + info['ffmpeg_speed'] = float(ffmpeg_speed.decode('utf-8')) + audio_codec = metadata.get(ChannelMetadataField.AUDIO_CODEC.encode('utf-8')) + if audio_codec: + info['audio_codec'] = audio_codec.decode('utf-8') return info except Exception as e: From cd47e762451a41d9605d1293802499464bbdd79c Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Mon, 9 Jun 2025 19:56:37 -0500 Subject: [PATCH 0471/1435] Add FFmpeg speed display and audio codec to channel card --- frontend/src/pages/Stats.jsx | 11 +++++++++++ 1 file changed, 11 insertions(+) diff --git a/frontend/src/pages/Stats.jsx b/frontend/src/pages/Stats.jsx index 09163a5c..ab13c1e7 100644 --- a/frontend/src/pages/Stats.jsx +++ b/frontend/src/pages/Stats.jsx @@ -497,6 +497,17 @@ const ChannelCard = ({ channel, clients, stopClient, stopChannel, logos, channel {channel.audio_codec.toUpperCase()} )} + {channel.ffmpeg_speed && ( + + = 1.0 ? 
"green" : "red"} + > + {channel.ffmpeg_speed}x + + + )} From 82f35d2aef35c58ee8654b45e1609f46e9199530 Mon Sep 17 00:00:00 2001 From: dekzter Date: Tue, 10 Jun 2025 08:46:36 -0400 Subject: [PATCH 0472/1435] check and warn before saving a network access setting that could block current client access --- .dockerignore | 5 ++ apps/m3u/serializers.py | 88 +++++++++++++++++++++++---------- core/api_views.py | 22 ++++++++- frontend/src/api.js | 15 ++++++ frontend/src/pages/Settings.jsx | 47 +++++++++++++++++- 5 files changed, 148 insertions(+), 29 deletions(-) diff --git a/.dockerignore b/.dockerignore index 5073af60..c79ca7b4 100755 --- a/.dockerignore +++ b/.dockerignore @@ -11,6 +11,10 @@ **/.toolstarget **/.vs **/.vscode +**/.history +**/media +**/models +**/static **/*.*proj.user **/*.dbmdl **/*.jfm @@ -26,3 +30,4 @@ **/values.dev.yaml LICENSE README.md +data/ diff --git a/apps/m3u/serializers.py b/apps/m3u/serializers.py index 038af628..7394f00b 100644 --- a/apps/m3u/serializers.py +++ b/apps/m3u/serializers.py @@ -3,33 +3,45 @@ from rest_framework.response import Response from .models import M3UAccount, M3UFilter, ServerGroup, M3UAccountProfile from core.models import UserAgent from apps.channels.models import ChannelGroup, ChannelGroupM3UAccount -from apps.channels.serializers import ChannelGroupM3UAccountSerializer, ChannelGroupSerializer +from apps.channels.serializers import ( + ChannelGroupM3UAccountSerializer, + ChannelGroupSerializer, +) import logging logger = logging.getLogger(__name__) + class M3UFilterSerializer(serializers.ModelSerializer): """Serializer for M3U Filters""" - channel_groups = ChannelGroupM3UAccountSerializer(source='m3u_account', many=True) + + channel_groups = ChannelGroupM3UAccountSerializer(source="m3u_account", many=True) class Meta: model = M3UFilter - fields = ['id', 'filter_type', 'regex_pattern', 'exclude', 'channel_groups'] + fields = ["id", "filter_type", "regex_pattern", "exclude", "channel_groups"] -from rest_framework import serializers -from .models import M3UAccountProfile class M3UAccountProfileSerializer(serializers.ModelSerializer): class Meta: model = M3UAccountProfile - fields = ['id', 'name', 'max_streams', 'is_active', 'is_default', 'current_viewers', 'search_pattern', 'replace_pattern'] - read_only_fields = ['id'] + fields = [ + "id", + "name", + "max_streams", + "is_active", + "is_default", + "current_viewers", + "search_pattern", + "replace_pattern", + ] + read_only_fields = ["id"] def create(self, validated_data): - m3u_account = self.context.get('m3u_account') + m3u_account = self.context.get("m3u_account") # Use the m3u_account when creating the profile - validated_data['m3u_account_id'] = m3u_account.id + validated_data["m3u_account_id"] = m3u_account.id return super().create(validated_data) @@ -43,12 +55,14 @@ class M3UAccountProfileSerializer(serializers.ModelSerializer): if instance.is_default: return Response( {"error": "Default profiles cannot be deleted."}, - status=status.HTTP_400_BAD_REQUEST + status=status.HTTP_400_BAD_REQUEST, ) return super().destroy(request, *args, **kwargs) + class M3UAccountSerializer(serializers.ModelSerializer): """Serializer for M3U Account""" + filters = M3UFilterSerializer(many=True, read_only=True) # Include user_agent as a mandatory field using its primary key. 
user_agent = serializers.PrimaryKeyRelatedField( @@ -57,28 +71,48 @@ class M3UAccountSerializer(serializers.ModelSerializer): allow_null=True, ) profiles = M3UAccountProfileSerializer(many=True, read_only=True) - read_only_fields = ['locked', 'created_at', 'updated_at'] + read_only_fields = ["locked", "created_at", "updated_at"] # channel_groups = serializers.SerializerMethodField() - channel_groups = ChannelGroupM3UAccountSerializer(source='channel_group', many=True, required=False) + channel_groups = ChannelGroupM3UAccountSerializer( + source="channel_group", many=True, required=False + ) class Meta: model = M3UAccount fields = [ - 'id', 'name', 'server_url', 'file_path', 'server_group', - 'max_streams', 'is_active', 'created_at', 'updated_at', 'filters', 'user_agent', 'profiles', 'locked', - 'channel_groups', 'refresh_interval', 'custom_properties', 'account_type', 'username', 'password', 'stale_stream_days', - 'status', 'last_message', + "id", + "name", + "server_url", + "file_path", + "server_group", + "max_streams", + "is_active", + "created_at", + "updated_at", + "filters", + "user_agent", + "profiles", + "locked", + "channel_groups", + "refresh_interval", + "custom_properties", + "account_type", + "username", + "password", + "stale_stream_days", + "status", + "last_message", ] extra_kwargs = { - 'password': { - 'required': False, - 'allow_blank': True, + "password": { + "required": False, + "allow_blank": True, }, } def update(self, instance, validated_data): # Pop out channel group memberships so we can handle them manually - channel_group_data = validated_data.pop('channel_group', []) + channel_group_data = validated_data.pop("channel_group", []) # First, update the M3UAccount itself for attr, value in validated_data.items(): @@ -88,13 +122,12 @@ class M3UAccountSerializer(serializers.ModelSerializer): # Prepare a list of memberships to update memberships_to_update = [] for group_data in channel_group_data: - group = group_data.get('channel_group') - enabled = group_data.get('enabled') + group = group_data.get("channel_group") + enabled = group_data.get("enabled") try: membership = ChannelGroupM3UAccount.objects.get( - m3u_account=instance, - channel_group=group + m3u_account=instance, channel_group=group ) membership.enabled = enabled memberships_to_update.append(membership) @@ -103,13 +136,16 @@ class M3UAccountSerializer(serializers.ModelSerializer): # Perform the bulk update if memberships_to_update: - ChannelGroupM3UAccount.objects.bulk_update(memberships_to_update, ['enabled']) + ChannelGroupM3UAccount.objects.bulk_update( + memberships_to_update, ["enabled"] + ) return instance + class ServerGroupSerializer(serializers.ModelSerializer): """Serializer for Server Group""" class Meta: model = ServerGroup - fields = ['id', 'name'] + fields = ["id", "name"] diff --git a/core/api_views.py b/core/api_views.py index b3e0c1bb..bf6ee2ba 100644 --- a/core/api_views.py +++ b/core/api_views.py @@ -1,5 +1,7 @@ # core/api_views.py +import json +import ipaddress from rest_framework import viewsets, status from rest_framework.response import Response from django.shortcuts import get_object_or_404 @@ -9,7 +11,7 @@ from .serializers import ( StreamProfileSerializer, CoreSettingsSerializer, ) -from rest_framework.decorators import api_view, permission_classes +from rest_framework.decorators import api_view, permission_classes, action from drf_yasg.utils import swagger_auto_schema import socket import requests @@ -18,6 +20,7 @@ from core.tasks import rehash_streams from 
apps.accounts.permissions import ( Authenticated, ) +from dispatcharr.utils import get_client_ip class UserAgentViewSet(viewsets.ModelViewSet): @@ -56,6 +59,23 @@ class CoreSettingsViewSet(viewsets.ModelViewSet): return response + @action(detail=False, methods=["post"], url_path="check") + def check(self, request, *args, **kwargs): + data = request.data + + client_ip = ipaddress.ip_address(get_client_ip(request)) + in_network = [] + key = data.get("key") + value = json.loads(data.get("value", "{}")) + for key, val in value.items(): + cidrs = val.split(",") + for cidr in cidrs: + network = ipaddress.ip_network(cidr) + if client_ip not in network: + in_network.append(cidr) + + return Response(in_network, status=status.HTTP_200_OK) + @swagger_auto_schema( method="get", diff --git a/frontend/src/api.js b/frontend/src/api.js index 488f0f31..17c38b90 100644 --- a/frontend/src/api.js +++ b/frontend/src/api.js @@ -1085,6 +1085,21 @@ export default class API { } } + static async checkSetting(values) { + const { id, ...payload } = values; + + try { + const response = await request(`${host}/api/core/settings/check/`, { + method: 'POST', + body: payload, + }); + + return response; + } catch (e) { + errorNotification('Failed to update settings', e); + } + } + static async updateSetting(values) { const { id, ...payload } = values; diff --git a/frontend/src/pages/Settings.jsx b/frontend/src/pages/Settings.jsx index 6606f977..b4fc37cc 100644 --- a/frontend/src/pages/Settings.jsx +++ b/frontend/src/pages/Settings.jsx @@ -24,6 +24,7 @@ import StreamProfilesTable from '../components/tables/StreamProfilesTable'; import useLocalStorage from '../hooks/useLocalStorage'; import useAuthStore from '../store/auth'; import { USER_LEVELS, NETWORK_ACCESS_OPTIONS } from '../constants'; +import ConfirmationDialog from '../components/ConfirmationDialog'; const SettingsPage = () => { const settings = useSettingsStore((s) => s.settings); @@ -33,6 +34,10 @@ const SettingsPage = () => { const [accordianValue, setAccordianValue] = useState(null); const [networkAccessSaved, setNetworkAccessSaved] = useState(false); + const [networkAccessConfirmOpen, setNetworkAccessConfirmOpen] = + useState(false); + const [netNetworkAccessConfirmCIDRs, setNetNetworkAccessConfirmCIDRs] = + useState([]); // UI / local storage settings const [tableSize, setTableSize] = useLocalStorage('table-size', 'default'); @@ -315,7 +320,6 @@ const SettingsPage = () => { useEffect(() => { if (settings) { - console.log(settings); const formValues = Object.entries(settings).reduce( (acc, [key, value]) => { // Modify each value based on its own properties @@ -378,7 +382,21 @@ const SettingsPage = () => { }; const onNetworkAccessSubmit = async () => { - let result = null; + setNetworkAccessSaved(false); + const check = await API.checkSetting({ + ...settings['network-access'], + value: JSON.stringify(networkAccessForm.getValues()), + }); + + if (check.length == 0) { + return saveNetworkAccess(); + } + + setNetNetworkAccessConfirmCIDRs(check); + setNetworkAccessConfirmOpen(true); + }; + + const saveNetworkAccess = async () => { setNetworkAccessSaved(false); try { await API.updateSetting({ @@ -386,6 +404,7 @@ const SettingsPage = () => { value: JSON.stringify(networkAccessForm.getValues()), }); setNetworkAccessSaved(true); + setNetworkAccessConfirmOpen(false); } catch (e) { const errors = {}; for (const key in e.body.value) { @@ -644,6 +663,30 @@ const SettingsPage = () => { )}
+
+        <ConfirmationDialog
+          opened={networkAccessConfirmOpen}
+          onClose={() => setNetworkAccessConfirmOpen(false)}
+          onConfirm={saveNetworkAccess}
+          title={`Confirm Network Access Blocks`}
+          message={
+            <>
+              <Text>
+                Your client is not included in the following CIDRs, so saving
+                could block access. Are you sure you want to proceed?
+              </Text>
+              <Box>
+                {netNetworkAccessConfirmCIDRs.map((cidr) => (
+                  <Text key={cidr}>• {cidr}</Text>
+                ))}
+              </Box>
+            </>
+ + } + confirmLabel="Save" + cancelLabel="Cancel" + size="md" + /> ); }; From efaa64d00b10324fc254982e3e173d177ea05dad Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Tue, 10 Jun 2025 09:08:04 -0500 Subject: [PATCH 0473/1435] Fix resolution not always parsing correctly. --- apps/proxy/ts_proxy/services/channel_service.py | 11 ++++++++--- 1 file changed, 8 insertions(+), 3 deletions(-) diff --git a/apps/proxy/ts_proxy/services/channel_service.py b/apps/proxy/ts_proxy/services/channel_service.py index 761d56ac..42f5efee 100644 --- a/apps/proxy/ts_proxy/services/channel_service.py +++ b/apps/proxy/ts_proxy/services/channel_service.py @@ -428,12 +428,17 @@ class ChannelService: codec_match = re.search(r'Video:\s*([a-zA-Z0-9_]+)', stream_info_line) video_codec = codec_match.group(1) if codec_match else None - # Extract resolution (e.g., "1280x720") - resolution_match = re.search(r'(\d+)x(\d+)', stream_info_line) + # Extract resolution (e.g., "1280x720") - be more specific to avoid hex values + # Look for resolution patterns that are realistic video dimensions + resolution_match = re.search(r'\b(\d{3,5})x(\d{3,5})\b', stream_info_line) if resolution_match: width = int(resolution_match.group(1)) height = int(resolution_match.group(2)) - resolution = f"{width}x{height}" + # Validate that these look like reasonable video dimensions + if 100 <= width <= 10000 and 100 <= height <= 10000: + resolution = f"{width}x{height}" + else: + width = height = resolution = None else: width = height = resolution = None From b8992bde641a6bbdf9827f774d3f7ffff0d09a38 Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Tue, 10 Jun 2025 09:23:43 -0500 Subject: [PATCH 0474/1435] Add audio channels to stats page. --- apps/proxy/ts_proxy/channel_status.py | 3 +++ frontend/src/pages/Stats.jsx | 5 +++++ 2 files changed, 8 insertions(+) diff --git a/apps/proxy/ts_proxy/channel_status.py b/apps/proxy/ts_proxy/channel_status.py index 864ddac8..77b4482d 100644 --- a/apps/proxy/ts_proxy/channel_status.py +++ b/apps/proxy/ts_proxy/channel_status.py @@ -494,6 +494,9 @@ class ChannelStatus: audio_codec = metadata.get(ChannelMetadataField.AUDIO_CODEC.encode('utf-8')) if audio_codec: info['audio_codec'] = audio_codec.decode('utf-8') + audio_channels = metadata.get(ChannelMetadataField.AUDIO_CHANNELS.encode('utf-8')) + if audio_channels: + info['audio_channels'] = audio_channels.decode('utf-8') return info except Exception as e: diff --git a/frontend/src/pages/Stats.jsx b/frontend/src/pages/Stats.jsx index ab13c1e7..2b3fd8bb 100644 --- a/frontend/src/pages/Stats.jsx +++ b/frontend/src/pages/Stats.jsx @@ -497,6 +497,11 @@ const ChannelCard = ({ channel, clients, stopClient, stopChannel, logos, channel {channel.audio_codec.toUpperCase()} )} + {channel.audio_channels && ( + + {channel.audio_channels} + + )} {channel.ffmpeg_speed && ( Date: Tue, 10 Jun 2025 09:51:56 -0500 Subject: [PATCH 0475/1435] Add stream type for stream (HLS/MPEGTS, ETC) --- apps/proxy/ts_proxy/constants.py | 2 ++ .../ts_proxy/services/channel_service.py | 25 ++++++++++++++++--- apps/proxy/ts_proxy/stream_manager.py | 8 ++++-- 3 files changed, 29 insertions(+), 6 deletions(-) diff --git a/apps/proxy/ts_proxy/constants.py b/apps/proxy/ts_proxy/constants.py index daaf7bb3..385d17c1 100644 --- a/apps/proxy/ts_proxy/constants.py +++ b/apps/proxy/ts_proxy/constants.py @@ -85,6 +85,8 @@ class ChannelMetadataField: AUDIO_CHANNELS = "audio_channels" AUDIO_BITRATE = "audio_bitrate" + # Stream format info + STREAM_TYPE = "stream_type" # Stream info timestamp 
STREAM_INFO_UPDATED = "stream_info_updated" diff --git a/apps/proxy/ts_proxy/services/channel_service.py b/apps/proxy/ts_proxy/services/channel_service.py index 42f5efee..2bf26364 100644 --- a/apps/proxy/ts_proxy/services/channel_service.py +++ b/apps/proxy/ts_proxy/services/channel_service.py @@ -420,7 +420,22 @@ class ChannelService: def parse_and_store_stream_info(channel_id, stream_info_line, stream_type="video"): """Parse FFmpeg stream info line and store in Redis metadata""" try: - if stream_type == "video": + if stream_type == "input": + # Example lines: + # Input #0, mpegts, from 'http://example.com/stream.ts': + # Input #0, hls, from 'http://example.com/stream.m3u8': + + # Extract input format (e.g., "mpegts", "hls", "flv", etc.) + input_match = re.search(r'Input #\d+,\s*([^,]+)', stream_info_line) + input_format = input_match.group(1).strip() if input_match else None + + # Store in Redis if we have valid data + if input_format: + ChannelService._update_stream_info_in_redis(channel_id, None, None, None, None, None, None, None, None, None, None, None, input_format) + + logger.debug(f"Input format info - Format: {input_format} for channel {channel_id}") + + elif stream_type == "video": # Example line: # Stream #0:0: Video: h264 (Main), yuv420p(tv, progressive), 1280x720 [SAR 1:1 DAR 16:9], q=2-31, 2000 kb/s, 29.97 fps, 90k tbn @@ -464,7 +479,7 @@ class ChannelService: # Store in Redis if we have valid data if any(x is not None for x in [video_codec, resolution, source_fps, pixel_format, video_bitrate]): - ChannelService._update_stream_info_in_redis(channel_id, video_codec, resolution, width, height, source_fps, pixel_format, video_bitrate, None, None, None, None) + ChannelService._update_stream_info_in_redis(channel_id, video_codec, resolution, width, height, source_fps, pixel_format, video_bitrate, None, None, None, None, None) logger.info(f"Video stream info - Codec: {video_codec}, Resolution: {resolution}, " f"Source FPS: {source_fps}, Pixel Format: {pixel_format}, " @@ -495,7 +510,7 @@ class ChannelService: # Store in Redis if we have valid data if any(x is not None for x in [audio_codec, sample_rate, channels, audio_bitrate]): - ChannelService._update_stream_info_in_redis(channel_id, None, None, None, None, None, None, None, audio_codec, sample_rate, channels, audio_bitrate) + ChannelService._update_stream_info_in_redis(channel_id, None, None, None, None, None, None, None, audio_codec, sample_rate, channels, audio_bitrate, None) logger.info(f"Audio stream info - Codec: {audio_codec}, Sample Rate: {sample_rate} Hz, " f"Channels: {channels}, Audio Bitrate: {audio_bitrate} kb/s") @@ -504,7 +519,7 @@ class ChannelService: logger.debug(f"Error parsing FFmpeg {stream_type} stream info: {e}") @staticmethod - def _update_stream_info_in_redis(channel_id, codec, resolution, width, height, fps, pixel_format, video_bitrate, audio_codec=None, sample_rate=None, channels=None, audio_bitrate=None): + def _update_stream_info_in_redis(channel_id, codec, resolution, width, height, fps, pixel_format, video_bitrate, audio_codec=None, sample_rate=None, channels=None, audio_bitrate=None, input_format=None): """Update stream info in Redis metadata""" try: proxy_server = ProxyServer.get_instance() @@ -550,6 +565,8 @@ class ChannelService: if audio_bitrate is not None: update_data[ChannelMetadataField.AUDIO_BITRATE] = str(round(audio_bitrate, 1)) + if input_format is not None: + update_data[ChannelMetadataField.STREAM_TYPE] = str(input_format) proxy_server.redis_client.hset(metadata_key, 
mapping=update_data) return True diff --git a/apps/proxy/ts_proxy/stream_manager.py b/apps/proxy/ts_proxy/stream_manager.py index 7f81e29e..6e3c9e73 100644 --- a/apps/proxy/ts_proxy/stream_manager.py +++ b/apps/proxy/ts_proxy/stream_manager.py @@ -6,8 +6,8 @@ import time import socket import requests import subprocess -import gevent # Add this import -import re # Add this import at the top +import gevent +import re from typing import Optional, List from django.shortcuts import get_object_or_404 from apps.proxy.config import TSConfig as Config @@ -502,6 +502,10 @@ class StreamManager: elif any(keyword in content_lower for keyword in ['input', 'output', 'stream', 'video', 'audio']): # Stream info - log at info level logger.info(f"FFmpeg info: {content}") + if content.startswith('Input #0'): + # If it's input 0, parse stream info + from .services.channel_service import ChannelService + ChannelService.parse_and_store_stream_info(self.channel_id, content, "input") else: # Everything else at debug level logger.debug(f"FFmpeg stderr: {content}") From 1f6f15ed73b1adc76cc6cdccbe80c89a7ea80939 Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Tue, 10 Jun 2025 10:10:05 -0500 Subject: [PATCH 0476/1435] Add stream type to stats page. --- apps/proxy/ts_proxy/channel_status.py | 6 ++++++ frontend/src/pages/Stats.jsx | 15 ++++++++++----- 2 files changed, 16 insertions(+), 5 deletions(-) diff --git a/apps/proxy/ts_proxy/channel_status.py b/apps/proxy/ts_proxy/channel_status.py index 77b4482d..8f1d0649 100644 --- a/apps/proxy/ts_proxy/channel_status.py +++ b/apps/proxy/ts_proxy/channel_status.py @@ -317,6 +317,9 @@ class ChannelStatus: ffmpeg_bitrate = metadata.get(ChannelMetadataField.FFMPEG_BITRATE.encode('utf-8')) if ffmpeg_bitrate: info['ffmpeg_bitrate'] = float(ffmpeg_bitrate.decode('utf-8')) + stream_type = metadata.get(ChannelMetadataField.STREAM_TYPE.encode('utf-8')) + if stream_type: + info['stream_type'] = stream_type.decode('utf-8') return info @@ -497,6 +500,9 @@ class ChannelStatus: audio_channels = metadata.get(ChannelMetadataField.AUDIO_CHANNELS.encode('utf-8')) if audio_channels: info['audio_channels'] = audio_channels.decode('utf-8') + stream_type = metadata.get(ChannelMetadataField.STREAM_TYPE.encode('utf-8')) + if stream_type: + info['stream_type'] = stream_type.decode('utf-8') return info except Exception as e: diff --git a/frontend/src/pages/Stats.jsx b/frontend/src/pages/Stats.jsx index 2b3fd8bb..fea8653e 100644 --- a/frontend/src/pages/Stats.jsx +++ b/frontend/src/pages/Stats.jsx @@ -477,11 +477,6 @@ const ChannelCard = ({ channel, clients, stopClient, stopChannel, logos, channel {/* Add stream information badges */} - {channel.video_codec && ( - - {channel.video_codec.toUpperCase()} - - )} {channel.resolution && ( {channel.resolution} @@ -492,6 +487,16 @@ const ChannelCard = ({ channel, clients, stopClient, stopChannel, logos, channel {channel.source_fps} FPS )} + {channel.video_codec && ( + + {channel.video_codec.toUpperCase()} + + )} + {channel.stream_type && ( + + {channel.stream_type.toUpperCase()} + + )} {channel.audio_codec && ( {channel.audio_codec.toUpperCase()} From 3522066867a6f4e15bde545c98330ca169131735 Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Tue, 10 Jun 2025 10:17:28 -0500 Subject: [PATCH 0477/1435] Changed some badge colors and added tooltips. 
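For reference, the input-format detection added in PATCH 0475 above keys off FFmpeg's banner line rather than the per-stream lines. A standalone check of that regex against typical MPEG-TS and HLS banners (the URLs are placeholders):

import re

samples = [
    "Input #0, mpegts, from 'http://example.com/stream.ts':",
    "Input #0, hls, from 'http://example.com/stream.m3u8':",
]
for line in samples:
    match = re.search(r'Input #\d+,\s*([^,]+)', line)
    # Prints the captured container format, as parse_and_store_stream_info does
    print(match.group(1).strip() if match else None)  # -> mpegts, then hls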
--- frontend/src/pages/Stats.jsx | 50 ++++++++++++++++++++++-------------- 1 file changed, 31 insertions(+), 19 deletions(-) diff --git a/frontend/src/pages/Stats.jsx b/frontend/src/pages/Stats.jsx index fea8653e..ce77f9e8 100644 --- a/frontend/src/pages/Stats.jsx +++ b/frontend/src/pages/Stats.jsx @@ -478,37 +478,49 @@ const ChannelCard = ({ channel, clients, stopClient, stopChannel, logos, channel {/* Add stream information badges */} {channel.resolution && ( - - {channel.resolution} - + + + {channel.resolution} + + )} {channel.source_fps && ( - - {channel.source_fps} FPS - + + + {channel.source_fps} FPS + + )} {channel.video_codec && ( - - {channel.video_codec.toUpperCase()} - + + + {channel.video_codec.toUpperCase()} + + )} {channel.stream_type && ( - - {channel.stream_type.toUpperCase()} - + + + {channel.stream_type.toUpperCase()} + + )} {channel.audio_codec && ( - - {channel.audio_codec.toUpperCase()} - + + + {channel.audio_codec.toUpperCase()} + + )} {channel.audio_channels && ( - - {channel.audio_channels} - + + + {channel.audio_channels} + + )} {channel.ffmpeg_speed && ( - + Date: Tue, 10 Jun 2025 11:38:52 -0500 Subject: [PATCH 0478/1435] Move stream type to after audio. --- frontend/src/pages/Stats.jsx | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/frontend/src/pages/Stats.jsx b/frontend/src/pages/Stats.jsx index ce77f9e8..f60e14d0 100644 --- a/frontend/src/pages/Stats.jsx +++ b/frontend/src/pages/Stats.jsx @@ -498,13 +498,6 @@ const ChannelCard = ({ channel, clients, stopClient, stopChannel, logos, channel )} - {channel.stream_type && ( - - - {channel.stream_type.toUpperCase()} - - - )} {channel.audio_codec && ( @@ -519,6 +512,13 @@ const ChannelCard = ({ channel, clients, stopClient, stopChannel, logos, channel )} + {channel.stream_type && ( + + + {channel.stream_type.toUpperCase()} + + + )} {channel.ffmpeg_speed && ( Date: Tue, 10 Jun 2025 13:58:34 -0500 Subject: [PATCH 0479/1435] Better error messaging for unsupported codecs in the web player. Also don't block controls with error messages. 
--- frontend/src/components/FloatingVideo.jsx | 202 ++++++++++++---------- 1 file changed, 115 insertions(+), 87 deletions(-) diff --git a/frontend/src/components/FloatingVideo.jsx b/frontend/src/components/FloatingVideo.jsx index 46c191eb..7f1e1c53 100644 --- a/frontend/src/components/FloatingVideo.jsx +++ b/frontend/src/components/FloatingVideo.jsx @@ -73,72 +73,109 @@ export default function FloatingVideo() { console.log("Attempting to play stream:", streamUrl); try { - // If the browser supports MSE for live playback, initialize mpegts.js - if (mpegts.getFeatureList().mseLivePlayback) { - // Set loading flag - setIsLoading(true); - - const player = mpegts.createPlayer({ - type: 'mpegts', // MPEG-TS format - url: streamUrl, - isLive: true, - enableWorker: true, - enableStashBuffer: false, // Try disabling stash buffer for live streams - liveBufferLatencyChasing: true, - liveSync: true, - cors: true, // Enable CORS for cross-domain requests - // Add error recovery options - autoCleanupSourceBuffer: true, - autoCleanupMaxBackwardDuration: 10, - autoCleanupMinBackwardDuration: 5, - reuseRedirectedURL: true, - }); - - player.attachMediaElement(videoRef.current); - - // Add events to track loading state - player.on(mpegts.Events.LOADING_COMPLETE, () => { - setIsLoading(false); - }); - - player.on(mpegts.Events.METADATA_ARRIVED, () => { - setIsLoading(false); - }); - - // Add error event handler - player.on(mpegts.Events.ERROR, (errorType, errorDetail) => { - setIsLoading(false); - - // Filter out aborted errors - if (errorType !== 'NetworkError' || !errorDetail?.includes('aborted')) { - console.error('Player error:', errorType, errorDetail); - setLoadError(`Error: ${errorType}${errorDetail ? ` - ${errorDetail}` : ''}`); - } - }); - - player.load(); - - // Don't auto-play until we've loaded properly - player.on(mpegts.Events.MEDIA_INFO, () => { - setIsLoading(false); - try { - player.play().catch(e => { - console.log("Auto-play prevented:", e); - setLoadError("Auto-play was prevented. Click play to start."); - }); - } catch (e) { - console.log("Error during play:", e); - setLoadError(`Playback error: ${e.message}`); - } - }); - - // Store player instance so we can clean up later - playerRef.current = player; + // Check for MSE support first + if (!mpegts.getFeatureList().mseLivePlayback) { + setIsLoading(false); + setLoadError("Your browser doesn't support live video streaming. 
Please try Chrome or Edge."); + return; } + + // Check for basic codec support + const video = document.createElement('video'); + const h264Support = video.canPlayType('video/mp4; codecs="avc1.42E01E"'); + const aacSupport = video.canPlayType('audio/mp4; codecs="mp4a.40.2"'); + + console.log("Browser codec support - H264:", h264Support, "AAC:", aacSupport); + + // If the browser supports MSE for live playback, initialize mpegts.js + setIsLoading(true); + + const player = mpegts.createPlayer({ + type: 'mpegts', + url: streamUrl, + isLive: true, + enableWorker: true, + enableStashBuffer: false, + liveBufferLatencyChasing: true, + liveSync: true, + cors: true, + autoCleanupSourceBuffer: true, + autoCleanupMaxBackwardDuration: 10, + autoCleanupMinBackwardDuration: 5, + reuseRedirectedURL: true, + }); + + player.attachMediaElement(videoRef.current); + + // Add events to track loading state + player.on(mpegts.Events.LOADING_COMPLETE, () => { + setIsLoading(false); + }); + + player.on(mpegts.Events.METADATA_ARRIVED, () => { + setIsLoading(false); + }); + + // Enhanced error event handler with codec-specific messages + player.on(mpegts.Events.ERROR, (errorType, errorDetail) => { + setIsLoading(false); + + // Filter out aborted errors + if (errorType !== 'NetworkError' || !errorDetail?.includes('aborted')) { + console.error('Player error:', errorType, errorDetail); + + // Provide specific error messages based on error type + let errorMessage = `Error: ${errorType}`; + + if (errorType === 'MediaError') { + // Try to determine if it's an audio or video codec issue + const errorString = errorDetail?.toLowerCase() || ''; + + if (errorString.includes('audio') || errorString.includes('ac3') || errorString.includes('ac-3')) { + errorMessage = "Audio codec not supported by your browser. Try Chrome or Edge for better audio codec support."; + } else if (errorString.includes('video') || errorString.includes('h264') || errorString.includes('h.264')) { + errorMessage = "Video codec not supported by your browser. Try Chrome or Edge for better video codec support."; + } else if (errorString.includes('mse')) { + errorMessage = "Your browser doesn't support the codecs used in this stream. Try Chrome or Edge for better compatibility."; + } else { + errorMessage = "Media codec not supported by your browser. This may be due to unsupported audio (AC3) or video codecs. Try Chrome or Edge."; + } + } else if (errorDetail) { + errorMessage += ` - ${errorDetail}`; + } + + setLoadError(errorMessage); + } + }); + + player.load(); + + // Don't auto-play until we've loaded properly + player.on(mpegts.Events.MEDIA_INFO, () => { + setIsLoading(false); + try { + player.play().catch(e => { + console.log("Auto-play prevented:", e); + setLoadError("Auto-play was prevented. Click play to start."); + }); + } catch (e) { + console.log("Error during play:", e); + setLoadError(`Playback error: ${e.message}`); + } + }); + + // Store player instance so we can clean up later + playerRef.current = player; } catch (error) { setIsLoading(false); - setLoadError(`Initialization error: ${error.message}`); console.error("Error initializing player:", error); + + // Provide helpful error message based on the error + if (error.message?.includes('codec') || error.message?.includes('format')) { + setLoadError("Codec not supported by your browser. 
Please try a different browser (Chrome/Edge recommended)."); + } else { + setLoadError(`Initialization error: ${error.message}`); + } } // Cleanup when component unmounts or streamUrl changes @@ -191,7 +228,7 @@ export default function FloatingVideo() { style={{ width: '100%', height: '180px', backgroundColor: '#000' }} /> - {/* Loading overlay */} + {/* Loading overlay - only show when loading */} {isLoading && ( )} - - {/* Error message overlay */} - {!isLoading && loadError && ( - - - {loadError} - - - )}
+ + {/* Error message below video - doesn't block controls */} + {!isLoading && loadError && ( + + + {loadError} + + + )} ); From 11d3d7a15aecca49d4ebc05dac42315c22f28757 Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Tue, 10 Jun 2025 14:45:49 -0500 Subject: [PATCH 0480/1435] Add case-insensitive attribute lookup for M3U parsing --- apps/m3u/tasks.py | 18 +++++++++++++----- 1 file changed, 13 insertions(+), 5 deletions(-) diff --git a/apps/m3u/tasks.py b/apps/m3u/tasks.py index ce46a2ec..d6e0755b 100644 --- a/apps/m3u/tasks.py +++ b/apps/m3u/tasks.py @@ -172,6 +172,13 @@ def fetch_m3u_lines(account, use_cache=False): send_m3u_update(account.id, "downloading", 100, status="error", error=error_msg) return [], False +def get_case_insensitive_attr(attributes, key, default=""): + """Get attribute value using case-insensitive key lookup.""" + for attr_key, attr_value in attributes.items(): + if attr_key.lower() == key.lower(): + return attr_value + return default + def parse_extinf_line(line: str) -> dict: """ Parse an EXTINF line from an M3U file. @@ -193,7 +200,7 @@ def parse_extinf_line(line: str) -> dict: attributes_part, display_name = parts[0], parts[1].strip() attrs = dict(re.findall(r'([^\s]+)=["\']([^"\']+)["\']', attributes_part)) # Use tvg-name attribute if available; otherwise, use the display name. - name = attrs.get('tvg-name', display_name) + name = get_case_insensitive_attr(attrs, 'tvg-name', display_name) return { 'attributes': attrs, 'display_name': display_name, @@ -409,8 +416,8 @@ def process_m3u_batch(account_id, batch, groups, hash_keys): for stream_info in batch: try: name, url = stream_info["name"], stream_info["url"] - tvg_id, tvg_logo = stream_info["attributes"].get("tvg-id", ""), stream_info["attributes"].get("tvg-logo", "") - group_title = stream_info["attributes"].get("group-title", "Default Group") + tvg_id, tvg_logo = get_case_insensitive_attr(stream_info["attributes"], "tvg-id", ""), get_case_insensitive_attr(stream_info["attributes"], "tvg-logo", "") + group_title = get_case_insensitive_attr(stream_info["attributes"], "group-title", "Default Group") # Filter out disabled groups for this account if group_title not in groups: @@ -712,8 +719,9 @@ def refresh_m3u_groups(account_id, use_cache=False, full_refresh=False): extinf_count += 1 parsed = parse_extinf_line(line) if parsed: - if "group-title" in parsed["attributes"]: - group_name = parsed["attributes"]["group-title"] + group_title_attr = get_case_insensitive_attr(parsed["attributes"], "group-title", "") + if group_title_attr: + group_name = group_title_attr # Log new groups as they're discovered if group_name not in groups: logger.debug(f"Found new group for M3U account {account_id}: '{group_name}'") From d850166a803a6dc4916c43774f0e68cdc18002a9 Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Tue, 10 Jun 2025 15:47:27 -0500 Subject: [PATCH 0481/1435] Add conditional channel_group_id assignment in ChannelViewSet. This fixes an issue where if a group isn't assigned to a stream it would fail to create a channel from the stream. 
Closes #122
---
 apps/channels/api_views.py | 12 ++++++++----
 1 file changed, 8 insertions(+), 4 deletions(-)

diff --git a/apps/channels/api_views.py b/apps/channels/api_views.py
index 890dd247..bdeb31cb 100644
--- a/apps/channels/api_views.py
+++ b/apps/channels/api_views.py
@@ -322,17 +322,18 @@ class ChannelViewSet(viewsets.ModelViewSet):
         if 'tvc-guide-stationid' in stream_custom_props:
             tvc_guide_stationid = stream_custom_props['tvc-guide-stationid']

-
-
         channel_data = {
             'channel_number': channel_number,
             'name': name,
             'tvg_id': stream.tvg_id,
             'tvc_guide_stationid': tvc_guide_stationid,
-            'channel_group_id': channel_group.id,
             'streams': [stream_id],
         }

+        # Only add channel_group_id if the stream has a channel group
+        if channel_group:
+            channel_data['channel_group_id'] = channel_group.id
+
         if stream.logo_url:
             logo, _ = Logo.objects.get_or_create(url=stream.logo_url, defaults={
                 "name": stream.name or stream.tvg_id
             })
@@ -453,9 +454,12 @@ class ChannelViewSet(viewsets.ModelViewSet):
             "name": name,
             'tvc_guide_stationid': tvc_guide_stationid,
             "tvg_id": stream.tvg_id,
-            "channel_group_id": channel_group.id,
         }

+        # Only add channel_group_id if the stream has a channel group
+        if channel_group:
+            channel_data["channel_group_id"] = channel_group.id
+
         # Attempt to find existing EPGs with the same tvg-id
         epgs = EPGData.objects.filter(tvg_id=stream.tvg_id)
         if epgs:

From a2c7fc3046204a4fc067b72edb821616824166f8 Mon Sep 17 00:00:00 2001
From: SergeantPanda
Date: Tue, 10 Jun 2025 17:43:37 -0500
Subject: [PATCH 0482/1435] [New feature] Switch streams when buffering is detected.
---
 apps/proxy/config.py                  |  2 ++
 apps/proxy/ts_proxy/config_helper.py  |  8 +++++
 apps/proxy/ts_proxy/constants.py      |  1 +
 apps/proxy/ts_proxy/stream_manager.py | 48 +++++++++++++++++++++++++--
 4 files changed, 57 insertions(+), 2 deletions(-)

diff --git a/apps/proxy/config.py b/apps/proxy/config.py
index b00bd224..b369a92f 100644
--- a/apps/proxy/config.py
+++ b/apps/proxy/config.py
@@ -11,6 +11,8 @@ class BaseConfig:
     BUFFER_CHUNK_SIZE = 188 * 1361  # ~256KB
     # Redis settings
     REDIS_CHUNK_TTL = 60  # Number in seconds - Chunks expire after 1 minute
+    BUFFERING_TIMEOUT = 15  # Seconds to wait for buffering before switching streams
+    BUFFERING_SPEED = 1  # Speed below which the stream is considered buffering (1x is normal speed, 2x is double speed, etc.)
class HLSConfig(BaseConfig): MIN_SEGMENTS = 12 diff --git a/apps/proxy/ts_proxy/config_helper.py b/apps/proxy/ts_proxy/config_helper.py index 773ab378..4057a2d5 100644 --- a/apps/proxy/ts_proxy/config_helper.py +++ b/apps/proxy/ts_proxy/config_helper.py @@ -85,3 +85,11 @@ class ConfigHelper: def failover_grace_period(): """Get extra time (in seconds) to allow for stream switching before disconnecting clients""" return ConfigHelper.get('FAILOVER_GRACE_PERIOD', 20) # Default to 20 seconds + @staticmethod + def buffering_timeout(): + """Get buffering timeout in seconds""" + return ConfigHelper.get('BUFFERING_TIMEOUT', 15) # Default to 15 seconds + @staticmethod + def buffering_speed(): + """Get buffering speed in bytes per second""" + return ConfigHelper.get('BUFFERING_SPEED',1) # Default to 1x diff --git a/apps/proxy/ts_proxy/constants.py b/apps/proxy/ts_proxy/constants.py index 385d17c1..55d6e006 100644 --- a/apps/proxy/ts_proxy/constants.py +++ b/apps/proxy/ts_proxy/constants.py @@ -18,6 +18,7 @@ class ChannelState: ERROR = "error" STOPPING = "stopping" STOPPED = "stopped" + BUFFERING = "buffering" # Event types class EventType: diff --git a/apps/proxy/ts_proxy/stream_manager.py b/apps/proxy/ts_proxy/stream_manager.py index 6e3c9e73..f8a7323b 100644 --- a/apps/proxy/ts_proxy/stream_manager.py +++ b/apps/proxy/ts_proxy/stream_manager.py @@ -40,6 +40,10 @@ class StreamManager: self.url_switching = False self.url_switch_start_time = 0 self.url_switch_timeout = ConfigHelper.url_switch_timeout() + self.buffering = False + self.buffering_timeout = ConfigHelper.buffering_timeout() + self.buffering_speed = ConfigHelper.buffering_speed() + self.buffering_start_time = None # Store worker_id for ownership checks self.worker_id = worker_id @@ -545,7 +549,6 @@ class StreamManager: actual_fps = None if ffmpeg_fps is not None and ffmpeg_speed is not None and ffmpeg_speed > 0: actual_fps = ffmpeg_fps / ffmpeg_speed - # Store in Redis if we have valid data if any(x is not None for x in [ffmpeg_speed, ffmpeg_fps, actual_fps, ffmpeg_bitrate]): self._update_ffmpeg_stats_in_redis(ffmpeg_speed, ffmpeg_fps, actual_fps, ffmpeg_bitrate) @@ -553,10 +556,51 @@ class StreamManager: # Fix the f-string formatting actual_fps_str = f"{actual_fps:.1f}" if actual_fps is not None else "N/A" ffmpeg_bitrate_str = f"{ffmpeg_bitrate:.1f}" if ffmpeg_bitrate is not None else "N/A" - + # Log the stats logger.debug(f"FFmpeg stats - Speed: {ffmpeg_speed}x, FFmpeg FPS: {ffmpeg_fps}, " f"Actual FPS: {actual_fps_str}, " f"Bitrate: {ffmpeg_bitrate_str} kbps") + # If we have a valid speed, check for buffering + if ffmpeg_speed is not None and ffmpeg_speed < self.buffering_speed: + if self.buffering: + # Buffering is still ongoing, check for how long + if self.buffering_start_time is None: + self.buffering_start_time = time.time() + else: + buffering_duration = time.time() - self.buffering_start_time + if buffering_duration > self.buffering_timeout: + # Buffering timeout reached, log error and try next stream + logger.error(f"Buffering timeout reached for channel {self.channel_id} after {buffering_duration:.1f} seconds") + # Send next stream request + if self._try_next_stream(): + logger.info(f"Switched to next stream for channel {self.channel_id} after buffering timeout") + # Reset buffering state + self.buffering = False + self.buffering_start_time = None + else: + logger.error(f"Failed to switch to next stream for channel {self.channel_id} after buffering timeout") + else: + # Buffering just started, set the flag and start timer + 
self.buffering = True + self.buffering_start_time = time.time() + logger.warning(f"Buffering started for channel {self.channel_id} - speed: {ffmpeg_speed}x") + # Log buffering warning + logger.debug(f"FFmpeg speed on channel {self.channel_id} is below {self.buffering_speed} ({ffmpeg_speed}x) - buffering detected") + # Set channel state to buffering + if hasattr(self.buffer, 'redis_client') and self.buffer.redis_client: + metadata_key = RedisKeys.channel_metadata(self.channel_id) + self.buffer.redis_client.hset(metadata_key, ChannelMetadataField.STATE, ChannelState.BUFFERING) + elif ffmpeg_speed is not None and ffmpeg_speed >= self.buffering_speed: + # Speed is good, check if we were buffering + if self.buffering: + # Reset buffering state + logger.info(f"Buffering ended for channel {self.channel_id} - speed: {ffmpeg_speed}x") + self.buffering = False + self.buffering_start_time = None + # Set channel state to active if speed is good + if hasattr(self.buffer, 'redis_client') and self.buffer.redis_client: + metadata_key = RedisKeys.channel_metadata(self.channel_id) + self.buffer.redis_client.hset(metadata_key, ChannelMetadataField.STATE, ChannelState.ACTIVE) except Exception as e: logger.debug(f"Error parsing FFmpeg stats: {e}") From e753d9b9f810f9b4933257cb9941512be2c9bd55 Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Tue, 10 Jun 2025 19:16:52 -0500 Subject: [PATCH 0483/1435] Fixes a bug where stream profile name wouldn't update in stats. (Was outputting name string instead of ID --- apps/proxy/ts_proxy/url_utils.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/apps/proxy/ts_proxy/url_utils.py b/apps/proxy/ts_proxy/url_utils.py index e3b1c264..dbd3c5dd 100644 --- a/apps/proxy/ts_proxy/url_utils.py +++ b/apps/proxy/ts_proxy/url_utils.py @@ -172,7 +172,7 @@ def get_stream_info_for_switch(channel_id: str, target_stream_id: Optional[int] # Get transcode info from the channel's stream profile stream_profile = channel.get_stream_profile() transcode = not (stream_profile.is_proxy() or stream_profile is None) - profile_value = str(stream_profile) + profile_value = stream_profile.id return { 'url': stream_url, From 7812a410b3dd78e09769a7e4fc366cd26942791c Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Tue, 10 Jun 2025 21:17:30 -0500 Subject: [PATCH 0484/1435] Allow users to change proxy settings. 
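The buffering failover in PATCH 0482 above reduces to a small state machine: mark the stream as buffering while FFmpeg's reported speed sits below the threshold, clear the flag once the speed recovers, and switch streams when buffering has lasted longer than the timeout. A minimal sketch under those assumptions (BufferingDetector is an illustrative name, not part of the patch):

import time

class BufferingDetector:
    def __init__(self, speed_threshold=1.0, timeout=15):
        self.speed_threshold = speed_threshold  # e.g. BUFFERING_SPEED
        self.timeout = timeout                  # e.g. BUFFERING_TIMEOUT
        self.buffering_since = None

    def update(self, speed):
        """Feed the latest FFmpeg speed sample; returns True when a stream switch is due."""
        if speed is None:
            return False
        if speed >= self.speed_threshold:
            self.buffering_since = None  # stream recovered
            return False
        if self.buffering_since is None:
            self.buffering_since = time.time()  # buffering just started
            return False
        return time.time() - self.buffering_since > self.timeout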
--- apps/proxy/ts_proxy/config_helper.py | 46 ++++++++++++++++++++- core/api_views.py | 62 +++++++++++++++++++++++++++- core/models.py | 38 +++++++++++++++++ core/serializers.py | 42 ++++++++++++++++++- 4 files changed, 183 insertions(+), 5 deletions(-) diff --git a/apps/proxy/ts_proxy/config_helper.py b/apps/proxy/ts_proxy/config_helper.py index 4057a2d5..28474b37 100644 --- a/apps/proxy/ts_proxy/config_helper.py +++ b/apps/proxy/ts_proxy/config_helper.py @@ -34,6 +34,13 @@ class ConfigHelper: @staticmethod def channel_shutdown_delay(): """Get channel shutdown delay in seconds""" + try: + from core.models import ProxySettings + settings = ProxySettings.objects.first() + if settings: + return settings.channel_shutdown_delay + except: + pass return ConfigHelper.get('CHANNEL_SHUTDOWN_DELAY', 0) @staticmethod @@ -54,6 +61,13 @@ class ConfigHelper: @staticmethod def redis_chunk_ttl(): """Get Redis chunk TTL in seconds""" + try: + from core.models import ProxySettings + settings = ProxySettings.objects.first() + if settings: + return settings.redis_chunk_ttl + except: + pass return ConfigHelper.get('REDIS_CHUNK_TTL', 60) @staticmethod @@ -85,11 +99,39 @@ class ConfigHelper: def failover_grace_period(): """Get extra time (in seconds) to allow for stream switching before disconnecting clients""" return ConfigHelper.get('FAILOVER_GRACE_PERIOD', 20) # Default to 20 seconds + @staticmethod def buffering_timeout(): """Get buffering timeout in seconds""" + try: + from core.models import ProxySettings + settings = ProxySettings.objects.first() + if settings: + return settings.buffering_timeout + except: + pass return ConfigHelper.get('BUFFERING_TIMEOUT', 15) # Default to 15 seconds + @staticmethod def buffering_speed(): - """Get buffering speed in bytes per second""" - return ConfigHelper.get('BUFFERING_SPEED',1) # Default to 1x + """Get buffering speed threshold""" + try: + from core.models import ProxySettings + settings = ProxySettings.objects.first() + if settings: + return settings.buffering_speed + except: + pass + return ConfigHelper.get('BUFFERING_SPEED', 1) # Default to 1x + + @staticmethod + def channel_init_grace_period(): + """Get channel initialization grace period in seconds""" + try: + from core.models import ProxySettings + settings = ProxySettings.objects.first() + if settings: + return settings.channel_init_grace_period + except: + pass + return ConfigHelper.get('CHANNEL_INIT_GRACE_PERIOD', 5) # Default to 5 seconds diff --git a/core/api_views.py b/core/api_views.py index 77473b5d..84eb4918 100644 --- a/core/api_views.py +++ b/core/api_views.py @@ -1,10 +1,11 @@ # core/api_views.py from rest_framework import viewsets, status +from rest_framework.decorators import action from rest_framework.response import Response from django.shortcuts import get_object_or_404 -from .models import UserAgent, StreamProfile, CoreSettings, STREAM_HASH_KEY -from .serializers import UserAgentSerializer, StreamProfileSerializer, CoreSettingsSerializer +from .models import UserAgent, StreamProfile, CoreSettings, ProxySettings, STREAM_HASH_KEY +from .serializers import UserAgentSerializer, StreamProfileSerializer, CoreSettingsSerializer, ProxySettingsSerializer from rest_framework.permissions import IsAuthenticated from rest_framework.decorators import api_view, permission_classes from drf_yasg.utils import swagger_auto_schema @@ -44,6 +45,63 @@ class CoreSettingsViewSet(viewsets.ModelViewSet): return response +class ProxySettingsViewSet(viewsets.ModelViewSet): + """ + API endpoint for proxy settings. 
+ This is treated as a singleton: only one instance should exist. + """ + serializer_class = ProxySettingsSerializer + + def get_queryset(self): + # Always return the singleton settings + return ProxySettings.objects.all() + + def get_object(self): + # Always return the singleton settings (create if doesn't exist) + return ProxySettings.get_settings() + + def list(self, request, *args, **kwargs): + # Return the singleton settings as a single object + settings = self.get_object() + serializer = self.get_serializer(settings) + return Response(serializer.data) + + def retrieve(self, request, *args, **kwargs): + # Always return the singleton settings regardless of ID + settings = self.get_object() + serializer = self.get_serializer(settings) + return Response(serializer.data) + + def update(self, request, *args, **kwargs): + # Update the singleton settings + settings = self.get_object() + serializer = self.get_serializer(settings, data=request.data, partial=True) + serializer.is_valid(raise_exception=True) + serializer.save() + return Response(serializer.data) + + def partial_update(self, request, *args, **kwargs): + return self.update(request, *args, **kwargs) + + @action(detail=False, methods=['get', 'patch']) + def settings(self, request): + """ + Get or update the proxy settings. + """ + settings = self.get_object() + + if request.method == 'GET': + # Return current settings + serializer = self.get_serializer(settings) + return Response(serializer.data) + + elif request.method == 'PATCH': + # Update settings + serializer = self.get_serializer(settings, data=request.data, partial=True) + serializer.is_valid(raise_exception=True) + serializer.save() + return Response(serializer.data) + @swagger_auto_schema( method='get', operation_description="Endpoint for environment details", diff --git a/core/models.py b/core/models.py index fe7e9eb5..3af21628 100644 --- a/core/models.py +++ b/core/models.py @@ -183,3 +183,41 @@ class CoreSettings(models.Model): return cls.objects.get(key=AUTO_IMPORT_MAPPED_FILES).value except cls.DoesNotExist: return None + +class ProxySettings(models.Model): + """Proxy configuration settings""" + + buffering_timeout = models.IntegerField( + default=15, + help_text="Seconds to wait for buffering before switching streams" + ) + + buffering_speed = models.FloatField( + default=1.0, + help_text="Speed threshold to consider stream buffering (1.0 = normal speed)" + ) + + redis_chunk_ttl = models.IntegerField( + default=60, + help_text="Time in seconds before Redis chunks expire" + ) + + channel_shutdown_delay = models.IntegerField( + default=0, + help_text="Seconds to wait after last client before shutting down channel" + ) + + channel_init_grace_period = models.IntegerField( + default=5, + help_text="Seconds to wait for first client after channel initialization" + ) + + created_at = models.DateTimeField(auto_now_add=True) + updated_at = models.DateTimeField(auto_now=True) + + class Meta: + verbose_name = "Proxy Settings" + verbose_name_plural = "Proxy Settings" + + def __str__(self): + return "Proxy Settings" diff --git a/core/serializers.py b/core/serializers.py index c80ad630..4648a74a 100644 --- a/core/serializers.py +++ b/core/serializers.py @@ -1,7 +1,7 @@ # core/serializers.py from rest_framework import serializers -from .models import UserAgent, StreamProfile, CoreSettings +from .models import CoreSettings, UserAgent, StreamProfile, ProxySettings class UserAgentSerializer(serializers.ModelSerializer): class Meta: @@ -17,3 +17,43 @@ class 
CoreSettingsSerializer(serializers.ModelSerializer): class Meta: model = CoreSettings fields = '__all__' + +class ProxySettingsSerializer(serializers.ModelSerializer): + class Meta: + model = ProxySettings + fields = [ + 'id', + 'buffering_timeout', + 'buffering_speed', + 'redis_chunk_ttl', + 'channel_shutdown_delay', + 'channel_init_grace_period', + 'created_at', + 'updated_at' + ] + read_only_fields = ['id', 'created_at', 'updated_at'] + + def validate_buffering_timeout(self, value): + if value < 1 or value > 300: + raise serializers.ValidationError("Buffering timeout must be between 1 and 300 seconds") + return value + + def validate_buffering_speed(self, value): + if value < 0.1 or value > 10.0: + raise serializers.ValidationError("Buffering speed must be between 0.1 and 10.0") + return value + + def validate_redis_chunk_ttl(self, value): + if value < 10 or value > 3600: + raise serializers.ValidationError("Redis chunk TTL must be between 10 and 3600 seconds") + return value + + def validate_channel_shutdown_delay(self, value): + if value < 0 or value > 300: + raise serializers.ValidationError("Channel shutdown delay must be between 0 and 300 seconds") + return value + + def validate_channel_init_grace_period(self, value): + if value < 1 or value > 60: + raise serializers.ValidationError("Channel init grace period must be between 1 and 60 seconds") + return value From 2add2c1dd21c8faf0ea30c36e0f814a65777a3be Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Tue, 10 Jun 2025 21:23:04 -0500 Subject: [PATCH 0485/1435] Add new settings to database. --- core/migrations/0013_proxysettings.py | 30 +++++++++++++++++++++++++++ 1 file changed, 30 insertions(+) create mode 100644 core/migrations/0013_proxysettings.py diff --git a/core/migrations/0013_proxysettings.py b/core/migrations/0013_proxysettings.py new file mode 100644 index 00000000..1a69c4ef --- /dev/null +++ b/core/migrations/0013_proxysettings.py @@ -0,0 +1,30 @@ +# Generated by Django 5.1.6 on 2025-06-11 02:22 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('core', '0012_default_active_m3u_accounts'), + ] + + operations = [ + migrations.CreateModel( + name='ProxySettings', + fields=[ + ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('buffering_timeout', models.IntegerField(default=15, help_text='Seconds to wait for buffering before switching streams')), + ('buffering_speed', models.FloatField(default=1.0, help_text='Speed threshold to consider stream buffering (1.0 = normal speed)')), + ('redis_chunk_ttl', models.IntegerField(default=60, help_text='Time in seconds before Redis chunks expire')), + ('channel_shutdown_delay', models.IntegerField(default=0, help_text='Seconds to wait after last client before shutting down channel')), + ('channel_init_grace_period', models.IntegerField(default=5, help_text='Seconds to wait for first client after channel initialization')), + ('created_at', models.DateTimeField(auto_now_add=True)), + ('updated_at', models.DateTimeField(auto_now=True)), + ], + options={ + 'verbose_name': 'Proxy Settings', + 'verbose_name_plural': 'Proxy Settings', + }, + ), + ] From e2e8b7088aee31f95afbd471781067a8e385a344 Mon Sep 17 00:00:00 2001 From: dekzter Date: Wed, 11 Jun 2025 08:23:51 -0400 Subject: [PATCH 0486/1435] Fixed bad merge conflict --- apps/output/views.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/apps/output/views.py b/apps/output/views.py index 0958e1fd..577f93d0 100644 --- 
a/apps/output/views.py +++ b/apps/output/views.py @@ -32,8 +32,6 @@ def epg_endpoint(request, profile_name=None, user=None): return generate_epg(request, profile_name, user) -def generate_m3u(request, profile_name=None, user=None): - @csrf_exempt @require_http_methods(["GET", "POST"]) def generate_m3u(request, profile_name=None, user=None): From 788667b6877eb15dff5054f3667abebbe6d11385 Mon Sep 17 00:00:00 2001 From: dekzter Date: Wed, 11 Jun 2025 08:24:32 -0400 Subject: [PATCH 0487/1435] better error checking, only warn for UI blocking --- core/api_views.py | 53 +++++++++++++++++++++++++-------- frontend/src/pages/Settings.jsx | 39 +++++++++++++++++++++--- 2 files changed, 76 insertions(+), 16 deletions(-) diff --git a/core/api_views.py b/core/api_views.py index bf6ee2ba..2f01b503 100644 --- a/core/api_views.py +++ b/core/api_views.py @@ -5,7 +5,13 @@ import ipaddress from rest_framework import viewsets, status from rest_framework.response import Response from django.shortcuts import get_object_or_404 -from .models import UserAgent, StreamProfile, CoreSettings, STREAM_HASH_KEY +from .models import ( + UserAgent, + StreamProfile, + CoreSettings, + STREAM_HASH_KEY, + NETWORK_ACCESS, +) from .serializers import ( UserAgentSerializer, StreamProfileSerializer, @@ -63,18 +69,41 @@ class CoreSettingsViewSet(viewsets.ModelViewSet): def check(self, request, *args, **kwargs): data = request.data - client_ip = ipaddress.ip_address(get_client_ip(request)) - in_network = [] - key = data.get("key") - value = json.loads(data.get("value", "{}")) - for key, val in value.items(): - cidrs = val.split(",") - for cidr in cidrs: - network = ipaddress.ip_network(cidr) - if client_ip not in network: - in_network.append(cidr) + if data.get("key") == NETWORK_ACCESS: + client_ip = ipaddress.ip_address(get_client_ip(request)) - return Response(in_network, status=status.HTTP_200_OK) + in_network = {} + invalid = [] + + value = json.loads(data.get("value", "{}")) + for key, val in value.items(): + in_network[key] = [] + cidrs = val.split(",") + for cidr in cidrs: + try: + network = ipaddress.ip_network(cidr) + + if client_ip in network: + in_network[key] = [] + break + + in_network[key].append(cidr) + except: + invalid.append(cidr) + + if len(invalid) > 0: + return Response( + { + "error": True, + "message": "Invalid CIDR(s)", + "data": invalid, + }, + status=status.HTTP_200_OK, + ) + + return Response(in_network, status=status.HTTP_200_OK) + + return Response({}, status=status.HTTP_200_OK) @swagger_auto_schema( diff --git a/frontend/src/pages/Settings.jsx b/frontend/src/pages/Settings.jsx index b4fc37cc..073af337 100644 --- a/frontend/src/pages/Settings.jsx +++ b/frontend/src/pages/Settings.jsx @@ -34,6 +34,7 @@ const SettingsPage = () => { const [accordianValue, setAccordianValue] = useState(null); const [networkAccessSaved, setNetworkAccessSaved] = useState(false); + const [networkAccessError, setNetworkAccessError] = useState(null); const [networkAccessConfirmOpen, setNetworkAccessConfirmOpen] = useState(false); const [netNetworkAccessConfirmCIDRs, setNetNetworkAccessConfirmCIDRs] = @@ -316,6 +317,21 @@ const SettingsPage = () => { acc[key] = '0.0.0.0/0'; return acc; }, {}), + validate: Object.keys(NETWORK_ACCESS_OPTIONS).reduce((acc, key) => { + acc[key] = (value) => { + const cidrs = value.split(','); + for (const cidr of cidrs) { + if (cidr.match(/^([0-9]{1,3}\.){3}[0-9]{1,3}\/\d+$/)) { + continue; + } + + return 'Invalid CIDR range'; + } + + return null; + }; + return acc; + }, {}), }); useEffect(() => { @@ 
-383,16 +399,24 @@ const SettingsPage = () => { const onNetworkAccessSubmit = async () => { setNetworkAccessSaved(false); + setNetworkAccessError(null); const check = await API.checkSetting({ ...settings['network-access'], value: JSON.stringify(networkAccessForm.getValues()), }); - if (check.length == 0) { + if (check.error && check.message) { + setNetworkAccessError(`${check.message}: ${check.data}`); + return; + } + + // For now, only warn if we're blocking the UI + const blockedAccess = check.UI; + if (blockedAccess.length == 0) { return saveNetworkAccess(); } - setNetNetworkAccessConfirmCIDRs(check); + setNetNetworkAccessConfirmCIDRs(blockedAccess); setNetworkAccessConfirmOpen(true); }; @@ -627,6 +651,13 @@ const SettingsPage = () => { title="Saved Successfully" > )} + {networkAccessError && ( + + )} {Object.entries(NETWORK_ACCESS_OPTIONS).map( ([key, config]) => { return ( @@ -672,8 +703,8 @@ const SettingsPage = () => { message={ <> - Your client is included in the following CIDRs and could block - access Are you sure you want to proceed? + Your client is not included in the allowed networks for the web + UI. Are you sure you want to proceed?
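A note on the check endpoint above: it leans entirely on Python's
ipaddress module. A client is only reported for a given key when it falls
inside none of that key's allowed networks, and malformed CIDRs raise
ValueError, which the endpoint collects into the "invalid" payload instead
of failing. The client-side regex added to Settings.jsx is only a coarse
shape check (it would accept 999.0.0.0/99); the authoritative validation
stays on the server. A standalone illustration of the ipaddress semantics,
using hypothetical addresses rather than values from this patch:

    import ipaddress

    client_ip = ipaddress.ip_address('192.168.1.50')
    allowed = ['10.0.0.0/8', '192.168.1.0/24']

    # Blocked only if no allowed network contains the client.
    networks = [ipaddress.ip_network(cidr) for cidr in allowed]
    print(not any(client_ip in net for net in networks))  # False: the /24 covers it

    # Malformed ranges raise ValueError rather than returning a result.
    try:
        ipaddress.ip_network('192.168.1.0/33')
    except ValueError as exc:
        print(f'invalid CIDR: {exc}')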
    From 743cf4e297566cef9ef4cf59e4cdd10baf572fd2 Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Wed, 11 Jun 2025 16:55:14 -0500 Subject: [PATCH 0488/1435] Smarter parsing of ffmpeg stats output. --- apps/proxy/ts_proxy/stream_manager.py | 143 ++++++++++++++------------ 1 file changed, 76 insertions(+), 67 deletions(-) diff --git a/apps/proxy/ts_proxy/stream_manager.py b/apps/proxy/ts_proxy/stream_manager.py index f8a7323b..c413a9f5 100644 --- a/apps/proxy/ts_proxy/stream_manager.py +++ b/apps/proxy/ts_proxy/stream_manager.py @@ -386,90 +386,99 @@ class StreamManager: buffer = b"" last_stats_line = b"" - # Read in small chunks + # Read byte by byte for immediate detection while self.transcode_process and self.transcode_process.stderr: try: - chunk = self.transcode_process.stderr.read(256) # Smaller chunks for real-time processing - if not chunk: + # Read one byte at a time for immediate processing + byte = self.transcode_process.stderr.read(1) + if not byte: break - buffer += chunk + buffer += byte - # Look for stats updates (overwrite previous stats with \r) - if b'\r' in buffer and b"frame=" in buffer: - # Split on \r to handle overwriting stats - parts = buffer.split(b'\r') + # Check for frame= at the start of buffer (new stats line) + if buffer == b"frame=": + # We detected the start of a stats line, read until we get a complete line + # or hit a carriage return (which overwrites the previous stats) + while True: + next_byte = self.transcode_process.stderr.read(1) + if not next_byte: + break - # Process all parts except the last (which might be incomplete) - for i, part in enumerate(parts[:-1]): - if part.strip(): - if part.startswith(b"frame=") or b"frame=" in part: - # This is a stats line - keep it intact - try: - stats_text = part.decode('utf-8', errors='ignore').strip() - if stats_text and "frame=" in stats_text: - # Extract just the stats portion if there's other content - if "frame=" in stats_text: - frame_start = stats_text.find("frame=") - stats_text = stats_text[frame_start:] + buffer += next_byte - self._parse_ffmpeg_stats(stats_text) - self._log_stderr_content(stats_text) - last_stats_line = part - except Exception as e: - logger.debug(f"Error parsing stats line: {e}") - else: - # Regular content - process line by line - line_content = part - while b'\n' in line_content: - line, line_content = line_content.split(b'\n', 1) - if line.strip(): - self._log_stderr_content(line.decode('utf-8', errors='ignore')) + # Break on carriage return (stats overwrite) or newline + if next_byte in (b'\r', b'\n'): + break - # Handle remaining content without newline - if line_content.strip(): - self._log_stderr_content(line_content.decode('utf-8', errors='ignore')) + # Also break if we have enough data for a typical stats line + if len(buffer) > 200: # Typical stats line length + break - # Keep the last part as it might be incomplete - buffer = parts[-1] + # Process the stats line immediately + if buffer.strip(): + try: + stats_text = buffer.decode('utf-8', errors='ignore').strip() + if stats_text and "frame=" in stats_text: + self._parse_ffmpeg_stats(stats_text) + self._log_stderr_content(stats_text) + except Exception as e: + logger.debug(f"Error parsing immediate stats line: {e}") + + # Clear buffer after processing + buffer = b"" + continue # Handle regular line breaks for non-stats content - elif b'\n' in buffer: - while b'\n' in buffer: - line, buffer = buffer.split(b'\n', 1) - if line.strip(): - line_text = line.decode('utf-8', errors='ignore').strip() - if line_text and not 
line_text.startswith("frame="): - self._log_stderr_content(line_text) + elif byte == b'\n': + if buffer.strip(): + line_text = buffer.decode('utf-8', errors='ignore').strip() + if line_text and not line_text.startswith("frame="): + self._log_stderr_content(line_text) + buffer = b"" - # If we have a potential stats line in buffer without line breaks - elif b"frame=" in buffer and (b"speed=" in buffer or len(buffer) > 200): - # We likely have a complete or substantial stats line - try: - stats_text = buffer.decode('utf-8', errors='ignore').strip() - if "frame=" in stats_text: - # Extract just the stats portion - frame_start = stats_text.find("frame=") - stats_text = stats_text[frame_start:] + # Handle carriage returns (potential stats overwrite) + elif byte == b'\r': + # Check if this might be a stats line + if b"frame=" in buffer: + try: + stats_text = buffer.decode('utf-8', errors='ignore').strip() + if stats_text and "frame=" in stats_text: + self._parse_ffmpeg_stats(stats_text) + self._log_stderr_content(stats_text) + except Exception as e: + logger.debug(f"Error parsing stats on carriage return: {e}") + elif buffer.strip(): + # Regular content with carriage return + line_text = buffer.decode('utf-8', errors='ignore').strip() + if line_text: + self._log_stderr_content(line_text) + buffer = b"" - self._parse_ffmpeg_stats(stats_text) - self._log_stderr_content(stats_text) - buffer = b"" # Clear buffer after processing - except Exception as e: - logger.debug(f"Error parsing buffered stats: {e}") - - # Prevent buffer from growing too large - if len(buffer) > 4096: - # Try to preserve any potential stats line at the end - if b"frame=" in buffer[-1024:]: - buffer = buffer[-1024:] - else: - buffer = buffer[-512:] + # Prevent buffer from growing too large for non-stats content + elif len(buffer) > 1024 and b"frame=" not in buffer: + # Process whatever we have if it's not a stats line + if buffer.strip(): + line_text = buffer.decode('utf-8', errors='ignore').strip() + if line_text: + self._log_stderr_content(line_text) + buffer = b"" except Exception as e: - logger.error(f"Error reading stderr: {e}") + logger.error(f"Error reading stderr byte: {e}") break + # Process any remaining buffer content + if buffer.strip(): + try: + remaining_text = buffer.decode('utf-8', errors='ignore').strip() + if remaining_text: + if "frame=" in remaining_text: + self._parse_ffmpeg_stats(remaining_text) + self._log_stderr_content(remaining_text) + except Exception as e: + logger.debug(f"Error processing remaining buffer: {e}") + except Exception as e: # Catch any other exceptions in the thread to prevent crashes try: From bd53837f804736bd9eca6ac4c4a51da32786e634 Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Wed, 11 Jun 2025 17:20:27 -0500 Subject: [PATCH 0489/1435] Better detection of input vs output stream information. 
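Background for the check below: FFmpeg's stderr lists both sides of the
pipeline as "Stream #..." lines, but for MPEG-TS input the demuxer appends
a hex stream identifier such as [0x100], which the output lines lack. That
identifier is what the new condition keys on. A regex-based variation of
the same test (illustrative only; the diff itself uses plain substring
checks on the lower-cased line):

    import re

    # Input line:  Stream #0:0[0x100]: Video: h264 ...
    # Output line: Stream #0:0: Video: mpeg2video ...
    INPUT_STREAM = re.compile(r'stream #0:\d+\[0x[0-9a-f]+\]', re.IGNORECASE)

    def is_input_stream_line(line: str) -> bool:
        return INPUT_STREAM.search(line) is not None

    print(is_input_stream_line('Stream #0:0[0x100]: Video: h264 (high)'))  # True
    print(is_input_stream_line('Stream #0:0: Video: mpeg2video'))          # False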
--- apps/proxy/ts_proxy/stream_manager.py | 17 +++++++++++------ 1 file changed, 11 insertions(+), 6 deletions(-) diff --git a/apps/proxy/ts_proxy/stream_manager.py b/apps/proxy/ts_proxy/stream_manager.py index c413a9f5..a57f1384 100644 --- a/apps/proxy/ts_proxy/stream_manager.py +++ b/apps/proxy/ts_proxy/stream_manager.py @@ -497,13 +497,18 @@ class StreamManager: content_lower = content.lower() # Check for stream info lines first and delegate to ChannelService + # Only parse INPUT streams (which have hex identifiers like [0x100]) not output streams if "stream #" in content_lower and ("video:" in content_lower or "audio:" in content_lower): - from .services.channel_service import ChannelService - if "video:" in content_lower: - ChannelService.parse_and_store_stream_info(self.channel_id, content, "video") - elif "audio:" in content_lower: - ChannelService.parse_and_store_stream_info(self.channel_id, content, "audio") - + # Check if this is an input stream by looking for the hex identifier pattern [0x...] + if "stream #0:" in content_lower and "[0x" in content_lower: + from .services.channel_service import ChannelService + if "video:" in content_lower: + ChannelService.parse_and_store_stream_info(self.channel_id, content, "video") + elif "audio:" in content_lower: + ChannelService.parse_and_store_stream_info(self.channel_id, content, "audio") + else: + # This is likely an output stream (no hex identifier), don't parse it + logger.debug(f"Skipping output stream info: {content}") # Determine log level based on content if any(keyword in content_lower for keyword in ['error', 'failed', 'cannot', 'invalid', 'corrupt']): logger.error(f"FFmpeg stderr: {content}") From a99a6431b2a258254a76883c07ad9cacbad59285 Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Thu, 12 Jun 2025 10:45:57 -0500 Subject: [PATCH 0490/1435] More merge fixes. --- core/api_views.py | 13 +++---------- ...{0013_proxysettings.py => 0014_proxysettings.py} | 4 ++-- 2 files changed, 5 insertions(+), 12 deletions(-) rename core/migrations/{0013_proxysettings.py => 0014_proxysettings.py} (92%) diff --git a/core/api_views.py b/core/api_views.py index db9725ee..01bd3f5a 100644 --- a/core/api_views.py +++ b/core/api_views.py @@ -4,13 +4,11 @@ import json import ipaddress import logging from rest_framework import viewsets, status -from rest_framework.decorators import action from rest_framework.response import Response from django.shortcuts import get_object_or_404 -<<<<<<< HEAD from rest_framework.permissions import IsAuthenticated -from rest_framework.decorators import api_view, permission_classes -======= +from rest_framework.decorators import api_view, permission_classes, action +from drf_yasg.utils import swagger_auto_schema from .models import ( UserAgent, StreamProfile, @@ -25,9 +23,7 @@ from .serializers import ( CoreSettingsSerializer, ProxySettingsSerializer, ) -from rest_framework.decorators import api_view, permission_classes, action ->>>>>>> 59e4a28b311d00d073f238e01e735d68a821c3f3 -from drf_yasg.utils import swagger_auto_schema + import socket import requests import os @@ -116,7 +112,6 @@ class CoreSettingsViewSet(viewsets.ModelViewSet): return Response({}, status=status.HTTP_200_OK) -<<<<<<< HEAD class ProxySettingsViewSet(viewsets.ModelViewSet): """ API endpoint for proxy settings. 
@@ -173,10 +168,8 @@ class ProxySettingsViewSet(viewsets.ModelViewSet): serializer.is_valid(raise_exception=True) serializer.save() return Response(serializer.data) -======= ->>>>>>> 59e4a28b311d00d073f238e01e735d68a821c3f3 @swagger_auto_schema( method="get", diff --git a/core/migrations/0013_proxysettings.py b/core/migrations/0014_proxysettings.py similarity index 92% rename from core/migrations/0013_proxysettings.py rename to core/migrations/0014_proxysettings.py index 1a69c4ef..1f50adea 100644 --- a/core/migrations/0013_proxysettings.py +++ b/core/migrations/0014_proxysettings.py @@ -1,4 +1,4 @@ -# Generated by Django 5.1.6 on 2025-06-11 02:22 +# Generated by Django 5.1.6 on 2025-06-12 15:44 from django.db import migrations, models @@ -6,7 +6,7 @@ from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ - ('core', '0012_default_active_m3u_accounts'), + ('core', '0013_default_network_access_settings'), ] operations = [ From b4ae6911c95cb4c2217cb80696f2eeb213d9e418 Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Thu, 12 Jun 2025 15:42:26 -0500 Subject: [PATCH 0491/1435] Pull settings from database --- apps/proxy/config.py | 70 ++++++++++++++++++++++++---- apps/proxy/ts_proxy/config_helper.py | 45 ++---------------- core/models.py | 15 ++++++ 3 files changed, 81 insertions(+), 49 deletions(-) diff --git a/apps/proxy/config.py b/apps/proxy/config.py index b369a92f..bb53beba 100644 --- a/apps/proxy/config.py +++ b/apps/proxy/config.py @@ -9,11 +9,28 @@ class BaseConfig: CONNECTION_TIMEOUT = 10 # seconds to wait for initial connection MAX_STREAM_SWITCHES = 10 # Maximum number of stream switch attempts before giving up BUFFER_CHUNK_SIZE = 188 * 1361 # ~256KB - # Redis settings - REDIS_CHUNK_TTL = 60 # Number in seconds - Chunks expire after 1 minute BUFFERING_TIMEOUT = 15 # Seconds to wait for buffering before switching streams BUFFER_SPEED = 1 # What speed to condsider the stream buffering, 1x is normal speed, 2x is double speed, etc. + @classmethod + def get_proxy_settings(cls): + """Get ProxySettings from database with fallback to defaults""" + try: + from core.models import ProxySettings + return ProxySettings.objects.first() + except Exception: + return None + + @classmethod + def get_redis_chunk_ttl(cls): + """Get Redis chunk TTL from database or default""" + settings = cls.get_proxy_settings() + return settings.redis_chunk_ttl if settings else 60 + + @property + def REDIS_CHUNK_TTL(self): + return self.get_redis_chunk_ttl() + class HLSConfig(BaseConfig): MIN_SEGMENTS = 12 MAX_SEGMENTS = 16 @@ -42,21 +59,14 @@ class TSConfig(BaseConfig): # Resource management CLEANUP_INTERVAL = 60 # Check for inactive channels every 60 seconds - CHANNEL_SHUTDOWN_DELAY = 0 # How long to wait after last client before shutdown (seconds) # Client tracking settings CLIENT_RECORD_TTL = 5 # How long client records persist in Redis (seconds). Client will be considered MIA after this time. 
CLEANUP_CHECK_INTERVAL = 1 # How often to check for disconnected clients (seconds) - CHANNEL_INIT_GRACE_PERIOD = 5 # How long to wait for first client after initialization (seconds) CLIENT_HEARTBEAT_INTERVAL = 1 # How often to send client heartbeats (seconds) GHOST_CLIENT_MULTIPLIER = 5.0 # How many heartbeat intervals before client considered ghost (5 would mean 5 secondsif heartbeat interval is 1) CLIENT_WAIT_TIMEOUT = 30 # Seconds to wait for client to connect - - # TS packets are 188 bytes - # Make chunk size a multiple of TS packet size for perfect alignment - # ~1MB is ideal for streaming (matches typical media buffer sizes) - # Stream health and recovery settings MAX_HEALTH_RECOVERY_ATTEMPTS = 2 # Maximum times to attempt recovery for a single stream MAX_RECONNECT_ATTEMPTS = 3 # Maximum reconnects to try before switching streams @@ -64,5 +74,47 @@ class TSConfig(BaseConfig): FAILOVER_GRACE_PERIOD = 20 # Extra time (seconds) to allow for stream switching before disconnecting clients URL_SWITCH_TIMEOUT = 20 # Max time allowed for a stream switch operation + # Database-dependent settings with fallbacks + @classmethod + def get_channel_shutdown_delay(cls): + """Get channel shutdown delay from database or default""" + settings = cls.get_proxy_settings() + return settings.channel_shutdown_delay if settings else 0 + + @classmethod + def get_buffering_timeout(cls): + """Get buffering timeout from database or default""" + settings = cls.get_proxy_settings() + return settings.buffering_timeout if settings else 15 + + @classmethod + def get_buffering_speed(cls): + """Get buffering speed threshold from database or default""" + settings = cls.get_proxy_settings() + return settings.buffering_speed if settings else 1.0 + + @classmethod + def get_channel_init_grace_period(cls): + """Get channel init grace period from database or default""" + settings = cls.get_proxy_settings() + return settings.channel_init_grace_period if settings else 5 + + # Dynamic property access for these settings + @property + def CHANNEL_SHUTDOWN_DELAY(self): + return self.get_channel_shutdown_delay() + + @property + def BUFFERING_TIMEOUT(self): + return self.get_buffering_timeout() + + @property + def BUFFERING_SPEED(self): + return self.get_buffering_speed() + + @property + def CHANNEL_INIT_GRACE_PERIOD(self): + return self.get_channel_init_grace_period() + diff --git a/apps/proxy/ts_proxy/config_helper.py b/apps/proxy/ts_proxy/config_helper.py index 28474b37..d59fa1f9 100644 --- a/apps/proxy/ts_proxy/config_helper.py +++ b/apps/proxy/ts_proxy/config_helper.py @@ -34,14 +34,7 @@ class ConfigHelper: @staticmethod def channel_shutdown_delay(): """Get channel shutdown delay in seconds""" - try: - from core.models import ProxySettings - settings = ProxySettings.objects.first() - if settings: - return settings.channel_shutdown_delay - except: - pass - return ConfigHelper.get('CHANNEL_SHUTDOWN_DELAY', 0) + return Config.get_channel_shutdown_delay() @staticmethod def initial_behind_chunks(): @@ -61,14 +54,7 @@ class ConfigHelper: @staticmethod def redis_chunk_ttl(): """Get Redis chunk TTL in seconds""" - try: - from core.models import ProxySettings - settings = ProxySettings.objects.first() - if settings: - return settings.redis_chunk_ttl - except: - pass - return ConfigHelper.get('REDIS_CHUNK_TTL', 60) + return Config.get_redis_chunk_ttl() @staticmethod def chunk_size(): @@ -103,35 +89,14 @@ class ConfigHelper: @staticmethod def buffering_timeout(): """Get buffering timeout in seconds""" - try: - from core.models import 
ProxySettings - settings = ProxySettings.objects.first() - if settings: - return settings.buffering_timeout - except: - pass - return ConfigHelper.get('BUFFERING_TIMEOUT', 15) # Default to 15 seconds + return Config.get_buffering_timeout() @staticmethod def buffering_speed(): """Get buffering speed threshold""" - try: - from core.models import ProxySettings - settings = ProxySettings.objects.first() - if settings: - return settings.buffering_speed - except: - pass - return ConfigHelper.get('BUFFERING_SPEED', 1) # Default to 1x + return Config.get_buffering_speed() @staticmethod def channel_init_grace_period(): """Get channel initialization grace period in seconds""" - try: - from core.models import ProxySettings - settings = ProxySettings.objects.first() - if settings: - return settings.channel_init_grace_period - except: - pass - return ConfigHelper.get('CHANNEL_INIT_GRACE_PERIOD', 5) # Default to 5 seconds + return Config.get_channel_init_grace_period() diff --git a/core/models.py b/core/models.py index dcb4a68f..57c76553 100644 --- a/core/models.py +++ b/core/models.py @@ -232,3 +232,18 @@ class ProxySettings(models.Model): def __str__(self): return "Proxy Settings" + + @classmethod + def get_settings(cls): + """Get or create the singleton proxy settings instance""" + settings, created = cls.objects.get_or_create( + pk=1, # Force single instance + defaults={ + 'buffering_timeout': 15, + 'buffering_speed': 1.0, + 'redis_chunk_ttl': 60, + 'channel_shutdown_delay': 0, + 'channel_init_grace_period': 20, + } + ) + return settings From 2f91e0ce1c61acec2e63149133d9ab216612bee6 Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Thu, 12 Jun 2025 16:02:08 -0500 Subject: [PATCH 0492/1435] Properly populate default values. --- core/migrations/0014_proxysettings.py | 14 ++++++++++++++ 1 file changed, 14 insertions(+) diff --git a/core/migrations/0014_proxysettings.py b/core/migrations/0014_proxysettings.py index 1f50adea..75a3096f 100644 --- a/core/migrations/0014_proxysettings.py +++ b/core/migrations/0014_proxysettings.py @@ -3,6 +3,19 @@ from django.db import migrations, models +def create_default_proxy_settings(apps, schema_editor): + """Create the default ProxySettings instance""" + ProxySettings = apps.get_model("core", "ProxySettings") + ProxySettings.objects.create( + id=1, # Force singleton ID + buffering_timeout=15, + buffering_speed=1.0, + redis_chunk_ttl=60, + channel_shutdown_delay=0, + channel_init_grace_period=5, + ) + + class Migration(migrations.Migration): dependencies = [ @@ -27,4 +40,5 @@ class Migration(migrations.Migration): 'verbose_name_plural': 'Proxy Settings', }, ), + migrations.RunPython(create_default_proxy_settings), ] From 1e9ab5460909f45e131610fdb469712f6d89b814 Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Thu, 12 Jun 2025 16:11:43 -0500 Subject: [PATCH 0493/1435] Use new methods for getting settings. 
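Both call sites below had drifted from the configuration layer: they
passed an inline default of 20 to the raw ConfigHelper.get(), while
channel_init_grace_period() falls back to 5. Routing every lookup through
the named accessor leaves one place that owns the fallback (and, worth
noting, nudges the effective default from 20s to 5s wherever no value has
been stored). In miniature, as a sketch rather than the project's code:

    class ConfigHelper:
        @staticmethod
        def get(key, default=None):
            # Stand-in for the real lookup against proxy config / database.
            return default

        @staticmethod
        def channel_init_grace_period():
            # One owner for the default; callers can no longer re-state
            # (and mis-state) it the way the literal 20s below did.
            return ConfigHelper.get('CHANNEL_INIT_GRACE_PERIOD', 5)

    grace_period = ConfigHelper.channel_init_grace_period()  # -> 5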
--- apps/proxy/ts_proxy/server.py | 2 +- apps/proxy/ts_proxy/stream_manager.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/apps/proxy/ts_proxy/server.py b/apps/proxy/ts_proxy/server.py index 3d0a53d9..9061665e 100644 --- a/apps/proxy/ts_proxy/server.py +++ b/apps/proxy/ts_proxy/server.py @@ -941,7 +941,7 @@ class ProxyServer: # If waiting for clients, check grace period if connection_ready_time: - grace_period = ConfigHelper.get('CHANNEL_INIT_GRACE_PERIOD', 20) + grace_period = ConfigHelper.channel_init_grace_period() time_since_ready = time.time() - connection_ready_time # Add this debug log diff --git a/apps/proxy/ts_proxy/stream_manager.py b/apps/proxy/ts_proxy/stream_manager.py index a57f1384..06a00513 100644 --- a/apps/proxy/ts_proxy/stream_manager.py +++ b/apps/proxy/ts_proxy/stream_manager.py @@ -1240,7 +1240,7 @@ class StreamManager: redis_client.hset(metadata_key, mapping=update_data) # Get configured grace period or default - grace_period = ConfigHelper.get('CHANNEL_INIT_GRACE_PERIOD', 20) + grace_period = ConfigHelper.channel_init_grace_period() logger.info(f"STREAM MANAGER: Updated channel {channel_id} state: {current_state or 'None'} -> {ChannelState.WAITING_FOR_CLIENTS} with {current_buffer_index} buffer chunks") logger.info(f"Started initial connection grace period ({grace_period}s) for channel {channel_id}") else: From 5360f38b14ea89144604b81a7b51c68c5f81cc05 Mon Sep 17 00:00:00 2001 From: xham3 <215794250+xham3@users.noreply.github.com> Date: Tue, 10 Jun 2025 23:12:17 -0700 Subject: [PATCH 0494/1435] Fix process communicate() deadlock when epg match data overfill subprocess.PIPE buffer. --- apps/channels/tasks.py | 32 +++++++++++++++++++++++++------- 1 file changed, 25 insertions(+), 7 deletions(-) diff --git a/apps/channels/tasks.py b/apps/channels/tasks.py index 6217a4ca..a551028f 100755 --- a/apps/channels/tasks.py +++ b/apps/channels/tasks.py @@ -1,6 +1,7 @@ # apps/channels/tasks.py import logging import os +import select import re import requests import time @@ -136,18 +137,35 @@ def match_epg_channels(): text=True ) - # Log stderr in real-time - for line in iter(process.stderr.readline, ''): - if line: - logger.info(line.strip()) + stdout = '' + block_size = 1024 - process.stderr.close() - stdout, stderr = process.communicate() + while True: + # Monitor stdout and stderr for readability + readable, _, _ = select.select([process.stdout, process.stderr], [], [], 1) # timeout of 1 second + if not readable: # timeout expired + if process.poll() is not None: # check if process finished + break + else: # process still running, continue + continue + + for stream in readable: + if stream == process.stdout: + stdout += stream.read(block_size) + elif stream == process.stderr: + error = stream.readline() + if error: + logger.info(error.strip()) + + if process.poll() is not None: + break + + process.wait() os.remove(temp_file_path) if process.returncode != 0: - return f"Failed to process EPG matching: {stderr}" + return f"Failed to process EPG matching" result = json.loads(stdout) # This returns lists of dicts, not model objects From 8eec41cfbbf2a6d6d78a6849602e5f26e50c42d7 Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Fri, 13 Jun 2025 10:27:51 -0500 Subject: [PATCH 0495/1435] Fixes a bug where heartbeat thread will exit if channel is in shutdown delay. 
This may also fix #129 --- apps/proxy/ts_proxy/client_manager.py | 35 ++++++++++++++++++++++----- apps/proxy/ts_proxy/server.py | 2 +- 2 files changed, 30 insertions(+), 7 deletions(-) diff --git a/apps/proxy/ts_proxy/client_manager.py b/apps/proxy/ts_proxy/client_manager.py index 98dbf072..bcc54602 100644 --- a/apps/proxy/ts_proxy/client_manager.py +++ b/apps/proxy/ts_proxy/client_manager.py @@ -4,6 +4,7 @@ import threading import logging import time import json +import gevent from typing import Set, Optional from apps.proxy.config import TSConfig as Config from redis.exceptions import ConnectionError, TimeoutError @@ -46,7 +47,7 @@ class ClientManager: while True: try: # Wait for the interval - time.sleep(self.heartbeat_interval) + gevent.sleep(self.heartbeat_interval) # Send heartbeat for all local clients with self.lock: @@ -54,13 +55,35 @@ class ClientManager: # No clients left, increment our counter no_clients_count += 1 - # If we've seen no clients for several consecutive checks, exit the thread - if no_clients_count >= max_empty_cycles: - logger.info(f"No clients for channel {self.channel_id} after {no_clients_count} consecutive checks, exiting heartbeat thread") + # Check if we're in a shutdown delay period before exiting + in_shutdown_delay = False + if self.redis_client: + try: + disconnect_key = RedisKeys.last_client_disconnect(self.channel_id) + disconnect_time_bytes = self.redis_client.get(disconnect_key) + if disconnect_time_bytes: + disconnect_time = float(disconnect_time_bytes.decode('utf-8')) + elapsed = time.time() - disconnect_time + shutdown_delay = ConfigHelper.channel_shutdown_delay() + + if elapsed < shutdown_delay: + in_shutdown_delay = True + logger.debug(f"Channel {self.channel_id} in shutdown delay: {elapsed:.1f}s of {shutdown_delay}s elapsed") + except Exception as e: + logger.debug(f"Error checking shutdown delay: {e}") + + # Only exit if we've seen no clients for several consecutive checks AND we're not in shutdown delay + if no_clients_count >= max_empty_cycles and not in_shutdown_delay: + logger.info(f"No clients for channel {self.channel_id} after {no_clients_count} consecutive checks and not in shutdown delay, exiting heartbeat thread") return # This exits the thread - # Skip this cycle if we have no clients - continue + # Skip this cycle if we have no clients but continue if in shutdown delay + if not in_shutdown_delay: + continue + else: + # Reset counter during shutdown delay to prevent premature exit + no_clients_count = 0 + continue else: # Reset counter when we see clients no_clients_count = 0 diff --git a/apps/proxy/ts_proxy/server.py b/apps/proxy/ts_proxy/server.py index 9061665e..bf5d4981 100644 --- a/apps/proxy/ts_proxy/server.py +++ b/apps/proxy/ts_proxy/server.py @@ -206,7 +206,7 @@ class ProxyServer: self.redis_client.setex(disconnect_key, 60, str(time.time())) # Get configured shutdown delay or default - shutdown_delay = getattr(Config, 'CHANNEL_SHUTDOWN_DELAY', 0) + shutdown_delay = ConfigHelper.channel_shutdown_delay() if shutdown_delay > 0: logger.info(f"Waiting {shutdown_delay}s before stopping channel...") From d0cefd3813aebb87946c283330128cad87387282 Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Fri, 13 Jun 2025 12:48:04 -0500 Subject: [PATCH 0496/1435] Fixes being unable to edit stream profile. 
--- frontend/src/components/tables/StreamProfilesTable.jsx | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/frontend/src/components/tables/StreamProfilesTable.jsx b/frontend/src/components/tables/StreamProfilesTable.jsx index 9dc82b5f..a33e5d9f 100644 --- a/frontend/src/components/tables/StreamProfilesTable.jsx +++ b/frontend/src/components/tables/StreamProfilesTable.jsx @@ -220,8 +220,8 @@ const StreamProfiles = () => { return ( ); } From c4a6b1469ee9196a08c14bc584bd798f42508ac9 Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Fri, 13 Jun 2025 14:36:08 -0500 Subject: [PATCH 0497/1435] Change to JSON settings --- apps/proxy/config.py | 24 +++--- core/api_views.py | 103 ++++++++++++++++---------- core/migrations/0014_proxysettings.py | 44 ----------- core/models.py | 65 ++++------------ core/serializers.py | 31 +++----- dispatcharr/settings.py | 1 - 6 files changed, 105 insertions(+), 163 deletions(-) delete mode 100644 core/migrations/0014_proxysettings.py diff --git a/apps/proxy/config.py b/apps/proxy/config.py index bb53beba..ca246b78 100644 --- a/apps/proxy/config.py +++ b/apps/proxy/config.py @@ -14,18 +14,24 @@ class BaseConfig: @classmethod def get_proxy_settings(cls): - """Get ProxySettings from database with fallback to defaults""" + """Get proxy settings from CoreSettings JSON data with fallback to defaults""" try: - from core.models import ProxySettings - return ProxySettings.objects.first() + from core.models import CoreSettings + return CoreSettings.get_proxy_settings() except Exception: - return None + return { + "buffering_timeout": 15, + "buffering_speed": 1.0, + "redis_chunk_ttl": 60, + "channel_shutdown_delay": 0, + "channel_init_grace_period": 5, + } @classmethod def get_redis_chunk_ttl(cls): """Get Redis chunk TTL from database or default""" settings = cls.get_proxy_settings() - return settings.redis_chunk_ttl if settings else 60 + return settings.get("redis_chunk_ttl", 60) @property def REDIS_CHUNK_TTL(self): @@ -79,25 +85,25 @@ class TSConfig(BaseConfig): def get_channel_shutdown_delay(cls): """Get channel shutdown delay from database or default""" settings = cls.get_proxy_settings() - return settings.channel_shutdown_delay if settings else 0 + return settings.get("channel_shutdown_delay", 0) @classmethod def get_buffering_timeout(cls): """Get buffering timeout from database or default""" settings = cls.get_proxy_settings() - return settings.buffering_timeout if settings else 15 + return settings.get("buffering_timeout", 15) @classmethod def get_buffering_speed(cls): """Get buffering speed threshold from database or default""" settings = cls.get_proxy_settings() - return settings.buffering_speed if settings else 1.0 + return settings.get("buffering_speed", 1.0) @classmethod def get_channel_init_grace_period(cls): """Get channel init grace period from database or default""" settings = cls.get_proxy_settings() - return settings.channel_init_grace_period if settings else 5 + return settings.get("channel_init_grace_period", 5) # Dynamic property access for these settings @property diff --git a/core/api_views.py b/core/api_views.py index 01bd3f5a..04e39f11 100644 --- a/core/api_views.py +++ b/core/api_views.py @@ -15,7 +15,7 @@ from .models import ( CoreSettings, STREAM_HASH_KEY, NETWORK_ACCESS, - ProxySettings, + PROXY_SETTINGS_KEY, ) from .serializers import ( UserAgentSerializer, @@ -112,62 +112,85 @@ class CoreSettingsViewSet(viewsets.ModelViewSet): return Response({}, status=status.HTTP_200_OK) -class ProxySettingsViewSet(viewsets.ModelViewSet): 
+class ProxySettingsViewSet(viewsets.ViewSet): """ - API endpoint for proxy settings. - This is treated as a singleton: only one instance should exist. + API endpoint for proxy settings stored as JSON in CoreSettings. """ serializer_class = ProxySettingsSerializer - def get_queryset(self): - # Always return the singleton settings - return ProxySettings.objects.all() + def _get_or_create_settings(self): + """Get or create the proxy settings CoreSettings entry""" + try: + settings_obj = CoreSettings.objects.get(key=PROXY_SETTINGS_KEY) + settings_data = json.loads(settings_obj.value) + except (CoreSettings.DoesNotExist, json.JSONDecodeError): + # Create default settings + settings_data = { + "buffering_timeout": 15, + "buffering_speed": 1.0, + "redis_chunk_ttl": 60, + "channel_shutdown_delay": 0, + "channel_init_grace_period": 5, + } + settings_obj, created = CoreSettings.objects.get_or_create( + key=PROXY_SETTINGS_KEY, + defaults={ + "name": "Proxy Settings", + "value": json.dumps(settings_data) + } + ) + return settings_obj, settings_data - def get_object(self): - # Always return the singleton settings (create if doesn't exist) - return ProxySettings.get_settings() - - def list(self, request, *args, **kwargs): - # Return the singleton settings as a single object - settings = self.get_object() - serializer = self.get_serializer(settings) + def list(self, request): + """Return proxy settings""" + settings_obj, settings_data = self._get_or_create_settings() + serializer = ProxySettingsSerializer(data=settings_data) + serializer.is_valid() return Response(serializer.data) - def retrieve(self, request, *args, **kwargs): - # Always return the singleton settings regardless of ID - settings = self.get_object() - serializer = self.get_serializer(settings) + def retrieve(self, request, pk=None): + """Return proxy settings regardless of ID""" + settings_obj, settings_data = self._get_or_create_settings() + serializer = ProxySettingsSerializer(data=settings_data) + serializer.is_valid() return Response(serializer.data) - def update(self, request, *args, **kwargs): - # Update the singleton settings - settings = self.get_object() - serializer = self.get_serializer(settings, data=request.data, partial=True) + def update(self, request, pk=None): + """Update proxy settings""" + settings_obj, current_data = self._get_or_create_settings() + + serializer = ProxySettingsSerializer(data=request.data) serializer.is_valid(raise_exception=True) - serializer.save() + + # Update the JSON data + settings_obj.value = json.dumps(serializer.validated_data) + settings_obj.save() + return Response(serializer.data) - def partial_update(self, request, *args, **kwargs): - return self.update(request, *args, **kwargs) + def partial_update(self, request, pk=None): + """Partially update proxy settings""" + settings_obj, current_data = self._get_or_create_settings() + + # Merge current data with new data + updated_data = {**current_data, **request.data} + + serializer = ProxySettingsSerializer(data=updated_data) + serializer.is_valid(raise_exception=True) + + # Update the JSON data + settings_obj.value = json.dumps(serializer.validated_data) + settings_obj.save() + + return Response(serializer.data) @action(detail=False, methods=['get', 'patch']) def settings(self, request): - """ - Get or update the proxy settings. 
- """ - settings = self.get_object() - + """Get or update the proxy settings.""" if request.method == 'GET': - # Return current settings - serializer = self.get_serializer(settings) - return Response(serializer.data) - + return self.list(request) elif request.method == 'PATCH': - # Update settings - serializer = self.get_serializer(settings, data=request.data, partial=True) - serializer.is_valid(raise_exception=True) - serializer.save() - return Response(serializer.data) + return self.partial_update(request) diff --git a/core/migrations/0014_proxysettings.py b/core/migrations/0014_proxysettings.py deleted file mode 100644 index 75a3096f..00000000 --- a/core/migrations/0014_proxysettings.py +++ /dev/null @@ -1,44 +0,0 @@ -# Generated by Django 5.1.6 on 2025-06-12 15:44 - -from django.db import migrations, models - - -def create_default_proxy_settings(apps, schema_editor): - """Create the default ProxySettings instance""" - ProxySettings = apps.get_model("core", "ProxySettings") - ProxySettings.objects.create( - id=1, # Force singleton ID - buffering_timeout=15, - buffering_speed=1.0, - redis_chunk_ttl=60, - channel_shutdown_delay=0, - channel_init_grace_period=5, - ) - - -class Migration(migrations.Migration): - - dependencies = [ - ('core', '0013_default_network_access_settings'), - ] - - operations = [ - migrations.CreateModel( - name='ProxySettings', - fields=[ - ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('buffering_timeout', models.IntegerField(default=15, help_text='Seconds to wait for buffering before switching streams')), - ('buffering_speed', models.FloatField(default=1.0, help_text='Speed threshold to consider stream buffering (1.0 = normal speed)')), - ('redis_chunk_ttl', models.IntegerField(default=60, help_text='Time in seconds before Redis chunks expire')), - ('channel_shutdown_delay', models.IntegerField(default=0, help_text='Seconds to wait after last client before shutting down channel')), - ('channel_init_grace_period', models.IntegerField(default=5, help_text='Seconds to wait for first client after channel initialization')), - ('created_at', models.DateTimeField(auto_now_add=True)), - ('updated_at', models.DateTimeField(auto_now=True)), - ], - options={ - 'verbose_name': 'Proxy Settings', - 'verbose_name_plural': 'Proxy Settings', - }, - ), - migrations.RunPython(create_default_proxy_settings), - ] diff --git a/core/models.py b/core/models.py index 57c76553..557776c2 100644 --- a/core/models.py +++ b/core/models.py @@ -149,6 +149,7 @@ STREAM_HASH_KEY = slugify("M3U Hash Key") PREFERRED_REGION_KEY = slugify("Preferred Region") AUTO_IMPORT_MAPPED_FILES = slugify("Auto-Import Mapped Files") NETWORK_ACCESS = slugify("Network Access") +PROXY_SETTINGS_KEY = slugify("Proxy Settings") class CoreSettings(models.Model): @@ -195,55 +196,19 @@ class CoreSettings(models.Model): except cls.DoesNotExist: return None -class ProxySettings(models.Model): - """Proxy configuration settings""" - - buffering_timeout = models.IntegerField( - default=15, - help_text="Seconds to wait for buffering before switching streams" - ) - - buffering_speed = models.FloatField( - default=1.0, - help_text="Speed threshold to consider stream buffering (1.0 = normal speed)" - ) - - redis_chunk_ttl = models.IntegerField( - default=60, - help_text="Time in seconds before Redis chunks expire" - ) - - channel_shutdown_delay = models.IntegerField( - default=0, - help_text="Seconds to wait after last client before shutting down channel" - ) - - 
channel_init_grace_period = models.IntegerField( - default=5, - help_text="Seconds to wait for first client after channel initialization" - ) - - created_at = models.DateTimeField(auto_now_add=True) - updated_at = models.DateTimeField(auto_now=True) - - class Meta: - verbose_name = "Proxy Settings" - verbose_name_plural = "Proxy Settings" - - def __str__(self): - return "Proxy Settings" - @classmethod - def get_settings(cls): - """Get or create the singleton proxy settings instance""" - settings, created = cls.objects.get_or_create( - pk=1, # Force single instance - defaults={ - 'buffering_timeout': 15, - 'buffering_speed': 1.0, - 'redis_chunk_ttl': 60, - 'channel_shutdown_delay': 0, - 'channel_init_grace_period': 20, + def get_proxy_settings(cls): + """Retrieve proxy settings as dict (or return defaults if not found).""" + try: + import json + settings_json = cls.objects.get(key=PROXY_SETTINGS_KEY).value + return json.loads(settings_json) + except (cls.DoesNotExist, json.JSONDecodeError): + # Return defaults if not found or invalid JSON + return { + "buffering_timeout": 15, + "buffering_speed": 1.0, + "redis_chunk_ttl": 60, + "channel_shutdown_delay": 0, + "channel_init_grace_period": 5, } - ) - return settings diff --git a/core/serializers.py b/core/serializers.py index fcc813fe..c6029bc4 100644 --- a/core/serializers.py +++ b/core/serializers.py @@ -3,7 +3,7 @@ import json import ipaddress from rest_framework import serializers -from .models import CoreSettings, UserAgent, StreamProfile, ProxySettings, NETWORK_ACCESS +from .models import CoreSettings, UserAgent, StreamProfile, NETWORK_ACCESS class UserAgentSerializer(serializers.ModelSerializer): @@ -66,24 +66,17 @@ class CoreSettingsSerializer(serializers.ModelSerializer): return super().update(instance, validated_data) -class ProxySettingsSerializer(serializers.ModelSerializer): - class Meta: - model = ProxySettings - fields = [ - 'id', - 'buffering_timeout', - 'buffering_speed', - 'redis_chunk_ttl', - 'channel_shutdown_delay', - 'channel_init_grace_period', - 'created_at', - 'updated_at' - ] - read_only_fields = ['id', 'created_at', 'updated_at'] +class ProxySettingsSerializer(serializers.Serializer): + """Serializer for proxy settings stored as JSON in CoreSettings""" + buffering_timeout = serializers.IntegerField(min_value=0, max_value=300) + buffering_speed = serializers.FloatField(min_value=0.1, max_value=10.0) + redis_chunk_ttl = serializers.IntegerField(min_value=10, max_value=3600) + channel_shutdown_delay = serializers.IntegerField(min_value=0, max_value=300) + channel_init_grace_period = serializers.IntegerField(min_value=0, max_value=60) def validate_buffering_timeout(self, value): - if value < 1 or value > 300: - raise serializers.ValidationError("Buffering timeout must be between 1 and 300 seconds") + if value < 0 or value > 300: + raise serializers.ValidationError("Buffering timeout must be between 0 and 300 seconds") return value def validate_buffering_speed(self, value): @@ -102,6 +95,6 @@ class ProxySettingsSerializer(serializers.ModelSerializer): return value def validate_channel_init_grace_period(self, value): - if value < 1 or value > 60: - raise serializers.ValidationError("Channel init grace period must be between 1 and 60 seconds") + if value < 0 or value > 60: + raise serializers.ValidationError("Channel init grace period must be between 0 and 60 seconds") return value diff --git a/dispatcharr/settings.py b/dispatcharr/settings.py index 63b88c7c..acac4c1a 100644 --- a/dispatcharr/settings.py +++ 
b/dispatcharr/settings.py @@ -254,7 +254,6 @@ PROXY_SETTINGS = { "BUFFER_SIZE": 1000, "RECONNECT_DELAY": 5, "USER_AGENT": "VLC/3.0.20 LibVLC/3.0.20", - "REDIS_CHUNK_TTL": 60, # How long to keep chunks in Redis (seconds) }, } From cfff51a9ebdb4a4bd698d136d74ed9624138d3fb Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Fri, 13 Jun 2025 14:40:28 -0500 Subject: [PATCH 0498/1435] Add missing import. --- core/models.py | 1 + 1 file changed, 1 insertion(+) diff --git a/core/models.py b/core/models.py index 557776c2..843a708c 100644 --- a/core/models.py +++ b/core/models.py @@ -1,6 +1,7 @@ # core/models.py from django.db import models from django.utils.text import slugify +from django.core.exceptions import ValidationError class UserAgent(models.Model): From fa3ee35d4d8e2929bd93f8dd8afa085b3565cd78 Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Fri, 13 Jun 2025 14:51:56 -0500 Subject: [PATCH 0499/1435] Prepopulate settings in database. --- .../migrations/0014_default_proxy_settings.py | 35 +++++++++++++++++++ 1 file changed, 35 insertions(+) create mode 100644 core/migrations/0014_default_proxy_settings.py diff --git a/core/migrations/0014_default_proxy_settings.py b/core/migrations/0014_default_proxy_settings.py new file mode 100644 index 00000000..f4a61a9e --- /dev/null +++ b/core/migrations/0014_default_proxy_settings.py @@ -0,0 +1,35 @@ +# Generated by Django 5.1.6 on 2025-03-01 14:01 + +import json +from django.db import migrations +from django.utils.text import slugify + + +def preload_proxy_settings(apps, schema_editor): + CoreSettings = apps.get_model("core", "CoreSettings") + + # Default proxy settings + default_proxy_settings = { + "buffering_timeout": 15, + "buffering_speed": 1.0, + "redis_chunk_ttl": 60, + "channel_shutdown_delay": 0, + "channel_init_grace_period": 5, + } + + CoreSettings.objects.create( + key=slugify("Proxy Settings"), + name="Proxy Settings", + value=json.dumps(default_proxy_settings), + ) + + +class Migration(migrations.Migration): + + dependencies = [ + ("core", "0013_default_network_access_settings"), + ] + + operations = [ + migrations.RunPython(preload_proxy_settings), + ] From 72542bf0fd1d342b09f296d3cefc44fe30ae4e82 Mon Sep 17 00:00:00 2001 From: dekzter Date: Sat, 14 Jun 2025 08:52:56 -0400 Subject: [PATCH 0500/1435] default cidrs in network access form, added proxy settings to ui --- frontend/src/constants.js | 23 +++++++++ frontend/src/pages/Settings.jsx | 89 ++++++++++++++++++++++++++++++++- 2 files changed, 110 insertions(+), 2 deletions(-) diff --git a/frontend/src/constants.js b/frontend/src/constants.js index f8077b12..89b081ae 100644 --- a/frontend/src/constants.js +++ b/frontend/src/constants.js @@ -29,3 +29,26 @@ export const NETWORK_ACCESS_OPTIONS = { description: 'Limit access to the Dispatcharr UI', }, }; + +export const PROXY_SETTINGS_OPTIONS = { + buffering_timeout: { + label: 'Buffering Timeout', + description: '', + }, + buffering_speed: { + label: 'Buffering Speed', + description: '', + }, + redis_chunk_ttl: { + label: 'Redis Chunk TTL', + description: '', + }, + channel_shutdown_delay: { + label: 'Channel Shutdown Delay', + description: '', + }, + channel_init_grace_period: { + label: 'Channel Init Grace Period', + description: '', + }, +}; diff --git a/frontend/src/pages/Settings.jsx b/frontend/src/pages/Settings.jsx index 073af337..0242ed12 100644 --- a/frontend/src/pages/Settings.jsx +++ b/frontend/src/pages/Settings.jsx @@ -23,7 +23,11 @@ import UserAgentsTable from '../components/tables/UserAgentsTable'; import 
StreamProfilesTable from '../components/tables/StreamProfilesTable'; import useLocalStorage from '../hooks/useLocalStorage'; import useAuthStore from '../store/auth'; -import { USER_LEVELS, NETWORK_ACCESS_OPTIONS } from '../constants'; +import { + USER_LEVELS, + NETWORK_ACCESS_OPTIONS, + PROXY_SETTINGS_OPTIONS, +} from '../constants'; import ConfirmationDialog from '../components/ConfirmationDialog'; const SettingsPage = () => { @@ -40,6 +44,8 @@ const SettingsPage = () => { const [netNetworkAccessConfirmCIDRs, setNetNetworkAccessConfirmCIDRs] = useState([]); + const [proxySettingsSaved, setProxySettingsSaved] = useState(false); + // UI / local storage settings const [tableSize, setTableSize] = useLocalStorage('table-size', 'default'); @@ -334,6 +340,14 @@ const SettingsPage = () => { }, {}), }); + const proxySettingsForm = useForm({ + mode: 'uncontrolled', + initialValues: Object.keys(PROXY_SETTINGS_OPTIONS).reduce((acc, key) => { + acc[key] = ''; + return acc; + }, {}), + }); + useEffect(() => { if (settings) { const formValues = Object.entries(settings).reduce( @@ -371,7 +385,17 @@ const SettingsPage = () => { ); networkAccessForm.setValues( Object.keys(NETWORK_ACCESS_OPTIONS).reduce((acc, key) => { - acc[key] = networkAccessSettings[key]; + acc[key] = networkAccessSettings[key] || '0.0.0.0/0'; + return acc; + }, {}) + ); + + const proxySettings = JSON.parse( + settings['proxy-settings'].value || '{}' + ); + proxySettingsForm.setValues( + Object.keys(PROXY_SETTINGS_OPTIONS).reduce((acc, key) => { + acc[key] = proxySettings[key] || ''; return acc; }, {}) ); @@ -420,6 +444,17 @@ const SettingsPage = () => { setNetworkAccessConfirmOpen(true); }; + const onProxySettingsSubmit = async () => { + setProxySettingsSaved(false); + + await API.updateSetting({ + ...settings['proxy-settings'], + value: JSON.stringify(proxySettingsForm.getValues()), + }); + + setProxySettingsSaved(true); + }; + const saveNetworkAccess = async () => { setNetworkAccessSaved(false); try { @@ -689,6 +724,56 @@ const SettingsPage = () => { , + + + + Proxy Settings + + +
    + + {proxySettingsSaved && ( + + )} + {Object.entries(PROXY_SETTINGS_OPTIONS).map( + ([key, config]) => { + return ( + + ); + } + )} + + + + + +
    +
    +
    , ] : [] )} From 0c4d320dc23373fd3a7bc1344188fe5a569f901b Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Sat, 14 Jun 2025 12:39:07 -0500 Subject: [PATCH 0501/1435] Add descriptions to proxy settings --- frontend/src/constants.js | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/frontend/src/constants.js b/frontend/src/constants.js index 89b081ae..62336929 100644 --- a/frontend/src/constants.js +++ b/frontend/src/constants.js @@ -33,22 +33,22 @@ export const NETWORK_ACCESS_OPTIONS = { export const PROXY_SETTINGS_OPTIONS = { buffering_timeout: { label: 'Buffering Timeout', - description: '', + description: 'Maximum time (in seconds) to wait for buffering before switching streams', }, buffering_speed: { label: 'Buffering Speed', - description: '', + description: 'Speed threshold below which buffering is detected (1.0 = normal speed)', }, redis_chunk_ttl: { label: 'Redis Chunk TTL', - description: '', + description: 'Time-to-live for Redis chunks in seconds (how long stream data is cached)', }, channel_shutdown_delay: { label: 'Channel Shutdown Delay', - description: '', + description: 'Delay in seconds before shutting down a channel after last client disconnects', }, channel_init_grace_period: { label: 'Channel Init Grace Period', - description: '', + description: 'Grace period in seconds during channel initialization', }, }; From 51ce2d241c99257f930ad3ee6ad4950658b5a9a1 Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Sat, 14 Jun 2025 12:42:49 -0500 Subject: [PATCH 0502/1435] Use integers and floats instead of strings for proxy settings. --- frontend/src/pages/Settings.jsx | 483 +++++++++++++++++--------------- 1 file changed, 261 insertions(+), 222 deletions(-) diff --git a/frontend/src/pages/Settings.jsx b/frontend/src/pages/Settings.jsx index 0242ed12..0e8f61e8 100644 --- a/frontend/src/pages/Settings.jsx +++ b/frontend/src/pages/Settings.jsx @@ -17,6 +17,7 @@ import { Switch, Text, TextInput, + NumberInput, } from '@mantine/core'; import { isNotEmpty, useForm } from '@mantine/form'; import UserAgentsTable from '../components/tables/UserAgentsTable'; @@ -521,115 +522,190 @@ const SettingsPage = () => { ].concat( authUser.user_level == USER_LEVELS.ADMIN ? [ - - Stream Settings - -
    - ({ + value: `${option.id}`, + label: option.name, + }))} + /> - ({ - label: r.label, - value: `${r.value}`, - }))} - /> + ({ + label: r.label, + value: `${r.value}`, + }))} + /> - + + Auto-Import Mapped Files + + + + + + + + + + +
    +
    , - + + User-Agents + + + + , + + + Stream Profiles + + + + , + + + + Network Access + {accordianValue == 'network-access' && ( + + Comma-Delimited CIDR ranges + + )} + + +
    + + {networkAccessSaved && ( + + )} + {networkAccessError && ( + + )} + {Object.entries(NETWORK_ACCESS_OPTIONS).map( + ([key, config]) => { + return ( + + ); + } + )} { > - -
    -
    , + + + + , - - User-Agents - - - - , - - - Stream Profiles - - - - , - - - - Network Access - {accordianValue == 'network-access' && ( - - Comma-Delimited CIDR ranges - + + + Proxy Settings + + +
    - - + + {proxySettingsSaved && ( + )} - > - - {networkAccessSaved && ( - - )} - {networkAccessError && ( - - )} - {Object.entries(NETWORK_ACCESS_OPTIONS).map( - ([key, config]) => { - return ( - - ); - } - )} + {Object.entries(PROXY_SETTINGS_OPTIONS).map( + ([key, config]) => { + // Determine if this field should be a NumberInput + const isNumericField = [ + 'buffering_timeout', + 'redis_chunk_ttl', + 'channel_shutdown_delay', + 'channel_init_grace_period' + ].includes(key); - - - - - -
    -
    , + const isFloatField = key === 'buffering_speed'; - - - Proxy Settings - - -
    - - {proxySettingsSaved && ( - - )} - {Object.entries(PROXY_SETTINGS_OPTIONS).map( - ([key, config]) => { + if (isNumericField) { return ( - + ); + } else if (isFloatField) { + return ( + + ); + } else { + return ( + ); } - )} + } + )} - + - - -
    -
    -
    , - ] + Save + + + + + +
    , + ] : [] )} From 9c5a174409f2cb08c9d65316172639e09aaa80e6 Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Sat, 14 Jun 2025 13:08:56 -0500 Subject: [PATCH 0503/1435] Move regions to constants. --- frontend/src/constants.js | 251 +++++++++++++++++++++++++++++++ frontend/src/pages/Settings.jsx | 252 +------------------------------- 2 files changed, 253 insertions(+), 250 deletions(-) diff --git a/frontend/src/constants.js b/frontend/src/constants.js index 62336929..d87a1f7d 100644 --- a/frontend/src/constants.js +++ b/frontend/src/constants.js @@ -52,3 +52,254 @@ export const PROXY_SETTINGS_OPTIONS = { description: 'Grace period in seconds during channel initialization', }, }; + +export const REGION_CHOICES = [ + { value: 'ad', label: 'AD' }, + { value: 'ae', label: 'AE' }, + { value: 'af', label: 'AF' }, + { value: 'ag', label: 'AG' }, + { value: 'ai', label: 'AI' }, + { value: 'al', label: 'AL' }, + { value: 'am', label: 'AM' }, + { value: 'ao', label: 'AO' }, + { value: 'aq', label: 'AQ' }, + { value: 'ar', label: 'AR' }, + { value: 'as', label: 'AS' }, + { value: 'at', label: 'AT' }, + { value: 'au', label: 'AU' }, + { value: 'aw', label: 'AW' }, + { value: 'ax', label: 'AX' }, + { value: 'az', label: 'AZ' }, + { value: 'ba', label: 'BA' }, + { value: 'bb', label: 'BB' }, + { value: 'bd', label: 'BD' }, + { value: 'be', label: 'BE' }, + { value: 'bf', label: 'BF' }, + { value: 'bg', label: 'BG' }, + { value: 'bh', label: 'BH' }, + { value: 'bi', label: 'BI' }, + { value: 'bj', label: 'BJ' }, + { value: 'bl', label: 'BL' }, + { value: 'bm', label: 'BM' }, + { value: 'bn', label: 'BN' }, + { value: 'bo', label: 'BO' }, + { value: 'bq', label: 'BQ' }, + { value: 'br', label: 'BR' }, + { value: 'bs', label: 'BS' }, + { value: 'bt', label: 'BT' }, + { value: 'bv', label: 'BV' }, + { value: 'bw', label: 'BW' }, + { value: 'by', label: 'BY' }, + { value: 'bz', label: 'BZ' }, + { value: 'ca', label: 'CA' }, + { value: 'cc', label: 'CC' }, + { value: 'cd', label: 'CD' }, + { value: 'cf', label: 'CF' }, + { value: 'cg', label: 'CG' }, + { value: 'ch', label: 'CH' }, + { value: 'ci', label: 'CI' }, + { value: 'ck', label: 'CK' }, + { value: 'cl', label: 'CL' }, + { value: 'cm', label: 'CM' }, + { value: 'cn', label: 'CN' }, + { value: 'co', label: 'CO' }, + { value: 'cr', label: 'CR' }, + { value: 'cu', label: 'CU' }, + { value: 'cv', label: 'CV' }, + { value: 'cw', label: 'CW' }, + { value: 'cx', label: 'CX' }, + { value: 'cy', label: 'CY' }, + { value: 'cz', label: 'CZ' }, + { value: 'de', label: 'DE' }, + { value: 'dj', label: 'DJ' }, + { value: 'dk', label: 'DK' }, + { value: 'dm', label: 'DM' }, + { value: 'do', label: 'DO' }, + { value: 'dz', label: 'DZ' }, + { value: 'ec', label: 'EC' }, + { value: 'ee', label: 'EE' }, + { value: 'eg', label: 'EG' }, + { value: 'eh', label: 'EH' }, + { value: 'er', label: 'ER' }, + { value: 'es', label: 'ES' }, + { value: 'et', label: 'ET' }, + { value: 'fi', label: 'FI' }, + { value: 'fj', label: 'FJ' }, + { value: 'fk', label: 'FK' }, + { value: 'fm', label: 'FM' }, + { value: 'fo', label: 'FO' }, + { value: 'fr', label: 'FR' }, + { value: 'ga', label: 'GA' }, + { value: 'gb', label: 'GB' }, + { value: 'gd', label: 'GD' }, + { value: 'ge', label: 'GE' }, + { value: 'gf', label: 'GF' }, + { value: 'gg', label: 'GG' }, + { value: 'gh', label: 'GH' }, + { value: 'gi', label: 'GI' }, + { value: 'gl', label: 'GL' }, + { value: 'gm', label: 'GM' }, + { value: 'gn', label: 'GN' }, + { value: 'gp', label: 'GP' }, + { value: 'gq', label: 'GQ' }, + { value: 
'gr', label: 'GR' }, + { value: 'gs', label: 'GS' }, + { value: 'gt', label: 'GT' }, + { value: 'gu', label: 'GU' }, + { value: 'gw', label: 'GW' }, + { value: 'gy', label: 'GY' }, + { value: 'hk', label: 'HK' }, + { value: 'hm', label: 'HM' }, + { value: 'hn', label: 'HN' }, + { value: 'hr', label: 'HR' }, + { value: 'ht', label: 'HT' }, + { value: 'hu', label: 'HU' }, + { value: 'id', label: 'ID' }, + { value: 'ie', label: 'IE' }, + { value: 'il', label: 'IL' }, + { value: 'im', label: 'IM' }, + { value: 'in', label: 'IN' }, + { value: 'io', label: 'IO' }, + { value: 'iq', label: 'IQ' }, + { value: 'ir', label: 'IR' }, + { value: 'is', label: 'IS' }, + { value: 'it', label: 'IT' }, + { value: 'je', label: 'JE' }, + { value: 'jm', label: 'JM' }, + { value: 'jo', label: 'JO' }, + { value: 'jp', label: 'JP' }, + { value: 'ke', label: 'KE' }, + { value: 'kg', label: 'KG' }, + { value: 'kh', label: 'KH' }, + { value: 'ki', label: 'KI' }, + { value: 'km', label: 'KM' }, + { value: 'kn', label: 'KN' }, + { value: 'kp', label: 'KP' }, + { value: 'kr', label: 'KR' }, + { value: 'kw', label: 'KW' }, + { value: 'ky', label: 'KY' }, + { value: 'kz', label: 'KZ' }, + { value: 'la', label: 'LA' }, + { value: 'lb', label: 'LB' }, + { value: 'lc', label: 'LC' }, + { value: 'li', label: 'LI' }, + { value: 'lk', label: 'LK' }, + { value: 'lr', label: 'LR' }, + { value: 'ls', label: 'LS' }, + { value: 'lt', label: 'LT' }, + { value: 'lu', label: 'LU' }, + { value: 'lv', label: 'LV' }, + { value: 'ly', label: 'LY' }, + { value: 'ma', label: 'MA' }, + { value: 'mc', label: 'MC' }, + { value: 'md', label: 'MD' }, + { value: 'me', label: 'ME' }, + { value: 'mf', label: 'MF' }, + { value: 'mg', label: 'MG' }, + { value: 'mh', label: 'MH' }, + { value: 'ml', label: 'ML' }, + { value: 'mm', label: 'MM' }, + { value: 'mn', label: 'MN' }, + { value: 'mo', label: 'MO' }, + { value: 'mp', label: 'MP' }, + { value: 'mq', label: 'MQ' }, + { value: 'mr', label: 'MR' }, + { value: 'ms', label: 'MS' }, + { value: 'mt', label: 'MT' }, + { value: 'mu', label: 'MU' }, + { value: 'mv', label: 'MV' }, + { value: 'mw', label: 'MW' }, + { value: 'mx', label: 'MX' }, + { value: 'my', label: 'MY' }, + { value: 'mz', label: 'MZ' }, + { value: 'na', label: 'NA' }, + { value: 'nc', label: 'NC' }, + { value: 'ne', label: 'NE' }, + { value: 'nf', label: 'NF' }, + { value: 'ng', label: 'NG' }, + { value: 'ni', label: 'NI' }, + { value: 'nl', label: 'NL' }, + { value: 'no', label: 'NO' }, + { value: 'np', label: 'NP' }, + { value: 'nr', label: 'NR' }, + { value: 'nu', label: 'NU' }, + { value: 'nz', label: 'NZ' }, + { value: 'om', label: 'OM' }, + { value: 'pa', label: 'PA' }, + { value: 'pe', label: 'PE' }, + { value: 'pf', label: 'PF' }, + { value: 'pg', label: 'PG' }, + { value: 'ph', label: 'PH' }, + { value: 'pk', label: 'PK' }, + { value: 'pl', label: 'PL' }, + { value: 'pm', label: 'PM' }, + { value: 'pn', label: 'PN' }, + { value: 'pr', label: 'PR' }, + { value: 'ps', label: 'PS' }, + { value: 'pt', label: 'PT' }, + { value: 'pw', label: 'PW' }, + { value: 'py', label: 'PY' }, + { value: 'qa', label: 'QA' }, + { value: 're', label: 'RE' }, + { value: 'ro', label: 'RO' }, + { value: 'rs', label: 'RS' }, + { value: 'ru', label: 'RU' }, + { value: 'rw', label: 'RW' }, + { value: 'sa', label: 'SA' }, + { value: 'sb', label: 'SB' }, + { value: 'sc', label: 'SC' }, + { value: 'sd', label: 'SD' }, + { value: 'se', label: 'SE' }, + { value: 'sg', label: 'SG' }, + { value: 'sh', label: 'SH' }, + { value: 'si', label: 'SI' }, + { value: 
'sj', label: 'SJ' }, + { value: 'sk', label: 'SK' }, + { value: 'sl', label: 'SL' }, + { value: 'sm', label: 'SM' }, + { value: 'sn', label: 'SN' }, + { value: 'so', label: 'SO' }, + { value: 'sr', label: 'SR' }, + { value: 'ss', label: 'SS' }, + { value: 'st', label: 'ST' }, + { value: 'sv', label: 'SV' }, + { value: 'sx', label: 'SX' }, + { value: 'sy', label: 'SY' }, + { value: 'sz', label: 'SZ' }, + { value: 'tc', label: 'TC' }, + { value: 'td', label: 'TD' }, + { value: 'tf', label: 'TF' }, + { value: 'tg', label: 'TG' }, + { value: 'th', label: 'TH' }, + { value: 'tj', label: 'TJ' }, + { value: 'tk', label: 'TK' }, + { value: 'tl', label: 'TL' }, + { value: 'tm', label: 'TM' }, + { value: 'tn', label: 'TN' }, + { value: 'to', label: 'TO' }, + { value: 'tr', label: 'TR' }, + { value: 'tt', label: 'TT' }, + { value: 'tv', label: 'TV' }, + { value: 'tw', label: 'TW' }, + { value: 'tz', label: 'TZ' }, + { value: 'ua', label: 'UA' }, + { value: 'ug', label: 'UG' }, + { value: 'um', label: 'UM' }, + { value: 'us', label: 'US' }, + { value: 'uy', label: 'UY' }, + { value: 'uz', label: 'UZ' }, + { value: 'va', label: 'VA' }, + { value: 'vc', label: 'VC' }, + { value: 've', label: 'VE' }, + { value: 'vg', label: 'VG' }, + { value: 'vi', label: 'VI' }, + { value: 'vn', label: 'VN' }, + { value: 'vu', label: 'VU' }, + { value: 'wf', label: 'WF' }, + { value: 'ws', label: 'WS' }, + { value: 'ye', label: 'YE' }, + { value: 'yt', label: 'YT' }, + { value: 'za', label: 'ZA' }, + { value: 'zm', label: 'ZM' }, + { value: 'zw', label: 'ZW' }, +]; diff --git a/frontend/src/pages/Settings.jsx b/frontend/src/pages/Settings.jsx index 0e8f61e8..ac1688e0 100644 --- a/frontend/src/pages/Settings.jsx +++ b/frontend/src/pages/Settings.jsx @@ -28,6 +28,7 @@ import { USER_LEVELS, NETWORK_ACCESS_OPTIONS, PROXY_SETTINGS_OPTIONS, + REGION_CHOICES, } from '../constants'; import ConfirmationDialog from '../components/ConfirmationDialog'; @@ -50,256 +51,7 @@ const SettingsPage = () => { // UI / local storage settings const [tableSize, setTableSize] = useLocalStorage('table-size', 'default'); - const regionChoices = [ - { value: 'ad', label: 'AD' }, - { value: 'ae', label: 'AE' }, - { value: 'af', label: 'AF' }, - { value: 'ag', label: 'AG' }, - { value: 'ai', label: 'AI' }, - { value: 'al', label: 'AL' }, - { value: 'am', label: 'AM' }, - { value: 'ao', label: 'AO' }, - { value: 'aq', label: 'AQ' }, - { value: 'ar', label: 'AR' }, - { value: 'as', label: 'AS' }, - { value: 'at', label: 'AT' }, - { value: 'au', label: 'AU' }, - { value: 'aw', label: 'AW' }, - { value: 'ax', label: 'AX' }, - { value: 'az', label: 'AZ' }, - { value: 'ba', label: 'BA' }, - { value: 'bb', label: 'BB' }, - { value: 'bd', label: 'BD' }, - { value: 'be', label: 'BE' }, - { value: 'bf', label: 'BF' }, - { value: 'bg', label: 'BG' }, - { value: 'bh', label: 'BH' }, - { value: 'bi', label: 'BI' }, - { value: 'bj', label: 'BJ' }, - { value: 'bl', label: 'BL' }, - { value: 'bm', label: 'BM' }, - { value: 'bn', label: 'BN' }, - { value: 'bo', label: 'BO' }, - { value: 'bq', label: 'BQ' }, - { value: 'br', label: 'BR' }, - { value: 'bs', label: 'BS' }, - { value: 'bt', label: 'BT' }, - { value: 'bv', label: 'BV' }, - { value: 'bw', label: 'BW' }, - { value: 'by', label: 'BY' }, - { value: 'bz', label: 'BZ' }, - { value: 'ca', label: 'CA' }, - { value: 'cc', label: 'CC' }, - { value: 'cd', label: 'CD' }, - { value: 'cf', label: 'CF' }, - { value: 'cg', label: 'CG' }, - { value: 'ch', label: 'CH' }, - { value: 'ci', label: 'CI' }, - { value: 'ck', 
label: 'CK' }, - { value: 'cl', label: 'CL' }, - { value: 'cm', label: 'CM' }, - { value: 'cn', label: 'CN' }, - { value: 'co', label: 'CO' }, - { value: 'cr', label: 'CR' }, - { value: 'cu', label: 'CU' }, - { value: 'cv', label: 'CV' }, - { value: 'cw', label: 'CW' }, - { value: 'cx', label: 'CX' }, - { value: 'cy', label: 'CY' }, - { value: 'cz', label: 'CZ' }, - { value: 'de', label: 'DE' }, - { value: 'dj', label: 'DJ' }, - { value: 'dk', label: 'DK' }, - { value: 'dm', label: 'DM' }, - { value: 'do', label: 'DO' }, - { value: 'dz', label: 'DZ' }, - { value: 'ec', label: 'EC' }, - { value: 'ee', label: 'EE' }, - { value: 'eg', label: 'EG' }, - { value: 'eh', label: 'EH' }, - { value: 'er', label: 'ER' }, - { value: 'es', label: 'ES' }, - { value: 'et', label: 'ET' }, - { value: 'fi', label: 'FI' }, - { value: 'fj', label: 'FJ' }, - { value: 'fk', label: 'FK' }, - { value: 'fm', label: 'FM' }, - { value: 'fo', label: 'FO' }, - { value: 'fr', label: 'FR' }, - { value: 'ga', label: 'GA' }, - { value: 'gb', label: 'GB' }, - { value: 'gd', label: 'GD' }, - { value: 'ge', label: 'GE' }, - { value: 'gf', label: 'GF' }, - { value: 'gg', label: 'GG' }, - { value: 'gh', label: 'GH' }, - { value: 'gi', label: 'GI' }, - { value: 'gl', label: 'GL' }, - { value: 'gm', label: 'GM' }, - { value: 'gn', label: 'GN' }, - { value: 'gp', label: 'GP' }, - { value: 'gq', label: 'GQ' }, - { value: 'gr', label: 'GR' }, - { value: 'gs', label: 'GS' }, - { value: 'gt', label: 'GT' }, - { value: 'gu', label: 'GU' }, - { value: 'gw', label: 'GW' }, - { value: 'gy', label: 'GY' }, - { value: 'hk', label: 'HK' }, - { value: 'hm', label: 'HM' }, - { value: 'hn', label: 'HN' }, - { value: 'hr', label: 'HR' }, - { value: 'ht', label: 'HT' }, - { value: 'hu', label: 'HU' }, - { value: 'id', label: 'ID' }, - { value: 'ie', label: 'IE' }, - { value: 'il', label: 'IL' }, - { value: 'im', label: 'IM' }, - { value: 'in', label: 'IN' }, - { value: 'io', label: 'IO' }, - { value: 'iq', label: 'IQ' }, - { value: 'ir', label: 'IR' }, - { value: 'is', label: 'IS' }, - { value: 'it', label: 'IT' }, - { value: 'je', label: 'JE' }, - { value: 'jm', label: 'JM' }, - { value: 'jo', label: 'JO' }, - { value: 'jp', label: 'JP' }, - { value: 'ke', label: 'KE' }, - { value: 'kg', label: 'KG' }, - { value: 'kh', label: 'KH' }, - { value: 'ki', label: 'KI' }, - { value: 'km', label: 'KM' }, - { value: 'kn', label: 'KN' }, - { value: 'kp', label: 'KP' }, - { value: 'kr', label: 'KR' }, - { value: 'kw', label: 'KW' }, - { value: 'ky', label: 'KY' }, - { value: 'kz', label: 'KZ' }, - { value: 'la', label: 'LA' }, - { value: 'lb', label: 'LB' }, - { value: 'lc', label: 'LC' }, - { value: 'li', label: 'LI' }, - { value: 'lk', label: 'LK' }, - { value: 'lr', label: 'LR' }, - { value: 'ls', label: 'LS' }, - { value: 'lt', label: 'LT' }, - { value: 'lu', label: 'LU' }, - { value: 'lv', label: 'LV' }, - { value: 'ly', label: 'LY' }, - { value: 'ma', label: 'MA' }, - { value: 'mc', label: 'MC' }, - { value: 'md', label: 'MD' }, - { value: 'me', label: 'ME' }, - { value: 'mf', label: 'MF' }, - { value: 'mg', label: 'MG' }, - { value: 'mh', label: 'MH' }, - { value: 'ml', label: 'ML' }, - { value: 'mm', label: 'MM' }, - { value: 'mn', label: 'MN' }, - { value: 'mo', label: 'MO' }, - { value: 'mp', label: 'MP' }, - { value: 'mq', label: 'MQ' }, - { value: 'mr', label: 'MR' }, - { value: 'ms', label: 'MS' }, - { value: 'mt', label: 'MT' }, - { value: 'mu', label: 'MU' }, - { value: 'mv', label: 'MV' }, - { value: 'mw', label: 'MW' }, - { value: 'mx', 
label: 'MX' }, - { value: 'my', label: 'MY' }, - { value: 'mz', label: 'MZ' }, - { value: 'na', label: 'NA' }, - { value: 'nc', label: 'NC' }, - { value: 'ne', label: 'NE' }, - { value: 'nf', label: 'NF' }, - { value: 'ng', label: 'NG' }, - { value: 'ni', label: 'NI' }, - { value: 'nl', label: 'NL' }, - { value: 'no', label: 'NO' }, - { value: 'np', label: 'NP' }, - { value: 'nr', label: 'NR' }, - { value: 'nu', label: 'NU' }, - { value: 'nz', label: 'NZ' }, - { value: 'om', label: 'OM' }, - { value: 'pa', label: 'PA' }, - { value: 'pe', label: 'PE' }, - { value: 'pf', label: 'PF' }, - { value: 'pg', label: 'PG' }, - { value: 'ph', label: 'PH' }, - { value: 'pk', label: 'PK' }, - { value: 'pl', label: 'PL' }, - { value: 'pm', label: 'PM' }, - { value: 'pn', label: 'PN' }, - { value: 'pr', label: 'PR' }, - { value: 'ps', label: 'PS' }, - { value: 'pt', label: 'PT' }, - { value: 'pw', label: 'PW' }, - { value: 'py', label: 'PY' }, - { value: 'qa', label: 'QA' }, - { value: 're', label: 'RE' }, - { value: 'ro', label: 'RO' }, - { value: 'rs', label: 'RS' }, - { value: 'ru', label: 'RU' }, - { value: 'rw', label: 'RW' }, - { value: 'sa', label: 'SA' }, - { value: 'sb', label: 'SB' }, - { value: 'sc', label: 'SC' }, - { value: 'sd', label: 'SD' }, - { value: 'se', label: 'SE' }, - { value: 'sg', label: 'SG' }, - { value: 'sh', label: 'SH' }, - { value: 'si', label: 'SI' }, - { value: 'sj', label: 'SJ' }, - { value: 'sk', label: 'SK' }, - { value: 'sl', label: 'SL' }, - { value: 'sm', label: 'SM' }, - { value: 'sn', label: 'SN' }, - { value: 'so', label: 'SO' }, - { value: 'sr', label: 'SR' }, - { value: 'ss', label: 'SS' }, - { value: 'st', label: 'ST' }, - { value: 'sv', label: 'SV' }, - { value: 'sx', label: 'SX' }, - { value: 'sy', label: 'SY' }, - { value: 'sz', label: 'SZ' }, - { value: 'tc', label: 'TC' }, - { value: 'td', label: 'TD' }, - { value: 'tf', label: 'TF' }, - { value: 'tg', label: 'TG' }, - { value: 'th', label: 'TH' }, - { value: 'tj', label: 'TJ' }, - { value: 'tk', label: 'TK' }, - { value: 'tl', label: 'TL' }, - { value: 'tm', label: 'TM' }, - { value: 'tn', label: 'TN' }, - { value: 'to', label: 'TO' }, - { value: 'tr', label: 'TR' }, - { value: 'tt', label: 'TT' }, - { value: 'tv', label: 'TV' }, - { value: 'tw', label: 'TW' }, - { value: 'tz', label: 'TZ' }, - { value: 'ua', label: 'UA' }, - { value: 'ug', label: 'UG' }, - { value: 'um', label: 'UM' }, - { value: 'us', label: 'US' }, - { value: 'uy', label: 'UY' }, - { value: 'uz', label: 'UZ' }, - { value: 'va', label: 'VA' }, - { value: 'vc', label: 'VC' }, - { value: 've', label: 'VE' }, - { value: 'vg', label: 'VG' }, - { value: 'vi', label: 'VI' }, - { value: 'vn', label: 'VN' }, - { value: 'vu', label: 'VU' }, - { value: 'wf', label: 'WF' }, - { value: 'ws', label: 'WS' }, - { value: 'ye', label: 'YE' }, - { value: 'yt', label: 'YT' }, - { value: 'za', label: 'ZA' }, - { value: 'zm', label: 'ZM' }, - { value: 'zw', label: 'ZW' }, - ]; + const regionChoices = REGION_CHOICES; const form = useForm({ mode: 'uncontrolled', From e80d30689cdf6e10692bedff44a5416996ead623 Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Sat, 14 Jun 2025 13:42:01 -0500 Subject: [PATCH 0504/1435] Settings load correctly during first open. 
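For context on the storage scheme these settings patches converge on: proxy settings now live as a single JSON document inside a generic key/value row, with hard-coded defaults returned when the row is missing or unreadable, and the data migration above (PATCH 0499) pre-creating the row so the first read succeeds. A minimal sketch of that pattern, using an illustrative standalone model rather than the actual CoreSettings class:

    import json
    from django.db import models

    DEFAULTS = {
        "buffering_timeout": 15,
        "buffering_speed": 1.0,
        "redis_chunk_ttl": 60,
        "channel_shutdown_delay": 0,
        "channel_init_grace_period": 5,
    }

    class KeyValueSetting(models.Model):
        key = models.CharField(max_length=64, unique=True)
        value = models.TextField()  # JSON-encoded settings document

        @classmethod
        def get_json(cls, key, defaults=DEFAULTS):
            # Fall back to defaults when the row is missing or holds invalid JSON
            try:
                return json.loads(cls.objects.get(key=key).value)
            except (cls.DoesNotExist, json.JSONDecodeError):
                return dict(defaults)

The diff below then lets the ViewSet hand that dict straight back to the client instead of round-tripping it through an unsaved serializer, which is what makes the settings appear correctly on first open.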
--- core/api_views.py | 13 +++++-------- 1 file changed, 5 insertions(+), 8 deletions(-) diff --git a/core/api_views.py b/core/api_views.py index 04e39f11..dfb52b44 100644 --- a/core/api_views.py +++ b/core/api_views.py @@ -144,16 +144,12 @@ class ProxySettingsViewSet(viewsets.ViewSet): def list(self, request): """Return proxy settings""" settings_obj, settings_data = self._get_or_create_settings() - serializer = ProxySettingsSerializer(data=settings_data) - serializer.is_valid() - return Response(serializer.data) + return Response(settings_data) def retrieve(self, request, pk=None): """Return proxy settings regardless of ID""" settings_obj, settings_data = self._get_or_create_settings() - serializer = ProxySettingsSerializer(data=settings_data) - serializer.is_valid() - return Response(serializer.data) + return Response(settings_data) def update(self, request, pk=None): """Update proxy settings""" @@ -166,7 +162,7 @@ class ProxySettingsViewSet(viewsets.ViewSet): settings_obj.value = json.dumps(serializer.validated_data) settings_obj.save() - return Response(serializer.data) + return Response(serializer.validated_data) def partial_update(self, request, pk=None): """Partially update proxy settings""" @@ -182,7 +178,7 @@ class ProxySettingsViewSet(viewsets.ViewSet): settings_obj.value = json.dumps(serializer.validated_data) settings_obj.save() - return Response(serializer.data) + return Response(serializer.validated_data) @action(detail=False, methods=['get', 'patch']) def settings(self, request): @@ -271,6 +267,7 @@ def environment(request): operation_description="Get application version information", responses={200: "Version information"}, ) + @api_view(["GET"]) def version(request): # Import version information From 9757f6a48dbaebb066dec16b0c52571f0e775df0 Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Sun, 15 Jun 2025 11:28:57 -0500 Subject: [PATCH 0505/1435] Fix key error with React.
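One detail in the ViewSet change above is worth spelling out: with a plain (non-model) DRF serializer there is no saved instance, so validated_data is the whole contract: it is what gets json.dumps()-ed into the row and what should be echoed back to the client. A rough standalone illustration of that round trip, with illustrative names:

    import json
    from rest_framework import serializers

    class ProxySettingsSketch(serializers.Serializer):
        buffering_timeout = serializers.IntegerField(min_value=0, max_value=300)
        buffering_speed = serializers.FloatField(min_value=0.1, max_value=10.0)

    def save_proxy_settings(row, payload):
        serializer = ProxySettingsSketch(data=payload)
        serializer.is_valid(raise_exception=True)
        row.value = json.dumps(serializer.validated_data)  # persist only validated keys
        row.save()
        return serializer.validated_data  # echo the stored values, not serializer.data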
--- core/api_views.py | 9 +- frontend/src/pages/Settings.jsx | 597 ++++++++++++++++---------------- 2 files changed, 302 insertions(+), 304 deletions(-) diff --git a/core/api_views.py b/core/api_views.py index dfb52b44..b416cf92 100644 --- a/core/api_views.py +++ b/core/api_views.py @@ -203,7 +203,7 @@ def environment(request): country_code = None country_name = None - # 1) Get the public IP + # 1) Get the public IP from ipify.org API try: r = requests.get("https://api64.ipify.org?format=json", timeout=5) r.raise_for_status() @@ -211,17 +211,17 @@ def environment(request): except requests.RequestException as e: public_ip = f"Error: {e}" - # 2) Get the local IP + # 2) Get the local IP by connecting to a public DNS server try: s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) - # connect to a “public” address so the OS can determine our local interface + # connect to a "public" address so the OS can determine our local interface s.connect(("8.8.8.8", 80)) local_ip = s.getsockname()[0] s.close() except Exception as e: local_ip = f"Error: {e}" - # 3) If we got a valid public_ip, fetch geo info from ipapi.co or ip-api.com + # 3) Get geolocation data from ipapi.co or ip-api.com if public_ip and "Error" not in public_ip: try: # Attempt to get geo information from ipapi.co first @@ -250,6 +250,7 @@ def environment(request): country_code = None country_name = None + # 4) Get environment mode from system environment variable return Response( { "authenticated": True, diff --git a/frontend/src/pages/Settings.jsx b/frontend/src/pages/Settings.jsx index ac1688e0..e471602e 100644 --- a/frontend/src/pages/Settings.jsx +++ b/frontend/src/pages/Settings.jsx @@ -143,15 +143,14 @@ const SettingsPage = () => { }, {}) ); - const proxySettings = JSON.parse( - settings['proxy-settings'].value || '{}' - ); - proxySettingsForm.setValues( - Object.keys(PROXY_SETTINGS_OPTIONS).reduce((acc, key) => { - acc[key] = proxySettings[key] || ''; - return acc; - }, {}) - ); + if (settings['proxy-settings']?.value) { + try { + const proxySettings = JSON.parse(settings['proxy-settings'].value); + proxySettingsForm.setValues(proxySettings); + } catch (error) { + console.error('Error parsing proxy settings:', error); + } + } } }, [settings]); @@ -246,143 +245,217 @@ const SettingsPage = () => { defaultValue="ui-settings" onChange={setAccordianValue} > - {[ - - UI Settings - - ({ - value: `${option.id}`, - label: option.name, - }))} - /> + + UI Settings + + ({ - value: `${option.id}`, - label: option.name, - }))} - /> - ({ + value: `${option.id}`, + label: option.name, + }))} + /> - ({ + value: `${option.id}`, + label: option.name, + }))} + /> + setM3uParams(prev => ({ + ...prev, + tvg_id_source: value + }))} + comboboxProps={{ withinPortal: false }} + data={[ + { value: 'channel_number', label: 'Channel Number' }, + { value: 'tvg_id', label: 'TVG-ID' }, + { value: 'gracenote', label: 'Gracenote Station ID' } + ]} + /> + + + - - - - - - - - + + + + + + } + /> + Use cached logos + setEpgParams(prev => ({ + ...prev, + cachedlogos: event.target.checked + }))} + /> + + { + data={Object.entries(USER_LEVELS).map(([, value]) => { return { label: USER_LEVEL_LABELS[value], value: `${value}`, diff --git a/frontend/src/components/tables/UsersTable.jsx b/frontend/src/components/tables/UsersTable.jsx index 137bca89..c141a951 100644 --- a/frontend/src/components/tables/UsersTable.jsx +++ b/frontend/src/components/tables/UsersTable.jsx @@ -1,4 +1,4 @@ -import React, { useEffect, useMemo, useCallback, useState } from 'react'; +import React, { 
useMemo, useCallback, useState } from 'react'; import API from '../../api'; import UserForm from '../forms/User'; import useUsersStore from '../../store/users'; @@ -147,6 +147,7 @@ const UsersTable = () => { { header: 'Username', accessorKey: 'username', + size: 150, cell: ({ getValue }) => ( { { header: 'Date Joined', accessorKey: 'date_joined', - size: 120, + size: 125, cell: ({ getValue }) => { const date = getValue(); return ( @@ -219,7 +220,7 @@ const UsersTable = () => { { header: 'XC Password', accessorKey: 'custom_properties', - size: 120, + size: 125, enableSorting: false, cell: ({ getValue, row }) => { const userId = row.original.id; @@ -317,7 +318,7 @@ const UsersTable = () => { style={{ display: 'flex', justifyContent: 'center', - padding: '20px', + padding: '0px', minHeight: '100vh', }} > From e9d60cdb1e369f9288b8bb9214fc3b1baa5e1833 Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Sat, 28 Jun 2025 09:42:00 -0500 Subject: [PATCH 0569/1435] Allow setting blank XC password. --- frontend/src/components/forms/User.jsx | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/frontend/src/components/forms/User.jsx b/frontend/src/components/forms/User.jsx index 28a817f1..6ca9eaa1 100644 --- a/frontend/src/components/forms/User.jsx +++ b/frontend/src/components/forms/User.jsx @@ -78,9 +78,8 @@ const User = ({ user = null, isOpen, onClose }) => { user?.custom_properties || '{}' ); - if (values.xc_password) { - customProps.xc_password = values.xc_password; - } + // Always save xc_password, even if it's empty (to allow clearing) + customProps.xc_password = values.xc_password || ''; delete values.xc_password; values.custom_properties = JSON.stringify(customProps); From 5ff474d32299eadb8e52ede87756fa7ae3d5d0c5 Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Tue, 1 Jul 2025 09:47:31 -0500 Subject: [PATCH 0570/1435] Fixes being unable to close web player on touch screens. --- frontend/src/components/FloatingVideo.jsx | 27 ++++++++++++++++++++--- 1 file changed, 24 insertions(+), 3 deletions(-) diff --git a/frontend/src/components/FloatingVideo.jsx b/frontend/src/components/FloatingVideo.jsx index 7f1e1c53..8b131dd3 100644 --- a/frontend/src/components/FloatingVideo.jsx +++ b/frontend/src/components/FloatingVideo.jsx @@ -185,7 +185,12 @@ export default function FloatingVideo() { }, [isVisible, streamUrl]); // Modified hideVideo handler to clean up player first - const handleClose = () => { + const handleClose = (e) => { + // Prevent event propagation to avoid triggering drag events + if (e) { + e.stopPropagation(); + e.preventDefault(); + } safeDestroyPlayer(); // Small delay before hiding the video component to ensure cleanup is complete setTimeout(() => { @@ -215,8 +220,24 @@ export default function FloatingVideo() { }} > {/* Simple header row with a close button */} - - + + e.stopPropagation()} + onTouchStart={(e) => e.stopPropagation()} + style={{ + minHeight: '32px', + minWidth: '32px', + cursor: 'pointer', + touchAction: 'manipulation' + }} + /> {/* Video container with relative positioning for the overlay */} From 2b97a958cdcb0ea9056b2ca7abc592af6ba9a178 Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Tue, 1 Jul 2025 10:27:24 -0500 Subject: [PATCH 0571/1435] Check if a transcode process is running also to determine if we should close sockets. 
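The one-line diff that follows relies on how process liveness is checked throughout this file: Popen.poll() returns None while the child is still running and its exit code once it has terminated, so a live transcode process means there are still resources to tear down even when no socket exists. A tiny standalone illustration (stdlib only; the sleep command assumes a POSIX system):

    import subprocess

    proc = subprocess.Popen(["sleep", "2"])
    if proc.poll() is None:
        print("child still running; its resources still need cleanup")
    proc.wait()
    print("exit code:", proc.poll())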
--- apps/proxy/ts_proxy/stream_manager.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/apps/proxy/ts_proxy/stream_manager.py b/apps/proxy/ts_proxy/stream_manager.py index 64a764bf..1abfb6d8 100644 --- a/apps/proxy/ts_proxy/stream_manager.py +++ b/apps/proxy/ts_proxy/stream_manager.py @@ -768,7 +768,7 @@ class StreamManager: def _close_all_connections(self): """Close all connection resources""" - if self.socket: + if self.socket or self.transcode_process: try: self._close_socket() except Exception as e: From 8e2c6c7780215b631b2d40d0dec489701a84f556 Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Tue, 1 Jul 2025 10:57:07 -0500 Subject: [PATCH 0572/1435] Check for any state to determine if channel is running. --- apps/proxy/ts_proxy/server.py | 2 +- apps/proxy/ts_proxy/views.py | 12 ++---------- 2 files changed, 3 insertions(+), 11 deletions(-) diff --git a/apps/proxy/ts_proxy/server.py b/apps/proxy/ts_proxy/server.py index 4699091a..da5daaa7 100644 --- a/apps/proxy/ts_proxy/server.py +++ b/apps/proxy/ts_proxy/server.py @@ -708,7 +708,7 @@ class ProxyServer: elif state in [ChannelState.STOPPING, ChannelState.STOPPED, ChannelState.ERROR]: # These states indicate the channel should be reinitialized logger.info(f"Channel {channel_id} exists but in terminal state: {state}") - return False + return True else: # Unknown or initializing state, check how long it's been in this state if b'state_changed_at' in metadata: diff --git a/apps/proxy/ts_proxy/views.py b/apps/proxy/ts_proxy/views.py index b9ba3e65..7192937d 100644 --- a/apps/proxy/ts_proxy/views.py +++ b/apps/proxy/ts_proxy/views.py @@ -83,15 +83,7 @@ def stream_ts(request, channel_id): if state_field in metadata: channel_state = metadata[state_field].decode("utf-8") - # IMPROVED: Check for *any* state that indicates initialization is in progress - active_states = [ - ChannelState.INITIALIZING, - ChannelState.CONNECTING, - ChannelState.WAITING_FOR_CLIENTS, - ChannelState.ACTIVE, - ChannelState.BUFFERING, - ] - if channel_state in active_states: + if channel_state: # Channel is being initialized or already active - no need for reinitialization needs_initialization = False logger.debug( @@ -132,7 +124,7 @@ def stream_ts(request, channel_id): logger.warning( f"[{client_id}] Channel {channel_id} in state {channel_state}, forcing cleanup" ) - proxy_server.stop_channel(channel_id) + ChannelService.stop_channel(channel_id) # Use max retry attempts and connection timeout from config max_retries = ConfigHelper.max_retries() From 580aa1975c1697db09e4323957ac7d627e7ff511 Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Thu, 3 Jul 2025 11:02:07 -0500 Subject: [PATCH 0573/1435] Add process management for safe connection handling in StreamManager - Introduced _wait_for_existing_processes_to_close method to ensure all existing processes and connections are fully closed before establishing new ones. - Updated _establish_transcode_connection and _establish_http_connection methods to check for and close lingering processes and connections. - Enhanced logging for better debugging and monitoring of connection states.
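The new method described above follows a common bounded-polling shape: loop until every resource reports closed or a deadline passes, sleeping briefly between checks so other greenlets can run. A generic sketch of that shape, not the actual implementation:

    import time

    def wait_until_closed(checks, timeout=5.0, interval=0.1):
        """checks: zero-arg callables that return True while a resource is busy."""
        deadline = time.time() + timeout
        while time.time() < deadline:
            if not any(check() for check in checks):
                return True  # everything reports closed
            time.sleep(interval)  # the real code uses gevent.sleep to yield
        return False  # caller logs a warning and decides how to proceed

Returning False on timeout rather than raising matters here: the callers in the diff below log a warning and continue, trading a possibly lingering process for not blocking the stream restart indefinitely.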
--- apps/proxy/ts_proxy/stream_manager.py | 70 ++++++++++++++++++++++++++- 1 file changed, 69 insertions(+), 1 deletion(-) diff --git a/apps/proxy/ts_proxy/stream_manager.py b/apps/proxy/ts_proxy/stream_manager.py index 1abfb6d8..f7290436 100644 --- a/apps/proxy/ts_proxy/stream_manager.py +++ b/apps/proxy/ts_proxy/stream_manager.py @@ -135,6 +135,37 @@ class StreamManager: return session + def _wait_for_existing_processes_to_close(self, timeout=5.0): + """Wait for existing processes/connections to fully close before establishing new ones""" + start_time = time.time() + + while time.time() - start_time < timeout: + # Check if transcode process is still running + if self.transcode_process and self.transcode_process.poll() is None: + logger.debug(f"Waiting for existing transcode process to terminate for channel {self.channel_id}") + gevent.sleep(0.1) + continue + + # Check if HTTP connections are still active + if self.current_response or self.current_session: + logger.debug(f"Waiting for existing HTTP connections to close for channel {self.channel_id}") + gevent.sleep(0.1) + continue + + # Check if socket is still active + if self.socket: + logger.debug(f"Waiting for existing socket to close for channel {self.channel_id}") + gevent.sleep(0.1) + continue + + # All processes/connections are closed + logger.debug(f"All existing processes closed for channel {self.channel_id}") + return True + + # Timeout reached + logger.warning(f"Timeout waiting for existing processes to close for channel {self.channel_id} after {timeout}s") + return False + def run(self): """Main execution loop using HTTP streaming with improved connection handling and stream switching""" # Add a stop flag to the class properties @@ -323,6 +354,22 @@ class StreamManager: """Establish a connection using transcoding""" try: logger.debug(f"Building transcode command for channel {self.channel_id}") + + # Check if we already have a running transcode process + if self.transcode_process and self.transcode_process.poll() is None: + logger.info(f"Existing transcode process found for channel {self.channel_id}, closing before establishing new connection") + self._close_socket() + + # Wait for the process to fully terminate + if not self._wait_for_existing_processes_to_close(): + logger.error(f"Failed to close existing transcode process for channel {self.channel_id}") + return False + + # Also check for any lingering HTTP connections + if self.current_response or self.current_session: + logger.debug(f"Closing existing HTTP connections before establishing transcode connection for channel {self.channel_id}") + self._close_connection() + channel = get_stream_object(self.channel_id) # Use FFmpeg specifically for HLS streams @@ -656,6 +703,21 @@ class StreamManager: try: logger.debug(f"Using TS Proxy to connect to stream: {self.url}") + # Check if we already have active HTTP connections + if self.current_response or self.current_session: + logger.info(f"Existing HTTP connection found for channel {self.channel_id}, closing before establishing new connection") + self._close_connection() + + # Wait for connections to fully close + if not self._wait_for_existing_processes_to_close(): + logger.error(f"Failed to close existing HTTP connections for channel {self.channel_id}") + return False + + # Also check for any lingering transcode processes + if self.transcode_process and self.transcode_process.poll() is None: + logger.debug(f"Closing existing transcode process before establishing HTTP connection for channel {self.channel_id}") + 
self._close_socket() + # Create new session for each connection attempt session = self._create_session() self.current_session = session @@ -1000,12 +1062,18 @@ logger.info("URL switching already in progress, skipping reconnect") return False - # Close existing connection + # Close existing connection and wait for it to fully terminate if self.transcode or self.socket: + logger.debug("Closing transcode process before reconnect") self._close_socket() else: + logger.debug("Closing HTTP connection before reconnect") self._close_connection() + # Wait for all processes to fully close before attempting reconnect + if not self._wait_for_existing_processes_to_close(): + logger.warning(f"Some processes may still be running during reconnect for channel {self.channel_id}") + self.connected = False # Attempt to establish a new connection using the same URL From 55e19f05aa86d386c4a24aed9e90ab07bad92cb5 Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Thu, 3 Jul 2025 11:18:03 -0500 Subject: [PATCH 0574/1435] Check if stopping before adding chunks during transcoding. --- apps/proxy/ts_proxy/stream_manager.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/apps/proxy/ts_proxy/stream_manager.py b/apps/proxy/ts_proxy/stream_manager.py index f7290436..97515424 100644 --- a/apps/proxy/ts_proxy/stream_manager.py +++ b/apps/proxy/ts_proxy/stream_manager.py @@ -784,7 +784,7 @@ class StreamManager: try: if self.transcode: # Handle transcoded stream data - while self.running and self.connected: + while self.running and self.connected and not self.stop_requested: if self.fetch_chunk(): self.last_data_time = time.time() else: From 01d4b253034d9587d1f8d12a0b86e55096842f04 Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Thu, 3 Jul 2025 14:10:03 -0500 Subject: [PATCH 0575/1435] Health monitor thread will no longer attempt to reconnect; it will only notify the main thread of issues.
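The rewrite that follows moves all recovery off the monitor thread: the monitor only raises flags, and the main run() loop performs the reconnect or stream switch at a point where no read is in flight. A stripped-down sketch of that handshake, with illustrative names:

    import threading
    import time

    class WorkerSketch:
        def __init__(self):
            self.running = True
            self.needs_reconnect = False
            self.last_data_time = time.time()

        def monitor(self):
            while self.running:
                if time.time() - self.last_data_time > 10:
                    self.needs_reconnect = True  # signal only; never touch sockets here
                time.sleep(1)

        def reconnect(self):
            print("reconnecting on the main thread")
            self.last_data_time = time.time()

        def run(self, iterations=5):
            threading.Thread(target=self.monitor, daemon=True).start()
            for _ in range(iterations):
                if self.needs_reconnect:
                    self.needs_reconnect = False
                    self.reconnect()  # teardown and reconnect happen on one thread only
                time.sleep(0.5)  # stand-in for the real data pump
            self.running = False

Keeping teardown on a single thread is what removes the races that the wait loops added in PATCH 0573 were defending against.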
--- apps/proxy/ts_proxy/stream_manager.py | 138 +++++++++++++++----------- 1 file changed, 81 insertions(+), 57 deletions(-) diff --git a/apps/proxy/ts_proxy/stream_manager.py b/apps/proxy/ts_proxy/stream_manager.py index 97515424..77523e87 100644 --- a/apps/proxy/ts_proxy/stream_manager.py +++ b/apps/proxy/ts_proxy/stream_manager.py @@ -193,6 +193,32 @@ class StreamManager: f"Resetting switching state.") self._reset_url_switching_state() + # NEW: Check for health monitor recovery requests + if hasattr(self, 'needs_reconnect') and self.needs_reconnect and not self.url_switching: + logger.info(f"Health monitor requested reconnect for channel {self.channel_id}") + self.needs_reconnect = False + + # Attempt reconnect without changing streams + if self._attempt_reconnect(): + logger.info(f"Health-requested reconnect successful") + continue # Go back to main loop + else: + logger.warning(f"Health-requested reconnect failed, will try stream switch") + self.needs_stream_switch = True + + if hasattr(self, 'needs_stream_switch') and self.needs_stream_switch and not self.url_switching: + logger.info(f"Health monitor requested stream switch for channel {self.channel_id}") + self.needs_stream_switch = False + + if self._try_next_stream(): + logger.info(f"Health-requested stream switch successful") + stream_switch_attempts += 1 + self.retry_count = 0 # Reset retries for new stream + continue # Go back to main loop with new stream + else: + logger.error(f"Health-requested stream switch failed") + # Continue with normal flow + # Check stream type before connecting stream_type = detect_stream_type(self.url) if self.transcode == False and stream_type == StreamType.HLS: @@ -981,13 +1007,15 @@ class StreamManager: return self.retry_count < self.max_retries def _monitor_health(self): - """Monitor stream health and attempt recovery if needed""" + """Monitor stream health and set flags for the main loop to handle recovery""" consecutive_unhealthy_checks = 0 - health_recovery_attempts = 0 - reconnect_attempts = 0 - max_health_recovery_attempts = ConfigHelper.get('MAX_HEALTH_RECOVERY_ATTEMPTS', 2) - max_reconnect_attempts = ConfigHelper.get('MAX_RECONNECT_ATTEMPTS', 3) - min_stable_time = ConfigHelper.get('MIN_STABLE_TIME_BEFORE_RECONNECT', 30) # seconds + max_unhealthy_checks = 3 + + # Add flags for the main loop to check + self.needs_reconnect = False + self.needs_stream_switch = False + self.last_health_action_time = 0 + action_cooldown = 30 # Prevent rapid recovery attempts while self.running: try: @@ -996,48 +1024,43 @@ class StreamManager: timeout_threshold = getattr(Config, 'CONNECTION_TIMEOUT', 10) if inactivity_duration > timeout_threshold and self.connected: - # Mark unhealthy if no data for too long if self.healthy: logger.warning(f"Stream unhealthy - no data for {inactivity_duration:.1f}s") self.healthy = False - # Track consecutive unhealthy checks consecutive_unhealthy_checks += 1 - # After several unhealthy checks in a row, try recovery - if consecutive_unhealthy_checks >= 3 and health_recovery_attempts < max_health_recovery_attempts: - # Calculate how long the stream was stable before failing + # Only set flags if enough time has passed since last action + if (consecutive_unhealthy_checks >= max_unhealthy_checks and + now - self.last_health_action_time > action_cooldown): + + # Calculate stability to decide on action type connection_start_time = getattr(self, 'connection_start_time', 0) stable_time = self.last_data_time - connection_start_time if connection_start_time > 0 else 0 - if stable_time 
>= min_stable_time and reconnect_attempts < max_reconnect_attempts: - # Stream was stable for a while, try reconnecting first - logger.warning(f"Stream was stable for {stable_time:.1f}s before failing. " - f"Attempting reconnect {reconnect_attempts + 1}/{max_reconnect_attempts}") - reconnect_attempts += 1 - threading.Thread(target=self._attempt_reconnect, daemon=True).start() + if stable_time >= 30: # Stream was stable, try reconnect first + if not self.needs_reconnect: + logger.info(f"Setting reconnect flag for stable stream (stable for {stable_time:.1f}s)") + self.needs_reconnect = True + self.last_health_action_time = now else: - # Stream was not stable long enough, or reconnects failed too many times - # Try switching to another stream - if reconnect_attempts > 0: - logger.warning(f"Reconnect attempts exhausted ({reconnect_attempts}/{max_reconnect_attempts}). " - f"Attempting stream switch recovery") - else: - logger.warning(f"Stream was only stable for {stable_time:.1f}s (<{min_stable_time}s). " - f"Skipping reconnect, attempting stream switch") + # Stream wasn't stable, suggest stream switch + if not self.needs_stream_switch: + logger.info(f"Setting stream switch flag for unstable stream (stable for {stable_time:.1f}s)") + self.needs_stream_switch = True + self.last_health_action_time = now + + consecutive_unhealthy_checks = 0 # Reset after setting flag - health_recovery_attempts += 1 - reconnect_attempts = 0 # Reset for next time - threading.Thread(target=self._attempt_health_recovery, daemon=True).start() elif self.connected and not self.healthy: # Auto-recover health when data resumes logger.info(f"Stream health restored") self.healthy = True consecutive_unhealthy_checks = 0 - health_recovery_attempts = 0 - reconnect_attempts = 0 + # Clear recovery flags when healthy again + self.needs_reconnect = False + self.needs_stream_switch = False - # If healthy, reset unhealthy counter (but keep other state) if self.healthy: consecutive_unhealthy_checks = 0 @@ -1053,51 +1076,52 @@ class StreamManager: # Don't try to reconnect if we're already switching URLs if self.url_switching: - # Add timeout check to prevent permanent deadlock - if time.time() - self.url_switch_start_time > self.url_switch_timeout: - logger.warning(f"URL switching has been in progress too long ({time.time() - self.url_switch_start_time:.1f}s), " - f"resetting switching state and allowing reconnect") - self._reset_url_switching_state() - else: - logger.info("URL switching already in progress, skipping reconnect") - return False + logger.info("URL switching already in progress, skipping reconnect") + return False - # Close existing connection and wait for it to fully terminate - if self.transcode or self.socket: - logger.debug("Closing transcode process before reconnect") - self._close_socket() - else: - logger.debug("Closing HTTP connection before reconnect") - self._close_connection() + # Set a flag to prevent concurrent operations + if hasattr(self, 'reconnecting') and self.reconnecting: + logger.info("Reconnect already in progress, skipping") + return False - # Wait for all processes to fully close before attempting reconnect - if not self._wait_for_existing_processes_to_close(): - logger.warning(f"Some processes may still be running during reconnect for channel {self.channel_id}") + self.reconnecting = True - self.connected = False - - # Attempt to establish a new connection using the same URL - connection_result = False try: + # Close existing connection and wait for it to fully terminate + if self.transcode or 
self.socket: + logger.debug("Closing transcode process before reconnect") + self._close_socket() + else: + logger.debug("Closing HTTP connection before reconnect") + self._close_connection() + + # Wait for all processes to fully close before attempting reconnect + if not self._wait_for_existing_processes_to_close(): + logger.warning(f"Some processes may still be running during reconnect for channel {self.channel_id}") + + self.connected = False + + # Attempt to establish a new connection using the same URL + connection_result = False if self.transcode: connection_result = self._establish_transcode_connection() else: connection_result = self._establish_http_connection() if connection_result: - # Store connection start time to measure stability self.connection_start_time = time.time() logger.info(f"Reconnect successful for channel {self.channel_id}") return True else: logger.warning(f"Reconnect failed for channel {self.channel_id}") return False - except Exception as e: - logger.error(f"Error during reconnect: {e}", exc_info=True) - return False + + finally: + self.reconnecting = False except Exception as e: logger.error(f"Error in reconnect attempt: {e}", exc_info=True) + self.reconnecting = False return False def _attempt_health_recovery(self): From 374aa82e22e91f837ba492b4496a95d19ffbc375 Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Mon, 7 Jul 2025 17:08:53 -0500 Subject: [PATCH 0576/1435] Refactor editChannel to use selectedTableIds directly from the table state and remove unused selection clearing effects. --- frontend/src/components/tables/ChannelsTable.jsx | 14 +------------- 1 file changed, 1 insertion(+), 13 deletions(-) diff --git a/frontend/src/components/tables/ChannelsTable.jsx b/frontend/src/components/tables/ChannelsTable.jsx index 8c19671a..7a9d5007 100644 --- a/frontend/src/components/tables/ChannelsTable.jsx +++ b/frontend/src/components/tables/ChannelsTable.jsx @@ -376,7 +376,7 @@ const ChannelsTable = ({ }) => { const editChannel = async (ch = null) => { // Use table's selected state instead of store state to avoid stale selections - const currentSelection = table ? table.getState().selectedTableIds : []; + const currentSelection = table ? table.selectedTableIds : []; console.log('editChannel called with:', { ch, currentSelection, tableExists: !!table }); if (currentSelection.length > 1) { @@ -634,18 +634,6 @@ const ChannelsTable = ({ }) => { setPaginationString(`${startItem} to ${endItem} of ${totalCount}`); }, [pagination.pageIndex, pagination.pageSize, totalCount]); - // Clear selection when data changes (e.g., when navigating back to the page) - useEffect(() => { - setSelectedChannelIds([]); - }, [data, setSelectedChannelIds]); - - // Clear selection when component unmounts - useEffect(() => { - return () => { - setSelectedChannelIds([]); - }; - }, [setSelectedChannelIds]); - const columns = useMemo( () => [ { From d6605e71193ad9728a81776ae90738d63294f949 Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Tue, 8 Jul 2025 15:57:11 -0500 Subject: [PATCH 0577/1435] Add timeout for chunks. 
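The patch below guards both read paths: settimeout() covers plain sockets, while the pipe-like stdout of a transcode process has no timeout of its own, so it is wrapped in select(). A minimal illustration of the select() half on a child's stdout (POSIX; the cat command is just a stand-in):

    import select
    import subprocess

    proc = subprocess.Popen(["cat"], stdin=subprocess.PIPE, stdout=subprocess.PIPE)
    ready, _, _ = select.select([proc.stdout], [], [], 5.0)  # wait up to 5 seconds
    if ready:
        chunk = proc.stdout.read1(8192)  # data is available; this read won't hang
    else:
        print("chunk read timed out")  # treated as a failed fetch upstream
    proc.kill()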
--- apps/proxy/config.py | 4 +++ apps/proxy/ts_proxy/stream_manager.py | 39 +++++++++++++++++++-------- 2 files changed, 32 insertions(+), 11 deletions(-) diff --git a/apps/proxy/config.py b/apps/proxy/config.py index ca246b78..3e6eb42f 100644 --- a/apps/proxy/config.py +++ b/apps/proxy/config.py @@ -57,6 +57,8 @@ class TSConfig(BaseConfig): INITIAL_BEHIND_CHUNKS = 4 # How many chunks behind to start a client (4 chunks = ~1MB) CHUNK_BATCH_SIZE = 5 # How many chunks to fetch in one batch KEEPALIVE_INTERVAL = 0.5 # Seconds between keepalive packets when at buffer head + # Chunk read timeout + CHUNK_TIMEOUT = 10 # Seconds to wait for each chunk read # Streaming settings TARGET_BITRATE = 8000000 # Target bitrate (8 Mbps) @@ -80,6 +82,8 @@ class TSConfig(BaseConfig): FAILOVER_GRACE_PERIOD = 20 # Extra time (seconds) to allow for stream switching before disconnecting clients URL_SWITCH_TIMEOUT = 20 # Max time allowed for a stream switch operation + + # Database-dependent settings with fallbacks @classmethod def get_channel_shutdown_delay(cls): diff --git a/apps/proxy/ts_proxy/stream_manager.py b/apps/proxy/ts_proxy/stream_manager.py index 77523e87..0a5ab4c4 100644 --- a/apps/proxy/ts_proxy/stream_manager.py +++ b/apps/proxy/ts_proxy/stream_manager.py @@ -1232,22 +1232,38 @@ class StreamManager: self._buffer_check_timers = [] def fetch_chunk(self): - """Fetch data from socket with direct pass-through to buffer""" + """Fetch data from socket with timeout handling""" if not self.connected or not self.socket: return False try: - # Read data chunk - no need to align with TS packet size anymore - try: - # Try to read data chunk - if hasattr(self.socket, 'recv'): - chunk = self.socket.recv(Config.CHUNK_SIZE) # Standard socket - else: - chunk = self.socket.read(Config.CHUNK_SIZE) # SocketIO object + # Set timeout for chunk reads + chunk_timeout = ConfigHelper.get('CHUNK_TIMEOUT', 10) # Default 10 seconds - except AttributeError: - # Fall back to read() if recv() isn't available - chunk = self.socket.read(Config.CHUNK_SIZE) + try: + # Handle different socket types with timeout + if hasattr(self.socket, 'recv'): + # Standard socket - set timeout + original_timeout = self.socket.gettimeout() + self.socket.settimeout(chunk_timeout) + chunk = self.socket.recv(Config.CHUNK_SIZE) + self.socket.settimeout(original_timeout) # Restore original timeout + else: + # SocketIO object (transcode process stdout) - use select for timeout + import select + ready, _, _ = select.select([self.socket], [], [], chunk_timeout) + + if not ready: + # Timeout occurred + logger.warning(f"Chunk read timeout ({chunk_timeout}s) for channel {self.channel_id}") + return False + + chunk = self.socket.read(Config.CHUNK_SIZE) + + except socket.timeout: + # Socket timeout occurred + logger.warning(f"Socket timeout ({chunk_timeout}s) for channel {self.channel_id}") + return False if not chunk: # Connection closed by server @@ -1262,6 +1278,7 @@ class StreamManager: # Add directly to buffer without TS-specific processing success = self.buffer.add_chunk(chunk) + # Update last data timestamp in Redis if successful if success and hasattr(self.buffer, 'redis_client') and self.buffer.redis_client: last_data_key = RedisKeys.last_data(self.buffer.channel_id) From 2284d47f9fdb2216412a2e03549484961a430a2a Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Tue, 8 Jul 2025 17:10:55 -0500 Subject: [PATCH 0578/1435] If provider is slow but responsive, don't get locked up. 
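The fix that follows is cooperative cancellation: a provider that keeps trickling bytes never trips the inactivity timeout, so each read loop must also test needs_stream_switch to have an exit path. In sketch form (the class scaffolding is illustrative; the flag names mirror the patch):

    class StreamPumpSketch:
        running = True
        connected = True
        needs_stream_switch = False

        def fetch_chunk(self):
            ...  # read one chunk; returns False on timeout or EOF

        def pump(self):
            # Re-check the flag on every iteration: with a slow-but-alive
            # upstream, reads keep succeeding and the timeout alone never fires.
            while self.running and self.connected and not self.needs_stream_switch:
                if not self.fetch_chunk():
                    break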
--- apps/proxy/config.py | 2 +- apps/proxy/ts_proxy/stream_manager.py | 10 +++++++--- 2 files changed, 8 insertions(+), 4 deletions(-) diff --git a/apps/proxy/config.py b/apps/proxy/config.py index 3e6eb42f..9ce5b66c 100644 --- a/apps/proxy/config.py +++ b/apps/proxy/config.py @@ -58,7 +58,7 @@ class TSConfig(BaseConfig): CHUNK_BATCH_SIZE = 5 # How many chunks to fetch in one batch KEEPALIVE_INTERVAL = 0.5 # Seconds between keepalive packets when at buffer head # Chunk read timeout - CHUNK_TIMEOUT = 10 # Seconds to wait for each chunk read + CHUNK_TIMEOUT = 5 # Seconds to wait for each chunk read # Streaming settings TARGET_BITRATE = 8000000 # Target bitrate (8 Mbps) diff --git a/apps/proxy/ts_proxy/stream_manager.py b/apps/proxy/ts_proxy/stream_manager.py index 0a5ab4c4..bce385b7 100644 --- a/apps/proxy/ts_proxy/stream_manager.py +++ b/apps/proxy/ts_proxy/stream_manager.py @@ -236,7 +236,7 @@ class StreamManager: gevent.sleep(0.1) # REPLACE time.sleep(0.1) continue # Connection retry loop for current URL - while self.running and self.retry_count < self.max_retries and not url_failed: + while self.running and self.retry_count < self.max_retries and not url_failed and not self.needs_stream_switch: logger.info(f"Connection attempt {self.retry_count + 1}/{self.max_retries} for URL: {self.url}") @@ -260,6 +260,10 @@ class StreamManager: # This indicates we had a stable connection for a while before failing connection_duration = time.time() - connection_start_time stable_connection_threshold = 30 # 30 seconds threshold + + if self.needs_stream_switch: + logger.info(f"Stream needs to switch after {connection_duration:.1f} seconds") + break # Exit to switch streams if connection_duration > stable_connection_threshold: logger.info(f"Stream was stable for {connection_duration:.1f} seconds, resetting switch attempts counter") stream_switch_attempts = 0 @@ -810,7 +814,7 @@ class StreamManager: try: if self.transcode: # Handle transcoded stream data - while self.running and self.connected and not self.stop_requested: + while self.running and self.connected and not self.stop_requested and not self.needs_stream_switch: if self.fetch_chunk(): self.last_data_time = time.time() else: @@ -823,7 +827,7 @@ class StreamManager: try: for chunk in self.current_response.iter_content(chunk_size=self.chunk_size): # Check if we've been asked to stop - if self.stop_requested or self.url_switching: + if self.stop_requested or self.url_switching or self.needs_stream_switch: break if chunk: From 9f8a2db500ea514172d766779cfdbc5e880972b9 Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Wed, 9 Jul 2025 16:44:00 -0500 Subject: [PATCH 0579/1435] Include channel ID in more logs. 
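The diff below threads the channel ID into each message by hand via f-strings. For comparison only (this is not what the patch does), the stdlib's logging.LoggerAdapter can stamp that kind of context automatically:

    import logging

    logging.basicConfig(
        format="%(levelname)s [channel %(channel_id)s] %(message)s",
        level=logging.INFO,
    )

    log = logging.LoggerAdapter(logging.getLogger("ts_proxy"), {"channel_id": 42})
    log.info("Starting stream")  # -> INFO [channel 42] Starting stream

The hand-inlined form used here keeps each call site self-contained, at the cost of some repetition.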
--- apps/proxy/ts_proxy/stream_manager.py | 148 +++++++++++++------------- 1 file changed, 74 insertions(+), 74 deletions(-) diff --git a/apps/proxy/ts_proxy/stream_manager.py b/apps/proxy/ts_proxy/stream_manager.py index bce385b7..f7c538c2 100644 --- a/apps/proxy/ts_proxy/stream_manager.py +++ b/apps/proxy/ts_proxy/stream_manager.py @@ -182,7 +182,7 @@ class StreamManager: health_thread = threading.Thread(target=self._monitor_health, daemon=True) health_thread.start() - logger.info(f"Starting stream for URL: {self.url}") + logger.info(f"Starting stream for URL: {self.url} for channel {self.channel_id}") # Main stream switching loop - we'll try different streams if needed while self.running and stream_switch_attempts <= max_stream_switches: @@ -200,10 +200,10 @@ class StreamManager: # Attempt reconnect without changing streams if self._attempt_reconnect(): - logger.info(f"Health-requested reconnect successful") + logger.info(f"Health-requested reconnect successful for channel {self.channel_id}") continue # Go back to main loop else: - logger.warning(f"Health-requested reconnect failed, will try stream switch") + logger.warning(f"Health-requested reconnect failed, will try stream switch for channel {self.channel_id}") self.needs_stream_switch = True if hasattr(self, 'needs_stream_switch') and self.needs_stream_switch and not self.url_switching: @@ -211,19 +211,19 @@ class StreamManager: self.needs_stream_switch = False if self._try_next_stream(): - logger.info(f"Health-requested stream switch successful") + logger.info(f"Health-requested stream switch successful for channel {self.channel_id}") stream_switch_attempts += 1 self.retry_count = 0 # Reset retries for new stream continue # Go back to main loop with new stream else: - logger.error(f"Health-requested stream switch failed") + logger.error(f"Health-requested stream switch failed for channel {self.channel_id}") # Continue with normal flow # Check stream type before connecting stream_type = detect_stream_type(self.url) if self.transcode == False and stream_type == StreamType.HLS: - logger.info(f"Detected HLS stream: {self.url}") - logger.info(f"HLS streams will be handled with FFmpeg for now - future version will support HLS natively") + logger.info(f"Detected HLS stream: {self.url} for channel {self.channel_id}") + logger.info(f"HLS streams will be handled with FFmpeg for now - future version will support HLS natively for channel {self.channel_id}") # Enable transcoding for HLS streams self.transcode = True # We'll override the stream profile selection with ffmpeg in the transcoding section @@ -232,13 +232,13 @@ class StreamManager: self.retry_count = 0 url_failed = False if self.url_switching: - logger.debug("Skipping connection attempt during URL switch") + logger.debug(f"Skipping connection attempt during URL switch for channel {self.channel_id}") gevent.sleep(0.1) # REPLACE time.sleep(0.1) continue # Connection retry loop for current URL while self.running and self.retry_count < self.max_retries and not url_failed and not self.needs_stream_switch: - logger.info(f"Connection attempt {self.retry_count + 1}/{self.max_retries} for URL: {self.url}") + logger.info(f"Connection attempt {self.retry_count + 1}/{self.max_retries} for URL: {self.url} for channel {self.channel_id}") # Handle connection based on whether we transcode or not connection_result = False @@ -262,10 +262,10 @@ class StreamManager: stable_connection_threshold = 30 # 30 seconds threshold if self.needs_stream_switch: - logger.info(f"Stream needs to switch after 
{connection_duration:.1f} seconds") + logger.info(f"Stream needs to switch after {connection_duration:.1f} seconds for channel: {self.channel_id}") break # Exit to switch streams if connection_duration > stable_connection_threshold: - logger.info(f"Stream was stable for {connection_duration:.1f} seconds, resetting switch attempts counter") + logger.info(f"Stream was stable for {connection_duration:.1f} seconds, resetting switch attempts counter for channel: {self.channel_id}") stream_switch_attempts = 0 # Connection failed or ended - decide what to do next @@ -280,15 +280,15 @@ class StreamManager: # If we've reached max retries, mark this URL as failed if self.retry_count >= self.max_retries: url_failed = True - logger.warning(f"Maximum retry attempts ({self.max_retries}) reached for URL: {self.url}") + logger.warning(f"Maximum retry attempts ({self.max_retries}) reached for URL: {self.url} for channel: {self.channel_id}") else: # Wait with exponential backoff before retrying timeout = min(.25 * self.retry_count, 3) # Cap at 3 seconds - logger.info(f"Reconnecting in {timeout} seconds... (attempt {self.retry_count}/{self.max_retries})") + logger.info(f"Reconnecting in {timeout} seconds... (attempt {self.retry_count}/{self.max_retries}) for channel: {self.channel_id}") gevent.sleep(timeout) # REPLACE time.sleep(timeout) except Exception as e: - logger.error(f"Connection error: {e}", exc_info=True) + logger.error(f"Connection error on channel: {self.channel_id}: {e}", exc_info=True) self.retry_count += 1 self.connected = False @@ -297,25 +297,25 @@ class StreamManager: else: # Wait with exponential backoff before retrying timeout = min(.25 * self.retry_count, 3) # Cap at 3 seconds - logger.info(f"Reconnecting in {timeout} seconds after error... (attempt {self.retry_count}/{self.max_retries})") + logger.info(f"Reconnecting in {timeout} seconds after error... 
(attempt {self.retry_count}/{self.max_retries}) for channel: {self.channel_id}") gevent.sleep(timeout) # REPLACE time.sleep(timeout) # If URL failed and we're still running, try switching to another stream if url_failed and self.running: - logger.info(f"URL {self.url} failed after {self.retry_count} attempts, trying next stream") + logger.info(f"URL {self.url} failed after {self.retry_count} attempts, trying next stream for channel: {self.channel_id}") # Try to switch to next stream switch_result = self._try_next_stream() if switch_result: # Successfully switched to a new stream, continue with the new URL stream_switch_attempts += 1 - logger.info(f"Successfully switched to new URL: {self.url} (switch attempt {stream_switch_attempts}/{max_stream_switches})") + logger.info(f"Successfully switched to new URL: {self.url} (switch attempt {stream_switch_attempts}/{max_stream_switches}) for channel: {self.channel_id}") # Reset retry count for the new stream - important for the loop to work correctly self.retry_count = 0 # Continue outer loop with new URL - DON'T add a break statement here else: # No more streams to try - logger.error(f"Failed to find alternative streams after {stream_switch_attempts} attempts") + logger.error(f"Failed to find alternative streams after {stream_switch_attempts} attempts for channel: {self.channel_id}") break elif not self.running: # Normal shutdown was requested @@ -339,7 +339,7 @@ class StreamManager: # Make sure transcode process is terminated if self.transcode_process_active: - logger.info("Ensuring transcode process is terminated in finally block") + logger.info(f"Ensuring transcode process is terminated in finally block for channel: {self.channel_id}") self._close_socket() # Close all connections @@ -376,7 +376,7 @@ class StreamManager: stop_key = RedisKeys.channel_stopping(self.channel_id) self.buffer.redis_client.setex(stop_key, 60, "true") except Exception as e: - logger.error(f"Failed to update channel state in Redis: {e}") + logger.error(f"Failed to update channel state in Redis: {e} for channel {self.channel_id}", exc_info=True) logger.info(f"Stream manager stopped for channel {self.channel_id}") @@ -411,13 +411,13 @@ class StreamManager: except StreamProfile.DoesNotExist: # Fall back to channel's profile if FFmpeg not found stream_profile = channel.get_stream_profile() - logger.warning("FFmpeg profile not found, using channel default profile") + logger.warning(f"FFmpeg profile not found, using channel default profile for channel: {self.channel_id}") else: stream_profile = channel.get_stream_profile() # Build and start transcode command self.transcode_cmd = stream_profile.build_command(self.url, self.user_agent) - logger.debug(f"Starting transcode process: {self.transcode_cmd}") + logger.debug(f"Starting transcode process: {self.transcode_cmd} for channel: {self.channel_id}") # Modified to capture stderr instead of discarding it self.transcode_process = subprocess.Popen( @@ -444,7 +444,7 @@ class StreamManager: return True except Exception as e: - logger.error(f"Error establishing transcode connection: {e}", exc_info=True) + logger.error(f"Error establishing transcode connection for channel: {self.channel_id}: {e}", exc_info=True) self._close_socket() return False @@ -593,25 +593,25 @@ class StreamManager: # Determine log level based on content if any(keyword in content_lower for keyword in ['error', 'failed', 'cannot', 'invalid', 'corrupt']): - logger.error(f"FFmpeg stderr: {content}") + logger.error(f"FFmpeg stderr for channel {self.channel_id}: 
{content}") elif any(keyword in content_lower for keyword in ['warning', 'deprecated', 'ignoring']): - logger.warning(f"FFmpeg stderr: {content}") + logger.warning(f"FFmpeg stderr for channel {self.channel_id}: {content}") elif content.startswith('frame=') or 'fps=' in content or 'speed=' in content: # Stats lines - log at trace level to avoid spam - logger.trace(f"FFmpeg stats: {content}") + logger.trace(f"FFmpeg stats for channel {self.channel_id}: {content}") elif any(keyword in content_lower for keyword in ['input', 'output', 'stream', 'video', 'audio']): # Stream info - log at info level - logger.info(f"FFmpeg info: {content}") + logger.info(f"FFmpeg info for channel {self.channel_id}: {content}") if content.startswith('Input #0'): # If it's input 0, parse stream info from .services.channel_service import ChannelService ChannelService.parse_and_store_stream_info(self.channel_id, content, "input") else: # Everything else at debug level - logger.debug(f"FFmpeg stderr: {content}") + logger.debug(f"FFmpeg stderr for channel {self.channel_id}: {content}") except Exception as e: - logger.error(f"Error logging stderr content: {e}") + logger.error(f"Error logging stderr content for channel {self.channel_id}: {e}") def _parse_ffmpeg_stats(self, stats_line): """Parse FFmpeg stats line and extract speed, fps, and bitrate""" @@ -653,7 +653,7 @@ class StreamManager: actual_fps_str = f"{actual_fps:.1f}" if actual_fps is not None else "N/A" ffmpeg_output_bitrate_str = f"{ffmpeg_output_bitrate:.1f}" if ffmpeg_output_bitrate is not None else "N/A" # Log the stats - logger.debug(f"FFmpeg stats - Speed: {ffmpeg_speed}x, FFmpeg FPS: {ffmpeg_fps}, " + logger.debug(f"FFmpeg stats for channel {self.channel_id}: - Speed: {ffmpeg_speed}x, FFmpeg FPS: {ffmpeg_fps}, " f"Actual FPS: {actual_fps_str}, " f"Output Bitrate: {ffmpeg_output_bitrate_str} kbps") # If we have a valid speed, check for buffering @@ -763,7 +763,7 @@ class StreamManager: if response.status_code == 200: self.connected = True self.healthy = True - logger.info(f"Successfully connected to stream source") + logger.info(f"Successfully connected to stream source for channel {self.channel_id}") # Store connection start time for stability tracking self.connection_start_time = time.time() @@ -773,7 +773,7 @@ class StreamManager: return True else: - logger.error(f"Failed to connect to stream: HTTP {response.status_code}") + logger.error(f"Failed to connect to stream for channel {self.channel_id}: HTTP {response.status_code}") self._close_connection() return False except requests.exceptions.RequestException as e: @@ -781,7 +781,7 @@ class StreamManager: self._close_connection() return False except Exception as e: - logger.error(f"Error establishing HTTP connection: {e}", exc_info=True) + logger.error(f"Error establishing HTTP connection for channel {self.channel_id}: {e}", exc_info=True) self._close_connection() return False @@ -848,12 +848,12 @@ class StreamManager: self.buffer.redis_client.set(last_data_key, str(time.time()), ex=60) except (AttributeError, ConnectionError) as e: if self.stop_requested or self.url_switching: - logger.debug(f"Expected connection error during shutdown/URL switch: {e}") + logger.debug(f"Expected connection error during shutdown/URL switch for channel {self.channel_id}: {e}") else: - logger.error(f"Unexpected stream error: {e}") + logger.error(f"Unexpected stream error for channel {self.channel_id}: {e}") raise except Exception as e: - logger.error(f"Error processing stream data: {e}", exc_info=True) + logger.error(f"Error 
processing stream data for channel {self.channel_id}: {e}", exc_info=True) # If we exit the loop, connection is closed or failed self.connected = False @@ -864,19 +864,19 @@ class StreamManager: try: self._close_socket() except Exception as e: - logger.debug(f"Error closing socket: {e}") + logger.debug(f"Error closing socket for channel {self.channel_id}: {e}") if self.current_response: try: self.current_response.close() except Exception as e: - logger.debug(f"Error closing response: {e}") + logger.debug(f"Error closing response for channel {self.channel_id}: {e}") if self.current_session: try: self.current_session.close() except Exception as e: - logger.debug(f"Error closing session: {e}") + logger.debug(f"Error closing session for channel {self.channel_id}: {e}") # Clear references self.socket = None @@ -903,7 +903,7 @@ class StreamManager: if timer and timer.is_alive(): timer.cancel() except Exception as e: - logger.error(f"Error canceling buffer check timer: {e}") + logger.error(f"Error canceling buffer check timer for channel {self.channel_id}: {e}") self._buffer_check_timers.clear() @@ -936,7 +936,7 @@ class StreamManager: logger.info(f"URL unchanged: {new_url}") return False - logger.info(f"Switching stream URL from {self.url} to {new_url}") + logger.info(f"Switching stream URL from {self.url} to {new_url} for channel {self.channel_id}") # Import both models for proper resource management from apps.channels.models import Stream, Channel @@ -967,10 +967,10 @@ class StreamManager: try: # Check which type of connection we're using and close it properly if self.transcode or self.socket: - logger.debug("Closing transcode process before URL change") + logger.debug(f"Closing transcode process before URL change for channel {self.channel_id}") self._close_socket() else: - logger.debug("Closing HTTP connection before URL change") + logger.debug(f"Closing HTTP connection before URL change for channel {self.channel_id}") self._close_connection() # Update URL and reset connection state @@ -984,7 +984,7 @@ class StreamManager: self.current_stream_id = stream_id # Add stream ID to tried streams for proper tracking self.tried_stream_ids.add(stream_id) - logger.info(f"Updated stream ID from {old_stream_id} to {stream_id} for channel {self.buffer.channel_id}") + logger.info(f"Updated stream ID from {old_stream_id} to {stream_id} for channel {self.channel_id}") # Reset retry counter to allow immediate reconnect self.retry_count = 0 @@ -999,12 +999,12 @@ class StreamManager: return True except Exception as e: - logger.error(f"Error during URL update: {e}", exc_info=True) + logger.error(f"Error during URL update for channel {self.channel_id}: {e}", exc_info=True) return False finally: # CRITICAL FIX: Always reset the URL switching flag when done, whether successful or not self.url_switching = False - logger.info(f"Stream switch completed for channel {self.buffer.channel_id}") + logger.info(f"Stream switch completed for channel {self.channel_id}") def should_retry(self) -> bool: """Check if connection retry is allowed""" @@ -1029,7 +1029,7 @@ class StreamManager: if inactivity_duration > timeout_threshold and self.connected: if self.healthy: - logger.warning(f"Stream unhealthy - no data for {inactivity_duration:.1f}s") + logger.warning(f"Stream unhealthy for channel {self.channel_id} - no data for {inactivity_duration:.1f}s") self.healthy = False consecutive_unhealthy_checks += 1 @@ -1044,13 +1044,13 @@ class StreamManager: if stable_time >= 30: # Stream was stable, try reconnect first if not 
self.needs_reconnect: - logger.info(f"Setting reconnect flag for stable stream (stable for {stable_time:.1f}s)") + logger.info(f"Setting reconnect flag for stable stream (stable for {stable_time:.1f}s) for channel {self.channel_id}") self.needs_reconnect = True self.last_health_action_time = now else: # Stream wasn't stable, suggest stream switch if not self.needs_stream_switch: - logger.info(f"Setting stream switch flag for unstable stream (stable for {stable_time:.1f}s)") + logger.info(f"Setting stream switch flag for unstable stream (stable for {stable_time:.1f}s) for channel {self.channel_id}") self.needs_stream_switch = True self.last_health_action_time = now @@ -1058,7 +1058,7 @@ class StreamManager: elif self.connected and not self.healthy: # Auto-recover health when data resumes - logger.info(f"Stream health restored") + logger.info(f"Stream health restored for channel {self.channel_id} - data resumed after {inactivity_duration:.1f}s") self.healthy = True consecutive_unhealthy_checks = 0 # Clear recovery flags when healthy again @@ -1080,12 +1080,12 @@ class StreamManager: # Don't try to reconnect if we're already switching URLs if self.url_switching: - logger.info("URL switching already in progress, skipping reconnect") + logger.info(f"URL switching already in progress, skipping reconnect for channel {self.channel_id}") return False # Set a flag to prevent concurrent operations if hasattr(self, 'reconnecting') and self.reconnecting: - logger.info("Reconnect already in progress, skipping") + logger.info(f"Reconnect already in progress, skipping for channel {self.channel_id}") return False self.reconnecting = True @@ -1093,10 +1093,10 @@ class StreamManager: try: # Close existing connection and wait for it to fully terminate if self.transcode or self.socket: - logger.debug("Closing transcode process before reconnect") + logger.debug(f"Closing transcode process before reconnect for channel {self.channel_id}") self._close_socket() else: - logger.debug("Closing HTTP connection before reconnect") + logger.debug(f"Closing HTTP connection before reconnect for channel {self.channel_id}") self._close_connection() # Wait for all processes to fully close before attempting reconnect @@ -1124,7 +1124,7 @@ class StreamManager: self.reconnecting = False except Exception as e: - logger.error(f"Error in reconnect attempt: {e}", exc_info=True) + logger.error(f"Error in reconnect attempt for channel {self.channel_id}: {e}", exc_info=True) self.reconnecting = False return False @@ -1135,7 +1135,7 @@ class StreamManager: # Don't try to switch if we're already in the process of switching URLs if self.url_switching: - logger.info("URL switching already in progress, skipping health recovery") + logger.info(f"URL switching already in progress, skipping health recovery for channel {self.channel_id}") return # Try to switch to next stream @@ -1148,7 +1148,7 @@ class StreamManager: return False except Exception as e: - logger.error(f"Error in health recovery attempt: {e}", exc_info=True) + logger.error(f"Error in health recovery attempt for channel {self.channel_id}: {e}", exc_info=True) return False def _close_connection(self): @@ -1158,7 +1158,7 @@ class StreamManager: try: self.current_response.close() except Exception as e: - logger.debug(f"Error closing response: {e}") + logger.debug(f"Error closing response for channel {self.channel_id}: {e}") self.current_response = None # Close session if it exists @@ -1166,7 +1166,7 @@ class StreamManager: try: self.current_session.close() except Exception as e: - 
logger.debug(f"Error closing session: {e}") + logger.debug(f"Error closing session for channel {self.channel_id}: {e}") self.current_session = None def _close_socket(self): @@ -1180,7 +1180,7 @@ class StreamManager: try: self.socket.close() except Exception as e: - logger.debug(f"Error closing socket: {e}") + logger.debug(f"Error closing socket for channel {self.channel_id}: {e}") pass # Enhanced transcode process cleanup with more aggressive termination @@ -1195,21 +1195,21 @@ class StreamManager: self.transcode_process.wait(timeout=1.0) except subprocess.TimeoutExpired: # If it doesn't terminate quickly, kill it - logger.warning(f"Transcode process didn't terminate within timeout, killing forcefully") + logger.warning(f"Transcode process didn't terminate within timeout, killing forcefully for channel {self.channel_id}") self.transcode_process.kill() try: self.transcode_process.wait(timeout=1.0) except subprocess.TimeoutExpired: - logger.error(f"Failed to kill transcode process even with force") + logger.error(f"Failed to kill transcode process even with force for channel {self.channel_id}") except Exception as e: - logger.debug(f"Error terminating transcode process: {e}") + logger.debug(f"Error terminating transcode process for channel {self.channel_id}: {e}") # Final attempt: try to kill directly try: self.transcode_process.kill() except Exception as e: - logger.error(f"Final kill attempt failed: {e}") + logger.error(f"Final kill attempt failed for channel {self.channel_id}: {e}") self.transcode_process = None self.transcode_process_active = False # Reset the flag @@ -1221,7 +1221,7 @@ class StreamManager: self.buffer.redis_client.delete(transcode_key) logger.debug(f"Cleared transcode active flag for channel {self.channel_id}") except Exception as e: - logger.debug(f"Error clearing transcode flag: {e}") + logger.debug(f"Error clearing transcode flag for channel {self.channel_id}: {e}") self.socket = None self.connected = False # Cancel any remaining buffer check timers @@ -1231,7 +1231,7 @@ class StreamManager: timer.cancel() logger.debug(f"Cancelled buffer check timer during socket close for channel {self.channel_id}") except Exception as e: - logger.debug(f"Error canceling timer during socket close: {e}") + logger.debug(f"Error canceling timer during socket close for channel {self.channel_id}: {e}") self._buffer_check_timers = [] @@ -1259,19 +1259,19 @@ class StreamManager: if not ready: # Timeout occurred - logger.warning(f"Chunk read timeout ({chunk_timeout}s) for channel {self.channel_id}") + logger.debug(f"Chunk read timeout ({chunk_timeout}s) for channel {self.channel_id}") return False chunk = self.socket.read(Config.CHUNK_SIZE) except socket.timeout: # Socket timeout occurred - logger.warning(f"Socket timeout ({chunk_timeout}s) for channel {self.channel_id}") + logger.debug(f"Socket timeout ({chunk_timeout}s) for channel {self.channel_id}") return False if not chunk: # Connection closed by server - logger.warning("Server closed connection") + logger.warning(f"Server closed connection for channel {self.channel_id}") self._close_socket() self.connected = False return False @@ -1360,7 +1360,7 @@ class StreamManager: else: logger.debug(f"Not changing state: channel {channel_id} already in {current_state} state") except Exception as e: - logger.error(f"Error setting waiting for clients state: {e}") + logger.error(f"Error setting waiting for clients state for channel {channel_id}: {e}") def _check_buffer_and_set_state(self): """Check buffer size and set state to waiting_for_clients 
when ready""" @@ -1395,7 +1395,7 @@ class StreamManager: return True # Return value to indicate check was successful except Exception as e: - logger.error(f"Error in buffer check: {e}") + logger.error(f"Error in buffer check for channel {self.channel_id}: {e}") return False def _try_next_stream(self): @@ -1439,7 +1439,7 @@ class StreamManager: stream_info = get_stream_info_for_switch(self.channel_id, stream_id) if 'error' in stream_info or not stream_info.get('url'): - logger.error(f"Error getting info for stream {stream_id}: {stream_info.get('error', 'No URL')}") + logger.error(f"Error getting info for stream {stream_id} for channel {self.channel_id}: {stream_info.get('error', 'No URL')}") return False # Update URL and user agent @@ -1452,7 +1452,7 @@ class StreamManager: # IMPORTANT: Just update the URL, don't stop the channel or release resources switch_result = self.update_url(new_url, stream_id, profile_id) if not switch_result: - logger.error(f"Failed to update URL for stream ID {stream_id}") + logger.error(f"Failed to update URL for stream ID {stream_id} for channel {self.channel_id}") return False # Update stream ID tracking @@ -1478,7 +1478,7 @@ class StreamManager: # Log the switch logger.info(f"Stream metadata updated for channel {self.channel_id} to stream ID {stream_id} with M3U profile {profile_id}") - logger.info(f"Successfully switched to stream ID {stream_id} with URL {new_url}") + logger.info(f"Successfully switched to stream ID {stream_id} with URL {new_url} for channel {self.channel_id}") return True except Exception as e: From 8b6acf23751aa0b2a0e02a4c6d34501d40d0677c Mon Sep 17 00:00:00 2001 From: GitHub Actions Date: Thu, 10 Jul 2025 15:41:58 +0000 Subject: [PATCH 0580/1435] Release v0.6.2 --- version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/version.py b/version.py index 2718738d..627b5289 100644 --- a/version.py +++ b/version.py @@ -1,5 +1,5 @@ """ Dispatcharr version information. """ -__version__ = '0.6.1' # Follow semantic versioning (MAJOR.MINOR.PATCH) +__version__ = '0.6.2' # Follow semantic versioning (MAJOR.MINOR.PATCH) __timestamp__ = None # Set during CI/CD build process From d24520d3d89dd7d2e4881740630b14b4fe0e0916 Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Thu, 10 Jul 2025 13:22:42 -0500 Subject: [PATCH 0581/1435] Enhance EPG XML generation with additional metadata extraction and improved handling for keywords, languages, ratings, and credits. 
--- apps/epg/tasks.py | 157 ++++++++++++++++++++++++++++++++++++++-- apps/output/views.py | 166 +++++++++++++++++++++++++++++++++++++------ 2 files changed, 293 insertions(+), 30 deletions(-) diff --git a/apps/epg/tasks.py b/apps/epg/tasks.py index d3062171..4fcf5706 100644 --- a/apps/epg/tasks.py +++ b/apps/epg/tasks.py @@ -1612,6 +1612,11 @@ def extract_custom_properties(prog): if categories: custom_props['categories'] = categories + # Extract keywords (new) + keywords = [kw.text.strip() for kw in prog.findall('keyword') if kw.text and kw.text.strip()] + if keywords: + custom_props['keywords'] = keywords + # Extract episode numbers for ep_num in prog.findall('episode-num'): system = ep_num.get('system', '') @@ -1637,6 +1642,9 @@ def extract_custom_properties(prog): elif system == 'dd_progid' and ep_num.text: # Store the dd_progid format custom_props['dd_progid'] = ep_num.text.strip() + # Add support for other systems like thetvdb.com, themoviedb.org, imdb.com + elif system in ['thetvdb.com', 'themoviedb.org', 'imdb.com'] and ep_num.text: + custom_props[f'{system}_id'] = ep_num.text.strip() # Extract ratings more efficiently rating_elem = prog.find('rating') @@ -1647,37 +1655,172 @@ def extract_custom_properties(prog): if rating_elem.get('system'): custom_props['rating_system'] = rating_elem.get('system') + # Extract star ratings (new) + star_ratings = [] + for star_rating in prog.findall('star-rating'): + value_elem = star_rating.find('value') + if value_elem is not None and value_elem.text: + rating_data = {'value': value_elem.text.strip()} + if star_rating.get('system'): + rating_data['system'] = star_rating.get('system') + star_ratings.append(rating_data) + if star_ratings: + custom_props['star_ratings'] = star_ratings + # Extract credits more efficiently credits_elem = prog.find('credits') if credits_elem is not None: credits = {} - for credit_type in ['director', 'actor', 'writer', 'presenter', 'producer']: - names = [e.text.strip() for e in credits_elem.findall(credit_type) if e.text and e.text.strip()] - if names: - credits[credit_type] = names + for credit_type in ['director', 'actor', 'writer', 'adapter', 'producer', 'composer', 'editor', 'presenter', 'commentator', 'guest']: + if credit_type == 'actor': + # Handle actors with roles and guest status + actors = [] + for actor_elem in credits_elem.findall('actor'): + if actor_elem.text and actor_elem.text.strip(): + actor_data = {'name': actor_elem.text.strip()} + if actor_elem.get('role'): + actor_data['role'] = actor_elem.get('role') + if actor_elem.get('guest') == 'yes': + actor_data['guest'] = True + actors.append(actor_data) + if actors: + credits['actor'] = actors + else: + names = [e.text.strip() for e in credits_elem.findall(credit_type) if e.text and e.text.strip()] + if names: + credits[credit_type] = names if credits: custom_props['credits'] = credits # Extract other common program metadata date_elem = prog.find('date') if date_elem is not None and date_elem.text: - custom_props['year'] = date_elem.text.strip()[:4] # Just the year part + custom_props['date'] = date_elem.text.strip() country_elem = prog.find('country') if country_elem is not None and country_elem.text: custom_props['country'] = country_elem.text.strip() + # Extract language information (new) + language_elem = prog.find('language') + if language_elem is not None and language_elem.text: + custom_props['language'] = language_elem.text.strip() + + orig_language_elem = prog.find('orig-language') + if orig_language_elem is not None and orig_language_elem.text: 
+ custom_props['original_language'] = orig_language_elem.text.strip() + + # Extract length (new) + length_elem = prog.find('length') + if length_elem is not None and length_elem.text: + try: + length_value = int(length_elem.text.strip()) + length_units = length_elem.get('units', 'minutes') + custom_props['length'] = {'value': length_value, 'units': length_units} + except ValueError: + pass + + # Extract video information (new) + video_elem = prog.find('video') + if video_elem is not None: + video_info = {} + for video_attr in ['present', 'colour', 'aspect', 'quality']: + attr_elem = video_elem.find(video_attr) + if attr_elem is not None and attr_elem.text: + video_info[video_attr] = attr_elem.text.strip() + if video_info: + custom_props['video'] = video_info + + # Extract audio information (new) + audio_elem = prog.find('audio') + if audio_elem is not None: + audio_info = {} + for audio_attr in ['present', 'stereo']: + attr_elem = audio_elem.find(audio_attr) + if attr_elem is not None and attr_elem.text: + audio_info[audio_attr] = attr_elem.text.strip() + if audio_info: + custom_props['audio'] = audio_info + + # Extract subtitles information (new) + subtitles = [] + for subtitle_elem in prog.findall('subtitles'): + subtitle_data = {} + if subtitle_elem.get('type'): + subtitle_data['type'] = subtitle_elem.get('type') + lang_elem = subtitle_elem.find('language') + if lang_elem is not None and lang_elem.text: + subtitle_data['language'] = lang_elem.text.strip() + if subtitle_data: + subtitles.append(subtitle_data) + + if subtitles: + custom_props['subtitles'] = subtitles + + # Extract reviews (new) + reviews = [] + for review_elem in prog.findall('review'): + if review_elem.text and review_elem.text.strip(): + review_data = {'content': review_elem.text.strip()} + if review_elem.get('type'): + review_data['type'] = review_elem.get('type') + if review_elem.get('source'): + review_data['source'] = review_elem.get('source') + if review_elem.get('reviewer'): + review_data['reviewer'] = review_elem.get('reviewer') + reviews.append(review_data) + if reviews: + custom_props['reviews'] = reviews + + # Extract images (new) + images = [] + for image_elem in prog.findall('image'): + if image_elem.text and image_elem.text.strip(): + image_data = {'url': image_elem.text.strip()} + for attr in ['type', 'size', 'orient', 'system']: + if image_elem.get(attr): + image_data[attr] = image_elem.get(attr) + images.append(image_data) + if images: + custom_props['images'] = images + icon_elem = prog.find('icon') if icon_elem is not None and icon_elem.get('src'): custom_props['icon'] = icon_elem.get('src') - # Simpler approach for boolean flags - for kw in ['previously-shown', 'premiere', 'new', 'live']: + # Simpler approach for boolean flags - expanded list + for kw in ['previously-shown', 'premiere', 'new', 'live', 'last-chance']: if prog.find(kw) is not None: custom_props[kw.replace('-', '_')] = True + # Extract premiere and last-chance text content if available + premiere_elem = prog.find('premiere') + if premiere_elem is not None: + custom_props['premiere'] = True + if premiere_elem.text and premiere_elem.text.strip(): + custom_props['premiere_text'] = premiere_elem.text.strip() + + last_chance_elem = prog.find('last-chance') + if last_chance_elem is not None: + custom_props['last_chance'] = True + if last_chance_elem.text and last_chance_elem.text.strip(): + custom_props['last_chance_text'] = last_chance_elem.text.strip() + + # Extract previously-shown details + prev_shown_elem = prog.find('previously-shown') + 
if prev_shown_elem is not None: + custom_props['previously_shown'] = True + prev_shown_data = {} + if prev_shown_elem.get('start'): + prev_shown_data['start'] = prev_shown_elem.get('start') + if prev_shown_elem.get('channel'): + prev_shown_data['channel'] = prev_shown_elem.get('channel') + if prev_shown_data: + custom_props['previously_shown_details'] = prev_shown_data + return custom_props + def clear_element(elem): """Clear an XML element and its parent to free memory.""" try: diff --git a/apps/output/views.py b/apps/output/views.py index 4ef9f4f2..67d72bd2 100644 --- a/apps/output/views.py +++ b/apps/output/views.py @@ -467,19 +467,27 @@ def generate_epg(request, profile_name=None, user=None): for category in custom_data["categories"]: program_xml.append(f" {html.escape(category)}") - # Handle episode numbering - multiple formats supported - # Standard episode number if available - if "episode" in custom_data: - program_xml.append(f' E{custom_data["episode"]}') + # Add keywords if available + if "keywords" in custom_data and custom_data["keywords"]: + for keyword in custom_data["keywords"]: + program_xml.append(f" {html.escape(keyword)}") - # Handle onscreen episode format (like S06E128) + # Handle episode numbering - multiple formats supported + # Prioritize onscreen_episode over standalone episode for onscreen system if "onscreen_episode" in custom_data: program_xml.append(f' {html.escape(custom_data["onscreen_episode"])}') + elif "episode" in custom_data: + program_xml.append(f' E{custom_data["episode"]}') # Handle dd_progid format if 'dd_progid' in custom_data: program_xml.append(f' {html.escape(custom_data["dd_progid"])}') + # Handle external database IDs + for system in ['thetvdb.com', 'themoviedb.org', 'imdb.com']: + if f'{system}_id' in custom_data: + program_xml.append(f' {html.escape(custom_data[f"{system}_id"])}') + # Add season and episode numbers in xmltv_ns format if available if "season" in custom_data and "episode" in custom_data: season = ( @@ -494,6 +502,46 @@ def generate_epg(request, profile_name=None, user=None): ) program_xml.append(f' {season}.{episode}.') + # Add language information + if "language" in custom_data: + program_xml.append(f' {html.escape(custom_data["language"])}') + + if "original_language" in custom_data: + program_xml.append(f' {html.escape(custom_data["original_language"])}') + + # Add length information + if "length" in custom_data and isinstance(custom_data["length"], dict): + length_value = custom_data["length"].get("value", "") + length_units = custom_data["length"].get("units", "minutes") + program_xml.append(f' {html.escape(str(length_value))}') + + # Add video information + if "video" in custom_data and isinstance(custom_data["video"], dict): + program_xml.append(" ") + + # Add audio information + if "audio" in custom_data and isinstance(custom_data["audio"], dict): + program_xml.append(" ") + + # Add subtitles information + if "subtitles" in custom_data and isinstance(custom_data["subtitles"], list): + for subtitle in custom_data["subtitles"]: + if isinstance(subtitle, dict): + subtitle_type = subtitle.get("type", "") + type_attr = f' type="{html.escape(subtitle_type)}"' if subtitle_type else "" + program_xml.append(f" ") + if "language" in subtitle: + program_xml.append(f" {html.escape(subtitle['language'])}") + program_xml.append(" ") + # Add rating if available if "rating" in custom_data: rating_system = custom_data.get("rating_system", "TV Parental Guidelines") @@ -501,20 +549,74 @@ def generate_epg(request, profile_name=None, 
user=None): program_xml.append(f' {html.escape(custom_data["rating"])}') program_xml.append(f" ") - # Add actors/directors/writers if available - if "credits" in custom_data: - program_xml.append(f" ") - for role, people in custom_data["credits"].items(): - if isinstance(people, list): - for person in people: - program_xml.append(f" <{role}>{html.escape(person)}") - else: - program_xml.append(f" <{role}>{html.escape(people)}") - program_xml.append(f" ") + # Add star ratings + if "star_ratings" in custom_data and isinstance(custom_data["star_ratings"], list): + for star_rating in custom_data["star_ratings"]: + if isinstance(star_rating, dict) and "value" in star_rating: + system_attr = f' system="{html.escape(star_rating["system"])}"' if "system" in star_rating else "" + program_xml.append(f" ") + program_xml.append(f" {html.escape(star_rating['value'])}") + program_xml.append(" ") - # Add program date/year if available - if "year" in custom_data: - program_xml.append(f' {html.escape(custom_data["year"])}') + # Add reviews + if "reviews" in custom_data and isinstance(custom_data["reviews"], list): + for review in custom_data["reviews"]: + if isinstance(review, dict) and "content" in review: + review_type = review.get("type", "text") + attrs = [f'type="{html.escape(review_type)}"'] + if "source" in review: + attrs.append(f'source="{html.escape(review["source"])}"') + if "reviewer" in review: + attrs.append(f'reviewer="{html.escape(review["reviewer"])}"') + attr_str = " ".join(attrs) + program_xml.append(f' {html.escape(review["content"])}') + + # Add images + if "images" in custom_data and isinstance(custom_data["images"], list): + for image in custom_data["images"]: + if isinstance(image, dict) and "url" in image: + attrs = [] + for attr in ['type', 'size', 'orient', 'system']: + if attr in image: + attrs.append(f'{attr}="{html.escape(image[attr])}"') + attr_str = " " + " ".join(attrs) if attrs else "" + program_xml.append(f' {html.escape(image["url"])}') + + # Add enhanced credits handling + if "credits" in custom_data: + program_xml.append(" ") + credits = custom_data["credits"] + + # Handle different credit types + for role in ['director', 'writer', 'adapter', 'producer', 'composer', 'editor', 'presenter', 'commentator', 'guest']: + if role in credits: + people = credits[role] + if isinstance(people, list): + for person in people: + program_xml.append(f" <{role}>{html.escape(person)}") + else: + program_xml.append(f" <{role}>{html.escape(people)}") + + # Handle actors separately to include role and guest attributes + if "actor" in credits: + actors = credits["actor"] + if isinstance(actors, list): + for actor in actors: + if isinstance(actor, dict): + name = actor.get("name", "") + role_attr = f' role="{html.escape(actor["role"])}"' if "role" in actor else "" + guest_attr = ' guest="yes"' if actor.get("guest") else "" + program_xml.append(f" {html.escape(name)}") + else: + program_xml.append(f" {html.escape(actor)}") + else: + program_xml.append(f" {html.escape(actors)}") + + program_xml.append(" ") + + # Add program date if available (full date, not just year) + if "date" in custom_data: + program_xml.append(f' {html.escape(custom_data["date"])}') # Add country if available if "country" in custom_data: @@ -524,18 +626,36 @@ def generate_epg(request, profile_name=None, user=None): if "icon" in custom_data: program_xml.append(f' ') - # Add special flags as proper tags + # Add special flags as proper tags with enhanced handling if custom_data.get("previously_shown", False): - 
program_xml.append(f" ") + prev_shown_details = custom_data.get("previously_shown_details", {}) + attrs = [] + if "start" in prev_shown_details: + attrs.append(f'start="{html.escape(prev_shown_details["start"])}"') + if "channel" in prev_shown_details: + attrs.append(f'channel="{html.escape(prev_shown_details["channel"])}"') + attr_str = " " + " ".join(attrs) if attrs else "" + program_xml.append(f" ") if custom_data.get("premiere", False): - program_xml.append(f" ") + premiere_text = custom_data.get("premiere_text", "") + if premiere_text: + program_xml.append(f" {html.escape(premiere_text)}") + else: + program_xml.append(" ") + + if custom_data.get("last_chance", False): + last_chance_text = custom_data.get("last_chance_text", "") + if last_chance_text: + program_xml.append(f" {html.escape(last_chance_text)}") + else: + program_xml.append(" ") if custom_data.get("new", False): - program_xml.append(f" ") + program_xml.append(" ") if custom_data.get('live', False): - program_xml.append(f' ') + program_xml.append(' ') except Exception as e: program_xml.append(f" ") From b392788d5f4ee436ee6009237ecdd4f18ddd81fa Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Thu, 10 Jul 2025 16:22:16 -0500 Subject: [PATCH 0582/1435] Improve error handling for API responses by checking for empty content and handling JSON decode errors. --- core/xtream_codes.py | 15 +++++++++++++-- 1 file changed, 13 insertions(+), 2 deletions(-) diff --git a/core/xtream_codes.py b/core/xtream_codes.py index 17f3eaad..64b49cb2 100644 --- a/core/xtream_codes.py +++ b/core/xtream_codes.py @@ -56,8 +56,19 @@ class Client: response = requests.get(url, params=params, headers=self.headers, timeout=30) response.raise_for_status() - data = response.json() - logger.debug(f"XC API Response: {url} status code: {response.status_code}") + # Check if response is empty + if not response.content: + error_msg = f"XC API returned empty response from {url}" + logger.error(error_msg) + raise ValueError(error_msg) + + try: + data = response.json() + except requests.exceptions.JSONDecodeError as json_err: + error_msg = f"XC API returned invalid JSON from {url}. Response: {response.text[:1000]}" + logger.error(error_msg) + logger.error(f"JSON decode error: {str(json_err)}") + raise ValueError(error_msg) # Check for XC-specific error responses if isinstance(data, dict) and data.get('user_info') is None and 'error' in data: From 65da85991c35690cf7d36b2bc54c65d696562417 Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Thu, 10 Jul 2025 18:07:25 -0500 Subject: [PATCH 0583/1435] Enhance error handling in API requests by checking for common blocking responses and improving JSON decode error logging. --- core/xtream_codes.py | 13 +++++++++++++ 1 file changed, 13 insertions(+) diff --git a/core/xtream_codes.py b/core/xtream_codes.py index 64b49cb2..846e53d4 100644 --- a/core/xtream_codes.py +++ b/core/xtream_codes.py @@ -62,12 +62,25 @@ class Client: logger.error(error_msg) raise ValueError(error_msg) + # Check for common blocking responses before trying to parse JSON + response_text = response.text.strip() + if response_text.lower() in ['blocked', 'forbidden', 'access denied', 'unauthorized']: + error_msg = f"XC API request blocked by server from {url}. 
Response: {response_text}" + logger.error(error_msg) + logger.error(f"This may indicate IP blocking, User-Agent filtering, or rate limiting") + raise ValueError(error_msg) + try: data = response.json() except requests.exceptions.JSONDecodeError as json_err: error_msg = f"XC API returned invalid JSON from {url}. Response: {response.text[:1000]}" logger.error(error_msg) logger.error(f"JSON decode error: {str(json_err)}") + + # Check if it looks like an HTML error page + if response_text.startswith('<'): + logger.error("Response appears to be HTML - server may be returning an error page") + raise ValueError(error_msg) # Check for XC-specific error responses From fafd93e9588cedb82ebfb0ee8709485075c44691 Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Thu, 10 Jul 2025 19:14:43 -0500 Subject: [PATCH 0584/1435] Refactor XC Client usage to improve error handling and resource management with context management. Implement connection pooling for better performance. --- apps/m3u/tasks.py | 202 +++++++++++++++++++++---------------------- core/xtream_codes.py | 42 +++++++-- 2 files changed, 137 insertions(+), 107 deletions(-) diff --git a/apps/m3u/tasks.py b/apps/m3u/tasks.py index d6e0755b..d7e46cde 100644 --- a/apps/m3u/tasks.py +++ b/apps/m3u/tasks.py @@ -285,57 +285,56 @@ def process_xc_category(account_id, batch, groups, hash_keys): stream_hashes = {} try: - xc_client = XCClient(account.server_url, account.username, account.password, account.get_user_agent()) + with XCClient(account.server_url, account.username, account.password, account.get_user_agent()) as xc_client: + # Log the batch details to help with debugging + logger.debug(f"Processing XC batch: {batch}") - # Log the batch details to help with debugging - logger.debug(f"Processing XC batch: {batch}") - - for group_name, props in batch.items(): - # Check if we have a valid xc_id for this group - if 'xc_id' not in props: - logger.error(f"Missing xc_id for group {group_name} in batch {batch}") - continue - - # Get actual group ID from the mapping - group_id = groups.get(group_name) - if not group_id: - logger.error(f"Group {group_name} not found in enabled groups") - continue - - try: - logger.debug(f"Fetching streams for XC category: {group_name} (ID: {props['xc_id']})") - streams = xc_client.get_live_category_streams(props['xc_id']) - - if not streams: - logger.warning(f"No streams found for XC category {group_name} (ID: {props['xc_id']})") + for group_name, props in batch.items(): + # Check if we have a valid xc_id for this group + if 'xc_id' not in props: + logger.error(f"Missing xc_id for group {group_name} in batch {batch}") continue - logger.debug(f"Found {len(streams)} streams for category {group_name}") + # Get actual group ID from the mapping + group_id = groups.get(group_name) + if not group_id: + logger.error(f"Group {group_name} not found in enabled groups") + continue - for stream in streams: - name = stream["name"] - url = xc_client.get_stream_url(stream["stream_id"]) - tvg_id = stream.get("epg_channel_id", "") - tvg_logo = stream.get("stream_icon", "") - group_title = group_name + try: + logger.debug(f"Fetching streams for XC category: {group_name} (ID: {props['xc_id']})") + streams = xc_client.get_live_category_streams(props['xc_id']) - stream_hash = Stream.generate_hash_key(name, url, tvg_id, hash_keys) - stream_props = { - "name": name, - "url": url, - "logo_url": tvg_logo, - "tvg_id": tvg_id, - "m3u_account": account, - "channel_group_id": int(group_id), - "stream_hash": stream_hash, - "custom_properties": 
json.dumps(stream), - } + if not streams: + logger.warning(f"No streams found for XC category {group_name} (ID: {props['xc_id']})") + continue - if stream_hash not in stream_hashes: - stream_hashes[stream_hash] = stream_props - except Exception as e: - logger.error(f"Error processing XC category {group_name} (ID: {props['xc_id']}): {str(e)}") - continue + logger.debug(f"Found {len(streams)} streams for category {group_name}") + + for stream in streams: + name = stream["name"] + url = xc_client.get_stream_url(stream["stream_id"]) + tvg_id = stream.get("epg_channel_id", "") + tvg_logo = stream.get("stream_icon", "") + group_title = group_name + + stream_hash = Stream.generate_hash_key(name, url, tvg_id, hash_keys) + stream_props = { + "name": name, + "url": url, + "logo_url": tvg_logo, + "tvg_id": tvg_id, + "m3u_account": account, + "channel_group_id": int(group_id), + "stream_hash": stream_hash, + "custom_properties": json.dumps(stream), + } + + if stream_hash not in stream_hashes: + stream_hashes[stream_hash] = stream_props + except Exception as e: + logger.error(f"Error processing XC category {group_name} (ID: {props['xc_id']}): {str(e)}") + continue # Process all found streams existing_streams = {s.stream_hash: s for s in Stream.objects.filter(stream_hash__in=stream_hashes.keys())} @@ -622,62 +621,63 @@ def refresh_m3u_groups(account_id, use_cache=False, full_refresh=False): # Create XCClient with explicit error handling try: - xc_client = XCClient(server_url, account.username, account.password, user_agent_string) - logger.info(f"XCClient instance created successfully") + with XCClient(server_url, account.username, account.password, user_agent_string) as xc_client: + logger.info(f"XCClient instance created successfully") + + # Authenticate with detailed error handling + try: + logger.debug(f"Authenticating with XC server {server_url}") + auth_result = xc_client.authenticate() + logger.debug(f"Authentication response: {auth_result}") + except Exception as e: + error_msg = f"Failed to authenticate with XC server: {str(e)}" + logger.error(error_msg) + account.status = M3UAccount.Status.ERROR + account.last_message = error_msg + account.save(update_fields=['status', 'last_message']) + send_m3u_update(account_id, "processing_groups", 100, status="error", error=error_msg) + release_task_lock('refresh_m3u_account_groups', account_id) + return error_msg, None + + # Get categories with detailed error handling + try: + logger.info(f"Getting live categories from XC server") + xc_categories = xc_client.get_live_categories() + logger.info(f"Found {len(xc_categories)} categories: {xc_categories}") + + # Validate response + if not isinstance(xc_categories, list): + error_msg = f"Unexpected response from XC server: {xc_categories}" + logger.error(error_msg) + account.status = M3UAccount.Status.ERROR + account.last_message = error_msg + account.save(update_fields=['status', 'last_message']) + send_m3u_update(account_id, "processing_groups", 100, status="error", error=error_msg) + release_task_lock('refresh_m3u_account_groups', account_id) + return error_msg, None + + if len(xc_categories) == 0: + logger.warning("No categories found in XC server response") + + for category in xc_categories: + cat_name = category.get("category_name", "Unknown Category") + cat_id = category.get("category_id", "0") + logger.info(f"Adding category: {cat_name} (ID: {cat_id})") + groups[cat_name] = { + "xc_id": cat_id, + } + except Exception as e: + error_msg = f"Failed to get categories from XC server: {str(e)}" + 
logger.error(error_msg) + account.status = M3UAccount.Status.ERROR + account.last_message = error_msg + account.save(update_fields=['status', 'last_message']) + send_m3u_update(account_id, "processing_groups", 100, status="error", error=error_msg) + release_task_lock('refresh_m3u_account_groups', account_id) + return error_msg, None + except Exception as e: - error_msg = f"Failed to create XCClient: {str(e)}" - logger.error(error_msg) - account.status = M3UAccount.Status.ERROR - account.last_message = error_msg - account.save(update_fields=['status', 'last_message']) - send_m3u_update(account_id, "processing_groups", 100, status="error", error=error_msg) - release_task_lock('refresh_m3u_account_groups', account_id) - return error_msg, None - - # Authenticate with detailed error handling - try: - logger.debug(f"Authenticating with XC server {server_url}") - auth_result = xc_client.authenticate() - logger.debug(f"Authentication response: {auth_result}") - except Exception as e: - error_msg = f"Failed to authenticate with XC server: {str(e)}" - logger.error(error_msg) - account.status = M3UAccount.Status.ERROR - account.last_message = error_msg - account.save(update_fields=['status', 'last_message']) - send_m3u_update(account_id, "processing_groups", 100, status="error", error=error_msg) - release_task_lock('refresh_m3u_account_groups', account_id) - return error_msg, None - - # Get categories with detailed error handling - try: - logger.info(f"Getting live categories from XC server") - xc_categories = xc_client.get_live_categories() - logger.info(f"Found {len(xc_categories)} categories: {xc_categories}") - - # Validate response - if not isinstance(xc_categories, list): - error_msg = f"Unexpected response from XC server: {xc_categories}" - logger.error(error_msg) - account.status = M3UAccount.Status.ERROR - account.last_message = error_msg - account.save(update_fields=['status', 'last_message']) - send_m3u_update(account_id, "processing_groups", 100, status="error", error=error_msg) - release_task_lock('refresh_m3u_account_groups', account_id) - return error_msg, None - - if len(xc_categories) == 0: - logger.warning("No categories found in XC server response") - - for category in xc_categories: - cat_name = category.get("category_name", "Unknown Category") - cat_id = category.get("category_id", "0") - logger.info(f"Adding category: {cat_name} (ID: {cat_id})") - groups[cat_name] = { - "xc_id": cat_id, - } - except Exception as e: - error_msg = f"Failed to get categories from XC server: {str(e)}" + error_msg = f"Failed to create XC Client: {str(e)}" logger.error(error_msg) account.status = M3UAccount.Status.ERROR account.last_message = error_msg @@ -686,7 +686,7 @@ def refresh_m3u_groups(account_id, use_cache=False, full_refresh=False): release_task_lock('refresh_m3u_account_groups', account_id) return error_msg, None except Exception as e: - error_msg = f"Unexpected error in XC processing: {str(e)}" + error_msg = f"Unexpected error occurred in XC Client: {str(e)}" logger.error(error_msg) account.status = M3UAccount.Status.ERROR account.last_message = error_msg diff --git a/core/xtream_codes.py b/core/xtream_codes.py index 846e53d4..d068bacb 100644 --- a/core/xtream_codes.py +++ b/core/xtream_codes.py @@ -17,20 +17,29 @@ class Client: # Fix: Properly handle all possible user_agent input types if user_agent: if isinstance(user_agent, str): - # Direct string user agent user_agent_string = user_agent elif hasattr(user_agent, 'user_agent'): - # UserAgent model object user_agent_string = 
user_agent.user_agent else: - # Fallback for any other type logger.warning(f"Unexpected user_agent type: {type(user_agent)}, using default") user_agent_string = 'Mozilla/5.0 (Windows NT 10.0; Win64; x64)' else: - # No user agent provided user_agent_string = 'Mozilla/5.0 (Windows NT 10.0; Win64; x64)' - self.headers = {'User-Agent': user_agent_string} + # Create persistent session + self.session = requests.Session() + self.session.headers.update({'User-Agent': user_agent_string}) + + # Configure connection pooling + adapter = requests.adapters.HTTPAdapter( + pool_connections=1, + pool_maxsize=2, + max_retries=3, + pool_block=False + ) + self.session.mount('http://', adapter) + self.session.mount('https://', adapter) + self.server_info = None def _normalize_url(self, url): @@ -53,7 +62,7 @@ class Client: url = f"{self.server_url}/{endpoint}" logger.debug(f"XC API Request: {url} with params: {params}") - response = requests.get(url, params=params, headers=self.headers, timeout=30) + response = self.session.get(url, params=params, timeout=30) response.raise_for_status() # Check if response is empty @@ -186,3 +195,24 @@ class Client: def get_stream_url(self, stream_id): """Get the playback URL for a stream""" return f"{self.server_url}/live/{self.username}/{self.password}/{stream_id}.ts" + + def close(self): + """Close the session and cleanup resources""" + if hasattr(self, 'session') and self.session: + try: + self.session.close() + except Exception as e: + logger.debug(f"Error closing XC session: {e}") + + def __enter__(self): + """Enter the context manager""" + return self + + def __exit__(self, exc_type, exc_val, exc_tb): + """Exit the context manager and cleanup resources""" + self.close() + return False # Don't suppress exceptions + + def __del__(self): + """Ensure session is closed when object is destroyed""" + self.close() From 1c7fa21b868bc160ad2897fa65afa892b7fba43a Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Fri, 11 Jul 2025 14:11:41 -0500 Subject: [PATCH 0585/1435] Add rehash streams endpoint and UI integration for triggering stream rehashing --- core/api_urls.py | 3 +- core/api_views.py | 37 +++++++++++++ core/tasks.py | 96 ++++++++++++++++++++++++++++----- frontend/src/pages/Settings.jsx | 36 ++++++++++++- 4 files changed, 156 insertions(+), 16 deletions(-) diff --git a/core/api_urls.py b/core/api_urls.py index e30eb698..30714d44 100644 --- a/core/api_urls.py +++ b/core/api_urls.py @@ -2,7 +2,7 @@ from django.urls import path, include from rest_framework.routers import DefaultRouter -from .api_views import UserAgentViewSet, StreamProfileViewSet, CoreSettingsViewSet, environment, version +from .api_views import UserAgentViewSet, StreamProfileViewSet, CoreSettingsViewSet, environment, version, rehash_streams_endpoint router = DefaultRouter() router.register(r'useragents', UserAgentViewSet, basename='useragent') @@ -12,5 +12,6 @@ router.register(r'settings', CoreSettingsViewSet, basename='settings') urlpatterns = [ path('settings/env/', environment, name='token_refresh'), path('version/', version, name='version'), + path('rehash-streams/', rehash_streams_endpoint, name='rehash_streams'), path('', include(router.urls)), ] diff --git a/core/api_views.py b/core/api_views.py index b416cf92..6b9743f6 100644 --- a/core/api_views.py +++ b/core/api_views.py @@ -280,3 +280,40 @@ def version(request): "timestamp": __timestamp__, } ) + + +@swagger_auto_schema( + method="post", + operation_description="Trigger rehashing of all streams", + responses={200: "Rehash task started"}, +) 
+@api_view(["POST"]) +@permission_classes([Authenticated]) +def rehash_streams_endpoint(request): + """Trigger the rehash streams task""" + try: + # Get the current hash keys from settings + hash_key_setting = CoreSettings.objects.get(key=STREAM_HASH_KEY) + hash_keys = hash_key_setting.value.split(",") + + # Queue the rehash task + task = rehash_streams.delay(hash_keys) + + return Response({ + "success": True, + "message": "Stream rehashing task has been queued", + "task_id": task.id + }, status=status.HTTP_200_OK) + + except CoreSettings.DoesNotExist: + return Response({ + "success": False, + "message": "Hash key settings not found" + }, status=status.HTTP_400_BAD_REQUEST) + + except Exception as e: + logger.error(f"Error triggering rehash streams: {e}") + return Response({ + "success": False, + "message": "Failed to trigger rehash task" + }, status=status.HTTP_500_INTERNAL_SERVER_ERROR) diff --git a/core/tasks.py b/core/tasks.py index 0fdaedf7..157ffadc 100644 --- a/core/tasks.py +++ b/core/tasks.py @@ -312,32 +312,100 @@ def fetch_channel_stats(): @shared_task def rehash_streams(keys): + """ + Rehash all streams with new hash keys and handle duplicates. + """ batch_size = 1000 queryset = Stream.objects.all() + # Track statistics + total_processed = 0 + duplicates_merged = 0 hash_keys = {} + total_records = queryset.count() + logger.info(f"Starting rehash of {total_records} streams with keys: {keys}") + for start in range(0, total_records, batch_size): + batch_processed = 0 + batch_duplicates = 0 + with transaction.atomic(): batch = queryset[start:start + batch_size] + for obj in batch: - stream_hash = Stream.generate_hash_key(obj.name, obj.url, obj.tvg_id, keys) - if stream_hash in hash_keys: - # Handle duplicate keys and remove any without channels - stream_channels = ChannelStream.objects.filter(stream_id=obj.id).count() - if stream_channels == 0: + # Generate new hash + new_hash = Stream.generate_hash_key(obj.name, obj.url, obj.tvg_id, keys) + + # Check if this hash already exists in our tracking dict or in database + if new_hash in hash_keys: + # Found duplicate in current batch - merge the streams + existing_stream_id = hash_keys[new_hash] + existing_stream = Stream.objects.get(id=existing_stream_id) + + # Move any channel relationships from duplicate to existing stream + ChannelStream.objects.filter(stream_id=obj.id).update(stream_id=existing_stream_id) + + # Update the existing stream with the most recent data + if obj.updated_at > existing_stream.updated_at: + existing_stream.name = obj.name + existing_stream.url = obj.url + existing_stream.logo_url = obj.logo_url + existing_stream.tvg_id = obj.tvg_id + existing_stream.m3u_account = obj.m3u_account + existing_stream.channel_group = obj.channel_group + existing_stream.custom_properties = obj.custom_properties + existing_stream.last_seen = obj.last_seen + existing_stream.updated_at = obj.updated_at + existing_stream.save() + + # Delete the duplicate + obj.delete() + batch_duplicates += 1 + else: + # Check if hash already exists in database (from previous batches or existing data) + existing_stream = Stream.objects.filter(stream_hash=new_hash).exclude(id=obj.id).first() + if existing_stream: + # Found duplicate in database - merge the streams + # Move any channel relationships from duplicate to existing stream + ChannelStream.objects.filter(stream_id=obj.id).update(stream_id=existing_stream.id) + + # Update the existing stream with the most recent data + if obj.updated_at > existing_stream.updated_at: + existing_stream.name = 
obj.name + existing_stream.url = obj.url + existing_stream.logo_url = obj.logo_url + existing_stream.tvg_id = obj.tvg_id + existing_stream.m3u_account = obj.m3u_account + existing_stream.channel_group = obj.channel_group + existing_stream.custom_properties = obj.custom_properties + existing_stream.last_seen = obj.last_seen + existing_stream.updated_at = obj.updated_at + existing_stream.save() + + # Delete the duplicate obj.delete() - continue + batch_duplicates += 1 + hash_keys[new_hash] = existing_stream.id + else: + # Update hash for this stream + obj.stream_hash = new_hash + obj.save(update_fields=['stream_hash']) + hash_keys[new_hash] = obj.id + batch_processed += 1 - existing_stream_channels = ChannelStream.objects.filter(stream_id=hash_keys[stream_hash]).count() - if existing_stream_channels == 0: - Stream.objects.filter(id=hash_keys[stream_hash]).delete() + total_processed += batch_processed + duplicates_merged += batch_duplicates - obj.stream_hash = stream_hash - obj.save(update_fields=['stream_hash']) - hash_keys[stream_hash] = obj.id + logger.info(f"Rehashed batch {start//batch_size + 1}/{(total_records//batch_size) + 1}: " + f"{batch_processed} processed, {batch_duplicates} duplicates merged") - logger.debug(f"Re-hashed {batch_size} streams") + logger.info(f"Rehashing complete: {total_processed} streams processed, " + f"{duplicates_merged} duplicates merged") - logger.debug(f"Re-hashing complete") + return { + 'total_processed': total_processed, + 'duplicates_merged': duplicates_merged, + 'final_count': total_processed - duplicates_merged + } diff --git a/frontend/src/pages/Settings.jsx b/frontend/src/pages/Settings.jsx index a5b07fa2..865ac6c7 100644 --- a/frontend/src/pages/Settings.jsx +++ b/frontend/src/pages/Settings.jsx @@ -47,6 +47,8 @@ const SettingsPage = () => { useState([]); const [proxySettingsSaved, setProxySettingsSaved] = useState(false); + const [rehashingStreams, setRehashingStreams] = useState(false); + const [rehashSuccess, setRehashSuccess] = useState(false); // UI / local storage settings const [tableSize, setTableSize] = useLocalStorage('table-size', 'default'); @@ -245,6 +247,22 @@ const SettingsPage = () => { } }; + const onRehashStreams = async () => { + setRehashingStreams(true); + setRehashSuccess(false); + + try { + await API.post('/core/rehash-streams/'); + setRehashSuccess(true); + setTimeout(() => setRehashSuccess(false), 5000); // Clear success message after 5 seconds + } catch (error) { + console.error('Error rehashing streams:', error); + // You might want to add error state handling here + } finally { + setRehashingStreams(false); + } + }; + return (
    { key={form.key('m3u-hash-key')} /> + {rehashSuccess && ( + + )} + + + )} + + + + + + {/* Existing groups */} + + Existing Groups ({Object.keys(channelGroups).length}) + + {loading ? ( + Loading group information... + ) : Object.keys(channelGroups).length === 0 ? ( + No groups found + ) : ( + + {Object.values(channelGroups) + .sort((a, b) => a.name.localeCompare(b.name)) + .map((group) => ( + + + {editingGroup === group.id ? ( + setEditName(e.target.value)} + size="sm" + onKeyPress={(e) => e.key === 'Enter' && handleSaveEdit()} + /> + ) : ( + <> + {group.name} + + {getGroupBadges(group)} + + + )} + + + + {editingGroup === group.id ? ( + <> + + + + + + + + ) : ( + <> + handleEdit(group)} + disabled={!canEditGroup(group)} + > + + + handleDelete(group)} + disabled={!canDeleteGroup(group)} + > + + + + )} + + + ))} + + )} + + + + + + + + + + ); +}; + +export default GroupManager; diff --git a/frontend/src/components/tables/ChannelsTable/ChannelTableHeader.jsx b/frontend/src/components/tables/ChannelsTable/ChannelTableHeader.jsx index 8813ceda..1568e10d 100644 --- a/frontend/src/components/tables/ChannelsTable/ChannelTableHeader.jsx +++ b/frontend/src/components/tables/ChannelsTable/ChannelTableHeader.jsx @@ -25,6 +25,7 @@ import { SquareMinus, SquarePen, SquarePlus, + Settings, } from 'lucide-react'; import API from '../../../api'; import { notifications } from '@mantine/notifications'; @@ -32,6 +33,7 @@ import useChannelsStore from '../../../store/channels'; import useAuthStore from '../../../store/auth'; import { USER_LEVELS } from '../../../constants'; import AssignChannelNumbersForm from '../../forms/AssignChannelNumbers'; +import GroupManager from '../../forms/GroupManager'; import ConfirmationDialog from '../../ConfirmationDialog'; import useWarningsStore from '../../../store/warnings'; @@ -105,6 +107,7 @@ const ChannelTableHeader = ({ const [channelNumAssignmentStart, setChannelNumAssignmentStart] = useState(1); const [assignNumbersModalOpen, setAssignNumbersModalOpen] = useState(false); + const [groupManagerOpen, setGroupManagerOpen] = useState(false); const [confirmDeleteProfileOpen, setConfirmDeleteProfileOpen] = useState(false); const [profileToDelete, setProfileToDelete] = useState(null); @@ -301,6 +304,15 @@ const ChannelTableHeader = ({ Auto-Match + + } + disabled={authUser.user_level != USER_LEVELS.ADMIN} + > + setGroupManagerOpen(true)}> + Edit Groups + + @@ -312,6 +324,11 @@ const ChannelTableHeader = ({ onClose={closeAssignChannelNumbersModal} /> + setGroupManagerOpen(false)} + /> + setConfirmDeleteProfileOpen(false)} diff --git a/frontend/src/store/channels.jsx b/frontend/src/store/channels.jsx index beb62fe1..03cf2b86 100644 --- a/frontend/src/store/channels.jsx +++ b/frontend/src/store/channels.jsx @@ -204,10 +204,18 @@ const useChannelsStore = create((set, get) => ({ updateChannelGroup: (channelGroup) => set((state) => ({ - ...state.channelGroups, - [channelGroup.id]: channelGroup, + channelGroups: { + ...state.channelGroups, + [channelGroup.id]: channelGroup, + }, })), + removeChannelGroup: (groupId) => + set((state) => { + const { [groupId]: removed, ...remainingGroups } = state.channelGroups; + return { channelGroups: remainingGroups }; + }), + fetchLogos: async () => { set({ isLoading: true, error: null }); try { From a1d9a7cbbe22c246e8c50714e6626f184f59f856 Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Sat, 12 Jul 2025 16:21:40 -0500 Subject: [PATCH 0595/1435] Fixed performance issue while creating group. 
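The root cause: GroupItem had been declared inside GroupManager's render scope, so a brand-new component type was created on every render and React unmounted and remounted the whole list subtree on each keystroke (the likely source of the create-group lag, and why React.memo could never bail out). Hoisting the child to module scope, memoizing it, and passing stable useCallback handlers keeps the subtree mounted. A minimal sketch of the pattern with illustrative names, not the real components:

    import React, { useCallback } from 'react';

    // Anti-pattern: `Row` is a different type on every List render,
    // so React remounts each row and memoization is useless.
    // const List = ({ items }) => {
    //   const Row = ({ item }) => <div>{item.name}</div>;
    //   return items.map((i) => <Row key={i.id} item={i} />);
    // };

    // Hoisted + memoized: rows re-render only when their props change.
    const Row = React.memo(({ item, onEdit }) => (
      <div onClick={() => onEdit(item)}>{item.name}</div>
    ));

    const List = ({ items }) => {
      // Stable identity across renders, so Row's memo check passes.
      const onEdit = useCallback((item) => console.log('edit', item.id), []);
      return items.map((i) => <Row key={i.id} item={i} onEdit={onEdit} />);
    };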
--- .../src/components/forms/GroupManager.jsx | 262 +++++++++++------- 1 file changed, 156 insertions(+), 106 deletions(-) diff --git a/frontend/src/components/forms/GroupManager.jsx b/frontend/src/components/forms/GroupManager.jsx index 7709416f..65f4e0b6 100644 --- a/frontend/src/components/forms/GroupManager.jsx +++ b/frontend/src/components/forms/GroupManager.jsx @@ -1,4 +1,4 @@ -import React, { useState, useEffect } from 'react'; +import React, { useState, useEffect, useCallback, useMemo } from 'react'; import { Modal, Stack, @@ -27,7 +27,114 @@ import { notifications } from '@mantine/notifications'; import useChannelsStore from '../../store/channels'; import API from '../../api'; -const GroupManager = ({ isOpen, onClose }) => { +// Move GroupItem outside to prevent recreation on every render +const GroupItem = React.memo(({ + group, + editingGroup, + editName, + onEditNameChange, + onSaveEdit, + onCancelEdit, + onEdit, + onDelete, + groupUsage +}) => { + const getGroupBadges = (group) => { + const usage = groupUsage[group.id]; + const badges = []; + + if (usage?.hasChannels) { + badges.push( + }> + Channels + + ); + } + + if (usage?.hasM3UAccounts) { + badges.push( + }> + M3U + + ); + } + + return badges; + }; + + const canEditGroup = (group) => { + const usage = groupUsage[group.id]; + return usage?.canEdit !== false; + }; + + const canDeleteGroup = (group) => { + const usage = groupUsage[group.id]; + return usage?.canDelete !== false && !usage?.hasChannels && !usage?.hasM3UAccounts; + }; + + return ( + + + {editingGroup === group.id ? ( + e.key === 'Enter' && onSaveEdit()} + autoFocus + /> + ) : ( + <> + {group.name} + + {getGroupBadges(group)} + + + )} + + + + {editingGroup === group.id ? ( + <> + + + + + + + + ) : ( + <> + onEdit(group)} + disabled={!canEditGroup(group)} + > + + + onDelete(group)} + disabled={!canDeleteGroup(group)} + > + + + + )} + + + ); +}); + +const GroupManager = React.memo(({ isOpen, onClose }) => { + // Use a more specific selector to avoid unnecessary re-renders + const fetchChannelGroups = useChannelsStore((s) => s.fetchChannelGroups); const channelGroups = useChannelsStore((s) => s.channelGroups); const [editingGroup, setEditingGroup] = useState(null); const [editName, setEditName] = useState(''); @@ -36,6 +143,18 @@ const GroupManager = ({ isOpen, onClose }) => { const [groupUsage, setGroupUsage] = useState({}); const [loading, setLoading] = useState(false); + // Memoize the channel groups array to prevent unnecessary re-renders + const channelGroupsArray = useMemo(() => + Object.values(channelGroups), + [channelGroups] + ); + + // Memoize sorted groups to prevent re-sorting on every render + const sortedGroups = useMemo(() => + channelGroupsArray.sort((a, b) => a.name.localeCompare(b.name)), + [channelGroupsArray] + ); + // Fetch group usage information when modal opens useEffect(() => { if (isOpen) { @@ -69,12 +188,12 @@ const GroupManager = ({ isOpen, onClose }) => { } }; - const handleEdit = (group) => { + const handleEdit = useCallback((group) => { setEditingGroup(group.id); setEditName(group.name); - }; + }, []); - const handleSaveEdit = async () => { + const handleSaveEdit = useCallback(async () => { if (!editName.trim()) { notifications.show({ title: 'Error', @@ -105,14 +224,14 @@ const GroupManager = ({ isOpen, onClose }) => { color: 'red', }); } - }; + }, [editName, editingGroup]); - const handleCancelEdit = () => { + const handleCancelEdit = useCallback(() => { setEditingGroup(null); setEditName(''); - }; + }, []); - const handleCreate = 
async () => { + const handleCreate = useCallback(async () => { if (!newGroupName.trim()) { notifications.show({ title: 'Error', @@ -143,9 +262,9 @@ const GroupManager = ({ isOpen, onClose }) => { color: 'red', }); } - }; + }, [newGroupName]); - const handleDelete = async (group) => { + const handleDelete = useCallback(async (group) => { const usage = groupUsage[group.id]; if (usage && (!usage.canDelete || usage.hasChannels || usage.hasM3UAccounts)) { @@ -174,40 +293,15 @@ const GroupManager = ({ isOpen, onClose }) => { color: 'red', }); } - }; + }, [groupUsage]); - const getGroupBadges = (group) => { - const usage = groupUsage[group.id]; - const badges = []; + const handleNewGroupNameChange = useCallback((e) => { + setNewGroupName(e.target.value); + }, []); - if (usage?.hasChannels) { - badges.push( - }> - Channels - - ); - } - - if (usage?.hasM3UAccounts) { - badges.push( - }> - M3U - - ); - } - - return badges; - }; - - const canEditGroup = (group) => { - const usage = groupUsage[group.id]; - return usage?.canEdit !== false; // Default to true if no usage data - }; - - const canDeleteGroup = (group) => { - const usage = groupUsage[group.id]; - return usage?.canDelete !== false && !usage?.hasChannels && !usage?.hasM3UAccounts; - }; + const handleEditNameChange = useCallback((e) => { + setEditName(e.target.value); + }, []); if (!isOpen) return null; @@ -233,9 +327,10 @@ const GroupManager = ({ isOpen, onClose }) => { setNewGroupName(e.target.value)} + onChange={handleNewGroupNameChange} style={{ flex: 1 }} onKeyPress={(e) => e.key === 'Enter' && handleCreate()} + autoFocus /> @@ -264,73 +359,28 @@ const GroupManager = ({ isOpen, onClose }) => { {/* Existing groups */} - Existing Groups ({Object.keys(channelGroups).length}) + Existing Groups ({channelGroupsArray.length}) {loading ? ( Loading group information... - ) : Object.keys(channelGroups).length === 0 ? ( + ) : sortedGroups.length === 0 ? ( No groups found ) : ( - {Object.values(channelGroups) - .sort((a, b) => a.name.localeCompare(b.name)) - .map((group) => ( - - - {editingGroup === group.id ? ( - setEditName(e.target.value)} - size="sm" - onKeyPress={(e) => e.key === 'Enter' && handleSaveEdit()} - /> - ) : ( - <> - {group.name} - - {getGroupBadges(group)} - - - )} - - - - {editingGroup === group.id ? 
( - <> - - - - - - - - ) : ( - <> - handleEdit(group)} - disabled={!canEditGroup(group)} - > - - - handleDelete(group)} - disabled={!canDeleteGroup(group)} - > - - - - )} - - - ))} + {sortedGroups.map((group) => ( + + ))} )} @@ -345,6 +395,6 @@ const GroupManager = ({ isOpen, onClose }) => { ); -}; +}); export default GroupManager; From 9cb05a0ae1610d0e0c893cd78065d5cd31ed9c9c Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Sat, 12 Jul 2025 16:27:49 -0500 Subject: [PATCH 0596/1435] Add search functionality to GroupManager for filtering groups --- .../src/components/forms/GroupManager.jsx | 45 ++++++++++++++++--- 1 file changed, 40 insertions(+), 5 deletions(-) diff --git a/frontend/src/components/forms/GroupManager.jsx b/frontend/src/components/forms/GroupManager.jsx index 65f4e0b6..e10b9a1c 100644 --- a/frontend/src/components/forms/GroupManager.jsx +++ b/frontend/src/components/forms/GroupManager.jsx @@ -142,6 +142,7 @@ const GroupManager = React.memo(({ isOpen, onClose }) => { const [isCreating, setIsCreating] = useState(false); const [groupUsage, setGroupUsage] = useState({}); const [loading, setLoading] = useState(false); + const [searchTerm, setSearchTerm] = useState(''); // Memoize the channel groups array to prevent unnecessary re-renders const channelGroupsArray = useMemo(() => @@ -155,6 +156,14 @@ const GroupManager = React.memo(({ isOpen, onClose }) => { [channelGroupsArray] ); + // Filter groups based on search term + const filteredGroups = useMemo(() => { + if (!searchTerm.trim()) return sortedGroups; + return sortedGroups.filter(group => + group.name.toLowerCase().includes(searchTerm.toLowerCase()) + ); + }, [sortedGroups, searchTerm]); + // Fetch group usage information when modal opens useEffect(() => { if (isOpen) { @@ -293,7 +302,7 @@ const GroupManager = React.memo(({ isOpen, onClose }) => { color: 'red', }); } - }, [groupUsage]); + }, [groupUsage, fetchGroupUsage]); const handleNewGroupNameChange = useCallback((e) => { setNewGroupName(e.target.value); @@ -303,6 +312,10 @@ const GroupManager = React.memo(({ isOpen, onClose }) => { setEditName(e.target.value); }, []); + const handleSearchChange = useCallback((e) => { + setSearchTerm(e.target.value); + }, []); + if (!isOpen) return null; return ( @@ -359,15 +372,37 @@ const GroupManager = React.memo(({ isOpen, onClose }) => { {/* Existing groups */} - Existing Groups ({channelGroupsArray.length}) + + + Existing Groups ({filteredGroups.length}{searchTerm && ` of ${sortedGroups.length}`}) + + setSearchTerm('')} + > + + + )} + /> + {loading ? ( Loading group information... - ) : sortedGroups.length === 0 ? ( - No groups found + ) : filteredGroups.length === 0 ? ( + + {searchTerm ? 'No groups found matching your search' : 'No groups found'} + ) : ( - {sortedGroups.map((group) => ( + {filteredGroups.map((group) => ( Date: Sat, 12 Jul 2025 16:57:05 -0500 Subject: [PATCH 0597/1435] Disable buttons that can't be used. 
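Beyond greying out the buttons client-side, the server now rejects the operation outright, so callers should surface the 400 body. A rough sketch of handling the new guard from the client (URL follows the existing groups route; auth headers omitted, normally supplied by the app's request() wrapper):

    import { notifications } from '@mantine/notifications';

    // PATCH /api/channels/groups/7/ on an M3U-managed group now yields
    // 400 {"error": "Cannot edit group with M3U account associations"}
    const renameGroup = async (id, name) => {
      const res = await fetch(`/api/channels/groups/${id}/`, {
        method: 'PATCH',
        headers: { 'Content-Type': 'application/json' },
        body: JSON.stringify({ name }),
      });
      if (!res.ok) {
        const { error } = await res.json();
        notifications.show({ title: 'Error', message: error, color: 'red' });
        return null;
      }
      return res.json();
    };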
--- apps/channels/api_views.py | 34 +++++++++++++++++ apps/channels/serializers.py | 5 ++- frontend/src/api.js | 10 ++++- frontend/src/components/forms/Channel.jsx | 2 + .../src/components/forms/ChannelBatch.jsx | 2 + .../src/components/forms/ChannelGroup.jsx | 27 ++++++++++++- .../src/components/forms/GroupManager.jsx | 38 +++++++------------ .../src/components/tables/ChannelsTable.jsx | 4 +- frontend/src/store/channels.jsx | 35 +++++++++++++---- 9 files changed, 120 insertions(+), 37 deletions(-) diff --git a/apps/channels/api_views.py b/apps/channels/api_views.py index 5d00e84d..b4df2461 100644 --- a/apps/channels/api_views.py +++ b/apps/channels/api_views.py @@ -187,6 +187,40 @@ class ChannelGroupViewSet(viewsets.ModelViewSet): except KeyError: return [Authenticated()] + def get_queryset(self): + """Add annotation for association counts""" + from django.db.models import Count + return ChannelGroup.objects.annotate( + channel_count=Count('channels', distinct=True), + m3u_account_count=Count('m3u_account', distinct=True) + ) + + def update(self, request, *args, **kwargs): + """Override update to check M3U associations""" + instance = self.get_object() + + # Check if group has M3U account associations + if hasattr(instance, 'm3u_account') and instance.m3u_account.exists(): + return Response( + {"error": "Cannot edit group with M3U account associations"}, + status=status.HTTP_400_BAD_REQUEST + ) + + return super().update(request, *args, **kwargs) + + def partial_update(self, request, *args, **kwargs): + """Override partial_update to check M3U associations""" + instance = self.get_object() + + # Check if group has M3U account associations + if hasattr(instance, 'm3u_account') and instance.m3u_account.exists(): + return Response( + {"error": "Cannot edit group with M3U account associations"}, + status=status.HTTP_400_BAD_REQUEST + ) + + return super().partial_update(request, *args, **kwargs) + def destroy(self, request, *args, **kwargs): """Override destroy to check for associations before deletion""" instance = self.get_object() diff --git a/apps/channels/serializers.py b/apps/channels/serializers.py index cdc6ef60..4d1694dc 100644 --- a/apps/channels/serializers.py +++ b/apps/channels/serializers.py @@ -89,9 +89,12 @@ class StreamSerializer(serializers.ModelSerializer): # Channel Group # class ChannelGroupSerializer(serializers.ModelSerializer): + channel_count = serializers.IntegerField(read_only=True) + m3u_account_count = serializers.IntegerField(read_only=True) + class Meta: model = ChannelGroup - fields = ["id", "name"] + fields = ["id", "name", "channel_count", "m3u_account_count"] class ChannelProfileSerializer(serializers.ModelSerializer): diff --git a/frontend/src/api.js b/frontend/src/api.js index ff95f634..9786bb75 100644 --- a/frontend/src/api.js +++ b/frontend/src/api.js @@ -250,7 +250,15 @@ export default class API { }); if (response.id) { - useChannelsStore.getState().addChannelGroup(response); + // Add association flags for new groups + const processedGroup = { + ...response, + hasChannels: false, + hasM3UAccounts: false, + canEdit: true, + canDelete: true + }; + useChannelsStore.getState().addChannelGroup(processedGroup); } return response; diff --git a/frontend/src/components/forms/Channel.jsx b/frontend/src/components/forms/Channel.jsx index 452db052..64412cb4 100644 --- a/frontend/src/components/forms/Channel.jsx +++ b/frontend/src/components/forms/Channel.jsx @@ -45,6 +45,8 @@ const ChannelForm = ({ channel = null, isOpen, onClose }) => { const groupListRef = 
useRef(null); const channelGroups = useChannelsStore((s) => s.channelGroups); + const canEditChannelGroup = useChannelsStore((s) => s.canEditChannelGroup); + const logos = useChannelsStore((s) => s.logos); const fetchLogos = useChannelsStore((s) => s.fetchLogos); const streams = useStreamsStore((state) => state.streams); diff --git a/frontend/src/components/forms/ChannelBatch.jsx b/frontend/src/components/forms/ChannelBatch.jsx index 2ba3245c..693ebb11 100644 --- a/frontend/src/components/forms/ChannelBatch.jsx +++ b/frontend/src/components/forms/ChannelBatch.jsx @@ -32,6 +32,8 @@ const ChannelBatchForm = ({ channelIds, isOpen, onClose }) => { const groupListRef = useRef(null); const channelGroups = useChannelsStore((s) => s.channelGroups); + const canEditChannelGroup = useChannelsStore((s) => s.canEditChannelGroup); + const streamProfiles = useStreamProfilesStore((s) => s.profiles); const [channelGroupModelOpen, setChannelGroupModalOpen] = useState(false); diff --git a/frontend/src/components/forms/ChannelGroup.jsx b/frontend/src/components/forms/ChannelGroup.jsx index 18ed31c1..46641fb1 100644 --- a/frontend/src/components/forms/ChannelGroup.jsx +++ b/frontend/src/components/forms/ChannelGroup.jsx @@ -1,10 +1,17 @@ // Modal.js import React from 'react'; import API from '../../api'; -import { Flex, TextInput, Button, Modal } from '@mantine/core'; +import { Flex, TextInput, Button, Modal, Alert } from '@mantine/core'; +import { notifications } from '@mantine/notifications'; import { isNotEmpty, useForm } from '@mantine/form'; +import useChannelsStore from '../../store/channels'; const ChannelGroup = ({ channelGroup = null, isOpen, onClose }) => { + const canEditChannelGroup = useChannelsStore((s) => s.canEditChannelGroup); + + // Check if editing is allowed + const canEdit = !channelGroup || canEditChannelGroup(channelGroup.id); + const form = useForm({ mode: 'uncontrolled', initialValues: { @@ -17,6 +24,16 @@ const ChannelGroup = ({ channelGroup = null, isOpen, onClose }) => { }); const onSubmit = async () => { + // Prevent submission if editing is not allowed + if (channelGroup && !canEdit) { + notifications.show({ + title: 'Error', + message: 'Cannot edit group with M3U account associations', + color: 'red', + }); + return; + } + const values = form.getValues(); let newGroup; @@ -36,11 +53,17 @@ const ChannelGroup = ({ channelGroup = null, isOpen, onClose }) => { return ( + {channelGroup && !canEdit && ( + + This group cannot be edited because it has M3U account associations. + + )}
    @@ -50,7 +73,7 @@ const ChannelGroup = ({ channelGroup = null, isOpen, onClose }) => { type="submit" variant="contained" color="primary" - disabled={form.submitting} + disabled={form.submitting || (channelGroup && !canEdit)} size="small" > Submit diff --git a/frontend/src/components/forms/GroupManager.jsx b/frontend/src/components/forms/GroupManager.jsx index e10b9a1c..f6bf7305 100644 --- a/frontend/src/components/forms/GroupManager.jsx +++ b/frontend/src/components/forms/GroupManager.jsx @@ -37,7 +37,9 @@ const GroupItem = React.memo(({ onCancelEdit, onEdit, onDelete, - groupUsage + groupUsage, + canEditGroup, + canDeleteGroup }) => { const getGroupBadges = (group) => { const usage = groupUsage[group.id]; @@ -62,16 +64,6 @@ const GroupItem = React.memo(({ return badges; }; - const canEditGroup = (group) => { - const usage = groupUsage[group.id]; - return usage?.canEdit !== false; - }; - - const canDeleteGroup = (group) => { - const usage = groupUsage[group.id]; - return usage?.canDelete !== false && !usage?.hasChannels && !usage?.hasM3UAccounts; - }; - return ( { - // Use a more specific selector to avoid unnecessary re-renders - const fetchChannelGroups = useChannelsStore((s) => s.fetchChannelGroups); const channelGroups = useChannelsStore((s) => s.channelGroups); + const canEditChannelGroup = useChannelsStore((s) => s.canEditChannelGroup); + const canDeleteChannelGroup = useChannelsStore((s) => s.canDeleteChannelGroup); const [editingGroup, setEditingGroup] = useState(null); const [editName, setEditName] = useState(''); const [newGroupName, setNewGroupName] = useState(''); @@ -171,21 +163,18 @@ const GroupManager = React.memo(({ isOpen, onClose }) => { } }, [isOpen]); - const fetchGroupUsage = async () => { + const fetchGroupUsage = useCallback(async () => { setLoading(true); try { - // This would ideally be a dedicated API endpoint, but we'll use the existing data - // For now, we'll determine usage based on the group having associated data + // Use the actual channel group data that already has the flags const usage = {}; - // Check which groups have channels or M3U associations - // This is a simplified check - in a real implementation you'd want a dedicated API Object.values(channelGroups).forEach(group => { usage[group.id] = { - hasChannels: false, // Would need API call to check - hasM3UAccounts: false, // Would need API call to check - canEdit: true, // Assume editable unless proven otherwise - canDelete: true // Assume deletable unless proven otherwise + hasChannels: group.hasChannels ?? false, + hasM3UAccounts: group.hasM3UAccounts ?? false, + canEdit: group.canEdit ?? true, + canDelete: group.canDelete ?? 
true }; }); @@ -195,7 +184,7 @@ const GroupManager = React.memo(({ isOpen, onClose }) => { } finally { setLoading(false); } - }; + }, [channelGroups]); const handleEdit = useCallback((group) => { setEditingGroup(group.id); @@ -414,6 +403,8 @@ const GroupManager = React.memo(({ isOpen, onClose }) => { onEdit={handleEdit} onDelete={handleDelete} groupUsage={groupUsage} + canEditGroup={canEditChannelGroup} + canDeleteGroup={canDeleteChannelGroup} /> ))} @@ -431,5 +422,4 @@ const GroupManager = React.memo(({ isOpen, onClose }) => { ); }); - export default GroupManager; diff --git a/frontend/src/components/tables/ChannelsTable.jsx b/frontend/src/components/tables/ChannelsTable.jsx index 7a9d5007..077602ad 100644 --- a/frontend/src/components/tables/ChannelsTable.jsx +++ b/frontend/src/components/tables/ChannelsTable.jsx @@ -216,6 +216,9 @@ const ChannelRowActions = React.memo( const ChannelsTable = ({ }) => { const theme = useMantineTheme(); + const channelGroups = useChannelsStore((s) => s.channelGroups); + const canEditChannelGroup = useChannelsStore((s) => s.canEditChannelGroup); + const canDeleteChannelGroup = useChannelsStore((s) => s.canDeleteChannelGroup); /** * STORES @@ -241,7 +244,6 @@ const ChannelsTable = ({ }) => { const channels = useChannelsStore((s) => s.channels); const profiles = useChannelsStore((s) => s.profiles); const selectedProfileId = useChannelsStore((s) => s.selectedProfileId); - const channelGroups = useChannelsStore((s) => s.channelGroups); const logos = useChannelsStore((s) => s.logos); const [tablePrefs, setTablePrefs] = useLocalStorage('channel-table-prefs', { pageSize: 50, diff --git a/frontend/src/store/channels.jsx b/frontend/src/store/channels.jsx index 03cf2b86..40791cf4 100644 --- a/frontend/src/store/channels.jsx +++ b/frontend/src/store/channels.jsx @@ -46,16 +46,24 @@ const useChannelsStore = create((set, get) => ({ }, fetchChannelGroups: async () => { - set({ isLoading: true, error: null }); try { const channelGroups = await api.getChannelGroups(); - set({ - channelGroups: channelGroups.reduce((acc, group) => { - acc[group.id] = group; - return acc; - }, {}), - isLoading: false, - }); + + // Process groups to add association flags + const processedGroups = channelGroups.reduce((acc, group) => { + acc[group.id] = { + ...group, + hasChannels: group.channel_count > 0, + hasM3UAccounts: group.m3u_account_count > 0, + canEdit: group.m3u_account_count === 0, + canDelete: group.channel_count === 0 && group.m3u_account_count === 0 + }; + return acc; + }, {}); + + set((state) => ({ + channelGroups: processedGroups, + })); } catch (error) { console.error('Failed to fetch channel groups:', error); set({ error: 'Failed to load channel groups.', isLoading: false }); @@ -435,6 +443,17 @@ const useChannelsStore = create((set, get) => ({ set({ error: 'Failed to load recordings.', isLoading: false }); } }, + + // Add helper methods for validation + canEditChannelGroup: (groupIdOrGroup) => { + const groupId = typeof groupIdOrGroup === 'object' ? groupIdOrGroup.id : groupIdOrGroup; + return get().channelGroups[groupId]?.canEdit ?? true; + }, + + canDeleteChannelGroup: (groupIdOrGroup) => { + const groupId = typeof groupIdOrGroup === 'object' ? groupIdOrGroup.id : groupIdOrGroup; + return get().channelGroups[groupId]?.canDelete ?? true; + }, })); export default useChannelsStore; From 9b7aa0c8946bccf65b641bf41fd330caa33fab96 Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Sat, 12 Jul 2025 17:05:48 -0500 Subject: [PATCH 0598/1435] Add ability to cleanup all unused groups. 
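The cleanup action is a single round trip, and the response reports exactly what was removed so the UI can show a meaningful toast. A minimal sketch of calling it directly (auth omitted; the app's request() wrapper normally adds it):

    import { notifications } from '@mantine/notifications';

    const cleanupGroups = async () => {
      const res = await fetch('/api/channels/groups/cleanup/', { method: 'POST' });
      // Response shape: { message, deleted_count, deleted_groups }
      const { deleted_count, deleted_groups } = await res.json();
      notifications.show({
        title: 'Cleanup Complete',
        message: `Deleted ${deleted_count} groups: ${deleted_groups.join(', ')}`,
        color: 'green',
      });
    };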
--- apps/channels/api_views.py | 45 ++++++++++++++++--- frontend/src/api.js | 16 +++++++ .../src/components/forms/GroupManager.jsx | 41 ++++++++++++++++- 3 files changed, 93 insertions(+), 9 deletions(-) diff --git a/apps/channels/api_views.py b/apps/channels/api_views.py index b4df2461..f0f59f29 100644 --- a/apps/channels/api_views.py +++ b/apps/channels/api_views.py @@ -198,47 +198,78 @@ class ChannelGroupViewSet(viewsets.ModelViewSet): def update(self, request, *args, **kwargs): """Override update to check M3U associations""" instance = self.get_object() - + # Check if group has M3U account associations if hasattr(instance, 'm3u_account') and instance.m3u_account.exists(): return Response( {"error": "Cannot edit group with M3U account associations"}, status=status.HTTP_400_BAD_REQUEST ) - + return super().update(request, *args, **kwargs) def partial_update(self, request, *args, **kwargs): """Override partial_update to check M3U associations""" instance = self.get_object() - + # Check if group has M3U account associations if hasattr(instance, 'm3u_account') and instance.m3u_account.exists(): return Response( {"error": "Cannot edit group with M3U account associations"}, status=status.HTTP_400_BAD_REQUEST ) - + return super().partial_update(request, *args, **kwargs) + @swagger_auto_schema( + method="post", + operation_description="Delete all channel groups that have no associations (no channels or M3U accounts)", + responses={200: "Cleanup completed"}, + ) + @action(detail=False, methods=["post"], url_path="cleanup") + def cleanup_unused_groups(self, request): + """Delete all channel groups with no channels or M3U account associations""" + from django.db.models import Count + + # Find groups with no channels and no M3U account associations + unused_groups = ChannelGroup.objects.annotate( + channel_count=Count('channels', distinct=True), + m3u_account_count=Count('m3u_account', distinct=True) + ).filter( + channel_count=0, + m3u_account_count=0 + ) + + deleted_count = unused_groups.count() + group_names = list(unused_groups.values_list('name', flat=True)) + + # Delete the unused groups + unused_groups.delete() + + return Response({ + "message": f"Successfully deleted {deleted_count} unused channel groups", + "deleted_count": deleted_count, + "deleted_groups": group_names + }) + def destroy(self, request, *args, **kwargs): """Override destroy to check for associations before deletion""" instance = self.get_object() - + # Check if group has associated channels if instance.channels.exists(): return Response( {"error": "Cannot delete group with associated channels"}, status=status.HTTP_400_BAD_REQUEST ) - + # Check if group has M3U account associations if hasattr(instance, 'm3u_account') and instance.m3u_account.exists(): return Response( {"error": "Cannot delete group with M3U account associations"}, status=status.HTTP_400_BAD_REQUEST ) - + return super().destroy(request, *args, **kwargs) diff --git a/frontend/src/api.js b/frontend/src/api.js index 9786bb75..e9ab4deb 100644 --- a/frontend/src/api.js +++ b/frontend/src/api.js @@ -301,6 +301,22 @@ export default class API { } } + static async cleanupUnusedChannelGroups() { + try { + const response = await request(`${host}/api/channels/groups/cleanup/`, { + method: 'POST', + }); + + // Refresh channel groups to update the UI + useChannelsStore.getState().fetchChannelGroups(); + + return response; + } catch (e) { + errorNotification('Failed to cleanup unused channel groups', e); + throw e; + } + } + static async addChannel(channel) { try { let 
body = null; diff --git a/frontend/src/components/forms/GroupManager.jsx b/frontend/src/components/forms/GroupManager.jsx index f6bf7305..f89c9228 100644 --- a/frontend/src/components/forms/GroupManager.jsx +++ b/frontend/src/components/forms/GroupManager.jsx @@ -21,7 +21,8 @@ import { X, AlertCircle, Database, - Tv + Tv, + Trash } from 'lucide-react'; import { notifications } from '@mantine/notifications'; import useChannelsStore from '../../store/channels'; @@ -135,6 +136,7 @@ const GroupManager = React.memo(({ isOpen, onClose }) => { const [groupUsage, setGroupUsage] = useState({}); const [loading, setLoading] = useState(false); const [searchTerm, setSearchTerm] = useState(''); + const [isCleaningUp, setIsCleaningUp] = useState(false); // Memoize the channel groups array to prevent unnecessary re-renders const channelGroupsArray = useMemo(() => @@ -305,6 +307,29 @@ const GroupManager = React.memo(({ isOpen, onClose }) => { setSearchTerm(e.target.value); }, []); + const handleCleanup = useCallback(async () => { + setIsCleaningUp(true); + try { + const result = await API.cleanupUnusedChannelGroups(); + + notifications.show({ + title: 'Cleanup Complete', + message: `Successfully deleted ${result.deleted_count} unused groups`, + color: 'green', + }); + + fetchGroupUsage(); // Refresh usage data + } catch (error) { + notifications.show({ + title: 'Cleanup Failed', + message: 'Failed to cleanup unused groups', + color: 'red', + }); + } finally { + setIsCleaningUp(false); + } + }, [fetchGroupUsage]); + if (!isOpen) return null; return ( @@ -322,7 +347,19 @@ const GroupManager = React.memo(({ isOpen, onClose }) => { {/* Create new group section */} - Create New Group + + Create New Group + + {isCreating ? ( <> From 171d64841a566e79f0fb7dc84d3185016c2d5b99 Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Sat, 12 Jul 2025 17:28:04 -0500 Subject: [PATCH 0599/1435] Changed some colors to match our theme better. --- .../src/components/forms/GroupManager.jsx | 91 ++++++++++--------- 1 file changed, 47 insertions(+), 44 deletions(-) diff --git a/frontend/src/components/forms/GroupManager.jsx b/frontend/src/components/forms/GroupManager.jsx index f89c9228..edc04d20 100644 --- a/frontend/src/components/forms/GroupManager.jsx +++ b/frontend/src/components/forms/GroupManager.jsx @@ -12,11 +12,12 @@ import { Alert, Divider, ScrollArea, + useMantineTheme, } from '@mantine/core'; import { SquarePlus, SquarePen, - Trash2, + SquareMinus, Check, X, AlertCircle, @@ -42,6 +43,8 @@ const GroupItem = React.memo(({ canEditGroup, canDeleteGroup }) => { + const theme = useMantineTheme(); + const getGroupBadges = (group) => { const usage = groupUsage[group.id]; const badges = []; @@ -69,7 +72,7 @@ const GroupItem = React.memo(({ {editingGroup === group.id ? ( @@ -103,20 +106,22 @@ const GroupItem = React.memo(({ ) : ( <> onEdit(group)} disabled={!canEditGroup(group)} > - + onDelete(group)} disabled={!canDeleteGroup(group)} > - + )} @@ -346,9 +351,39 @@ const GroupManager = React.memo(({ isOpen, onClose }) => { {/* Create new group section */} - - - Create New Group + + {isCreating ? ( + + e.key === 'Enter' && handleCreate()} + autoFocus + /> + + + + { + setIsCreating(false); + setNewGroupName(''); + }}> + + + + ) : ( + + )} + + {!isCreating && ( - - - {isCreating ? 
( - <> - e.key === 'Enter' && handleCreate()} - autoFocus - /> - - - - { - setIsCreating(false); - setNewGroupName(''); - }}> - - - - ) : ( - - )} - - + )} + @@ -400,7 +403,7 @@ const GroupManager = React.memo(({ isOpen, onClose }) => { - Existing Groups ({filteredGroups.length}{searchTerm && ` of ${sortedGroups.length}`}) + Groups ({filteredGroups.length}{searchTerm && ` of ${sortedGroups.length}`}) Date: Sat, 12 Jul 2025 17:37:24 -0500 Subject: [PATCH 0600/1435] Add filtering based on group membership. --- .../src/components/forms/GroupManager.jsx | 139 ++++++++++++++++-- 1 file changed, 125 insertions(+), 14 deletions(-) diff --git a/frontend/src/components/forms/GroupManager.jsx b/frontend/src/components/forms/GroupManager.jsx index edc04d20..3b63b738 100644 --- a/frontend/src/components/forms/GroupManager.jsx +++ b/frontend/src/components/forms/GroupManager.jsx @@ -13,6 +13,7 @@ import { Divider, ScrollArea, useMantineTheme, + Chip, } from '@mantine/core'; import { SquarePlus, @@ -23,7 +24,8 @@ import { AlertCircle, Database, Tv, - Trash + Trash, + Filter } from 'lucide-react'; import { notifications } from '@mantine/notifications'; import useChannelsStore from '../../store/channels'; @@ -142,6 +144,9 @@ const GroupManager = React.memo(({ isOpen, onClose }) => { const [loading, setLoading] = useState(false); const [searchTerm, setSearchTerm] = useState(''); const [isCleaningUp, setIsCleaningUp] = useState(false); + const [showChannelGroups, setShowChannelGroups] = useState(true); + const [showM3UGroups, setShowM3UGroups] = useState(true); + const [showUnusedGroups, setShowUnusedGroups] = useState(true); // Memoize the channel groups array to prevent unnecessary re-renders const channelGroupsArray = useMemo(() => @@ -155,13 +160,75 @@ const GroupManager = React.memo(({ isOpen, onClose }) => { [channelGroupsArray] ); - // Filter groups based on search term + // Filter groups based on search term and chip filters const filteredGroups = useMemo(() => { - if (!searchTerm.trim()) return sortedGroups; - return sortedGroups.filter(group => - group.name.toLowerCase().includes(searchTerm.toLowerCase()) - ); - }, [sortedGroups, searchTerm]); + let filtered = sortedGroups; + + // Apply search filter + if (searchTerm.trim()) { + filtered = filtered.filter(group => + group.name.toLowerCase().includes(searchTerm.toLowerCase()) + ); + } + + // Apply chip filters + filtered = filtered.filter(group => { + const usage = groupUsage[group.id]; + if (!usage) return false; + + const hasChannels = usage.hasChannels; + const hasM3U = usage.hasM3UAccounts; + const isUnused = !hasChannels && !hasM3U; + + // If group is unused, only show if unused groups are enabled + if (isUnused) { + return showUnusedGroups; + } + + // For groups with channels and/or M3U, show if either filter is enabled + let shouldShow = false; + if (hasChannels && showChannelGroups) shouldShow = true; + if (hasM3U && showM3UGroups) shouldShow = true; + + return shouldShow; + }); + + return filtered; + }, [sortedGroups, searchTerm, showChannelGroups, showM3UGroups, showUnusedGroups, groupUsage]); + + // Calculate filter counts + const filterCounts = useMemo(() => { + const counts = { + channels: 0, + m3u: 0, + unused: 0 + }; + + sortedGroups.forEach(group => { + const usage = groupUsage[group.id]; + if (usage) { + const hasChannels = usage.hasChannels; + const hasM3U = usage.hasM3UAccounts; + + // Count groups with channels (including those with both) + if (hasChannels) { + counts.channels++; + } + + // Count groups with M3U (including those 
with both) + if (hasM3U) { + counts.m3u++; + } + + // Count truly unused groups + if (!hasChannels && !hasM3U) { + counts.unused++; + } + } + }); + + return counts; + }, [sortedGroups, groupUsage]); // Fetch group usage information when modal opens useEffect(() => { @@ -342,7 +409,7 @@ const GroupManager = React.memo(({ isOpen, onClose }) => { opened={isOpen} onClose={onClose} title="Group Manager" - size="md" + size="lg" scrollAreaComponent={ScrollArea.Autosize} > @@ -399,12 +466,13 @@ const GroupManager = React.memo(({ isOpen, onClose }) => { - {/* Existing groups */} - + {/* Filter Controls */} + - - Groups ({filteredGroups.length}{searchTerm && ` of ${sortedGroups.length}`}) - + + + Filter Groups + { /> + + Show: + + + + Channel Groups ({filterCounts.channels}) + + + + + + M3U Groups ({filterCounts.m3u}) + + + + Unused Groups ({filterCounts.unused}) + + + + + + + {/* Existing groups */} + + + Groups ({filteredGroups.length}{(searchTerm || !showChannelGroups || !showM3UGroups || !showUnusedGroups) && ` of ${sortedGroups.length}`}) + + {loading ? ( Loading group information... ) : filteredGroups.length === 0 ? ( - {searchTerm ? 'No groups found matching your search' : 'No groups found'} + {searchTerm || !showChannelGroups || !showM3UGroups || !showUnusedGroups ? 'No groups found matching your filters' : 'No groups found'} ) : ( From 2da8273de64fd19324ef4ea8769ab32141f75c3c Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Sat, 12 Jul 2025 17:41:35 -0500 Subject: [PATCH 0601/1435] Add confirmation for deleting and cleaning up groups. --- .../src/components/forms/GroupManager.jsx | 452 +++++++++++------- 1 file changed, 268 insertions(+), 184 deletions(-) diff --git a/frontend/src/components/forms/GroupManager.jsx b/frontend/src/components/forms/GroupManager.jsx index 3b63b738..abb44727 100644 --- a/frontend/src/components/forms/GroupManager.jsx +++ b/frontend/src/components/forms/GroupManager.jsx @@ -29,6 +29,8 @@ import { } from 'lucide-react'; import { notifications } from '@mantine/notifications'; import useChannelsStore from '../../store/channels'; +import useWarningsStore from '../../store/warnings'; +import ConfirmationDialog from '../ConfirmationDialog'; import API from '../../api'; // Move GroupItem outside to prevent recreation on every render @@ -136,6 +138,9 @@ const GroupManager = React.memo(({ isOpen, onClose }) => { const channelGroups = useChannelsStore((s) => s.channelGroups); const canEditChannelGroup = useChannelsStore((s) => s.canEditChannelGroup); const canDeleteChannelGroup = useChannelsStore((s) => s.canDeleteChannelGroup); + const isWarningSuppressed = useWarningsStore((s) => s.isWarningSuppressed); + const suppressWarning = useWarningsStore((s) => s.suppressWarning); + const [editingGroup, setEditingGroup] = useState(null); const [editName, setEditName] = useState(''); const [newGroupName, setNewGroupName] = useState(''); @@ -148,6 +153,11 @@ const GroupManager = React.memo(({ isOpen, onClose }) => { const [showM3UGroups, setShowM3UGroups] = useState(true); const [showUnusedGroups, setShowUnusedGroups] = useState(true); + // Confirmation dialog states + const [confirmDeleteOpen, setConfirmDeleteOpen] = useState(false); + const [groupToDelete, setGroupToDelete] = useState(null); + const [confirmCleanupOpen, setConfirmCleanupOpen] = useState(false); + // Memoize the channel groups array to prevent unnecessary re-renders const channelGroupsArray = useMemo(() => Object.values(channelGroups), @@ -348,6 +358,18 @@ const GroupManager = React.memo(({ isOpen, onClose }) => 
{ return; } + // Store group for confirmation dialog + setGroupToDelete(group); + + // Skip warning if it's been suppressed + if (isWarningSuppressed('delete-group')) { + return executeDeleteGroup(group); + } + + setConfirmDeleteOpen(true); + }, [groupUsage, isWarningSuppressed]); + + const executeDeleteGroup = useCallback(async (group) => { try { await API.deleteChannelGroup(group.id); @@ -358,14 +380,50 @@ const GroupManager = React.memo(({ isOpen, onClose }) => { }); fetchGroupUsage(); // Refresh usage data + setConfirmDeleteOpen(false); } catch (error) { notifications.show({ title: 'Error', message: 'Failed to delete group', color: 'red', }); + setConfirmDeleteOpen(false); } - }, [groupUsage, fetchGroupUsage]); + }, [fetchGroupUsage]); + + const handleCleanup = useCallback(async () => { + // Skip warning if it's been suppressed + if (isWarningSuppressed('cleanup-groups')) { + return executeCleanup(); + } + + setConfirmCleanupOpen(true); + }, [isWarningSuppressed]); + + const executeCleanup = useCallback(async () => { + setIsCleaningUp(true); + try { + const result = await API.cleanupUnusedChannelGroups(); + + notifications.show({ + title: 'Cleanup Complete', + message: `Successfully deleted ${result.deleted_count} unused groups`, + color: 'green', + }); + + fetchGroupUsage(); // Refresh usage data + setConfirmCleanupOpen(false); + } catch (error) { + notifications.show({ + title: 'Cleanup Failed', + message: 'Failed to cleanup unused groups', + color: 'red', + }); + setConfirmCleanupOpen(false); + } finally { + setIsCleaningUp(false); + } + }, [fetchGroupUsage]); const handleNewGroupNameChange = useCallback((e) => { setNewGroupName(e.target.value); @@ -379,198 +437,224 @@ const GroupManager = React.memo(({ isOpen, onClose }) => { setSearchTerm(e.target.value); }, []); - const handleCleanup = useCallback(async () => { - setIsCleaningUp(true); - try { - const result = await API.cleanupUnusedChannelGroups(); - - notifications.show({ - title: 'Cleanup Complete', - message: `Successfully deleted ${result.deleted_count} unused groups`, - color: 'green', - }); - - fetchGroupUsage(); // Refresh usage data - } catch (error) { - notifications.show({ - title: 'Cleanup Failed', - message: 'Failed to cleanup unused groups', - color: 'red', - }); - } finally { - setIsCleaningUp(false); - } - }, [fetchGroupUsage]); - if (!isOpen) return null; return ( - - - } color="blue" variant="light"> - Manage channel groups. Groups associated with M3U accounts or containing channels cannot be deleted. - - - {/* Create new group section */} - - {isCreating ? ( - - e.key === 'Enter' && handleCreate()} - autoFocus - /> - - - - { - setIsCreating(false); - setNewGroupName(''); - }}> - - - - ) : ( - - )} - - {!isCreating && ( - - )} - - - - - {/* Filter Controls */} - - - - - Filter Groups - - setSearchTerm('')} - > - - - )} - /> - - - - Show: - - - - Channel Groups ({filterCounts.channels}) - - - - - - M3U Groups ({filterCounts.m3u}) - - - - Unused Groups ({filterCounts.unused}) - - - - - - - {/* Existing groups */} + <> + - - Groups ({filteredGroups.length}{(searchTerm || !showChannelGroups || !showM3UGroups || !showUnusedGroups) && ` of ${sortedGroups.length}`}) - + } color="blue" variant="light"> + Manage channel groups. Groups associated with M3U accounts or containing channels cannot be deleted. + - {loading ? ( - Loading group information... - ) : filteredGroups.length === 0 ? ( - - {searchTerm || !showChannelGroups || !showM3UGroups || !showUnusedGroups ? 
'No groups found matching your filters' : 'No groups found'} - - ) : ( - - {filteredGroups.map((group) => ( - + {isCreating ? ( + + e.key === 'Enter' && handleCreate()} + autoFocus /> - ))} - - )} + + + + { + setIsCreating(false); + setNewGroupName(''); + }}> + + + + ) : ( + + )} + + {!isCreating && ( + + )} + + + + + {/* Filter Controls */} + + + + + Filter Groups + + setSearchTerm('')} + > + + + )} + /> + + + + Show: + + + + Channel Groups ({filterCounts.channels}) + + + + + + M3U Groups ({filterCounts.m3u}) + + + + Unused Groups ({filterCounts.unused}) + + + + + + + {/* Existing groups */} + + + Groups ({filteredGroups.length}{(searchTerm || !showChannelGroups || !showM3UGroups || !showUnusedGroups) && ` of ${sortedGroups.length}`}) + + + {loading ? ( + Loading group information... + ) : filteredGroups.length === 0 ? ( + + {searchTerm || !showChannelGroups || !showM3UGroups || !showUnusedGroups ? 'No groups found matching your filters' : 'No groups found'} + + ) : ( + + {filteredGroups.map((group) => ( + + ))} + + )} + + + + + + + + - + setConfirmDeleteOpen(false)} + onConfirm={() => executeDeleteGroup(groupToDelete)} + title="Confirm Group Deletion" + message={ + groupToDelete ? ( +
+ {`Are you sure you want to delete the following group?
+
+Name: ${groupToDelete.name}
+
+This action cannot be undone.`}
    + ) : ( + 'Are you sure you want to delete this group? This action cannot be undone.' + ) + } + confirmLabel="Delete" + cancelLabel="Cancel" + actionKey="delete-group" + onSuppressChange={suppressWarning} + size="md" + /> + + setConfirmCleanupOpen(false)} + onConfirm={executeCleanup} + title="Confirm Group Cleanup" + message={ +
+ {`Are you sure you want to cleanup all unused groups?
+
+This will permanently delete all groups that are not associated with any channels or M3U accounts.
+
+This action cannot be undone.`}
    + } + confirmLabel="Cleanup" + cancelLabel="Cancel" + actionKey="cleanup-groups" + onSuppressChange={suppressWarning} + size="md" + /> + ); }); + export default GroupManager; From 35d95c47c724dcf1563e7bb69b37783f11a4fbdd Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Sat, 12 Jul 2025 17:48:56 -0500 Subject: [PATCH 0602/1435] Fixed z index issue when stream table was refreshing. --- frontend/src/components/ConfirmationDialog.jsx | 12 ++++++++++-- frontend/src/components/forms/GroupManager.jsx | 3 +++ 2 files changed, 13 insertions(+), 2 deletions(-) diff --git a/frontend/src/components/ConfirmationDialog.jsx b/frontend/src/components/ConfirmationDialog.jsx index 822b46f1..8f96708d 100644 --- a/frontend/src/components/ConfirmationDialog.jsx +++ b/frontend/src/components/ConfirmationDialog.jsx @@ -27,7 +27,8 @@ const ConfirmationDialog = ({ cancelLabel = 'Cancel', actionKey, onSuppressChange, - size = 'md', // Add default size parameter - md is a medium width + size = 'md', + zIndex = 1000, }) => { const suppressWarning = useWarningsStore((s) => s.suppressWarning); const isWarningSuppressed = useWarningsStore((s) => s.isWarningSuppressed); @@ -50,7 +51,14 @@ const ConfirmationDialog = ({ }; return ( - + {message} {actionKey && ( diff --git a/frontend/src/components/forms/GroupManager.jsx b/frontend/src/components/forms/GroupManager.jsx index abb44727..48ca85b1 100644 --- a/frontend/src/components/forms/GroupManager.jsx +++ b/frontend/src/components/forms/GroupManager.jsx @@ -447,6 +447,7 @@ const GroupManager = React.memo(({ isOpen, onClose }) => { title="Group Manager" size="lg" scrollAreaComponent={ScrollArea.Autosize} + zIndex={2000} > } color="blue" variant="light"> @@ -631,6 +632,7 @@ This action cannot be undone.`} actionKey="delete-group" onSuppressChange={suppressWarning} size="md" + zIndex={2100} /> ); From 8b361ee6466c212df64951e645b44fa0cab25cbf Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Sat, 12 Jul 2025 18:12:25 -0500 Subject: [PATCH 0603/1435] Fix eslint issues. 
--- .../src/components/forms/GroupManager.jsx | 89 ++++++++++--------- 1 file changed, 45 insertions(+), 44 deletions(-) diff --git a/frontend/src/components/forms/GroupManager.jsx b/frontend/src/components/forms/GroupManager.jsx index 48ca85b1..253a2b9c 100644 --- a/frontend/src/components/forms/GroupManager.jsx +++ b/frontend/src/components/forms/GroupManager.jsx @@ -240,13 +240,6 @@ const GroupManager = React.memo(({ isOpen, onClose }) => { return counts; }, [sortedGroups, groupUsage]); - // Fetch group usage information when modal opens - useEffect(() => { - if (isOpen) { - fetchGroupUsage(); - } - }, [isOpen]); - const fetchGroupUsage = useCallback(async () => { setLoading(true); try { @@ -270,6 +263,13 @@ const GroupManager = React.memo(({ isOpen, onClose }) => { } }, [channelGroups]); + // Fetch group usage information when modal opens + useEffect(() => { + if (isOpen) { + fetchGroupUsage(); + } + }, [isOpen, fetchGroupUsage]); + const handleEdit = useCallback((group) => { setEditingGroup(group.id); setEditName(group.name); @@ -299,6 +299,7 @@ const GroupManager = React.memo(({ isOpen, onClose }) => { setEditingGroup(null); setEditName(''); + await fetchGroupUsage(); // Refresh usage data } catch (error) { notifications.show({ title: 'Error', @@ -306,7 +307,7 @@ const GroupManager = React.memo(({ isOpen, onClose }) => { color: 'red', }); } - }, [editName, editingGroup]); + }, [editName, editingGroup, fetchGroupUsage]); const handleCancelEdit = useCallback(() => { setEditingGroup(null); @@ -336,7 +337,7 @@ const GroupManager = React.memo(({ isOpen, onClose }) => { setNewGroupName(''); setIsCreating(false); - fetchGroupUsage(); // Refresh usage data + await fetchGroupUsage(); // Refresh usage data } catch (error) { notifications.show({ title: 'Error', @@ -344,7 +345,29 @@ const GroupManager = React.memo(({ isOpen, onClose }) => { color: 'red', }); } - }, [newGroupName]); + }, [newGroupName, fetchGroupUsage]); + + const executeDeleteGroup = useCallback(async (group) => { + try { + await API.deleteChannelGroup(group.id); + + notifications.show({ + title: 'Success', + message: 'Group deleted successfully', + color: 'green', + }); + + await fetchGroupUsage(); // Refresh usage data + setConfirmDeleteOpen(false); + } catch (error) { + notifications.show({ + title: 'Error', + message: 'Failed to delete group', + color: 'red', + }); + setConfirmDeleteOpen(false); + } + }, [fetchGroupUsage]); const handleDelete = useCallback(async (group) => { const usage = groupUsage[group.id]; @@ -367,38 +390,7 @@ const GroupManager = React.memo(({ isOpen, onClose }) => { } setConfirmDeleteOpen(true); - }, [groupUsage, isWarningSuppressed]); - - const executeDeleteGroup = useCallback(async (group) => { - try { - await API.deleteChannelGroup(group.id); - - notifications.show({ - title: 'Success', - message: 'Group deleted successfully', - color: 'green', - }); - - fetchGroupUsage(); // Refresh usage data - setConfirmDeleteOpen(false); - } catch (error) { - notifications.show({ - title: 'Error', - message: 'Failed to delete group', - color: 'red', - }); - setConfirmDeleteOpen(false); - } - }, [fetchGroupUsage]); - - const handleCleanup = useCallback(async () => { - // Skip warning if it's been suppressed - if (isWarningSuppressed('cleanup-groups')) { - return executeCleanup(); - } - - setConfirmCleanupOpen(true); - }, [isWarningSuppressed]); + }, [groupUsage, isWarningSuppressed, executeDeleteGroup]); const executeCleanup = useCallback(async () => { setIsCleaningUp(true); @@ -411,7 +403,7 @@ const GroupManager 
= React.memo(({ isOpen, onClose }) => { color: 'green', }); - fetchGroupUsage(); // Refresh usage data + await fetchGroupUsage(); // Refresh usage data setConfirmCleanupOpen(false); } catch (error) { notifications.show({ @@ -425,6 +417,15 @@ const GroupManager = React.memo(({ isOpen, onClose }) => { } }, [fetchGroupUsage]); + const handleCleanup = useCallback(async () => { + // Skip warning if it's been suppressed + if (isWarningSuppressed('cleanup-groups')) { + return executeCleanup(); + } + + setConfirmCleanupOpen(true); + }, [isWarningSuppressed, executeCleanup]); + const handleNewGroupNameChange = useCallback((e) => { setNewGroupName(e.target.value); }, []); @@ -612,7 +613,7 @@ const GroupManager = React.memo(({ isOpen, onClose }) => { setConfirmDeleteOpen(false)} - onConfirm={() => executeDeleteGroup(groupToDelete)} + onConfirm={() => groupToDelete && executeDeleteGroup(groupToDelete)} title="Confirm Group Deletion" message={ groupToDelete ? ( From c4e5710b484ce8d31673d194e3636e8ed521cdca Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Sat, 12 Jul 2025 19:05:06 -0500 Subject: [PATCH 0604/1435] When adding a group. Fetch groups after. --- frontend/src/api.js | 2 ++ 1 file changed, 2 insertions(+) diff --git a/frontend/src/api.js b/frontend/src/api.js index e9ab4deb..e0a62160 100644 --- a/frontend/src/api.js +++ b/frontend/src/api.js @@ -259,6 +259,8 @@ export default class API { canDelete: true }; useChannelsStore.getState().addChannelGroup(processedGroup); + // Refresh channel groups to update the UI + useChannelsStore.getState().fetchChannelGroups(); } return response; From 69f8f426a627af88b7a8d85389501b40a3bcd4a1 Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Sat, 12 Jul 2025 19:10:59 -0500 Subject: [PATCH 0605/1435] Refactor menu items in ChannelTableHeader to fix html error. --- .../ChannelsTable/ChannelTableHeader.jsx | 18 ++++++------------ 1 file changed, 6 insertions(+), 12 deletions(-) diff --git a/frontend/src/components/tables/ChannelsTable/ChannelTableHeader.jsx b/frontend/src/components/tables/ChannelsTable/ChannelTableHeader.jsx index 1568e10d..72372cc7 100644 --- a/frontend/src/components/tables/ChannelsTable/ChannelTableHeader.jsx +++ b/frontend/src/components/tables/ChannelsTable/ChannelTableHeader.jsx @@ -287,31 +287,25 @@ const ChannelTableHeader = ({ selectedTableIds.length == 0 || authUser.user_level != USER_LEVELS.ADMIN } + onClick={() => setAssignNumbersModalOpen(true)} > - setAssignNumbersModalOpen(true)} - > - Assign #s - + Assign #s } disabled={authUser.user_level != USER_LEVELS.ADMIN} + onClick={matchEpg} > - - Auto-Match - + Auto-Match } disabled={authUser.user_level != USER_LEVELS.ADMIN} + onClick={() => setGroupManagerOpen(true)} > - setGroupManagerOpen(true)}> - Edit Groups - + Edit Groups From ea81cfb1afd426bb8b12df6bb5205673634e31a1 Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Sun, 13 Jul 2025 15:59:25 -0500 Subject: [PATCH 0606/1435] Add auto channel sync settings to ChannelGroupM3UAccount and update related components - Introduced `auto_channel_sync` and `auto_sync_channel_start` fields in the ChannelGroupM3UAccount model. - Added API endpoint to update M3U group settings. - Updated M3UGroupFilter component to manage auto sync settings. - Enhanced M3URefreshNotification and M3U components for better user guidance. - Created a Celery task for automatic channel synchronization after M3U refresh. 
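For reference, the new group-settings action accepts a batched payload, one entry per group row in the filter UI. A sketch of the call, assuming the M3U account viewset is routed under /api/m3u/accounts/ (ids and channel numbers are illustrative):

    await fetch(`/api/m3u/accounts/${accountId}/group-settings/`, {
      method: 'PATCH',
      headers: { 'Content-Type': 'application/json' },
      body: JSON.stringify({
        group_settings: [
          // Enable auto sync for this group, numbering channels from 100.
          { channel_group: 12, enabled: true, auto_channel_sync: true, auto_sync_channel_start: 100.0 },
          // Keep this group enabled but leave channel creation manual.
          { channel_group: 13, enabled: true, auto_channel_sync: false, auto_sync_channel_start: null },
        ],
      }),
    });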
--- ...upm3uaccount_auto_channel_sync_and_more.py | 23 +++ apps/channels/models.py | 9 ++ apps/channels/serializers.py | 4 +- apps/m3u/api_views.py | 34 ++++- apps/m3u/tasks.py | 144 ++++++++++++++++++ dispatcharr/celery.py | 1 + frontend/src/api.js | 13 ++ .../src/components/M3URefreshNotification.jsx | 4 +- frontend/src/components/forms/M3U.jsx | 3 +- .../src/components/forms/M3UGroupFilter.jsx | 140 ++++++++++++----- 10 files changed, 334 insertions(+), 41 deletions(-) create mode 100644 apps/channels/migrations/0022_channelgroupm3uaccount_auto_channel_sync_and_more.py diff --git a/apps/channels/migrations/0022_channelgroupm3uaccount_auto_channel_sync_and_more.py b/apps/channels/migrations/0022_channelgroupm3uaccount_auto_channel_sync_and_more.py new file mode 100644 index 00000000..a0c94c7d --- /dev/null +++ b/apps/channels/migrations/0022_channelgroupm3uaccount_auto_channel_sync_and_more.py @@ -0,0 +1,23 @@ +# Generated by Django 5.1.6 on 2025-07-13 20:40 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('dispatcharr_channels', '0021_channel_user_level'), + ] + + operations = [ + migrations.AddField( + model_name='channelgroupm3uaccount', + name='auto_channel_sync', + field=models.BooleanField(default=False, help_text='Automatically create/delete channels to match streams in this group'), + ), + migrations.AddField( + model_name='channelgroupm3uaccount', + name='auto_sync_channel_start', + field=models.FloatField(blank=True, help_text='Starting channel number for auto-created channels in this group', null=True), + ), + ] diff --git a/apps/channels/models.py b/apps/channels/models.py index 1bcbcc41..b6333aab 100644 --- a/apps/channels/models.py +++ b/apps/channels/models.py @@ -541,6 +541,15 @@ class ChannelGroupM3UAccount(models.Model): ) custom_properties = models.TextField(null=True, blank=True) enabled = models.BooleanField(default=True) + auto_channel_sync = models.BooleanField( + default=False, + help_text='Automatically create/delete channels to match streams in this group' + ) + auto_sync_channel_start = models.FloatField( + null=True, + blank=True, + help_text='Starting channel number for auto-created channels in this group' + ) class Meta: unique_together = ("channel_group", "m3u_account") diff --git a/apps/channels/serializers.py b/apps/channels/serializers.py index 4d1694dc..0eb5acc3 100644 --- a/apps/channels/serializers.py +++ b/apps/channels/serializers.py @@ -289,10 +289,12 @@ class ChannelSerializer(serializers.ModelSerializer): class ChannelGroupM3UAccountSerializer(serializers.ModelSerializer): enabled = serializers.BooleanField() + auto_channel_sync = serializers.BooleanField(default=False) + auto_sync_channel_start = serializers.FloatField(allow_null=True, required=False) class Meta: model = ChannelGroupM3UAccount - fields = ["id", "channel_group", "enabled"] + fields = ["id", "channel_group", "enabled", "auto_channel_sync", "auto_sync_channel_start"] # Optionally, if you only need the id of the ChannelGroup, you can customize it like this: # channel_group = serializers.PrimaryKeyRelatedField(queryset=ChannelGroup.objects.all()) diff --git a/apps/m3u/api_views.py b/apps/m3u/api_views.py index 0ef42272..39b9e22e 100644 --- a/apps/m3u/api_views.py +++ b/apps/m3u/api_views.py @@ -16,13 +16,11 @@ from rest_framework.decorators import action from django.conf import settings from .tasks import refresh_m3u_groups -# Import all models, including UserAgent. 
from .models import M3UAccount, M3UFilter, ServerGroup, M3UAccountProfile from core.models import UserAgent from apps.channels.models import ChannelGroupM3UAccount from core.serializers import UserAgentSerializer -# Import all serializers, including the UserAgentSerializer. from .serializers import ( M3UAccountSerializer, M3UFilterSerializer, @@ -144,6 +142,38 @@ class M3UAccountViewSet(viewsets.ModelViewSet): # Continue with regular partial update return super().partial_update(request, *args, **kwargs) + @action(detail=True, methods=["patch"], url_path="group-settings") + def update_group_settings(self, request, pk=None): + """Update auto channel sync settings for M3U account groups""" + account = self.get_object() + group_settings = request.data.get("group_settings", []) + + try: + for setting in group_settings: + group_id = setting.get("channel_group") + enabled = setting.get("enabled", True) + auto_sync = setting.get("auto_channel_sync", False) + sync_start = setting.get("auto_sync_channel_start") + + if group_id: + ChannelGroupM3UAccount.objects.update_or_create( + channel_group_id=group_id, + m3u_account=account, + defaults={ + "enabled": enabled, + "auto_channel_sync": auto_sync, + "auto_sync_channel_start": sync_start, + }, + ) + + return Response({"message": "Group settings updated successfully"}) + + except Exception as e: + return Response( + {"error": f"Failed to update group settings: {str(e)}"}, + status=status.HTTP_400_BAD_REQUEST, + ) + class M3UFilterViewSet(viewsets.ModelViewSet): """Handles CRUD operations for M3U filters""" diff --git a/apps/m3u/tasks.py b/apps/m3u/tasks.py index 0b782649..b5614376 100644 --- a/apps/m3u/tasks.py +++ b/apps/m3u/tasks.py @@ -838,6 +838,144 @@ def delete_m3u_refresh_task_by_id(account_id): logger.error(f"Error deleting periodic task for M3UAccount {account_id}: {str(e)}", exc_info=True) return False +@shared_task +def sync_auto_channels(account_id): + """ + Automatically create/delete channels to match streams in groups with auto_channel_sync enabled. + Called after M3U refresh completes successfully. 
+ """ + from apps.channels.models import Channel, ChannelGroup, ChannelGroupM3UAccount, Stream, ChannelStream + from apps.epg.models import EPGData + import json + + try: + account = M3UAccount.objects.get(id=account_id) + logger.info(f"Starting auto channel sync for M3U account {account.name}") + + # Get groups with auto sync enabled for this account + auto_sync_groups = ChannelGroupM3UAccount.objects.filter( + m3u_account=account, + enabled=True, + auto_channel_sync=True + ).select_related('channel_group') + + channels_created = 0 + channels_deleted = 0 + + for group_relation in auto_sync_groups: + channel_group = group_relation.channel_group + start_number = group_relation.auto_sync_channel_start or 1.0 + + logger.info(f"Processing auto sync for group: {channel_group.name} (start: {start_number})") + + # Get all streams in this group for this M3U account + current_streams = Stream.objects.filter( + m3u_account=account, + channel_group=channel_group + ) + + # Get existing channels in this group that were auto-created (we'll track this via a custom property) + existing_auto_channels = Channel.objects.filter( + channel_group=channel_group, + streams__m3u_account=account + ).distinct() + + # Create a mapping of stream hashes to existing channels + existing_channel_streams = {} + for channel in existing_auto_channels: + for stream in channel.streams.filter(m3u_account=account): + existing_channel_streams[stream.stream_hash] = channel + + # Track which channels should exist (based on current streams) + channels_to_keep = set() + current_channel_number = start_number + + # Create channels for streams that don't have them + for stream in current_streams.order_by('name'): + if stream.stream_hash in existing_channel_streams: + # Channel already exists for this stream + channels_to_keep.add(existing_channel_streams[stream.stream_hash].id) + continue + + # Find next available channel number + while Channel.objects.filter(channel_number=current_channel_number).exists(): + current_channel_number += 0.1 + + # Create new channel + try: + # Parse custom properties for additional info + stream_custom_props = json.loads(stream.custom_properties) if stream.custom_properties else {} + + # Get tvc_guide_stationid from custom properties if it exists + tvc_guide_stationid = stream_custom_props.get("tvc-guide-stationid") + + # Create the channel + channel = Channel.objects.create( + channel_number=current_channel_number, + name=stream.name, + tvg_id=stream.tvg_id, + tvc_guide_stationid=tvc_guide_stationid, + channel_group=channel_group, + user_level=0 # Default user level + ) + + # Associate the stream with the channel + ChannelStream.objects.create( + channel=channel, + stream=stream, + order=0 + ) + + # Try to match EPG data + if stream.tvg_id: + epg_data = EPGData.objects.filter(tvg_id=stream.tvg_id).first() + if epg_data: + channel.epg_data = epg_data + channel.save(update_fields=['epg_data']) + + # Handle logo + if stream.logo_url: + from apps.channels.models import Logo + logo, _ = Logo.objects.get_or_create( + url=stream.logo_url, + defaults={"name": stream.name or stream.tvg_id or "Unknown"} + ) + channel.logo = logo + channel.save(update_fields=['logo']) + + channels_to_keep.add(channel.id) + channels_created += 1 + current_channel_number += 1.0 + + logger.debug(f"Created auto channel: {channel.channel_number} - {channel.name}") + + except Exception as e: + logger.error(f"Error creating auto channel for stream {stream.name}: {str(e)}") + continue + + # Delete channels that no longer have corresponding 
streams + channels_to_delete = existing_auto_channels.exclude(id__in=channels_to_keep) + + for channel in channels_to_delete: + # Only delete if all streams for this channel are from this M3U account + # and this channel group + all_streams_from_account = all( + s.m3u_account_id == account.id and s.channel_group_id == channel_group.id + for s in channel.streams.all() + ) + + if all_streams_from_account: + logger.debug(f"Deleting auto channel: {channel.channel_number} - {channel.name}") + channel.delete() + channels_deleted += 1 + + logger.info(f"Auto channel sync complete for account {account.name}: {channels_created} created, {channels_deleted} deleted") + return f"Auto sync: {channels_created} channels created, {channels_deleted} deleted" + + except Exception as e: + logger.error(f"Error in auto channel sync for account {account_id}: {str(e)}") + return f"Auto sync error: {str(e)}" + @shared_task def refresh_single_m3u_account(account_id): """Splits M3U processing into chunks and dispatches them as parallel tasks.""" @@ -1120,6 +1258,12 @@ def refresh_single_m3u_account(account_id): message=account.last_message ) + # Run auto channel sync after successful refresh + try: + sync_result = sync_auto_channels(account_id) + logger.info(f"Auto channel sync result for account {account_id}: {sync_result}") + except Exception as e: + logger.error(f"Error running auto channel sync for account {account_id}: {str(e)}") except Exception as e: logger.error(f"Error processing M3U for account {account_id}: {str(e)}") account.status = M3UAccount.Status.ERROR diff --git a/dispatcharr/celery.py b/dispatcharr/celery.py index 8856d330..98c6210b 100644 --- a/dispatcharr/celery.py +++ b/dispatcharr/celery.py @@ -62,6 +62,7 @@ def cleanup_task_memory(**kwargs): 'apps.m3u.tasks.refresh_m3u_accounts', 'apps.m3u.tasks.process_m3u_batch', 'apps.m3u.tasks.process_xc_category', + 'apps.m3u.tasks.sync_auto_channels', 'apps.epg.tasks.refresh_epg_data', 'apps.epg.tasks.refresh_all_epg_data', 'apps.epg.tasks.parse_programs_for_source', diff --git a/frontend/src/api.js b/frontend/src/api.js index e0a62160..e34dabe2 100644 --- a/frontend/src/api.js +++ b/frontend/src/api.js @@ -733,6 +733,19 @@ export default class API { } } + static async updateM3UGroupSettings(playlistId, groupSettings) { + try { + const response = await request(`${host}/api/m3u/accounts/${playlistId}/group-settings/`, { + method: 'PATCH', + body: { group_settings: groupSettings }, + }); + + return response; + } catch (e) { + errorNotification('Failed to update M3U group settings', e); + } + } + static async addPlaylist(values) { if (values.custom_properties) { values.custom_properties = JSON.stringify(values.custom_properties); diff --git a/frontend/src/components/M3URefreshNotification.jsx b/frontend/src/components/M3URefreshNotification.jsx index 8a6647cb..3b57af37 100644 --- a/frontend/src/components/M3URefreshNotification.jsx +++ b/frontend/src/components/M3URefreshNotification.jsx @@ -49,7 +49,7 @@ export default function M3URefreshNotification() { message: ( {data.message || - 'M3U groups loaded. Please select groups or refresh M3U to complete setup.'} + 'M3U groups loaded. 
Configure group filters and auto channel sync settings.'} diff --git a/frontend/src/components/forms/M3U.jsx b/frontend/src/components/forms/M3U.jsx index 24ddd377..0e4d5643 100644 --- a/frontend/src/components/forms/M3U.jsx +++ b/frontend/src/components/forms/M3U.jsx @@ -145,8 +145,7 @@ const M3U = ({ if (values.account_type != 'XC') { notifications.show({ title: 'Fetching M3U Groups', - message: 'Filter out groups or refresh M3U once complete.', - // color: 'green.5', + message: 'Configure group filters and auto sync settings once complete.', }); // Don't prompt for group filters, but keeping this here diff --git a/frontend/src/components/forms/M3UGroupFilter.jsx b/frontend/src/components/forms/M3UGroupFilter.jsx index 7ca0fa96..0213eeee 100644 --- a/frontend/src/components/forms/M3UGroupFilter.jsx +++ b/frontend/src/components/forms/M3UGroupFilter.jsx @@ -21,7 +21,11 @@ import { Center, SimpleGrid, Text, + NumberInput, + Divider, + Alert, } from '@mantine/core'; +import { Info } from 'lucide-react'; import useChannelsStore from '../../store/channels'; import { CircleCheck, CircleX } from 'lucide-react'; @@ -40,6 +44,8 @@ const M3UGroupFilter = ({ playlist = null, isOpen, onClose }) => { playlist.channel_groups.map((group) => ({ ...group, name: channelGroups[group.channel_group].name, + auto_channel_sync: group.auto_channel_sync || false, + auto_sync_channel_start: group.auto_sync_channel_start || 1.0, })) ); }, [playlist, channelGroups]); @@ -53,15 +59,38 @@ const M3UGroupFilter = ({ playlist = null, isOpen, onClose }) => { ); }; + const toggleAutoSync = (id) => { + setGroupStates( + groupStates.map((state) => ({ + ...state, + auto_channel_sync: state.channel_group == id ? !state.auto_channel_sync : state.auto_channel_sync, + })) + ); + }; + + const updateChannelStart = (id, value) => { + setGroupStates( + groupStates.map((state) => ({ + ...state, + auto_sync_channel_start: state.channel_group == id ? value : state.auto_sync_channel_start, + })) + ); + }; + const submit = async () => { setIsLoading(true); - await API.updatePlaylist({ - ...playlist, - channel_groups: groupStates, - }); - setIsLoading(false); - API.refreshPlaylist(playlist.id); - onClose(); + try { + // Update group settings via new API endpoint + await API.updateM3UGroupSettings(playlist.id, groupStates); + + // Refresh the playlist + API.refreshPlaylist(playlist.id); + onClose(); + } catch (error) { + console.error('Error updating group settings:', error); + } finally { + setIsLoading(false); + } }; const selectAll = () => { @@ -94,14 +123,21 @@ const M3UGroupFilter = ({ playlist = null, isOpen, onClose }) => { + } color="blue" variant="light"> + + Auto Channel Sync: When enabled, channels will be automatically created for all streams in the group during M3U updates, + and removed when streams are no longer present. Set a starting channel number for each group to organize your channels. 
+ + + setGroupFilter(event.currentTarget.value)} style={{ flex: 1 }} @@ -113,41 +149,77 @@ const M3UGroupFilter = ({ playlist = null, isOpen, onClose }) => { Deselect Visible - + + + + {groupStates .filter((group) => group.name.toLowerCase().includes(groupFilter.toLowerCase()) ) - .sort((a, b) => a.name > b.name) + .sort((a, b) => a.name.localeCompare(b.name)) .map((group) => ( - + + {/* Group Enable/Disable Button */} + + + {/* Auto Sync Checkbox */} + toggleAutoSync(group.channel_group)} + size="sm" + /> + + {/* Channel Start Number Input */} + updateChannelStart(group.channel_group, value)} + disabled={!group.enabled || !group.auto_channel_sync} + min={1} + step={1} + size="sm" + style={{ width: '120px' }} + precision={1} + /> + ))} - + + - - - {groupStates - .filter((group) => - group.name.toLowerCase().includes(groupFilter.toLowerCase()) - ) - .sort((a, b) => a.name.localeCompare(b.name)) - .map((group) => ( - - {/* Group Enable/Disable Button */} - + + + {groupStates + .filter((group) => + group.name.toLowerCase().includes(groupFilter.toLowerCase()) + ) + .sort((a, b) => a.name.localeCompare(b.name)) + .map((group) => ( + + {/* Group Enable/Disable Button */} + - {/* Auto Sync Checkbox */} - toggleAutoSync(group.channel_group)} - size="sm" - /> + {/* Auto Sync Controls */} + + toggleAutoSync(group.channel_group)} + size="xs" + /> - {/* Channel Start Number Input */} - updateChannelStart(group.channel_group, value)} - disabled={!group.enabled || !group.auto_channel_sync} - min={1} - step={1} - size="sm" - style={{ width: '120px' }} - precision={1} - /> - - ))} - + {group.auto_channel_sync && group.enabled && ( + updateChannelStart(group.channel_group, value)} + min={1} + step={1} + size="xs" + precision={1} + /> + )} + +
    + ))} + + - + + + + +
    + ); +}; + +export default LogoForm; diff --git a/frontend/src/pages/Logos.jsx b/frontend/src/pages/Logos.jsx new file mode 100644 index 00000000..7ca879f6 --- /dev/null +++ b/frontend/src/pages/Logos.jsx @@ -0,0 +1,223 @@ +import React, { useState, useEffect } from 'react'; +import { + Container, + Title, + Button, + Table, + Group, + ActionIcon, + Text, + Image, + Box, + Center, + Stack, + Badge, +} from '@mantine/core'; +import { SquarePen, Trash2, Plus, ExternalLink } from 'lucide-react'; +import { notifications } from '@mantine/notifications'; +import useChannelsStore from '../store/channels'; +import API from '../api'; +import LogoForm from '../components/forms/Logo'; +import ConfirmationDialog from '../components/ConfirmationDialog'; + +const LogosPage = () => { + const { logos, fetchLogos } = useChannelsStore(); + const [logoFormOpen, setLogoFormOpen] = useState(false); + const [editingLogo, setEditingLogo] = useState(null); + const [deleteConfirmOpen, setDeleteConfirmOpen] = useState(false); + const [logoToDelete, setLogoToDelete] = useState(null); + const [loading, setLoading] = useState(true); + + useEffect(() => { + loadLogos(); + }, []); + + const loadLogos = async () => { + setLoading(true); + try { + await fetchLogos(); + } catch (error) { + notifications.show({ + title: 'Error', + message: 'Failed to load logos', + color: 'red', + }); + } finally { + setLoading(false); + } + }; + + const handleCreateLogo = () => { + setEditingLogo(null); + setLogoFormOpen(true); + }; + + const handleEditLogo = (logo) => { + setEditingLogo(logo); + setLogoFormOpen(true); + }; + + const handleDeleteLogo = (logo) => { + setLogoToDelete(logo); + setDeleteConfirmOpen(true); + }; + + const confirmDeleteLogo = async () => { + if (!logoToDelete) return; + + try { + await API.deleteLogo(logoToDelete.id); + await fetchLogos(); + notifications.show({ + title: 'Success', + message: 'Logo deleted successfully', + color: 'green', + }); + } catch (error) { + notifications.show({ + title: 'Error', + message: 'Failed to delete logo', + color: 'red', + }); + } finally { + setDeleteConfirmOpen(false); + setLogoToDelete(null); + } + }; + + const handleFormClose = () => { + setLogoFormOpen(false); + setEditingLogo(null); + loadLogos(); // Refresh the logos list + }; + + const logosArray = Object.values(logos || {}); + + const rows = logosArray.map((logo) => ( + + +
    + {logo.name} +
    +
    + + {logo.name} + + + + + {logo.url} + + {logo.url.startsWith('http') && ( + window.open(logo.url, '_blank')} + > + + + )} + + + + + handleEditLogo(logo)} + color="blue" + > + + + handleDeleteLogo(logo)} + color="red" + > + + + + +
    + )); + + return ( + <> + + + Logos + + + + {loading ? ( +
    + Loading logos... +
    + ) : logosArray.length === 0 ? ( +
    + + No logos found + Click "Add Logo" to create your first logo + +
    + ) : ( + + + Total: {logosArray.length} logo{logosArray.length !== 1 ? 's' : ''} + + + + + + Preview + Name + URL + Actions + + + {rows} +
    +
    + )} +
    + + + + setDeleteConfirmOpen(false)} + onConfirm={confirmDeleteLogo} + title="Delete Logo" + message={ + logoToDelete ? ( +
    + Are you sure you want to delete the logo "{logoToDelete.name}"? +
    + + This action cannot be undone. + +
    + ) : ( + 'Are you sure you want to delete this logo?' + ) + } + confirmLabel="Delete" + cancelLabel="Cancel" + /> + + ); +}; + +export default LogosPage; diff --git a/frontend/src/store/channels.jsx b/frontend/src/store/channels.jsx index 40791cf4..a4c61149 100644 --- a/frontend/src/store/channels.jsx +++ b/frontend/src/store/channels.jsx @@ -21,7 +21,7 @@ const useChannelsStore = create((set, get) => ({ forceUpdate: 0, triggerUpdate: () => { - set({ forecUpdate: new Date() }); + set({ forceUpdate: new Date() }); }, fetchChannels: async () => { @@ -255,6 +255,24 @@ const useChannelsStore = create((set, get) => ({ }, })), + updateLogo: (logo) => + set((state) => ({ + logos: { + ...state.logos, + [logo.id]: { + ...logo, + url: logo.url.replace(/^\/data/, ''), + }, + }, + })), + + removeLogo: (logoId) => + set((state) => { + const newLogos = { ...state.logos }; + delete newLogos[logoId]; + return { logos: newLogos }; + }), + addProfile: (profile) => set((state) => ({ profiles: { From cea078f6ef5b20cbbb8c0fd6991964ca76527bba Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Tue, 15 Jul 2025 18:37:22 -0500 Subject: [PATCH 0615/1435] Use default user-agent and adjust timeouts. --- apps/channels/api_views.py | 23 +++++++++++++++++------ 1 file changed, 17 insertions(+), 6 deletions(-) diff --git a/apps/channels/api_views.py b/apps/channels/api_views.py index 96b7362f..310fccbb 100644 --- a/apps/channels/api_views.py +++ b/apps/channels/api_views.py @@ -17,6 +17,8 @@ from apps.accounts.permissions import ( permission_classes_by_method, ) +from core.models import UserAgent, CoreSettings + from .models import ( Stream, Channel, @@ -1053,14 +1055,14 @@ class LogoViewSet(viewsets.ModelViewSet): def destroy(self, request, *args, **kwargs): """Delete a logo""" logo = self.get_object() - + # Check if logo is being used by any channels if logo.channels.exists(): return Response( {"error": f"Cannot delete logo as it is used by {logo.channels.count()} channel(s)"}, status=status.HTTP_400_BAD_REQUEST ) - + return super().destroy(request, *args, **kwargs) @action(detail=False, methods=["post"]) @@ -1117,12 +1119,21 @@ class LogoViewSet(viewsets.ModelViewSet): else: # Remote image try: + # Get the default user agent + try: + default_user_agent_id = CoreSettings.get_default_user_agent_id() + user_agent_obj = UserAgent.objects.get(id=int(default_user_agent_id)) + user_agent = user_agent_obj.user_agent + except (CoreSettings.DoesNotExist, UserAgent.DoesNotExist, ValueError): + # Fallback to hardcoded if default not found + user_agent = 'Dispatcharr/1.0' + # Add proper timeouts to prevent hanging remote_response = requests.get( - logo_url, - stream=True, - timeout=(10, 30), # (connect_timeout, read_timeout) - headers={'User-Agent': 'Dispatcharr/1.0'} + logo_url, + stream=True, + timeout=(3, 5), # (connect_timeout, read_timeout) + headers={'User-Agent': user_agent} ) if remote_response.status_code == 200: # Try to get content type from response headers first From 6afd5a38c9429459c12c757eef46e3f0b6c96527 Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Tue, 15 Jul 2025 18:44:53 -0500 Subject: [PATCH 0616/1435] Add timeouts to logo fetching to avoid hanging UI if a logo is unreachable. Also add default user-agent to request to prevent servers from denying request. 
Fixes #217 and Fixes #101 --- apps/channels/api_views.py | 55 +++++++++++++++++++++++++++++++++++--- 1 file changed, 52 insertions(+), 3 deletions(-) diff --git a/apps/channels/api_views.py b/apps/channels/api_views.py index f0f59f29..310fccbb 100644 --- a/apps/channels/api_views.py +++ b/apps/channels/api_views.py @@ -17,6 +17,8 @@ from apps.accounts.permissions import ( permission_classes_by_method, ) +from core.models import UserAgent, CoreSettings + from .models import ( Stream, Channel, @@ -1038,6 +1040,31 @@ class LogoViewSet(viewsets.ModelViewSet): except KeyError: return [Authenticated()] + def create(self, request, *args, **kwargs): + """Create a new logo entry""" + serializer = self.get_serializer(data=request.data) + if serializer.is_valid(): + logo = serializer.save() + return Response(self.get_serializer(logo).data, status=status.HTTP_201_CREATED) + return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) + + def update(self, request, *args, **kwargs): + """Update an existing logo""" + return super().update(request, *args, **kwargs) + + def destroy(self, request, *args, **kwargs): + """Delete a logo""" + logo = self.get_object() + + # Check if logo is being used by any channels + if logo.channels.exists(): + return Response( + {"error": f"Cannot delete logo as it is used by {logo.channels.count()} channel(s)"}, + status=status.HTTP_400_BAD_REQUEST + ) + + return super().destroy(request, *args, **kwargs) + @action(detail=False, methods=["post"]) def upload(self, request): if "file" not in request.FILES: @@ -1062,7 +1089,7 @@ class LogoViewSet(viewsets.ModelViewSet): ) return Response( - {"id": logo.id, "name": logo.name, "url": logo.url}, + LogoSerializer(logo, context={'request': request}).data, status=status.HTTP_201_CREATED, ) @@ -1092,7 +1119,22 @@ class LogoViewSet(viewsets.ModelViewSet): else: # Remote image try: - remote_response = requests.get(logo_url, stream=True) + # Get the default user agent + try: + default_user_agent_id = CoreSettings.get_default_user_agent_id() + user_agent_obj = UserAgent.objects.get(id=int(default_user_agent_id)) + user_agent = user_agent_obj.user_agent + except (CoreSettings.DoesNotExist, UserAgent.DoesNotExist, ValueError): + # Fallback to hardcoded if default not found + user_agent = 'Dispatcharr/1.0' + + # Add proper timeouts to prevent hanging + remote_response = requests.get( + logo_url, + stream=True, + timeout=(3, 5), # (connect_timeout, read_timeout) + headers={'User-Agent': user_agent} + ) if remote_response.status_code == 200: # Try to get content type from response headers first content_type = remote_response.headers.get("Content-Type") @@ -1114,7 +1156,14 @@ class LogoViewSet(viewsets.ModelViewSet): ) return response raise Http404("Remote image not found") - except requests.RequestException: + except requests.exceptions.Timeout: + logger.warning(f"Timeout fetching logo from {logo_url}") + raise Http404("Logo request timed out") + except requests.exceptions.ConnectionError: + logger.warning(f"Connection error fetching logo from {logo_url}") + raise Http404("Unable to connect to logo server") + except requests.RequestException as e: + logger.warning(f"Error fetching logo from {logo_url}: {e}") raise Http404("Error fetching remote image") From 2bba31940d1ac4927827f7a97f994c59291f4ceb Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Tue, 15 Jul 2025 20:02:21 -0500 Subject: [PATCH 0617/1435] Use our custom table for displaying logos --- frontend/src/components/tables/LogosTable.jsx | 356 ++++++++++++++++++ 
frontend/src/pages/Logos.jsx | 204 +--------- 2 files changed, 363 insertions(+), 197 deletions(-) create mode 100644 frontend/src/components/tables/LogosTable.jsx diff --git a/frontend/src/components/tables/LogosTable.jsx b/frontend/src/components/tables/LogosTable.jsx new file mode 100644 index 00000000..df6605d1 --- /dev/null +++ b/frontend/src/components/tables/LogosTable.jsx @@ -0,0 +1,356 @@ +import React, { useMemo, useCallback, useState } from 'react'; +import API from '../../api'; +import LogoForm from '../forms/Logo'; +import useChannelsStore from '../../store/channels'; +import useLocalStorage from '../../hooks/useLocalStorage'; +import { + SquarePlus, + SquareMinus, + SquarePen, + ExternalLink, +} from 'lucide-react'; +import { + ActionIcon, + Box, + Text, + Paper, + Button, + Flex, + Group, + useMantineTheme, + LoadingOverlay, + Stack, + Image, + Center, +} from '@mantine/core'; +import { CustomTable, useTable } from './CustomTable'; +import ConfirmationDialog from '../ConfirmationDialog'; +import { notifications } from '@mantine/notifications'; + +const LogoRowActions = ({ theme, row, editLogo, deleteLogo }) => { + const [tableSize, _] = useLocalStorage('table-size', 'default'); + + const onEdit = useCallback(() => { + editLogo(row.original); + }, [row.original, editLogo]); + + const onDelete = useCallback(() => { + deleteLogo(row.original.id); + }, [row.original.id, deleteLogo]); + + const iconSize = + tableSize == 'default' ? 'sm' : tableSize == 'compact' ? 'xs' : 'md'; + + return ( + + + + + + + + + + + + ); +}; + +const LogosTable = () => { + const theme = useMantineTheme(); + + /** + * STORES + */ + const { logos, fetchLogos } = useChannelsStore(); + + /** + * useState + */ + const [selectedLogo, setSelectedLogo] = useState(null); + const [logoModalOpen, setLogoModalOpen] = useState(false); + const [confirmDeleteOpen, setConfirmDeleteOpen] = useState(false); + const [deleteTarget, setDeleteTarget] = useState(null); + const [logoToDelete, setLogoToDelete] = useState(null); + const [isLoading, setIsLoading] = useState(false); + + /** + * Functions + */ + const executeDeleteLogo = useCallback(async (id) => { + setIsLoading(true); + try { + await API.deleteLogo(id); + await fetchLogos(); + notifications.show({ + title: 'Success', + message: 'Logo deleted successfully', + color: 'green', + }); + } catch (error) { + notifications.show({ + title: 'Error', + message: 'Failed to delete logo', + color: 'red', + }); + } finally { + setIsLoading(false); + setConfirmDeleteOpen(false); + } + }, [fetchLogos]); + + const editLogo = useCallback(async (logo = null) => { + setSelectedLogo(logo); + setLogoModalOpen(true); + }, []); + + const deleteLogo = useCallback(async (id) => { + const logosArray = Object.values(logos || {}); + const logo = logosArray.find((l) => l.id === id); + setLogoToDelete(logo); + setDeleteTarget(id); + setConfirmDeleteOpen(true); + }, [logos]); + + /** + * useMemo + */ + const columns = useMemo( + () => [ + { + header: 'Preview', + accessorKey: 'cache_url', + size: 80, + enableSorting: false, + cell: ({ getValue, row }) => ( +
    + {row.original.name} +
    + ), + }, + { + header: 'Name', + accessorKey: 'name', + size: 200, + cell: ({ getValue }) => ( + + {getValue()} + + ), + }, + { + header: 'URL', + accessorKey: 'url', + cell: ({ getValue }) => ( + + + + {getValue()} + + + {getValue()?.startsWith('http') && ( + window.open(getValue(), '_blank')} + > + + + )} + + ), + }, + { + id: 'actions', + size: 80, + header: 'Actions', + enableSorting: false, + cell: ({ row }) => ( + + ), + }, + ], + [theme, editLogo, deleteLogo] + ); + + const closeLogoForm = () => { + setSelectedLogo(null); + setLogoModalOpen(false); + fetchLogos(); // Refresh the logos list + }; + + const data = useMemo(() => { + const logosArray = Object.values(logos || {}); + return logosArray.sort((a, b) => a.id - b.id); + }, [logos]); + + const renderHeaderCell = (header) => { + return ( + + {header.column.columnDef.header} + + ); + }; + + const table = useTable({ + columns, + data, + allRowIds: data.map((logo) => logo.id), + enablePagination: false, + enableRowSelection: false, + enableRowVirtualization: false, + renderTopToolbar: false, + manualSorting: false, + manualFiltering: false, + manualPagination: false, + headerCellRenderFns: { + actions: renderHeaderCell, + cache_url: renderHeaderCell, + name: renderHeaderCell, + url: renderHeaderCell, + }, + }); + + return ( + <> + + + + + Logos + + + ({data.length} logo{data.length !== 1 ? 's' : ''}) + + + + + {/* Top toolbar */} + + + + + {/* Table container */} + +
    + + +
    +
    +
    +
    +
    + + + + setConfirmDeleteOpen(false)} + onConfirm={() => executeDeleteLogo(deleteTarget)} + title="Delete Logo" + message={ + logoToDelete ? ( +
    + Are you sure you want to delete the logo "{logoToDelete.name}"? +
    + + This action cannot be undone. + +
    + ) : ( + 'Are you sure you want to delete this logo?' + ) + } + confirmLabel="Delete" + cancelLabel="Cancel" + size="md" + /> + + ); +}; + +export default LogosTable; diff --git a/frontend/src/pages/Logos.jsx b/frontend/src/pages/Logos.jsx index 7ca879f6..ee26c51e 100644 --- a/frontend/src/pages/Logos.jsx +++ b/frontend/src/pages/Logos.jsx @@ -1,39 +1,17 @@ -import React, { useState, useEffect } from 'react'; -import { - Container, - Title, - Button, - Table, - Group, - ActionIcon, - Text, - Image, - Box, - Center, - Stack, - Badge, -} from '@mantine/core'; -import { SquarePen, Trash2, Plus, ExternalLink } from 'lucide-react'; +import React, { useEffect } from 'react'; +import { Box } from '@mantine/core'; import { notifications } from '@mantine/notifications'; import useChannelsStore from '../store/channels'; -import API from '../api'; -import LogoForm from '../components/forms/Logo'; -import ConfirmationDialog from '../components/ConfirmationDialog'; +import LogosTable from '../components/tables/LogosTable'; const LogosPage = () => { - const { logos, fetchLogos } = useChannelsStore(); - const [logoFormOpen, setLogoFormOpen] = useState(false); - const [editingLogo, setEditingLogo] = useState(null); - const [deleteConfirmOpen, setDeleteConfirmOpen] = useState(false); - const [logoToDelete, setLogoToDelete] = useState(null); - const [loading, setLoading] = useState(true); + const { fetchLogos } = useChannelsStore(); useEffect(() => { loadLogos(); }, []); const loadLogos = async () => { - setLoading(true); try { await fetchLogos(); } catch (error) { @@ -42,181 +20,13 @@ const LogosPage = () => { message: 'Failed to load logos', color: 'red', }); - } finally { - setLoading(false); } }; - const handleCreateLogo = () => { - setEditingLogo(null); - setLogoFormOpen(true); - }; - - const handleEditLogo = (logo) => { - setEditingLogo(logo); - setLogoFormOpen(true); - }; - - const handleDeleteLogo = (logo) => { - setLogoToDelete(logo); - setDeleteConfirmOpen(true); - }; - - const confirmDeleteLogo = async () => { - if (!logoToDelete) return; - - try { - await API.deleteLogo(logoToDelete.id); - await fetchLogos(); - notifications.show({ - title: 'Success', - message: 'Logo deleted successfully', - color: 'green', - }); - } catch (error) { - notifications.show({ - title: 'Error', - message: 'Failed to delete logo', - color: 'red', - }); - } finally { - setDeleteConfirmOpen(false); - setLogoToDelete(null); - } - }; - - const handleFormClose = () => { - setLogoFormOpen(false); - setEditingLogo(null); - loadLogos(); // Refresh the logos list - }; - - const logosArray = Object.values(logos || {}); - - const rows = logosArray.map((logo) => ( - - -
    - {logo.name} -
    -
    - - {logo.name} - - - - - {logo.url} - - {logo.url.startsWith('http') && ( - window.open(logo.url, '_blank')} - > - - - )} - - - - - handleEditLogo(logo)} - color="blue" - > - - - handleDeleteLogo(logo)} - color="red" - > - - - - -
    - )); - return ( - <> - - - Logos - - - - {loading ? ( -
    - Loading logos... -
    - ) : logosArray.length === 0 ? ( -
    - - No logos found - Click "Add Logo" to create your first logo - -
    - ) : ( - - - Total: {logosArray.length} logo{logosArray.length !== 1 ? 's' : ''} - - - - - - Preview - Name - URL - Actions - - - {rows} -
    -
    - )} -
    - - - - setDeleteConfirmOpen(false)} - onConfirm={confirmDeleteLogo} - title="Delete Logo" - message={ - logoToDelete ? ( -
    - Are you sure you want to delete the logo "{logoToDelete.name}"? -
    - - This action cannot be undone. - -
    - ) : ( - 'Are you sure you want to delete this logo?' - ) - } - confirmLabel="Delete" - cancelLabel="Cancel" - /> - + + + ); }; From 500df533bbe3ca68ab824ec5fc8f477a25a93086 Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Tue, 15 Jul 2025 20:12:25 -0500 Subject: [PATCH 0618/1435] Center logos in the column. --- frontend/src/components/tables/LogosTable.jsx | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/frontend/src/components/tables/LogosTable.jsx b/frontend/src/components/tables/LogosTable.jsx index df6605d1..257bb17f 100644 --- a/frontend/src/components/tables/LogosTable.jsx +++ b/frontend/src/components/tables/LogosTable.jsx @@ -133,7 +133,7 @@ const LogosTable = () => { size: 80, enableSorting: false, cell: ({ getValue, row }) => ( -
    +
    {row.original.name} Date: Tue, 15 Jul 2025 20:14:34 -0500 Subject: [PATCH 0619/1435] Add padding to logos. --- frontend/src/components/tables/LogosTable.jsx | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/frontend/src/components/tables/LogosTable.jsx b/frontend/src/components/tables/LogosTable.jsx index 257bb17f..9138aeb7 100644 --- a/frontend/src/components/tables/LogosTable.jsx +++ b/frontend/src/components/tables/LogosTable.jsx @@ -133,7 +133,7 @@ const LogosTable = () => { size: 80, enableSorting: false, cell: ({ getValue, row }) => ( -
    +
    {row.original.name} Date: Tue, 15 Jul 2025 20:26:02 -0500 Subject: [PATCH 0620/1435] Enhance Logo management with filtering and usage details in API and UI --- apps/channels/api_views.py | 18 +++ apps/channels/serializers.py | 21 +++- frontend/src/api.js | 15 ++- frontend/src/components/tables/LogosTable.jsx | 108 +++++++++++++++++- 4 files changed, 155 insertions(+), 7 deletions(-) diff --git a/apps/channels/api_views.py b/apps/channels/api_views.py index 310fccbb..97d0b074 100644 --- a/apps/channels/api_views.py +++ b/apps/channels/api_views.py @@ -1040,6 +1040,24 @@ class LogoViewSet(viewsets.ModelViewSet): except KeyError: return [Authenticated()] + def get_queryset(self): + """Optimize queryset with prefetch and add filtering""" + queryset = Logo.objects.prefetch_related('channels').order_by('name') + + # Filter by usage + used_filter = self.request.query_params.get('used', None) + if used_filter == 'true': + queryset = queryset.filter(channels__isnull=False).distinct() + elif used_filter == 'false': + queryset = queryset.filter(channels__isnull=True) + + # Filter by name + name_filter = self.request.query_params.get('name', None) + if name_filter: + queryset = queryset.filter(name__icontains=name_filter) + + return queryset + def create(self, request, *args, **kwargs): """Create a new logo entry""" serializer = self.get_serializer(data=request.data) diff --git a/apps/channels/serializers.py b/apps/channels/serializers.py index 278399dd..3346495e 100644 --- a/apps/channels/serializers.py +++ b/apps/channels/serializers.py @@ -20,10 +20,13 @@ from django.utils import timezone class LogoSerializer(serializers.ModelSerializer): cache_url = serializers.SerializerMethodField() + channel_count = serializers.SerializerMethodField() + is_used = serializers.SerializerMethodField() + channel_names = serializers.SerializerMethodField() class Meta: model = Logo - fields = ["id", "name", "url", "cache_url"] + fields = ["id", "name", "url", "cache_url", "channel_count", "is_used", "channel_names"] def get_cache_url(self, obj): # return f"/api/channels/logos/{obj.id}/cache/" @@ -34,6 +37,22 @@ class LogoSerializer(serializers.ModelSerializer): ) return reverse("api:channels:logo-cache", args=[obj.id]) + def get_channel_count(self, obj): + """Get the number of channels using this logo""" + return obj.channels.count() + + def get_is_used(self, obj): + """Check if this logo is used by any channels""" + return obj.channels.exists() + + def get_channel_names(self, obj): + """Get the names of channels using this logo (limited to first 5)""" + channels = obj.channels.all()[:5] + names = [channel.name for channel in channels] + if obj.channels.count() > 5: + names.append(f"...and {obj.channels.count() - 5} more") + return names + # # Stream diff --git a/frontend/src/api.js b/frontend/src/api.js index cbd8950a..3263eaf5 100644 --- a/frontend/src/api.js +++ b/frontend/src/api.js @@ -1225,9 +1225,10 @@ export default class API { } } - static async getLogos() { + static async getLogos(params = {}) { try { - const response = await request(`${host}/api/channels/logos/`); + const queryParams = new URLSearchParams(params); + const response = await request(`${host}/api/channels/logos/?${queryParams.toString()}`); return response; } catch (e) { @@ -1235,6 +1236,16 @@ export default class API { } } + static async fetchLogos() { + try { + const response = await this.getLogos(); + useChannelsStore.getState().setLogos(response); + return response; + } catch (e) { + errorNotification('Failed to fetch logos', e); + } + 
} + static async uploadLogo(file) { try { const formData = new FormData(); diff --git a/frontend/src/components/tables/LogosTable.jsx b/frontend/src/components/tables/LogosTable.jsx index 9138aeb7..872b9dca 100644 --- a/frontend/src/components/tables/LogosTable.jsx +++ b/frontend/src/components/tables/LogosTable.jsx @@ -8,6 +8,7 @@ import { SquareMinus, SquarePen, ExternalLink, + Filter, } from 'lucide-react'; import { ActionIcon, @@ -22,6 +23,11 @@ import { Stack, Image, Center, + Badge, + Tooltip, + Select, + TextInput, + Menu, } from '@mantine/core'; import { CustomTable, useTable } from './CustomTable'; import ConfirmationDialog from '../ConfirmationDialog'; @@ -83,6 +89,10 @@ const LogosTable = () => { const [deleteTarget, setDeleteTarget] = useState(null); const [logoToDelete, setLogoToDelete] = useState(null); const [isLoading, setIsLoading] = useState(false); + const [filters, setFilters] = useState({ + name: '', + used: 'all' + }); /** * Functions @@ -155,6 +165,42 @@ const LogosTable = () => { ), }, + { + header: 'Usage', + accessorKey: 'channel_count', + size: 120, + cell: ({ getValue, row }) => { + const count = getValue(); + const channelNames = row.original.channel_names || []; + + if (count === 0) { + return ( + + Unused + + ); + } + + return ( + + Used by {count} channel{count !== 1 ? 's' : ''}: + {channelNames.map((name, index) => ( + • {name} + ))} + + } + multiline + width={220} + > + + {count} channel{count !== 1 ? 's' : ''} + + + ); + }, + }, { header: 'URL', accessorKey: 'url', @@ -211,8 +257,24 @@ const LogosTable = () => { const data = useMemo(() => { const logosArray = Object.values(logos || {}); - return logosArray.sort((a, b) => a.id - b.id); - }, [logos]); + + // Apply filters + let filteredLogos = logosArray; + + if (filters.name) { + filteredLogos = filteredLogos.filter(logo => + logo.name.toLowerCase().includes(filters.name.toLowerCase()) + ); + } + + if (filters.used === 'used') { + filteredLogos = filteredLogos.filter(logo => logo.is_used); + } else if (filters.used === 'unused') { + filteredLogos = filteredLogos.filter(logo => !logo.is_used); + } + + return filteredLogos.sort((a, b) => a.id - b.id); + }, [logos, filters]); const renderHeaderCell = (header) => { return ( @@ -238,6 +300,7 @@ const LogosTable = () => { cache_url: renderHeaderCell, name: renderHeaderCell, url: renderHeaderCell, + channel_count: renderHeaderCell, }, }); @@ -282,11 +345,44 @@ const LogosTable = () => { + + + setFilters(prev => ({ + ...prev, + name: event.currentTarget.value + })) + } + size="xs" + style={{ width: 200 }} + /> + { + const newValue = value ? 
parseInt(value) : null; + setGroupStates( + groupStates.map((state) => ({ + ...state, + custom_properties: { + ...state.custom_properties, + group_override: newValue, + }, + })) + ); + }} + data={Object.values(channelGroups).map((g) => ({ + value: g.id.toString(), + label: g.name, + }))} + clearable + searchable + size="xs" + /> )} diff --git a/frontend/src/components/tables/ChannelsTable.jsx b/frontend/src/components/tables/ChannelsTable.jsx index 077602ad..3d34ca55 100644 --- a/frontend/src/components/tables/ChannelsTable.jsx +++ b/frontend/src/components/tables/ChannelsTable.jsx @@ -288,7 +288,8 @@ const ChannelsTable = ({ }) => { const [isLoading, setIsLoading] = useState(true); const [hdhrUrl, setHDHRUrl] = useState(hdhrUrlBase); - const [epgUrl, setEPGUrl] = useState(epgUrlBase); const [m3uUrl, setM3UUrl] = useState(m3uUrlBase); + const [epgUrl, setEPGUrl] = useState(epgUrlBase); + const [m3uUrl, setM3UUrl] = useState(m3uUrlBase); const [confirmDeleteOpen, setConfirmDeleteOpen] = useState(false); const [deleteTarget, setDeleteTarget] = useState(null); @@ -308,7 +309,7 @@ const ChannelsTable = ({ }) => { }); /** - * Dereived variables + * Derived variables */ const activeGroupIds = new Set( Object.values(channels).map((channel) => channel.channel_group_id) From b406a3b504fda3aa588e605ff05a464568e1ccfe Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Thu, 17 Jul 2025 19:02:03 -0500 Subject: [PATCH 0628/1435] Move force dummy epg to top. --- .../src/components/forms/M3UGroupFilter.jsx | 117 ++++++++++++------ 1 file changed, 76 insertions(+), 41 deletions(-) diff --git a/frontend/src/components/forms/M3UGroupFilter.jsx b/frontend/src/components/forms/M3UGroupFilter.jsx index f94f35ef..315a6424 100644 --- a/frontend/src/components/forms/M3UGroupFilter.jsx +++ b/frontend/src/components/forms/M3UGroupFilter.jsx @@ -254,13 +254,23 @@ const M3UGroupFilter = ({ playlist = null, isOpen, onClose }) => { {/* Auto Sync Controls */} - toggleAutoSync(group.channel_group)} - size="xs" - /> + + toggleAutoSync(group.channel_group)} + size="xs" + /> + {group.auto_channel_sync && group.enabled && ( + toggleForceDummyEPG(group.channel_group)} + size="xs" + /> + )} + {group.auto_channel_sync && group.enabled && ( <> @@ -274,39 +284,64 @@ const M3UGroupFilter = ({ playlist = null, isOpen, onClose }) => { precision={1} /> - {/* Force Dummy EPG Checkbox */} - toggleForceDummyEPG(group.channel_group)} - size="xs" - /> - - {/* Override Channel Group Select */} - { + const newValue = value ? parseInt(value) : null; + setGroupStates( + groupStates.map((state) => { + if (state.channel_group == group.channel_group) { + return { + ...state, + custom_properties: { + ...state.custom_properties, + group_override: newValue, + }, + }; + } + return state; + }) + ); + }} + data={Object.values(channelGroups).map((g) => ({ + value: g.id.toString(), + label: g.name, + }))} + disabled={!(group.custom_properties && Object.prototype.hasOwnProperty.call(group.custom_properties, 'group_override'))} + clearable + searchable + size="xs" + style={{ flex: 1 }} + /> + )} @@ -334,4 +369,4 @@ const M3UGroupFilter = ({ playlist = null, isOpen, onClose }) => { ); }; -export default M3UGroupFilter; +export default M3UGroupFilter; \ No newline at end of file From f40e9fb9be1458e56881828edce746a6e200a38f Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Thu, 17 Jul 2025 19:24:27 -0500 Subject: [PATCH 0629/1435] Update playlist store when auto sync settings change. 
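The change below applies a refresh-after-write pattern: rather than hand-merging the PATCH
response into cached state, it re-fetches the playlist and pushes the fresh object into the
zustand store. Condensed sketch (names as in the diff; error handling elided):

    // After the group-settings PATCH succeeds, refresh the cached playlist so
    // dependent components re-render with the saved auto-sync values.
    const updatedPlaylist = await API.getPlaylist(playlistId);
    usePlaylistsStore.getState().updatePlaylist(updatedPlaylist);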
--- frontend/src/api.js | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/frontend/src/api.js b/frontend/src/api.js index e34dabe2..5812a4b9 100644 --- a/frontend/src/api.js +++ b/frontend/src/api.js @@ -739,7 +739,9 @@ export default class API { method: 'PATCH', body: { group_settings: groupSettings }, }); - + // Fetch the updated playlist and update the store + const updatedPlaylist = await API.getPlaylist(playlistId); + usePlaylistsStore.getState().updatePlaylist(updatedPlaylist); return response; } catch (e) { errorNotification('Failed to update M3U group settings', e); @@ -781,7 +783,6 @@ export default class API { const response = await request(`${host}/api/m3u/refresh/${id}/`, { method: 'POST', }); - return response; } catch (e) { errorNotification('Failed to refresh M3U account', e); From cebc4c8ca931967f814bd6d1fee3a66f548801b3 Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Thu, 17 Jul 2025 20:32:24 -0500 Subject: [PATCH 0630/1435] Add pagination. --- frontend/src/components/tables/LogosTable.jsx | 120 ++++++++++++++++-- 1 file changed, 111 insertions(+), 9 deletions(-) diff --git a/frontend/src/components/tables/LogosTable.jsx b/frontend/src/components/tables/LogosTable.jsx index 61a325c2..64822cb5 100644 --- a/frontend/src/components/tables/LogosTable.jsx +++ b/frontend/src/components/tables/LogosTable.jsx @@ -31,6 +31,8 @@ import { TextInput, Menu, Checkbox, + Pagination, + NativeSelect, } from '@mantine/core'; import { CustomTable, useTable } from './CustomTable'; import ConfirmationDialog from '../ConfirmationDialog'; @@ -101,6 +103,12 @@ const LogosTable = () => { }); const [debouncedNameFilter, setDebouncedNameFilter] = useState(''); const [selectedRows, setSelectedRows] = useState(new Set()); + const [pageSize, setPageSize] = useLocalStorage('logos-page-size', 25); + const [pagination, setPagination] = useState({ + pageIndex: 0, + pageSize: pageSize, + }); + const [paginationString, setPaginationString] = useState(''); // Debounce the name filter useEffect(() => { @@ -132,6 +140,13 @@ const LogosTable = () => { return filteredLogos.sort((a, b) => a.id - b.id); }, [logos, debouncedNameFilter, filters.used]); + // Get paginated data + const paginatedData = useMemo(() => { + const startIndex = pagination.pageIndex * pagination.pageSize; + const endIndex = startIndex + pagination.pageSize; + return data.slice(startIndex, endIndex); + }, [data, pagination.pageIndex, pagination.pageSize]); + // Calculate unused logos count const unusedLogosCount = useMemo(() => { const allLogos = Object.values(logos || {}); @@ -270,6 +285,29 @@ const LogosTable = () => { setSelectedRows(new Set()); }, [data.length]); + // Update pagination when pageSize changes + useEffect(() => { + setPagination(prev => ({ + ...prev, + pageSize: pageSize, + })); + }, [pageSize]); + + // Calculate pagination string + useEffect(() => { + const startItem = pagination.pageIndex * pagination.pageSize + 1; + const endItem = Math.min( + (pagination.pageIndex + 1) * pagination.pageSize, + data.length + ); + setPaginationString(`${startItem} to ${endItem} of ${data.length}`); + }, [pagination.pageIndex, pagination.pageSize, data.length]); + + // Calculate page count + const pageCount = useMemo(() => { + return Math.ceil(data.length / pagination.pageSize); + }, [data.length, pagination.pageSize]); + /** * useMemo */ @@ -425,17 +463,38 @@ const LogosTable = () => { setSelectedRows(new Set(newSelection)); }, []); + const onPageSizeChange = (e) => { + const newPageSize = parseInt(e.target.value); + 
setPageSize(newPageSize); + setPagination(prev => ({ + ...prev, + pageSize: newPageSize, + pageIndex: 0, // Reset to first page + })); + }; + + const onPageIndexChange = (pageIndex) => { + if (!pageIndex || pageIndex > pageCount) { + return; + } + + setPagination(prev => ({ + ...prev, + pageIndex: pageIndex - 1, + })); + }; + const table = useTable({ columns, - data, - allRowIds: data.map((logo) => logo.id), - enablePagination: false, + data: paginatedData, + allRowIds: paginatedData.map((logo) => logo.id), + enablePagination: false, // Disable internal pagination since we're handling it manually enableRowSelection: true, enableRowVirtualization: false, renderTopToolbar: false, manualSorting: false, manualFiltering: false, - manualPagination: false, + manualPagination: true, // Enable manual pagination onRowSelectionChange: onRowSelectionChange, headerCellRenderFns: { actions: renderHeaderCell, @@ -571,14 +630,57 @@ const LogosTable = () => { -
    - - -
    + +
    + + +
    +
    + + {/* Pagination Controls */} + + + Page Size + + + {paginationString} + +
    From 05539794e3578101e84a2f643ebed69e0af7a12b Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Thu, 17 Jul 2025 20:45:04 -0500 Subject: [PATCH 0631/1435] Set better sizing. --- frontend/src/components/tables/LogosTable.jsx | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/frontend/src/components/tables/LogosTable.jsx b/frontend/src/components/tables/LogosTable.jsx index 64822cb5..477da0cf 100644 --- a/frontend/src/components/tables/LogosTable.jsx +++ b/frontend/src/components/tables/LogosTable.jsx @@ -512,7 +512,8 @@ const LogosTable = () => { display: 'flex', justifyContent: 'center', padding: '0px', - minHeight: '100vh', + minHeight: 'calc(100vh - 200px)', + minWidth: '900px', }} > @@ -636,10 +637,10 @@ const LogosTable = () => { -
    +
    From bd1831e226b7443cf4ac4c23e8e3e6a3b67c1d6c Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Thu, 17 Jul 2025 20:49:05 -0500 Subject: [PATCH 0632/1435] Fix edits not saving --- frontend/src/api.js | 16 +++++++++++++--- 1 file changed, 13 insertions(+), 3 deletions(-) diff --git a/frontend/src/api.js b/frontend/src/api.js index 63c193ba..967e462b 100644 --- a/frontend/src/api.js +++ b/frontend/src/api.js @@ -209,10 +209,10 @@ export default class API { API.getAllChannelIds(API.lastQueryParams), ]); - useChannelsTableStore + useChannelsTable .getState() .queryChannels(response, API.lastQueryParams); - useChannelsTableStore.getState().setAllQueryIds(ids); + useChannelsTable.getState().setAllQueryIds(ids); return response; } catch (e) { @@ -1282,9 +1282,19 @@ export default class API { static async updateLogo(id, values) { try { + // Convert values to FormData for the multipart/form-data content type + const formData = new FormData(); + + // Add each field to the form data + Object.keys(values).forEach(key => { + if (values[key] !== null && values[key] !== undefined) { + formData.append(key, values[key]); + } + }); + const response = await request(`${host}/api/channels/logos/${id}/`, { method: 'PUT', - body: values, + body: formData, // Send as FormData instead of JSON }); useChannelsStore.getState().updateLogo(response); From 8e2309ac583c05bb2b479bc97b799c1989826921 Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Thu, 17 Jul 2025 21:02:50 -0500 Subject: [PATCH 0633/1435] Fixes logo uploads --- apps/channels/api_views.py | 14 +++++++- dispatcharr/utils.py | 8 ++--- frontend/src/api.js | 42 +++++++++++++++++++--- frontend/src/components/forms/Channel.jsx | 27 +++++++++++--- frontend/src/components/forms/Channels.jsx | 27 +++++++++++--- frontend/src/components/forms/Logo.jsx | 23 +++++++++--- 6 files changed, 119 insertions(+), 22 deletions(-) diff --git a/apps/channels/api_views.py b/apps/channels/api_views.py index 0956da11..ee7109b7 100644 --- a/apps/channels/api_views.py +++ b/apps/channels/api_views.py @@ -1172,6 +1172,16 @@ class LogoViewSet(viewsets.ModelViewSet): ) file = request.FILES["file"] + + # Validate file + try: + from dispatcharr.utils import validate_logo_file + validate_logo_file(file) + except Exception as e: + return Response( + {"error": str(e)}, status=status.HTTP_400_BAD_REQUEST + ) + file_name = file.name file_path = os.path.join("/data/logos", file_name) @@ -1187,8 +1197,10 @@ class LogoViewSet(viewsets.ModelViewSet): }, ) + # Use get_serializer to ensure proper context + serializer = self.get_serializer(logo) return Response( - LogoSerializer(logo, context={'request': request}).data, + serializer.data, status=status.HTTP_201_CREATED, ) diff --git a/dispatcharr/utils.py b/dispatcharr/utils.py index 767913c6..5e1ad087 100644 --- a/dispatcharr/utils.py +++ b/dispatcharr/utils.py @@ -21,11 +21,11 @@ def json_success_response(data=None, status=200): def validate_logo_file(file): """Validate uploaded logo file size and MIME type.""" - valid_mime_types = ["image/jpeg", "image/png", "image/gif"] + valid_mime_types = ["image/jpeg", "image/png", "image/gif", "image/webp"] if file.content_type not in valid_mime_types: - raise ValidationError("Unsupported file type. Allowed types: JPEG, PNG, GIF.") - if file.size > 2 * 1024 * 1024: - raise ValidationError("File too large. Max 2MB.") + raise ValidationError("Unsupported file type. Allowed types: JPEG, PNG, GIF, WebP.") + if file.size > 5 * 1024 * 1024: # Increased to 5MB + raise ValidationError("File too large. 
Max 5MB.") def get_client_ip(request): diff --git a/frontend/src/api.js b/frontend/src/api.js index 967e462b..bcffc920 100644 --- a/frontend/src/api.js +++ b/frontend/src/api.js @@ -209,10 +209,10 @@ export default class API { API.getAllChannelIds(API.lastQueryParams), ]); - useChannelsTable + useChannelsTableStore .getState() .queryChannels(response, API.lastQueryParams); - useChannelsTable.getState().setAllQueryIds(ids); + useChannelsTableStore.getState().setAllQueryIds(ids); return response; } catch (e) { @@ -1252,16 +1252,48 @@ export default class API { const formData = new FormData(); formData.append('file', file); - const response = await request(`${host}/api/channels/logos/upload/`, { + // Add timeout handling for file uploads + const controller = new AbortController(); + const timeoutId = setTimeout(() => controller.abort(), 30000); // 30 second timeout + + const response = await fetch(`${host}/api/channels/logos/upload/`, { method: 'POST', body: formData, + headers: { + Authorization: `Bearer ${await API.getAuthToken()}`, + }, + signal: controller.signal, }); - useChannelsStore.getState().addLogo(response); + clearTimeout(timeoutId); - return response; + if (!response.ok) { + const error = new Error(`HTTP error! Status: ${response.status}`); + let errorBody = await response.text(); + + try { + errorBody = JSON.parse(errorBody); + } catch (e) { + // If parsing fails, leave errorBody as the raw text + } + + error.status = response.status; + error.response = response; + error.body = errorBody; + throw error; + } + + const result = await response.json(); + useChannelsStore.getState().addLogo(result); + return result; } catch (e) { + if (e.name === 'AbortError') { + const timeoutError = new Error('Upload timed out. Please try again.'); + timeoutError.code = 'NETWORK_ERROR'; + throw timeoutError; + } errorNotification('Failed to upload logo', e); + throw e; } } diff --git a/frontend/src/components/forms/Channel.jsx b/frontend/src/components/forms/Channel.jsx index 64412cb4..c7d8ed6c 100644 --- a/frontend/src/components/forms/Channel.jsx +++ b/frontend/src/components/forms/Channel.jsx @@ -31,6 +31,7 @@ import { Image, UnstyledButton, } from '@mantine/core'; +import { notifications } from '@mantine/notifications'; import { ListOrdered, SquarePlus, SquareX, X } from 'lucide-react'; import useEPGsStore from '../../store/epgs'; import { Dropzone } from '@mantine/dropzone'; @@ -84,10 +85,28 @@ const ChannelForm = ({ channel = null, isOpen, onClose }) => { const handleLogoChange = async (files) => { if (files.length === 1) { - const retval = await API.uploadLogo(files[0]); - await fetchLogos(); - setLogoPreview(retval.cache_url); - formik.setFieldValue('logo_id', retval.id); + const file = files[0]; + + // Validate file size on frontend first + if (file.size > 5 * 1024 * 1024) { + // 5MB + notifications.show({ + title: 'Error', + message: 'File too large. 
Maximum size is 5MB.', + color: 'red', + }); + return; + } + + try { + const retval = await API.uploadLogo(file); + await fetchLogos(); + setLogoPreview(retval.cache_url); + formik.setFieldValue('logo_id', retval.id); + } catch (error) { + console.error('Logo upload failed:', error); + // Error notification is already handled in API.uploadLogo + } } else { setLogoPreview(null); } diff --git a/frontend/src/components/forms/Channels.jsx b/frontend/src/components/forms/Channels.jsx index dbce5cf3..e67d9419 100644 --- a/frontend/src/components/forms/Channels.jsx +++ b/frontend/src/components/forms/Channels.jsx @@ -34,6 +34,7 @@ import { import { ListOrdered, SquarePlus, SquareX, X } from 'lucide-react'; import useEPGsStore from '../../store/epgs'; import { Dropzone } from '@mantine/dropzone'; +import { notifications } from '@mantine/notifications'; import { FixedSizeList as List } from 'react-window'; const ChannelsForm = ({ channel = null, isOpen, onClose }) => { @@ -81,10 +82,28 @@ const ChannelsForm = ({ channel = null, isOpen, onClose }) => { const handleLogoChange = async (files) => { if (files.length === 1) { - const retval = await API.uploadLogo(files[0]); - await fetchLogos(); - setLogoPreview(retval.cache_url); - formik.setFieldValue('logo_id', retval.id); + const file = files[0]; + + // Validate file size on frontend first + if (file.size > 5 * 1024 * 1024) { + // 5MB + notifications.show({ + title: 'Error', + message: 'File too large. Maximum size is 5MB.', + color: 'red', + }); + return; + } + + try { + const retval = await API.uploadLogo(file); + await fetchLogos(); + setLogoPreview(retval.cache_url); + formik.setFieldValue('logo_id', retval.id); + } catch (error) { + console.error('Logo upload failed:', error); + // Error notification is already handled in API.uploadLogo + } } else { setLogoPreview(null); } diff --git a/frontend/src/components/forms/Logo.jsx b/frontend/src/components/forms/Logo.jsx index c3e48d5d..436dbf8a 100644 --- a/frontend/src/components/forms/Logo.jsx +++ b/frontend/src/components/forms/Logo.jsx @@ -51,12 +51,12 @@ const LogoForm = ({ logo = null, isOpen, onClose }) => { onClose(); } catch (error) { let errorMessage = logo ? 'Failed to update logo' : 'Failed to create logo'; - + // Handle specific timeout errors if (error.code === 'NETWORK_ERROR' || error.message?.includes('timeout')) { errorMessage = 'Request timed out. Please try again.'; } - + notifications.show({ title: 'Error', message: errorMessage, @@ -85,6 +85,17 @@ const LogoForm = ({ logo = null, isOpen, onClose }) => { if (files.length === 0) return; const file = files[0]; + + // Validate file size on frontend first + if (file.size > 5 * 1024 * 1024) { // 5MB + notifications.show({ + title: 'Error', + message: 'File too large. Maximum size is 5MB.', + color: 'red', + }); + return; + } + setUploading(true); try { @@ -102,12 +113,16 @@ const LogoForm = ({ logo = null, isOpen, onClose }) => { }); } catch (error) { let errorMessage = 'Failed to upload logo'; - + // Handle specific timeout errors if (error.code === 'NETWORK_ERROR' || error.message?.includes('timeout')) { errorMessage = 'Upload timed out. Please try again.'; + } else if (error.status === 413) { + errorMessage = 'File too large. 
Please choose a smaller file.'; + } else if (error.body?.error) { + errorMessage = error.body.error; } - + notifications.show({ title: 'Error', message: errorMessage, From 5d82fd17c2865aef1b36a743a80248bdadf76114 Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Thu, 17 Jul 2025 21:09:05 -0500 Subject: [PATCH 0634/1435] Treat local files as valid urls --- frontend/src/components/forms/Logo.jsx | 15 ++++++++++++++- 1 file changed, 14 insertions(+), 1 deletion(-) diff --git a/frontend/src/components/forms/Logo.jsx b/frontend/src/components/forms/Logo.jsx index 436dbf8a..7c685b2f 100644 --- a/frontend/src/components/forms/Logo.jsx +++ b/frontend/src/components/forms/Logo.jsx @@ -29,7 +29,20 @@ const LogoForm = ({ logo = null, isOpen, onClose }) => { }, validationSchema: Yup.object({ name: Yup.string().required('Name is required'), - url: Yup.string().url('Must be a valid URL').required('URL is required'), + url: Yup.string() + .required('URL is required') + .test('valid-url-or-path', 'Must be a valid URL or local file path', (value) => { + if (!value) return false; + // Allow local file paths starting with /logos/ + if (value.startsWith('/logos/')) return true; + // Allow valid URLs + try { + new URL(value); + return true; + } catch { + return false; + } + }), }), onSubmit: async (values, { setSubmitting }) => { try { From 23bd5484ee1a0e3a472ba865a961c661189de5ef Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Thu, 17 Jul 2025 21:12:05 -0500 Subject: [PATCH 0635/1435] Enlarge logo on hover. --- frontend/src/components/forms/Logo.jsx | 13 +++++++++++++ frontend/src/components/tables/LogosTable.jsx | 10 ++++++++++ 2 files changed, 23 insertions(+) diff --git a/frontend/src/components/forms/Logo.jsx b/frontend/src/components/forms/Logo.jsx index 7c685b2f..bd711443 100644 --- a/frontend/src/components/forms/Logo.jsx +++ b/frontend/src/components/forms/Logo.jsx @@ -179,6 +179,19 @@ const LogoForm = ({ logo = null, isOpen, onClose }) => { height={75} fit="contain" fallbackSrc="/logo.png" + style={{ + transition: 'transform 0.3s ease', + cursor: 'pointer', + ':hover': { + transform: 'scale(1.5)' + } + }} + onMouseEnter={(e) => { + e.target.style.transform = 'scale(1.5)'; + }} + onMouseLeave={(e) => { + e.target.style.transform = 'scale(1)'; + }} />
    diff --git a/frontend/src/components/tables/LogosTable.jsx b/frontend/src/components/tables/LogosTable.jsx index 477da0cf..41799449 100644 --- a/frontend/src/components/tables/LogosTable.jsx +++ b/frontend/src/components/tables/LogosTable.jsx @@ -347,6 +347,16 @@ const LogosTable = () => { height={30} fit="contain" fallbackSrc="/logo.png" + style={{ + transition: 'transform 0.3s ease', + cursor: 'pointer', + }} + onMouseEnter={(e) => { + e.target.style.transform = 'scale(1.5)'; + }} + onMouseLeave={(e) => { + e.target.style.transform = 'scale(1)'; + }} />
    ), From e7771d5b6764c7c18c15384d51fba3adbec3da23 Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Fri, 18 Jul 2025 11:36:15 -0500 Subject: [PATCH 0636/1435] Allow deleting logos that are assigned to channels. --- apps/channels/api_views.py | 42 ++++++++++--------- apps/channels/serializers.py | 10 +++++ frontend/src/components/forms/Logo.jsx | 2 + frontend/src/components/tables/LogosTable.jsx | 5 ++- 4 files changed, 38 insertions(+), 21 deletions(-) diff --git a/apps/channels/api_views.py b/apps/channels/api_views.py index ee7109b7..dbdd4271 100644 --- a/apps/channels/api_views.py +++ b/apps/channels/api_views.py @@ -1053,24 +1053,27 @@ class BulkDeleteLogosAPIView(APIView): def delete(self, request): logo_ids = request.data.get("logo_ids", []) - # Check if any logos are being used by channels - used_logos = Logo.objects.filter( - id__in=logo_ids, - channels__isnull=False - ).distinct() + # Get logos and their usage info before deletion + logos_to_delete = Logo.objects.filter(id__in=logo_ids) + total_channels_affected = 0 + + for logo in logos_to_delete: + if logo.channels.exists(): + channel_count = logo.channels.count() + total_channels_affected += channel_count + # Remove logo from channels + logo.channels.update(logo=None) + logger.info(f"Removed logo {logo.name} from {channel_count} channels before deletion") - if used_logos.exists(): - used_names = list(used_logos.values_list('name', flat=True)) - return Response( - {"error": f"Cannot delete logos that are in use: {', '.join(used_names)}"}, - status=status.HTTP_400_BAD_REQUEST - ) + # Delete logos + deleted_count = logos_to_delete.delete()[0] - # Delete logos that are not in use - deleted_count = Logo.objects.filter(id__in=logo_ids).delete()[0] + message = f"Successfully deleted {deleted_count} logos" + if total_channels_affected > 0: + message += f" and removed them from {total_channels_affected} channels" return Response( - {"message": f"Successfully deleted {deleted_count} logos"}, + {"message": message}, status=status.HTTP_204_NO_CONTENT ) @@ -1152,15 +1155,14 @@ class LogoViewSet(viewsets.ModelViewSet): return super().update(request, *args, **kwargs) def destroy(self, request, *args, **kwargs): - """Delete a logo""" + """Delete a logo and remove it from any channels using it""" logo = self.get_object() - # Check if logo is being used by any channels + # Instead of preventing deletion, remove the logo from channels if logo.channels.exists(): - return Response( - {"error": f"Cannot delete logo as it is used by {logo.channels.count()} channel(s)"}, - status=status.HTTP_400_BAD_REQUEST - ) + channel_count = logo.channels.count() + logo.channels.update(logo=None) + logger.info(f"Removed logo {logo.name} from {channel_count} channels before deletion") return super().destroy(request, *args, **kwargs) diff --git a/apps/channels/serializers.py b/apps/channels/serializers.py index a933c496..82b5f808 100644 --- a/apps/channels/serializers.py +++ b/apps/channels/serializers.py @@ -28,6 +28,16 @@ class LogoSerializer(serializers.ModelSerializer): model = Logo fields = ["id", "name", "url", "cache_url", "channel_count", "is_used", "channel_names"] + def validate_url(self, value): + """Validate that the URL is unique for creation or update""" + if self.instance and self.instance.url == value: + return value + + if Logo.objects.filter(url=value).exists(): + raise serializers.ValidationError("A logo with this URL already exists.") + + return value + def get_cache_url(self, obj): # return f"/api/channels/logos/{obj.id}/cache/" request = 
self.context.get("request") diff --git a/frontend/src/components/forms/Logo.jsx b/frontend/src/components/forms/Logo.jsx index bd711443..c724c21c 100644 --- a/frontend/src/components/forms/Logo.jsx +++ b/frontend/src/components/forms/Logo.jsx @@ -68,6 +68,8 @@ const LogoForm = ({ logo = null, isOpen, onClose }) => { // Handle specific timeout errors if (error.code === 'NETWORK_ERROR' || error.message?.includes('timeout')) { errorMessage = 'Request timed out. Please try again.'; + } else if (error.response?.data?.error) { + errorMessage = error.response.data.error; } notifications.show({ diff --git a/frontend/src/components/tables/LogosTable.jsx b/frontend/src/components/tables/LogosTable.jsx index 41799449..0ec6488f 100644 --- a/frontend/src/components/tables/LogosTable.jsx +++ b/frontend/src/components/tables/LogosTable.jsx @@ -718,6 +718,9 @@ const LogosTable = () => { isBulkDelete ? (
    Are you sure you want to delete {selectedRows.size} selected logos? + + Any channels using these logos will have their logo removed. + This action cannot be undone. @@ -727,7 +730,7 @@ const LogosTable = () => { Are you sure you want to delete the logo "{logoToDelete.name}"? {logoToDelete.channel_count > 0 && ( - Warning: This logo is currently used by {logoToDelete.channel_count} channel{logoToDelete.channel_count !== 1 ? 's' : ''}. + This logo is currently used by {logoToDelete.channel_count} channel{logoToDelete.channel_count !== 1 ? 's' : ''}. They will have their logo removed. )} From 0fcb8b9f2eeec7a74e03d9afe36c07d898f50a1e Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Fri, 18 Jul 2025 13:44:00 -0500 Subject: [PATCH 0637/1435] Don't convert urls in the store. --- frontend/src/store/channels.jsx | 3 --- 1 file changed, 3 deletions(-) diff --git a/frontend/src/store/channels.jsx b/frontend/src/store/channels.jsx index a4c61149..b18b02f6 100644 --- a/frontend/src/store/channels.jsx +++ b/frontend/src/store/channels.jsx @@ -232,7 +232,6 @@ const useChannelsStore = create((set, get) => ({ logos: logos.reduce((acc, logo) => { acc[logo.id] = { ...logo, - url: logo.url.replace(/^\/data/, ''), }; return acc; }, {}), @@ -250,7 +249,6 @@ const useChannelsStore = create((set, get) => ({ ...state.logos, [newLogo.id]: { ...newLogo, - url: newLogo.url.replace(/^\/data/, ''), }, }, })), @@ -261,7 +259,6 @@ const useChannelsStore = create((set, get) => ({ ...state.logos, [logo.id]: { ...logo, - url: logo.url.replace(/^\/data/, ''), }, }, })), From e27f45809bb479967ba92fef108857c55d34c556 Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Fri, 18 Jul 2025 13:47:50 -0500 Subject: [PATCH 0638/1435] Allow /data/logos as a url. --- frontend/src/components/forms/Logo.jsx | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/frontend/src/components/forms/Logo.jsx b/frontend/src/components/forms/Logo.jsx index c724c21c..e209659c 100644 --- a/frontend/src/components/forms/Logo.jsx +++ b/frontend/src/components/forms/Logo.jsx @@ -33,8 +33,8 @@ const LogoForm = ({ logo = null, isOpen, onClose }) => { .required('URL is required') .test('valid-url-or-path', 'Must be a valid URL or local file path', (value) => { if (!value) return false; - // Allow local file paths starting with /logos/ - if (value.startsWith('/logos/')) return true; + // Allow local file paths starting with /data/logos/ + if (value.startsWith('/data/logos/')) return true; // Allow valid URLs try { new URL(value); From 1ece74a0b0d2cbdbe849988ac274721fae1f8bda Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Fri, 18 Jul 2025 14:07:58 -0500 Subject: [PATCH 0639/1435] Scan logos folder for new logos. 
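
The scan is gated so it never imports half-written files and never re-imports
files it already knows about. Distilled from the diff below into a standalone
sketch (should_process and logo_exists_in_db are illustrative names, not
symbols added by this patch):

    import os
    import time

    MIN_AGE_SECONDS = 6    # files younger than this may still be mid-copy
    STARTUP_SKIP_AGE = 30  # on first scan, trust old files already in the DB

    def should_process(filepath, redis_client, logo_exists_in_db, now=None):
        """Return True when a logo file needs to be (re)imported."""
        now = now or time.time()
        mtime = os.path.getmtime(filepath)
        stored = redis_client.get("processed_file:" + filepath)
        if stored is None and (now - mtime) > STARTUP_SKIP_AGE:
            if logo_exists_in_db(filepath):
                return False  # old file already imported; only record its mtime
        if (now - mtime) < MIN_AGE_SECONDS:
            return False  # too new, possibly still being written
        if stored is not None and float(stored) >= mtime:
            return False  # this exact version was already processed
        return True

Files that pass the gate are then imported with
Logo.objects.get_or_create(url=filepath), using the filename without its
extension as the default name.
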
--- core/tasks.py | 117 +++++++++++++++++++++++++++++++++++++++++++++++++- 1 file changed, 115 insertions(+), 2 deletions(-) diff --git a/core/tasks.py b/core/tasks.py index e8b36162..41e5d707 100644 --- a/core/tasks.py +++ b/core/tasks.py @@ -21,10 +21,12 @@ logger = logging.getLogger(__name__) EPG_WATCH_DIR = '/data/epgs' M3U_WATCH_DIR = '/data/m3us' +LOGO_WATCH_DIR = '/data/logos' MIN_AGE_SECONDS = 6 STARTUP_SKIP_AGE = 30 REDIS_PREFIX = "processed_file:" REDIS_TTL = 60 * 60 * 24 * 3 # expire keys after 3 days (optional) +SUPPORTED_LOGO_FORMATS = ['.jpg', '.jpeg', '.png', '.gif', '.webp', '.bmp', '.svg'] # Store the last known value to compare with new data last_known_data = {} @@ -56,10 +58,11 @@ def scan_and_process_files(): global _first_scan_completed redis_client = RedisClient.get_client() now = time.time() + # Check if directories exist - dirs_exist = all(os.path.exists(d) for d in [M3U_WATCH_DIR, EPG_WATCH_DIR]) + dirs_exist = all(os.path.exists(d) for d in [M3U_WATCH_DIR, EPG_WATCH_DIR, LOGO_WATCH_DIR]) if not dirs_exist: - throttled_log(logger.warning, f"Watch directories missing: M3U ({os.path.exists(M3U_WATCH_DIR)}), EPG ({os.path.exists(EPG_WATCH_DIR)})", "watch_dirs_missing") + throttled_log(logger.warning, f"Watch directories missing: M3U ({os.path.exists(M3U_WATCH_DIR)}), EPG ({os.path.exists(EPG_WATCH_DIR)}), LOGO ({os.path.exists(LOGO_WATCH_DIR)})", "watch_dirs_missing") # Process M3U files m3u_files = [f for f in os.listdir(M3U_WATCH_DIR) @@ -266,6 +269,116 @@ def scan_and_process_files(): logger.trace(f"EPG processing complete: {epg_processed} processed, {epg_skipped} skipped, {epg_errors} errors") + # Process Logo files + try: + logo_files = os.listdir(LOGO_WATCH_DIR) if os.path.exists(LOGO_WATCH_DIR) else [] + logger.trace(f"Found {len(logo_files)} files in LOGO directory") + except Exception as e: + logger.error(f"Error listing LOGO directory: {e}") + logo_files = [] + + logo_processed = 0 + logo_skipped = 0 + logo_errors = 0 + + for filename in logo_files: + filepath = os.path.join(LOGO_WATCH_DIR, filename) + + if not os.path.isfile(filepath): + if _first_scan_completed: + logger.trace(f"Skipping {filename}: Not a file") + else: + logger.debug(f"Skipping {filename}: Not a file") + logo_skipped += 1 + continue + + # Check if file has supported logo extension + file_ext = os.path.splitext(filename)[1].lower() + if file_ext not in SUPPORTED_LOGO_FORMATS: + if _first_scan_completed: + logger.trace(f"Skipping {filename}: Not a supported logo format") + else: + logger.debug(f"Skipping {filename}: Not a supported logo format") + logo_skipped += 1 + continue + + mtime = os.path.getmtime(filepath) + age = now - mtime + redis_key = REDIS_PREFIX + filepath + stored_mtime = redis_client.get(redis_key) + + # Check if logo already exists in database + if not stored_mtime and age > STARTUP_SKIP_AGE: + from apps.channels.models import Logo + existing_logo = Logo.objects.filter(url=filepath).exists() + if existing_logo: + if _first_scan_completed: + logger.trace(f"Skipping {filename}: Already exists in database") + else: + logger.debug(f"Skipping {filename}: Already exists in database") + redis_client.set(redis_key, mtime, ex=REDIS_TTL) + logo_skipped += 1 + continue + else: + logger.debug(f"Processing {filename} despite age: Not found in database") + + # File too new — probably still being written + if age < MIN_AGE_SECONDS: + if _first_scan_completed: + logger.trace(f"Skipping {filename}: Too new, possibly still being written (age={age}s)") + else: + logger.debug(f"Skipping 
{filename}: Too new, possibly still being written (age={age}s)") + logo_skipped += 1 + continue + + # Skip if we've already processed this mtime + if stored_mtime and float(stored_mtime) >= mtime: + if _first_scan_completed: + logger.trace(f"Skipping {filename}: Already processed this version") + else: + logger.debug(f"Skipping {filename}: Already processed this version") + logo_skipped += 1 + continue + + try: + from apps.channels.models import Logo + + # Create logo entry with just the filename (without extension) as name + logo_name = os.path.splitext(filename)[0] + + logo, created = Logo.objects.get_or_create( + url=filepath, + defaults={ + "name": logo_name, + } + ) + + redis_client.set(redis_key, mtime, ex=REDIS_TTL) + + if created: + logger.info(f"Created new logo entry: {logo_name}") + else: + logger.debug(f"Logo entry already exists: {logo_name}") + + logo_processed += 1 + + # Send websocket notification + channel_layer = get_channel_layer() + async_to_sync(channel_layer.group_send)( + "updates", + { + "type": "update", + "data": {"success": True, "type": "logo_file", "filename": filename, "created": created} + }, + ) + + except Exception as e: + logger.error(f"Error processing logo file {filename}: {str(e)}", exc_info=True) + logo_errors += 1 + continue + + logger.trace(f"LOGO processing complete: {logo_processed} processed, {logo_skipped} skipped, {logo_errors} errors") + # Mark that the first scan is complete _first_scan_completed = True From 13672919d0f7c10f0abcc3ebebcbc8169e47711f Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Fri, 18 Jul 2025 14:26:09 -0500 Subject: [PATCH 0640/1435] Fetch Playlists on successful m3u update. --- frontend/src/WebSocket.jsx | 1 + 1 file changed, 1 insertion(+) diff --git a/frontend/src/WebSocket.jsx b/frontend/src/WebSocket.jsx index 538ffda3..ae0316ad 100644 --- a/frontend/src/WebSocket.jsx +++ b/frontend/src/WebSocket.jsx @@ -218,6 +218,7 @@ export const WebsocketProvider = ({ children }) => { } updatePlaylist(updateData); + fetchPlaylists(); // Refresh playlists to ensure UI is up-to-date } else { // Log when playlist can't be found for debugging purposes console.warn( From 479826709bdd41c3bf1a2923ba11f3ddf30e6121 Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Fri, 18 Jul 2025 15:01:26 -0500 Subject: [PATCH 0641/1435] Fetch logos when logos are added by filesystem scan. 
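
Rather than one websocket message per imported file, the scan now emits a
single summary event and the frontend responds with a single fetchLogos()
call. The payload shape, restated as a sketch (build_logo_scan_summary is an
illustrative wrapper; the field names come from the diff below):

    def build_logo_scan_summary(processed, skipped, errors, total_files):
        """Payload for the one summary event emitted after a scan pass."""
        return {
            "success": True,
            "type": "logo_processing_summary",
            "processed": processed,  # Logo rows created or already present
            "skipped": skipped,      # unchanged, too-new, or unsupported files
            "errors": errors,
            "total_files": total_files,
            "message": (
                f"Logo processing complete: {processed} processed, "
                f"{skipped} skipped, {errors} errors"
            ),
        }

The event is only sent when processed or errors is non-zero, so idle scan
passes stay silent.
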
--- core/tasks.py | 26 +++++++++++++++++--------- frontend/src/WebSocket.jsx | 11 +++++++++++ 2 files changed, 28 insertions(+), 9 deletions(-) diff --git a/core/tasks.py b/core/tasks.py index 41e5d707..3a738611 100644 --- a/core/tasks.py +++ b/core/tasks.py @@ -362,15 +362,7 @@ def scan_and_process_files(): logo_processed += 1 - # Send websocket notification - channel_layer = get_channel_layer() - async_to_sync(channel_layer.group_send)( - "updates", - { - "type": "update", - "data": {"success": True, "type": "logo_file", "filename": filename, "created": created} - }, - ) + # Remove individual websocket notification - will send summary instead except Exception as e: logger.error(f"Error processing logo file {filename}: {str(e)}", exc_info=True) @@ -379,6 +371,22 @@ def scan_and_process_files(): logger.trace(f"LOGO processing complete: {logo_processed} processed, {logo_skipped} skipped, {logo_errors} errors") + # Send summary websocket update for logo processing + if logo_processed > 0 or logo_errors > 0: + send_websocket_update( + "updates", + "update", + { + "success": True, + "type": "logo_processing_summary", + "processed": logo_processed, + "skipped": logo_skipped, + "errors": logo_errors, + "total_files": len(logo_files), + "message": f"Logo processing complete: {logo_processed} processed, {logo_skipped} skipped, {logo_errors} errors" + } + ) + # Mark that the first scan is complete _first_scan_completed = True diff --git a/frontend/src/WebSocket.jsx b/frontend/src/WebSocket.jsx index ae0316ad..156a7e29 100644 --- a/frontend/src/WebSocket.jsx +++ b/frontend/src/WebSocket.jsx @@ -418,6 +418,16 @@ export const WebsocketProvider = ({ children }) => { } break; + case 'logo_processing_summary': + notifications.show({ + title: 'Logo Processing Summary', + message: `Logo processing complete: ${parsedEvent.data.processed} logos processed, ${parsedEvent.data.duplicates_merged} duplicates merged.`, + color: 'blue', + autoClose: 5000, + }); + fetchLogos(); + break; + default: console.error( `Unknown websocket event type: ${parsedEvent.data?.type}` @@ -488,6 +498,7 @@ export const WebsocketProvider = ({ children }) => { const setProfilePreview = usePlaylistsStore((s) => s.setProfilePreview); const fetchEPGData = useEPGsStore((s) => s.fetchEPGData); const fetchEPGs = useEPGsStore((s) => s.fetchEPGs); + const fetchLogos = useChannelsStore((s) => s.fetchLogos); const ret = useMemo(() => { return [isReady, ws.current?.send.bind(ws.current), val]; From e876af1aa2a79b0b7046a56b31c99ab85010c08b Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Fri, 18 Jul 2025 15:04:34 -0500 Subject: [PATCH 0642/1435] Scan sub folders for logos. 
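
os.listdir only sees the top level of /data/logos, so logos sorted into
subfolders were invisible to the scanner. A minimal sketch of the recursive
collection this patch switches to (it folds in the extension filter that the
loop body applies separately; find_logo_files is an illustrative name):

    import os

    SUPPORTED_LOGO_FORMATS = {'.jpg', '.jpeg', '.png', '.gif', '.webp', '.bmp', '.svg'}

    def find_logo_files(root_dir):
        """Yield every candidate logo file under root_dir, at any depth."""
        for root, _dirs, files in os.walk(root_dir):
            for filename in files:
                if os.path.splitext(filename)[1].lower() in SUPPORTED_LOGO_FORMATS:
                    yield os.path.join(root, filename)

Logo names remain the basename without extension, so files with the same name
in different subfolders become distinct Logo rows (keyed by path) that share a
display name.
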
--- core/tasks.py | 16 +++++++++------- 1 file changed, 9 insertions(+), 7 deletions(-) diff --git a/core/tasks.py b/core/tasks.py index 3a738611..47bc8cf0 100644 --- a/core/tasks.py +++ b/core/tasks.py @@ -269,10 +269,14 @@ def scan_and_process_files(): logger.trace(f"EPG processing complete: {epg_processed} processed, {epg_skipped} skipped, {epg_errors} errors") - # Process Logo files + # Process Logo files (including subdirectories) try: - logo_files = os.listdir(LOGO_WATCH_DIR) if os.path.exists(LOGO_WATCH_DIR) else [] - logger.trace(f"Found {len(logo_files)} files in LOGO directory") + logo_files = [] + if os.path.exists(LOGO_WATCH_DIR): + for root, dirs, files in os.walk(LOGO_WATCH_DIR): + for filename in files: + logo_files.append(os.path.join(root, filename)) + logger.trace(f"Found {len(logo_files)} files in LOGO directory (including subdirectories)") except Exception as e: logger.error(f"Error listing LOGO directory: {e}") logo_files = [] @@ -281,8 +285,8 @@ def scan_and_process_files(): logo_skipped = 0 logo_errors = 0 - for filename in logo_files: - filepath = os.path.join(LOGO_WATCH_DIR, filename) + for filepath in logo_files: + filename = os.path.basename(filepath) if not os.path.isfile(filepath): if _first_scan_completed: @@ -362,8 +366,6 @@ def scan_and_process_files(): logo_processed += 1 - # Remove individual websocket notification - will send summary instead - except Exception as e: logger.error(f"Error processing logo file {filename}: {str(e)}", exc_info=True) logo_errors += 1 From d926d90dd913d266701193e8a4401c12930c591d Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Fri, 18 Jul 2025 15:14:11 -0500 Subject: [PATCH 0643/1435] Fix websocket message. --- frontend/src/WebSocket.jsx | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/frontend/src/WebSocket.jsx b/frontend/src/WebSocket.jsx index 156a7e29..2e210461 100644 --- a/frontend/src/WebSocket.jsx +++ b/frontend/src/WebSocket.jsx @@ -421,7 +421,7 @@ export const WebsocketProvider = ({ children }) => { case 'logo_processing_summary': notifications.show({ title: 'Logo Processing Summary', - message: `Logo processing complete: ${parsedEvent.data.processed} logos processed, ${parsedEvent.data.duplicates_merged} duplicates merged.`, + message: `${parsedEvent.data.message}`, color: 'blue', autoClose: 5000, }); From bc08cb1270a618deec0bc924c7f00a19013f9404 Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Fri, 18 Jul 2025 15:23:30 -0500 Subject: [PATCH 0644/1435] Ask to delete local files as well. 
--- apps/channels/api_views.py | 34 ++++++++++++++++++- frontend/src/api.js | 19 ++++++++--- .../src/components/ConfirmationDialog.jsx | 28 +++++++++++++-- frontend/src/components/tables/LogosTable.jsx | 27 +++++++++++---- 4 files changed, 93 insertions(+), 15 deletions(-) diff --git a/apps/channels/api_views.py b/apps/channels/api_views.py index dbdd4271..0126aaf9 100644 --- a/apps/channels/api_views.py +++ b/apps/channels/api_views.py @@ -1052,12 +1052,28 @@ class BulkDeleteLogosAPIView(APIView): ) def delete(self, request): logo_ids = request.data.get("logo_ids", []) + delete_files = request.data.get("delete_files", False) # Get logos and their usage info before deletion logos_to_delete = Logo.objects.filter(id__in=logo_ids) total_channels_affected = 0 - + local_files_deleted = 0 + for logo in logos_to_delete: + # Handle file deletion for local files + if delete_files and logo.url and logo.url.startswith('/data/logos'): + try: + if os.path.exists(logo.url): + os.remove(logo.url) + local_files_deleted += 1 + logger.info(f"Deleted local logo file: {logo.url}") + except Exception as e: + logger.error(f"Failed to delete logo file {logo.url}: {str(e)}") + return Response( + {"error": f"Failed to delete logo file {logo.url}: {str(e)}"}, + status=status.HTTP_500_INTERNAL_SERVER_ERROR + ) + if logo.channels.exists(): channel_count = logo.channels.count() total_channels_affected += channel_count @@ -1071,6 +1087,8 @@ class BulkDeleteLogosAPIView(APIView): message = f"Successfully deleted {deleted_count} logos" if total_channels_affected > 0: message += f" and removed them from {total_channels_affected} channels" + if local_files_deleted > 0: + message += f" and deleted {local_files_deleted} local files" return Response( {"message": message}, @@ -1157,6 +1175,20 @@ class LogoViewSet(viewsets.ModelViewSet): def destroy(self, request, *args, **kwargs): """Delete a logo and remove it from any channels using it""" logo = self.get_object() + delete_file = request.query_params.get('delete_file', 'false').lower() == 'true' + + # Check if it's a local file that should be deleted + if delete_file and logo.url and logo.url.startswith('/data/logos'): + try: + if os.path.exists(logo.url): + os.remove(logo.url) + logger.info(f"Deleted local logo file: {logo.url}") + except Exception as e: + logger.error(f"Failed to delete logo file {logo.url}: {str(e)}") + return Response( + {"error": f"Failed to delete logo file: {str(e)}"}, + status=status.HTTP_500_INTERNAL_SERVER_ERROR + ) # Instead of preventing deletion, remove the logo from channels if logo.channels.exists(): diff --git a/frontend/src/api.js b/frontend/src/api.js index bcffc920..b285e2ea 100644 --- a/frontend/src/api.js +++ b/frontend/src/api.js @@ -1337,9 +1337,15 @@ export default class API { } } - static async deleteLogo(id) { + static async deleteLogo(id, deleteFile = false) { try { - await request(`${host}/api/channels/logos/${id}/`, { + const params = new URLSearchParams(); + if (deleteFile) { + params.append('delete_file', 'true'); + } + + const url = `${host}/api/channels/logos/${id}/?${params.toString()}`; + await request(url, { method: 'DELETE', }); @@ -1351,11 +1357,16 @@ export default class API { } } - static async deleteLogos(ids) { + static async deleteLogos(ids, deleteFiles = false) { try { + const body = { logo_ids: ids }; + if (deleteFiles) { + body.delete_files = true; + } + await request(`${host}/api/channels/logos/bulk-delete/`, { method: 'DELETE', - body: { logo_ids: ids }, + body: body, }); // Remove multiple logos from store diff 
--git a/frontend/src/components/ConfirmationDialog.jsx b/frontend/src/components/ConfirmationDialog.jsx index 8f96708d..1cfbe84d 100644 --- a/frontend/src/components/ConfirmationDialog.jsx +++ b/frontend/src/components/ConfirmationDialog.jsx @@ -29,12 +29,15 @@ const ConfirmationDialog = ({ onSuppressChange, size = 'md', zIndex = 1000, + showDeleteFileOption = false, + deleteFileLabel = "Also delete files from disk", }) => { const suppressWarning = useWarningsStore((s) => s.suppressWarning); const isWarningSuppressed = useWarningsStore((s) => s.isWarningSuppressed); const [suppressChecked, setSuppressChecked] = useState( isWarningSuppressed(actionKey) ); + const [deleteFiles, setDeleteFiles] = useState(false); const handleToggleSuppress = (e) => { setSuppressChecked(e.currentTarget.checked); @@ -47,13 +50,23 @@ const ConfirmationDialog = ({ if (suppressChecked) { suppressWarning(actionKey); } - onConfirm(); + if (showDeleteFileOption) { + onConfirm(deleteFiles); + } else { + onConfirm(); + } + setDeleteFiles(false); // Reset for next time + }; + + const handleClose = () => { + setDeleteFiles(false); // Reset for next time + onClose(); }; return ( )} + {showDeleteFileOption && ( + setDeleteFiles(event.currentTarget.checked)} + label={deleteFileLabel} + mb="md" + /> + )} + - + + )} + + {/* Advanced Stats (expandable) */} + + + {renderStatsCategory('Video', categorizedStats.video)} + {renderStatsCategory('Audio', categorizedStats.audio)} + {renderStatsCategory('Technical', categorizedStats.technical)} + {renderStatsCategory('Other', categorizedStats.other)} + + {/* Show when stats were last updated */} + {stream.stream_stats_updated_at && ( + + Last updated: {new Date(stream.stream_stats_updated_at).toLocaleString()} + + )} + + ); }, @@ -296,7 +454,7 @@ const ChannelStreams = ({ channel, isExpanded }) => { ), }, ], - [data, playlists, m3uAccountsMap] + [data, playlists, m3uAccountsMap, expandedAdvancedStats] ), data, state: { From 7551869a2eaa8101926af557839947fa68288b6b Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Tue, 29 Jul 2025 15:12:14 -0500 Subject: [PATCH 0674/1435] Remove audio bitrate from basic stats. --- frontend/src/components/tables/ChannelTableStreams.jsx | 7 +------ 1 file changed, 1 insertion(+), 6 deletions(-) diff --git a/frontend/src/components/tables/ChannelTableStreams.jsx b/frontend/src/components/tables/ChannelTableStreams.jsx index 373427bb..2da4002d 100644 --- a/frontend/src/components/tables/ChannelTableStreams.jsx +++ b/frontend/src/components/tables/ChannelTableStreams.jsx @@ -367,7 +367,7 @@ const ChannelStreams = ({ channel, isExpanded }) => { )} {/* Audio Information */} - {(stream.stream_stats.audio_codec || stream.stream_stats.audio_channels || stream.stream_stats.audio_bitrate) && ( + {(stream.stream_stats.audio_codec || stream.stream_stats.audio_channels) && ( <> Audio: {stream.stream_stats.audio_channels && ( @@ -380,11 +380,6 @@ const ChannelStreams = ({ channel, isExpanded }) => { {stream.stream_stats.audio_codec.toUpperCase()} )} - {stream.stream_stats.audio_bitrate && ( - - {stream.stream_stats.audio_bitrate} kbps - - )} )} From e26ecad013c0fba5f76c5df0fbecc721462871de Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Tue, 29 Jul 2025 15:17:18 -0500 Subject: [PATCH 0675/1435] Move m3u and url badges to same line as stream name. 
--- frontend/src/components/tables/ChannelTableStreams.jsx | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/frontend/src/components/tables/ChannelTableStreams.jsx b/frontend/src/components/tables/ChannelTableStreams.jsx index 2da4002d..d226c52a 100644 --- a/frontend/src/components/tables/ChannelTableStreams.jsx +++ b/frontend/src/components/tables/ChannelTableStreams.jsx @@ -303,8 +303,8 @@ const ChannelStreams = ({ channel, isExpanded }) => { return ( - {stream.name} - + + {stream.name} {accountName} From 613c0d8bb559dd25c37229e68dc15f38ae77a914 Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Tue, 29 Jul 2025 15:43:44 -0500 Subject: [PATCH 0676/1435] Add input_bitrate to technical for future use. --- frontend/src/components/tables/ChannelTableStreams.jsx | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/frontend/src/components/tables/ChannelTableStreams.jsx b/frontend/src/components/tables/ChannelTableStreams.jsx index d226c52a..11fa9600 100644 --- a/frontend/src/components/tables/ChannelTableStreams.jsx +++ b/frontend/src/components/tables/ChannelTableStreams.jsx @@ -181,7 +181,7 @@ const ChannelStreams = ({ channel, isExpanded }) => { basic: ['resolution', 'video_codec', 'source_fps', 'audio_codec', 'audio_channels'], video: ['video_bitrate', 'pixel_format', 'width', 'height', 'aspect_ratio', 'frame_rate'], audio: ['audio_bitrate', 'sample_rate', 'audio_format', 'audio_channels_layout'], - technical: ['stream_type', 'container_format', 'duration', 'file_size', 'ffmpeg_output_bitrate'], + technical: ['stream_type', 'container_format', 'duration', 'file_size', 'ffmpeg_output_bitrate', 'input_bitrate'], other: [] // Will catch anything not categorized above }; From 26e237f2d10cc57148f2717129ae086c2685a785 Mon Sep 17 00:00:00 2001 From: GitHub Actions Date: Tue, 29 Jul 2025 21:06:06 +0000 Subject: [PATCH 0677/1435] Release v0.7.1 --- version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/version.py b/version.py index aca85fa0..3c0bf9df 100644 --- a/version.py +++ b/version.py @@ -1,5 +1,5 @@ """ Dispatcharr version information. """ -__version__ = '0.7.0' # Follow semantic versioning (MAJOR.MINOR.PATCH) +__version__ = '0.7.1' # Follow semantic versioning (MAJOR.MINOR.PATCH) __timestamp__ = None # Set during CI/CD build process From 4ae66e0bc9572001c4a1bad5cd771861833a019f Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Thu, 31 Jul 2025 09:52:02 -0500 Subject: [PATCH 0678/1435] Add membership creation in UpdateChannelMembershipAPIView if not found. 
Fixes #275 --- apps/channels/api_views.py | 14 +++++++++++--- 1 file changed, 11 insertions(+), 3 deletions(-) diff --git a/apps/channels/api_views.py b/apps/channels/api_views.py index 636d4875..0221a266 100644 --- a/apps/channels/api_views.py +++ b/apps/channels/api_views.py @@ -1508,9 +1508,17 @@ class UpdateChannelMembershipAPIView(APIView): """Enable or disable a channel for a specific group""" channel_profile = get_object_or_404(ChannelProfile, id=profile_id) channel = get_object_or_404(Channel, id=channel_id) - membership = get_object_or_404( - ChannelProfileMembership, channel_profile=channel_profile, channel=channel - ) + try: + membership = ChannelProfileMembership.objects.get( + channel_profile=channel_profile, channel=channel + ) + except ChannelProfileMembership.DoesNotExist: + # Create the membership if it does not exist (for custom channels) + membership = ChannelProfileMembership.objects.create( + channel_profile=channel_profile, + channel=channel, + enabled=False # Default to False, will be updated below + ) serializer = ChannelProfileMembershipSerializer( membership, data=request.data, partial=True From e029cd8b3dbbff550b9a1926370e156de13571af Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Thu, 31 Jul 2025 10:22:43 -0500 Subject: [PATCH 0679/1435] Fix XML escaping for channel ID in generate_dummy_epg function --- apps/output/views.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/apps/output/views.py b/apps/output/views.py index 8d58a1b3..3fcd512b 100644 --- a/apps/output/views.py +++ b/apps/output/views.py @@ -266,7 +266,7 @@ def generate_dummy_epg( # Create program entry with escaped channel name xml_lines.append( - f' ' + f' ' ) xml_lines.append(f" {html.escape(program['title'])}") xml_lines.append(f" {html.escape(program['description'])}") From 5a887cc55ab6cabe58bdda7a15bccc66b58a84d8 Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Thu, 31 Jul 2025 13:54:20 -0500 Subject: [PATCH 0680/1435] Bump Postgres to version 17. 
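
Existing installs will have a /data directory initialized by Postgres 14, so
bumping the bundled server needs a migration path; the follow-up commits
detect the mismatch from the data directory's PG_VERSION file and run
pg_upgrade. The detection amounts to this (a Python restatement of the shell
check those commits add; data_dir_needs_upgrade is an illustrative name):

    from pathlib import Path

    def data_dir_needs_upgrade(postgres_dir, installed_version):
        """True when the data dir was created by a different major version."""
        version_file = Path(postgres_dir) / "PG_VERSION"
        if not version_file.is_file():
            return False  # empty/new data dir: initdb will run instead
        current = version_file.read_text().strip()
        return bool(current) and current != installed_version
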
--- docker/DispatcharrBase | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docker/DispatcharrBase b/docker/DispatcharrBase index 4360ced3..957c8573 100644 --- a/docker/DispatcharrBase +++ b/docker/DispatcharrBase @@ -32,11 +32,11 @@ RUN curl -fsSL https://packages.redis.io/gpg | gpg --dearmor -o /usr/share/keyri apt-get update && apt-get install -y redis-server && \ apt-get clean && rm -rf /var/lib/apt/lists/* -# --- Set up PostgreSQL 14.x --- +# --- Set up PostgreSQL 17.x --- RUN curl -fsSL https://www.postgresql.org/media/keys/ACCC4CF8.asc | gpg --dearmor -o /usr/share/keyrings/postgresql-keyring.gpg && \ echo "deb [signed-by=/usr/share/keyrings/postgresql-keyring.gpg] http://apt.postgresql.org/pub/repos/apt $(lsb_release -cs)-pgdg main" | \ tee /etc/apt/sources.list.d/pgdg.list && \ - apt-get update && apt-get install -y postgresql-14 postgresql-contrib-14 && \ + apt-get update && apt-get install -y postgresql-17 postgresql-contrib-17 && \ apt-get clean && rm -rf /var/lib/apt/lists/* # Create render group for hardware acceleration support with GID 109 From 826c824084c58d104dbcc642634882f289936b41 Mon Sep 17 00:00:00 2001 From: SergeantPanda <61642231+SergeantPanda@users.noreply.github.com> Date: Thu, 31 Jul 2025 14:03:37 -0500 Subject: [PATCH 0681/1435] Bump Postgres to version 17 --- docker/DispatcharrBase | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/docker/DispatcharrBase b/docker/DispatcharrBase index 4360ced3..574284f4 100644 --- a/docker/DispatcharrBase +++ b/docker/DispatcharrBase @@ -32,14 +32,14 @@ RUN curl -fsSL https://packages.redis.io/gpg | gpg --dearmor -o /usr/share/keyri apt-get update && apt-get install -y redis-server && \ apt-get clean && rm -rf /var/lib/apt/lists/* -# --- Set up PostgreSQL 14.x --- +# --- Set up PostgreSQL 17.x --- RUN curl -fsSL https://www.postgresql.org/media/keys/ACCC4CF8.asc | gpg --dearmor -o /usr/share/keyrings/postgresql-keyring.gpg && \ echo "deb [signed-by=/usr/share/keyrings/postgresql-keyring.gpg] http://apt.postgresql.org/pub/repos/apt $(lsb_release -cs)-pgdg main" | \ tee /etc/apt/sources.list.d/pgdg.list && \ - apt-get update && apt-get install -y postgresql-14 postgresql-contrib-14 && \ + apt-get update && apt-get install -y postgresql-17 postgresql-contrib-17 && \ apt-get clean && rm -rf /var/lib/apt/lists/* # Create render group for hardware acceleration support with GID 109 RUN groupadd -r -g 109 render || true -ENTRYPOINT ["/app/docker/entrypoint.sh"] \ No newline at end of file +ENTRYPOINT ["/app/docker/entrypoint.sh"] From 108a99264333a2135cbd84b0cba101903b01c264 Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Thu, 31 Jul 2025 14:53:55 -0500 Subject: [PATCH 0682/1435] Detect mismatched Postgres version and automatically run pg_upgrade --- docker/entrypoint.sh | 9 ++--- docker/init/02-postgres.sh | 67 ++++++++++++++++++++++++++++++++++---- 2 files changed, 66 insertions(+), 10 deletions(-) diff --git a/docker/entrypoint.sh b/docker/entrypoint.sh index 412cf808..8d204a5b 100755 --- a/docker/entrypoint.sh +++ b/docker/entrypoint.sh @@ -33,7 +33,8 @@ export POSTGRES_USER=${POSTGRES_USER:-dispatch} export POSTGRES_PASSWORD=${POSTGRES_PASSWORD:-secret} export POSTGRES_HOST=${POSTGRES_HOST:-localhost} export POSTGRES_PORT=${POSTGRES_PORT:-5432} - +export PG_VERSION=$(ls /usr/lib/postgresql/ | sort -V | tail -n 1) +export PG_BINDIR="/usr/lib/postgresql/${PG_VERSION}/bin" export REDIS_HOST=${REDIS_HOST:-localhost} export REDIS_DB=${REDIS_DB:-0} export 
DISPATCHARR_PORT=${DISPATCHARR_PORT:-9191} @@ -107,13 +108,13 @@ echo "Starting init process..." # Start PostgreSQL echo "Starting Postgres..." -su - postgres -c "/usr/lib/postgresql/14/bin/pg_ctl -D ${POSTGRES_DIR} start -w -t 300 -o '-c port=${POSTGRES_PORT}'" +su - postgres -c "$PG_BINDIR/pg_ctl -D ${POSTGRES_DIR} start -w -t 300 -o '-c port=${POSTGRES_PORT}'" # Wait for PostgreSQL to be ready -until su - postgres -c "/usr/lib/postgresql/14/bin/pg_isready -h ${POSTGRES_HOST} -p ${POSTGRES_PORT}" >/dev/null 2>&1; do +until su - postgres -c "$PG_BINDIR/pg_isready -h ${POSTGRES_HOST} -p ${POSTGRES_PORT}" >/dev/null 2>&1; do echo_with_timestamp "Waiting for PostgreSQL to be ready..." sleep 1 done -postgres_pid=$(su - postgres -c "/usr/lib/postgresql/14/bin/pg_ctl -D ${POSTGRES_DIR} status" | sed -n 's/.*PID: \([0-9]\+\).*/\1/p') +postgres_pid=$(su - postgres -c "$PG_BINDIR/pg_ctl -D ${POSTGRES_DIR} status" | sed -n 's/.*PID: \([0-9]\+\).*/\1/p') echo "✅ Postgres started with PID $postgres_pid" pids+=("$postgres_pid") diff --git a/docker/init/02-postgres.sh b/docker/init/02-postgres.sh index 69a81dd4..aebce1a4 100644 --- a/docker/init/02-postgres.sh +++ b/docker/init/02-postgres.sh @@ -27,6 +27,61 @@ if [ -e "/data/postgresql.conf" ]; then echo "Migration completed successfully." fi +PG_VERSION_FILE="${POSTGRES_DIR}/PG_VERSION" + +# Detect current version from data directory, if present +if [ -f "$PG_VERSION_FILE" ]; then + CURRENT_VERSION=$(cat "$PG_VERSION_FILE") +else + CURRENT_VERSION="" +fi + +# Set binary paths for upgrade if needed +OLD_PG_VERSION="$CURRENT_VERSION" +OLD_BINDIR="/usr/lib/postgresql/${OLD_PG_VERSION}/bin" +NEW_BINDIR="/usr/lib/postgresql/${PG_VERSION}/bin" + +# Only run upgrade if current version is set and not the target +PG_INSTALLED_BY_SCRIPT=0 + +if [ -n "$CURRENT_VERSION" ] && [ "$CURRENT_VERSION" != "$PG_VERSION" ]; then + echo "Detected PostgreSQL data directory version $CURRENT_VERSION, upgrading to $PG_VERSION..." + if [ ! -d "$OLD_BINDIR" ]; then + echo "PostgreSQL binaries for version $CURRENT_VERSION not found. Installing..." + apt update && apt install -y postgresql-$CURRENT_VERSION postgresql-contrib-$CURRENT_VERSION + if [ $? -ne 0 ]; then + echo "Failed to install PostgreSQL version $CURRENT_VERSION. Exiting." + exit 1 + fi + PG_INSTALLED_BY_SCRIPT=1 + fi + + # Prepare new data directory + NEW_POSTGRES_DIR="${POSTGRES_DIR}_$PG_VERSION" + mkdir -p "$NEW_POSTGRES_DIR" + chown -R postgres:postgres "$NEW_POSTGRES_DIR" + chmod 700 "$NEW_POSTGRES_DIR" + + # Initialize new data directory + su - postgres -c "$NEW_BINDIR/initdb -D $NEW_POSTGRES_DIR" + + # Run pg_upgrade + su - postgres -c "$NEW_BINDIR/pg_upgrade -b $OLD_BINDIR -B $NEW_BINDIR -d $POSTGRES_DIR -D $NEW_POSTGRES_DIR" + + # Move old data directory for backup, move new into place + mv "$POSTGRES_DIR" "${POSTGRES_DIR}_backup_${CURRENT_VERSION}_$(date +%s)" + mv "$NEW_POSTGRES_DIR" "$POSTGRES_DIR" + + echo "Upgrade complete. Old data directory backed up." + + # Uninstall PostgreSQL if we installed it just for upgrade + if [ "$PG_INSTALLED_BY_SCRIPT" -eq 1 ]; then + echo "Uninstalling temporary PostgreSQL $CURRENT_VERSION packages..." + apt remove -y postgresql-$CURRENT_VERSION postgresql-contrib-$CURRENT_VERSION + apt autoremove -y + fi +fi + # Initialize PostgreSQL database if [ -z "$(ls -A $POSTGRES_DIR)" ]; then echo "Initializing PostgreSQL database..." 
@@ -35,21 +90,21 @@ if [ -z "$(ls -A $POSTGRES_DIR)" ]; then chmod 700 $POSTGRES_DIR # Initialize PostgreSQL - su - postgres -c "/usr/lib/postgresql/14/bin/initdb -D ${POSTGRES_DIR}" + su - postgres -c "$PG_BINDIR/initdb -D ${POSTGRES_DIR}" # Configure PostgreSQL echo "host all all 0.0.0.0/0 md5" >> "${POSTGRES_DIR}/pg_hba.conf" echo "listen_addresses='*'" >> "${POSTGRES_DIR}/postgresql.conf" # Start PostgreSQL echo "Starting Postgres..." - su - postgres -c "/usr/lib/postgresql/14/bin/pg_ctl -D ${POSTGRES_DIR} start -w -t 300 -o '-c port=${POSTGRES_PORT}'" + su - postgres -c "$PG_BINDIR/pg_ctl -D ${POSTGRES_DIR} start -w -t 300 -o '-c port=${POSTGRES_PORT}'" # Wait for PostgreSQL to be ready - until su - postgres -c "/usr/lib/postgresql/14/bin/pg_isready -h ${POSTGRES_HOST} -p ${POSTGRES_PORT}" >/dev/null 2>&1; do + until su - postgres -c "$PG_BINDIR/pg_isready -h ${POSTGRES_HOST} -p ${POSTGRES_PORT}" >/dev/null 2>&1; do echo "Waiting for PostgreSQL to be ready..." sleep 1 done - postgres_pid=$(su - postgres -c "/usr/lib/postgresql/14/bin/pg_ctl -D ${POSTGRES_DIR} status" | sed -n 's/.*PID: \([0-9]\+\).*/\1/p') + postgres_pid=$(su - postgres -c "$PG_BINDIR/pg_ctl -D ${POSTGRES_DIR} status" | sed -n 's/.*PID: \([0-9]\+\).*/\1/p') # Setup database if needed if ! su - postgres -c "psql -p ${POSTGRES_PORT} -tAc \"SELECT 1 FROM pg_database WHERE datname = '$POSTGRES_DB';\"" | grep -q 1; then @@ -69,8 +124,8 @@ END \$\$; EOF echo "Setting PostgreSQL user privileges..." - su postgres -c "/usr/lib/postgresql/14/bin/psql -p ${POSTGRES_PORT} -c \"ALTER DATABASE ${POSTGRES_DB} OWNER TO $POSTGRES_USER;\"" - su postgres -c "/usr/lib/postgresql/14/bin/psql -p ${POSTGRES_PORT} -c \"GRANT ALL PRIVILEGES ON DATABASE ${POSTGRES_DB} TO $POSTGRES_USER;\"" + su postgres -c "$PG_BINDIR/psql -p ${POSTGRES_PORT} -c \"ALTER DATABASE ${POSTGRES_DB} OWNER TO $POSTGRES_USER;\"" + su postgres -c "$PG_BINDIR/psql -p ${POSTGRES_PORT} -c \"GRANT ALL PRIVILEGES ON DATABASE ${POSTGRES_DB} TO $POSTGRES_USER;\"" # Finished setting up PosgresSQL database echo "PostgreSQL database setup complete." fi From 406ac37fb97c9803e8cc3778425d11c75db53e79 Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Thu, 31 Jul 2025 15:01:28 -0500 Subject: [PATCH 0683/1435] Delete temp folder if it exists during upgrade. --- docker/init/02-postgres.sh | 21 +++++++++++++-------- 1 file changed, 13 insertions(+), 8 deletions(-) diff --git a/docker/init/02-postgres.sh b/docker/init/02-postgres.sh index aebce1a4..4deb921d 100644 --- a/docker/init/02-postgres.sh +++ b/docker/init/02-postgres.sh @@ -36,16 +36,13 @@ else CURRENT_VERSION="" fi -# Set binary paths for upgrade if needed -OLD_PG_VERSION="$CURRENT_VERSION" -OLD_BINDIR="/usr/lib/postgresql/${OLD_PG_VERSION}/bin" -NEW_BINDIR="/usr/lib/postgresql/${PG_VERSION}/bin" - # Only run upgrade if current version is set and not the target -PG_INSTALLED_BY_SCRIPT=0 - if [ -n "$CURRENT_VERSION" ] && [ "$CURRENT_VERSION" != "$PG_VERSION" ]; then echo "Detected PostgreSQL data directory version $CURRENT_VERSION, upgrading to $PG_VERSION..." + # Set binary paths for upgrade if needed + OLD_BINDIR="/usr/lib/postgresql/${CURRENT_VERSION}/bin" + NEW_BINDIR="/usr/lib/postgresql/${PG_VERSION}/bin" + PG_INSTALLED_BY_SCRIPT=0 if [ ! -d "$OLD_BINDIR" ]; then echo "PostgreSQL binaries for version $CURRENT_VERSION not found. Installing..." 
apt update && apt install -y postgresql-$CURRENT_VERSION postgresql-contrib-$CURRENT_VERSION @@ -58,13 +55,21 @@ if [ -n "$CURRENT_VERSION" ] && [ "$CURRENT_VERSION" != "$PG_VERSION" ]; then # Prepare new data directory NEW_POSTGRES_DIR="${POSTGRES_DIR}_$PG_VERSION" + + # Remove new data directory if it already exists (from a failed/partial upgrade) + if [ -d "$NEW_POSTGRES_DIR" ]; then + echo "Warning: $NEW_POSTGRES_DIR already exists. Removing it to avoid upgrade issues." + rm -rf "$NEW_POSTGRES_DIR" + fi + mkdir -p "$NEW_POSTGRES_DIR" chown -R postgres:postgres "$NEW_POSTGRES_DIR" chmod 700 "$NEW_POSTGRES_DIR" # Initialize new data directory + echo "Initializing new PostgreSQL data directory at $NEW_POSTGRES_DIR..." su - postgres -c "$NEW_BINDIR/initdb -D $NEW_POSTGRES_DIR" - + echo "Running pg_upgrade from $OLD_BINDIR to $NEW_BINDIR..." # Run pg_upgrade su - postgres -c "$NEW_BINDIR/pg_upgrade -b $OLD_BINDIR -B $NEW_BINDIR -d $POSTGRES_DIR -D $NEW_POSTGRES_DIR" From a9aac72a60439c7af701acc181913c254311df8f Mon Sep 17 00:00:00 2001 From: deku-m <37069737+deku-m@users.noreply.github.com> Date: Thu, 31 Jul 2025 22:14:35 +0200 Subject: [PATCH 0684/1435] Update debian_install.sh --- debian_install.sh | 450 +++++++++++++++++++++++----------------------- 1 file changed, 227 insertions(+), 223 deletions(-) diff --git a/debian_install.sh b/debian_install.sh index 0e41139e..3a97caed 100644 --- a/debian_install.sh +++ b/debian_install.sh @@ -1,208 +1,217 @@ #!/usr/bin/env bash -set -e +set -euo pipefail +IFS=$'\n\t' + +# Root check +if [[ $EUID -ne 0 ]]; then + echo "[ERROR] This script must be run as root." >&2 + exit 1 +fi + +trap 'echo -e "\n[ERROR] Line $LINENO failed. Exiting." >&2; exit 1' ERR ############################################################################## # 0) Warning / Disclaimer ############################################################################## -echo "**************************************************************" -echo "WARNING: While we do not anticipate any problems, we disclaim all" -echo "responsibility for anything that happens to your machine." -echo "" -echo "This script is intended for **Debian-based operating systems only**." -echo "Running it on other distributions WILL cause unexpected issues." -echo "" -echo "This script is **NOT RECOMMENDED** for use on your primary machine." -echo "For safety and best results, we strongly advise running this inside a" -echo "clean virtual machine (VM) or LXC container environment." -echo "" -echo "Additionally, there is NO SUPPORT for this method; Docker is the only" -echo "officially supported way to run Dispatcharr." -echo "**************************************************************" -echo "" -echo "If you wish to proceed, type \"I understand\" and press Enter." -read user_input - -if [ "$user_input" != "I understand" ]; then - echo "Exiting script..." - exit 1 -fi - +show_disclaimer() { + echo "**************************************************************" + echo "WARNING: While we do not anticipate any problems, we disclaim all" + echo "responsibility for anything that happens to your machine." + echo "" + echo "This script is intended for **Debian-based operating systems only**." + echo "Running it on other distributions WILL cause unexpected issues." + echo "" + echo "This script is **NOT RECOMMENDED** for use on your primary machine." + echo "For safety and best results, we strongly advise running this inside a" + echo "clean virtual machine (VM) or LXC container environment." 
+ echo "" + echo "Additionally, there is NO SUPPORT for this method; Docker is the only" + echo "officially supported way to run Dispatcharr." + echo "**************************************************************" + echo "" + echo "If you wish to proceed, type \"I understand\" and press Enter." + read user_input + if [ "$user_input" != "I understand" ]; then + echo "Exiting script..." + exit 1 + fi +} ############################################################################## # 1) Configuration ############################################################################## -# Linux user/group under which Dispatcharr processes will run -DISPATCH_USER="dispatcharr" -DISPATCH_GROUP="dispatcharr" - -# Where Dispatcharr source code should live -APP_DIR="/opt/dispatcharr" - -# Git branch to clone (e.g., "main" or "dev") -DISPATCH_BRANCH="dev" - -# PostgreSQL settings -POSTGRES_DB="dispatcharr" -POSTGRES_USER="dispatch" -POSTGRES_PASSWORD="secret" - -# The port on which Nginx will listen for HTTP -NGINX_HTTP_PORT="9191" - -# The TCP port for Daphné (Django Channels) -WEBSOCKET_PORT="8001" - -# Directory inside /run/ for our socket; full path becomes /run/dispatcharr/dispatcharr.sock -GUNICORN_RUNTIME_DIR="dispatcharr" -GUNICORN_SOCKET="/run/${GUNICORN_RUNTIME_DIR}/dispatcharr.sock" +configure_variables() { + DISPATCH_USER="dispatcharr" + DISPATCH_GROUP="dispatcharr" + APP_DIR="/opt/dispatcharr" + DISPATCH_BRANCH="main" + POSTGRES_DB="dispatcharr" + POSTGRES_USER="dispatch" + POSTGRES_PASSWORD="secret" + NGINX_HTTP_PORT="9191" + WEBSOCKET_PORT="8001" + GUNICORN_RUNTIME_DIR="dispatcharr" + GUNICORN_SOCKET="/run/${GUNICORN_RUNTIME_DIR}/dispatcharr.sock" + PYTHON_BIN=$(command -v python3) + SYSTEMD_DIR="/etc/systemd/system" + NGINX_SITE="/etc/nginx/sites-available/dispatcharr" +} ############################################################################## # 2) Install System Packages ############################################################################## -echo ">>> Installing system packages..." -apt-get update -apt-get install -y \ - git \ - curl \ - wget \ - build-essential \ - gcc \ - libpcre3-dev \ - libpq-dev \ - python3-dev \ - python3-venv \ - python3-pip \ - nginx \ - redis-server \ - postgresql \ - postgresql-contrib \ - ffmpeg \ - procps \ - streamlink +install_packages() { + echo ">>> Installing system packages..." + apt-get update + declare -a packages=( + git curl wget build-essential gcc libpcre3-dev libpq-dev + python3-dev python3-venv python3-pip nginx redis-server + postgresql postgresql-contrib ffmpeg procps streamlink + ) + apt-get install -y --no-install-recommends "${packages[@]}" -# Node.js setup (v23.x from NodeSource) - adjust version if needed -if ! command -v node >/dev/null 2>&1; then - echo ">>> Installing Node.js..." - curl -sL https://deb.nodesource.com/setup_23.x | bash - - apt-get install -y nodejs -fi + if ! command -v node >/dev/null 2>&1; then + echo ">>> Installing Node.js..." + curl -sL https://deb.nodesource.com/setup_23.x | bash - + apt-get install -y nodejs + fi -# Start & enable PostgreSQL and Redis -systemctl enable postgresql redis-server -systemctl start postgresql redis-server + systemctl enable --now postgresql redis-server +} ############################################################################## -# 3) Create Dispatcharr User/Group +# 3) Create User/Group ############################################################################## -if ! 
getent group "${DISPATCH_GROUP}" >/dev/null; then - echo ">>> Creating group: ${DISPATCH_GROUP}" - groupadd "${DISPATCH_GROUP}" -fi - -if ! id -u "${DISPATCH_USER}" >/dev/null; then - echo ">>> Creating user: ${DISPATCH_USER}" - useradd -m -g "${DISPATCH_GROUP}" -s /bin/bash "${DISPATCH_USER}" -fi +create_dispatcharr_user() { + if ! getent group "$DISPATCH_GROUP" >/dev/null; then + groupadd "$DISPATCH_GROUP" + fi + if ! id -u "$DISPATCH_USER" >/dev/null; then + useradd -m -g "$DISPATCH_GROUP" -s /bin/bash "$DISPATCH_USER" + fi +} ############################################################################## -# 4) Configure PostgreSQL Database +# 4) PostgreSQL Setup ############################################################################## -echo ">>> Configuring PostgreSQL..." -su - postgres -c "psql -tc \"SELECT 1 FROM pg_database WHERE datname='${POSTGRES_DB}'\"" | grep -q 1 || \ - su - postgres -c "psql -c \"CREATE DATABASE ${POSTGRES_DB};\"" +setup_postgresql() { + echo ">>> Checking PostgreSQL database and user..." -su - postgres -c "psql -tc \"SELECT 1 FROM pg_roles WHERE rolname='${POSTGRES_USER}'\"" | grep -q 1 || \ - su - postgres -c "psql -c \"CREATE USER ${POSTGRES_USER} WITH PASSWORD '${POSTGRES_PASSWORD}';\"" + db_exists=$(sudo -u postgres psql -tAc "SELECT 1 FROM pg_database WHERE datname='$POSTGRES_DB'") + if [[ "$db_exists" != "1" ]]; then + echo ">>> Creating database '${POSTGRES_DB}'..." + sudo -u postgres createdb "$POSTGRES_DB" + else + echo ">>> Database '${POSTGRES_DB}' already exists, skipping creation." + fi -su - postgres -c "psql -c \"GRANT ALL PRIVILEGES ON DATABASE ${POSTGRES_DB} TO ${POSTGRES_USER};\"" -su - postgres -c "psql -c \"ALTER DATABASE ${POSTGRES_DB} OWNER TO ${POSTGRES_USER};\"" -su - postgres -c "psql -d ${POSTGRES_DB} -c \"ALTER SCHEMA public OWNER TO ${POSTGRES_USER};\"" + user_exists=$(sudo -u postgres psql -tAc "SELECT 1 FROM pg_roles WHERE rolname='$POSTGRES_USER'") + if [[ "$user_exists" != "1" ]]; then + echo ">>> Creating user '${POSTGRES_USER}'..." + sudo -u postgres psql -c "CREATE USER $POSTGRES_USER WITH PASSWORD '$POSTGRES_PASSWORD';" + else + echo ">>> User '${POSTGRES_USER}' already exists, skipping creation." + fi + + echo ">>> Granting privileges..." + sudo -u postgres psql -c "GRANT ALL PRIVILEGES ON DATABASE $POSTGRES_DB TO $POSTGRES_USER;" + sudo -u postgres psql -c "ALTER DATABASE $POSTGRES_DB OWNER TO $POSTGRES_USER;" + sudo -u postgres psql -d "$POSTGRES_DB" -c "ALTER SCHEMA public OWNER TO $POSTGRES_USER;" +} ############################################################################## -# 5) Clone or Update Dispatcharr Code +# 5) Clone Dispatcharr Repository ############################################################################## -echo ">>> Installing or updating Dispatcharr in ${APP_DIR} ..." +clone_dispatcharr_repo() { + echo ">>> Installing or updating Dispatcharr in ${APP_DIR} ..." + + if [ ! -d "$APP_DIR" ]; then + mkdir -p "$APP_DIR" + chown "$DISPATCH_USER:$DISPATCH_GROUP" "$APP_DIR" + fi -if [ ! -d "${APP_DIR}" ]; then - echo ">>> Cloning repository for the first time..." - mkdir -p "${APP_DIR}" - chown "${DISPATCH_USER}:${DISPATCH_GROUP}" "${APP_DIR}" - su - "${DISPATCH_USER}" -c "git clone -b ${DISPATCH_BRANCH} https://github.com/Dispatcharr/Dispatcharr.git ${APP_DIR}" -else - echo ">>> Updating existing repository..." - su - "${DISPATCH_USER}" <>> Updating existing Dispatcharr repo..." + su - "$DISPATCH_USER" <>> Cloning Dispatcharr repo into ${APP_DIR}..." 
+ rm -rf "$APP_DIR"/* + chown "$DISPATCH_USER:$DISPATCH_GROUP" "$APP_DIR" + su - "$DISPATCH_USER" -c "git clone -b $DISPATCH_BRANCH https://github.com/Dispatcharr/Dispatcharr.git $APP_DIR" + fi +} ############################################################################## -# 6) Create Python Virtual Environment & Install Python Dependencies +# 6) Setup Python Environment ############################################################################## -echo ">>> Setting up Python virtual environment..." -su - "${DISPATCH_USER}" <>> Setting up Python virtual environment..." + su - "$DISPATCH_USER" <>> Linking ffmpeg into the virtual environment..." -ln -sf /usr/bin/ffmpeg ${APP_DIR}/env/bin/ffmpeg + ln -sf /usr/bin/ffmpeg "$APP_DIR/env/bin/ffmpeg" +} ############################################################################## -# 7) Build Frontend (React) +# 7) Build Frontend ############################################################################## -echo ">>> Building frontend..." -su - "${DISPATCH_USER}" <>> Building frontend..." + su - "$DISPATCH_USER" <>> Running Django migrations & collectstatic..." -su - "${DISPATCH_USER}" <>> Running Django migrations & collectstatic..." + su - "$DISPATCH_USER" </etc/systemd/system/dispatcharr.service +configure_services() { + echo ">>> Creating systemd service files..." + + # Gunicorn + cat <${SYSTEMD_DIR}/dispatcharr.service [Unit] Description=Gunicorn for Dispatcharr After=network.target postgresql.service redis-server.service @@ -211,36 +220,31 @@ After=network.target postgresql.service redis-server.service User=${DISPATCH_USER} Group=${DISPATCH_GROUP} WorkingDirectory=${APP_DIR} - RuntimeDirectory=${GUNICORN_RUNTIME_DIR} RuntimeDirectoryMode=0775 - -# Update PATH to include both the virtualenv and system binaries (for ffmpeg) Environment="PATH=${APP_DIR}/env/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin" Environment="POSTGRES_DB=${POSTGRES_DB}" Environment="POSTGRES_USER=${POSTGRES_USER}" Environment="POSTGRES_PASSWORD=${POSTGRES_PASSWORD}" Environment="POSTGRES_HOST=localhost" - +ExecStartPre=/usr/bin/bash -c 'until pg_isready -h localhost -U ${POSTGRES_USER}; do sleep 1; done' ExecStart=${APP_DIR}/env/bin/gunicorn \\ --workers=4 \\ --worker-class=gevent \\ --timeout=300 \\ --bind unix:${GUNICORN_SOCKET} \\ dispatcharr.wsgi:application - Restart=always KillMode=mixed - +SyslogIdentifier=dispatcharr +StandardOutput=journal +StandardError=journal [Install] WantedBy=multi-user.target EOF -############################################################################## -# 10) Create Systemd Service for Celery -############################################################################## - -cat </etc/systemd/system/dispatcharr-celery.service + # Celery + cat <${SYSTEMD_DIR}/dispatcharr-celery.service [Unit] Description=Celery Worker for Dispatcharr After=network.target redis-server.service @@ -256,21 +260,18 @@ Environment="POSTGRES_USER=${POSTGRES_USER}" Environment="POSTGRES_PASSWORD=${POSTGRES_PASSWORD}" Environment="POSTGRES_HOST=localhost" Environment="CELERY_BROKER_URL=redis://localhost:6379/0" - ExecStart=${APP_DIR}/env/bin/celery -A dispatcharr worker -l info - Restart=always KillMode=mixed - +SyslogIdentifier=dispatcharr-celery +StandardOutput=journal +StandardError=journal [Install] WantedBy=multi-user.target EOF -############################################################################## -# 11) Create Systemd Service for Celery Beat (Optional) -############################################################################## - 
-cat </etc/systemd/system/dispatcharr-celerybeat.service + # Celery Beat + cat <${SYSTEMD_DIR}/dispatcharr-celerybeat.service [Unit] Description=Celery Beat Scheduler for Dispatcharr After=network.target redis-server.service @@ -286,23 +287,20 @@ Environment="POSTGRES_USER=${POSTGRES_USER}" Environment="POSTGRES_PASSWORD=${POSTGRES_PASSWORD}" Environment="POSTGRES_HOST=localhost" Environment="CELERY_BROKER_URL=redis://localhost:6379/0" - ExecStart=${APP_DIR}/env/bin/celery -A dispatcharr beat -l info - Restart=always KillMode=mixed - +SyslogIdentifier=dispatcharr-celerybeat +StandardOutput=journal +StandardError=journal [Install] WantedBy=multi-user.target EOF -############################################################################## -# 12) Create Systemd Service for Daphné (WebSockets / Channels) -############################################################################## - -cat </etc/systemd/system/dispatcharr-daphne.service + # Daphne + cat <${SYSTEMD_DIR}/dispatcharr-daphne.service [Unit] -Description=Daphne for Dispatcharr (ASGI) +Description=Daphne for Dispatcharr (ASGI/WebSockets) After=network.target Requires=dispatcharr.service @@ -315,47 +313,33 @@ Environment="POSTGRES_DB=${POSTGRES_DB}" Environment="POSTGRES_USER=${POSTGRES_USER}" Environment="POSTGRES_PASSWORD=${POSTGRES_PASSWORD}" Environment="POSTGRES_HOST=localhost" - ExecStart=${APP_DIR}/env/bin/daphne -b 0.0.0.0 -p ${WEBSOCKET_PORT} dispatcharr.asgi:application - Restart=always KillMode=mixed - +SyslogIdentifier=dispatcharr-daphne +StandardOutput=journal +StandardError=journal [Install] WantedBy=multi-user.target EOF -############################################################################## -# 13) Configure Nginx -############################################################################## - -echo ">>> Configuring Nginx at /etc/nginx/sites-available/dispatcharr.conf ..." -cat </etc/nginx/sites-available/dispatcharr.conf + echo ">>> Creating Nginx config..." + cat </etc/nginx/sites-available/dispatcharr.conf server { listen ${NGINX_HTTP_PORT}; - - # Proxy to Gunicorn socket for main HTTP traffic location / { include proxy_params; proxy_pass http://unix:${GUNICORN_SOCKET}; } - - # Serve Django static files location /static/ { alias ${APP_DIR}/static/; } - - # Serve React build assets location /assets/ { alias ${APP_DIR}/frontend/dist/assets/; } - - # Serve media files if any location /media/ { alias ${APP_DIR}/media/; } - - # WebSockets for Daphné location /ws/ { proxy_pass http://127.0.0.1:${WEBSOCKET_PORT}; proxy_http_version 1.1; @@ -368,46 +352,66 @@ server { } EOF -ln -sf /etc/nginx/sites-available/dispatcharr.conf /etc/nginx/sites-enabled/dispatcharr.conf - -# Remove default site if it exists -if [ -f /etc/nginx/sites-enabled/default ]; then - rm -f /etc/nginx/sites-enabled/default -fi - -echo ">>> Testing Nginx config..." -nginx -t - -echo ">>> Restarting Nginx..." -systemctl restart nginx -systemctl enable nginx + ln -sf /etc/nginx/sites-available/dispatcharr.conf /etc/nginx/sites-enabled/dispatcharr.conf + [ -f /etc/nginx/sites-enabled/default ] && rm /etc/nginx/sites-enabled/default + nginx -t + systemctl restart nginx + systemctl enable nginx +} ############################################################################## -# 14) Start & Enable Services +# 10) Start Services ############################################################################## -echo ">>> Enabling systemd services..." 
-systemctl daemon-reload -systemctl enable dispatcharr -systemctl enable dispatcharr-celery -systemctl enable dispatcharr-celerybeat -systemctl enable dispatcharr-daphne - -echo ">>> Restarting / Starting services..." -systemctl restart dispatcharr -systemctl restart dispatcharr-celery -systemctl restart dispatcharr-celerybeat -systemctl restart dispatcharr-daphne +start_services() { + echo ">>> Enabling and starting services..." + systemctl daemon-reexec + systemctl daemon-reload + systemctl enable --now dispatcharr dispatcharr-celery dispatcharr-celerybeat dispatcharr-daphne +} ############################################################################## -# Done! +# 11) Summary ############################################################################## -echo "=================================================" -echo "Dispatcharr installation (or update) complete!" -echo "Nginx is listening on port ${NGINX_HTTP_PORT}." -echo "Gunicorn socket: ${GUNICORN_SOCKET}." -echo "WebSockets on port ${WEBSOCKET_PORT} (path /ws/)." -echo "You can check logs via 'sudo journalctl -u dispatcharr -f', etc." -echo "Visit http://:${NGINX_HTTP_PORT} in your browser." -echo "=================================================" +show_summary() { + server_ip=$(ip route get 1 | awk '{print $7; exit}') + cat < Date: Thu, 31 Jul 2025 15:54:24 -0500 Subject: [PATCH 0685/1435] Add ability to preview streams under a channel. --- .../components/tables/ChannelTableStreams.jsx | 66 +++++++++++++------ 1 file changed, 45 insertions(+), 21 deletions(-) diff --git a/frontend/src/components/tables/ChannelTableStreams.jsx b/frontend/src/components/tables/ChannelTableStreams.jsx index 11fa9600..991b7074 100644 --- a/frontend/src/components/tables/ChannelTableStreams.jsx +++ b/frontend/src/components/tables/ChannelTableStreams.jsx @@ -1,7 +1,7 @@ import React, { useMemo, useState, useEffect } from 'react'; import API from '../../api'; import { copyToClipboard } from '../../utils'; -import { GripHorizontal, SquareMinus, ChevronDown, ChevronRight } from 'lucide-react'; +import { GripHorizontal, SquareMinus, ChevronDown, ChevronRight, Eye } from 'lucide-react'; import { Box, ActionIcon, @@ -14,6 +14,7 @@ import { Tooltip, Collapse, Button, + } from '@mantine/core'; import { notifications } from '@mantine/notifications'; import { @@ -24,6 +25,8 @@ import { import './table.css'; import useChannelsTableStore from '../../store/channelsTable'; import usePlaylistsStore from '../../store/playlists'; +import useVideoStore from '../../store/useVideoStore'; +import useSettingsStore from '../../store/settings'; import { DndContext, KeyboardSensor, @@ -130,6 +133,15 @@ const ChannelStreams = ({ channel, isExpanded }) => { ); const playlists = usePlaylistsStore((s) => s.playlists); const authUser = useAuthStore((s) => s.user); + const showVideo = useVideoStore((s) => s.showVideo); + const env_mode = useSettingsStore((s) => s.environment.env_mode); + function handleWatchStream(streamHash) { + let vidUrl = `/proxy/ts/stream/${streamHash}`; + if (env_mode === 'dev') { + vidUrl = `${window.location.protocol}//${window.location.hostname}:5656${vidUrl}`; + } + showVideo(vidUrl); + } const [data, setData] = useState(channelStreams || []); @@ -314,25 +326,38 @@ const ChannelStreams = ({ channel, isExpanded }) => { )} {stream.url && ( - - { - e.stopPropagation(); - const success = await copyToClipboard(stream.url); - notifications.show({ - title: success ? 'URL Copied' : 'Copy Failed', - message: success ? 
'Stream URL copied to clipboard' : 'Failed to copy URL to clipboard', - color: success ? 'green' : 'red', - }); - }} - > - URL - - + <> + + { + e.stopPropagation(); + const success = await copyToClipboard(stream.url); + notifications.show({ + title: success ? 'URL Copied' : 'Copy Failed', + message: success ? 'Stream URL copied to clipboard' : 'Failed to copy URL to clipboard', + color: success ? 'green' : 'red', + }); + }} + > + URL + + + + handleWatchStream(stream.stream_hash || stream.id)} + style={{ marginLeft: 2 }} + > + + + + )} @@ -563,5 +588,4 @@ const ChannelStreams = ({ channel, isExpanded }) => { ); }; - export default ChannelStreams; From 20651a8d590bd4e8f92f045e5b780265f95e2573 Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Thu, 31 Jul 2025 20:10:36 -0500 Subject: [PATCH 0686/1435] Update dependencies in requirements.txt for compatibility and improvements --- requirements.txt | 20 ++++++++++---------- 1 file changed, 10 insertions(+), 10 deletions(-) diff --git a/requirements.txt b/requirements.txt index f1526ceb..01a51342 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,33 +1,33 @@ -Django==5.1.6 +Django==5.2.4 psycopg2-binary==2.9.10 -redis==4.5.5 +redis==6.2.0 celery celery[redis] -djangorestframework==3.15.2 -requests==2.32.3 +djangorestframework==3.16.0 +requests==2.32.4 psutil==7.0.0 pillow drf-yasg>=1.20.0 streamlink python-vlc yt-dlp -gevent==24.11.1 +gevent==25.5.1 daphne uwsgi django-cors-headers djangorestframework-simplejwt m3u8 -rapidfuzz==3.12.1 +rapidfuzz==3.13.0 tzlocal # PyTorch dependencies (CPU only) --extra-index-url https://download.pytorch.org/whl/cpu/ -torch==2.6.0+cpu +torch==2.7.1+cpu # ML/NLP dependencies -sentence-transformers==3.4.1 +sentence-transformers==5.0.0 channels -channels-redis +channels-redis==4.3.0 django-filter django-celery-beat -lxml==5.4.0 +lxml==6.0.0 From 953db7947644ebf3789d71bd8fdc9f0e83decabb Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Thu, 31 Jul 2025 21:26:59 -0500 Subject: [PATCH 0687/1435] Display stream logo and name in channel card when previewing streams. --- frontend/src/pages/Stats.jsx | 26 ++++++++++++++++++++++---- 1 file changed, 22 insertions(+), 4 deletions(-) diff --git a/frontend/src/pages/Stats.jsx b/frontend/src/pages/Stats.jsx index c3709c17..19520a21 100644 --- a/frontend/src/pages/Stats.jsx +++ b/frontend/src/pages/Stats.jsx @@ -94,6 +94,7 @@ const ChannelCard = ({ const [activeStreamId, setActiveStreamId] = useState(null); const [currentM3UProfile, setCurrentM3UProfile] = useState(null); // Add state for current M3U profile const [data, setData] = useState([]); + const [previewedStream, setPreviewedStream] = useState(null); // Get M3U account data from the playlists store const m3uAccounts = usePlaylistsStore((s) => s.playlists); @@ -425,12 +426,29 @@ const ChannelCard = ({ // Get logo URL from the logos object if available const logoUrl = - channel.logo_id && logos && logos[channel.logo_id] + (channel.logo_id && logos && logos[channel.logo_id] ? 
logos[channel.logo_id].cache_url - : null; + : null) || + (previewedStream && previewedStream.logo_url) || + null; - // Ensure these values exist to prevent errors - const channelName = channel.name || 'Unnamed Channel'; + useEffect(() => { + let isMounted = true; + // Only fetch if we have a stream_id and NO channel.name + if (!channel.name && channel.stream_id) { + API.getStreamsByIds([channel.stream_id]).then((streams) => { + if (isMounted && streams && streams.length > 0) { + setPreviewedStream(streams[0]); + } + }); + } + return () => { isMounted = false; }; + }, [channel.name, channel.stream_id]); + + const channelName = + channel.name || + previewedStream?.name || + 'Unnamed Channel'; const uptime = channel.uptime || 0; const bitrates = channel.bitrates || []; const totalBytes = channel.total_bytes || 0; From 7b5a617bf829f91f26f7dafa1dbc0e1dfea7fa6a Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Fri, 1 Aug 2025 11:28:51 -0500 Subject: [PATCH 0688/1435] Use custom validator for urls fields to allow for non fqdn hostnames. Fixes #63 --- apps/channels/serializers.py | 11 +++++++++-- apps/epg/serializers.py | 7 +++++++ apps/m3u/serializers.py | 7 +++++++ core/utils.py | 32 ++++++++++++++++++++++++++++++++ 4 files changed, 55 insertions(+), 2 deletions(-) diff --git a/apps/channels/serializers.py b/apps/channels/serializers.py index 32fd4a74..7c5ddd54 100644 --- a/apps/channels/serializers.py +++ b/apps/channels/serializers.py @@ -16,6 +16,7 @@ from apps.epg.models import EPGData from django.urls import reverse from rest_framework import serializers from django.utils import timezone +from core.utils import validate_flexible_url class LogoSerializer(serializers.ModelSerializer): @@ -32,10 +33,10 @@ class LogoSerializer(serializers.ModelSerializer): """Validate that the URL is unique for creation or update""" if self.instance and self.instance.url == value: return value - + if Logo.objects.filter(url=value).exists(): raise serializers.ValidationError("A logo with this URL already exists.") - + return value def create(self, validated_data): @@ -79,6 +80,12 @@ class LogoSerializer(serializers.ModelSerializer): # Stream # class StreamSerializer(serializers.ModelSerializer): + url = serializers.CharField( + required=False, + allow_blank=True, + allow_null=True, + validators=[validate_flexible_url] + ) stream_profile_id = serializers.PrimaryKeyRelatedField( queryset=StreamProfile.objects.all(), source="stream_profile", diff --git a/apps/epg/serializers.py b/apps/epg/serializers.py index 09390237..2f97cebf 100644 --- a/apps/epg/serializers.py +++ b/apps/epg/serializers.py @@ -1,3 +1,4 @@ +from core.utils import validate_flexible_url from rest_framework import serializers from .models import EPGSource, EPGData, ProgramData from apps.channels.models import Channel @@ -5,6 +6,12 @@ from apps.channels.models import Channel class EPGSourceSerializer(serializers.ModelSerializer): epg_data_ids = serializers.SerializerMethodField() read_only_fields = ['created_at', 'updated_at'] + url = serializers.CharField( + required=False, + allow_blank=True, + allow_null=True, + validators=[validate_flexible_url] + ) class Meta: model = EPGSource diff --git a/apps/m3u/serializers.py b/apps/m3u/serializers.py index 7394f00b..a86227aa 100644 --- a/apps/m3u/serializers.py +++ b/apps/m3u/serializers.py @@ -1,3 +1,4 @@ +from core.utils import validate_flexible_url from rest_framework import serializers from rest_framework.response import Response from .models import M3UAccount, M3UFilter, ServerGroup, 
M3UAccountProfile @@ -76,6 +77,12 @@ class M3UAccountSerializer(serializers.ModelSerializer): channel_groups = ChannelGroupM3UAccountSerializer( source="channel_group", many=True, required=False ) + server_url = serializers.CharField( + required=False, + allow_blank=True, + allow_null=True, + validators=[validate_flexible_url], + ) class Meta: model = M3UAccount diff --git a/core/utils.py b/core/utils.py index 932af979..36ac5fef 100644 --- a/core/utils.py +++ b/core/utils.py @@ -9,6 +9,8 @@ from redis.exceptions import ConnectionError, TimeoutError from django.core.cache import cache from asgiref.sync import async_to_sync from channels.layers import get_channel_layer +from django.core.validators import URLValidator +from django.core.exceptions import ValidationError import gc logger = logging.getLogger(__name__) @@ -354,3 +356,33 @@ def is_protected_path(file_path): return True return False + +def validate_flexible_url(value): + """ + Custom URL validator that accepts URLs with hostnames that aren't FQDNs. + This allows URLs like "http://hostname/" which + Django's standard URLValidator rejects. + """ + if not value: + return # Allow empty values since the field is nullable + + # Create a standard Django URL validator + url_validator = URLValidator() + + try: + # First try the standard validation + url_validator(value) + except ValidationError as e: + # If standard validation fails, check if it's a non-FQDN hostname + import re + + # More flexible pattern for non-FQDN hostnames with paths + # Matches: http://hostname, http://hostname/, http://hostname:port/path/to/file.xml + non_fqdn_pattern = r'^https?://[a-zA-Z0-9]([a-zA-Z0-9\-]{0,61}[a-zA-Z0-9])?(\:[0-9]+)?(/[^\s]*)?$' + non_fqdn_match = re.match(non_fqdn_pattern, value) + + if non_fqdn_match: + return # Accept non-FQDN hostnames + + # If it doesn't match our flexible patterns, raise the original error + raise ValidationError("Enter a valid URL.") From ead76fe6611476d5cdd163b51e6d0910c4c7d3ce Mon Sep 17 00:00:00 2001 From: dekzter Date: Fri, 1 Aug 2025 15:02:43 -0400 Subject: [PATCH 0689/1435] first run at m3u filtering --- apps/m3u/api_urls.py | 36 +- apps/m3u/api_views.py | 19 +- .../0013_alter_m3ufilter_filter_type.py | 18 + ...alter_m3ufilter_options_m3ufilter_order.py | 22 + apps/m3u/models.py | 8 +- apps/m3u/serializers.py | 10 +- apps/m3u/tasks.py | 1016 ++++++++++++----- frontend/src/api.js | 61 +- frontend/src/components/forms/M3U.jsx | 44 +- frontend/src/components/forms/M3UFilter.jsx | 126 ++ frontend/src/components/forms/M3UFilters.jsx | 327 ++++++ frontend/src/constants.js | 27 +- frontend/src/store/playlists.jsx | 24 +- 13 files changed, 1400 insertions(+), 338 deletions(-) create mode 100644 apps/m3u/migrations/0013_alter_m3ufilter_filter_type.py create mode 100644 apps/m3u/migrations/0014_alter_m3ufilter_options_m3ufilter_order.py create mode 100644 frontend/src/components/forms/M3UFilter.jsx create mode 100644 frontend/src/components/forms/M3UFilters.jsx diff --git a/apps/m3u/api_urls.py b/apps/m3u/api_urls.py index 41fc2fbc..80e54bb2 100644 --- a/apps/m3u/api_urls.py +++ b/apps/m3u/api_urls.py @@ -1,18 +1,38 @@ from django.urls import path, include from rest_framework.routers import DefaultRouter -from .api_views import M3UAccountViewSet, M3UFilterViewSet, ServerGroupViewSet, RefreshM3UAPIView, RefreshSingleM3UAPIView, UserAgentViewSet, M3UAccountProfileViewSet +from .api_views import ( + M3UAccountViewSet, + M3UFilterViewSet, + ServerGroupViewSet, + RefreshM3UAPIView, + RefreshSingleM3UAPIView, + UserAgentViewSet, + 
M3UAccountProfileViewSet,
+)

-app_name = 'm3u'
+app_name = "m3u"

 router = DefaultRouter()
-router.register(r'accounts', M3UAccountViewSet, basename='m3u-account')
-router.register(r'accounts\/(?P<account_id>\d+)\/profiles', M3UAccountProfileViewSet, basename='m3u-account-profiles')
-router.register(r'filters', M3UFilterViewSet, basename='m3u-filter')
-router.register(r'server-groups', ServerGroupViewSet, basename='server-group')
+router.register(r"accounts", M3UAccountViewSet, basename="m3u-account")
+router.register(
+    r"accounts\/(?P<account_id>\d+)\/profiles",
+    M3UAccountProfileViewSet,
+    basename="m3u-account-profiles",
+)
+router.register(
+    r"accounts\/(?P<account_id>\d+)\/filters",
+    M3UFilterViewSet,
+    basename="m3u-filters",
+)
+router.register(r"server-groups", ServerGroupViewSet, basename="server-group")

 urlpatterns = [
-    path('refresh/', RefreshM3UAPIView.as_view(), name='m3u_refresh'),
-    path('refresh/<int:account_id>/', RefreshSingleM3UAPIView.as_view(), name='m3u_refresh_single'),
+    path("refresh/", RefreshM3UAPIView.as_view(), name="m3u_refresh"),
+    path(
+        "refresh/<int:account_id>/",
+        RefreshSingleM3UAPIView.as_view(),
+        name="m3u_refresh_single",
+    ),
 ]

 urlpatterns += router.urls
diff --git a/apps/m3u/api_views.py b/apps/m3u/api_views.py
index d3739f19..46676e93 100644
--- a/apps/m3u/api_views.py
+++ b/apps/m3u/api_views.py
@@ -183,8 +183,6 @@ class M3UAccountViewSet(viewsets.ModelViewSet):


 class M3UFilterViewSet(viewsets.ModelViewSet):
-    """Handles CRUD operations for M3U filters"""
-
     queryset = M3UFilter.objects.all()
     serializer_class = M3UFilterSerializer

@@ -194,6 +192,23 @@ class M3UFilterViewSet(viewsets.ModelViewSet):
         except KeyError:
             return [Authenticated()]

+    def get_queryset(self):
+        m3u_account_id = self.kwargs["account_id"]
+        return M3UFilter.objects.filter(m3u_account_id=m3u_account_id)
+
+    def perform_create(self, serializer):
+        # Get the account ID from the URL
+        account_id = self.kwargs["account_id"]
+
+        # # Get the M3UAccount instance for the account_id
+        # m3u_account = M3UAccount.objects.get(id=account_id)
+
+        # Save the 'm3u_account' in the serializer context
+        serializer.context["m3u_account"] = account_id
+
+        # Perform the actual save
+        serializer.save(m3u_account_id=account_id)
+

 class ServerGroupViewSet(viewsets.ModelViewSet):
     """Handles CRUD operations for Server Groups"""
diff --git a/apps/m3u/migrations/0013_alter_m3ufilter_filter_type.py b/apps/m3u/migrations/0013_alter_m3ufilter_filter_type.py
new file mode 100644
index 00000000..0b0a8a1d
--- /dev/null
+++ b/apps/m3u/migrations/0013_alter_m3ufilter_filter_type.py
@@ -0,0 +1,18 @@
+# Generated by Django 5.1.6 on 2025-07-22 21:16
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ('m3u', '0012_alter_m3uaccount_refresh_interval'),
+    ]
+
+    operations = [
+        migrations.AlterField(
+            model_name='m3ufilter',
+            name='filter_type',
+            field=models.CharField(choices=[('group', 'Group'), ('name', 'Stream Name'), ('url', 'Stream URL')], default='group', help_text='Filter based on either group title or stream name.', max_length=50),
+        ),
+    ]
diff --git a/apps/m3u/migrations/0014_alter_m3ufilter_options_m3ufilter_order.py b/apps/m3u/migrations/0014_alter_m3ufilter_options_m3ufilter_order.py
new file mode 100644
index 00000000..3510bfc5
--- /dev/null
+++ b/apps/m3u/migrations/0014_alter_m3ufilter_options_m3ufilter_order.py
@@ -0,0 +1,22 @@
+# Generated by Django 5.1.6 on 2025-07-31 17:14
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ('m3u', 
'0013_alter_m3ufilter_filter_type'), + ] + + operations = [ + migrations.AlterModelOptions( + name='m3ufilter', + options={'ordering': ['order']}, + ), + migrations.AddField( + model_name='m3ufilter', + name='order', + field=models.PositiveIntegerField(default=0), + ), + ] diff --git a/apps/m3u/models.py b/apps/m3u/models.py index 94ec88fc..b7993ef6 100644 --- a/apps/m3u/models.py +++ b/apps/m3u/models.py @@ -155,9 +155,11 @@ class M3UFilter(models.Model): """Defines filters for M3U accounts based on stream name or group title.""" FILTER_TYPE_CHOICES = ( - ("group", "Group Title"), + ("group", "Group"), ("name", "Stream Name"), + ("url", "Stream URL"), ) + m3u_account = models.ForeignKey( M3UAccount, on_delete=models.CASCADE, @@ -177,6 +179,7 @@ class M3UFilter(models.Model): default=True, help_text="If True, matching items are excluded; if False, only matches are included.", ) + order = models.PositiveIntegerField(default=0) def applies_to(self, stream_name, group_name): target = group_name if self.filter_type == "group" else stream_name @@ -226,9 +229,6 @@ class ServerGroup(models.Model): return self.name -from django.db import models - - class M3UAccountProfile(models.Model): """Represents a profile associated with an M3U Account.""" diff --git a/apps/m3u/serializers.py b/apps/m3u/serializers.py index a86227aa..3bf0e335 100644 --- a/apps/m3u/serializers.py +++ b/apps/m3u/serializers.py @@ -16,11 +16,9 @@ logger = logging.getLogger(__name__) class M3UFilterSerializer(serializers.ModelSerializer): """Serializer for M3U Filters""" - channel_groups = ChannelGroupM3UAccountSerializer(source="m3u_account", many=True) - class Meta: model = M3UFilter - fields = ["id", "filter_type", "regex_pattern", "exclude", "channel_groups"] + fields = ["id", "filter_type", "regex_pattern", "exclude", "order"] class M3UAccountProfileSerializer(serializers.ModelSerializer): @@ -64,7 +62,7 @@ class M3UAccountProfileSerializer(serializers.ModelSerializer): class M3UAccountSerializer(serializers.ModelSerializer): """Serializer for M3U Account""" - filters = M3UFilterSerializer(many=True, read_only=True) + filters = serializers.SerializerMethodField() # Include user_agent as a mandatory field using its primary key. 
user_agent = serializers.PrimaryKeyRelatedField( queryset=UserAgent.objects.all(), @@ -149,6 +147,10 @@ class M3UAccountSerializer(serializers.ModelSerializer): return instance + def get_filters(self, obj): + filters = obj.filters.order_by("order") + return M3UFilterSerializer(filters, many=True).data + class ServerGroupSerializer(serializers.ModelSerializer): """Serializer for Server Group""" diff --git a/apps/m3u/tasks.py b/apps/m3u/tasks.py index 40a395ce..588705a4 100644 --- a/apps/m3u/tasks.py +++ b/apps/m3u/tasks.py @@ -18,7 +18,12 @@ from channels.layers import get_channel_layer from django.utils import timezone import time import json -from core.utils import RedisClient, acquire_task_lock, release_task_lock, natural_sort_key +from core.utils import ( + RedisClient, + acquire_task_lock, + release_task_lock, + natural_sort_key, +) from core.models import CoreSettings, UserAgent from asgiref.sync import async_to_sync from core.xtream_codes import Client as XCClient @@ -29,6 +34,7 @@ logger = logging.getLogger(__name__) BATCH_SIZE = 1000 m3u_dir = os.path.join(settings.MEDIA_ROOT, "cached_m3u") + def fetch_m3u_lines(account, use_cache=False): os.makedirs(m3u_dir, exist_ok=True) file_path = os.path.join(m3u_dir, f"{account.id}.m3u") @@ -39,27 +45,35 @@ def fetch_m3u_lines(account, use_cache=False): try: # Try to get account-specific user agent first user_agent_obj = account.get_user_agent() - user_agent = user_agent_obj.user_agent if user_agent_obj else "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36" + user_agent = ( + user_agent_obj.user_agent + if user_agent_obj + else "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36" + ) - logger.debug(f"Using user agent: {user_agent} for M3U account: {account.name}") + logger.debug( + f"Using user agent: {user_agent} for M3U account: {account.name}" + ) headers = {"User-Agent": user_agent} logger.info(f"Fetching from URL {account.server_url}") # Set account status to FETCHING before starting download account.status = M3UAccount.Status.FETCHING account.last_message = "Starting download..." 
- account.save(update_fields=['status', 'last_message']) + account.save(update_fields=["status", "last_message"]) - response = requests.get(account.server_url, headers=headers, stream=True) + response = requests.get( + account.server_url, headers=headers, stream=True + ) response.raise_for_status() - total_size = int(response.headers.get('Content-Length', 0)) + total_size = int(response.headers.get("Content-Length", 0)) downloaded = 0 start_time = time.time() last_update_time = start_time progress = 0 - with open(file_path, 'wb') as file: + with open(file_path, "wb") as file: send_m3u_update(account.id, "downloading", 0) for chunk in response.iter_content(chunk_size=8192): if chunk: @@ -76,7 +90,11 @@ def fetch_m3u_lines(account, use_cache=False): progress = (downloaded / total_size) * 100 # Time remaining (in seconds) - time_remaining = (total_size - downloaded) / (speed * 1024) if speed > 0 else 0 + time_remaining = ( + (total_size - downloaded) / (speed * 1024) + if speed > 0 + else 0 + ) current_time = time.time() if current_time - last_update_time >= 0.5: @@ -85,26 +103,36 @@ def fetch_m3u_lines(account, use_cache=False): # Update the account's last_message with detailed progress info progress_msg = f"Downloading: {progress:.1f}% - {speed:.1f} KB/s - {time_remaining:.1f}s remaining" account.last_message = progress_msg - account.save(update_fields=['last_message']) + account.save(update_fields=["last_message"]) - send_m3u_update(account.id, "downloading", progress, - speed=speed, - elapsed_time=elapsed_time, - time_remaining=time_remaining, - message=progress_msg) + send_m3u_update( + account.id, + "downloading", + progress, + speed=speed, + elapsed_time=elapsed_time, + time_remaining=time_remaining, + message=progress_msg, + ) # Final update with 100% progress final_msg = f"Download complete. 
Size: {total_size/1024/1024:.2f} MB, Time: {time.time() - start_time:.1f}s" account.last_message = final_msg - account.save(update_fields=['last_message']) + account.save(update_fields=["last_message"]) send_m3u_update(account.id, "downloading", 100, message=final_msg) except Exception as e: logger.error(f"Error fetching M3U from URL {account.server_url}: {e}") # Update account status and send error notification account.status = M3UAccount.Status.ERROR account.last_message = f"Error downloading M3U file: {str(e)}" - account.save(update_fields=['status', 'last_message']) - send_m3u_update(account.id, "downloading", 100, status="error", error=f"Error downloading M3U file: {str(e)}") + account.save(update_fields=["status", "last_message"]) + send_m3u_update( + account.id, + "downloading", + 100, + status="error", + error=f"Error downloading M3U file: {str(e)}", + ) return [], False # Return empty list and False for success # Check if the file exists and is not empty @@ -113,45 +141,55 @@ def fetch_m3u_lines(account, use_cache=False): logger.error(error_msg) account.status = M3UAccount.Status.ERROR account.last_message = error_msg - account.save(update_fields=['status', 'last_message']) - send_m3u_update(account.id, "downloading", 100, status="error", error=error_msg) + account.save(update_fields=["status", "last_message"]) + send_m3u_update( + account.id, "downloading", 100, status="error", error=error_msg + ) return [], False # Return empty list and False for success try: - with open(file_path, 'r', encoding='utf-8') as f: + with open(file_path, "r", encoding="utf-8") as f: return f.readlines(), True except Exception as e: error_msg = f"Error reading M3U file: {str(e)}" logger.error(error_msg) account.status = M3UAccount.Status.ERROR account.last_message = error_msg - account.save(update_fields=['status', 'last_message']) - send_m3u_update(account.id, "downloading", 100, status="error", error=error_msg) + account.save(update_fields=["status", "last_message"]) + send_m3u_update( + account.id, "downloading", 100, status="error", error=error_msg + ) return [], False elif account.file_path: try: - if account.file_path.endswith('.gz'): - with gzip.open(account.file_path, 'rt', encoding='utf-8') as f: + if account.file_path.endswith(".gz"): + with gzip.open(account.file_path, "rt", encoding="utf-8") as f: return f.readlines(), True - elif account.file_path.endswith('.zip'): - with zipfile.ZipFile(account.file_path, 'r') as zip_file: + elif account.file_path.endswith(".zip"): + with zipfile.ZipFile(account.file_path, "r") as zip_file: for name in zip_file.namelist(): - if name.endswith('.m3u'): + if name.endswith(".m3u"): with zip_file.open(name) as f: - return [line.decode('utf-8') for line in f.readlines()], True + return [ + line.decode("utf-8") for line in f.readlines() + ], True - error_msg = f"No .m3u file found in ZIP archive: {account.file_path}" + error_msg = ( + f"No .m3u file found in ZIP archive: {account.file_path}" + ) logger.warning(error_msg) account.status = M3UAccount.Status.ERROR account.last_message = error_msg - account.save(update_fields=['status', 'last_message']) - send_m3u_update(account.id, "downloading", 100, status="error", error=error_msg) + account.save(update_fields=["status", "last_message"]) + send_m3u_update( + account.id, "downloading", 100, status="error", error=error_msg + ) return [], False else: - with open(account.file_path, 'r', encoding='utf-8') as f: + with open(account.file_path, "r", encoding="utf-8") as f: return f.readlines(), True except (IOError, 
OSError, zipfile.BadZipFile, gzip.BadGzipFile) as e: @@ -159,8 +197,10 @@ def fetch_m3u_lines(account, use_cache=False): logger.error(error_msg) account.status = M3UAccount.Status.ERROR account.last_message = error_msg - account.save(update_fields=['status', 'last_message']) - send_m3u_update(account.id, "downloading", 100, status="error", error=error_msg) + account.save(update_fields=["status", "last_message"]) + send_m3u_update( + account.id, "downloading", 100, status="error", error=error_msg + ) return [], False # Neither server_url nor uploaded_file is available @@ -168,10 +208,11 @@ def fetch_m3u_lines(account, use_cache=False): logger.error(error_msg) account.status = M3UAccount.Status.ERROR account.last_message = error_msg - account.save(update_fields=['status', 'last_message']) + account.save(update_fields=["status", "last_message"]) send_m3u_update(account.id, "downloading", 100, status="error", error=error_msg) return [], False + def get_case_insensitive_attr(attributes, key, default=""): """Get attribute value using case-insensitive key lookup.""" for attr_key, attr_value in attributes.items(): @@ -179,6 +220,7 @@ def get_case_insensitive_attr(attributes, key, default=""): return attr_value return default + def parse_extinf_line(line: str) -> dict: """ Parse an EXTINF line from an M3U file. @@ -192,7 +234,7 @@ def parse_extinf_line(line: str) -> dict: """ if not line.startswith("#EXTINF:"): return None - content = line[len("#EXTINF:"):].strip() + content = line[len("#EXTINF:") :].strip() # Split on the first comma that is not inside quotes. parts = re.split(r',(?=(?:[^"]*"[^"]*")*[^"]*$)', content, maxsplit=1) if len(parts) != 2: @@ -200,21 +242,9 @@ def parse_extinf_line(line: str) -> dict: attributes_part, display_name = parts[0], parts[1].strip() attrs = dict(re.findall(r'([^\s]+)=["\']([^"\']+)["\']', attributes_part)) # Use tvg-name attribute if available; otherwise, use the display name. 
- name = get_case_insensitive_attr(attrs, 'tvg-name', display_name) - return { - 'attributes': attrs, - 'display_name': display_name, - 'name': name - } + name = get_case_insensitive_attr(attrs, "tvg-name", display_name) + return {"attributes": attrs, "display_name": display_name, "name": name} -def _matches_filters(stream_name: str, group_name: str, filters): - """Check if a stream or group name matches a precompiled regex filter.""" - compiled_filters = [(re.compile(f.regex_pattern, re.IGNORECASE), f.exclude) for f in filters] - for pattern, exclude in compiled_filters: - target = group_name if f.filter_type == 'group' else stream_name - if pattern.search(target or ''): - return exclude - return False @shared_task def refresh_m3u_accounts(): @@ -229,6 +259,7 @@ def refresh_m3u_accounts(): logger.info(msg) return msg + def check_field_lengths(streams_to_create): for stream in streams_to_create: for field, value in stream.__dict__.items(): @@ -238,19 +269,44 @@ def check_field_lengths(streams_to_create): print("") print("") + @shared_task def process_groups(account, groups): - existing_groups = {group.name: group for group in ChannelGroup.objects.filter(name__in=groups.keys())} + existing_groups = { + group.name: group + for group in ChannelGroup.objects.filter(name__in=groups.keys()) + } logger.info(f"Currently {len(existing_groups)} existing groups") + compiled_filters = [ + (re.compile(f.regex_pattern), f) + for f in account.filters.order_by("order") + if f.filter_type == "group" + ] + group_objs = [] groups_to_create = [] for group_name, custom_props in groups.items(): logger.debug(f"Handling group for M3U account {account.id}: {group_name}") - if (group_name not in existing_groups): - groups_to_create.append(ChannelGroup( - name=group_name, - )) + + include = True + for pattern, filter in compiled_filters: + if pattern.search(group_name): + logger.debug( + f"Group {group_name} matches filter pattern {filter.regex_pattern}" + ) + include = not filter.exclude + break + + if not include: + continue + + if group_name not in existing_groups: + groups_to_create.append( + ChannelGroup( + name=group_name, + ) + ) else: group_objs.append(existing_groups[group_name]) @@ -264,17 +320,17 @@ def process_groups(account, groups): for group in group_objs: # Ensure we include the xc_id in the custom_properties custom_props = groups.get(group.name, {}) - relations.append(ChannelGroupM3UAccount( - channel_group=group, - m3u_account=account, - custom_properties=json.dumps(custom_props), - enabled=True, # Default to enabled - )) + relations.append( + ChannelGroupM3UAccount( + channel_group=group, + m3u_account=account, + custom_properties=json.dumps(custom_props), + enabled=True, # Default to enabled + ) + ) + + ChannelGroupM3UAccount.objects.bulk_create(relations, ignore_conflicts=True) - ChannelGroupM3UAccount.objects.bulk_create( - relations, - ignore_conflicts=True - ) @shared_task def process_xc_category(account_id, batch, groups, hash_keys): @@ -285,14 +341,21 @@ def process_xc_category(account_id, batch, groups, hash_keys): stream_hashes = {} try: - with XCClient(account.server_url, account.username, account.password, account.get_user_agent()) as xc_client: + with XCClient( + account.server_url, + account.username, + account.password, + account.get_user_agent(), + ) as xc_client: # Log the batch details to help with debugging logger.debug(f"Processing XC batch: {batch}") for group_name, props in batch.items(): # Check if we have a valid xc_id for this group - if 'xc_id' not in props: - 
logger.error(f"Missing xc_id for group {group_name} in batch {batch}") + if "xc_id" not in props: + logger.error( + f"Missing xc_id for group {group_name} in batch {batch}" + ) continue # Get actual group ID from the mapping @@ -302,14 +365,20 @@ def process_xc_category(account_id, batch, groups, hash_keys): continue try: - logger.debug(f"Fetching streams for XC category: {group_name} (ID: {props['xc_id']})") - streams = xc_client.get_live_category_streams(props['xc_id']) + logger.debug( + f"Fetching streams for XC category: {group_name} (ID: {props['xc_id']})" + ) + streams = xc_client.get_live_category_streams(props["xc_id"]) if not streams: - logger.warning(f"No streams found for XC category {group_name} (ID: {props['xc_id']})") + logger.warning( + f"No streams found for XC category {group_name} (ID: {props['xc_id']})" + ) continue - logger.debug(f"Found {len(streams)} streams for category {group_name}") + logger.debug( + f"Found {len(streams)} streams for category {group_name}" + ) for stream in streams: name = stream["name"] @@ -318,7 +387,9 @@ def process_xc_category(account_id, batch, groups, hash_keys): tvg_logo = stream.get("stream_icon", "") group_title = group_name - stream_hash = Stream.generate_hash_key(name, url, tvg_id, hash_keys) + stream_hash = Stream.generate_hash_key( + name, url, tvg_id, hash_keys + ) stream_props = { "name": name, "url": url, @@ -333,23 +404,38 @@ def process_xc_category(account_id, batch, groups, hash_keys): if stream_hash not in stream_hashes: stream_hashes[stream_hash] = stream_props except Exception as e: - logger.error(f"Error processing XC category {group_name} (ID: {props['xc_id']}): {str(e)}") + logger.error( + f"Error processing XC category {group_name} (ID: {props['xc_id']}): {str(e)}" + ) continue # Process all found streams - existing_streams = {s.stream_hash: s for s in Stream.objects.filter(stream_hash__in=stream_hashes.keys())} + existing_streams = { + s.stream_hash: s + for s in Stream.objects.filter(stream_hash__in=stream_hashes.keys()) + } for stream_hash, stream_props in stream_hashes.items(): if stream_hash in existing_streams: obj = existing_streams[stream_hash] - existing_attr = {field.name: getattr(obj, field.name) for field in Stream._meta.fields if field != 'channel_group_id'} - changed = any(existing_attr[key] != value for key, value in stream_props.items() if key != 'channel_group_id') + existing_attr = { + field.name: getattr(obj, field.name) + for field in Stream._meta.fields + if field != "channel_group_id" + } + changed = any( + existing_attr[key] != value + for key, value in stream_props.items() + if key != "channel_group_id" + ) if changed: for key, value in stream_props.items(): setattr(obj, key, value) obj.last_seen = timezone.now() - obj.updated_at = timezone.now() # Update timestamp only for changed streams + obj.updated_at = ( + timezone.now() + ) # Update timestamp only for changed streams streams_to_update.append(obj) del existing_streams[stream_hash] else: @@ -360,7 +446,9 @@ def process_xc_category(account_id, batch, groups, hash_keys): existing_streams[stream_hash] = obj else: stream_props["last_seen"] = timezone.now() - stream_props["updated_at"] = timezone.now() # Set initial updated_at for new streams + stream_props["updated_at"] = ( + timezone.now() + ) # Set initial updated_at for new streams streams_to_create.append(Stream(**stream_props)) try: @@ -370,14 +458,28 @@ def process_xc_category(account_id, batch, groups, hash_keys): if streams_to_update: # We need to split the bulk update to correctly handle 
updated_at # First, get the subset of streams that have content changes - changed_streams = [s for s in streams_to_update if hasattr(s, 'updated_at') and s.updated_at] - unchanged_streams = [s for s in streams_to_update if not hasattr(s, 'updated_at') or not s.updated_at] + changed_streams = [ + s + for s in streams_to_update + if hasattr(s, "updated_at") and s.updated_at + ] + unchanged_streams = [ + s + for s in streams_to_update + if not hasattr(s, "updated_at") or not s.updated_at + ] # Update changed streams with all fields including updated_at if changed_streams: Stream.objects.bulk_update( changed_streams, - {key for key in stream_props.keys() if key not in ["m3u_account", "stream_hash"] and key not in hash_keys} | {"last_seen", "updated_at"} + { + key + for key in stream_props.keys() + if key not in ["m3u_account", "stream_hash"] + and key not in hash_keys + } + | {"last_seen", "updated_at"}, ) # Update unchanged streams with only last_seen @@ -401,11 +503,18 @@ def process_xc_category(account_id, batch, groups, hash_keys): return retval + @shared_task def process_m3u_batch(account_id, batch, groups, hash_keys): """Processes a batch of M3U streams using bulk operations.""" account = M3UAccount.objects.get(id=account_id) + compiled_filters = [ + (re.compile(f.regex_pattern), f) + for f in account.filters.order_by("order") + if f.filter_type != "group" + ] + streams_to_create = [] streams_to_update = [] stream_hashes = {} @@ -415,12 +524,34 @@ def process_m3u_batch(account_id, batch, groups, hash_keys): for stream_info in batch: try: name, url = stream_info["name"], stream_info["url"] - tvg_id, tvg_logo = get_case_insensitive_attr(stream_info["attributes"], "tvg-id", ""), get_case_insensitive_attr(stream_info["attributes"], "tvg-logo", "") - group_title = get_case_insensitive_attr(stream_info["attributes"], "group-title", "Default Group") + + include = True + for pattern, filter in compiled_filters: + logger.debug(f"Checking filter patterh {pattern}") + target = url if filter.filter_type == "url" else name + if pattern.search(target or ""): + logger.debug( + f"Stream {name} - {url} matches filter pattern {filter.regex_pattern}" + ) + include = not filter.exclude + break + + if not include: + logger.debug(f"Stream excluded by filter, skipping.") + continue + + tvg_id, tvg_logo = get_case_insensitive_attr( + stream_info["attributes"], "tvg-id", "" + ), get_case_insensitive_attr(stream_info["attributes"], "tvg-logo", "") + group_title = get_case_insensitive_attr( + stream_info["attributes"], "group-title", "Default Group" + ) # Filter out disabled groups for this account if group_title not in groups: - logger.debug(f"Skipping stream in disabled group: {group_title}") + logger.debug( + f"Skipping stream in disabled or excluded group: {group_title}" + ) continue stream_hash = Stream.generate_hash_key(name, url, tvg_id, hash_keys) @@ -441,19 +572,32 @@ def process_m3u_batch(account_id, batch, groups, hash_keys): logger.error(f"Failed to process stream {name}: {e}") logger.error(json.dumps(stream_info)) - existing_streams = {s.stream_hash: s for s in Stream.objects.filter(stream_hash__in=stream_hashes.keys())} + existing_streams = { + s.stream_hash: s + for s in Stream.objects.filter(stream_hash__in=stream_hashes.keys()) + } for stream_hash, stream_props in stream_hashes.items(): if stream_hash in existing_streams: obj = existing_streams[stream_hash] - existing_attr = {field.name: getattr(obj, field.name) for field in Stream._meta.fields if field != 'channel_group_id'} - changed = 
any(existing_attr[key] != value for key, value in stream_props.items() if key != 'channel_group_id') + existing_attr = { + field.name: getattr(obj, field.name) + for field in Stream._meta.fields + if field != "channel_group_id" + } + changed = any( + existing_attr[key] != value + for key, value in stream_props.items() + if key != "channel_group_id" + ) if changed: for key, value in stream_props.items(): setattr(obj, key, value) obj.last_seen = timezone.now() - obj.updated_at = timezone.now() # Update timestamp only for changed streams + obj.updated_at = ( + timezone.now() + ) # Update timestamp only for changed streams streams_to_update.append(obj) del existing_streams[stream_hash] else: @@ -464,7 +608,9 @@ def process_m3u_batch(account_id, batch, groups, hash_keys): existing_streams[stream_hash] = obj else: stream_props["last_seen"] = timezone.now() - stream_props["updated_at"] = timezone.now() # Set initial updated_at for new streams + stream_props["updated_at"] = ( + timezone.now() + ) # Set initial updated_at for new streams streams_to_create.append(Stream(**stream_props)) try: @@ -474,14 +620,28 @@ def process_m3u_batch(account_id, batch, groups, hash_keys): if streams_to_update: # We need to split the bulk update to correctly handle updated_at # First, get the subset of streams that have content changes - changed_streams = [s for s in streams_to_update if hasattr(s, 'updated_at') and s.updated_at] - unchanged_streams = [s for s in streams_to_update if not hasattr(s, 'updated_at') or not s.updated_at] + changed_streams = [ + s + for s in streams_to_update + if hasattr(s, "updated_at") and s.updated_at + ] + unchanged_streams = [ + s + for s in streams_to_update + if not hasattr(s, "updated_at") or not s.updated_at + ] # Update changed streams with all fields including updated_at if changed_streams: Stream.objects.bulk_update( changed_streams, - {key for key in stream_props.keys() if key not in ["m3u_account", "stream_hash"] and key not in hash_keys} | {"last_seen", "updated_at"} + { + key + for key in stream_props.keys() + if key not in ["m3u_account", "stream_hash"] + and key not in hash_keys + } + | {"last_seen", "updated_at"}, ) # Update unchanged streams with only last_seen @@ -496,35 +656,37 @@ def process_m3u_batch(account_id, batch, groups, hash_keys): retval = f"M3U account: {account_id}, Batch processed: {len(streams_to_create)} created, {len(streams_to_update)} updated." 
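
The ordered, first-match-wins include/exclude semantics that the compiled filter list above implements can be exercised in isolation. A minimal sketch, assuming a stand-in tuple in place of the M3UFilter model (Filter, stream_included, and the sample patterns are illustrative, not from the codebase):

    import re
    from collections import namedtuple

    # Stand-in for M3UFilter; only the fields the matching logic reads.
    Filter = namedtuple("Filter", ["filter_type", "regex_pattern", "exclude", "order"])

    def stream_included(name, url, filters):
        # First matching filter decides: exclude=True drops the stream,
        # exclude=False keeps it; a stream matching no filter is kept.
        compiled = [
            (re.compile(f.regex_pattern), f)
            for f in sorted(filters, key=lambda f: f.order)
            if f.filter_type != "group"  # group filters are applied in process_groups
        ]
        for pattern, f in compiled:
            target = url if f.filter_type == "url" else name
            if pattern.search(target or ""):
                return not f.exclude
        return True

    filters = [
        Filter("name", r"(?i)\b4k\b", exclude=False, order=0),
        Filter("url", r"/adult/", exclude=True, order=1),
    ]
    assert stream_included("News 4K", "http://host/live/1.ts", filters)
    assert not stream_included("Movies", "http://host/adult/2.ts", filters)
    assert stream_included("Sports", "http://host/live/3.ts", filters)

Note that, as in the task above, a lower-order whitelist rule shadows any later exclude rule for the same stream.
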
# Aggressive garbage collection - #del streams_to_create, streams_to_update, stream_hashes, existing_streams - #from core.utils import cleanup_memory - #cleanup_memory(log_usage=True, force_collection=True) + # del streams_to_create, streams_to_update, stream_hashes, existing_streams + # from core.utils import cleanup_memory + # cleanup_memory(log_usage=True, force_collection=True) return retval + def cleanup_streams(account_id, scan_start_time=timezone.now): account = M3UAccount.objects.get(id=account_id, is_active=True) existing_groups = ChannelGroup.objects.filter( m3u_account__m3u_account=account, m3u_account__enabled=True, - ).values_list('id', flat=True) - logger.info(f"Found {len(existing_groups)} active groups for M3U account {account_id}") + ).values_list("id", flat=True) + logger.info( + f"Found {len(existing_groups)} active groups for M3U account {account_id}" + ) # Calculate cutoff date for stale streams stale_cutoff = scan_start_time - timezone.timedelta(days=account.stale_stream_days) - logger.info(f"Removing streams not seen since {stale_cutoff} for M3U account {account_id}") + logger.info( + f"Removing streams not seen since {stale_cutoff} for M3U account {account_id}" + ) # Delete streams that are not in active groups - streams_to_delete = Stream.objects.filter( - m3u_account=account - ).exclude( + streams_to_delete = Stream.objects.filter(m3u_account=account).exclude( channel_group__in=existing_groups ) # Also delete streams that haven't been seen for longer than stale_stream_days stale_streams = Stream.objects.filter( - m3u_account=account, - last_seen__lt=stale_cutoff + m3u_account=account, last_seen__lt=stale_cutoff ) deleted_count = streams_to_delete.count() @@ -534,20 +696,23 @@ def cleanup_streams(account_id, scan_start_time=timezone.now): stale_streams.delete() total_deleted = deleted_count + stale_count - logger.info(f"Cleanup for M3U account {account_id} complete: {deleted_count} streams removed due to group filter, {stale_count} removed as stale") + logger.info( + f"Cleanup for M3U account {account_id} complete: {deleted_count} streams removed due to group filter, {stale_count} removed as stale" + ) # Return the total count of deleted streams return total_deleted + @shared_task def refresh_m3u_groups(account_id, use_cache=False, full_refresh=False): - if not acquire_task_lock('refresh_m3u_account_groups', account_id): + if not acquire_task_lock("refresh_m3u_account_groups", account_id): return f"Task already running for account_id={account_id}.", None try: account = M3UAccount.objects.get(id=account_id, is_active=True) except M3UAccount.DoesNotExist: - release_task_lock('refresh_m3u_account_groups', account_id) + release_task_lock("refresh_m3u_account_groups", account_id) return f"M3UAccount with ID={account_id} not found or inactive.", None extinf_data = [] @@ -555,8 +720,12 @@ def refresh_m3u_groups(account_id, use_cache=False, full_refresh=False): if account.account_type == M3UAccount.Types.XC: # Log detailed information about the account - logger.info(f"Processing XC account {account_id} with URL: {account.server_url}") - logger.debug(f"Username: {account.username}, Has password: {'Yes' if account.password else 'No'}") + logger.info( + f"Processing XC account {account_id} with URL: {account.server_url}" + ) + logger.debug( + f"Username: {account.username}, Has password: {'Yes' if account.password else 'No'}" + ) # Validate required fields if not account.server_url: @@ -564,9 +733,11 @@ def refresh_m3u_groups(account_id, use_cache=False, full_refresh=False): 
logger.error(error_msg) account.status = M3UAccount.Status.ERROR account.last_message = error_msg - account.save(update_fields=['status', 'last_message']) - send_m3u_update(account_id, "processing_groups", 100, status="error", error=error_msg) - release_task_lock('refresh_m3u_account_groups', account_id) + account.save(update_fields=["status", "last_message"]) + send_m3u_update( + account_id, "processing_groups", 100, status="error", error=error_msg + ) + release_task_lock("refresh_m3u_account_groups", account_id) return error_msg, None if not account.username or not account.password: @@ -574,15 +745,19 @@ def refresh_m3u_groups(account_id, use_cache=False, full_refresh=False): logger.error(error_msg) account.status = M3UAccount.Status.ERROR account.last_message = error_msg - account.save(update_fields=['status', 'last_message']) - send_m3u_update(account_id, "processing_groups", 100, status="error", error=error_msg) - release_task_lock('refresh_m3u_account_groups', account_id) + account.save(update_fields=["status", "last_message"]) + send_m3u_update( + account_id, "processing_groups", 100, status="error", error=error_msg + ) + release_task_lock("refresh_m3u_account_groups", account_id) return error_msg, None try: # Ensure server URL is properly formatted - server_url = account.server_url.rstrip('/') - if not (server_url.startswith('http://') or server_url.startswith('https://')): + server_url = account.server_url.rstrip("/") + if not ( + server_url.startswith("http://") or server_url.startswith("https://") + ): server_url = f"http://{server_url}" # User agent handling - completely rewritten @@ -591,37 +766,63 @@ def refresh_m3u_groups(account_id, use_cache=False, full_refresh=False): logger.debug(f"Getting user agent for account {account.id}") # Use a hardcoded user agent string to avoid any issues with object structure - user_agent_string = "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36" + user_agent_string = ( + "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36" + ) try: # Try to get the user agent directly from the database if account.user_agent_id: ua_obj = UserAgent.objects.get(id=account.user_agent_id) - if ua_obj and hasattr(ua_obj, 'user_agent') and ua_obj.user_agent: + if ( + ua_obj + and hasattr(ua_obj, "user_agent") + and ua_obj.user_agent + ): user_agent_string = ua_obj.user_agent - logger.debug(f"Using user agent from account: {user_agent_string}") + logger.debug( + f"Using user agent from account: {user_agent_string}" + ) else: # Get default user agent from CoreSettings default_ua_id = CoreSettings.get_default_user_agent_id() - logger.debug(f"Default user agent ID from settings: {default_ua_id}") + logger.debug( + f"Default user agent ID from settings: {default_ua_id}" + ) if default_ua_id: ua_obj = UserAgent.objects.get(id=default_ua_id) - if ua_obj and hasattr(ua_obj, 'user_agent') and ua_obj.user_agent: + if ( + ua_obj + and hasattr(ua_obj, "user_agent") + and ua_obj.user_agent + ): user_agent_string = ua_obj.user_agent - logger.debug(f"Using default user agent: {user_agent_string}") + logger.debug( + f"Using default user agent: {user_agent_string}" + ) except Exception as e: - logger.warning(f"Error getting user agent, using fallback: {str(e)}") + logger.warning( + f"Error getting user agent, using fallback: {str(e)}" + ) logger.debug(f"Final user agent string: {user_agent_string}") except Exception as e: - user_agent_string = "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36" - logger.warning(f"Exception in user agent handling, 
using fallback: {str(e)}") + user_agent_string = ( + "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36" + ) + logger.warning( + f"Exception in user agent handling, using fallback: {str(e)}" + ) - logger.info(f"Creating XCClient with URL: {server_url}, Username: {account.username}, User-Agent: {user_agent_string}") + logger.info( + f"Creating XCClient with URL: {server_url}, Username: {account.username}, User-Agent: {user_agent_string}" + ) # Create XCClient with explicit error handling try: - with XCClient(server_url, account.username, account.password, user_agent_string) as xc_client: + with XCClient( + server_url, account.username, account.password, user_agent_string + ) as xc_client: logger.info(f"XCClient instance created successfully") # Authenticate with detailed error handling @@ -634,26 +835,42 @@ def refresh_m3u_groups(account_id, use_cache=False, full_refresh=False): logger.error(error_msg) account.status = M3UAccount.Status.ERROR account.last_message = error_msg - account.save(update_fields=['status', 'last_message']) - send_m3u_update(account_id, "processing_groups", 100, status="error", error=error_msg) - release_task_lock('refresh_m3u_account_groups', account_id) + account.save(update_fields=["status", "last_message"]) + send_m3u_update( + account_id, + "processing_groups", + 100, + status="error", + error=error_msg, + ) + release_task_lock("refresh_m3u_account_groups", account_id) return error_msg, None # Get categories with detailed error handling try: logger.info(f"Getting live categories from XC server") xc_categories = xc_client.get_live_categories() - logger.info(f"Found {len(xc_categories)} categories: {xc_categories}") + logger.info( + f"Found {len(xc_categories)} categories: {xc_categories}" + ) # Validate response if not isinstance(xc_categories, list): - error_msg = f"Unexpected response from XC server: {xc_categories}" + error_msg = ( + f"Unexpected response from XC server: {xc_categories}" + ) logger.error(error_msg) account.status = M3UAccount.Status.ERROR account.last_message = error_msg - account.save(update_fields=['status', 'last_message']) - send_m3u_update(account_id, "processing_groups", 100, status="error", error=error_msg) - release_task_lock('refresh_m3u_account_groups', account_id) + account.save(update_fields=["status", "last_message"]) + send_m3u_update( + account_id, + "processing_groups", + 100, + status="error", + error=error_msg, + ) + release_task_lock("refresh_m3u_account_groups", account_id) return error_msg, None if len(xc_categories) == 0: @@ -671,9 +888,15 @@ def refresh_m3u_groups(account_id, use_cache=False, full_refresh=False): logger.error(error_msg) account.status = M3UAccount.Status.ERROR account.last_message = error_msg - account.save(update_fields=['status', 'last_message']) - send_m3u_update(account_id, "processing_groups", 100, status="error", error=error_msg) - release_task_lock('refresh_m3u_account_groups', account_id) + account.save(update_fields=["status", "last_message"]) + send_m3u_update( + account_id, + "processing_groups", + 100, + status="error", + error=error_msg, + ) + release_task_lock("refresh_m3u_account_groups", account_id) return error_msg, None except Exception as e: @@ -681,25 +904,33 @@ def refresh_m3u_groups(account_id, use_cache=False, full_refresh=False): logger.error(error_msg) account.status = M3UAccount.Status.ERROR account.last_message = error_msg - account.save(update_fields=['status', 'last_message']) - send_m3u_update(account_id, "processing_groups", 100, status="error", error=error_msg) 
- release_task_lock('refresh_m3u_account_groups', account_id) + account.save(update_fields=["status", "last_message"]) + send_m3u_update( + account_id, + "processing_groups", + 100, + status="error", + error=error_msg, + ) + release_task_lock("refresh_m3u_account_groups", account_id) return error_msg, None except Exception as e: error_msg = f"Unexpected error occurred in XC Client: {str(e)}" logger.error(error_msg) account.status = M3UAccount.Status.ERROR account.last_message = error_msg - account.save(update_fields=['status', 'last_message']) - send_m3u_update(account_id, "processing_groups", 100, status="error", error=error_msg) - release_task_lock('refresh_m3u_account_groups', account_id) + account.save(update_fields=["status", "last_message"]) + send_m3u_update( + account_id, "processing_groups", 100, status="error", error=error_msg + ) + release_task_lock("refresh_m3u_account_groups", account_id) return error_msg, None else: # Here's the key change - use the success flag from fetch_m3u_lines lines, success = fetch_m3u_lines(account, use_cache) if not success: # If fetch failed, don't continue processing - release_task_lock('refresh_m3u_account_groups', account_id) + release_task_lock("refresh_m3u_account_groups", account_id) return f"Failed to fetch M3U data for account_id={account_id}.", None # Log basic file structure for debugging @@ -719,19 +950,25 @@ def refresh_m3u_groups(account_id, use_cache=False, full_refresh=False): extinf_count += 1 parsed = parse_extinf_line(line) if parsed: - group_title_attr = get_case_insensitive_attr(parsed["attributes"], "group-title", "") + group_title_attr = get_case_insensitive_attr( + parsed["attributes"], "group-title", "" + ) if group_title_attr: group_name = group_title_attr # Log new groups as they're discovered if group_name not in groups: - logger.debug(f"Found new group for M3U account {account_id}: '{group_name}'") + logger.debug( + f"Found new group for M3U account {account_id}: '{group_name}'" + ) groups[group_name] = {} extinf_data.append(parsed) else: # Log problematic EXTINF lines - logger.warning(f"Failed to parse EXTINF at line {line_index+1}: {line[:200]}") - problematic_lines.append((line_index+1, line[:200])) + logger.warning( + f"Failed to parse EXTINF at line {line_index+1}: {line[:200]}" + ) + problematic_lines.append((line_index + 1, line[:200])) elif extinf_data and line.startswith("http"): url_count += 1 @@ -741,49 +978,69 @@ def refresh_m3u_groups(account_id, use_cache=False, full_refresh=False): # Periodically log progress for large files if valid_stream_count % 1000 == 0: - logger.debug(f"Processed {valid_stream_count} valid streams so far for M3U account: {account_id}") + logger.debug( + f"Processed {valid_stream_count} valid streams so far for M3U account: {account_id}" + ) # Log summary statistics - logger.info(f"M3U parsing complete - Lines: {line_count}, EXTINF: {extinf_count}, URLs: {url_count}, Valid streams: {valid_stream_count}") + logger.info( + f"M3U parsing complete - Lines: {line_count}, EXTINF: {extinf_count}, URLs: {url_count}, Valid streams: {valid_stream_count}" + ) if problematic_lines: - logger.warning(f"Found {len(problematic_lines)} problematic lines during parsing") - for i, (line_num, content) in enumerate(problematic_lines[:10]): # Log max 10 examples + logger.warning( + f"Found {len(problematic_lines)} problematic lines during parsing" + ) + for i, (line_num, content) in enumerate( + problematic_lines[:10] + ): # Log max 10 examples logger.warning(f"Problematic line #{i+1} at line {line_num}: 
{content}") if len(problematic_lines) > 10: - logger.warning(f"... and {len(problematic_lines) - 10} more problematic lines") + logger.warning( + f"... and {len(problematic_lines) - 10} more problematic lines" + ) # Log group statistics - logger.info(f"Found {len(groups)} groups in M3U file: {', '.join(list(groups.keys())[:20])}" + - ("..." if len(groups) > 20 else "")) + logger.info( + f"Found {len(groups)} groups in M3U file: {', '.join(list(groups.keys())[:20])}" + + ("..." if len(groups) > 20 else "") + ) # Cache processed data cache_path = os.path.join(m3u_dir, f"{account_id}.json") - with open(cache_path, 'w', encoding='utf-8') as f: - json.dump({ - "extinf_data": extinf_data, - "groups": groups, - }, f) + with open(cache_path, "w", encoding="utf-8") as f: + json.dump( + { + "extinf_data": extinf_data, + "groups": groups, + }, + f, + ) logger.debug(f"Cached parsed M3U data to {cache_path}") send_m3u_update(account_id, "processing_groups", 0) process_groups(account, groups) - release_task_lock('refresh_m3u_account_groups', account_id) - - + release_task_lock("refresh_m3u_account_groups", account_id) if not full_refresh: # Use update() instead of save() to avoid triggering signals M3UAccount.objects.filter(id=account_id).update( status=M3UAccount.Status.PENDING_SETUP, - last_message="M3U groups loaded. Please select groups or refresh M3U to complete setup." + last_message="M3U groups loaded. Please select groups or refresh M3U to complete setup.", + ) + send_m3u_update( + account_id, + "processing_groups", + 100, + status="pending_setup", + message="M3U groups loaded. Please select groups or refresh M3U to complete setup.", ) - send_m3u_update(account_id, "processing_groups", 100, status="pending_setup", message="M3U groups loaded. Please select groups or refresh M3U to complete setup.") return extinf_data, groups + def delete_m3u_refresh_task_by_id(account_id): """ Delete the periodic task associated with an M3U account ID. 
@@ -797,6 +1054,7 @@ def delete_m3u_refresh_task_by_id(account_id): # Look for task by name try: from django_celery_beat.models import PeriodicTask, IntervalSchedule + task = PeriodicTask.objects.get(name=task_name) logger.debug(f"Found task by name: {task.id} for M3UAccount {account_id}") except PeriodicTask.DoesNotExist: @@ -807,12 +1065,16 @@ def delete_m3u_refresh_task_by_id(account_id): if task: # Store interval info before deleting the task interval_id = None - if hasattr(task, 'interval') and task.interval: + if hasattr(task, "interval") and task.interval: interval_id = task.interval.id # Count how many TOTAL tasks use this interval (including this one) - tasks_with_same_interval = PeriodicTask.objects.filter(interval_id=interval_id).count() - logger.debug(f"Interval {interval_id} is used by {tasks_with_same_interval} tasks total") + tasks_with_same_interval = PeriodicTask.objects.filter( + interval_id=interval_id + ).count() + logger.debug( + f"Interval {interval_id} is used by {tasks_with_same_interval} tasks total" + ) # Delete the task first task_id = task.id @@ -824,20 +1086,28 @@ def delete_m3u_refresh_task_by_id(account_id): if interval_id and tasks_with_same_interval == 1: try: interval = IntervalSchedule.objects.get(id=interval_id) - logger.debug(f"Deleting interval schedule {interval_id} (not shared with other tasks)") + logger.debug( + f"Deleting interval schedule {interval_id} (not shared with other tasks)" + ) interval.delete() logger.debug(f"Successfully deleted interval {interval_id}") except IntervalSchedule.DoesNotExist: logger.warning(f"Interval {interval_id} no longer exists") elif interval_id: - logger.debug(f"Not deleting interval {interval_id} as it's shared with {tasks_with_same_interval-1} other tasks") + logger.debug( + f"Not deleting interval {interval_id} as it's shared with {tasks_with_same_interval-1} other tasks" + ) return True return False except Exception as e: - logger.error(f"Error deleting periodic task for M3UAccount {account_id}: {str(e)}", exc_info=True) + logger.error( + f"Error deleting periodic task for M3UAccount {account_id}: {str(e)}", + exc_info=True, + ) return False + @shared_task def sync_auto_channels(account_id, scan_start_time=None): """ @@ -845,7 +1115,13 @@ def sync_auto_channels(account_id, scan_start_time=None): Preserves existing channel UUIDs to maintain M3U link integrity. Called after M3U refresh completes successfully. 
""" - from apps.channels.models import Channel, ChannelGroup, ChannelGroupM3UAccount, Stream, ChannelStream + from apps.channels.models import ( + Channel, + ChannelGroup, + ChannelGroupM3UAccount, + Stream, + ChannelStream, + ) from apps.epg.models import EPGData from django.utils import timezone @@ -862,10 +1138,8 @@ def sync_auto_channels(account_id, scan_start_time=None): # Get groups with auto sync enabled for this account auto_sync_groups = ChannelGroupM3UAccount.objects.filter( - m3u_account=account, - enabled=True, - auto_channel_sync=True - ).select_related('channel_group') + m3u_account=account, enabled=True, auto_channel_sync=True + ).select_related("channel_group") channels_created = 0 channels_updated = 0 @@ -890,7 +1164,9 @@ def sync_auto_channels(account_id, scan_start_time=None): force_dummy_epg = group_custom_props.get("force_dummy_epg", False) override_group_id = group_custom_props.get("group_override") name_regex_pattern = group_custom_props.get("name_regex_pattern") - name_replace_pattern = group_custom_props.get("name_replace_pattern") + name_replace_pattern = group_custom_props.get( + "name_replace_pattern" + ) name_match_regex = group_custom_props.get("name_match_regex") channel_profile_ids = group_custom_props.get("channel_profile_ids") channel_sort_order = group_custom_props.get("channel_sort_order") @@ -908,17 +1184,23 @@ def sync_auto_channels(account_id, scan_start_time=None): if override_group_id: try: target_group = ChannelGroup.objects.get(id=override_group_id) - logger.info(f"Using override group '{target_group.name}' instead of '{channel_group.name}' for auto-created channels") + logger.info( + f"Using override group '{target_group.name}' instead of '{channel_group.name}' for auto-created channels" + ) except ChannelGroup.DoesNotExist: - logger.warning(f"Override group with ID {override_group_id} not found, using original group '{channel_group.name}'") + logger.warning( + f"Override group with ID {override_group_id} not found, using original group '{channel_group.name}'" + ) - logger.info(f"Processing auto sync for group: {channel_group.name} (start: {start_number})") + logger.info( + f"Processing auto sync for group: {channel_group.name} (start: {start_number})" + ) # Get all current streams in this group for this M3U account, filter out stale streams current_streams = Stream.objects.filter( m3u_account=account, channel_group=channel_group, - last_seen__gte=scan_start_time + last_seen__gte=scan_start_time, ) # --- FILTER STREAMS BY NAME MATCH REGEX IF SPECIFIED --- @@ -928,33 +1210,38 @@ def sync_auto_channels(account_id, scan_start_time=None): name__iregex=name_match_regex ) except re.error as e: - logger.warning(f"Invalid name_match_regex '{name_match_regex}' for group '{channel_group.name}': {e}. Skipping name filter.") + logger.warning( + f"Invalid name_match_regex '{name_match_regex}' for group '{channel_group.name}': {e}. Skipping name filter." 
+ ) # --- APPLY CHANNEL SORT ORDER --- streams_is_list = False # Track if we converted to list - if channel_sort_order and channel_sort_order != '': - if channel_sort_order == 'name': + if channel_sort_order and channel_sort_order != "": + if channel_sort_order == "name": # Use natural sorting for names to handle numbers correctly current_streams = list(current_streams) - current_streams.sort(key=lambda stream: natural_sort_key(stream.name)) + current_streams.sort( + key=lambda stream: natural_sort_key(stream.name) + ) streams_is_list = True - elif channel_sort_order == 'tvg_id': - current_streams = current_streams.order_by('tvg_id') - elif channel_sort_order == 'updated_at': - current_streams = current_streams.order_by('updated_at') + elif channel_sort_order == "tvg_id": + current_streams = current_streams.order_by("tvg_id") + elif channel_sort_order == "updated_at": + current_streams = current_streams.order_by("updated_at") else: - logger.warning(f"Unknown channel_sort_order '{channel_sort_order}' for group '{channel_group.name}'. Using provider order.") - current_streams = current_streams.order_by('id') + logger.warning( + f"Unknown channel_sort_order '{channel_sort_order}' for group '{channel_group.name}'. Using provider order." + ) + current_streams = current_streams.order_by("id") else: - current_streams = current_streams.order_by('id') + current_streams = current_streams.order_by("id") # If channel_sort_order is empty or None, use provider order (no additional sorting) # Get existing auto-created channels for this account (regardless of current group) # We'll find them by their stream associations instead of just group location existing_channels = Channel.objects.filter( - auto_created=True, - auto_created_by=account - ).select_related('logo', 'epg_data') + auto_created=True, auto_created_by=account + ).select_related("logo", "epg_data") # Create mapping of existing channels by their associated stream # This approach finds channels even if they've been moved to different groups @@ -964,8 +1251,8 @@ def sync_auto_channels(account_id, scan_start_time=None): channel_streams = ChannelStream.objects.filter( channel=channel, stream__m3u_account=account, - stream__channel_group=channel_group # Match streams from the original group - ).select_related('stream') + stream__channel_group=channel_group, # Match streams from the original group + ).select_related("stream") # Map each of our M3U account's streams to this channel for channel_stream in channel_streams: @@ -976,7 +1263,11 @@ def sync_auto_channels(account_id, scan_start_time=None): processed_stream_ids = set() # Check if we have streams - handle both QuerySet and list cases - has_streams = len(current_streams) > 0 if streams_is_list else current_streams.exists() + has_streams = ( + len(current_streams) > 0 + if streams_is_list + else current_streams.exists() + ) if not has_streams: logger.debug(f"No streams found in group {channel_group.name}") @@ -984,20 +1275,31 @@ def sync_auto_channels(account_id, scan_start_time=None): channels_to_delete = [ch for ch in existing_channel_map.values()] if channels_to_delete: deleted_count = len(channels_to_delete) - Channel.objects.filter(id__in=[ch.id for ch in channels_to_delete]).delete() + Channel.objects.filter( + id__in=[ch.id for ch in channels_to_delete] + ).delete() channels_deleted += deleted_count - logger.debug(f"Deleted {deleted_count} auto channels (no streams remaining)") + logger.debug( + f"Deleted {deleted_count} auto channels (no streams remaining)" + ) continue # Prepare profiles 
to assign to new channels from apps.channels.models import ChannelProfile, ChannelProfileMembership - if channel_profile_ids and isinstance(channel_profile_ids, list) and len(channel_profile_ids) > 0: + + if ( + channel_profile_ids + and isinstance(channel_profile_ids, list) + and len(channel_profile_ids) > 0 + ): # Convert all to int (in case they're strings) try: profile_ids = [int(pid) for pid in channel_profile_ids] except Exception: profile_ids = [] - profiles_to_assign = list(ChannelProfile.objects.filter(id__in=profile_ids)) + profiles_to_assign = list( + ChannelProfile.objects.filter(id__in=profile_ids) + ) else: profiles_to_assign = list(ChannelProfile.objects.all()) @@ -1010,10 +1312,11 @@ def sync_auto_channels(account_id, scan_start_time=None): temp_channel_number = start_number # Get all channel numbers that are already in use by other channels (not auto-created by this account) - used_numbers = set(Channel.objects.exclude( - auto_created=True, - auto_created_by=account - ).values_list('channel_number', flat=True)) + used_numbers = set( + Channel.objects.exclude( + auto_created=True, auto_created_by=account + ).values_list("channel_number", flat=True) + ) for stream in current_streams: if stream.id in existing_channel_map: @@ -1030,7 +1333,9 @@ def sync_auto_channels(account_id, scan_start_time=None): if channel.channel_number != target_number: channel.channel_number = target_number channels_to_renumber.append(channel) - logger.debug(f"Will renumber channel '{channel.name}' to {target_number}") + logger.debug( + f"Will renumber channel '{channel.name}' to {target_number}" + ) temp_channel_number += 1.0 if temp_channel_number % 1 != 0: # Has decimal @@ -1038,8 +1343,10 @@ def sync_auto_channels(account_id, scan_start_time=None): # Bulk update channel numbers if any need renumbering if channels_to_renumber: - Channel.objects.bulk_update(channels_to_renumber, ['channel_number']) - logger.info(f"Renumbered {len(channels_to_renumber)} channels to maintain sort order") + Channel.objects.bulk_update(channels_to_renumber, ["channel_number"]) + logger.info( + f"Renumbered {len(channels_to_renumber)} channels to maintain sort order" + ) # Reset channel number counter for processing new channels current_channel_number = start_number @@ -1048,7 +1355,11 @@ def sync_auto_channels(account_id, scan_start_time=None): processed_stream_ids.add(stream.id) try: # Parse custom properties for additional info - stream_custom_props = json.loads(stream.custom_properties) if stream.custom_properties else {} + stream_custom_props = ( + json.loads(stream.custom_properties) + if stream.custom_properties + else {} + ) tvc_guide_stationid = stream_custom_props.get("tvc-guide-stationid") # --- REGEX FIND/REPLACE LOGIC --- @@ -1056,11 +1367,19 @@ def sync_auto_channels(account_id, scan_start_time=None): new_name = original_name if name_regex_pattern is not None: # If replace is None, treat as empty string (remove match) - replace = name_replace_pattern if name_replace_pattern is not None else '' + replace = ( + name_replace_pattern + if name_replace_pattern is not None + else "" + ) try: - new_name = re.sub(name_regex_pattern, replace, original_name) + new_name = re.sub( + name_regex_pattern, replace, original_name + ) except re.error as e: - logger.warning(f"Regex error for group '{channel_group.name}': {e}. Using original name.") + logger.warning( + f"Regex error for group '{channel_group.name}': {e}. Using original name." 
+ ) new_name = original_name # Check if we already have a channel for this stream @@ -1087,15 +1406,20 @@ def sync_auto_channels(account_id, scan_start_time=None): if existing_channel.channel_group != target_group: existing_channel.channel_group = target_group channel_updated = True - logger.info(f"Moved auto channel '{existing_channel.name}' from '{existing_channel.channel_group.name if existing_channel.channel_group else 'None'}' to '{target_group.name}'") + logger.info( + f"Moved auto channel '{existing_channel.name}' from '{existing_channel.channel_group.name if existing_channel.channel_group else 'None'}' to '{target_group.name}'" + ) # Handle logo updates current_logo = None if stream.logo_url: from apps.channels.models import Logo + current_logo, _ = Logo.objects.get_or_create( url=stream.logo_url, - defaults={"name": stream.name or stream.tvg_id or "Unknown"} + defaults={ + "name": stream.name or stream.tvg_id or "Unknown" + }, ) if existing_channel.logo != current_logo: @@ -1105,7 +1429,9 @@ def sync_auto_channels(account_id, scan_start_time=None): # Handle EPG data updates current_epg_data = None if stream.tvg_id and not force_dummy_epg: - current_epg_data = EPGData.objects.filter(tvg_id=stream.tvg_id).first() + current_epg_data = EPGData.objects.filter( + tvg_id=stream.tvg_id + ).first() if existing_channel.epg_data != current_epg_data: existing_channel.epg_data = current_epg_data @@ -1114,17 +1440,20 @@ def sync_auto_channels(account_id, scan_start_time=None): if channel_updated: existing_channel.save() channels_updated += 1 - logger.debug(f"Updated auto channel: {existing_channel.channel_number} - {existing_channel.name}") + logger.debug( + f"Updated auto channel: {existing_channel.channel_number} - {existing_channel.name}" + ) # Update channel profile memberships for existing channels current_memberships = set( ChannelProfileMembership.objects.filter( - channel=existing_channel, - enabled=True - ).values_list('channel_profile_id', flat=True) + channel=existing_channel, enabled=True + ).values_list("channel_profile_id", flat=True) ) - target_profile_ids = set(profile.id for profile in profiles_to_assign) + target_profile_ids = set( + profile.id for profile in profiles_to_assign + ) # Only update if memberships have changed if current_memberships != target_profile_ids: @@ -1135,16 +1464,20 @@ def sync_auto_channels(account_id, scan_start_time=None): # Enable/create memberships for target profiles for profile in profiles_to_assign: - membership, created = ChannelProfileMembership.objects.get_or_create( - channel_profile=profile, - channel=existing_channel, - defaults={'enabled': True} + membership, created = ( + ChannelProfileMembership.objects.get_or_create( + channel_profile=profile, + channel=existing_channel, + defaults={"enabled": True}, + ) ) if not created and not membership.enabled: membership.enabled = True membership.save() - logger.debug(f"Updated profile memberships for auto channel: {existing_channel.name}") + logger.debug( + f"Updated profile memberships for auto channel: {existing_channel.name}" + ) else: # Create new channel @@ -1164,19 +1497,19 @@ def sync_auto_channels(account_id, scan_start_time=None): channel_group=target_group, user_level=0, auto_created=True, - auto_created_by=account + auto_created_by=account, ) # Associate the stream with the channel ChannelStream.objects.create( - channel=channel, - stream=stream, - order=0 + channel=channel, stream=stream, order=0 ) # Assign to correct profiles memberships = [ - 
ChannelProfileMembership(channel_profile=profile, channel=channel, enabled=True) + ChannelProfileMembership( + channel_profile=profile, channel=channel, enabled=True + ) for profile in profiles_to_assign ] if memberships: @@ -1184,26 +1517,33 @@ def sync_auto_channels(account_id, scan_start_time=None): # Try to match EPG data if stream.tvg_id and not force_dummy_epg: - epg_data = EPGData.objects.filter(tvg_id=stream.tvg_id).first() + epg_data = EPGData.objects.filter( + tvg_id=stream.tvg_id + ).first() if epg_data: channel.epg_data = epg_data - channel.save(update_fields=['epg_data']) + channel.save(update_fields=["epg_data"]) elif stream.tvg_id and force_dummy_epg: channel.epg_data = None - channel.save(update_fields=['epg_data']) + channel.save(update_fields=["epg_data"]) # Handle logo if stream.logo_url: from apps.channels.models import Logo + logo, _ = Logo.objects.get_or_create( url=stream.logo_url, - defaults={"name": stream.name or stream.tvg_id or "Unknown"} + defaults={ + "name": stream.name or stream.tvg_id or "Unknown" + }, ) channel.logo = logo - channel.save(update_fields=['logo']) + channel.save(update_fields=["logo"]) channels_created += 1 - logger.debug(f"Created auto channel: {channel.channel_number} - {channel.name}") + logger.debug( + f"Created auto channel: {channel.channel_number} - {channel.name}" + ) # Increment channel number for next iteration current_channel_number += 1.0 @@ -1211,7 +1551,9 @@ def sync_auto_channels(account_id, scan_start_time=None): current_channel_number = int(current_channel_number) + 1.0 except Exception as e: - logger.error(f"Error processing auto channel for stream {stream.name}: {str(e)}") + logger.error( + f"Error processing auto channel for stream {stream.name}: {str(e)}" + ) continue # Delete channels for streams that no longer exist @@ -1222,21 +1564,28 @@ def sync_auto_channels(account_id, scan_start_time=None): if channels_to_delete: deleted_count = len(channels_to_delete) - Channel.objects.filter(id__in=[ch.id for ch in channels_to_delete]).delete() + Channel.objects.filter( + id__in=[ch.id for ch in channels_to_delete] + ).delete() channels_deleted += deleted_count - logger.debug(f"Deleted {deleted_count} auto channels for removed streams") + logger.debug( + f"Deleted {deleted_count} auto channels for removed streams" + ) - logger.info(f"Auto channel sync complete for account {account.name}: {channels_created} created, {channels_updated} updated, {channels_deleted} deleted") + logger.info( + f"Auto channel sync complete for account {account.name}: {channels_created} created, {channels_updated} updated, {channels_deleted} deleted" + ) return f"Auto sync: {channels_created} channels created, {channels_updated} updated, {channels_deleted} deleted" except Exception as e: logger.error(f"Error in auto channel sync for account {account_id}: {str(e)}") return f"Auto sync error: {str(e)}" + @shared_task def refresh_single_m3u_account(account_id): """Splits M3U processing into chunks and dispatches them as parallel tasks.""" - if not acquire_task_lock('refresh_single_m3u_account', account_id): + if not acquire_task_lock("refresh_single_m3u_account", account_id): return f"Task already running for account_id={account_id}." 
# Record start time @@ -1250,25 +1599,27 @@ def refresh_single_m3u_account(account_id): account = M3UAccount.objects.get(id=account_id, is_active=True) if not account.is_active: logger.debug(f"Account {account_id} is not active, skipping.") - release_task_lock('refresh_single_m3u_account', account_id) + release_task_lock("refresh_single_m3u_account", account_id) return # Set status to fetching account.status = M3UAccount.Status.FETCHING - account.save(update_fields=['status']) - - filters = list(account.filters.all()) + account.save(update_fields=["status"]) except M3UAccount.DoesNotExist: # The M3U account doesn't exist, so delete the periodic task if it exists - logger.warning(f"M3U account with ID {account_id} not found, but task was triggered. Cleaning up orphaned task.") + logger.warning( + f"M3U account with ID {account_id} not found, but task was triggered. Cleaning up orphaned task." + ) # Call the helper function to delete the task if delete_m3u_refresh_task_by_id(account_id): - logger.info(f"Successfully cleaned up orphaned task for M3U account {account_id}") + logger.info( + f"Successfully cleaned up orphaned task for M3U account {account_id}" + ) else: logger.debug(f"No orphaned task found for M3U account {account_id}") - release_task_lock('refresh_single_m3u_account', account_id) + release_task_lock("refresh_single_m3u_account", account_id) return f"M3UAccount with ID={account_id} not found or inactive, task cleaned up" # Fetch M3U lines and handle potential issues @@ -1278,14 +1629,16 @@ def refresh_single_m3u_account(account_id): cache_path = os.path.join(m3u_dir, f"{account_id}.json") if os.path.exists(cache_path): try: - with open(cache_path, 'r') as file: + with open(cache_path, "r") as file: data = json.load(file) - extinf_data = data['extinf_data'] - groups = data['groups'] + extinf_data = data["extinf_data"] + groups = data["groups"] except json.JSONDecodeError as e: # Handle corrupted JSON file - logger.error(f"Error parsing cached M3U data for account {account_id}: {str(e)}") + logger.error( + f"Error parsing cached M3U data for account {account_id}: {str(e)}" + ) # Backup the corrupted file for potential analysis backup_path = f"{cache_path}.corrupted" @@ -1293,7 +1646,9 @@ def refresh_single_m3u_account(account_id): os.rename(cache_path, backup_path) logger.info(f"Renamed corrupted cache file to {backup_path}") except OSError as rename_err: - logger.warning(f"Failed to rename corrupted cache file: {str(rename_err)}") + logger.warning( + f"Failed to rename corrupted cache file: {str(rename_err)}" + ) # Reset the data to empty structures extinf_data = [] @@ -1311,8 +1666,10 @@ def refresh_single_m3u_account(account_id): # Check for completely empty result or missing groups if not result or result[1] is None: - logger.error(f"Failed to refresh M3U groups for account {account_id}: {result}") - release_task_lock('refresh_single_m3u_account', account_id) + logger.error( + f"Failed to refresh M3U groups for account {account_id}: {result}" + ) + release_task_lock("refresh_single_m3u_account", account_id) return "Failed to update m3u account - download failed or other error" extinf_data, groups = result @@ -1329,15 +1686,23 @@ def refresh_single_m3u_account(account_id): logger.error(f"No streams found for non-XC account {account_id}") account.status = M3UAccount.Status.ERROR account.last_message = "No streams found in M3U source" - account.save(update_fields=['status', 'last_message']) - send_m3u_update(account_id, "parsing", 100, status="error", error="No streams found") + 
account.save(update_fields=["status", "last_message"]) + send_m3u_update( + account_id, "parsing", 100, status="error", error="No streams found" + ) except Exception as e: logger.error(f"Exception in refresh_m3u_groups: {str(e)}", exc_info=True) account.status = M3UAccount.Status.ERROR account.last_message = f"Error refreshing M3U groups: {str(e)}" - account.save(update_fields=['status', 'last_message']) - send_m3u_update(account_id, "parsing", 100, status="error", error=f"Error refreshing M3U groups: {str(e)}") - release_task_lock('refresh_single_m3u_account', account_id) + account.save(update_fields=["status", "last_message"]) + send_m3u_update( + account_id, + "parsing", + 100, + status="error", + error=f"Error refreshing M3U groups: {str(e)}", + ) + release_task_lock("refresh_single_m3u_account", account_id) return "Failed to update m3u account" # Only proceed with parsing if we actually have data and no errors were encountered @@ -1352,37 +1717,53 @@ def refresh_single_m3u_account(account_id): logger.error(f"No data to process for account {account_id}") account.status = M3UAccount.Status.ERROR account.last_message = "No data available for processing" - account.save(update_fields=['status', 'last_message']) - send_m3u_update(account_id, "parsing", 100, status="error", error="No data available for processing") - release_task_lock('refresh_single_m3u_account', account_id) + account.save(update_fields=["status", "last_message"]) + send_m3u_update( + account_id, + "parsing", + 100, + status="error", + error="No data available for processing", + ) + release_task_lock("refresh_single_m3u_account", account_id) return "Failed to update m3u account, no data available" hash_keys = CoreSettings.get_m3u_hash_key().split(",") - existing_groups = {group.name: group.id for group in ChannelGroup.objects.filter( - m3u_account__m3u_account=account, # Filter by the M3UAccount - m3u_account__enabled=True # Filter by the enabled flag in the join table - )} + existing_groups = { + group.name: group.id + for group in ChannelGroup.objects.filter( + m3u_account__m3u_account=account, # Filter by the M3UAccount + m3u_account__enabled=True, # Filter by the enabled flag in the join table + ) + } try: # Set status to parsing account.status = M3UAccount.Status.PARSING - account.save(update_fields=['status']) + account.save(update_fields=["status"]) if account.account_type == M3UAccount.Types.STADNARD: - logger.debug(f"Processing Standard account ({account_id}) with groups: {existing_groups}") + logger.debug( + f"Processing Standard account ({account_id}) with groups: {existing_groups}" + ) # Break into batches and process in parallel - batches = [extinf_data[i:i + BATCH_SIZE] for i in range(0, len(extinf_data), BATCH_SIZE)] - task_group = group(process_m3u_batch.s(account_id, batch, existing_groups, hash_keys) for batch in batches) + batches = [ + extinf_data[i : i + BATCH_SIZE] + for i in range(0, len(extinf_data), BATCH_SIZE) + ] + task_group = group( + process_m3u_batch.s(account_id, batch, existing_groups, hash_keys) + for batch in batches + ) else: # For XC accounts, get the groups with their custom properties containing xc_id logger.debug(f"Processing XC account with groups: {existing_groups}") # Get the ChannelGroupM3UAccount entries with their custom_properties channel_group_relationships = ChannelGroupM3UAccount.objects.filter( - m3u_account=account, - enabled=True - ).select_related('channel_group') + m3u_account=account, enabled=True + ).select_related("channel_group") filtered_groups = {} for rel in 
channel_group_relationships: @@ -1391,34 +1772,51 @@ def refresh_single_m3u_account(account_id): # Load the custom properties with the xc_id try: - custom_props = json.loads(rel.custom_properties) if rel.custom_properties else {} - if 'xc_id' in custom_props: + custom_props = ( + json.loads(rel.custom_properties) + if rel.custom_properties + else {} + ) + if "xc_id" in custom_props: filtered_groups[group_name] = { - 'xc_id': custom_props['xc_id'], - 'channel_group_id': group_id + "xc_id": custom_props["xc_id"], + "channel_group_id": group_id, } - logger.debug(f"Added group {group_name} with xc_id {custom_props['xc_id']}") + logger.debug( + f"Added group {group_name} with xc_id {custom_props['xc_id']}" + ) else: - logger.warning(f"No xc_id found in custom properties for group {group_name}") + logger.warning( + f"No xc_id found in custom properties for group {group_name}" + ) except (json.JSONDecodeError, KeyError) as e: - logger.error(f"Error parsing custom properties for group {group_name}: {str(e)}") + logger.error( + f"Error parsing custom properties for group {group_name}: {str(e)}" + ) - logger.info(f"Filtered {len(filtered_groups)} groups for processing: {filtered_groups}") + logger.info( + f"Filtered {len(filtered_groups)} groups for processing: {filtered_groups}" + ) # Batch the groups filtered_groups_list = list(filtered_groups.items()) batches = [ - dict(filtered_groups_list[i:i + 2]) + dict(filtered_groups_list[i : i + 2]) for i in range(0, len(filtered_groups_list), 2) ] logger.info(f"Created {len(batches)} batches for XC processing") - task_group = group(process_xc_category.s(account_id, batch, existing_groups, hash_keys) for batch in batches) + task_group = group( + process_xc_category.s(account_id, batch, existing_groups, hash_keys) + for batch in batches + ) total_batches = len(batches) completed_batches = 0 streams_processed = 0 # Track total streams processed - logger.debug(f"Dispatched {len(batches)} parallel tasks for account_id={account_id}.") + logger.debug( + f"Dispatched {len(batches)} parallel tasks for account_id={account_id}." 
+ ) # result = task_group.apply_async() result = task_group.apply_async() @@ -1427,7 +1825,9 @@ def refresh_single_m3u_account(account_id): completed_task_ids = set() while completed_batches < total_batches: for async_result in result: - if async_result.ready() and async_result.id not in completed_task_ids: # If the task has completed and we haven't counted it + if ( + async_result.ready() and async_result.id not in completed_task_ids + ): # If the task has completed and we haven't counted it task_result = async_result.result # The result of the task logger.debug(f"Task completed with result: {task_result}") @@ -1447,7 +1847,9 @@ def refresh_single_m3u_account(account_id): pass completed_batches += 1 - completed_task_ids.add(async_result.id) # Mark this task as processed + completed_task_ids.add( + async_result.id + ) # Mark this task as processed # Calculate progress progress = int((completed_batches / total_batches) * 100) @@ -1471,7 +1873,7 @@ def refresh_single_m3u_account(account_id): progress, elapsed_time=current_elapsed, time_remaining=time_remaining, - streams_processed=streams_processed + streams_processed=streams_processed, ) # Optionally remove completed task from the group to prevent processing it again @@ -1480,9 +1882,13 @@ def refresh_single_m3u_account(account_id): logger.trace(f"Task is still running.") # Ensure all database transactions are committed before cleanup - logger.info(f"All {total_batches} tasks completed, ensuring DB transactions are committed before cleanup") + logger.info( + f"All {total_batches} tasks completed, ensuring DB transactions are committed before cleanup" + ) # Force a simple DB query to ensure connection sync - Stream.objects.filter(id=-1).exists() # This will never find anything but ensures DB sync + Stream.objects.filter( + id=-1 + ).exists() # This will never find anything but ensures DB sync # Now run cleanup streams_deleted = cleanup_streams(account_id, refresh_start_timestamp) @@ -1490,12 +1896,18 @@ def refresh_single_m3u_account(account_id): # Run auto channel sync after successful refresh auto_sync_message = "" try: - sync_result = sync_auto_channels(account_id, scan_start_time=str(refresh_start_timestamp)) - logger.info(f"Auto channel sync result for account {account_id}: {sync_result}") + sync_result = sync_auto_channels( + account_id, scan_start_time=str(refresh_start_timestamp) + ) + logger.info( + f"Auto channel sync result for account {account_id}: {sync_result}" + ) if sync_result and "created" in sync_result: auto_sync_message = f" {sync_result}." 
except Exception as e: - logger.error(f"Error running auto channel sync for account {account_id}: {str(e)}") + logger.error( + f"Error running auto channel sync for account {account_id}: {str(e)}" + ) # Calculate elapsed time elapsed_time = time.time() - start_time @@ -1508,7 +1920,7 @@ def refresh_single_m3u_account(account_id): f"Total processed: {streams_processed}.{auto_sync_message}" ) account.updated_at = timezone.now() - account.save(update_fields=['status', 'last_message', 'updated_at']) + account.save(update_fields=["status", "last_message", "updated_at"]) # Send final update with complete metrics and explicitly include success status send_m3u_update( @@ -1522,21 +1934,22 @@ def refresh_single_m3u_account(account_id): streams_created=streams_created, streams_updated=streams_updated, streams_deleted=streams_deleted, - message=account.last_message + message=account.last_message, ) except Exception as e: logger.error(f"Error processing M3U for account {account_id}: {str(e)}") account.status = M3UAccount.Status.ERROR account.last_message = f"Error processing M3U: {str(e)}" - account.save(update_fields=['status', 'last_message']) + account.save(update_fields=["status", "last_message"]) raise # Re-raise the exception for Celery to handle - release_task_lock('refresh_single_m3u_account', account_id) + release_task_lock("refresh_single_m3u_account", account_id) # Aggressive garbage collection del existing_groups, extinf_data, groups, batches from core.utils import cleanup_memory + cleanup_memory(log_usage=True, force_collection=True) # Clean up cache file since we've fully processed it @@ -1545,6 +1958,7 @@ def refresh_single_m3u_account(account_id): return f"Dispatched jobs complete." + def send_m3u_update(account_id, action, progress, **kwargs): # Start with the base data dictionary data = { @@ -1567,7 +1981,7 @@ def send_m3u_update(account_id, action, progress, **kwargs): # Add the additional key-value pairs from kwargs data.update(kwargs) - send_websocket_update('updates', 'update', data, collect_garbage=False) + send_websocket_update("updates", "update", data, collect_garbage=False) # Explicitly clear data reference to help garbage collection data = None diff --git a/frontend/src/api.js b/frontend/src/api.js index ddaccbc7..a6998bc2 100644 --- a/frontend/src/api.js +++ b/frontend/src/api.js @@ -256,7 +256,7 @@ export default class API { hasChannels: false, hasM3UAccounts: false, canEdit: true, - canDelete: true + canDelete: true, }; useChannelsStore.getState().addChannelGroup(processedGroup); // Refresh channel groups to update the UI @@ -736,10 +736,13 @@ export default class API { static async updateM3UGroupSettings(playlistId, groupSettings) { try { - const response = await request(`${host}/api/m3u/accounts/${playlistId}/group-settings/`, { - method: 'PATCH', - body: { group_settings: groupSettings }, - }); + const response = await request( + `${host}/api/m3u/accounts/${playlistId}/group-settings/`, + { + method: 'PATCH', + body: { group_settings: groupSettings }, + } + ); // Fetch the updated playlist and update the store const updatedPlaylist = await API.getPlaylist(playlistId); usePlaylistsStore.getState().updatePlaylist(updatedPlaylist); @@ -1110,6 +1113,48 @@ export default class API { } } + static async addM3UFilter(accountId, values) { + try { + const response = await request( + `${host}/api/m3u/accounts/${accountId}/filters/`, + { + method: 'POST', + body: values, + } + ); + + return response; + } catch (e) { + errorNotification(`Failed to add profile to account 
${accountId}`, e); + } + } + + static async deleteM3UFilter(accountId, id) { + try { + await request(`${host}/api/m3u/accounts/${accountId}/filters/${id}/`, { + method: 'DELETE', + }); + } catch (e) { + errorNotification(`Failed to delete profile for account ${accountId}`, e); + } + } + + static async updateM3UFilter(accountId, filterId, values) { + const { id, ...payload } = values; + + try { + await request( + `${host}/api/m3u/accounts/${accountId}/filters/${filterId}/`, + { + method: 'PUT', + body: payload, + } + ); + } catch (e) { + errorNotification(`Failed to update profile for account ${accountId}`, e); + } + } + static async getSettings() { try { const response = await request(`${host}/api/core/settings/`); @@ -1230,7 +1275,9 @@ export default class API { static async getLogos(params = {}) { try { const queryParams = new URLSearchParams(params); - const response = await request(`${host}/api/channels/logos/?${queryParams.toString()}`); + const response = await request( + `${host}/api/channels/logos/?${queryParams.toString()}` + ); return response; } catch (e) { @@ -1369,7 +1416,7 @@ export default class API { }); // Remove multiple logos from store - ids.forEach(id => { + ids.forEach((id) => { useChannelsStore.getState().removeLogo(id); }); diff --git a/frontend/src/components/forms/M3U.jsx b/frontend/src/components/forms/M3U.jsx index 0e4d5643..3d55d31b 100644 --- a/frontend/src/components/forms/M3U.jsx +++ b/frontend/src/components/forms/M3U.jsx @@ -27,6 +27,7 @@ import usePlaylistsStore from '../../store/playlists'; import { notifications } from '@mantine/notifications'; import { isNotEmpty, useForm } from '@mantine/form'; import useEPGsStore from '../../store/epgs'; +import M3UFilters from './M3UFilters'; const M3U = ({ m3uAccount = null, @@ -45,6 +46,7 @@ const M3U = ({ const [file, setFile] = useState(null); const [profileModalOpen, setProfileModalOpen] = useState(false); const [groupFilterModalOpen, setGroupFilterModalOpen] = useState(false); + const [filterModalOpen, setFilterModalOpen] = useState(false); const [loadingText, setLoadingText] = useState(''); const [showCredentialFields, setShowCredentialFields] = useState(false); @@ -85,7 +87,11 @@ const M3U = ({ account_type: m3uAccount.account_type, username: m3uAccount.username ?? '', password: '', - stale_stream_days: m3uAccount.stale_stream_days !== undefined && m3uAccount.stale_stream_days !== null ? m3uAccount.stale_stream_days : 7, + stale_stream_days: + m3uAccount.stale_stream_days !== undefined && + m3uAccount.stale_stream_days !== null + ? m3uAccount.stale_stream_days + : 7, }); if (m3uAccount.account_type == 'XC') { @@ -145,7 +151,8 @@ const M3U = ({ if (values.account_type != 'XC') { notifications.show({ title: 'Fetching M3U Groups', - message: 'Configure group filters and auto sync settings once complete.', + message: + 'Configure group filters and auto sync settings once complete.', }); // Don't prompt for group filters, but keeping this here @@ -177,7 +184,10 @@ const M3U = ({ const closeGroupFilter = () => { setGroupFilterModalOpen(false); - close(); + }; + + const closeFilter = () => { + setFilterModalOpen(false); }; useEffect(() => { @@ -224,7 +234,12 @@ const M3U = ({ id="account_type" name="account_type" label="Account Type" - description={<>Standard for direct M3U URLs,
    Xtream Codes for panel-based services</>}
+ description={
+   <>
+     Standard for direct M3U URLs,<br />
+     Xtream Codes for panel-based services
+   </>
+ }
 data={[
 { value: 'STD',
@@ -316,8 +331,13 @@ const M3U = ({
- description={<>How often to automatically refresh M3U data<br />
- (0 to disable automatic refreshes)</>}
+ description={
+   <>
+     How often to automatically refresh M3U data
+     <br />
+     (0 to disable automatic refreshes)
+   </>
+ }
 {...form.getInputProps('refresh_interval')}
 key={form.key('refresh_interval')}
 />
@@ -342,6 +362,13 @@ const M3U = ({
 {playlist && (
 <>
+
+
+
+
+ {/* Description */}
+ {displayVOD.description && (
+
+ Description
+
+ {displayVOD.description}
+
+
+ )}
+
+ {/* YouTube trailer if available */}
+ {displayVOD.youtube_trailer && (
+
+ Trailer
+
+
+ )}
+
+
    + ); +}; + const MIN_CARD_WIDTH = 260; const MAX_CARD_WIDTH = 320; @@ -316,7 +551,10 @@ const VODsPage = () => { const showVideo = useVideoStore((s) => s.showVideo); const [selectedSeries, setSelectedSeries] = useState(null); + const [selectedVOD, setSelectedVOD] = useState(null); const [seriesModalOpened, { open: openSeriesModal, close: closeSeriesModal }] = useDisclosure(false); + const [vodModalOpened, { open: openVODModal, close: closeVODModal }] = useDisclosure(false); + const [initialLoad, setInitialLoad] = useState(true); const columns = useCardColumns(); useEffect(() => { @@ -325,9 +563,9 @@ const VODsPage = () => { useEffect(() => { if (filters.type === 'series') { - fetchSeries(); + fetchSeries().finally(() => setInitialLoad(false)); } else { - fetchVODs(); + fetchVODs().finally(() => setInitialLoad(false)); } }, [filters, currentPage, fetchVODs, fetchSeries]); @@ -339,7 +577,12 @@ const VODsPage = () => { } else { streamUrl = `${window.location.origin}${vod.stream_url}`; } - showVideo(streamUrl, 'vod'); // Specify VOD content type + showVideo(streamUrl, 'vod', vod); + }; + + const handleVODCardClick = (vod) => { + setSelectedVOD(vod); + openVODModal(); }; const handleSeriesClick = (series) => { @@ -395,7 +638,7 @@ const VODsPage = () => { {/* Content */} - {loading ? ( + {initialLoad ? ( @@ -424,7 +667,7 @@ const VODsPage = () => { key={vod.id} style={{ minWidth: MIN_CARD_WIDTH, maxWidth: MAX_CARD_WIDTH, margin: '0 auto' }} > - + ))} @@ -450,6 +693,13 @@ const VODsPage = () => { opened={seriesModalOpened} onClose={closeSeriesModal} /> + + {/* VOD Details Modal */} + ); }; diff --git a/frontend/src/store/useVODStore.jsx b/frontend/src/store/useVODStore.jsx index cb79701b..249182a8 100644 --- a/frontend/src/store/useVODStore.jsx +++ b/frontend/src/store/useVODStore.jsx @@ -28,8 +28,8 @@ const useVODStore = create((set, get) => ({ })), fetchVODs: async () => { - set({ loading: true, error: null }); try { + set({ loading: true, error: null }); const state = get(); const params = new URLSearchParams(); @@ -126,6 +126,96 @@ const useVODStore = create((set, get) => ({ } }, + fetchVODDetails: async (vodId) => { + set({ loading: true, error: null }); + try { + const response = await api.getVODInfo(vodId); + + // Transform the response data to match our expected format + const vodDetails = { + id: response.id || vodId, + name: response.name || '', + description: response.description || '', + year: response.year || null, + genre: response.genre || '', + rating: response.rating || '', + duration: response.duration || null, + stream_url: response.stream_url || '', + logo: response.logo || null, + type: 'movie', + director: response.director || '', + actors: response.actors || '', + country: response.country || '', + tmdb_id: response.tmdb_id || '', + youtube_trailer: response.youtube_trailer || '', + }; + + set((state) => ({ + vods: { + ...state.vods, + [vodDetails.id]: vodDetails, + }, + loading: false, + })); + + return vodDetails; + } catch (error) { + console.error('Failed to fetch VOD details:', error); + set({ error: 'Failed to load VOD details.', loading: false }); + throw error; + } + }, + + fetchVODDetailsFromProvider: async (vodId) => { + set({ loading: true, error: null }); + try { + const response = await api.getVODInfoFromProvider(vodId); + + // Transform the response data to match our expected format + const vodDetails = { + id: response.id || vodId, + name: response.name || '', + description: response.description || response.plot || '', + year: response.year || null, + 
genre: response.genre || '', + rating: response.rating || '', + duration: response.duration || null, + stream_url: response.stream_url || '', + logo: response.logo || response.cover || null, + type: 'movie', + director: response.director || '', + actors: response.actors || response.cast || '', + country: response.country || '', + tmdb_id: response.tmdb_id || '', + youtube_trailer: response.youtube_trailer || '', + // Additional provider fields + backdrop_path: response.backdrop_path || [], + release_date: response.release_date || response.releasedate || '', + movie_image: response.movie_image || null, + o_name: response.o_name || '', + age: response.age || '', + episode_run_time: response.episode_run_time || null, + bitrate: response.bitrate || 0, + video: response.video || {}, + audio: response.audio || {}, + }; + + set((state) => ({ + vods: { + ...state.vods, + [vodDetails.id]: vodDetails, + }, + loading: false, + })); + + return vodDetails; + } catch (error) { + console.error('Failed to fetch VOD details from provider:', error); + set({ error: 'Failed to load VOD details from provider.', loading: false }); + throw error; + } + }, + fetchCategories: async () => { try { const response = await api.getVODCategories(); diff --git a/frontend/src/store/useVideoStore.jsx b/frontend/src/store/useVideoStore.jsx index 4aa721ed..1ac21542 100644 --- a/frontend/src/store/useVideoStore.jsx +++ b/frontend/src/store/useVideoStore.jsx @@ -8,12 +8,14 @@ const useVideoStore = create((set) => ({ isVisible: false, streamUrl: null, contentType: 'live', // 'live' for MPEG-TS streams, 'vod' for MP4/MKV files + metadata: null, // Store additional metadata for VOD content - showVideo: (url, type = 'live') => + showVideo: (url, type = 'live', metadata = null) => set({ isVisible: true, streamUrl: url, contentType: type, + metadata: metadata, }), hideVideo: () => @@ -21,6 +23,7 @@ const useVideoStore = create((set) => ({ isVisible: false, streamUrl: null, contentType: 'live', + metadata: null, }), })); From 10ab3e4bd811b0874a8c61954a924ce669b38559 Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Mon, 4 Aug 2025 17:28:20 -0500 Subject: [PATCH 0707/1435] Fix movie link not building correctly for web player. 
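
The play handler previously reused the provider-supplied `stream_url`, which the
built-in web player could not load reliably. The diff below switches it to the
app's own VOD proxy route, keyed by the movie's UUID. A sketch of the resulting
logic (names such as `env_mode`, `showVideo`, `vodToPlay`, the
`/proxy/vod/movie/<uuid>` route, and the dev port 5656 are taken from the change
itself, not invented here):

    let streamUrl = `/proxy/vod/movie/${vod.uuid}`;
    if (env_mode === 'dev') {
      // dev mode: the backend is reached directly on port 5656
      streamUrl = `${window.location.protocol}//${window.location.hostname}:5656${streamUrl}`;
    } else {
      // production: the proxy is served from the same origin as the UI
      streamUrl = `${window.location.origin}${streamUrl}`;
    }
    showVideo(streamUrl, 'vod', vodToPlay);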
--- frontend/src/pages/VODs.jsx | 17 +++-------------- 1 file changed, 3 insertions(+), 14 deletions(-) diff --git a/frontend/src/pages/VODs.jsx b/frontend/src/pages/VODs.jsx index c7c0d296..7138b940 100644 --- a/frontend/src/pages/VODs.jsx +++ b/frontend/src/pages/VODs.jsx @@ -314,11 +314,11 @@ const VODModal = ({ vod, opened, onClose }) => { const vodToPlay = detailedVOD || vod; if (!vodToPlay) return; - let streamUrl = vodToPlay.stream_url; + let streamUrl = `/proxy/vod/movie/${vod.uuid}`; if (env_mode === 'dev') { - streamUrl = `${window.location.protocol}//${window.location.hostname}:5656${vodToPlay.stream_url}`; + streamUrl = `${window.location.protocol}//${window.location.hostname}:5656${streamUrl}`; } else { - streamUrl = `${window.location.origin}${vodToPlay.stream_url}`; + streamUrl = `${window.location.origin}${streamUrl}`; } showVideo(streamUrl, 'vod', vodToPlay); }; @@ -569,17 +569,6 @@ const VODsPage = () => { } }, [filters, currentPage, fetchVODs, fetchSeries]); - const env_mode = useSettingsStore((s) => s.environment.env_mode); - const handlePlayVOD = (vod) => { - let streamUrl = vod.stream_url; - if (env_mode === 'dev') { - streamUrl = `${window.location.protocol}//${window.location.hostname}:5656${vod.stream_url}`; - } else { - streamUrl = `${window.location.origin}${vod.stream_url}`; - } - showVideo(streamUrl, 'vod', vod); - }; - const handleVODCardClick = (vod) => { setSelectedVOD(vod); openVODModal(); From d917a3a915025c63edfb54bb6b742e9d39c7916e Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Mon, 4 Aug 2025 18:21:18 -0500 Subject: [PATCH 0708/1435] Rearranged data. --- frontend/src/pages/VODs.jsx | 152 ++++++++++++++++++++--------- frontend/src/store/useVODStore.jsx | 3 +- 2 files changed, 106 insertions(+), 49 deletions(-) diff --git a/frontend/src/pages/VODs.jsx b/frontend/src/pages/VODs.jsx index 7138b940..fb877aa7 100644 --- a/frontend/src/pages/VODs.jsx +++ b/frontend/src/pages/VODs.jsx @@ -345,7 +345,7 @@ const VODModal = ({ vod, opened, onClose }) => { > {loadingDetails && ( - + Loading additional details... @@ -443,67 +443,123 @@ const VODModal = ({ vod, opened, onClose }) => {
    )} - {/* Technical info */} - {(displayVOD.bitrate || displayVOD.video || displayVOD.audio) && ( - - Technical Details: - {displayVOD.bitrate && displayVOD.bitrate > 0 && ( - - Bitrate: {displayVOD.bitrate} kbps - - )} - {displayVOD.video && Object.keys(displayVOD.video).length > 0 && ( - - Video: {JSON.stringify(displayVOD.video)} - - )} - {displayVOD.audio && Object.keys(displayVOD.audio).length > 0 && ( - - Audio: {JSON.stringify(displayVOD.audio)} - - )} - + {/* Description */} + {displayVOD.description && ( + + Description + + {displayVOD.description} + + )} + {/* Watch Trailer button at top */} + {displayVOD.youtube_trailer && ( + + )} + {/* Removed Play Movie button from here */} + + + {/* Provider Information & Play Button Row */} + {(vod?.m3u_account || true) && ( + + {vod?.m3u_account && ( + + IPTV Provider + + + {vod.m3u_account.name} + + {vod.m3u_account.account_type && ( + + {vod.m3u_account.account_type === 'XC' ? 'Xtream Codes' : 'Standard M3U'} + + )} + + + )} + + )} + {/* Technical Details */} + {(displayVOD.bitrate || displayVOD.video || displayVOD.audio) && ( + + Technical Details: + {displayVOD.bitrate && displayVOD.bitrate > 0 && ( + + Bitrate: {displayVOD.bitrate} kbps + + )} + {displayVOD.video && Object.keys(displayVOD.video).length > 0 && ( + + Video:{' '} + {displayVOD.video.codec_long_name || displayVOD.video.codec_name} + {displayVOD.video.profile ? ` (${displayVOD.video.profile})` : ''} + {displayVOD.video.width && displayVOD.video.height + ? `, ${displayVOD.video.width}x${displayVOD.video.height}` + : ''} + {displayVOD.video.display_aspect_ratio + ? `, Aspect Ratio: ${displayVOD.video.display_aspect_ratio}` + : ''} + {displayVOD.video.bit_rate + ? `, Bitrate: ${Math.round(Number(displayVOD.video.bit_rate) / 1000)} kbps` + : ''} + {displayVOD.video.r_frame_rate + ? `, Frame Rate: ${displayVOD.video.r_frame_rate.replace('/', '/')} fps` + : ''} + {displayVOD.video.tags?.encoder + ? `, Encoder: ${displayVOD.video.tags.encoder}` + : ''} + + )} + {displayVOD.audio && Object.keys(displayVOD.audio).length > 0 && ( + + Audio:{' '} + {displayVOD.audio.codec_long_name || displayVOD.audio.codec_name} + {displayVOD.audio.profile ? ` (${displayVOD.audio.profile})` : ''} + {displayVOD.audio.channel_layout + ? `, Channels: ${displayVOD.audio.channel_layout}` + : displayVOD.audio.channels + ? `, Channels: ${displayVOD.audio.channels}` + : ''} + {displayVOD.audio.sample_rate + ? `, Sample Rate: ${displayVOD.audio.sample_rate} Hz` + : ''} + {displayVOD.audio.bit_rate + ? `, Bitrate: ${Math.round(Number(displayVOD.audio.bit_rate) / 1000)} kbps` + : ''} + {displayVOD.audio.tags?.handler_name + ? 
`, Handler: ${displayVOD.audio.tags.handler_name}` + : ''} + + )} - - - {/* Description */} - {displayVOD.description && ( - - Description - - {displayVOD.description} - - )} - {/* YouTube trailer if available */} - {displayVOD.youtube_trailer && ( - - Trailer - - - )} ); diff --git a/frontend/src/store/useVODStore.jsx b/frontend/src/store/useVODStore.jsx index 249182a8..a8436507 100644 --- a/frontend/src/store/useVODStore.jsx +++ b/frontend/src/store/useVODStore.jsx @@ -148,8 +148,9 @@ const useVODStore = create((set, get) => ({ country: response.country || '', tmdb_id: response.tmdb_id || '', youtube_trailer: response.youtube_trailer || '', + m3u_account: response.m3u_account || '', }; - + console.log('Fetched VOD Details:', vodDetails); set((state) => ({ vods: { ...state.vods, From b19efd2f753529cf26fa5a980563ef35791bf9be Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Mon, 4 Aug 2025 18:24:45 -0500 Subject: [PATCH 0709/1435] Use backdrop image as background for modal. --- frontend/src/pages/VODs.jsx | 433 +++++++++++++++++++----------------- 1 file changed, 230 insertions(+), 203 deletions(-) diff --git a/frontend/src/pages/VODs.jsx b/frontend/src/pages/VODs.jsx index fb877aa7..d391b887 100644 --- a/frontend/src/pages/VODs.jsx +++ b/frontend/src/pages/VODs.jsx @@ -343,224 +343,251 @@ const VODModal = ({ vod, opened, onClose }) => { size="xl" centered > - - {loadingDetails && ( - - - Loading additional details... - - )} - - {/* Backdrop image if available */} + + {/* Backdrop image as background */} {displayVOD.backdrop_path && displayVOD.backdrop_path.length > 0 && ( - + <> {`${displayVOD.name} - - )} - - {/* Movie poster and basic info */} - - {/* Use movie_image or logo */} - {(displayVOD.movie_image || displayVOD.logo?.url) ? ( - - {displayVOD.name} - - ) : ( + {/* Overlay for readability */} - - - )} - - - {displayVOD.name} - - {/* Original name if different */} - {displayVOD.o_name && displayVOD.o_name !== displayVOD.name && ( - - Original: {displayVOD.o_name} - + /> + + )} + {/* Modal content above backdrop */} + + + {loadingDetails && ( + + + Loading additional details... + )} - - {displayVOD.year && {displayVOD.year}} - {displayVOD.duration && {formatDuration(displayVOD.duration)}} - {displayVOD.rating && {displayVOD.rating}} - {displayVOD.age && {displayVOD.age}} - Movie - - - {/* Release date */} - {displayVOD.release_date && ( - - Release Date: {displayVOD.release_date} - - )} - - {displayVOD.genre && ( - - Genre: {displayVOD.genre} - - )} - - {displayVOD.director && ( - - Director: {displayVOD.director} - - )} - - {displayVOD.actors && ( - - Cast: {displayVOD.actors} - - )} - - {displayVOD.country && ( - - Country: {displayVOD.country} - - )} - - {/* Description */} - {displayVOD.description && ( - - Description - - {displayVOD.description} - - - )} - - {/* Watch Trailer button at top */} - {displayVOD.youtube_trailer && ( - - )} - {/* Removed Play Movie button from here */} - - - {/* Provider Information & Play Button Row */} - {(vod?.m3u_account || true) && ( - - {vod?.m3u_account && ( - - IPTV Provider - - - {vod.m3u_account.name} - - {vod.m3u_account.account_type && ( - - {vod.m3u_account.account_type === 'XC' ? 
'Xtream Codes' : 'Standard M3U'} - - )} - - - )} - - - )} - {/* Technical Details */} - {(displayVOD.bitrate || displayVOD.video || displayVOD.audio) && ( - - Technical Details: - {displayVOD.bitrate && displayVOD.bitrate > 0 && ( - - Bitrate: {displayVOD.bitrate} kbps - + + + {displayVOD.name} + + {/* Original name if different */} + {displayVOD.o_name && displayVOD.o_name !== displayVOD.name && ( + + Original: {displayVOD.o_name} + + )} + + + {displayVOD.year && {displayVOD.year}} + {displayVOD.duration && {formatDuration(displayVOD.duration)}} + {displayVOD.rating && {displayVOD.rating}} + {displayVOD.age && {displayVOD.age}} + Movie + + + {/* Release date */} + {displayVOD.release_date && ( + + Release Date: {displayVOD.release_date} + + )} + + {displayVOD.genre && ( + + Genre: {displayVOD.genre} + + )} + + {displayVOD.director && ( + + Director: {displayVOD.director} + + )} + + {displayVOD.actors && ( + + Cast: {displayVOD.actors} + + )} + + {displayVOD.country && ( + + Country: {displayVOD.country} + + )} + + {/* Description */} + {displayVOD.description && ( + + Description + + {displayVOD.description} + + + )} + + {/* Watch Trailer button at top */} + {displayVOD.youtube_trailer && ( + + )} + {/* Removed Play Movie button from here */} + + + {/* Provider Information & Play Button Row */} + {(vod?.m3u_account || true) && ( + + {vod?.m3u_account && ( + + IPTV Provider + + + {vod.m3u_account.name} + + {vod.m3u_account.account_type && ( + + {vod.m3u_account.account_type === 'XC' ? 'Xtream Codes' : 'Standard M3U'} + + )} + + + )} + + )} - {displayVOD.video && Object.keys(displayVOD.video).length > 0 && ( - - Video:{' '} - {displayVOD.video.codec_long_name || displayVOD.video.codec_name} - {displayVOD.video.profile ? ` (${displayVOD.video.profile})` : ''} - {displayVOD.video.width && displayVOD.video.height - ? `, ${displayVOD.video.width}x${displayVOD.video.height}` - : ''} - {displayVOD.video.display_aspect_ratio - ? `, Aspect Ratio: ${displayVOD.video.display_aspect_ratio}` - : ''} - {displayVOD.video.bit_rate - ? `, Bitrate: ${Math.round(Number(displayVOD.video.bit_rate) / 1000)} kbps` - : ''} - {displayVOD.video.r_frame_rate - ? `, Frame Rate: ${displayVOD.video.r_frame_rate.replace('/', '/')} fps` - : ''} - {displayVOD.video.tags?.encoder - ? `, Encoder: ${displayVOD.video.tags.encoder}` - : ''} - - )} - {displayVOD.audio && Object.keys(displayVOD.audio).length > 0 && ( - - Audio:{' '} - {displayVOD.audio.codec_long_name || displayVOD.audio.codec_name} - {displayVOD.audio.profile ? ` (${displayVOD.audio.profile})` : ''} - {displayVOD.audio.channel_layout - ? `, Channels: ${displayVOD.audio.channel_layout}` - : displayVOD.audio.channels - ? `, Channels: ${displayVOD.audio.channels}` - : ''} - {displayVOD.audio.sample_rate - ? `, Sample Rate: ${displayVOD.audio.sample_rate} Hz` - : ''} - {displayVOD.audio.bit_rate - ? `, Bitrate: ${Math.round(Number(displayVOD.audio.bit_rate) / 1000)} kbps` - : ''} - {displayVOD.audio.tags?.handler_name - ? `, Handler: ${displayVOD.audio.tags.handler_name}` - : ''} - + {/* Technical Details */} + {(displayVOD.bitrate || displayVOD.video || displayVOD.audio) && ( + + Technical Details: + {displayVOD.bitrate && displayVOD.bitrate > 0 && ( + + Bitrate: {displayVOD.bitrate} kbps + + )} + {displayVOD.video && Object.keys(displayVOD.video).length > 0 && ( + + Video:{' '} + {displayVOD.video.codec_long_name || displayVOD.video.codec_name} + {displayVOD.video.profile ? 
` (${displayVOD.video.profile})` : ''} + {displayVOD.video.width && displayVOD.video.height + ? `, ${displayVOD.video.width}x${displayVOD.video.height}` + : ''} + {displayVOD.video.display_aspect_ratio + ? `, Aspect Ratio: ${displayVOD.video.display_aspect_ratio}` + : ''} + {displayVOD.video.bit_rate + ? `, Bitrate: ${Math.round(Number(displayVOD.video.bit_rate) / 1000)} kbps` + : ''} + {displayVOD.video.r_frame_rate + ? `, Frame Rate: ${displayVOD.video.r_frame_rate.replace('/', '/')} fps` + : ''} + {displayVOD.video.tags?.encoder + ? `, Encoder: ${displayVOD.video.tags.encoder}` + : ''} + + )} + {displayVOD.audio && Object.keys(displayVOD.audio).length > 0 && ( + + Audio:{' '} + {displayVOD.audio.codec_long_name || displayVOD.audio.codec_name} + {displayVOD.audio.profile ? ` (${displayVOD.audio.profile})` : ''} + {displayVOD.audio.channel_layout + ? `, Channels: ${displayVOD.audio.channel_layout}` + : displayVOD.audio.channels + ? `, Channels: ${displayVOD.audio.channels}` + : ''} + {displayVOD.audio.sample_rate + ? `, Sample Rate: ${displayVOD.audio.sample_rate} Hz` + : ''} + {displayVOD.audio.bit_rate + ? `, Bitrate: ${Math.round(Number(displayVOD.audio.bit_rate) / 1000)} kbps` + : ''} + {displayVOD.audio.tags?.handler_name + ? `, Handler: ${displayVOD.audio.tags.handler_name}` + : ''} + + )} + )} + {/* YouTube trailer if available */} - )} - {/* YouTube trailer if available */} - + + ); }; From 36450af23fffe808486b7b1b8af2c88b7df76dda Mon Sep 17 00:00:00 2001 From: SergeantPanda Date: Mon, 4 Aug 2025 18:30:33 -0500 Subject: [PATCH 0710/1435] Fix youtube links not loading. --- frontend/src/pages/VODs.jsx | 530 +++++++++++++++++++----------------- 1 file changed, 287 insertions(+), 243 deletions(-) diff --git a/frontend/src/pages/VODs.jsx b/frontend/src/pages/VODs.jsx index d391b887..8a4b4ab7 100644 --- a/frontend/src/pages/VODs.jsx +++ b/frontend/src/pages/VODs.jsx @@ -282,6 +282,8 @@ const SeriesModal = ({ series, opened, onClose }) => { const VODModal = ({ vod, opened, onClose }) => { const [detailedVOD, setDetailedVOD] = useState(null); const [loadingDetails, setLoadingDetails] = useState(false); + const [trailerModalOpened, setTrailerModalOpened] = useState(false); + const [trailerUrl, setTrailerUrl] = useState(''); const { fetchVODDetailsFromProvider } = useVODStore(); const showVideo = useVideoStore((s) => s.showVideo); const env_mode = useSettingsStore((s) => s.environment.env_mode); @@ -307,6 +309,8 @@ const VODModal = ({ vod, opened, onClose }) => { if (!opened) { setDetailedVOD(null); setLoadingDetails(false); + setTrailerModalOpened(false); + setTrailerUrl(''); } }, [opened]); @@ -330,265 +334,305 @@ const VODModal = ({ vod, opened, onClose }) => { return hours > 0 ? `${hours}h ${mins}m` : `${mins}m`; }; + // Helper to get embeddable YouTube URL + const getEmbedUrl = (url) => { + if (!url) return ''; + // Accepts full YouTube URLs or just IDs + const match = url.match(/(?:youtube\.com\/watch\?v=|youtu\.be\/)([\w-]+)/); + const videoId = match ? 
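+    // use the ID captured from a full URL; otherwise the input is assumed to already be a bare video ID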
match[1] : url; + return `https://www.youtube.com/embed/${videoId}`; + }; + if (!vod) return null; // Use detailed data if available, otherwise use basic vod data const displayVOD = detailedVOD || vod; return ( - - - {/* Backdrop image as background */} - {displayVOD.backdrop_path && displayVOD.backdrop_path.length > 0 && ( - <> - {`${displayVOD.name} + + + {/* Backdrop image as background */} + {displayVOD.backdrop_path && displayVOD.backdrop_path.length > 0 && ( + <> + {`${displayVOD.name} + {/* Overlay for readability */} + + + )} + {/* Modal content above backdrop */} + + + {loadingDetails && ( + + + Loading additional details... + + )} + + {/* Movie poster and basic info */} + + {/* Use movie_image or logo */} + {(displayVOD.movie_image || displayVOD.logo?.url) ? ( + + {displayVOD.name} + + ) : ( + + + + )} + + + {displayVOD.name} + + {/* Original name if different */} + {displayVOD.o_name && displayVOD.o_name !== displayVOD.name && ( + + Original: {displayVOD.o_name} + + )} + + + {displayVOD.year && {displayVOD.year}} + {displayVOD.duration && {formatDuration(displayVOD.duration)}} + {displayVOD.rating && {displayVOD.rating}} + {displayVOD.age && {displayVOD.age}} + Movie + + + {/* Release date */} + {displayVOD.release_date && ( + + Release Date: {displayVOD.release_date} + + )} + + {displayVOD.genre && ( + + Genre: {displayVOD.genre} + + )} + + {displayVOD.director && ( + + Director: {displayVOD.director} + + )} + + {displayVOD.actors && ( + + Cast: {displayVOD.actors} + + )} + + {displayVOD.country && ( + + Country: {displayVOD.country} + + )} + + {/* Description */} + {displayVOD.description && ( + + Description + + {displayVOD.description} + + + )} + + {/* Watch Trailer button at top */} + {displayVOD.youtube_trailer && ( + + )} + {/* Removed Play Movie button from here */} + + + {/* Provider Information & Play Button Row */} + {(vod?.m3u_account || true) && ( + + {vod?.m3u_account && ( + + IPTV Provider + + + {vod.m3u_account.name} + + {vod.m3u_account.account_type && ( + + {vod.m3u_account.account_type === 'XC' ? 'Xtream Codes' : 'Standard M3U'} + + )} + + + )} + + + )} + {/* Technical Details */} + {(displayVOD.bitrate || displayVOD.video || displayVOD.audio) && ( + + Technical Details: + {displayVOD.bitrate && displayVOD.bitrate > 0 && ( + + Bitrate: {displayVOD.bitrate} kbps + + )} + {displayVOD.video && Object.keys(displayVOD.video).length > 0 && ( + + Video:{' '} + {displayVOD.video.codec_long_name || displayVOD.video.codec_name} + {displayVOD.video.profile ? ` (${displayVOD.video.profile})` : ''} + {displayVOD.video.width && displayVOD.video.height + ? `, ${displayVOD.video.width}x${displayVOD.video.height}` + : ''} + {displayVOD.video.display_aspect_ratio + ? `, Aspect Ratio: ${displayVOD.video.display_aspect_ratio}` + : ''} + {displayVOD.video.bit_rate + ? `, Bitrate: ${Math.round(Number(displayVOD.video.bit_rate) / 1000)} kbps` + : ''} + {displayVOD.video.r_frame_rate + ? `, Frame Rate: ${displayVOD.video.r_frame_rate.replace('/', '/')} fps` + : ''} + {displayVOD.video.tags?.encoder + ? `, Encoder: ${displayVOD.video.tags.encoder}` + : ''} + + )} + {displayVOD.audio && Object.keys(displayVOD.audio).length > 0 && ( + + Audio:{' '} + {displayVOD.audio.codec_long_name || displayVOD.audio.codec_name} + {displayVOD.audio.profile ? ` (${displayVOD.audio.profile})` : ''} + {displayVOD.audio.channel_layout + ? `, Channels: ${displayVOD.audio.channel_layout}` + : displayVOD.audio.channels + ? 
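+                              // no channel layout string reported; fall back to the raw channel count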
`, Channels: ${displayVOD.audio.channels}` + : ''} + {displayVOD.audio.sample_rate + ? `, Sample Rate: ${displayVOD.audio.sample_rate} Hz` + : ''} + {displayVOD.audio.bit_rate + ? `, Bitrate: ${Math.round(Number(displayVOD.audio.bit_rate) / 1000)} kbps` + : ''} + {displayVOD.audio.tags?.handler_name + ? `, Handler: ${displayVOD.audio.tags.handler_name}` + : ''} + + )} + + )} + {/* YouTube trailer if available */} + + + + + {/* YouTube Trailer Modal */} + setTrailerModalOpened(false)} + title="Trailer" + size="xl" + centered + withCloseButton + > + + {trailerUrl && ( +
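
The getEmbedUrl helper added in this patch is self-contained, so it can be exercised outside the component. A minimal sketch, assuming the function is lifted out unchanged and run under Node; the video IDs below are placeholders:

    // Copy of the helper introduced above: it normalizes full YouTube URLs
    // or bare IDs into an embeddable URL for the trailer iframe.
    const getEmbedUrl = (url) => {
      if (!url) return '';
      // Accepts full YouTube URLs or just IDs
      const match = url.match(/(?:youtube\.com\/watch\?v=|youtu\.be\/)([\w-]+)/);
      const videoId = match ? match[1] : url;
      return `https://www.youtube.com/embed/${videoId}`;
    };

    // All three inputs normalize to https://www.youtube.com/embed/abc123XYZ_-
    console.log(getEmbedUrl('https://www.youtube.com/watch?v=abc123XYZ_-'));
    console.log(getEmbedUrl('https://youtu.be/abc123XYZ_-'));
    console.log(getEmbedUrl('abc123XYZ_-'));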