This commit is contained in:
SergeantPanda 2025-04-07 16:47:58 -05:00
commit 23c65a2f03
28 changed files with 570 additions and 224 deletions

View file

@ -0,0 +1,18 @@
# Generated by Django 5.1.6 on 2025-04-07 16:47
from django.db import migrations, models
class Migration(migrations.Migration):
    """Add a nullable free-form ``custom_properties`` column to Recording.

    Stored as TextField rather than JSONField; callers presumably serialize
    JSON into it themselves — TODO confirm against the recording UI/API.
    """

    # Must run after the migration that introduced the Recording model.
    dependencies = [
        ('dispatcharr_channels', '0014_recording'),
    ]

    operations = [
        migrations.AddField(
            model_name='recording',
            name='custom_properties',
            field=models.TextField(blank=True, null=True),
        ),
    ]

View file

@ -410,6 +410,7 @@ class Recording(models.Model):
# Scheduled recording window.
start_time = models.DateTimeField()
end_time = models.DateTimeField()
# Id of the background task scheduled for this recording, if any.
task_id = models.CharField(max_length=255, null=True, blank=True)
# Free-form serialized metadata; nullable. Presumably holds JSON — TODO confirm.
custom_properties = models.TextField(null=True, blank=True)

def __str__(self):
    # Human-readable label: channel name plus the recording window.
    return f"{self.channel.name} - {self.start_time} to {self.end_time}"

View file

@ -4,17 +4,15 @@ import os
import re
import requests
import time
import gc
import json
import subprocess
from datetime import datetime
from celery import shared_task
from rapidfuzz import fuzz
from django.conf import settings
from django.db import transaction
from django.utils.text import slugify
from apps.channels.models import Channel
from apps.epg.models import EPGData, EPGSource
from apps.epg.models import EPGData
from core.models import CoreSettings
from channels.layers import get_channel_layer
@ -22,15 +20,10 @@ from asgiref.sync import async_to_sync
from asgiref.sync import async_to_sync
from channels.layers import get_channel_layer
from core.utils import SentenceTransformer
import tempfile
logger = logging.getLogger(__name__)
# Thresholds
BEST_FUZZY_THRESHOLD = 85
LOWER_FUZZY_THRESHOLD = 40
EMBED_SIM_THRESHOLD = 0.65
# Words we remove to help with fuzzy + embedding matching
COMMON_EXTRANEOUS_WORDS = [
"tv", "channel", "network", "television",
@ -70,12 +63,8 @@ def match_epg_channels():
4) If a match is found, we set channel.tvg_id
5) Summarize and log results.
"""
from sentence_transformers import util
logger.info("Starting EPG matching logic...")
st_model = SentenceTransformer.get_model()
# Attempt to retrieve a "preferred-region" if configured
try:
region_obj = CoreSettings.objects.get(key="preferred-region")
@ -83,130 +72,61 @@ def match_epg_channels():
except CoreSettings.DoesNotExist:
region_code = None
# Gather EPGData rows so we can do fuzzy matching in memory
all_epg = {e.id: e for e in EPGData.objects.all()}
epg_rows = []
for e in list(all_epg.values()):
epg_rows.append({
"epg_id": e.id,
"tvg_id": e.tvg_id or "",
"raw_name": e.name,
"norm_name": normalize_name(e.name),
})
epg_embeddings = None
if any(row["norm_name"] for row in epg_rows):
epg_embeddings = st_model.encode(
[row["norm_name"] for row in epg_rows],
convert_to_tensor=True
)
matched_channels = []
channels_to_update = []
source = EPGSource.objects.filter(is_active=True).first()
epg_file_path = getattr(source, 'file_path', None) if source else None
channels_json = [{
"id": channel.id,
"name": channel.name,
"tvg_id": channel.tvg_id,
"fallback_name": channel.tvg_id.strip() if channel.tvg_id else channel.name,
"norm_chan": normalize_name(channel.tvg_id.strip() if channel.tvg_id else channel.name)
} for channel in Channel.objects.all() if not channel.epg_data]
with transaction.atomic():
for chan in Channel.objects.all():
# skip if channel already assigned an EPG
if chan.epg_data:
continue
epg_json = [{
'id': epg.id,
'tvg_id': epg.tvg_id,
'name': epg.name,
'norm_name': normalize_name(epg.name),
'epg_source_id': epg.epg_source.id,
} for epg in EPGData.objects.all()]
# If channel has a tvg_id that doesn't exist in EPGData, do direct check.
# I don't THINK this should happen now that we assign EPG on channel creation.
if chan.tvg_id:
epg_match = EPGData.objects.filter(tvg_id=chan.tvg_id).first()
if epg_match:
chan.epg_data = epg_match
logger.info(f"Channel {chan.id} '{chan.name}' => EPG found by tvg_id={chan.tvg_id}")
channels_to_update.append(chan)
continue
payload = {
"channels": channels_json,
"epg_data": epg_json,
"region_code": region_code,
}
# C) Perform name-based fuzzy matching
fallback_name = chan.tvg_id.strip() if chan.tvg_id else chan.name
norm_chan = normalize_name(fallback_name)
if not norm_chan:
logger.info(f"Channel {chan.id} '{chan.name}' => empty after normalization, skipping")
continue
with tempfile.NamedTemporaryFile(delete=False) as temp_file:
temp_file.write(json.dumps(payload).encode('utf-8'))
temp_file_path = temp_file.name
best_score = 0
best_epg = None
for row in epg_rows:
if not row["norm_name"]:
continue
base_score = fuzz.ratio(norm_chan, row["norm_name"])
bonus = 0
# Region-based bonus/penalty
combined_text = row["tvg_id"].lower() + " " + row["raw_name"].lower()
dot_regions = re.findall(r'\.([a-z]{2})', combined_text)
if region_code:
if dot_regions:
if region_code in dot_regions:
bonus = 30 # bigger bonus if .us or .ca matches
else:
bonus = -15
elif region_code in combined_text:
bonus = 15
score = base_score + bonus
process = subprocess.Popen(
['python', '/app/scripts/epg_match.py', temp_file_path],
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
text=True
)
logger.debug(
f"Channel {chan.id} '{fallback_name}' => EPG row {row['epg_id']}: "
f"raw_name='{row['raw_name']}', norm_name='{row['norm_name']}', "
f"combined_text='{combined_text}', dot_regions={dot_regions}, "
f"base_score={base_score}, bonus={bonus}, total_score={score}"
)
# Log stderr in real-time
for line in iter(process.stderr.readline, ''):
if line:
logger.info(line.strip())
if score > best_score:
best_score = score
best_epg = row
process.stderr.close()
stdout, stderr = process.communicate()
# If no best match was found, skip
if not best_epg:
logger.info(f"Channel {chan.id} '{fallback_name}' => no EPG match at all.")
continue
os.remove(temp_file_path)
# If best_score is above BEST_FUZZY_THRESHOLD => direct accept
if best_score >= BEST_FUZZY_THRESHOLD:
chan.epg_data = all_epg[best_epg["epg_id"]]
chan.save()
if process.returncode != 0:
return f"Failed to process EPG matching: {stderr}"
matched_channels.append((chan.id, fallback_name, best_epg["tvg_id"]))
logger.info(
f"Channel {chan.id} '{fallback_name}' => matched tvg_id={best_epg['tvg_id']} "
f"(score={best_score})"
)
result = json.loads(stdout)
channels_to_update = result["channels_to_update"]
matched_channels = result["matched_channels"]
# If best_score is in the “middle range,” do embedding check
elif best_score >= LOWER_FUZZY_THRESHOLD and epg_embeddings is not None:
chan_embedding = st_model.encode(norm_chan, convert_to_tensor=True)
sim_scores = util.cos_sim(chan_embedding, epg_embeddings)[0]
top_index = int(sim_scores.argmax())
top_value = float(sim_scores[top_index])
if top_value >= EMBED_SIM_THRESHOLD:
matched_epg = epg_rows[top_index]
chan.epg_data = all_epg[matched_epg["epg_id"]]
chan.save()
matched_channels.append((chan.id, fallback_name, matched_epg["tvg_id"]))
logger.info(
f"Channel {chan.id} '{fallback_name}' => matched EPG tvg_id={matched_epg['tvg_id']} "
f"(fuzzy={best_score}, cos-sim={top_value:.2f})"
)
else:
logger.info(
f"Channel {chan.id} '{fallback_name}' => fuzzy={best_score}, "
f"cos-sim={top_value:.2f} < {EMBED_SIM_THRESHOLD}, skipping"
)
else:
logger.info(
f"Channel {chan.id} '{fallback_name}' => fuzzy={best_score} < "
f"{LOWER_FUZZY_THRESHOLD}, skipping"
)
if channels_to_update:
Channel.objects.bulk_update(channels_to_update, ['epg_data'])
if channels_to_update:
Channel.objects.bulk_update(channels_to_update, ['epg_data'])
total_matched = len(matched_channels)
if total_matched:
@ -227,8 +147,6 @@ def match_epg_channels():
}
)
SentenceTransformer.clear()
gc.collect()
return f"Done. Matched {total_matched} channel(s)."
@shared_task

View file

@ -0,0 +1,24 @@
# Generated by Django 5.1.6 on 2025-04-07 16:29
import django.utils.timezone
from django.db import migrations, models
class Migration(migrations.Migration):
    """Add created_at/updated_at audit columns to EPGSource.

    Uses a plain ``timezone.now`` default so existing rows are backfilled with
    a concrete value; the follow-up migration (0009) converts the columns to
    auto_now_add/auto_now.
    """

    dependencies = [
        ('epg', '0007_populate_periodic_tasks'),
    ]

    operations = [
        migrations.AddField(
            model_name='epgsource',
            name='created_at',
            field=models.DateTimeField(default=django.utils.timezone.now, help_text='Time when this source was created'),
        ),
        migrations.AddField(
            model_name='epgsource',
            name='updated_at',
            field=models.DateTimeField(default=django.utils.timezone.now, help_text='Time when this source was last updated'),
        ),
    ]

View file

@ -0,0 +1,23 @@
# Generated by Django 5.1.6 on 2025-04-07 16:29
from django.db import migrations, models
class Migration(migrations.Migration):
    """Switch EPGSource timestamps to Django-managed auto_now_add/auto_now.

    Completes the two-step pattern started in 0008: first add the columns
    with a backfill default, then hand management over to Django.
    """

    dependencies = [
        ('epg', '0008_epgsource_created_at_epgsource_updated_at'),
    ]

    operations = [
        migrations.AlterField(
            model_name='epgsource',
            name='created_at',
            field=models.DateTimeField(auto_now_add=True, help_text='Time when this source was created'),
        ),
        migrations.AlterField(
            model_name='epgsource',
            name='updated_at',
            field=models.DateTimeField(auto_now=True, help_text='Time when this source was last updated'),
        ),
    ]

View file

@ -17,6 +17,14 @@ class EPGSource(models.Model):
# Periodic refresh task driving this source; detached (not deleted) with it.
refresh_task = models.ForeignKey(
    PeriodicTask, on_delete=models.SET_NULL, null=True, blank=True
)
# Audit timestamps. NOTE: auto_now only fires on Model.save(), not on
# queryset.update() — bulk updates will not touch updated_at.
created_at = models.DateTimeField(
    auto_now_add=True,
    help_text="Time when this source was created"
)
updated_at = models.DateTimeField(
    auto_now=True,
    help_text="Time when this source was last updated"
)

def __str__(self):
    return self.name

View file

@ -4,10 +4,11 @@ from apps.channels.models import Channel
class EPGSourceSerializer(serializers.ModelSerializer):
    """Serialize EPGSource rows, exposing linked EPGData ids and audit timestamps."""

    epg_data_ids = serializers.SerializerMethodField()

    class Meta:
        model = EPGSource
        fields = ['id', 'name', 'source_type', 'url', 'api_key', 'is_active', 'epg_data_ids', 'refresh_interval', 'created_at', 'updated_at']
        # BUG FIX: read_only_fields was declared on the serializer class body,
        # where DRF silently ignores it — it is only honored inside Meta.
        read_only_fields = ['created_at', 'updated_at']

    def get_epg_data_ids(self, obj):
        # Ids of all EPGData rows reachable via the reverse 'epgs' relation.
        return list(obj.epgs.values_list('id', flat=True))

View file

@ -50,6 +50,8 @@ def refresh_epg_data(source_id):
elif source.source_type == 'schedules_direct':
fetch_schedules_direct(source)
source.save(update_fields=['updated_at'])
release_task_lock('refresh_epg_data', source_id)
def fetch_xmltv(source):

View file

@ -56,7 +56,7 @@ class M3UAccountSerializer(serializers.ModelSerializer):
required=True
)
profiles = M3UAccountProfileSerializer(many=True, read_only=True)
read_only_fields = ['locked']
read_only_fields = ['locked', 'created_at', 'updated_at']
# channel_groups = serializers.SerializerMethodField()
channel_groups = ChannelGroupM3UAccountSerializer(source='channel_group', many=True, required=False)

View file

@ -430,6 +430,7 @@ def refresh_single_m3u_account(account_id):
# Calculate elapsed time
elapsed_time = end_time - start_time
account.save(update_fields=['updated_at'])
print(f"Function took {elapsed_time} seconds to execute.")

View file

@ -43,6 +43,9 @@ def scan_and_process_files():
if not os.path.isfile(filepath):
continue
if not filename.endswith('.m3u') and not filename.endswith('.m3u8'):
continue
mtime = os.path.getmtime(filepath)
age = now - mtime
redis_key = REDIS_PREFIX + filepath
@ -85,6 +88,9 @@ def scan_and_process_files():
if not os.path.isfile(filepath):
continue
if not filename.endswith('.xml') and not filename.endswith('.gz'):
continue
mtime = os.path.getmtime(filepath)
age = now - mtime
redis_key = REDIS_PREFIX + filepath

View file

@ -160,34 +160,3 @@ def send_websocket_event(event, success, data):
"data": {"success": True, "type": "epg_channels"}
}
)
class SentenceTransformer:
_instance = None
@classmethod
def get_model(cls):
if cls._instance is None:
from sentence_transformers import SentenceTransformer as st
# Load the sentence-transformers model once at the module level
SENTENCE_MODEL_NAME = "sentence-transformers/all-MiniLM-L6-v2"
MODEL_PATH = os.path.join(settings.MEDIA_ROOT, "models", "all-MiniLM-L6-v2")
os.makedirs(MODEL_PATH, exist_ok=True)
# If not present locally, download:
if not os.path.exists(os.path.join(MODEL_PATH, "config.json")):
logger.info(f"Local model not found in {MODEL_PATH}; downloading from {SENTENCE_MODEL_NAME}...")
cls._instance = st(SENTENCE_MODEL_NAME, cache_folder=MODEL_PATH)
else:
logger.info(f"Loading local model from {MODEL_PATH}")
cls._instance = st(MODEL_PATH)
return cls._instance
@classmethod
def clear(cls):
"""Clear the model instance and release memory."""
if cls._instance is not None:
del cls._instance
cls._instance = None
gc.collect()

View file

@ -1,7 +1,6 @@
import os
from pathlib import Path
from datetime import timedelta
from celery.schedules import crontab
BASE_DIR = Path(__file__).resolve().parent.parent

View file

@ -48,7 +48,7 @@ ENV PATH="/dispatcharrpy/bin:$PATH" \
# Copy the virtual environment and application from the builder stage
COPY --from=builder /dispatcharrpy /dispatcharrpy
COPY --from=builder /app /app
COPY --from=frontend-builder /app/frontend /app/frontend
COPY --from=frontend-builder /app/frontend/dist /app/frontend/dist
# Run collectstatic after frontend assets are copied
RUN cd /app && python manage.py collectstatic --noinput

View file

@ -13,6 +13,7 @@ import M3U from './pages/M3U';
import EPG from './pages/EPG';
import Guide from './pages/Guide';
import Stats from './pages/Stats';
import DVR from './pages/DVR';
import Settings from './pages/Settings';
import StreamProfiles from './pages/StreamProfiles';
import useAuthStore from './store/auth';
@ -127,6 +128,7 @@ const App = () => {
element={<StreamProfiles />}
/>
<Route path="/guide" element={<Guide />} />
<Route path="/dvr" element={<DVR />} />
<Route path="/stats" element={<Stats />} />
<Route path="/settings" element={<Settings />} />
</>

View file

@ -1035,6 +1035,19 @@ export default class API {
.updateProfileChannels(channelIds, profileId, enabled);
}
  // Fetch every recording from the backend DVR endpoint.
  // Returns the parsed JSON list; response status is not checked here
  // (matches the style of the sibling API helpers).
  static async getRecordings() {
    const response = await fetch(`${host}/api/channels/recordings/`, {
      headers: {
        Authorization: `Bearer ${await API.getAuthToken()}`,
        'Content-Type': 'application/json',
      },
    });
    const retval = await response.json();
    return retval;
  }
static async createRecording(values) {
const response = await fetch(`${host}/api/channels/recordings/`, {
method: 'POST',
@ -1046,7 +1059,20 @@ export default class API {
});
const retval = await response.json();
useChannelsStore.getState().fetchRecordings();
return retval;
}
static async deleteRecording(id) {
const response = await fetch(`${host}/api/channels/recordings/${id}/`, {
method: 'DELETE',
headers: {
Authorization: `Bearer ${await API.getAuthToken()}`,
'Content-Type': 'application/json',
},
});
useChannelsStore.getState().fetchRecordings();
}
}

View file

@ -9,6 +9,7 @@ import {
Settings as LucideSettings,
Copy,
ChartLine,
Video,
} from 'lucide-react';
import {
Avatar,
@ -80,6 +81,7 @@ const Sidebar = ({ collapsed, toggleDrawer, drawerWidth, miniDrawerWidth }) => {
path: '/stream-profiles',
},
{ label: 'TV Guide', icon: <LayoutGrid size={20} />, path: '/guide' },
{ label: 'DVR', icon: <Video size={20} />, path: '/dvr' },
{ label: 'Stats', icon: <ChartLine size={20} />, path: '/stats' },
{
label: 'Settings',

View file

@ -1,40 +1,31 @@
// Modal.js
import React, { useEffect } from 'react';
import { useFormik } from 'formik';
import * as Yup from 'yup';
import React from 'react';
import API from '../../api';
import { Flex, TextInput, Button, Modal } from '@mantine/core';
import { isNotEmpty, useForm } from '@mantine/form';
const ChannelGroup = ({ channelGroup = null, isOpen, onClose }) => {
const formik = useFormik({
const form = useForm({
mode: 'uncontrolled',
initialValues: {
name: '',
name: channelGroup ? channelGroup.name : '',
},
validationSchema: Yup.object({
name: Yup.string().required('Name is required'),
}),
onSubmit: async (values, { setSubmitting, resetForm }) => {
if (channelGroup?.id) {
await API.updateChannelGroup({ id: channelGroup.id, ...values });
} else {
await API.addChannelGroup(values);
}
resetForm();
setSubmitting(false);
onClose();
validate: {
name: isNotEmpty('Specify a name'),
},
});
useEffect(() => {
const onSubmit = async () => {
const values = form.getValues();
if (channelGroup) {
formik.setValues({
name: channelGroup.name,
});
await API.updateChannelGroup({ id: channelGroup.id, ...values });
} else {
formik.resetForm();
await API.addChannelGroup(values);
}
}, [channelGroup]);
return form.reset();
};
if (!isOpen) {
return <></>;
@ -42,14 +33,13 @@ const ChannelGroup = ({ channelGroup = null, isOpen, onClose }) => {
return (
<Modal opened={isOpen} onClose={onClose} title="Channel Group">
<form onSubmit={formik.handleSubmit}>
<form onSubmit={form.onSubmit(onSubmit)}>
<TextInput
id="name"
name="name"
label="Name"
value={formik.values.name}
onChange={formik.handleChange}
error={formik.touched.name}
{...form.getInputProps('name')}
key={form.key('name')}
/>
<Flex mih={50} gap="xs" justify="flex-end" align="flex-end">
@ -57,7 +47,7 @@ const ChannelGroup = ({ channelGroup = null, isOpen, onClose }) => {
type="submit"
variant="contained"
color="primary"
disabled={formik.isSubmitting}
disabled={form.submitting}
size="small"
>
Submit

View file

@ -1,22 +1,7 @@
// Modal.js
import React, { useState, useEffect } from 'react';
import { useFormik } from 'formik';
import * as Yup from 'yup';
import React from 'react';
import API from '../../api';
import useEPGsStore from '../../store/epgs';
import {
LoadingOverlay,
TextInput,
Button,
Checkbox,
Modal,
Flex,
NativeSelect,
NumberInput,
Space,
Select,
Alert,
} from '@mantine/core';
import { Button, Modal, Flex, Select, Alert } from '@mantine/core';
import useChannelsStore from '../../store/channels';
import { DateTimePicker } from '@mantine/dates';
import { CircleAlert } from 'lucide-react';
@ -61,6 +46,8 @@ const DVR = ({ recording = null, channel = null, isOpen, onClose }) => {
...values,
channel: channel_id,
});
form.reset();
onClose();
};
@ -110,7 +97,12 @@ const DVR = ({ recording = null, channel = null, isOpen, onClose }) => {
/>
<Flex mih={50} gap="xs" justify="flex-end" align="flex-end">
<Button type="submit" variant="contained" size="small">
<Button
type="submit"
variant="contained"
size="small"
disabled={form.submitting}
>
Submit
</Button>
</Flex>

View file

@ -892,7 +892,7 @@ const ChannelsTable = ({}) => {
<Menu>
<Menu.Target>
<ActionIcon variant="transparent" size="sm">
<CircleEllipsis size="18" />
<EllipsisVertical size="18" />
</ActionIcon>
</Menu.Target>

View file

@ -17,6 +17,7 @@ import {
import { notifications } from '@mantine/notifications';
import { IconSquarePlus } from '@tabler/icons-react';
import { RefreshCcw, SquareMinus, SquarePen } from 'lucide-react';
import dayjs from 'dayjs';
const EPGsTable = () => {
const [epg, setEPG] = useState(null);
@ -44,6 +45,11 @@ const EPGsTable = () => {
accessorKey: 'max_streams',
enableSorting: false,
},
{
header: 'Updated',
accessorFn: (row) => dayjs(row.updated_at).format('MMMM D, YYYY h:mma'),
enableSorting: false,
},
],
[]
);

View file

@ -16,6 +16,7 @@ import {
} from '@mantine/core';
import { SquareMinus, SquarePen, RefreshCcw, Check, X } from 'lucide-react';
import { IconSquarePlus } from '@tabler/icons-react'; // Import custom icons
import dayjs from 'dayjs';
const M3UTable = () => {
const [playlist, setPlaylist] = useState(null);
@ -70,6 +71,11 @@ const M3UTable = () => {
</Box>
),
},
{
header: 'Updated',
accessorFn: (row) => dayjs(row.updated_at).format('MMMM D, YYYY h:mma'),
enableSorting: false,
},
],
[]
);

View file

@ -503,7 +503,7 @@ const StreamsTable = ({}) => {
<>
<Tooltip label="Add to Channel">
<ActionIcon
size="sm"
size="xs"
color={theme.tailwind.blue[6]}
variant="transparent"
onClick={() => addStreamToChannel(row.original.id)}
@ -522,7 +522,7 @@ const StreamsTable = ({}) => {
<Tooltip label="Create New Channel">
<ActionIcon
size="sm"
size="xs"
color={theme.tailwind.green[5]}
variant="transparent"
onClick={() => createChannelFromStream(row.original)}
@ -533,7 +533,7 @@ const StreamsTable = ({}) => {
<Menu>
<Menu.Target>
<ActionIcon variant="transparent" size="sm">
<ActionIcon variant="transparent" size="xs">
<EllipsisVertical size="18" />
</ActionIcon>
</Menu.Target>

135
frontend/src/pages/DVR.jsx Normal file
View file

@ -0,0 +1,135 @@
import React, { useMemo, useState, useEffect } from 'react';
import {
ActionIcon,
Box,
Button,
Card,
Center,
Container,
Flex,
Group,
SimpleGrid,
Stack,
Text,
Title,
Tooltip,
useMantineTheme,
} from '@mantine/core';
import {
Gauge,
HardDriveDownload,
HardDriveUpload,
SquarePlus,
SquareX,
Timer,
Users,
Video,
} from 'lucide-react';
import dayjs from 'dayjs';
import duration from 'dayjs/plugin/duration';
import relativeTime from 'dayjs/plugin/relativeTime';
import useChannelsStore from '../store/channels';
import RecordingForm from '../components/forms/Recording';
import API from '../api';
dayjs.extend(duration);
dayjs.extend(relativeTime);
const RecordingCard = ({ recording }) => {
const { channels } = useChannelsStore();
const deleteRecording = (id) => {
API.deleteRecording(id);
};
const customProps = JSON.parse(recording.custom_properties);
let recordingName = 'Custom Recording';
if (customProps.program) {
recordingName = customProps.program.title;
}
return (
<Card
shadow="sm"
padding="md"
radius="md"
withBorder
style={{
color: '#fff',
backgroundColor: '#27272A',
}}
>
<Flex justify="space-between" align="center">
<Group>
<Text fw={500}>{recordingName}</Text>
</Group>
<Center>
<Tooltip label="Delete / Cancel">
<ActionIcon
variant="transparent"
color="red.9"
onClick={() => deleteRecording(recording.id)}
>
<SquareX size="24" />
</ActionIcon>
</Tooltip>
</Center>
</Flex>
<Text size="sm">Channel: {channels[recording.channel].name}</Text>
<Text size="sm">
Start: {dayjs(recording.start_time).format('MMMM D, YYYY h:MMa')}
End: {dayjs(recording.end_time).format('MMMM D, YYYY h:MMa')}
</Text>
</Card>
);
};
const DVRPage = () => {
const theme = useMantineTheme();
const { recordings } = useChannelsStore();
const [recordingModalOpen, setRecordingModalOpen] = useState(false);
const openRecordingModal = () => {
setRecordingModalOpen(true);
};
const closeRecordingModal = () => {
setRecordingModalOpen(false);
};
return (
<Box style={{ padding: 10 }}>
<Button
leftSection={<SquarePlus size={18} />}
variant="light"
size="sm"
onClick={openRecordingModal}
p={5}
color={theme.tailwind.green[5]}
style={{
borderWidth: '1px',
borderColor: theme.tailwind.green[5],
color: 'white',
}}
>
New Recording
</Button>
<SimpleGrid cols={5} spacing="md" style={{ paddingTop: 10 }}>
{Object.values(recordings).map((recording) => (
<RecordingCard recording={recording} />
))}
</SimpleGrid>
<RecordingForm
isOpen={recordingModalOpen}
onClose={closeRecordingModal}
/>
</Box>
);
};
export default DVRPage;

View file

@ -16,6 +16,7 @@ import {
Text,
Paper,
Grid,
Group,
} from '@mantine/core';
import './guide.css';
@ -31,12 +32,13 @@ const MODAL_WIDTH = 600;
const MODAL_HEIGHT = 400;
export default function TVChannelGuide({ startDate, endDate }) {
const { channels } = useChannelsStore();
const { channels, recordings } = useChannelsStore();
const [programs, setPrograms] = useState([]);
const [guideChannels, setGuideChannels] = useState([]);
const [now, setNow] = useState(dayjs());
const [selectedProgram, setSelectedProgram] = useState(null);
const [recording, setRecording] = useState(null);
const [loading, setLoading] = useState(true);
const {
environment: { env_mode },
@ -70,6 +72,7 @@ export default function TVChannelGuide({ startDate, endDate }) {
);
setGuideChannels(filteredChannels);
console.log(fetched);
setPrograms(fetched);
setLoading(false);
};
@ -158,13 +161,17 @@ export default function TVChannelGuide({ startDate, endDate }) {
return guideChannels.find((ch) => ch.epg_data?.tvg_id === tvgId);
}
const record = (program) => {
const record = async (program) => {
const channel = findChannelByTvgId(program.tvg_id);
API.createRecording({
await API.createRecording({
channel: `${channel.id}`,
start_time: program.start_time,
end_time: program.end_time,
custom_properties: JSON.stringify({
program,
}),
});
notifications.show({ title: 'Recording scheduled' });
};
// The Watch Now click => show floating video
@ -190,6 +197,18 @@ export default function TVChannelGuide({ startDate, endDate }) {
// On program click, open the details modal
function handleProgramClick(program, event) {
setSelectedProgram(program);
setRecording(
recordings.find((recording) => {
if (recording.custom_properties) {
const customProps = JSON.parse(recording.custom_properties);
if (customProps.program && customProps.program.id == program.id) {
return recording;
}
}
return null;
})
);
}
// Close the modal
@ -206,6 +225,16 @@ export default function TVChannelGuide({ startDate, endDate }) {
const durationMinutes = programEnd.diff(programStart, 'minute');
const leftPx = (startOffsetMinutes / MINUTE_INCREMENT) * MINUTE_BLOCK_WIDTH;
const widthPx = (durationMinutes / MINUTE_INCREMENT) * MINUTE_BLOCK_WIDTH;
const recording = recordings.find((recording) => {
if (recording.custom_properties) {
const customProps = JSON.parse(recording.custom_properties);
if (customProps.program && customProps.program.id == program.id) {
return recording;
}
}
return null;
});
// Highlight if currently live
const isLive = now.isAfter(programStart) && now.isBefore(programEnd);
@ -250,7 +279,20 @@ export default function TVChannelGuide({ startDate, endDate }) {
}}
>
<Text size="md" style={{ fontWeight: 'bold' }}>
{program.title}
<Group gap="xs">
{recording && (
<div
style={{
borderRadius: '50%',
width: '10px',
height: '10px',
display: 'flex',
backgroundColor: 'red',
}}
></div>
)}
{program.title}
</Group>
</Text>
<Text size="sm" noWrap>
{programStart.format('h:mma')} - {programEnd.format('h:mma')}
@ -464,13 +506,15 @@ export default function TVChannelGuide({ startDate, endDate }) {
</Text>
{/* Only show the Watch button if currently live */}
<Flex mih={50} gap="xs" justify="flex-end" align="flex-end">
<Button
variant="transparent"
color="gray"
onClick={() => record(selectedProgram)}
>
Record
</Button>
{!recording && (
<Button
variant="transparent"
color="gray"
onClick={() => record(selectedProgram)}
>
Record
</Button>
)}
{now.isAfter(dayjs(selectedProgram.start_time)) &&
now.isBefore(dayjs(selectedProgram.end_time)) && (

View file

@ -36,6 +36,7 @@ const useAuthStore = create((set, get) => ({
useChannelsStore.getState().fetchChannelGroups(),
useChannelsStore.getState().fetchLogos(),
useChannelsStore.getState().fetchChannelProfiles(),
useChannelsStore.getState().fetchRecordings(),
useUserAgentsStore.getState().fetchUserAgents(),
usePlaylistsStore.getState().fetchPlaylists(),
useEPGsStore.getState().fetchEPGs(),

View file

@ -16,6 +16,7 @@ const useChannelsStore = create((set, get) => ({
activeChannels: {},
activeClients: {},
logos: {},
recordings: [],
isLoading: false,
error: null,
@ -371,6 +372,18 @@ const useChannelsStore = create((set, get) => ({
};
});
},
fetchRecordings: async () => {
set({ isLoading: true, error: null });
try {
set({
recordings: await api.getRecordings(),
});
} catch (error) {
console.error('Failed to fetch recordings:', error);
set({ error: 'Failed to load recordings.', isLoading: false });
}
},
}));
export default useChannelsStore;

159
scripts/epg_match.py Normal file
View file

@ -0,0 +1,159 @@
# ml_model.py
import json
import os
import re
import sys

from rapidfuzz import fuzz
from sentence_transformers import SentenceTransformer as st
from sentence_transformers import util
# Load the sentence-transformers model once at the module level
SENTENCE_MODEL_NAME = "sentence-transformers/all-MiniLM-L6-v2"
MODEL_PATH = os.path.join("/app/media", "models", "all-MiniLM-L6-v2")
# Thresholds
BEST_FUZZY_THRESHOLD = 85
LOWER_FUZZY_THRESHOLD = 40
EMBED_SIM_THRESHOLD = 0.65
def eprint(*args, **kwargs):
    """Print diagnostics to stderr; stdout is reserved for the JSON result."""
    print(*args, **kwargs, file=sys.stderr)
def process_data(input_data):
    """Match channels to EPG rows: direct tvg_id, then fuzzy name, then embeddings.

    Parameters
    ----------
    input_data : dict
        Expects keys "channels" (list of channel dicts with id/name/tvg_id/
        norm_chan), "epg_data" (list of EPG dicts with id/tvg_id/name/
        norm_name) and "region_code" (str or None).

    Returns
    -------
    dict with "channels_to_update" (channel dicts annotated with
    "epg_data_id") and "matched_channels" (tuples of (channel id,
    fallback name, matched tvg_id)).
    """
    os.makedirs(MODEL_PATH, exist_ok=True)

    # Download the sentence-transformers model on first use; reuse local copy after.
    if not os.path.exists(os.path.join(MODEL_PATH, "config.json")):
        eprint(f"Local model not found in {MODEL_PATH}; downloading from {SENTENCE_MODEL_NAME}...")
        st_model = st(SENTENCE_MODEL_NAME, cache_folder=MODEL_PATH)
    else:
        eprint(f"Loading local model from {MODEL_PATH}")
        st_model = st(MODEL_PATH)

    channels = input_data["channels"]
    epg_data = input_data["epg_data"]
    region_code = input_data["region_code"]

    # Pre-compute embeddings for all non-empty normalized EPG names so the
    # per-channel cosine-similarity check is a single encode + matmul.
    epg_embeddings = None
    if any(row["norm_name"] for row in epg_data):
        epg_embeddings = st_model.encode(
            [row["norm_name"] for row in epg_data],
            convert_to_tensor=True
        )

    channels_to_update = []
    matched_channels = []

    for chan in channels:
        # A) Direct tvg_id lookup.
        # I don't THINK this should happen now that we assign EPG on channel creation.
        if chan["tvg_id"]:
            # BUG FIX: the original collected epg["id"] values (ints) and then
            # indexed epg_match[0]["id"], raising TypeError. Collect the full
            # EPG rows and read the id off the first match.
            direct_matches = [epg for epg in epg_data if epg["tvg_id"] == chan["tvg_id"]]
            if direct_matches:
                chan["epg_data_id"] = direct_matches[0]["id"]
                eprint(f"Channel {chan['id']} '{chan['name']}' => EPG found by tvg_id={chan['tvg_id']}")
                channels_to_update.append(chan)
                continue

        # C) Perform name-based fuzzy matching.
        fallback_name = chan["tvg_id"].strip() if chan["tvg_id"] else chan["name"]

        if not chan["norm_chan"]:
            eprint(f"Channel {chan['id']} '{chan['name']}' => empty after normalization, skipping")
            continue

        best_score = 0
        best_epg = None
        for row in epg_data:
            if not row["norm_name"]:
                continue

            base_score = fuzz.ratio(chan["norm_chan"], row["norm_name"])
            bonus = 0

            # Region-based bonus/penalty: reward a ".us"-style suffix matching
            # the configured region, penalize a mismatching one.
            combined_text = row["tvg_id"].lower() + " " + row["name"].lower()
            dot_regions = re.findall(r'\.([a-z]{2})', combined_text)
            if region_code:
                if dot_regions:
                    if region_code in dot_regions:
                        bonus = 30  # bigger bonus if .us or .ca matches
                    else:
                        bonus = -15
                elif region_code in combined_text:
                    bonus = 15

            score = base_score + bonus

            eprint(
                f"Channel {chan['id']} '{fallback_name}' => EPG row {row['id']}: "
                f"name='{row['name']}', norm_name='{row['norm_name']}', "
                f"combined_text='{combined_text}', dot_regions={dot_regions}, "
                f"base_score={base_score}, bonus={bonus}, total_score={score}"
            )

            if score > best_score:
                best_score = score
                best_epg = row

        # If no best match was found, skip.
        if not best_epg:
            eprint(f"Channel {chan['id']} '{fallback_name}' => no EPG match at all.")
            continue

        # If best_score is above BEST_FUZZY_THRESHOLD => direct accept.
        if best_score >= BEST_FUZZY_THRESHOLD:
            chan["epg_data_id"] = best_epg["id"]
            channels_to_update.append(chan)
            matched_channels.append((chan['id'], fallback_name, best_epg["tvg_id"]))
            eprint(
                f"Channel {chan['id']} '{fallback_name}' => matched tvg_id={best_epg['tvg_id']} "
                f"(score={best_score})"
            )
        # If best_score is in the “middle range,” do embedding check.
        elif best_score >= LOWER_FUZZY_THRESHOLD and epg_embeddings is not None:
            chan_embedding = st_model.encode(chan["norm_chan"], convert_to_tensor=True)
            sim_scores = util.cos_sim(chan_embedding, epg_embeddings)[0]
            top_index = int(sim_scores.argmax())
            top_value = float(sim_scores[top_index])

            if top_value >= EMBED_SIM_THRESHOLD:
                matched_epg = epg_data[top_index]
                chan["epg_data_id"] = matched_epg["id"]
                channels_to_update.append(chan)
                matched_channels.append((chan['id'], fallback_name, matched_epg["tvg_id"]))
                eprint(
                    f"Channel {chan['id']} '{fallback_name}' => matched EPG tvg_id={matched_epg['tvg_id']} "
                    f"(fuzzy={best_score}, cos-sim={top_value:.2f})"
                )
            else:
                eprint(
                    f"Channel {chan['id']} '{fallback_name}' => fuzzy={best_score}, "
                    f"cos-sim={top_value:.2f} < {EMBED_SIM_THRESHOLD}, skipping"
                )
        else:
            eprint(
                f"Channel {chan['id']} '{fallback_name}' => fuzzy={best_score} < "
                f"{LOWER_FUZZY_THRESHOLD}, skipping"
            )

    return {
        "channels_to_update": channels_to_update,
        "matched_channels": matched_channels,
    }
def main():
    """Entry point: load the JSON payload named by argv[1] and emit the result.

    Diagnostics go to stderr (via eprint in the callees); stdout carries only
    the JSON document consumed by the parent process.
    """
    with open(sys.argv[1], 'r') as handle:
        payload = json.load(handle)
    print(json.dumps(process_data(payload)))
if __name__ == "__main__":
main()