diff --git a/apps/channels/migrations/0015_recording_custom_properties.py b/apps/channels/migrations/0015_recording_custom_properties.py
new file mode 100644
index 00000000..3c01fc61
--- /dev/null
+++ b/apps/channels/migrations/0015_recording_custom_properties.py
@@ -0,0 +1,18 @@
+# Generated by Django 5.1.6 on 2025-04-07 16:47
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ('dispatcharr_channels', '0014_recording'),
+ ]
+
+ operations = [
+ migrations.AddField(
+ model_name='recording',
+ name='custom_properties',
+ field=models.TextField(blank=True, null=True),
+ ),
+ ]
diff --git a/apps/channels/models.py b/apps/channels/models.py
index abcecb77..9f1b641e 100644
--- a/apps/channels/models.py
+++ b/apps/channels/models.py
@@ -410,6 +410,7 @@ class Recording(models.Model):
start_time = models.DateTimeField()
end_time = models.DateTimeField()
task_id = models.CharField(max_length=255, null=True, blank=True)
+ custom_properties = models.TextField(null=True, blank=True)
def __str__(self):
return f"{self.channel.name} - {self.start_time} to {self.end_time}"
diff --git a/apps/channels/tasks.py b/apps/channels/tasks.py
index 7e271846..2cecbf04 100644
--- a/apps/channels/tasks.py
+++ b/apps/channels/tasks.py
@@ -4,17 +4,15 @@ import os
import re
import requests
import time
-import gc
+import json
+import subprocess
from datetime import datetime
from celery import shared_task
-from rapidfuzz import fuzz
-from django.conf import settings
-from django.db import transaction
from django.utils.text import slugify
from apps.channels.models import Channel
-from apps.epg.models import EPGData, EPGSource
+from apps.epg.models import EPGData
from core.models import CoreSettings
from channels.layers import get_channel_layer
@@ -22,15 +20,10 @@ from asgiref.sync import async_to_sync
from asgiref.sync import async_to_sync
from channels.layers import get_channel_layer
-from core.utils import SentenceTransformer
+import tempfile
logger = logging.getLogger(__name__)
-# Thresholds
-BEST_FUZZY_THRESHOLD = 85
-LOWER_FUZZY_THRESHOLD = 40
-EMBED_SIM_THRESHOLD = 0.65
-
# Words we remove to help with fuzzy + embedding matching
COMMON_EXTRANEOUS_WORDS = [
"tv", "channel", "network", "television",
@@ -70,12 +63,8 @@ def match_epg_channels():
4) If a match is found, we set channel.tvg_id
5) Summarize and log results.
"""
- from sentence_transformers import util
-
logger.info("Starting EPG matching logic...")
- st_model = SentenceTransformer.get_model()
-
# Attempt to retrieve a "preferred-region" if configured
try:
region_obj = CoreSettings.objects.get(key="preferred-region")
@@ -83,130 +72,61 @@ def match_epg_channels():
except CoreSettings.DoesNotExist:
region_code = None
- # Gather EPGData rows so we can do fuzzy matching in memory
- all_epg = {e.id: e for e in EPGData.objects.all()}
-
- epg_rows = []
- for e in list(all_epg.values()):
- epg_rows.append({
- "epg_id": e.id,
- "tvg_id": e.tvg_id or "",
- "raw_name": e.name,
- "norm_name": normalize_name(e.name),
- })
-
- epg_embeddings = None
- if any(row["norm_name"] for row in epg_rows):
- epg_embeddings = st_model.encode(
- [row["norm_name"] for row in epg_rows],
- convert_to_tensor=True
- )
-
matched_channels = []
channels_to_update = []
- source = EPGSource.objects.filter(is_active=True).first()
- epg_file_path = getattr(source, 'file_path', None) if source else None
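+ # Flatten channels that still lack EPG data into JSON-safe dicts for the matcher subprocess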
+ channels_json = [{
+ "id": channel.id,
+ "name": channel.name,
+ "tvg_id": channel.tvg_id,
+ "fallback_name": channel.tvg_id.strip() if channel.tvg_id else channel.name,
+ "norm_chan": normalize_name(channel.tvg_id.strip() if channel.tvg_id else channel.name)
+ } for channel in Channel.objects.filter(epg_data__isnull=True)]
- with transaction.atomic():
- for chan in Channel.objects.all():
- # skip if channel already assigned an EPG
- if chan.epg_data:
- continue
+ epg_json = [{
+ 'id': epg.id,
+ 'tvg_id': epg.tvg_id,
+ 'name': epg.name,
+ 'norm_name': normalize_name(epg.name),
+ 'epg_source_id': epg.epg_source_id,
+ } for epg in EPGData.objects.all()]
- # If channel has a tvg_id that doesn't exist in EPGData, do direct check.
- # I don't THINK this should happen now that we assign EPG on channel creation.
- if chan.tvg_id:
- epg_match = EPGData.objects.filter(tvg_id=chan.tvg_id).first()
- if epg_match:
- chan.epg_data = epg_match
- logger.info(f"Channel {chan.id} '{chan.name}' => EPG found by tvg_id={chan.tvg_id}")
- channels_to_update.append(chan)
- continue
+ payload = {
+ "channels": channels_json,
+ "epg_data": epg_json,
+ "region_code": region_code,
+ }
- # C) Perform name-based fuzzy matching
- fallback_name = chan.tvg_id.strip() if chan.tvg_id else chan.name
- norm_chan = normalize_name(fallback_name)
- if not norm_chan:
- logger.info(f"Channel {chan.id} '{chan.name}' => empty after normalization, skipping")
- continue
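+ # Write the payload to a temp file; the helper script takes its path as its only argument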
+ with tempfile.NamedTemporaryFile(delete=False) as temp_file:
+ temp_file.write(json.dumps(payload).encode('utf-8'))
+ temp_file_path = temp_file.name
- best_score = 0
- best_epg = None
- for row in epg_rows:
- if not row["norm_name"]:
- continue
- base_score = fuzz.ratio(norm_chan, row["norm_name"])
- bonus = 0
- # Region-based bonus/penalty
- combined_text = row["tvg_id"].lower() + " " + row["raw_name"].lower()
- dot_regions = re.findall(r'\.([a-z]{2})', combined_text)
- if region_code:
- if dot_regions:
- if region_code in dot_regions:
- bonus = 30 # bigger bonus if .us or .ca matches
- else:
- bonus = -15
- elif region_code in combined_text:
- bonus = 15
- score = base_score + bonus
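+ # Run matching out-of-process so the sentence-transformers model never loads into the Celery worker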
+ process = subprocess.Popen(
+ ['python', '/app/scripts/epg_match.py', temp_file_path],
+ stdout=subprocess.PIPE,
+ stderr=subprocess.PIPE,
+ text=True
+ )
- logger.debug(
- f"Channel {chan.id} '{fallback_name}' => EPG row {row['epg_id']}: "
- f"raw_name='{row['raw_name']}', norm_name='{row['norm_name']}', "
- f"combined_text='{combined_text}', dot_regions={dot_regions}, "
- f"base_score={base_score}, bonus={bonus}, total_score={score}"
- )
+ # Stream the matcher's stderr into the worker log in real time
+ for line in iter(process.stderr.readline, ''):
+ if line:
+ logger.info(line.strip())
- if score > best_score:
- best_score = score
- best_epg = row
+ # stderr is exhausted at this point; communicate() collects stdout and waits for exit
+ stdout, stderr = process.communicate()
- # If no best match was found, skip
- if not best_epg:
- logger.info(f"Channel {chan.id} '{fallback_name}' => no EPG match at all.")
- continue
+ os.remove(temp_file_path)
- # If best_score is above BEST_FUZZY_THRESHOLD => direct accept
- if best_score >= BEST_FUZZY_THRESHOLD:
- chan.epg_data = all_epg[best_epg["epg_id"]]
- chan.save()
+ if process.returncode != 0:
+ return f"Failed to process EPG matching (exit code {process.returncode}); stderr was logged above"
- matched_channels.append((chan.id, fallback_name, best_epg["tvg_id"]))
- logger.info(
- f"Channel {chan.id} '{fallback_name}' => matched tvg_id={best_epg['tvg_id']} "
- f"(score={best_score})"
- )
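+ # The helper script reports its matches as JSON on stdout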
+ result = json.loads(stdout)
+ channels_to_update = result["channels_to_update"]
+ matched_channels = result["matched_channels"]
- # If best_score is in the “middle range,” do embedding check
- elif best_score >= LOWER_FUZZY_THRESHOLD and epg_embeddings is not None:
- chan_embedding = st_model.encode(norm_chan, convert_to_tensor=True)
- sim_scores = util.cos_sim(chan_embedding, epg_embeddings)[0]
- top_index = int(sim_scores.argmax())
- top_value = float(sim_scores[top_index])
- if top_value >= EMBED_SIM_THRESHOLD:
- matched_epg = epg_rows[top_index]
- chan.epg_data = all_epg[matched_epg["epg_id"]]
- chan.save()
-
- matched_channels.append((chan.id, fallback_name, matched_epg["tvg_id"]))
- logger.info(
- f"Channel {chan.id} '{fallback_name}' => matched EPG tvg_id={matched_epg['tvg_id']} "
- f"(fuzzy={best_score}, cos-sim={top_value:.2f})"
- )
- else:
- logger.info(
- f"Channel {chan.id} '{fallback_name}' => fuzzy={best_score}, "
- f"cos-sim={top_value:.2f} < {EMBED_SIM_THRESHOLD}, skipping"
- )
- else:
- logger.info(
- f"Channel {chan.id} '{fallback_name}' => fuzzy={best_score} < "
- f"{LOWER_FUZZY_THRESHOLD}, skipping"
- )
-
- if channels_to_update:
- Channel.objects.bulk_update(channels_to_update, ['epg_data'])
+ if channels_to_update:
+ Channel.objects.bulk_update(channels_to_update, ['epg_data'])
total_matched = len(matched_channels)
if total_matched:
@@ -227,8 +147,6 @@ def match_epg_channels():
}
)
- SentenceTransformer.clear()
- gc.collect()
return f"Done. Matched {total_matched} channel(s)."
@shared_task
diff --git a/apps/epg/migrations/0008_epgsource_created_at_epgsource_updated_at.py b/apps/epg/migrations/0008_epgsource_created_at_epgsource_updated_at.py
new file mode 100644
index 00000000..1dcfeed0
--- /dev/null
+++ b/apps/epg/migrations/0008_epgsource_created_at_epgsource_updated_at.py
@@ -0,0 +1,24 @@
+# Generated by Django 5.1.6 on 2025-04-07 16:29
+
+import django.utils.timezone
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ('epg', '0007_populate_periodic_tasks'),
+ ]
+
+ operations = [
+ migrations.AddField(
+ model_name='epgsource',
+ name='created_at',
+ field=models.DateTimeField(default=django.utils.timezone.now, help_text='Time when this source was created'),
+ ),
+ migrations.AddField(
+ model_name='epgsource',
+ name='updated_at',
+ field=models.DateTimeField(default=django.utils.timezone.now, help_text='Time when this source was last updated'),
+ ),
+ ]
diff --git a/apps/epg/migrations/0009_alter_epgsource_created_at_and_more.py b/apps/epg/migrations/0009_alter_epgsource_created_at_and_more.py
new file mode 100644
index 00000000..cb8088eb
--- /dev/null
+++ b/apps/epg/migrations/0009_alter_epgsource_created_at_and_more.py
@@ -0,0 +1,23 @@
+# Generated by Django 5.1.6 on 2025-04-07 16:29
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ('epg', '0008_epgsource_created_at_epgsource_updated_at'),
+ ]
+
+ operations = [
+ migrations.AlterField(
+ model_name='epgsource',
+ name='created_at',
+ field=models.DateTimeField(auto_now_add=True, help_text='Time when this source was created'),
+ ),
+ migrations.AlterField(
+ model_name='epgsource',
+ name='updated_at',
+ field=models.DateTimeField(auto_now=True, help_text='Time when this source was last updated'),
+ ),
+ ]
diff --git a/apps/epg/models.py b/apps/epg/models.py
index 3f9b018d..09986bfe 100644
--- a/apps/epg/models.py
+++ b/apps/epg/models.py
@@ -17,6 +17,14 @@ class EPGSource(models.Model):
refresh_task = models.ForeignKey(
PeriodicTask, on_delete=models.SET_NULL, null=True, blank=True
)
+ created_at = models.DateTimeField(
+ auto_now_add=True,
+ help_text="Time when this source was created"
+ )
+ updated_at = models.DateTimeField(
+ auto_now=True,
+ help_text="Time when this source was last updated"
+ )
def __str__(self):
return self.name
diff --git a/apps/epg/serializers.py b/apps/epg/serializers.py
index e4a2a4b3..e4ff932e 100644
--- a/apps/epg/serializers.py
+++ b/apps/epg/serializers.py
@@ -4,10 +4,11 @@ from apps.channels.models import Channel
class EPGSourceSerializer(serializers.ModelSerializer):
epg_data_ids = serializers.SerializerMethodField()
class Meta:
model = EPGSource
- fields = ['id', 'name', 'source_type', 'url', 'api_key', 'is_active', 'epg_data_ids', 'refresh_interval']
+ fields = ['id', 'name', 'source_type', 'url', 'api_key', 'is_active', 'epg_data_ids', 'refresh_interval', 'created_at', 'updated_at']
+ # DRF only honors read_only_fields when it is declared on Meta
+ read_only_fields = ['created_at', 'updated_at']
def get_epg_data_ids(self, obj):
return list(obj.epgs.values_list('id', flat=True))
diff --git a/apps/epg/tasks.py b/apps/epg/tasks.py
index 3b84df6d..33e981a6 100644
--- a/apps/epg/tasks.py
+++ b/apps/epg/tasks.py
@@ -50,6 +50,8 @@ def refresh_epg_data(source_id):
elif source.source_type == 'schedules_direct':
fetch_schedules_direct(source)
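+ # Touch updated_at (auto_now) so the UI can show when this source last refreshed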
+ source.save(update_fields=['updated_at'])
+
release_task_lock('refresh_epg_data', source_id)
def fetch_xmltv(source):
diff --git a/apps/m3u/serializers.py b/apps/m3u/serializers.py
index e7dbfcea..d3948145 100644
--- a/apps/m3u/serializers.py
+++ b/apps/m3u/serializers.py
@@ -56,7 +56,7 @@ class M3UAccountSerializer(serializers.ModelSerializer):
required=True
)
profiles = M3UAccountProfileSerializer(many=True, read_only=True)
- read_only_fields = ['locked']
+ read_only_fields = ['locked', 'created_at', 'updated_at']
# channel_groups = serializers.SerializerMethodField()
channel_groups = ChannelGroupM3UAccountSerializer(source='channel_group', many=True, required=False)
diff --git a/apps/m3u/tasks.py b/apps/m3u/tasks.py
index e2de2af3..82dd7864 100644
--- a/apps/m3u/tasks.py
+++ b/apps/m3u/tasks.py
@@ -430,6 +430,7 @@ def refresh_single_m3u_account(account_id):
# Calculate elapsed time
elapsed_time = end_time - start_time
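+ # Touch the account's updated_at timestamp now that the refresh has finished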
+ account.save(update_fields=['updated_at'])
print(f"Function took {elapsed_time} seconds to execute.")
diff --git a/core/tasks.py b/core/tasks.py
index 62c20b3e..061dd1a5 100644
--- a/core/tasks.py
+++ b/core/tasks.py
@@ -43,6 +43,9 @@ def scan_and_process_files():
if not os.path.isfile(filepath):
continue
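+ # Only process playlist files dropped into the watch directory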
+ if not filename.endswith(('.m3u', '.m3u8')):
+ continue
+
mtime = os.path.getmtime(filepath)
age = now - mtime
redis_key = REDIS_PREFIX + filepath
@@ -85,6 +88,9 @@ def scan_and_process_files():
if not os.path.isfile(filepath):
continue
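+ # Only process XMLTV guide files (plain .xml or gzipped)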
+ if not filename.endswith(('.xml', '.gz')):
+ continue
+
mtime = os.path.getmtime(filepath)
age = now - mtime
redis_key = REDIS_PREFIX + filepath
diff --git a/core/utils.py b/core/utils.py
index d6f0b446..3a5d84f4 100644
--- a/core/utils.py
+++ b/core/utils.py
@@ -160,34 +160,3 @@ def send_websocket_event(event, success, data):
"data": {"success": True, "type": "epg_channels"}
}
)
-
-class SentenceTransformer:
- _instance = None
-
- @classmethod
- def get_model(cls):
- if cls._instance is None:
- from sentence_transformers import SentenceTransformer as st
-
- # Load the sentence-transformers model once at the module level
- SENTENCE_MODEL_NAME = "sentence-transformers/all-MiniLM-L6-v2"
- MODEL_PATH = os.path.join(settings.MEDIA_ROOT, "models", "all-MiniLM-L6-v2")
- os.makedirs(MODEL_PATH, exist_ok=True)
-
- # If not present locally, download:
- if not os.path.exists(os.path.join(MODEL_PATH, "config.json")):
- logger.info(f"Local model not found in {MODEL_PATH}; downloading from {SENTENCE_MODEL_NAME}...")
- cls._instance = st(SENTENCE_MODEL_NAME, cache_folder=MODEL_PATH)
- else:
- logger.info(f"Loading local model from {MODEL_PATH}")
- cls._instance = st(MODEL_PATH)
-
- return cls._instance
-
- @classmethod
- def clear(cls):
- """Clear the model instance and release memory."""
- if cls._instance is not None:
- del cls._instance
- cls._instance = None
- gc.collect()
diff --git a/dispatcharr/settings.py b/dispatcharr/settings.py
index 8aa2c8ca..96bda89b 100644
--- a/dispatcharr/settings.py
+++ b/dispatcharr/settings.py
@@ -1,7 +1,6 @@
import os
from pathlib import Path
from datetime import timedelta
-from celery.schedules import crontab
BASE_DIR = Path(__file__).resolve().parent.parent
diff --git a/docker/Dockerfile b/docker/Dockerfile
index 9a576eeb..e3f8a165 100644
--- a/docker/Dockerfile
+++ b/docker/Dockerfile
@@ -48,7 +48,7 @@ ENV PATH="/dispatcharrpy/bin:$PATH" \
# Copy the virtual environment and application from the builder stage
COPY --from=builder /dispatcharrpy /dispatcharrpy
COPY --from=builder /app /app
-COPY --from=frontend-builder /app/frontend /app/frontend
+COPY --from=frontend-builder /app/frontend/dist /app/frontend/dist
# Run collectstatic after frontend assets are copied
RUN cd /app && python manage.py collectstatic --noinput
diff --git a/frontend/src/App.jsx b/frontend/src/App.jsx
index 4fa2b9a9..a641a53b 100644
--- a/frontend/src/App.jsx
+++ b/frontend/src/App.jsx
@@ -13,6 +13,7 @@ import M3U from './pages/M3U';
import EPG from './pages/EPG';
import Guide from './pages/Guide';
import Stats from './pages/Stats';
+import DVR from './pages/DVR';
import Settings from './pages/Settings';
import StreamProfiles from './pages/StreamProfiles';
import useAuthStore from './store/auth';
@@ -127,6 +128,7 @@ const App = () => {
element={}
/>
<Route path="/guide" element={<Guide />} />
+ <Route path="/dvr" element={<DVR />} />
<Route path="/stats" element={<Stats />} />
<Route path="/settings" element={<Settings />} />
>
diff --git a/frontend/src/api.js b/frontend/src/api.js
index 9e1dfd55..9d8bf746 100644
--- a/frontend/src/api.js
+++ b/frontend/src/api.js
@@ -1035,6 +1035,19 @@ export default class API {
.updateProfileChannels(channelIds, profileId, enabled);
}
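+ // Fetch the full list of recordings for the DVR page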
+ static async getRecordings() {
+ const response = await fetch(`${host}/api/channels/recordings/`, {
+ headers: {
+ Authorization: `Bearer ${await API.getAuthToken()}`,
+ 'Content-Type': 'application/json',
+ },
+ });
+
+ const retval = await response.json();
+
+ return retval;
+ }
+
static async createRecording(values) {
const response = await fetch(`${host}/api/channels/recordings/`, {
method: 'POST',
@@ -1046,7 +1059,20 @@ export default class API {
});
const retval = await response.json();
+ useChannelsStore.getState().fetchRecordings();
return retval;
}
+
+ static async deleteRecording(id) {
+ const response = await fetch(`${host}/api/channels/recordings/${id}/`, {
+ method: 'DELETE',
+ headers: {
+ Authorization: `Bearer ${await API.getAuthToken()}`,
+ 'Content-Type': 'application/json',
+ },
+ });
+
+ useChannelsStore.getState().fetchRecordings();
+ }
}
diff --git a/frontend/src/components/Sidebar.jsx b/frontend/src/components/Sidebar.jsx
index 157381f0..00fb4045 100644
--- a/frontend/src/components/Sidebar.jsx
+++ b/frontend/src/components/Sidebar.jsx
@@ -9,6 +9,7 @@ import {
Settings as LucideSettings,
Copy,
ChartLine,
+ Video,
} from 'lucide-react';
import {
Avatar,
@@ -80,6 +81,7 @@ const Sidebar = ({ collapsed, toggleDrawer, drawerWidth, miniDrawerWidth }) => {
path: '/stream-profiles',
},
{ label: 'TV Guide', icon: , path: '/guide' },
+ { label: 'DVR', icon: <Video />, path: '/dvr' },
{ label: 'Stats', icon: <ChartLine />, path: '/stats' },
{
label: 'Settings',
diff --git a/frontend/src/components/forms/ChannelGroup.jsx b/frontend/src/components/forms/ChannelGroup.jsx
index 93741ef1..2429d1d9 100644
--- a/frontend/src/components/forms/ChannelGroup.jsx
+++ b/frontend/src/components/forms/ChannelGroup.jsx
@@ -1,40 +1,31 @@
// Modal.js
-import React, { useEffect } from 'react';
-import { useFormik } from 'formik';
-import * as Yup from 'yup';
+import React from 'react';
import API from '../../api';
import { Flex, TextInput, Button, Modal } from '@mantine/core';
+import { isNotEmpty, useForm } from '@mantine/form';
const ChannelGroup = ({ channelGroup = null, isOpen, onClose }) => {
- const formik = useFormik({
+ const form = useForm({
+ mode: 'uncontrolled',
initialValues: {
- name: '',
+ name: channelGroup ? channelGroup.name : '',
},
- validationSchema: Yup.object({
- name: Yup.string().required('Name is required'),
- }),
- onSubmit: async (values, { setSubmitting, resetForm }) => {
- if (channelGroup?.id) {
- await API.updateChannelGroup({ id: channelGroup.id, ...values });
- } else {
- await API.addChannelGroup(values);
- }
- resetForm();
- setSubmitting(false);
- onClose();
+ validate: {
+ name: isNotEmpty('Specify a name'),
},
});
- useEffect(() => {
+ const onSubmit = async () => {
+ const values = form.getValues();
if (channelGroup) {
- formik.setValues({
- name: channelGroup.name,
- });
+ await API.updateChannelGroup({ id: channelGroup.id, ...values });
} else {
- formik.resetForm();
+ await API.addChannelGroup(values);
}
- }, [channelGroup]);
+
+ form.reset();
+ onClose();
+ };
if (!isOpen) {
return <></>;
@@ -42,14 +33,13 @@ const ChannelGroup = ({ channelGroup = null, isOpen, onClose }) => {
return (
-