Mirror of https://github.com/Dispatcharr/Dispatcharr.git (synced 2026-01-23 18:54:58 +00:00)

Merge remote-tracking branch 'origin/dev' into Media-Server

Commit 466df48cd5
16 changed files with 200 additions and 47 deletions

CHANGELOG.md (25 changes)

@@ -7,15 +7,38 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).

 ## [Unreleased]
+
+### Fixed
+
+- nginx now gracefully handles hosts without IPv6 support by automatically disabling IPv6 binding at startup
+- XtreamCodes EPG API now returns correct date/time format for start/end fields and proper string types for timestamps and channel_id
+
+## [0.14.0] - 2025-12-09
+
+### Added
+
+- Sort buttons for 'Group' and 'M3U' columns in Streams table for improved stream organization and filtering - Thanks [@bobey6](https://github.com/bobey6)
+- EPG source priority field for controlling which EPG source is preferred when multiple sources have matching entries for a channel (higher numbers = higher priority) (Closes #603)

 ### Changed

-- **Performance**: EPG program parsing optimized for sources with many channels but only a fraction mapped. Now parses XML file once per source instead of once per channel, dramatically reducing I/O and CPU overhead. For sources with 10,000 channels and 100 mapped, this results in ~99x fewer file opens and ~100x fewer full file scans. Orphaned programs for unmapped channels are also cleaned up during refresh to prevent database bloat. Database updates are now atomic to prevent clients from seeing empty/partial EPG data during refresh.
+- EPG program parsing optimized for sources with many channels but only a fraction mapped. Now parses the XML file once per source instead of once per channel, dramatically reducing I/O and CPU overhead. For a source with 10,000 channels of which 100 are mapped, this means ~99x fewer file opens and ~100x fewer full file scans. Orphaned programs for unmapped channels are also cleaned up during refresh to prevent database bloat, and database updates are now atomic so clients never see empty or partial EPG data during a refresh.
+- EPG table now displays detailed status messages including refresh progress, success messages, and last message for idle sources (matching M3U table behavior) (Closes #214)
+- IPv6 access now allowed by default with all IPv6 CIDRs accepted - Thanks [@adrianmace](https://github.com/adrianmace)
+- nginx.conf updated to bind to both IPv4 and IPv6 ports - Thanks [@jordandalley](https://github.com/jordandalley)
+- EPG matching now respects source priority and only uses active (enabled) EPG sources (Closes #672)
+- EPG form API Key field now only visible when Schedules Direct source type is selected
+
+### Fixed
+
+- EPG table "Updated" column now updates in real-time via WebSocket using the actual backend timestamp instead of requiring a page refresh
+- Bulk channel editor confirmation dialog now displays the correct stream profile name that will be applied to the selected channels
+- uWSGI not found and 502 bad gateway on first startup

 ## [0.13.1] - 2025-12-06

 ### Fixed

 - JWT token is now generated uniquely for each deployment

 ## [0.13.0] - 2025-12-02
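
The one-pass parsing described in the 0.14.0 performance entry can be illustrated with a minimal sketch (hypothetical function and variable names; the real parser, channel mapping, and database writes live in the EPG tasks module). Instead of re-opening the XMLTV file once per mapped channel, the file is streamed once and programmes for unmapped channels are skipped as they go by:

```python
import xml.etree.ElementTree as ET

def parse_programs_once(xmltv_path, mapped_channel_ids):
    """Single pass over an XMLTV file: open the file once and keep only
    programmes whose channel is actually mapped, instead of re-scanning
    the file once per channel."""
    programs = {cid: [] for cid in mapped_channel_ids}
    for _, elem in ET.iterparse(xmltv_path, events=("end",)):
        if elem.tag == "programme":
            cid = elem.get("channel")
            if cid in programs:  # skip programmes for unmapped channels
                programs[cid].append((elem.get("start"), elem.get("stop")))
            elem.clear()  # free memory as we stream through the file
    return programs
```

With 10,000 channels and 100 mapped, this is one file open and one full scan, versus 100 opens and 100 scans under the per-channel approach.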

@@ -295,7 +295,11 @@ def match_channels_to_epg(channels_data, epg_data, region_code=None, use_ml=True
             if score > 50:  # Only show decent matches
                 logger.debug(f"EPG '{row['name']}' (norm: '{row['norm_name']}') => score: {score} (base: {base_score}, bonus: {bonus})")

-            if score > best_score:
+            # When scores are equal, prefer higher priority EPG source
+            row_priority = row.get('epg_source_priority', 0)
+            best_priority = best_epg.get('epg_source_priority', 0) if best_epg else -1
+
+            if score > best_score or (score == best_score and row_priority > best_priority):
                 best_score = score
                 best_epg = row
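
In isolation, the new tie-break behaves like this (toy rows shaped like the dicts built below; a fixed score stands in for the real fuzzy-match result):

```python
# Two candidates that match a channel equally well, from sources with
# different priorities; 'epg_source_priority' defaults to 0 as in the diff.
rows = [
    {"name": "HBO (source A)", "epg_source_priority": 0},
    {"name": "HBO (source B)", "epg_source_priority": 5},
]

best_score, best_epg = 0, None
for row in rows:
    score = 80  # pretend both candidates score identically
    row_priority = row.get("epg_source_priority", 0)
    best_priority = best_epg.get("epg_source_priority", 0) if best_epg else -1
    if score > best_score or (score == best_score and row_priority > best_priority):
        best_score, best_epg = score, row

assert best_epg["name"] == "HBO (source B)"  # priority breaks the tie
```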

@@ -471,9 +475,9 @@ def match_epg_channels():
             "norm_chan": normalize_name(channel.name)  # Always use channel name for fuzzy matching!
         })

-    # Get all EPG data
+    # Get all EPG data from active sources, ordered by source priority (highest first) so we prefer higher priority matches
     epg_data = []
-    for epg in EPGData.objects.all():
+    for epg in EPGData.objects.select_related('epg_source').filter(epg_source__is_active=True):
         normalized_tvg_id = epg.tvg_id.strip().lower() if epg.tvg_id else ""
         epg_data.append({
             'id': epg.id,

@@ -482,9 +486,13 @@ def match_epg_channels():
             'name': epg.name,
             'norm_name': normalize_name(epg.name),
             'epg_source_id': epg.epg_source.id if epg.epg_source else None,
+            'epg_source_priority': epg.epg_source.priority if epg.epg_source else 0,
         })

-    logger.info(f"Processing {len(channels_data)} channels against {len(epg_data)} EPG entries")
+    # Sort EPG data by source priority (highest first) so we prefer higher priority matches
+    epg_data.sort(key=lambda x: x['epg_source_priority'], reverse=True)
+
+    logger.info(f"Processing {len(channels_data)} channels against {len(epg_data)} EPG entries (from active sources only)")

     # Run EPG matching with progress updates - automatically uses conservative thresholds for bulk operations
     result = match_channels_to_epg(channels_data, epg_data, region_code, use_ml=True, send_progress=True)
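
The descending sort deserves a note: Python's sort is stable, so entries with equal priority keep their queryset order, and higher-priority sources are simply encountered first by the matcher. A tiny sketch (made-up ids):

```python
epg_data = [
    {"id": 1, "epg_source_priority": 0},
    {"id": 2, "epg_source_priority": 10},
    {"id": 3, "epg_source_priority": 10},
]
epg_data.sort(key=lambda x: x["epg_source_priority"], reverse=True)
assert [e["id"] for e in epg_data] == [2, 3, 1]  # stable within equal priority
```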

@@ -618,9 +626,9 @@ def match_selected_channels_epg(channel_ids):
             "norm_chan": normalize_name(channel.name)
         })

-    # Get all EPG data
+    # Get all EPG data from active sources, ordered by source priority (highest first) so we prefer higher priority matches
     epg_data = []
-    for epg in EPGData.objects.all():
+    for epg in EPGData.objects.select_related('epg_source').filter(epg_source__is_active=True):
         normalized_tvg_id = epg.tvg_id.strip().lower() if epg.tvg_id else ""
         epg_data.append({
             'id': epg.id,

@@ -629,9 +637,13 @@ def match_selected_channels_epg(channel_ids):
             'name': epg.name,
             'norm_name': normalize_name(epg.name),
             'epg_source_id': epg.epg_source.id if epg.epg_source else None,
+            'epg_source_priority': epg.epg_source.priority if epg.epg_source else 0,
         })

-    logger.info(f"Processing {len(channels_data)} selected channels against {len(epg_data)} EPG entries")
+    # Sort EPG data by source priority (highest first) so we prefer higher priority matches
+    epg_data.sort(key=lambda x: x['epg_source_priority'], reverse=True)
+
+    logger.info(f"Processing {len(channels_data)} selected channels against {len(epg_data)} EPG entries (from active sources only)")

     # Run EPG matching with progress updates - automatically uses appropriate thresholds
     result = match_channels_to_epg(channels_data, epg_data, region_code, use_ml=True, send_progress=True)

@@ -749,9 +761,10 @@ def match_single_channel_epg(channel_id):
     test_normalized = normalize_name(test_name)
     logger.debug(f"DEBUG normalization example: '{test_name}' → '{test_normalized}' (call sign preserved)")

-    # Get all EPG data for matching - must include norm_name field
+    # Get all EPG data for matching from active sources - must include norm_name field
+    # Ordered by source priority (highest first) so we prefer higher priority matches
     epg_data_list = []
-    for epg in EPGData.objects.filter(name__isnull=False).exclude(name=''):
+    for epg in EPGData.objects.select_related('epg_source').filter(epg_source__is_active=True, name__isnull=False).exclude(name=''):
         normalized_epg_tvg_id = epg.tvg_id.strip().lower() if epg.tvg_id else ""
         epg_data_list.append({
             'id': epg.id,

@@ -760,10 +773,14 @@ def match_single_channel_epg(channel_id):
             'name': epg.name,
             'norm_name': normalize_name(epg.name),
             'epg_source_id': epg.epg_source.id if epg.epg_source else None,
+            'epg_source_priority': epg.epg_source.priority if epg.epg_source else 0,
         })

+    # Sort EPG data by source priority (highest first) so we prefer higher priority matches
+    epg_data_list.sort(key=lambda x: x['epg_source_priority'], reverse=True)
+
     if not epg_data_list:
-        return {"matched": False, "message": "No EPG data available for matching"}
+        return {"matched": False, "message": "No EPG data available for matching (from active sources)"}

     logger.info(f"Matching single channel '{channel.name}' against {len(epg_data_list)} EPG entries")

apps/epg/migrations/0021_epgsource_priority.py (new file, 18 additions)

@@ -0,0 +1,18 @@
+# Generated by Django 5.2.4 on 2025-12-05 15:24
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ('epg', '0020_migrate_time_to_starttime_placeholders'),
+    ]
+
+    operations = [
+        migrations.AddField(
+            model_name='epgsource',
+            name='priority',
+            field=models.PositiveIntegerField(default=0, help_text='Priority for EPG matching (higher numbers = higher priority). Used when multiple EPG sources have matching entries for a channel.'),
+        ),
+    ]

@@ -45,6 +45,10 @@ class EPGSource(models.Model):
         null=True,
         help_text="Custom properties for dummy EPG configuration (regex patterns, timezone, duration, etc.)"
     )
+    priority = models.PositiveIntegerField(
+        default=0,
+        help_text="Priority for EPG matching (higher numbers = higher priority). Used when multiple EPG sources have matching entries for a channel."
+    )
     status = models.CharField(
         max_length=20,
         choices=STATUS_CHOICES,

@@ -24,6 +24,7 @@ class EPGSourceSerializer(serializers.ModelSerializer):
             'is_active',
             'file_path',
             'refresh_interval',
+            'priority',
             'status',
             'last_message',
             'created_at',

@@ -1650,7 +1650,7 @@ def parse_programs_for_source(epg_source, tvg_id=None):
         epg_source.status = EPGSource.STATUS_SUCCESS
         epg_source.last_message = (
             f"Parsed {total_programs:,} programs for {channels_with_programs} channels "
-            f"(skipped {skipped_programs:,} programmes for {total_epg_count - mapped_count} unmapped channels)"
+            f"(skipped {skipped_programs:,} programs for {total_epg_count - mapped_count} unmapped channels)"
         )
         epg_source.updated_at = timezone.now()
         epg_source.save(update_fields=['status', 'last_message', 'updated_at'])

@@ -1668,11 +1668,12 @@ def parse_programs_for_source(epg_source, tvg_id=None):
         # Send completion notification with status
         send_epg_update(epg_source.id, "parsing_programs", 100,
                         status="success",
-                        message=epg_source.last_message)
+                        message=epg_source.last_message,
+                        updated_at=epg_source.updated_at.isoformat())

         logger.info(f"Completed parsing programs for source: {epg_source.name} - "
-                    f"{total_programs:,} programs for {channels_with_programs} channels, "
-                    f"skipped {skipped_programs:,} programmes for unmapped channels")
+                    f"{total_programs:,} programs for {channels_with_programs} channels, "
+                    f"skipped {skipped_programs:,} programs for unmapped channels")
         return True

     except Exception as e:
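
Two stdlib details doing the work in this hunk, shown in isolation: the `:,` format spec renders thousands separators in the status message, and `isoformat()` produces the timestamp string the frontend consumes (made-up values):

```python
from datetime import datetime, timezone

total_programs, skipped_programs = 123456, 7890
message = f"Parsed {total_programs:,} programs (skipped {skipped_programs:,})"
assert message == "Parsed 123,456 programs (skipped 7,890)"

updated_at = datetime(2025, 12, 9, 18, 30, tzinfo=timezone.utc)
assert updated_at.isoformat() == "2025-12-09T18:30:00+00:00"
```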

@@ -2316,18 +2316,18 @@ def xc_get_epg(request, user, short=False):
                 "epg_id": f"{epg_id}",
                 "title": base64.b64encode(title.encode()).decode(),
                 "lang": "",
-                "start": start.strftime("%Y%m%d%H%M%S"),
-                "end": end.strftime("%Y%m%d%H%M%S"),
+                "start": start.strftime("%Y-%m-%d %H:%M:%S"),
+                "end": end.strftime("%Y-%m-%d %H:%M:%S"),
                 "description": base64.b64encode(description.encode()).decode(),
-                "channel_id": channel_num_int,
-                "start_timestamp": int(start.timestamp()),
-                "stop_timestamp": int(end.timestamp()),
+                "channel_id": str(channel_num_int),
+                "start_timestamp": str(int(start.timestamp())),
+                "stop_timestamp": str(int(end.timestamp())),
                 "stream_id": f"{channel_id}",
             }

             if short == False:
                 program_output["now_playing"] = 1 if start <= django_timezone.now() <= end else 0
-                program_output["has_archive"] = "0"
+                program_output["has_archive"] = 0
             output['epg_listings'].append(program_output)
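
Assembled in isolation, one `epg_listings` entry now looks like this after the fix (made-up values; `title` and `description` stay base64-encoded as before):

```python
import base64
from datetime import datetime, timedelta, timezone

start = datetime(2025, 12, 9, 20, 0, tzinfo=timezone.utc)
end = start + timedelta(hours=1)

listing = {
    "title": base64.b64encode("Evening News".encode()).decode(),
    "start": start.strftime("%Y-%m-%d %H:%M:%S"),    # was "%Y%m%d%H%M%S"
    "end": end.strftime("%Y-%m-%d %H:%M:%S"),
    "channel_id": str(101),                          # was an int
    "start_timestamp": str(int(start.timestamp())),  # was an int
    "stop_timestamp": str(int(end.timestamp())),     # was an int
}
print(listing["start"])  # 2025-12-09 20:00:00
```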

@@ -4,7 +4,7 @@ from datetime import timedelta

 BASE_DIR = Path(__file__).resolve().parent.parent

-SECRET_KEY = "REPLACE_ME_WITH_A_REAL_SECRET"
+SECRET_KEY = os.environ.get("DJANGO_SECRET_KEY")
 REDIS_HOST = os.environ.get("REDIS_HOST", "localhost")
 REDIS_DB = os.environ.get("REDIS_DB", "0")
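
A note on what this settings change implies: the key now comes from the environment, so a deployment that never exports `DJANGO_SECRET_KEY` would boot with `SECRET_KEY = None`. A minimal sketch of a fail-fast guard (our addition for illustration, not part of the diff):

```python
import os

SECRET_KEY = os.environ.get("DJANGO_SECRET_KEY")
if not SECRET_KEY:
    # Fail loudly instead of starting Django with an unusable secret key;
    # the entrypoint change below is what normally exports this variable.
    raise RuntimeError("DJANGO_SECRET_KEY is not set")
```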

@@ -40,6 +40,21 @@ export REDIS_DB=${REDIS_DB:-0}
 export DISPATCHARR_PORT=${DISPATCHARR_PORT:-9191}
 export LIBVA_DRIVERS_PATH='/usr/local/lib/x86_64-linux-gnu/dri'
 export LD_LIBRARY_PATH='/usr/local/lib'
+export SECRET_FILE="/data/jwt"
+# Ensure Django secret key exists or generate a new one
+if [ ! -f "$SECRET_FILE" ]; then
+    echo "Generating new Django secret key..."
+    old_umask=$(umask)
+    umask 077
+    tmpfile="$(mktemp "${SECRET_FILE}.XXXXXX")" || { echo "mktemp failed"; exit 1; }
+    python3 - <<'PY' >"$tmpfile" || { echo "secret generation failed"; rm -f "$tmpfile"; exit 1; }
+import secrets
+print(secrets.token_urlsafe(64))
+PY
+    mv -f "$tmpfile" "$SECRET_FILE" || { echo "move failed"; rm -f "$tmpfile"; exit 1; }
+    umask $old_umask
+fi
+export DJANGO_SECRET_KEY="$(cat "$SECRET_FILE")"

 # Process priority configuration
 # UWSGI_NICE_LEVEL: Absolute nice value for uWSGI/streaming (default: 0 = normal priority)
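
The shell logic above, restated as a Python sketch (illustrative only; the `/data/jwt` path is the script's, the helper function is ours): generate the key once with owner-only permissions, then reuse it on every start so issued JWTs survive container restarts:

```python
import os
import secrets
from pathlib import Path

def load_or_create_secret(path="/data/jwt"):
    secret_file = Path(path)
    if not secret_file.exists():
        # O_EXCL + 0o600 mirrors the umask-077 + mktemp dance in the script
        fd = os.open(secret_file, os.O_WRONLY | os.O_CREAT | os.O_EXCL, 0o600)
        with os.fdopen(fd, "w") as f:
            f.write(secrets.token_urlsafe(64))
    return secret_file.read_text().strip()

os.environ["DJANGO_SECRET_KEY"] = load_or_create_secret("/tmp/demo-jwt")
```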

@@ -90,7 +105,7 @@ if [[ ! -f /etc/profile.d/dispatcharr.sh ]]; then
         DISPATCHARR_ENV DISPATCHARR_DEBUG DISPATCHARR_LOG_LEVEL
         REDIS_HOST REDIS_DB POSTGRES_DIR DISPATCHARR_PORT
         DISPATCHARR_VERSION DISPATCHARR_TIMESTAMP LIBVA_DRIVERS_PATH LIBVA_DRIVER_NAME LD_LIBRARY_PATH
-        CELERY_NICE_LEVEL UWSGI_NICE_LEVEL
+        CELERY_NICE_LEVEL UWSGI_NICE_LEVEL DJANGO_SECRET_KEY
     )

     # Process each variable for both profile.d and environment

@@ -187,7 +202,7 @@ fi
 # Users can override via UWSGI_NICE_LEVEL environment variable in docker-compose
 # Start with nice as root, then use setpriv to drop privileges to dispatch user
 # This preserves both the nice value and environment variables
-nice -n $UWSGI_NICE_LEVEL su -p - "$POSTGRES_USER" -c "cd /app && exec uwsgi $uwsgi_args" & uwsgi_pid=$!
+nice -n $UWSGI_NICE_LEVEL su - "$POSTGRES_USER" -c "cd /app && exec /dispatcharrpy/bin/uwsgi $uwsgi_args" & uwsgi_pid=$!
 echo "✅ uwsgi started with PID $uwsgi_pid (nice $UWSGI_NICE_LEVEL)"
 pids+=("$uwsgi_pid")

@@ -29,9 +29,17 @@ if [ "$(id -u)" = "0" ] && [ -d "/app" ]; then
         chown $PUID:$PGID /app
     fi
 fi

 # Configure nginx port
 sed -i "s/NGINX_PORT/${DISPATCHARR_PORT}/g" /etc/nginx/sites-enabled/default

+# Configure nginx based on IPv6 availability
+if ip -6 addr show | grep -q "inet6"; then
+    echo "✅ IPv6 is available, enabling IPv6 in nginx"
+else
+    echo "⚠️ IPv6 not available, disabling IPv6 in nginx"
+    sed -i '/listen \[::\]:/d' /etc/nginx/sites-enabled/default
+fi
+
 # NOTE: mac doesn't run as root, so only manage permissions
 # if this script is running as root
 if [ "$(id -u)" = "0" ]; then

@@ -20,7 +20,6 @@ module = scripts.debug_wrapper:application
 virtualenv = /dispatcharrpy
 master = true
 env = DJANGO_SETTINGS_MODULE=dispatcharr.settings
-
 socket = /app/uwsgi.sock
 chmod-socket = 777
 vacuum = true

@@ -631,7 +631,7 @@ export const WebsocketProvider = ({ children }) => {
             const sourceId =
               parsedEvent.data.source || parsedEvent.data.account;
             const epg = epgs[sourceId];

             // Only update progress if the EPG still exists in the store
             // This prevents crashes when receiving updates for deleted EPGs
             if (epg) {

@@ -639,7 +639,9 @@ export const WebsocketProvider = ({ children }) => {
               updateEPGProgress(parsedEvent.data);
             } else {
               // EPG was deleted, ignore this update
-              console.debug(`Ignoring EPG refresh update for deleted EPG ${sourceId}`);
+              console.debug(
+                `Ignoring EPG refresh update for deleted EPG ${sourceId}`
+              );
               break;
             }

@@ -678,6 +680,10 @@ export const WebsocketProvider = ({ children }) => {
                   status: parsedEvent.data.status || 'success',
                   last_message:
                     parsedEvent.data.message || epg.last_message,
+                  // Use the timestamp from the backend if provided
+                  ...(parsedEvent.data.updated_at && {
+                    updated_at: parsedEvent.data.updated_at,
+                  }),
                 });

                 // Only show success notification if we've finished parsing programs and had no errors

@@ -135,8 +135,10 @@ const ChannelBatchForm = ({ channelIds, isOpen, onClose }) => {
       if (values.stream_profile_id === '0') {
         changes.push(`• Stream Profile: Use Default`);
       } else {
-        const profileName =
-          streamProfiles[values.stream_profile_id]?.name || 'Selected Profile';
+        const profile = streamProfiles.find(
+          (p) => `${p.id}` === `${values.stream_profile_id}`
+        );
+        const profileName = profile?.name || 'Selected Profile';
         changes.push(`• Stream Profile: ${profileName}`);
       }
     }

@@ -29,6 +29,7 @@ const EPG = ({ epg = null, isOpen, onClose }) => {
       api_key: '',
       is_active: true,
      refresh_interval: 24,
+      priority: 0,
     },

     validate: {

@@ -69,6 +70,7 @@ const EPG = ({ epg = null, isOpen, onClose }) => {
         api_key: epg.api_key,
         is_active: epg.is_active,
         refresh_interval: epg.refresh_interval,
+        priority: epg.priority ?? 0,
       };
       form.setValues(values);
       setSourceType(epg.source_type);

@@ -148,14 +150,24 @@ const EPG = ({ epg = null, isOpen, onClose }) => {
           key={form.key('url')}
         />

-        <TextInput
-          id="api_key"
-          name="api_key"
-          label="API Key"
-          description="API key for services that require authentication"
-          {...form.getInputProps('api_key')}
-          key={form.key('api_key')}
-          disabled={sourceType !== 'schedules_direct'}
-        />
+        {sourceType === 'schedules_direct' && (
+          <TextInput
+            id="api_key"
+            name="api_key"
+            label="API Key"
+            description="API key for services that require authentication"
+            {...form.getInputProps('api_key')}
+            key={form.key('api_key')}
+          />
+        )}
+
+        <NumberInput
+          min={0}
+          max={999}
+          label="Priority"
+          description="Priority for EPG matching (higher numbers = higher priority). Used when multiple EPG sources have matching entries for a channel."
+          {...form.getInputProps('priority')}
+          key={form.key('priority')}
+        />

         {/* Put checkbox at the same level as Refresh Interval */}

@@ -160,6 +160,9 @@ const EPGsTable = () => {
       case 'downloading':
         label = 'Downloading';
         break;
+      case 'extracting':
+        label = 'Extracting';
+        break;
       case 'parsing_channels':
         label = 'Parsing Channels';
         break;

@@ -170,6 +173,22 @@ const EPGsTable = () => {
       return null;
     }

+    // Build additional info string from progress data
+    let additionalInfo = '';
+    if (progress.message) {
+      additionalInfo = progress.message;
+    } else if (
+      progress.processed !== undefined &&
+      progress.channels !== undefined
+    ) {
+      additionalInfo = `${progress.processed.toLocaleString()} programs for ${progress.channels} channels`;
+    } else if (
+      progress.processed !== undefined &&
+      progress.total !== undefined
+    ) {
+      additionalInfo = `${progress.processed.toLocaleString()} / ${progress.total.toLocaleString()}`;
+    }
+
     return (
       <Stack spacing={2}>
         <Text size="xs">

@@ -181,7 +200,14 @@ const EPGsTable = () => {
           style={{ margin: '2px 0' }}
         />
         {progress.speed && (
-          <Text size="xs">Speed: {parseInt(progress.speed)} KB/s</Text>
+          <Text size="xs" c="dimmed">
+            Speed: {parseInt(progress.speed)} KB/s
+          </Text>
         )}
+        {additionalInfo && (
+          <Text size="xs" c="dimmed" lineClamp={1}>
+            {additionalInfo}
+          </Text>
+        )}
       </Stack>
     );

@@ -286,14 +312,35 @@ const EPGsTable = () => {

     // Show success message for successful sources
     if (data.status === 'success') {
+      const successMessage =
+        data.last_message || 'EPG data refreshed successfully';
       return (
-        <Text
-          c="dimmed"
-          size="xs"
-          style={{ color: theme.colors.green[6], lineHeight: 1.3 }}
-        >
-          EPG data refreshed successfully
-        </Text>
+        <Tooltip label={successMessage} multiline width={300}>
+          <Text
+            c="dimmed"
+            size="xs"
+            lineClamp={2}
+            style={{ color: theme.colors.green[6], lineHeight: 1.3 }}
+          >
+            {successMessage}
+          </Text>
+        </Tooltip>
       );
     }

+    // Show last_message for idle sources (from previous refresh)
+    if (data.status === 'idle' && data.last_message) {
+      return (
+        <Tooltip label={data.last_message} multiline width={300}>
+          <Text
+            c="dimmed"
+            size="xs"
+            lineClamp={2}
+            style={{ lineHeight: 1.3 }}
+          >
+            {data.last_message}
+          </Text>
+        </Tooltip>
+      );
+    }
+

@@ -1,5 +1,5 @@
 """
 Dispatcharr version information.
 """
-__version__ = '0.13.0'  # Follow semantic versioning (MAJOR.MINOR.PATCH)
+__version__ = '0.14.0'  # Follow semantic versioning (MAJOR.MINOR.PATCH)
 __timestamp__ = None  # Set during CI/CD build process