forked from Mirrors/Dispatcharr
merged in dev
commit eb1bbdd299
11 changed files with 275 additions and 121 deletions

apps/epg/migrations/0013_alter_epgsource_refresh_interval.py (new file, +18)
@@ -0,0 +1,18 @@
+# Generated by Django 5.1.6 on 2025-05-21 19:58
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ('epg', '0012_alter_epgsource_status'),
+    ]
+
+    operations = [
+        migrations.AlterField(
+            model_name='epgsource',
+            name='refresh_interval',
+            field=models.IntegerField(default=0),
+        ),
+    ]

@@ -32,7 +32,7 @@ class EPGSource(models.Model):
     api_key = models.CharField(max_length=255, blank=True, null=True) # For Schedules Direct
     is_active = models.BooleanField(default=True)
     file_path = models.CharField(max_length=1024, blank=True, null=True)
-    refresh_interval = models.IntegerField(default=24)
+    refresh_interval = models.IntegerField(default=0)
     refresh_task = models.ForeignKey(
         PeriodicTask, on_delete=models.SET_NULL, null=True, blank=True
     )

@@ -1445,17 +1445,42 @@ def fetch_schedules_direct(source):
 # -------------------------------
 def parse_xmltv_time(time_str):
     try:
+        # Basic format validation
+        if len(time_str) < 14:
+            logger.warning(f"XMLTV timestamp too short: '{time_str}', using as-is")
+            dt_obj = datetime.strptime(time_str, '%Y%m%d%H%M%S')
+            return timezone.make_aware(dt_obj, timezone=dt_timezone.utc)
+
+        # Parse base datetime
         dt_obj = datetime.strptime(time_str[:14], '%Y%m%d%H%M%S')
-        tz_sign = time_str[15]
-        tz_hours = int(time_str[16:18])
-        tz_minutes = int(time_str[18:20])
-        if tz_sign == '+':
-            dt_obj = dt_obj - timedelta(hours=tz_hours, minutes=tz_minutes)
-        elif tz_sign == '-':
-            dt_obj = dt_obj + timedelta(hours=tz_hours, minutes=tz_minutes)
-        aware_dt = timezone.make_aware(dt_obj, timezone=dt_timezone.utc)
-        logger.trace(f"Parsed XMLTV time '{time_str}' to {aware_dt}")
-        return aware_dt
+
+        # Handle timezone if present
+        if len(time_str) >= 20: # Has timezone info
+            tz_sign = time_str[15]
+            tz_hours = int(time_str[16:18])
+            tz_minutes = int(time_str[18:20])
+
+            # Create a timezone object
+            if tz_sign == '+':
+                tz_offset = dt_timezone(timedelta(hours=tz_hours, minutes=tz_minutes))
+            elif tz_sign == '-':
+                tz_offset = dt_timezone(timedelta(hours=-tz_hours, minutes=-tz_minutes))
+            else:
+                tz_offset = dt_timezone.utc
+
+            # Make datetime aware with correct timezone
+            aware_dt = datetime.replace(dt_obj, tzinfo=tz_offset)
+            # Convert to UTC
+            aware_dt = aware_dt.astimezone(dt_timezone.utc)
+
+            logger.trace(f"Parsed XMLTV time '{time_str}' to {aware_dt}")
+            return aware_dt
+        else:
+            # No timezone info, assume UTC
+            aware_dt = timezone.make_aware(dt_obj, timezone=dt_timezone.utc)
+            logger.trace(f"Parsed XMLTV time without timezone '{time_str}' as UTC: {aware_dt}")
+            return aware_dt
+
     except Exception as e:
         logger.error(f"Error parsing XMLTV time '{time_str}': {e}", exc_info=True)
         raise
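
A minimal standalone sketch of the offset handling introduced above, for reference. It assumes the common XMLTV layout "YYYYMMDDHHMMSS +HHMM" and uses only the standard library; to_utc is an illustrative name, not part of the project:

from datetime import datetime, timedelta, timezone

def to_utc(time_str):
    # First 14 digits are the wall-clock time
    dt = datetime.strptime(time_str[:14], '%Y%m%d%H%M%S')
    if len(time_str) >= 20:
        # Build a fixed-offset timezone from the trailing +HHMM / -HHMM
        sign = 1 if time_str[15] == '+' else -1
        offset = timedelta(hours=int(time_str[16:18]), minutes=int(time_str[18:20]))
        dt = dt.replace(tzinfo=timezone(sign * offset))
    else:
        dt = dt.replace(tzinfo=timezone.utc)  # no offset given, assume UTC
    return dt.astimezone(timezone.utc)

print(to_utc('20250521195800 +0200'))  # 2025-05-21 17:58:00+00:00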

@@ -0,0 +1,18 @@
+# Generated by Django 5.1.6 on 2025-05-21 19:58
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ('m3u', '0011_alter_m3uaccount_status'),
+    ]
+
+    operations = [
+        migrations.AlterField(
+            model_name='m3uaccount',
+            name='refresh_interval',
+            field=models.IntegerField(default=0),
+        ),
+    ]

@@ -86,7 +86,7 @@ class M3UAccount(models.Model):
     username = models.CharField(max_length=255, null=True, blank=True)
     password = models.CharField(max_length=255, null=True, blank=True)
     custom_properties = models.TextField(null=True, blank=True)
-    refresh_interval = models.IntegerField(default=24)
+    refresh_interval = models.IntegerField(default=0)
     refresh_task = models.ForeignKey(
         PeriodicTask, on_delete=models.SET_NULL, null=True, blank=True
     )

@@ -22,11 +22,11 @@ from core.utils import RedisClient, acquire_task_lock, release_task_lock
 from core.models import CoreSettings, UserAgent
 from asgiref.sync import async_to_sync
 from core.xtream_codes import Client as XCClient
+from core.utils import send_websocket_update

 logger = logging.getLogger(__name__)

 BATCH_SIZE = 1000
 SKIP_EXTS = {}
 m3u_dir = os.path.join(settings.MEDIA_ROOT, "cached_m3u")

 def fetch_m3u_lines(account, use_cache=False):

@@ -200,11 +200,6 @@ def parse_extinf_line(line: str) -> dict:
         'name': name
     }

-import re
-import logging
-
-logger = logging.getLogger(__name__)
-
 def _matches_filters(stream_name: str, group_name: str, filters):
     """Check if a stream or group name matches a precompiled regex filter."""
     compiled_filters = [(re.compile(f.regex_pattern, re.IGNORECASE), f.exclude) for f in filters]
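
As an aside, the precompiled-filter pattern visible in _matches_filters can be exercised in isolation roughly like this; the SimpleNamespace objects stand in for the project's filter model, and regex_pattern / exclude are the only attributes assumed here:

import re
from types import SimpleNamespace

filters = [SimpleNamespace(regex_pattern=r'sports', exclude=False),
           SimpleNamespace(regex_pattern=r'adult', exclude=True)]

# Compile once, then test many stream/group names case-insensitively
compiled_filters = [(re.compile(f.regex_pattern, re.IGNORECASE), f.exclude) for f in filters]

for name in ['ESPN Sports HD', 'Adult Movies', 'News 24']:
    hits = [(pattern.pattern, exclude) for pattern, exclude in compiled_filters if pattern.search(name)]
    print(name, '->', hits or 'no filter matched')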

@@ -245,7 +240,7 @@ def process_groups(account, groups):
     groups_to_create = []
     for group_name, custom_props in groups.items():
         logger.debug(f"Handling group: {group_name}")
-        if (group_name not in existing_groups) and (group_name not in SKIP_EXTS):
+        if (group_name not in existing_groups):
             groups_to_create.append(ChannelGroup(
                 name=group_name,
             ))

@@ -495,9 +490,9 @@ def process_m3u_batch(account_id, batch, groups, hash_keys):
     retval = f"Batch processed: {len(streams_to_create)} created, {len(streams_to_update)} updated."

     # Aggressive garbage collection
-    del streams_to_create, streams_to_update, stream_hashes, existing_streams
-    from core.utils import cleanup_memory
-    cleanup_memory(log_usage=True, force_collection=True)
+    #del streams_to_create, streams_to_update, stream_hashes, existing_streams
+    #from core.utils import cleanup_memory
+    #cleanup_memory(log_usage=True, force_collection=True)

     return retval

@@ -696,25 +691,68 @@ def refresh_m3u_groups(account_id, use_cache=False, full_refresh=False):
         release_task_lock('refresh_m3u_account_groups', account_id)
         return f"Failed to fetch M3U data for account_id={account_id}.", None

-    for line in lines:
+    # Log basic file structure for debugging
+    logger.debug(f"Processing {len(lines)} lines from M3U file")
+
+    line_count = 0
+    extinf_count = 0
+    url_count = 0
+    valid_stream_count = 0
+    problematic_lines = []
+
+    for line_index, line in enumerate(lines):
+        line_count += 1
         line = line.strip()

         if line.startswith("#EXTINF"):
+            extinf_count += 1
             parsed = parse_extinf_line(line)
             if parsed:
                 if "group-title" in parsed["attributes"]:
-                    groups[parsed["attributes"]["group-title"]] = {}
+                    group_name = parsed["attributes"]["group-title"]
+                    # Log new groups as they're discovered
+                    if group_name not in groups:
+                        logger.debug(f"Found new group: '{group_name}'")
+                        groups[group_name] = {}

                 extinf_data.append(parsed)
+            else:
+                # Log problematic EXTINF lines
+                logger.warning(f"Failed to parse EXTINF at line {line_index+1}: {line[:200]}")
+                problematic_lines.append((line_index+1, line[:200]))

         elif extinf_data and line.startswith("http"):
+            url_count += 1
+            # Associate URL with the last EXTINF line
             extinf_data[-1]["url"] = line
+            valid_stream_count += 1
+
+            # Periodically log progress for large files
+            if valid_stream_count % 1000 == 0:
+                logger.debug(f"Processed {valid_stream_count} valid streams so far...")
+
+    # Log summary statistics
+    logger.info(f"M3U parsing complete - Lines: {line_count}, EXTINF: {extinf_count}, URLs: {url_count}, Valid streams: {valid_stream_count}")
+
+    if problematic_lines:
+        logger.warning(f"Found {len(problematic_lines)} problematic lines during parsing")
+        for i, (line_num, content) in enumerate(problematic_lines[:10]): # Log max 10 examples
+            logger.warning(f"Problematic line #{i+1} at line {line_num}: {content}")
+        if len(problematic_lines) > 10:
+            logger.warning(f"... and {len(problematic_lines) - 10} more problematic lines")
+
+    # Log group statistics
+    logger.info(f"Found {len(groups)} groups in M3U file: {', '.join(list(groups.keys())[:20])}" +
+                ("..." if len(groups) > 20 else ""))

     # Cache processed data
     cache_path = os.path.join(m3u_dir, f"{account_id}.json")
     with open(cache_path, 'w', encoding='utf-8') as f:
         json.dump({
             "extinf_data": extinf_data,
             "groups": groups,
         }, f)
+    logger.debug(f"Cached parsed M3U data to {cache_path}")

     send_m3u_update(account_id, "processing_groups", 0)
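
For orientation, the EXTINF/URL pairing that this loop performs can be reduced to a few lines. This is a simplified sketch, not the project's implementation; the real code also parses attributes such as group-title, tracks counters, and logs progress:

def pair_streams(lines):
    streams, current = [], None
    for line in lines:
        line = line.strip()
        if line.startswith('#EXTINF'):
            # Remember the metadata line until its URL arrives
            current = {'extinf': line}
        elif current and line.startswith('http'):
            # The next http line belongs to the most recent #EXTINF
            current['url'] = line
            streams.append(current)
            current = None
    return streams

sample = ['#EXTINF:-1 group-title="News",News 24', 'http://example.com/news24.m3u8']
print(pair_streams(sample))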

@@ -924,6 +962,7 @@ def refresh_single_m3u_account(account_id):
         account.save(update_fields=['status'])

         if account.account_type == M3UAccount.Types.STADNARD:
+            logger.debug(f"Processing Standard account with groups: {existing_groups}")
             # Break into batches and process in parallel
             batches = [extinf_data[i:i + BATCH_SIZE] for i in range(0, len(extinf_data), BATCH_SIZE)]
             task_group = group(process_m3u_batch.s(account_id, batch, existing_groups, hash_keys) for batch in batches)

@@ -1090,8 +1129,6 @@ def refresh_single_m3u_account(account_id):

     return f"Dispatched jobs complete."

-from core.utils import send_websocket_update
-
 def send_m3u_update(account_id, action, progress, **kwargs):
     # Start with the base data dictionary
     data = {

@@ -59,9 +59,16 @@ class RedisClient:
             client.config_set('save', '') # Disable RDB snapshots
             client.config_set('appendonly', 'no') # Disable AOF logging

-            # Set optimal memory settings
-            client.config_set('maxmemory-policy', 'allkeys-lru') # Use LRU eviction
-            client.config_set('maxmemory', '256mb') # Set reasonable memory limit
+            # Set optimal memory settings with environment variable support
+            # Get max memory from environment or use a larger default (512MB instead of 256MB)
+            #max_memory = os.environ.get('REDIS_MAX_MEMORY', '512mb')
+            #eviction_policy = os.environ.get('REDIS_EVICTION_POLICY', 'allkeys-lru')
+
+            # Apply memory settings
+            #client.config_set('maxmemory-policy', eviction_policy)
+            #client.config_set('maxmemory', max_memory)
+
+            #logger.info(f"Redis configured with maxmemory={max_memory}, policy={eviction_policy}")

             # Disable protected mode when in debug mode
             if os.environ.get('DISPATCHARR_DEBUG', '').lower() == 'true':

@@ -69,10 +76,18 @@ class RedisClient:
logger.warning("Redis protected mode disabled for debug environment")
|
||||
|
||||
logger.trace("Redis persistence disabled for better performance")
|
||||
except redis.exceptions.ResponseError:
|
||||
# This might fail if Redis is configured to prohibit CONFIG command
|
||||
# or if running in protected mode - that's okay
|
||||
logger.error("Could not modify Redis persistence settings (may be restricted)")
|
||||
except redis.exceptions.ResponseError as e:
|
||||
# Improve error handling for Redis configuration errors
|
||||
if "OOM" in str(e):
|
||||
logger.error(f"Redis OOM during configuration: {e}")
|
||||
# Try to increase maxmemory as an emergency measure
|
||||
try:
|
||||
client.config_set('maxmemory', '768mb')
|
||||
logger.warning("Applied emergency Redis memory increase to 768MB")
|
||||
except:
|
||||
pass
|
||||
else:
|
||||
logger.error(f"Redis configuration error: {e}")
|
||||
|
||||
logger.info(f"Connected to Redis at {redis_host}:{redis_port}/{redis_db}")
|
||||
|
||||
|
|
|
|||
|
|
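
If the commented-out environment override above were enabled, the intent would look roughly like this; REDIS_MAX_MEMORY and REDIS_EVICTION_POLICY are the variable names from the commented lines, while the connection parameters here are placeholders:

import os
import redis

client = redis.Redis(host='localhost', port=6379, db=0)

# Fall back to the defaults mentioned in the commented-out code
max_memory = os.environ.get('REDIS_MAX_MEMORY', '512mb')
eviction_policy = os.environ.get('REDIS_EVICTION_POLICY', 'allkeys-lru')

try:
    client.config_set('maxmemory-policy', eviction_policy)
    client.config_set('maxmemory', max_memory)
except redis.exceptions.ResponseError:
    # CONFIG may be disabled or restricted on managed Redis instances
    pass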

@@ -50,7 +50,7 @@ app.conf.update(
 )

 # Add memory cleanup after task completion
-@task_postrun.connect # Use the imported signal
+#@task_postrun.connect # Use the imported signal
 def cleanup_task_memory(**kwargs):
     """Clean up memory after each task completes"""
     # Get task name from kwargs

@@ -12,19 +12,22 @@ import {
  NativeSelect,
  NumberInput,
  Space,
  Grid,
  Group,
  FileInput,
  Title,
  Text,
  Divider,
  Stack,
  Box,
} from '@mantine/core';
import { isNotEmpty, useForm } from '@mantine/form';
import { IconUpload } from '@tabler/icons-react';

const EPG = ({ epg = null, isOpen, onClose }) => {
  const epgs = useEPGsStore((state) => state.epgs);
  const [file, setFile] = useState(null);

  const handleFileChange = (e) => {
    const file = e.target.files[0];
    if (file) {
      setFile(file);
    }
  };
  // Remove the file state and handler since we're not supporting file uploads
  const [sourceType, setSourceType] = useState('xmltv');

  const form = useForm({
    mode: 'uncontrolled',

@@ -47,114 +50,151 @@ const EPG = ({ epg = null, isOpen, onClose }) => {
    const values = form.getValues();

    if (epg?.id) {
      await API.updateEPG({ id: epg.id, ...values, file });
      // Remove file from API call
      await API.updateEPG({ id: epg.id, ...values });
    } else {
      // Remove file from API call
      await API.addEPG({
        ...values,
        file,
      });
    }

    form.reset();
    setFile(null);
    onClose();
  };

  useEffect(() => {
    if (epg) {
      form.setValues({
      const values = {
        name: epg.name,
        source_type: epg.source_type,
        url: epg.url,
        api_key: epg.api_key,
        is_active: epg.is_active,
        refresh_interval: epg.refresh_interval,
      });
      };
      form.setValues(values);
      setSourceType(epg.source_type); // Update source type state
    } else {
      form.reset();
      setSourceType('xmltv'); // Reset to xmltv
    }
  }, [epg]);

  // Function to handle source type changes
  const handleSourceTypeChange = (value) => {
    form.setFieldValue('source_type', value);
    setSourceType(value);
  };

  if (!isOpen) {
    return <></>;
  }

  return (
    <Modal opened={isOpen} onClose={onClose} title="EPG Source">
    <Modal opened={isOpen} onClose={onClose} title="EPG Source" size={700}>
      <form onSubmit={form.onSubmit(onSubmit)}>
        <TextInput
          id="name"
          name="name"
          label="Name"
          description="Unique identifier for this EPG source"
          {...form.getInputProps('name')}
          key={form.key('name')}
        />
        <Group justify="space-between" align="top">
          {/* Left Column */}
          <Stack gap="md" style={{ flex: 1 }}>
            <TextInput
              id="name"
              name="name"
              label="Name"
              description="Unique identifier for this EPG source"
              {...form.getInputProps('name')}
              key={form.key('name')}
            />

            <TextInput
              id="url"
              name="url"
              label="URL"
              description="Direct URL to the XMLTV file or API endpoint"
              {...form.getInputProps('url')}
              key={form.key('url')}
            />
            <NativeSelect
              id="source_type"
              name="source_type"
              label="Source Type"
              description="Format of the EPG data source"
              {...form.getInputProps('source_type')}
              key={form.key('source_type')}
              data={[
                {
                  label: 'XMLTV',
                  value: 'xmltv',
                },
                {
                  label: 'Schedules Direct',
                  value: 'schedules_direct',
                },
              ]}
              onChange={(event) => handleSourceTypeChange(event.currentTarget.value)}
            />

            <TextInput
              id="api_key"
              name="api_key"
              label="API Key"
              description="API key for services that require authentication (like Schedules Direct)"
              {...form.getInputProps('api_key')}
              key={form.key('api_key')}
            />
            <NumberInput
              label="Refresh Interval (hours)"
              description="How often to refresh EPG data (0 to disable)"
              {...form.getInputProps('refresh_interval')}
              key={form.key('refresh_interval')}
              min={0}
            />
          </Stack>

          <NativeSelect
            id="source_type"
            name="source_type"
            label="Source Type"
            description="Format of the EPG data source"
            {...form.getInputProps('source_type')}
            key={form.key('source_type')}
            data={[
              {
                label: 'XMLTV',
                value: 'xmltv',
              },
              {
                label: 'Schedules Direct',
                value: 'schedules_direct',
              },
            ]}
          />
          <Divider size="sm" orientation="vertical" />

          <NumberInput
            label="Refresh Interval (hours)"
            description={<>How often to automatically refresh EPG data<br />
              (0 to disable automatic refreshes)</>}
            {...form.getInputProps('refresh_interval')}
            key={form.key('refresh_interval')}
          />
          {/* Right Column */}
          <Stack gap="md" style={{ flex: 1 }}>
            <TextInput
              id="url"
              name="url"
              label="URL"
              description="Direct URL to the XMLTV file or API endpoint"
              {...form.getInputProps('url')}
              key={form.key('url')}
            />

            <Checkbox
              id="is_active"
              name="is_active"
              label="Is Active"
              description="Enable or disable this EPG source"
              {...form.getInputProps('is_active', { type: 'checkbox' })}
              key={form.key('is_active')}
            />
            <TextInput
              id="api_key"
              name="api_key"
              label="API Key"
              description="API key for services that require authentication"
              {...form.getInputProps('api_key')}
              key={form.key('api_key')}
              disabled={sourceType !== 'schedules_direct'} // Use the state variable
            />

            <Flex mih={50} gap="xs" justify="flex-end" align="flex-end">
              <Button
                type="submit"
                variant="contained"
                disabled={form.submitting}
                size="small"
              >
                Submit
              </Button>
            </Flex>
            {/* Put checkbox at the same level as Refresh Interval */}
            <Box style={{ marginTop: 0 }}>
              <Text size="sm" fw={500} mb={3}>Status</Text>
              <Text size="xs" c="dimmed" mb={12}>When enabled, this EPG source will auto update.</Text>
              <Box style={{
                display: 'flex',
                alignItems: 'center',
                height: '30px', // Reduced height
                marginTop: '-4px' // Slight negative margin to move it up
              }}>
                <Checkbox
                  id="is_active"
                  name="is_active"
                  label="Enable this EPG source"
                  {...form.getInputProps('is_active', { type: 'checkbox' })}
                  key={form.key('is_active')}
                />
              </Box>
            </Box>
          </Stack>
        </Group>

        {/* Full Width Section */}
        <Box mt="md">
          <Divider my="sm" />

          <Group justify="end" mt="xl">
            <Button variant="outline" onClick={onClose}>Cancel</Button>
            <Button
              type="submit"
              variant="filled"
              disabled={form.submitting}
            >
              {epg?.id ? 'Update' : 'Create'} EPG Source
            </Button>
          </Group>
        </Box>
      </form>
    </Modal>
  );

@@ -306,14 +306,14 @@ const M3UTable = () => {
<Box>
|
||||
<Flex direction="column" gap={2}>
|
||||
<Flex justify="space-between" align="center">
|
||||
<Text size="xs" fw={500}>
|
||||
<Text size="xs" fw={500} style={{ width: '80px' }}>
|
||||
Parsing:
|
||||
</Text>
|
||||
<Text size="xs">{parseInt(data.progress)}%</Text>
|
||||
</Flex>
|
||||
{data.elapsed_time && (
|
||||
<Flex justify="space-between" align="center">
|
||||
<Text size="xs" fw={500}>
|
||||
<Text size="xs" fw={500} style={{ width: '80px' }}>
|
||||
Elapsed:
|
||||
</Text>
|
||||
<Text size="xs">{elapsedTime}</Text>
|
||||
|
|

@@ -321,7 +321,7 @@
         )}
         {data.time_remaining && (
           <Flex justify="space-between" align="center">
-            <Text size="xs" fw={500}>
+            <Text size="xs" fw={500} style={{ width: '60px' }}>
               Remaining:
             </Text>
             <Text size="xs">{timeRemaining}</Text>

@@ -329,7 +329,7 @@
         )}
         {data.streams_processed && (
           <Flex justify="space-between" align="center">
-            <Text size="xs" fw={500}>
+            <Text size="xs" fw={500} style={{ width: '80px' }}>
               Streams:
             </Text>
             <Text size="xs">{data.streams_processed}</Text>

@@ -561,7 +561,7 @@
c="dimmed"
|
||||
size="xs"
|
||||
lineClamp={2}
|
||||
style={{ lineHeight: 1.3 }}
|
||||
style={{ lineHeight: 1.1 }}
|
||||
>
|
||||
{value}
|
||||
</Text>
|
||||
|
|

@@ -23,6 +23,7 @@ tzlocal
 # PyTorch dependencies (CPU only)
 --extra-index-url https://download.pytorch.org/whl/cpu/
 torch==2.6.0+cpu
+tzlocal

 # ML/NLP dependencies
 sentence-transformers==3.4.1

@@ -30,4 +31,4 @@ channels
 channels-redis
 django-filter
 django-celery-beat
-lxml==5.4.0
+lxml==5.4.0