merged in dev

dekzter 2025-05-21 15:24:30 -04:00
commit e979113935
9 changed files with 131 additions and 63 deletions


@@ -542,6 +542,10 @@ class ChannelViewSet(viewsets.ModelViewSet):
channel_number = float(stream_custom_props["tvg-chno"])
elif "channel-number" in stream_custom_props:
channel_number = float(stream_custom_props["channel-number"])
# Get the tvc_guide_stationid from custom properties if it exists
tvc_guide_stationid = None
if "tvc-guide-stationid" in stream_custom_props:
tvc_guide_stationid = stream_custom_props["tvc-guide-stationid"]
# Determine channel number: if provided, use it (if free); else auto assign.
if channel_number is None:
@@ -577,6 +581,7 @@ class ChannelViewSet(viewsets.ModelViewSet):
channel_data = {
"channel_number": channel_number,
"name": name,
"tvc_guide_stationid": tvc_guide_stationid,
"tvg_id": stream.tvg_id,
"channel_group_id": channel_group.id,
}
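
For reference, the new tvc_guide_stationid lookup follows the same optional-attribute pattern as the existing tvg-chno / channel-number handling. A minimal standalone sketch of that pattern, assuming stream_custom_props is a plain dict of parsed #EXTINF attributes (the helper name is illustrative, not part of this commit):

def extract_channel_fields(stream_custom_props: dict) -> dict:
    # Channel number comes from either attribute, preferring tvg-chno.
    channel_number = None
    if "tvg-chno" in stream_custom_props:
        channel_number = float(stream_custom_props["tvg-chno"])
    elif "channel-number" in stream_custom_props:
        channel_number = float(stream_custom_props["channel-number"])
    # The station id is optional; .get() returns None when it is absent,
    # which is what the channel data should carry for "not set".
    tvc_guide_stationid = stream_custom_props.get("tvc-guide-stationid")
    return {
        "channel_number": channel_number,
        "tvc_guide_stationid": tvc_guide_stationid,
    }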


@@ -767,8 +767,9 @@ def parse_channels_only(source):
# Check if we should break early to avoid excessive sleep
if processed_channels >= total_channels and total_channels > 0:
logger.info(f"[parse_channels_only] Expected channel numbers hit, continuing - processed {processed_channels}/{total_channels}")
logger.debug(f"[parse_channels_only] Memory usage after {processed_channels}: {process.memory_info().rss / 1024 / 1024:.2f} MB")
#break
if process:
logger.debug(f"[parse_channels_only] Memory usage after {processed_channels}: {process.memory_info().rss / 1024 / 1024:.2f} MB")
logger.debug(f"[parse_channels_only] Total elements processed: {total_elements_processed}")
# Add periodic forced cleanup based on TOTAL ELEMENTS, not just channels
# This ensures we clean up even if processing many non-channel elements
@@ -776,10 +777,15 @@ def parse_channels_only(source):
logger.info(f"[parse_channels_only] Performing preventative memory cleanup after {total_elements_processed} elements (found {processed_channels} channels)")
# Close and reopen the parser to release memory
if source_file and channel_parser:
# First clear element references
elem.clear()
if elem.getparent() is not None:
elem.getparent().remove(elem)
# First clear element references - safely with checks
if 'elem' in locals() and elem is not None:
try:
elem.clear()
parent = elem.getparent()
if parent is not None:
parent.remove(elem)
except Exception as e:
logger.debug(f"Non-critical error during cleanup: {e}")
# Reset parser state
del channel_parser
@@ -799,10 +805,15 @@ def parse_channels_only(source):
logger.info(f"[parse_channels_only] Performing preventative memory cleanup at {processed_channels} channels")
# Close and reopen the parser to release memory
if source_file and channel_parser:
# First clear element references
elem.clear()
if elem.getparent() is not None:
elem.getparent().remove(elem)
# First clear element references - safely with checks
if 'elem' in locals() and elem is not None:
try:
elem.clear()
parent = elem.getparent()
if parent is not None:
parent.remove(elem)
except Exception as e:
logger.debug(f"Non-critical error during cleanup: {e}")
# Reset parser state
del channel_parser
@@ -818,7 +829,10 @@ def parse_channels_only(source):
logger.info(f"[parse_channels_only] Recreated parser context after memory cleanup")
if processed_channels == total_channels:
logger.info(f"[parse_channels_only] Processed all channels current memory: {process.memory_info().rss / 1024 / 1024:.2f} MB")
if process:
logger.info(f"[parse_channels_only] Processed all channels current memory: {process.memory_info().rss / 1024 / 1024:.2f} MB")
else:
logger.info(f"[parse_channels_only] Processed all channels")
except (etree.XMLSyntaxError, Exception) as xml_error:
logger.error(f"[parse_channels_only] XML parsing failed: {xml_error}")
@@ -1016,8 +1030,12 @@ def parse_programs_for_tvg_id(epg_id):
# Memory usage tracking
if process:
mem_before = process.memory_info().rss / 1024 / 1024
logger.info(f"[parse_programs_for_tvg_id] Memory before parsing {epg.tvg_id} - {mem_before:.2f} MB")
try:
mem_before = process.memory_info().rss / 1024 / 1024
logger.info(f"[parse_programs_for_tvg_id] Memory before parsing {epg.tvg_id} - {mem_before:.2f} MB")
except Exception as e:
logger.warning(f"Error tracking memory: {e}")
mem_before = 0
programs_to_create = []
batch_size = 1000 # Process in batches to limit memory usage
@@ -1160,8 +1178,11 @@ def parse_programs_for_tvg_id(epg_id):
source_file = None
# Memory tracking after processing
if process:
mem_after = process.memory_info().rss / 1024 / 1024
logger.info(f"[parse_programs_for_tvg_id] Memory after parsing 1 {epg.tvg_id} - {programs_processed} programs: {mem_after:.2f} MB (change: {mem_after-mem_before:.2f} MB)")
try:
mem_after = process.memory_info().rss / 1024 / 1024
logger.info(f"[parse_programs_for_tvg_id] Memory after parsing 1 {epg.tvg_id} - {programs_processed} programs: {mem_after:.2f} MB (change: {mem_after-mem_before:.2f} MB)")
except Exception as e:
logger.warning(f"Error tracking memory: {e}")
# Process any remaining items
if programs_to_create:
@@ -1194,8 +1215,11 @@ def parse_programs_for_tvg_id(epg_id):
cleanup_memory(log_usage=should_log_memory, force_collection=True)
# Memory tracking after processing
if process:
mem_after = process.memory_info().rss / 1024 / 1024
logger.info(f"[parse_programs_for_tvg_id] Final memory usage {epg.tvg_id} - {programs_processed} programs: {mem_after:.2f} MB (change: {mem_after-mem_before:.2f} MB)")
try:
mem_after = process.memory_info().rss / 1024 / 1024
logger.info(f"[parse_programs_for_tvg_id] Final memory usage {epg.tvg_id} - {programs_processed} programs: {mem_after:.2f} MB (change: {mem_after-mem_before:.2f} MB)")
except Exception as e:
logger.warning(f"Error tracking memory: {e}")
process = None
epg = None
programs_processed = None
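
The memory-tracking hunks in this file all converge on the same guarded pattern: only read RSS when a psutil process handle exists, and never let a failed reading abort parsing. A small sketch of that pattern as a helper (the helper name and surrounding usage are illustrative, not from this commit):

import logging
import psutil

logger = logging.getLogger(__name__)

def rss_mb(process):
    """Return the process RSS in MB, or None if tracking is unavailable."""
    if process is None:
        return None
    try:
        return process.memory_info().rss / 1024 / 1024
    except Exception as e:
        logger.warning(f"Error tracking memory: {e}")
        return None

# Usage: bracket an expensive step with before/after readings.
proc = psutil.Process()
mem_before = rss_mb(proc) or 0
# ... parse programs ...
mem_after = rss_mb(proc)
if mem_after is not None:
    logger.info(f"Memory change: {mem_after - mem_before:.2f} MB")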


@@ -65,7 +65,15 @@ class M3UAccountViewSet(viewsets.ModelViewSet):
request.data["file_path"] = (
file_path # Include the file path if a file was uploaded
)
request.data.pop("server_url")
# Handle the user_agent field - convert "null" string to None
if "user_agent" in request.data and request.data["user_agent"] == "null":
request.data["user_agent"] = None
# Handle server_url appropriately
if "server_url" in request.data and not request.data["server_url"]:
request.data.pop("server_url")
request.data._mutable = False # Make the request data immutable again
# Now call super().create() to create the instance
@@ -98,16 +106,24 @@ class M3UAccountViewSet(viewsets.ModelViewSet):
request.data["file_path"] = (
file_path # Include the file path if a file was uploaded
)
request.data.pop("server_url")
# Handle the user_agent field - convert "null" string to None
if "user_agent" in request.data and request.data["user_agent"] == "null":
request.data["user_agent"] = None
# Handle server_url appropriately
if "server_url" in request.data and not request.data["server_url"]:
request.data.pop("server_url")
request.data._mutable = False # Make the request data immutable again
if instance.file_path and os.path.exists(instance.file_path):
os.remove(instance.file_path)
# Now call super().create() to create the instance
# Now call super().update() to update the instance
response = super().update(request, *args, **kwargs)
# After the instance is created, return the response
# After the instance is updated, return the response
return response
def partial_update(self, request, *args, **kwargs):
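
Both create() and update() now normalize the multipart payload the same way before re-freezing request.data. A standalone sketch of those rules, assuming data is a mutable copy of the incoming payload (the helper name is illustrative, not part of this commit):

def normalize_m3u_payload(data):
    # HTML selects submit the literal string "null" when cleared; convert it
    # to None so the serializer stores NULL rather than the string "null".
    if data.get("user_agent") == "null":
        data["user_agent"] = None
    # Drop server_url only when it is present but empty, so a real URL
    # submitted alongside an uploaded file is preserved.
    if "server_url" in data and not data["server_url"]:
        data.pop("server_url")
    return data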


@@ -376,28 +376,33 @@ class StreamManager:
logger.debug(f"Started stderr reader thread for channel {self.channel_id}")
def _read_stderr(self):
"""Read and log stderr output from the transcode process"""
"""Read and log ffmpeg stderr output"""
try:
if not self.transcode_process or not self.transcode_process.stderr:
logger.warning(f"No stderr to read for channel {self.channel_id}")
return
for line in iter(self.transcode_process.stderr.readline, b''):
if not line:
break
# Decode the line and strip whitespace
error_line = line.decode('utf-8', errors='replace').strip()
# Skip empty lines
if not error_line:
continue
# Log all stderr output as debug messages
logger.debug(f"Transcode stderr [{self.channel_id}]: {error_line}")
for error_line in iter(self.transcode_process.stderr.readline, b''):
if error_line:
error_line = error_line.decode('utf-8', errors='replace').strip()
try:
# Wrap the logging call in a try-except to prevent crashes due to logging errors
logger.debug(f"Transcode stderr [{self.channel_id}]: {error_line}")
except OSError as e:
# If logging fails, try a simplified log message
if e.errno == 105: # No buffer space available
try:
# Try a much shorter message without the error content
logger.warning(f"Logging error (buffer full) in channel {self.channel_id}")
except:
# If even that fails, we have to silently continue
pass
except Exception:
# Ignore other logging errors to prevent thread crashes
pass
except Exception as e:
logger.error(f"Error reading transcode stderr: {e}")
# Catch any other exceptions in the thread to prevent crashes
try:
logger.error(f"Error in stderr reader thread: {e}")
except:
# Again, if logging fails, continue silently
pass
def _establish_http_connection(self):
"""Establish a direct HTTP connection to the stream"""


@@ -47,6 +47,7 @@ thunder-lock = true
log-4xx = true
log-5xx = true
disable-logging = false
log-buffering = 1024 # Add buffer size limit for logging
; Longer timeouts for debugging sessions
harakiri = 3600


@@ -57,4 +57,5 @@ log-master = true
logformat-strftime = true
log-date = %%Y-%%m-%%d %%H:%%M:%%S,000
# Use formatted time with environment variable for log level
log-format = %(ftime) $(DISPATCHARR_LOG_LEVEL) uwsgi.requests Worker ID: %(wid) %(method) %(status) %(uri) %(msecs)ms
log-format = %(ftime) $(DISPATCHARR_LOG_LEVEL) uwsgi.requests Worker ID: %(wid) %(method) %(status) %(uri) %(msecs)ms
log-buffering = 1024 # Add buffer size limit for logging


@@ -55,4 +55,5 @@ log-master = true
logformat-strftime = true
log-date = %%Y-%%m-%%d %%H:%%M:%%S,000
# Use formatted time with environment variable for log level
log-format = %(ftime) $(DISPATCHARR_LOG_LEVEL) uwsgi.requests Worker ID: %(wid) %(method) %(status) %(uri) %(msecs)ms
log-format = %(ftime) $(DISPATCHARR_LOG_LEVEL) uwsgi.requests Worker ID: %(wid) %(method) %(status) %(uri) %(msecs)ms
log-buffering = 1024 # Add buffer size limit for logging


@@ -307,7 +307,7 @@ const M3U = ({
description="User-Agent header to use when accessing this M3U source"
{...form.getInputProps('user_agent')}
key={form.key('user_agent')}
data={[{ value: '0', label: '(use default)' }].concat(
data={[{ value: '0', label: '(Use Default)' }].concat(
userAgents.map((ua) => ({
label: ua.name,
value: `${ua.id}`,


@@ -692,25 +692,40 @@ const ChannelsPage = () => {
}, [channelStats, channels, channelsByUUID, streamProfiles]);
return (
<SimpleGrid cols={3} spacing="md" style={{ padding: 10 }}>
{Object.keys(activeChannels).length === 0 ? (
<Box style={{ gridColumn: '1 / -1', textAlign: 'center', padding: '40px' }}>
<Text size="xl" color="dimmed">No active channels currently streaming</Text>
</Box>
) : (
Object.values(activeChannels).map((channel) => (
<ChannelCard
key={channel.channel_id}
channel={channel}
clients={clients}
stopClient={stopClient}
stopChannel={stopChannel}
logos={logos} // Pass logos to the component
channelsByUUID={channelsByUUID} // Pass channelsByUUID to fix the error
/>
))
)}
</SimpleGrid>
<Box style={{ overflowX: 'auto' }}>
<SimpleGrid
cols={{ base: 1, sm: 1, md: 2, lg: 3, xl: 3 }}
spacing="md"
style={{ padding: 10 }}
breakpoints={[
{ maxWidth: '72rem', cols: 2, spacing: 'md' },
{ maxWidth: '48rem', cols: 1, spacing: 'md' },
]}
verticalSpacing="lg"
>
{Object.keys(activeChannels).length === 0 ? (
<Box style={{ gridColumn: '1 / -1', textAlign: 'center', padding: '40px' }}>
<Text size="xl" color="dimmed">No active channels currently streaming</Text>
</Box>
) : (
Object.values(activeChannels).map((channel) => (
<Box
key={channel.channel_id}
style={{ minWidth: '420px', width: '100%' }}
>
<ChannelCard
channel={channel}
clients={clients}
stopClient={stopClient}
stopChannel={stopChannel}
logos={logos}
channelsByUUID={channelsByUUID}
/>
</Box>
))
)}
</SimpleGrid>
</Box>
);
};