diff --git a/apps/channels/api_views.py b/apps/channels/api_views.py
index ff681a44..b16f3afd 100644
--- a/apps/channels/api_views.py
+++ b/apps/channels/api_views.py
@@ -542,6 +542,10 @@ class ChannelViewSet(viewsets.ModelViewSet):
channel_number = float(stream_custom_props["tvg-chno"])
elif "channel-number" in stream_custom_props:
channel_number = float(stream_custom_props["channel-number"])
+ # Get the tvc_guide_stationid from custom properties if it exists
+ tvc_guide_stationid = None
+ if "tvc-guide-stationid" in stream_custom_props:
+ tvc_guide_stationid = stream_custom_props["tvc-guide-stationid"]
# Determine channel number: if provided, use it (if free); else auto assign.
if channel_number is None:
@@ -577,6 +581,7 @@ class ChannelViewSet(viewsets.ModelViewSet):
channel_data = {
"channel_number": channel_number,
"name": name,
+ "tvc_guide_stationid": tvc_guide_stationid,
"tvg_id": stream.tvg_id,
"channel_group_id": channel_group.id,
}
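
The lookups above all read optional keys out of the parsed M3U custom properties. A minimal standalone sketch of that precedence logic, assuming `stream_custom_props` is a plain dict and using a hypothetical helper name:

```python
def extract_channel_props(stream_custom_props):
    """Pull the channel number and station id from parsed M3U custom properties."""
    channel_number = None
    # "tvg-chno" takes precedence over "channel-number", mirroring the view code
    for key in ("tvg-chno", "channel-number"):
        if key in stream_custom_props:
            channel_number = float(stream_custom_props[key])
            break
    # .get() yields None when the property is absent, matching the explicit check
    tvc_guide_stationid = stream_custom_props.get("tvc-guide-stationid")
    return channel_number, tvc_guide_stationid
```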
diff --git a/apps/epg/tasks.py b/apps/epg/tasks.py
index 60428484..71e468bd 100644
--- a/apps/epg/tasks.py
+++ b/apps/epg/tasks.py
@@ -767,8 +767,9 @@ def parse_channels_only(source):
# Check if we should break early to avoid excessive sleep
if processed_channels >= total_channels and total_channels > 0:
logger.info(f"[parse_channels_only] Expected channel numbers hit, continuing - processed {processed_channels}/{total_channels}")
- logger.debug(f"[parse_channels_only] Memory usage after {processed_channels}: {process.memory_info().rss / 1024 / 1024:.2f} MB")
- #break
+ if process:
+ logger.debug(f"[parse_channels_only] Memory usage after {processed_channels}: {process.memory_info().rss / 1024 / 1024:.2f} MB")
+
logger.debug(f"[parse_channels_only] Total elements processed: {total_elements_processed}")
# Add periodic forced cleanup based on TOTAL ELEMENTS, not just channels
# This ensures we clean up even if processing many non-channel elements
@@ -776,10 +777,15 @@ def parse_channels_only(source):
logger.info(f"[parse_channels_only] Performing preventative memory cleanup after {total_elements_processed} elements (found {processed_channels} channels)")
# Close and reopen the parser to release memory
if source_file and channel_parser:
- # First clear element references
- elem.clear()
- if elem.getparent() is not None:
- elem.getparent().remove(elem)
+ # First clear element references - safely with checks
+ if 'elem' in locals() and elem is not None:
+ try:
+ elem.clear()
+ parent = elem.getparent()
+ if parent is not None:
+ parent.remove(elem)
+ except Exception as e:
+ logger.debug(f"Non-critical error during cleanup: {e}")
# Reset parser state
del channel_parser
@@ -799,10 +805,15 @@ def parse_channels_only(source):
logger.info(f"[parse_channels_only] Performing preventative memory cleanup at {processed_channels} channels")
# Close and reopen the parser to release memory
if source_file and channel_parser:
- # First clear element references
- elem.clear()
- if elem.getparent() is not None:
- elem.getparent().remove(elem)
+ # First clear element references - safely with checks
+ if 'elem' in locals() and elem is not None:
+ try:
+ elem.clear()
+ parent = elem.getparent()
+ if parent is not None:
+ parent.remove(elem)
+ except Exception as e:
+ logger.debug(f"Non-critical error during cleanup: {e}")
# Reset parser state
del channel_parser
@@ -818,7 +829,10 @@ def parse_channels_only(source):
logger.info(f"[parse_channels_only] Recreated parser context after memory cleanup")
if processed_channels == total_channels:
- logger.info(f"[parse_channels_only] Processed all channels current memory: {process.memory_info().rss / 1024 / 1024:.2f} MB")
+ if process:
+ logger.info(f"[parse_channels_only] Processed all channels current memory: {process.memory_info().rss / 1024 / 1024:.2f} MB")
+ else:
+ logger.info(f"[parse_channels_only] Processed all channels")
except (etree.XMLSyntaxError, Exception) as xml_error:
logger.error(f"[parse_channels_only] XML parsing failed: {xml_error}")
@@ -1016,8 +1030,12 @@ def parse_programs_for_tvg_id(epg_id):
# Memory usage tracking
if process:
- mem_before = process.memory_info().rss / 1024 / 1024
- logger.info(f"[parse_programs_for_tvg_id] Memory before parsing {epg.tvg_id} - {mem_before:.2f} MB")
+ try:
+ mem_before = process.memory_info().rss / 1024 / 1024
+ logger.info(f"[parse_programs_for_tvg_id] Memory before parsing {epg.tvg_id} - {mem_before:.2f} MB")
+ except Exception as e:
+ logger.warning(f"Error tracking memory: {e}")
+ mem_before = 0
programs_to_create = []
batch_size = 1000 # Process in batches to limit memory usage
@@ -1160,8 +1178,11 @@ def parse_programs_for_tvg_id(epg_id):
source_file = None
# Memory tracking after processing
if process:
- mem_after = process.memory_info().rss / 1024 / 1024
- logger.info(f"[parse_programs_for_tvg_id] Memory after parsing 1 {epg.tvg_id} - {programs_processed} programs: {mem_after:.2f} MB (change: {mem_after-mem_before:.2f} MB)")
+ try:
+ mem_after = process.memory_info().rss / 1024 / 1024
+                logger.info(f"[parse_programs_for_tvg_id] Memory after parsing {epg.tvg_id} - {programs_processed} programs: {mem_after:.2f} MB (change: {mem_after-mem_before:.2f} MB)")
+ except Exception as e:
+ logger.warning(f"Error tracking memory: {e}")
# Process any remaining items
if programs_to_create:
@@ -1194,8 +1215,11 @@ def parse_programs_for_tvg_id(epg_id):
cleanup_memory(log_usage=should_log_memory, force_collection=True)
# Memory tracking after processing
if process:
- mem_after = process.memory_info().rss / 1024 / 1024
- logger.info(f"[parse_programs_for_tvg_id] Final memory usage {epg.tvg_id} - {programs_processed} programs: {mem_after:.2f} MB (change: {mem_after-mem_before:.2f} MB)")
+ try:
+ mem_after = process.memory_info().rss / 1024 / 1024
+ logger.info(f"[parse_programs_for_tvg_id] Final memory usage {epg.tvg_id} - {programs_processed} programs: {mem_after:.2f} MB (change: {mem_after-mem_before:.2f} MB)")
+ except Exception as e:
+ logger.warning(f"Error tracking memory: {e}")
process = None
epg = None
programs_processed = None
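
Both cleanup blocks guard the standard lxml iterparse idiom: clear each element after use and detach it from its parent so the partially built tree cannot grow without bound. A self-contained sketch of that idiom, assuming an XMLTV-style file with `<channel>` elements (the file path and tag are illustrative):

```python
from lxml import etree

def count_channels(path):
    """Stream-parse an XML file, freeing each element as it is consumed."""
    count = 0
    for _event, elem in etree.iterparse(path, events=("end",), tag="channel"):
        count += 1
        elem.clear()  # drop the element's children and text
        parent = elem.getparent()
        if parent is not None:
            parent.remove(elem)  # detach from the tree built behind the iterator
    return count
```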
diff --git a/apps/m3u/api_views.py b/apps/m3u/api_views.py
index aad719ae..571ace28 100644
--- a/apps/m3u/api_views.py
+++ b/apps/m3u/api_views.py
@@ -65,7 +65,15 @@ class M3UAccountViewSet(viewsets.ModelViewSet):
request.data["file_path"] = (
file_path # Include the file path if a file was uploaded
)
- request.data.pop("server_url")
+
+ # Handle the user_agent field - convert "null" string to None
+ if "user_agent" in request.data and request.data["user_agent"] == "null":
+ request.data["user_agent"] = None
+
+ # Handle server_url appropriately
+ if "server_url" in request.data and not request.data["server_url"]:
+ request.data.pop("server_url")
+
request.data._mutable = False # Make the request data immutable again
# Now call super().create() to create the instance
@@ -98,16 +106,24 @@ class M3UAccountViewSet(viewsets.ModelViewSet):
request.data["file_path"] = (
file_path # Include the file path if a file was uploaded
)
- request.data.pop("server_url")
+
+ # Handle the user_agent field - convert "null" string to None
+ if "user_agent" in request.data and request.data["user_agent"] == "null":
+ request.data["user_agent"] = None
+
+ # Handle server_url appropriately
+ if "server_url" in request.data and not request.data["server_url"]:
+ request.data.pop("server_url")
+
request.data._mutable = False # Make the request data immutable again
if instance.file_path and os.path.exists(instance.file_path):
os.remove(instance.file_path)
- # Now call super().create() to create the instance
+ # Now call super().update() to update the instance
response = super().update(request, *args, **kwargs)
- # After the instance is created, return the response
+ # After the instance is updated, return the response
return response
def partial_update(self, request, *args, **kwargs):
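
create() and update() now apply the same two normalizations before delegating to DRF. A minimal sketch of that step, assuming a Django QueryDict-style `request.data` (the helper name is hypothetical):

```python
def normalize_m3u_payload(data):
    """Fix up multipart form data in place before DRF serialization."""
    data._mutable = True  # QueryDicts arrive immutable
    # Multipart forms serialize a cleared select as the string "null"
    if data.get("user_agent") == "null":
        data["user_agent"] = None
    # Drop server_url only when present but empty, so real values survive
    if "server_url" in data and not data["server_url"]:
        data.pop("server_url")
    data._mutable = False
```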
diff --git a/apps/proxy/ts_proxy/stream_manager.py b/apps/proxy/ts_proxy/stream_manager.py
index 7d158c09..054de05b 100644
--- a/apps/proxy/ts_proxy/stream_manager.py
+++ b/apps/proxy/ts_proxy/stream_manager.py
@@ -376,28 +376,33 @@ class StreamManager:
logger.debug(f"Started stderr reader thread for channel {self.channel_id}")
def _read_stderr(self):
- """Read and log stderr output from the transcode process"""
+ """Read and log ffmpeg stderr output"""
try:
- if not self.transcode_process or not self.transcode_process.stderr:
- logger.warning(f"No stderr to read for channel {self.channel_id}")
- return
-
- for line in iter(self.transcode_process.stderr.readline, b''):
- if not line:
- break
-
- # Decode the line and strip whitespace
- error_line = line.decode('utf-8', errors='replace').strip()
-
- # Skip empty lines
- if not error_line:
- continue
-
- # Log all stderr output as debug messages
- logger.debug(f"Transcode stderr [{self.channel_id}]: {error_line}")
-
+ for error_line in iter(self.transcode_process.stderr.readline, b''):
+ if error_line:
+ error_line = error_line.decode('utf-8', errors='replace').strip()
+ try:
+ # Wrap the logging call in a try-except to prevent crashes due to logging errors
+ logger.debug(f"Transcode stderr [{self.channel_id}]: {error_line}")
+ except OSError as e:
+ # If logging fails, try a simplified log message
+ if e.errno == 105: # No buffer space available
+ try:
+ # Try a much shorter message without the error content
+ logger.warning(f"Logging error (buffer full) in channel {self.channel_id}")
+                            except Exception:
+ # If even that fails, we have to silently continue
+ pass
+ except Exception:
+ # Ignore other logging errors to prevent thread crashes
+ pass
except Exception as e:
- logger.error(f"Error reading transcode stderr: {e}")
+ # Catch any other exceptions in the thread to prevent crashes
+ try:
+ logger.error(f"Error in stderr reader thread: {e}")
+            except Exception:
+ # Again, if logging fails, continue silently
+ pass
def _establish_http_connection(self):
"""Establish a direct HTTP connection to the stream"""
diff --git a/docker/uwsgi.debug.ini b/docker/uwsgi.debug.ini
index ea567e1e..6ca855f3 100644
--- a/docker/uwsgi.debug.ini
+++ b/docker/uwsgi.debug.ini
@@ -47,6 +47,8 @@ thunder-lock = true
log-4xx = true
log-5xx = true
disable-logging = false
+; Buffer size limit for logging
+log-buffering = 1024
; Longer timeouts for debugging sessions
harakiri = 3600
diff --git a/docker/uwsgi.dev.ini b/docker/uwsgi.dev.ini
index 62a5f352..f3e5238e 100644
--- a/docker/uwsgi.dev.ini
+++ b/docker/uwsgi.dev.ini
@@ -57,4 +57,6 @@ log-master = true
logformat-strftime = true
log-date = %%Y-%%m-%%d %%H:%%M:%%S,000
# Use formatted time with environment variable for log level
-log-format = %(ftime) $(DISPATCHARR_LOG_LEVEL) uwsgi.requests Worker ID: %(wid) %(method) %(status) %(uri) %(msecs)ms
\ No newline at end of file
+log-format = %(ftime) $(DISPATCHARR_LOG_LEVEL) uwsgi.requests Worker ID: %(wid) %(method) %(status) %(uri) %(msecs)ms
+# Buffer size limit for logging
+log-buffering = 1024
\ No newline at end of file
diff --git a/docker/uwsgi.ini b/docker/uwsgi.ini
index 5068268c..32eb6e3c 100644
--- a/docker/uwsgi.ini
+++ b/docker/uwsgi.ini
@@ -55,4 +55,6 @@ log-master = true
logformat-strftime = true
log-date = %%Y-%%m-%%d %%H:%%M:%%S,000
# Use formatted time with environment variable for log level
-log-format = %(ftime) $(DISPATCHARR_LOG_LEVEL) uwsgi.requests Worker ID: %(wid) %(method) %(status) %(uri) %(msecs)ms
\ No newline at end of file
+log-format = %(ftime) $(DISPATCHARR_LOG_LEVEL) uwsgi.requests Worker ID: %(wid) %(method) %(status) %(uri) %(msecs)ms
+# Buffer size limit for logging
+log-buffering = 1024
\ No newline at end of file
diff --git a/frontend/src/components/forms/M3U.jsx b/frontend/src/components/forms/M3U.jsx
index 8e8fd932..9affa984 100644
--- a/frontend/src/components/forms/M3U.jsx
+++ b/frontend/src/components/forms/M3U.jsx
@@ -307,7 +307,7 @@ const M3U = ({
description="User-Agent header to use when accessing this M3U source"
{...form.getInputProps('user_agent')}
key={form.key('user_agent')}
- data={[{ value: '0', label: '(use default)' }].concat(
+ data={[{ value: '0', label: '(Use Default)' }].concat(
userAgents.map((ua) => ({
label: ua.name,
value: `${ua.id}`,
diff --git a/frontend/src/pages/Stats.jsx b/frontend/src/pages/Stats.jsx
index fc6705b0..fa3250ef 100644
--- a/frontend/src/pages/Stats.jsx
+++ b/frontend/src/pages/Stats.jsx
@@ -692,25 +692,40 @@ const ChannelsPage = () => {
}, [channelStats, channels, channelsByUUID, streamProfiles]);
return (
-
- {Object.keys(activeChannels).length === 0 ? (
-
- No active channels currently streaming
-
- ) : (
- Object.values(activeChannels).map((channel) => (
-
- ))
- )}
-
+
+
+ {Object.keys(activeChannels).length === 0 ? (
+
+ No active channels currently streaming
+
+ ) : (
+ Object.values(activeChannels).map((channel) => (
+
+
+
+ ))
+ )}
+
+
);
};