Add user stream profile settings and configurable service ports

- Add stream profile selection to user settings, allowing per-user default streaming profiles
- Add configurable UWSGI_PORT, REDIS_PORT, and DAPHNE_PORT environment variables (see the override sketch below)
- Add a PostgreSQL database existence check to the initialization scripts
- Update the nginx and uWSGI configs and the entrypoint to use the configurable ports
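For reference, a minimal sketch of overriding the new port variables in the container environment; the values shown are simply the defaults the entrypoint falls back to, so anything beyond the variable names and defaults is an assumption rather than part of this diff:

    export REDIS_PORT=6379        # passed to redis-server by the uwsgi attach-daemon line
    export UWSGI_PORT=5656        # uWSGI HTTP port that nginx proxies API and stream traffic to
    export DAPHNE_PORT=8001       # Daphne port that nginx proxies /ws/ websocket traffic to
    export DISPATCHARR_PORT=9191  # external nginx listen port (already configurable before this change)

In a docker-compose deployment the same variables would normally go under the service's environment block rather than being exported by hand.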
Patti 2026-01-20 21:31:10 +01:00
parent 0e1db3e39c
commit 63fed9131a
16 changed files with 182 additions and 30 deletions

View file

@@ -0,0 +1,26 @@
# Generated manually
from django.db import migrations, models
import django.db.models.deletion


class Migration(migrations.Migration):

    dependencies = [
        ('accounts', '0003_alter_user_custom_properties'),
        ('core', '0001_initial'),
    ]

    operations = [
        migrations.AddField(
            model_name='user',
            name='stream_profile',
            field=models.ForeignKey(
                blank=True,
                null=True,
                on_delete=django.db.models.deletion.SET_NULL,
                related_name='users',
                to='core.streamprofile',
            ),
        ),
    ]

View file

@@ -1,6 +1,7 @@
# apps/accounts/models.py
from django.db import models
from django.contrib.auth.models import AbstractUser, Permission
from core.models import StreamProfile


class User(AbstractUser):
@@ -22,6 +23,13 @@ class User(AbstractUser):
    )
    user_level = models.IntegerField(default=UserLevel.STREAMER)
    custom_properties = models.JSONField(default=dict, blank=True, null=True)
    stream_profile = models.ForeignKey(
        StreamProfile,
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        related_name="users",
    )

    def __str__(self):
        return self.username

View file

@@ -38,6 +38,7 @@ class UserSerializer(serializers.ModelSerializer):
            "user_level",
            "password",
            "channel_profiles",
            "stream_profile",
            "custom_properties",
            "avatar_config",
            "is_active",

View file

@@ -355,15 +355,25 @@ class Channel(models.Model):
n += 1
return n
# @TODO: honor stream's stream profile
def get_stream_profile(self):
stream_profile = self.stream_profile
if not stream_profile:
stream_profile = StreamProfile.objects.get(
id=CoreSettings.get_default_stream_profile_id()
)
def get_stream_profile(self, user=None):
"""
Get the stream profile using resolution hierarchy:
1. User's stream_profile (if provided and set) - Highest priority
2. Channel's stream_profile (if set)
3. System default from CoreSettings
"""
# User preference takes priority
if user and hasattr(user, 'stream_profile') and user.stream_profile:
return user.stream_profile
return stream_profile
# Then channel-level
if self.stream_profile:
return self.stream_profile
# System default
return StreamProfile.objects.get(
id=CoreSettings.get_default_stream_profile_id()
)
def get_stream(self):
"""

View file

@@ -463,10 +463,10 @@ class StreamManager:
self._close_connection()
channel = get_stream_object(self.channel_id)
from core.models import StreamProfile
# Use FFmpeg specifically for HLS streams
if hasattr(self, 'force_ffmpeg') and self.force_ffmpeg:
from core.models import StreamProfile
try:
stream_profile = StreamProfile.objects.get(name='ffmpeg', locked=True)
logger.info("Using FFmpeg stream profile for unsupported proxy content (HLS/RTSP/UDP)")
@@ -475,7 +475,25 @@
stream_profile = channel.get_stream_profile()
logger.warning(f"FFmpeg profile not found, using channel default profile for channel: {self.channel_id}")
else:
stream_profile = channel.get_stream_profile()
# Try to get stream profile from Redis metadata (set during channel initialization with user context)
stream_profile = None
try:
from .server import ProxyServer
proxy_server = ProxyServer.get_instance()
if proxy_server.redis_client:
metadata_key = RedisKeys.channel_metadata(self.channel_id)
stream_profile_id = proxy_server.redis_client.hget(metadata_key, ChannelMetadataField.STREAM_PROFILE)
if stream_profile_id:
stream_profile_id = int(stream_profile_id.decode('utf-8') if isinstance(stream_profile_id, bytes) else stream_profile_id)
stream_profile = StreamProfile.objects.get(id=stream_profile_id)
logger.debug(f"Using stream profile from Redis metadata: {stream_profile.name} for channel: {self.channel_id}")
except Exception as e:
logger.debug(f"Could not get stream profile from Redis metadata: {e}")
# Fall back to channel's default profile
if not stream_profile:
stream_profile = channel.get_stream_profile()
logger.debug(f"Using channel's default stream profile: {stream_profile.name} for channel: {self.channel_id}")
# Build and start transcode command
self.transcode_cmd = stream_profile.build_command(self.url, self.user_agent)

View file

@@ -24,12 +24,13 @@ def get_stream_object(id: str):
logger.info(f"Fetching stream hash {id}")
return get_object_or_404(Stream, stream_hash=id)
def generate_stream_url(channel_id: str) -> Tuple[str, str, bool, Optional[int]]:
def generate_stream_url(channel_id: str, user=None) -> Tuple[str, str, bool, Optional[int]]:
"""
Generate the appropriate stream URL for a channel or stream based on its profile settings.
Args:
channel_id: The UUID of the channel or stream hash
user: Optional user object for user-specific stream profile resolution
Returns:
Tuple[str, str, bool, Optional[int]]: (stream_url, user_agent, transcode_flag, profile_id)
@@ -98,10 +99,18 @@ def generate_stream_url(channel_id: str) -> Tuple[str, str, bool, Optional[int]]
# Get stream URL with the selected profile's URL transformation
stream_url = transform_url(stream.url, selected_profile.search_pattern, selected_profile.replace_pattern)
# Check if the stream has its own stream_profile set, otherwise use default
if stream.stream_profile:
# Check stream profile with user preference taking priority
stream_profile = None
# User preference takes priority
if user and hasattr(user, 'stream_profile') and user.stream_profile:
stream_profile = user.stream_profile
logger.debug(f"Using user's stream profile: {stream_profile.name}")
# Then stream's own profile
elif stream.stream_profile:
stream_profile = stream.stream_profile
logger.debug(f"Using stream's own stream profile: {stream_profile.name}")
# Fall back to default
else:
stream_profile = StreamProfile.objects.get(
id=CoreSettings.get_default_stream_profile_id()
@@ -153,7 +162,7 @@ def generate_stream_url(channel_id: str) -> Tuple[str, str, bool, Optional[int]]
stream_url = transform_url(input_url, m3u_profile.search_pattern, m3u_profile.replace_pattern)
# Check if transcoding is needed
stream_profile = channel.get_stream_profile()
stream_profile = channel.get_stream_profile(user=user)
if stream_profile.is_proxy() or stream_profile is None:
transcode = False
else:

View file

@@ -52,6 +52,9 @@ def stream_ts(request, channel_id):
"""Stream TS data to client with immediate response and keep-alive packets during initialization"""
channel = get_stream_object(channel_id)
# Extract user context if available (set by stream_xc for XC API requests)
dispatcharr_user = getattr(request, 'dispatcharr_user', None)
client_user_agent = None
proxy_server = ProxyServer.get_instance()
@@ -166,7 +169,7 @@
while should_retry and time.time() - wait_start_time < retry_timeout:
attempt += 1
stream_url, stream_user_agent, transcode, profile_value = (
generate_stream_url(channel_id)
generate_stream_url(channel_id, user=dispatcharr_user)
)
if stream_url is not None:
@@ -211,7 +214,7 @@
f"[{client_id}] Making final attempt {attempt} at timeout boundary"
)
stream_url, stream_user_agent, transcode, profile_value = (
generate_stream_url(channel_id)
generate_stream_url(channel_id, user=dispatcharr_user)
)
if stream_url is not None:
logger.info(
@@ -553,6 +556,8 @@ def stream_xc(request, username, password, channel_id):
channel = get_object_or_404(Channel, id=channel_id)
# @TODO: we've got the file 'type' via extension, support this when we support multiple outputs
# Attach user to request for stream profile resolution
request._request.dispatcharr_user = user
return stream_ts(request._request, str(channel.uuid))

View file

@@ -48,8 +48,11 @@ export POSTGRES_PORT=${POSTGRES_PORT:-5432}
export PG_VERSION=$(ls /usr/lib/postgresql/ | sort -V | tail -n 1)
export PG_BINDIR="/usr/lib/postgresql/${PG_VERSION}/bin"
export REDIS_HOST=${REDIS_HOST:-localhost}
export REDIS_PORT=${REDIS_PORT:-6379}
export REDIS_DB=${REDIS_DB:-0}
export DISPATCHARR_PORT=${DISPATCHARR_PORT:-9191}
export UWSGI_PORT=${UWSGI_PORT:-5656}
export DAPHNE_PORT=${DAPHNE_PORT:-8001}
export LIBVA_DRIVERS_PATH='/usr/local/lib/x86_64-linux-gnu/dri'
export LD_LIBRARY_PATH='/usr/local/lib'
export SECRET_FILE="/data/jwt"
@@ -115,7 +118,7 @@ if [[ ! -f /etc/profile.d/dispatcharr.sh ]]; then
PATH VIRTUAL_ENV DJANGO_SETTINGS_MODULE PYTHONUNBUFFERED PYTHONDONTWRITEBYTECODE
POSTGRES_DB POSTGRES_USER POSTGRES_PASSWORD POSTGRES_HOST POSTGRES_PORT
DISPATCHARR_ENV DISPATCHARR_DEBUG DISPATCHARR_LOG_LEVEL
REDIS_HOST REDIS_DB POSTGRES_DIR DISPATCHARR_PORT
REDIS_HOST REDIS_PORT REDIS_DB POSTGRES_DIR DISPATCHARR_PORT UWSGI_PORT DAPHNE_PORT
DISPATCHARR_VERSION DISPATCHARR_TIMESTAMP LIBVA_DRIVERS_PATH LIBVA_DRIVER_NAME LD_LIBRARY_PATH
CELERY_NICE_LEVEL UWSGI_NICE_LEVEL DJANGO_SECRET_KEY
)

View file

@@ -140,6 +140,26 @@ EOF
fi
ensure_utf8_encoding() {
# First check if the database exists
if ! su - postgres -c "psql -p ${POSTGRES_PORT} -tAc \"SELECT 1 FROM pg_database WHERE datname = '$POSTGRES_DB';\"" | grep -q 1; then
echo "Database $POSTGRES_DB does not exist yet, creating it..."
su - postgres -c "createdb -p ${POSTGRES_PORT} --encoding=UTF8 ${POSTGRES_DB}"
# Create user if not exists
su - postgres -c "psql -p ${POSTGRES_PORT} -d ${POSTGRES_DB}" <<EOF
DO \$\$
BEGIN
IF NOT EXISTS (SELECT FROM pg_roles WHERE rolname = '$POSTGRES_USER') THEN
CREATE ROLE $POSTGRES_USER WITH LOGIN PASSWORD '$POSTGRES_PASSWORD';
END IF;
END
\$\$;
EOF
su postgres -c "$PG_BINDIR/psql -p ${POSTGRES_PORT} -c \"ALTER DATABASE ${POSTGRES_DB} OWNER TO $POSTGRES_USER;\""
su postgres -c "$PG_BINDIR/psql -p ${POSTGRES_PORT} -c \"GRANT ALL PRIVILEGES ON DATABASE ${POSTGRES_DB} TO $POSTGRES_USER;\""
echo "Database $POSTGRES_DB created with UTF8 encoding."
return
fi
# Check encoding of existing database
CURRENT_ENCODING=$(su - postgres -c "psql -p ${POSTGRES_PORT} -tAc \"SELECT pg_encoding_to_char(encoding) FROM pg_database WHERE datname = '$POSTGRES_DB';\"" | tr -d ' ')
if [ "$CURRENT_ENCODING" != "UTF8" ]; then

View file

@@ -37,6 +37,20 @@ if ! [[ "$DISPATCHARR_PORT" =~ ^[0-9]+$ ]]; then
fi
sed -i "s/NGINX_PORT/${DISPATCHARR_PORT}/g" /etc/nginx/sites-enabled/default
# Configure uWSGI port for nginx proxy
if ! [[ "$UWSGI_PORT" =~ ^[0-9]+$ ]]; then
echo "⚠️ Warning: UWSGI_PORT is not a valid integer, using default port 5656"
UWSGI_PORT=5656
fi
sed -i "s/UWSGI_PORT/${UWSGI_PORT}/g" /etc/nginx/sites-enabled/default
# Configure Daphne websocket port for nginx proxy
if ! [[ "$DAPHNE_PORT" =~ ^[0-9]+$ ]]; then
echo "⚠️ Warning: DAPHNE_PORT is not a valid integer, using default port 8001"
DAPHNE_PORT=8001
fi
sed -i "s/DAPHNE_PORT/${DAPHNE_PORT}/g" /etc/nginx/sites-enabled/default
# Configure nginx based on IPv6 availability
if ip -6 addr show | grep -q "inet6"; then
echo "✅ IPv6 is available, enabling IPv6 in nginx"

View file

@@ -43,7 +43,7 @@ server {
}
location /api/logos/(?<logo_id>\d+)/cache/ {
proxy_pass http://127.0.0.1:5656;
proxy_pass http://127.0.0.1:UWSGI_PORT;
proxy_cache logo_cache;
proxy_cache_key "$scheme$request_uri"; # Cache per logo URL
proxy_cache_valid 200 24h; # Cache for 24 hours
@@ -51,7 +51,7 @@ server {
}
location ~ ^/api/channels/logos/(?<logo_id>\d+)/cache/ {
proxy_pass http://127.0.0.1:5656;
proxy_pass http://127.0.0.1:UWSGI_PORT;
proxy_cache logo_cache;
proxy_cache_key "$scheme$request_uri"; # Cache per logo URL
proxy_cache_valid 200 24h; # Cache for 24 hours
@@ -71,7 +71,7 @@ server {
# Serve FFmpeg streams efficiently
location /output/stream/ {
proxy_pass http://127.0.0.1:5656;
proxy_pass http://127.0.0.1:UWSGI_PORT;
proxy_buffering off;
proxy_set_header Connection keep-alive;
proxy_set_header X-Real-IP $remote_addr;
@@ -81,7 +81,7 @@ server {
# WebSockets for real-time communication
location /ws/ {
proxy_pass http://127.0.0.1:8001;
proxy_pass http://127.0.0.1:DAPHNE_PORT;
proxy_http_version 1.1;
proxy_set_header Upgrade $http_upgrade;
proxy_set_header Connection "Upgrade";

View file

@@ -6,12 +6,12 @@
exec-before = python /app/scripts/wait_for_redis.py
; Start Redis first
attach-daemon = redis-server
attach-daemon = redis-server --port $(REDIS_PORT)
; Then start other services with configurable nice level (default: 5 for low priority)
; Users can override via CELERY_NICE_LEVEL environment variable in docker-compose
attach-daemon = nice -n $(CELERY_NICE_LEVEL) celery -A dispatcharr worker --autoscale=6,1
attach-daemon = nice -n $(CELERY_NICE_LEVEL) celery -A dispatcharr beat
attach-daemon = daphne -b 0.0.0.0 -p 8001 dispatcharr.asgi:application
attach-daemon = daphne -b 0.0.0.0 -p $(DAPHNE_PORT) dispatcharr.asgi:application
attach-daemon = cd /app/frontend && npm run dev
# Core settings
@@ -33,7 +33,7 @@ enable-threads = true
lazy-apps = true
# HTTP server
http = 0.0.0.0:5656
http = 0.0.0.0:$(UWSGI_PORT)
http-keepalive = 1
buffer-size = 65536
http-timeout = 600

View file

@@ -8,12 +8,12 @@
exec-pre = python /app/scripts/wait_for_redis.py
; Start Redis first
attach-daemon = redis-server
attach-daemon = redis-server --port $(REDIS_PORT)
; Then start other services with configurable nice level (default: 5 for low priority)
; Users can override via CELERY_NICE_LEVEL environment variable in docker-compose
attach-daemon = nice -n $(CELERY_NICE_LEVEL) celery -A dispatcharr worker --autoscale=6,1
attach-daemon = nice -n $(CELERY_NICE_LEVEL) celery -A dispatcharr beat
attach-daemon = daphne -b 0.0.0.0 -p 8001 dispatcharr.asgi:application
attach-daemon = daphne -b 0.0.0.0 -p $(DAPHNE_PORT) dispatcharr.asgi:application
attach-daemon = cd /app/frontend && npm run dev
# Core settings
@@ -34,7 +34,7 @@ threads = 2
enable-threads = true
# Optimize for streaming
http = 0.0.0.0:5656
http = 0.0.0.0:$(UWSGI_PORT)
http-keepalive = 1
buffer-size = 65536 # Increase buffer for large payloads
post-buffering = 4096 # Reduce buffering for real-time streaming

View file

@@ -8,12 +8,12 @@
exec-pre = python /app/scripts/wait_for_redis.py
; Start Redis first
attach-daemon = redis-server
attach-daemon = redis-server --port $(REDIS_PORT)
; Then start other services with configurable nice level (default: 5 for low priority)
; Users can override via CELERY_NICE_LEVEL environment variable in docker-compose
attach-daemon = nice -n $(CELERY_NICE_LEVEL) celery -A dispatcharr worker --autoscale=6,1
attach-daemon = nice -n $(CELERY_NICE_LEVEL) celery -A dispatcharr beat
attach-daemon = daphne -b 0.0.0.0 -p 8001 dispatcharr.asgi:application
attach-daemon = daphne -b 0.0.0.0 -p $(DAPHNE_PORT) dispatcharr.asgi:application
# Core settings
chdir = /app
@@ -32,7 +32,7 @@ static-map = /static=/app/static
workers = 4
# Optimize for streaming
http = 0.0.0.0:5656
http = 0.0.0.0:$(UWSGI_PORT)
http-keepalive = 1
buffer-size = 65536 # Increase buffer for large payloads
post-buffering = 4096 # Reduce buffering for real-time streaming

View file

@@ -19,11 +19,13 @@ import {
import { RotateCcwKey, X } from 'lucide-react';
import { useForm } from '@mantine/form';
import useChannelsStore from '../../store/channels';
import useStreamProfilesStore from '../../store/streamProfiles';
import { USER_LEVELS, USER_LEVEL_LABELS } from '../../constants';
import useAuthStore from '../../store/auth';
const User = ({ user = null, isOpen, onClose }) => {
const profiles = useChannelsStore((s) => s.profiles);
const streamProfiles = useStreamProfilesStore((s) => s.profiles);
const authUser = useAuthStore((s) => s.user);
const setUser = useAuthStore((s) => s.setUser);
@@ -42,6 +44,7 @@ const User = ({ user = null, isOpen, onClose }) => {
xc_password: '',
channel_profiles: [],
hide_adult_content: false,
stream_profile: '',
},
validate: (values) => ({
@@ -95,6 +98,9 @@ const User = ({ user = null, isOpen, onClose }) => {
values.channel_profiles = [];
}
// Convert stream_profile to integer or null
values.stream_profile = values.stream_profile ? parseInt(values.stream_profile, 10) : null;
if (!user && values.user_level == USER_LEVELS.STREAMER) {
// Generate random password - they can't log in, but user can't be created without a password
values.password = Math.random().toString(36).slice(2);
@@ -134,6 +140,7 @@ const User = ({ user = null, isOpen, onClose }) => {
: ['0'],
xc_password: customProps.xc_password || '',
hide_adult_content: customProps.hide_adult_content || false,
stream_profile: user.stream_profile ? `${user.stream_profile}` : '',
});
if (customProps.xc_password) {
@@ -269,6 +276,23 @@ const User = ({ user = null, isOpen, onClose }) => {
</Tooltip>
</Box>
)}
{showPermissions && (
<Select
label="Stream Profile"
description="User's default streaming profile"
placeholder="System Default"
clearable
data={streamProfiles
.filter((p) => p.is_active)
.map((p) => ({
label: p.name,
value: `${p.id}`,
}))}
{...form.getInputProps('stream_profile')}
key={form.key('stream_profile')}
/>
)}
</Stack>
</Group>

View file

@@ -2,6 +2,7 @@ import React, { useMemo, useCallback, useState } from 'react';
import API from '../../api';
import UserForm from '../forms/User';
import useUsersStore from '../../store/users';
import useStreamProfilesStore from '../../store/streamProfiles';
import useAuthStore from '../../store/auth';
import { USER_LEVELS, USER_LEVEL_LABELS } from '../../constants';
import useWarningsStore from '../../store/warnings';
@@ -76,6 +77,7 @@ const UsersTable = () => {
* STORES
*/
const users = useUsersStore((s) => s.users);
const streamProfiles = useStreamProfilesStore((s) => s.profiles);
const authUser = useAuthStore((s) => s.user);
const isWarningSuppressed = useWarningsStore((s) => s.isWarningSuppressed);
const suppressWarning = useWarningsStore((s) => s.suppressWarning);
@@ -220,6 +222,16 @@ const UsersTable = () => {
);
},
},
{
header: 'Stream Profile',
accessorKey: 'stream_profile',
size: 130,
cell: ({ getValue }) => {
const profileId = getValue();
const profile = streamProfiles.find((p) => p.id === profileId);
return <Text size="sm">{profile?.name || 'Default'}</Text>;
},
},
{
header: 'XC Password',
accessorKey: 'custom_properties',
@@ -279,6 +291,7 @@ const UsersTable = () => {
togglePasswordVisibility,
fullDateFormat,
fullDateTimeFormat,
streamProfiles,
]
);
@@ -318,6 +331,7 @@ const UsersTable = () => {
user_level: renderHeaderCell,
last_login: renderHeaderCell,
date_joined: renderHeaderCell,
stream_profile: renderHeaderCell,
custom_properties: renderHeaderCell,
},
});