Fix seeking not working.

This commit is contained in:
SergeantPanda 2025-08-12 08:29:26 -05:00
parent 7e94ee5d49
commit 07966424f8
2 changed files with 691 additions and 39 deletions

View file

@ -6,7 +6,10 @@ import time
import json
import logging
import threading
import random
import requests
from typing import Optional, Dict, Any
from django.http import StreamingHttpResponse, HttpResponse
from core.utils import RedisClient
from apps.vod.models import Movie, Episode
from apps.m3u.models import M3UAccountProfile
@ -304,9 +307,10 @@ class VODConnectionManager:
except Exception as e:
logger.error(f"Error during connection cleanup: {e}")
def stream_content(self, content_obj, stream_url, m3u_profile, client_ip, user_agent, request):
def stream_content(self, content_obj, stream_url, m3u_profile, client_ip, user_agent, request,
utc_start=None, utc_end=None, offset=None, range_header=None):
"""
Stream VOD content with connection tracking
Stream VOD content with connection tracking and timeshift support
Args:
content_obj: Movie or Episode object
@ -315,14 +319,14 @@ class VODConnectionManager:
client_ip: Client IP address
user_agent: Client user agent
request: Django request object
utc_start: UTC start time for timeshift (e.g., '2023-01-01T12:00:00')
utc_end: UTC end time for timeshift
offset: Offset in seconds for seeking
range_header: HTTP Range header for partial content requests
Returns:
StreamingHttpResponse or HttpResponse with error
"""
import time
import random
import requests
from django.http import StreamingHttpResponse, HttpResponse
try:
# Generate unique client ID
@ -354,36 +358,79 @@ class VODConnectionManager:
logger.error(f"Failed to create connection tracking for {content_type} {content_uuid}")
return HttpResponse("Connection limit exceeded", status=503)
# Create streaming generator
# Modify stream URL for timeshift functionality
modified_stream_url = self._apply_timeshift_parameters(
stream_url, utc_start, utc_end, offset
)
logger.info(f"[{client_id}] Modified stream URL for timeshift: {modified_stream_url}")
# Create streaming generator with simplified header handling
upstream_response = None
def stream_generator():
nonlocal upstream_response
try:
logger.info(f"[{client_id}] Starting VOD stream for {content_type} {content_name}")
# Make request to actual stream URL
headers = {'User-Agent': user_agent} if user_agent else {}
# Prepare request headers
headers = {}
if user_agent:
headers['User-Agent'] = user_agent
with requests.get(stream_url, headers=headers, stream=True, timeout=(10, 30)) as response:
response.raise_for_status()
# Forward important headers
important_headers = [
'authorization', 'x-forwarded-for', 'x-real-ip',
'referer', 'origin', 'accept'
]
bytes_sent = 0
chunk_count = 0
for header_name in important_headers:
django_header = f'HTTP_{header_name.upper().replace("-", "_")}'
if hasattr(request, 'META') and django_header in request.META:
headers[header_name] = request.META[django_header]
logger.debug(f"[{client_id}] Forwarded header {header_name}")
for chunk in response.iter_content(chunk_size=8192):
if chunk:
yield chunk
bytes_sent += len(chunk)
chunk_count += 1
# Add client IP
if client_ip:
headers['X-Forwarded-For'] = client_ip
headers['X-Real-IP'] = client_ip
# Update connection activity every 100 chunks
if chunk_count % 100 == 0:
self.update_connection_activity(
content_type=content_type,
content_uuid=content_uuid,
client_id=client_id,
bytes_sent=len(chunk)
)
# Add Range header if provided for seeking support
if range_header:
headers['Range'] = range_header
logger.info(f"[{client_id}] Added Range header: {range_header}")
logger.info(f"[{client_id}] VOD stream completed: {bytes_sent} bytes sent")
# Make request to upstream server with automatic redirect following
upstream_response = requests.get(modified_stream_url, headers=headers, stream=True, timeout=(10, 30), allow_redirects=True)
upstream_response.raise_for_status()
# Log upstream response info
logger.info(f"[{client_id}] Upstream response status: {upstream_response.status_code}")
logger.info(f"[{client_id}] Upstream content-type: {upstream_response.headers.get('content-type', 'unknown')}")
if 'content-length' in upstream_response.headers:
logger.info(f"[{client_id}] Upstream content-length: {upstream_response.headers['content-length']}")
if 'content-range' in upstream_response.headers:
logger.info(f"[{client_id}] Upstream content-range: {upstream_response.headers['content-range']}")
bytes_sent = 0
chunk_count = 0
for chunk in upstream_response.iter_content(chunk_size=8192):
if chunk:
yield chunk
bytes_sent += len(chunk)
chunk_count += 1
# Update connection activity every 100 chunks
if chunk_count % 100 == 0:
self.update_connection_activity(
content_type=content_type,
content_uuid=content_uuid,
client_id=client_id,
bytes_sent=len(chunk)
)
logger.info(f"[{client_id}] VOD stream completed: {bytes_sent} bytes sent")
except requests.RequestException as e:
logger.error(f"[{client_id}] Error streaming from source: {e}")
@ -393,14 +440,120 @@ class VODConnectionManager:
finally:
# Clean up connection tracking
self.remove_connection(content_type, content_uuid, client_id)
if upstream_response:
upstream_response.close()
# Create streaming response
def stream_generator():
    # Proxies the upstream VOD response to the client in 8 KB chunks,
    # updating connection-tracking bookkeeping as bytes flow.
    nonlocal upstream_response  # shared with enclosing scope so it can be closed on teardown
    try:
        logger.info(f"[{client_id}] Starting VOD stream for {content_type} {content_name}")
        # Prepare request headers
        headers = {}
        if user_agent:
            headers['User-Agent'] = user_agent
        # Forward important headers from the original client request
        important_headers = [
            'authorization', 'x-forwarded-for', 'x-real-ip',
            'referer', 'origin', 'accept'
        ]
        for header_name in important_headers:
            # Django exposes request headers in META as HTTP_<NAME_WITH_UNDERSCORES>
            django_header = f'HTTP_{header_name.upper().replace("-", "_")}'
            if hasattr(request, 'META') and django_header in request.META:
                headers[header_name] = request.META[django_header]
                logger.debug(f"[{client_id}] Forwarded header {header_name}")
        # Add client IP so the provider sees the real viewer address
        if client_ip:
            headers['X-Forwarded-For'] = client_ip
            headers['X-Real-IP'] = client_ip
        # Add Range header if provided for seeking support
        if range_header:
            headers['Range'] = range_header
            logger.info(f"[{client_id}] Added Range header: {range_header}")
        # Make request to upstream server with automatic redirect following
        upstream_response = requests.get(modified_stream_url, headers=headers, stream=True, timeout=(10, 30), allow_redirects=True)
        upstream_response.raise_for_status()
        # Log upstream response info for seek debugging
        logger.info(f"[{client_id}] Upstream response status: {upstream_response.status_code}")
        logger.info(f"[{client_id}] Final URL after redirects: {upstream_response.url}")
        logger.info(f"[{client_id}] Upstream content-type: {upstream_response.headers.get('content-type', 'unknown')}")
        if 'content-length' in upstream_response.headers:
            logger.info(f"[{client_id}] Upstream content-length: {upstream_response.headers['content-length']}")
        if 'content-range' in upstream_response.headers:
            logger.info(f"[{client_id}] Upstream content-range: {upstream_response.headers['content-range']}")
        bytes_sent = 0
        chunk_count = 0
        for chunk in upstream_response.iter_content(chunk_size=8192):
            if chunk:
                yield chunk
                bytes_sent += len(chunk)
                chunk_count += 1
                # Update connection activity every 100 chunks.
                # NOTE(review): bytes_sent here is only the latest chunk's size,
                # not the running total — confirm that is the intended contract
                # of update_connection_activity.
                if chunk_count % 100 == 0:
                    self.update_connection_activity(
                        content_type=content_type,
                        content_uuid=content_uuid,
                        client_id=client_id,
                        bytes_sent=len(chunk)
                    )
        logger.info(f"[{client_id}] VOD stream completed: {bytes_sent} bytes sent")
    except requests.RequestException as e:
        # Upstream/network failure: emit a short error payload to the client
        logger.error(f"[{client_id}] Error streaming from source: {e}")
        yield b"Error: Unable to stream content"
    except Exception as e:
        logger.error(f"[{client_id}] Error in stream generator: {e}")
    finally:
        # Clean up connection tracking and release the upstream socket
        self.remove_connection(content_type, content_uuid, client_id)
        if upstream_response:
            upstream_response.close()
# Create streaming response with sensible defaults
response = StreamingHttpResponse(
streaming_content=stream_generator(),
content_type='video/mp4'
)
# Set status code based on request type
if range_header:
response.status_code = 206
logger.info(f"[{client_id}] Set response status to 206 for range request")
else:
response.status_code = 200
logger.info(f"[{client_id}] Set response status to 200 for full request")
# Set headers that VLC and other players expect
response['Cache-Control'] = 'no-cache'
response['Accept-Ranges'] = 'none'
response['Pragma'] = 'no-cache'
response['X-Content-Type-Options'] = 'nosniff'
response['Connection'] = 'keep-alive'
response['Accept-Ranges'] = 'bytes'
# Log the critical headers we're sending to the client
logger.info(f"[{client_id}] Response headers to client - Status: {response.status_code}, Accept-Ranges: {response.get('Accept-Ranges', 'MISSING')}")
if 'Content-Length' in response:
logger.info(f"[{client_id}] Content-Length: {response['Content-Length']}")
if 'Content-Range' in response:
logger.info(f"[{client_id}] Content-Range: {response['Content-Range']}")
if 'Content-Type' in response:
logger.info(f"[{client_id}] Content-Type: {response['Content-Type']}")
# Critical: Log what VLC needs to see for seeking to work
if response.status_code == 200:
logger.info(f"[{client_id}] VLC SEEKING INFO: Full content response (200). VLC should see Accept-Ranges and Content-Length to enable seeking.")
elif response.status_code == 206:
logger.info(f"[{client_id}] VLC SEEKING INFO: Partial content response (206). This confirms seeking is working if VLC requested a range.")
return response
@ -408,7 +561,417 @@ class VODConnectionManager:
logger.error(f"Error in stream_content: {e}", exc_info=True)
return HttpResponse(f"Streaming error: {str(e)}", status=500)
# Global instance
def stream_content_with_session(self, session_id, content_obj, stream_url, m3u_profile, client_ip, user_agent, request,
                                utc_start=None, utc_end=None, offset=None, range_header=None):
    """
    Stream VOD content with session-based connection reuse for timeshift operations.

    Reuses the existing connection-tracking slot when the same session makes
    repeated timeshift/seek requests, so each client session only counts once
    against the provider's connection limit.

    Args:
        session_id: Opaque session identifier; also used as the client_id for tracking.
        content_obj: Movie, Series or Episode object being streamed.
        stream_url: Upstream provider URL for the content.
        m3u_profile: M3UAccountProfile used for connection accounting.
        client_ip: Client IP address (forwarded upstream).
        user_agent: Client User-Agent string.
        request: Django request object; its headers are forwarded upstream.
        utc_start: UTC start time for timeshift (ISO format string), optional.
        utc_end: UTC end time for timeshift (ISO format string), optional.
        offset: Seek offset in seconds, optional.
        range_header: HTTP Range header for partial content requests, optional.

    Returns:
        StreamingHttpResponse on success, or HttpResponse with an error status.
    """
    try:
        # Use session_id as client_id for connection tracking
        client_id = session_id

        # Determine content type from the object's shape
        if hasattr(content_obj, 'episodes'):  # Series
            content_type = 'series'
        elif hasattr(content_obj, 'series'):  # Episode
            content_type = 'episode'
        else:  # Movie
            content_type = 'movie'

        content_uuid = str(content_obj.uuid)
        content_name = getattr(content_obj, 'name', getattr(content_obj, 'title', 'Unknown'))

        # Check if we have an existing session connection in Redis
        session_key = f"vod_session:{session_id}"
        session_info = None
        if self.redis_client:
            try:
                session_data = self.redis_client.get(session_key)
                if session_data:
                    session_info = json.loads(session_data.decode('utf-8'))
                    logger.info(f"[{client_id}] Found existing session: {session_info}")
            except Exception as e:
                logger.warning(f"[{client_id}] Error reading session data: {e}")

        # If no existing session (or it expired), create new connection tracking.
        # The profile counter is incremented only ONCE per session.
        if not session_info:
            connection_created = self.create_connection(
                content_type=content_type,
                content_uuid=content_uuid,
                content_name=content_name,
                client_id=client_id,
                client_ip=client_ip,
                user_agent=user_agent,
                m3u_profile=m3u_profile
            )
            if not connection_created:
                logger.error(f"Failed to create connection tracking for {content_type} {content_uuid}")
                return HttpResponse("Connection limit exceeded", status=503)

            # Store session info in Redis so follow-up timeshift requests reuse it
            session_info = {
                'content_type': content_type,
                'content_uuid': content_uuid,
                'content_name': content_name,
                'created_at': time.time(),
                'profile_id': m3u_profile.id,
                'connection_counted': True  # Mark that we've counted this connection
            }
            if self.redis_client:
                try:
                    self.redis_client.setex(
                        session_key,
                        self.connection_ttl,
                        json.dumps(session_info)
                    )
                    logger.info(f"[{client_id}] Created new session: {session_info}")
                except Exception as e:
                    logger.error(f"[{client_id}] Error storing session data: {e}")
        else:
            # Session exists - don't create new connection tracking.
            # This prevents double-counting connections for the same session.
            logger.info(f"[{client_id}] Reusing existing session - no new connection created")

        # Apply timeshift parameters to URL
        modified_stream_url = self._apply_timeshift_parameters(
            stream_url, utc_start, utc_end, offset
        )
        logger.info(f"[{client_id}] Modified stream URL for timeshift: {modified_stream_url}")

        def prepare_headers():
            # Build upstream request headers, preserving ALL client headers
            # relevant for authentication/proxying.
            headers = {}
            header_mapping = {
                'HTTP_USER_AGENT': 'User-Agent',
                'HTTP_AUTHORIZATION': 'Authorization',
                'HTTP_X_FORWARDED_FOR': 'X-Forwarded-For',
                'HTTP_X_REAL_IP': 'X-Real-IP',
                'HTTP_REFERER': 'Referer',
                'HTTP_ORIGIN': 'Origin',
                'HTTP_ACCEPT': 'Accept',
                'HTTP_ACCEPT_LANGUAGE': 'Accept-Language',
                'HTTP_ACCEPT_ENCODING': 'Accept-Encoding',
                'HTTP_CONNECTION': 'Connection',
                'HTTP_CACHE_CONTROL': 'Cache-Control',
                'HTTP_COOKIE': 'Cookie',
                'HTTP_DNT': 'DNT',
                'HTTP_X_FORWARDED_PROTO': 'X-Forwarded-Proto',
                'HTTP_X_FORWARDED_PORT': 'X-Forwarded-Port',
            }

            # Log all headers for debugging
            logger.debug(f"[{client_id}] All available headers:")
            for django_header, http_header in header_mapping.items():
                if hasattr(request, 'META') and django_header in request.META:
                    header_value = request.META[django_header]
                    headers[http_header] = header_value
                    logger.debug(f"[{client_id}] {http_header}: {header_value}")

            # Check for any timeshift-related headers VLC might send
            timeshift_headers = ['HTTP_TIME', 'HTTP_TIMESTAMP', 'HTTP_SEEK', 'HTTP_POSITION']
            for header in timeshift_headers:
                if hasattr(request, 'META') and header in request.META:
                    value = request.META[header]
                    logger.info(f"[{client_id}] Found timeshift header {header}: {value}")
                    # Forward these as custom headers
                    headers[header.replace('HTTP_', '').replace('_', '-')] = value

            # Ensure we have a User-Agent
            if 'User-Agent' not in headers and user_agent:
                headers['User-Agent'] = user_agent

            # Add client IP headers
            if client_ip:
                headers['X-Forwarded-For'] = client_ip
                headers['X-Real-IP'] = client_ip

            # Add Range header for seeking support
            if range_header:
                headers['Range'] = range_header
                logger.info(f"[{client_id}] Added Range header: {range_header}")

            return headers

        # STEP 1: Make a small Range request to learn the headers of the final
        # (post-redirect) URL without downloading meaningful data.
        logger.info(f"[{client_id}] Making small Range request to get headers from final URL")
        try:
            probe_headers = prepare_headers()
            # Override any client range with a tiny probe range
            probe_headers['Range'] = 'bytes=0-1024'
            probe_response = requests.get(
                modified_stream_url,
                headers=probe_headers,
                timeout=(10, 30),
                allow_redirects=True,
                stream=True
            )
            probe_response.raise_for_status()

            upstream_content_length = None
            upstream_content_range = probe_response.headers.get('content-range')

            # Parse Content-Range to get total file size: "bytes 0-1024/1559626615"
            if upstream_content_range:
                try:
                    parts = upstream_content_range.split('/')
                    if len(parts) == 2:
                        upstream_content_length = parts[1]
                        logger.info(f"[{client_id}] Extracted Content-Length from Content-Range: {upstream_content_length}")
                except Exception as e:
                    logger.warning(f"[{client_id}] Could not parse Content-Range: {e}")

            # Fallback to Content-Length header if available
            if not upstream_content_length:
                upstream_content_length = probe_response.headers.get('content-length')

            upstream_content_type = probe_response.headers.get('content-type', 'video/mp4')
            upstream_accept_ranges = probe_response.headers.get('accept-ranges', 'bytes')
            upstream_last_modified = probe_response.headers.get('last-modified')
            upstream_etag = probe_response.headers.get('etag')

            logger.info(f"[{client_id}] Final URL after redirects: {probe_response.url}")
            logger.info(f"[{client_id}] Headers from final URL - Content-Length: {upstream_content_length}, Content-Type: {upstream_content_type}")

            # Close the probe response
            probe_response.close()
        except Exception as e:
            logger.warning(f"[{client_id}] Probe request failed, proceeding without upstream headers: {e}")
            upstream_content_length = None
            upstream_content_type = 'video/mp4'
            upstream_accept_ranges = 'bytes'
            upstream_last_modified = None
            upstream_etag = None

        # STEP 2: Create streaming generator for the actual content
        def stream_generator():
            upstream_response = None
            try:
                logger.info(f"[{client_id}] Starting session-based VOD stream for {content_type} {content_name}")
                headers = prepare_headers()
                # Make request to upstream server
                upstream_response = requests.get(
                    modified_stream_url,
                    headers=headers,
                    stream=True,
                    timeout=(10, 30),
                    allow_redirects=True
                )
                upstream_response.raise_for_status()

                # Log upstream response info
                logger.info(f"[{client_id}] Upstream response status: {upstream_response.status_code}")
                logger.info(f"[{client_id}] Final URL after redirects: {upstream_response.url}")
                if 'content-type' in upstream_response.headers:
                    logger.info(f"[{client_id}] Upstream content-type: {upstream_response.headers['content-type']}")
                if 'content-length' in upstream_response.headers:
                    logger.info(f"[{client_id}] Upstream content-length: {upstream_response.headers['content-length']}")
                if 'content-range' in upstream_response.headers:
                    logger.info(f"[{client_id}] Upstream content-range: {upstream_response.headers['content-range']}")

                bytes_sent = 0
                chunk_count = 0
                for chunk in upstream_response.iter_content(chunk_size=8192):
                    if chunk:
                        yield chunk
                        bytes_sent += len(chunk)
                        chunk_count += 1
                        # Update connection activity every 100 chunks
                        if chunk_count % 100 == 0:
                            self.update_connection_activity(
                                content_type=content_type,
                                content_uuid=content_uuid,
                                client_id=client_id,
                                bytes_sent=len(chunk)
                            )

                logger.info(f"[{client_id}] Session-based VOD stream completed: {bytes_sent} bytes sent")
            except requests.RequestException as e:
                logger.error(f"[{client_id}] Error streaming from source: {e}")
                yield b"Error: Unable to stream content"
            except Exception as e:
                logger.error(f"[{client_id}] Error in session stream generator: {e}")
            finally:
                # Don't remove connection tracking for sessions - let it expire
                # naturally so timeshift operations can reuse the connection slot.
                if upstream_response:
                    upstream_response.close()

        # STEP 3: Create streaming response with headers from final URL
        response = StreamingHttpResponse(
            streaming_content=stream_generator(),
            content_type=upstream_content_type
        )

        # Set status code based on request type
        if range_header:
            response.status_code = 206
            logger.info(f"[{client_id}] Set response status to 206 for range request")
        else:
            response.status_code = 200
            logger.info(f"[{client_id}] Set response status to 200 for full request")

        # Set headers that VLC and other players expect
        response['Cache-Control'] = 'no-cache'
        response['Pragma'] = 'no-cache'
        response['X-Content-Type-Options'] = 'nosniff'
        response['Connection'] = 'keep-alive'
        response['Accept-Ranges'] = upstream_accept_ranges

        # Forward other useful headers from final URL
        if upstream_last_modified:
            response['Last-Modified'] = upstream_last_modified
        if upstream_etag:
            response['ETag'] = upstream_etag

        # Handle range requests FIRST so Content-Length can reflect the
        # partial body size on a 206 response.
        content_range_set = False
        if range_header and upstream_content_length:
            try:
                if 'bytes=' in range_header:
                    range_part = range_header.replace('bytes=', '')
                    if '-' in range_part:
                        start_byte, end_byte = range_part.split('-', 1)
                        start = int(start_byte) if start_byte else 0
                        end = int(end_byte) if end_byte else int(upstream_content_length) - 1
                        total_size = int(upstream_content_length)
                        content_range = f"bytes {start}-{end}/{total_size}"
                        response['Content-Range'] = content_range
                        # BUG FIX: per RFC 7233 the Content-Length of a 206
                        # response is the length of the PARTIAL body, not the
                        # total file size. The original forwarded the total
                        # size here, which confuses players after a seek.
                        response['Content-Length'] = str(end - start + 1)
                        content_range_set = True
                        logger.info(f"[{client_id}] Set Content-Range: {content_range}")
            except Exception as e:
                logger.warning(f"[{client_id}] Could not parse range for Content-Range header: {e}")

        # CRITICAL: Forward total Content-Length from final URL on full (200)
        # responses to enable VLC seeking.
        if upstream_content_length and not content_range_set:
            response['Content-Length'] = upstream_content_length
            logger.info(f"[{client_id}] *** FORWARDED Content-Length from final URL: {upstream_content_length} *** (VLC seeking enabled)")
        elif not upstream_content_length:
            logger.warning(f"[{client_id}] *** NO Content-Length from final URL *** (VLC seeking may not work)")

        # Log the critical headers we're sending to the client
        logger.info(f"[{client_id}] SESSION Response headers to client - Status: {response.status_code}, Accept-Ranges: {response.get('Accept-Ranges', 'MISSING')}")
        if 'Content-Length' in response:
            logger.info(f"[{client_id}] SESSION Content-Length: {response['Content-Length']}")
        if 'Content-Range' in response:
            logger.info(f"[{client_id}] SESSION Content-Range: {response['Content-Range']}")
        if 'Content-Type' in response:
            logger.info(f"[{client_id}] SESSION Content-Type: {response['Content-Type']}")

        # Critical: Log what VLC needs to see for seeking to work
        if response.status_code == 200:
            if upstream_content_length:
                logger.info(f"[{client_id}] SESSION VLC SEEKING INFO: ✅ Full content response (200) with Content-Length from final URL. VLC seeking should work!")
            else:
                logger.info(f"[{client_id}] SESSION VLC SEEKING INFO: ❌ Full content response (200) but NO Content-Length from final URL. VLC seeking will NOT work!")
        elif response.status_code == 206:
            logger.info(f"[{client_id}] SESSION VLC SEEKING INFO: ✅ Partial content response (206). This confirms seeking is working if VLC requested a range.")

        return response
    except Exception as e:
        logger.error(f"Error in stream_content_with_session: {e}", exc_info=True)
        return HttpResponse(f"Streaming error: {str(e)}", status=500)
def _apply_timeshift_parameters(self, original_url, utc_start=None, utc_end=None, offset=None):
    """
    Rewrite a stream URL so the provider receives timeshift/seek hints.

    Args:
        original_url: Original stream URL.
        utc_start: UTC start time (ISO format string), optional.
        utc_end: UTC end time (ISO format string), optional.
        offset: Offset in seconds, optional.

    Returns:
        The URL with timeshift parameters applied, or the unmodified
        original URL if anything goes wrong.
    """
    try:
        from urllib.parse import urlparse, parse_qs, urlencode, urlunparse
        import re

        pieces = urlparse(original_url)
        params = parse_qs(pieces.query)
        logger.debug(f"Original URL: {original_url}")
        logger.debug(f"Original query params: {params}")

        # Providers disagree on parameter names, so every value is published
        # under each name a provider might expect.
        if utc_start:
            params.update({'utc_start': [utc_start], 'start': [utc_start]})
            logger.info(f"Added utc_start/start parameter: {utc_start}")
        if utc_end:
            params.update({'utc_end': [utc_end], 'end': [utc_end]})
            logger.info(f"Added utc_end/end parameter: {utc_end}")
        if offset:
            try:
                seconds = int(offset)
            except (ValueError, TypeError):
                logger.warning(f"Invalid offset value: {offset}, skipping")
            else:
                as_text = str(seconds)
                params.update({'offset': [as_text], 'seek': [as_text], 't': [as_text]})
                logger.info(f"Added offset/seek/t parameter: {seconds} seconds")

        # Some IPTV catchup providers embed the timestamp in the path
        # (/YYYY-MM-DD/HH-MM-SS) rather than the query string.
        new_path = pieces.path
        catchup_pattern = r'/(\d{4}-\d{2}-\d{2})/(\d{2}-\d{2}-\d{2})'
        if utc_start and re.search(catchup_pattern, new_path):
            try:
                from datetime import datetime
                start_dt = datetime.fromisoformat(utc_start.replace('Z', '+00:00'))
                stamp = '/' + start_dt.strftime('%Y-%m-%d') + '/' + start_dt.strftime('%H-%M-%S')
                new_path = re.sub(catchup_pattern, stamp, new_path)
                logger.info(f"Modified path for catchup: {new_path}")
            except Exception as e:
                logger.warning(f"Could not parse timeshift date: {e}")

        # Reassemble the URL with the augmented query and (possibly) new path
        rebuilt = urlunparse((
            pieces.scheme,
            pieces.netloc,
            new_path,
            pieces.params,
            urlencode(params, doseq=True),
            pieces.fragment
        ))
        logger.info(f"Modified URL: {rebuilt}")
        return rebuilt
    except Exception as e:
        logger.error(f"Error applying timeshift parameters: {e}")
        return original_url

# Global instance
_connection_manager = None
def get_connection_manager() -> VODConnectionManager:

View file

@ -26,7 +26,7 @@ class VODStreamView(View):
def get(self, request, content_type, content_id, profile_id=None):
"""
Stream VOD content (movies or series episodes)
Stream VOD content (movies or series episodes) with session-based connection reuse
Args:
content_type: 'movie', 'series', or 'episode'
@ -36,10 +36,96 @@ class VODStreamView(View):
logger.info(f"[VOD-REQUEST] Starting VOD stream request: {content_type}/{content_id}, profile: {profile_id}")
logger.info(f"[VOD-REQUEST] Full request path: {request.get_full_path()}")
logger.info(f"[VOD-REQUEST] Request method: {request.method}")
logger.info(f"[VOD-REQUEST] Request headers: {dict(request.headers)}")
try:
client_ip, user_agent = get_client_info(request)
logger.info(f"[VOD-CLIENT] Client info - IP: {client_ip}, User-Agent: {user_agent[:100]}...")
# Extract timeshift parameters from query string
# Support multiple timeshift parameter formats
utc_start = request.GET.get('utc_start') or request.GET.get('start') or request.GET.get('playliststart')
utc_end = request.GET.get('utc_end') or request.GET.get('end') or request.GET.get('playlistend')
offset = request.GET.get('offset') or request.GET.get('seek') or request.GET.get('t')
# VLC specific timeshift parameters
if not utc_start and not offset:
# Check for VLC-style timestamp parameters
if 'timestamp' in request.GET:
offset = request.GET.get('timestamp')
elif 'time' in request.GET:
offset = request.GET.get('time')
# Extract session ID for connection reuse
session_id = request.GET.get('session_id')
# Extract Range header for seeking support
range_header = request.META.get('HTTP_RANGE')
logger.info(f"[VOD-TIMESHIFT] Timeshift params - utc_start: {utc_start}, utc_end: {utc_end}, offset: {offset}")
logger.info(f"[VOD-SESSION] Session ID: {session_id}")
# Log all query parameters for debugging
if request.GET:
logger.debug(f"[VOD-PARAMS] All query params: {dict(request.GET)}")
if range_header:
logger.info(f"[VOD-RANGE] Range header: {range_header}")
# Parse the range to understand what position VLC is seeking to
try:
if 'bytes=' in range_header:
range_part = range_header.replace('bytes=', '')
if '-' in range_part:
start_byte, end_byte = range_part.split('-', 1)
if start_byte:
start_pos_mb = int(start_byte) / (1024 * 1024)
logger.info(f"[VOD-SEEK] Seeking to byte position: {start_byte} (~{start_pos_mb:.1f} MB)")
if int(start_byte) > 0:
logger.info(f"[VOD-SEEK] *** ACTUAL SEEK DETECTED *** Position: {start_pos_mb:.1f} MB")
else:
logger.info(f"[VOD-SEEK] Open-ended range request (from start)")
if end_byte:
end_pos_mb = int(end_byte) / (1024 * 1024)
logger.info(f"[VOD-SEEK] End position: {end_byte} bytes (~{end_pos_mb:.1f} MB)")
except Exception as e:
logger.warning(f"[VOD-SEEK] Could not parse range header: {e}")
# Simple seek detection - track rapid requests
current_time = time.time()
request_key = f"{client_ip}:{content_type}:{content_id}"
if not hasattr(self.__class__, '_request_times'):
self.__class__._request_times = {}
if request_key in self.__class__._request_times:
time_diff = current_time - self.__class__._request_times[request_key]
if time_diff < 5.0:
logger.info(f"[VOD-SEEK] Rapid request detected ({time_diff:.1f}s) - likely seeking")
self.__class__._request_times[request_key] = current_time
else:
logger.info(f"[VOD-RANGE] No Range header - full content request")
logger.info(f"[VOD-CLIENT] Client info - IP: {client_ip}, User-Agent: {user_agent[:50]}...")
# If no session ID, create one and redirect
if not session_id:
session_id = f"vod_{int(time.time() * 1000)}_{random.randint(1000, 9999)}"
logger.info(f"[VOD-SESSION] Creating new session: {session_id}")
# Build redirect URL with session ID and preserve all parameters
from urllib.parse import urlencode
query_params = dict(request.GET)
query_params['session_id'] = session_id
query_string = urlencode(query_params, doseq=True)
redirect_url = f"{request.path}?{query_string}"
logger.info(f"[VOD-SESSION] Redirecting to: {redirect_url}")
return HttpResponse(
status=302,
headers={'Location': redirect_url}
)
# Get the content object and its relation
content_obj, relation = self._get_content_and_relation(content_type, content_id)
@ -70,13 +156,12 @@ class VODStreamView(View):
logger.info(f"[VOD-PROFILE] Using M3U profile: {m3u_profile.id} (max_streams: {m3u_profile.max_streams}, current: {m3u_profile.current_viewers})")
# Track connection start in Redis
# Track connection in Redis (simplified)
try:
from core.utils import RedisClient
redis_client = RedisClient.get_client()
profile_connections_key = f"profile_connections:{m3u_profile.id}"
current_count = redis_client.incr(profile_connections_key)
logger.debug(f"Incremented VOD profile {m3u_profile.id} connections to {current_count}")
redis_client.incr(profile_connections_key)
except Exception as e:
logger.error(f"Error tracking connection in Redis: {e}")
@ -92,15 +177,20 @@ class VODStreamView(View):
# Get connection manager
connection_manager = VODConnectionManager.get_instance()
# Stream the content
# Stream the content with session-based connection reuse
logger.info("[VOD-STREAM] Calling connection manager to stream content")
response = connection_manager.stream_content(
response = connection_manager.stream_content_with_session(
session_id=session_id,
content_obj=content_obj,
stream_url=final_stream_url,
m3u_profile=m3u_profile,
client_ip=client_ip,
user_agent=user_agent,
request=request
request=request,
utc_start=utc_start,
utc_end=utc_end,
offset=offset,
range_header=range_header
)
logger.info(f"[VOD-SUCCESS] Stream response created successfully, type: {type(response)}")
@ -225,7 +315,6 @@ class VODStreamView(View):
if search_pattern and replace_pattern:
transformed_url = re.sub(search_pattern, safe_replace_pattern, original_url)
logger.debug(f"URL transformed from {original_url} to {transformed_url}")
return transformed_url
return original_url