Mirror of https://github.com/Dispatcharr/Dispatcharr.git (synced 2026-01-23 02:35:14 +00:00)
Merge branch 'dev' of https://github.com/Dispatcharr/Dispatcharr into pr/sethwv/757
commit 31b9868bfd
35 changed files with 1808 additions and 775 deletions
@@ -19,11 +19,11 @@ SETTING_KEYS = {
 }

 DEFAULTS = {
-    "enabled": False,
+    "enabled": True,
     "frequency": "daily",
     "time": "03:00",
     "day_of_week": 0,  # Sunday
-    "retention_count": 0,
+    "retention_count": 3,
     "cron_expression": "",
 }
@@ -47,7 +47,7 @@ urlpatterns = [
     path('series-rules/', SeriesRulesAPIView.as_view(), name='series_rules'),
     path('series-rules/evaluate/', EvaluateSeriesRulesAPIView.as_view(), name='evaluate_series_rules'),
     path('series-rules/bulk-remove/', BulkRemoveSeriesRecordingsAPIView.as_view(), name='bulk_remove_series_recordings'),
-    path('series-rules/<str:tvg_id>/', DeleteSeriesRuleAPIView.as_view(), name='delete_series_rule'),
+    path('series-rules/<path:tvg_id>/', DeleteSeriesRuleAPIView.as_view(), name='delete_series_rule'),
     path('recordings/bulk-delete-upcoming/', BulkDeleteUpcomingRecordingsAPIView.as_view(), name='bulk_delete_upcoming_recordings'),
     path('dvr/comskip-config/', ComskipConfigAPIView.as_view(), name='comskip_config'),
 ]
@@ -10,6 +10,7 @@ from django.shortcuts import get_object_or_404, get_list_or_404
 from django.db import transaction
 from django.db.models import Q
 import os, json, requests, logging
+from urllib.parse import unquote
 from apps.accounts.permissions import (
     Authenticated,
     IsAdmin,
@@ -2053,7 +2054,7 @@ class DeleteSeriesRuleAPIView(APIView):
         return [Authenticated()]

     def delete(self, request, tvg_id):
-        tvg_id = str(tvg_id)
+        tvg_id = unquote(str(tvg_id or ""))
         rules = [r for r in CoreSettings.get_dvr_series_rules() if str(r.get("tvg_id")) != tvg_id]
         CoreSettings.set_dvr_series_rules(rules)
         return Response({"success": True, "rules": rules})
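
The two changes above work together: the `<path:tvg_id>` converter accepts identifiers that contain slashes, and `unquote` restores any percent-encoded characters the client sent. A minimal round-trip sketch using a hypothetical tvg_id:

# Sketch only: why <path:> plus unquote is needed for IDs containing slashes.
from urllib.parse import quote, unquote

tvg_id = "networks/abc.us"            # hypothetical ID containing a slash
encoded = quote(tvg_id, safe="")      # "networks%2Fabc.us" as it may appear in the URL
# A <str:> converter rejects a raw slash; <path:> matches either form.
assert unquote(encoded) == tvg_id     # unquote restores the original value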
@@ -119,11 +119,11 @@ class Stream(models.Model):
         return self.name or self.url or f"Stream ID {self.id}"

     @classmethod
-    def generate_hash_key(cls, name, url, tvg_id, keys=None, m3u_id=None):
+    def generate_hash_key(cls, name, url, tvg_id, keys=None, m3u_id=None, group=None):
         if keys is None:
             keys = CoreSettings.get_m3u_hash_key().split(",")

-        stream_parts = {"name": name, "url": url, "tvg_id": tvg_id, "m3u_id": m3u_id}
+        stream_parts = {"name": name, "url": url, "tvg_id": tvg_id, "m3u_id": m3u_id, "group": group}

         hash_parts = {key: stream_parts[key] for key in keys if key in stream_parts}
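
The hunk only shows how the candidate fields are selected; the hashing itself is outside the diff. A standalone sketch of the same idea, assuming an MD5 digest over the selected fields (the project's actual digest may differ):

# Sketch only: stable hash over a configurable subset of stream fields, now including "group".
import hashlib

def generate_hash_key(name, url, tvg_id, keys, m3u_id=None, group=None):
    stream_parts = {"name": name, "url": url, "tvg_id": tvg_id, "m3u_id": m3u_id, "group": group}
    hash_parts = {key: stream_parts[key] for key in keys if key in stream_parts}
    # Join values in key order so identical inputs always produce the same digest.
    payload = "|".join(str(hash_parts[k]) for k in keys if k in hash_parts)
    return hashlib.md5(payload.encode("utf-8")).hexdigest()

# Two streams that differ only by group now hash differently when "group"
# is part of the configured hash key.
a = generate_hash_key("News HD", "http://x/1.ts", "news.us", ["name", "url", "group"], group="UK")
b = generate_hash_key("News HD", "http://x/1.ts", "news.us", ["name", "url", "group"], group="US")
assert a != b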
@@ -792,7 +792,7 @@ def process_xc_category_direct(account_id, batch, groups, hash_keys):
             group_title = group_name

             stream_hash = Stream.generate_hash_key(
-                name, url, tvg_id, hash_keys, m3u_id=account_id
+                name, url, tvg_id, hash_keys, m3u_id=account_id, group=group_title
             )
             stream_props = {
                 "name": name,
@@ -966,7 +966,7 @@ def process_m3u_batch_direct(account_id, batch, groups, hash_keys):
             )
             continue

-        stream_hash = Stream.generate_hash_key(name, url, tvg_id, hash_keys, m3u_id=account_id)
+        stream_hash = Stream.generate_hash_key(name, url, tvg_id, hash_keys, m3u_id=account_id, group=group_title)
         stream_props = {
             "name": name,
             "url": url,
@@ -2292,17 +2292,27 @@ def xc_get_epg(request, user, short=False):
     output = {"epg_listings": []}

     for program in programs:
-        id = "0"
-        epg_id = "0"
         title = program['title'] if isinstance(program, dict) else program.title
         description = program['description'] if isinstance(program, dict) else program.description

         start = program["start_time"] if isinstance(program, dict) else program.start_time
         end = program["end_time"] if isinstance(program, dict) else program.end_time

+        # For database programs, use actual ID; for generated dummy programs, create synthetic ID
+        if isinstance(program, dict):
+            # Generated dummy program - create unique ID from channel + timestamp
+            program_id = str(abs(hash(f"{channel_id}_{int(start.timestamp())}")))
+        else:
+            # Database program - use actual ID
+            program_id = str(program.id)
+
+        # epg_id refers to the EPG source/channel mapping in XC panels
+        # Use the actual EPGData ID when available, otherwise fall back to 0
+        epg_id = str(channel.epg_data.id) if channel.epg_data else "0"
+
         program_output = {
-            "id": f"{id}",
-            "epg_id": f"{epg_id}",
+            "id": program_id,
+            "epg_id": epg_id,
             "title": base64.b64encode((title or "").encode()).decode(),
             "lang": "",
             "start": start.strftime("%Y-%m-%d %H:%M:%S"),
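
For generated dummy programmes there is no database row to supply an ID, so the hunk derives one from the channel and the programme start time. A small self-contained illustration of that ID scheme plus the base64 title encoding (values are hypothetical):

# Sketch only: synthetic programme ID and base64-encoded title, mirroring the hunk above.
import base64
from datetime import datetime, timezone

channel_id = 42                                            # hypothetical channel
start = datetime(2026, 1, 23, 3, 0, tzinfo=timezone.utc)   # hypothetical start time

# Unique-enough ID for a generated programme: channel + start timestamp.
program_id = str(abs(hash(f"{channel_id}_{int(start.timestamp())}")))

# XC clients expect the title base64-encoded.
title_b64 = base64.b64encode("Morning News".encode()).decode()

print(program_id, title_b64)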
@@ -2521,34 +2531,45 @@ def xc_get_series_info(request, user, series_id):
         except Exception as e:
             logger.error(f"Error refreshing series data for relation {series_relation.id}: {str(e)}")

-    # Get episodes for this series from the same M3U account
-    episode_relations = M3UEpisodeRelation.objects.filter(
-        episode__series=series,
-        m3u_account=series_relation.m3u_account
-    ).select_related('episode').order_by('episode__season_number', 'episode__episode_number')
+    # Get unique episodes for this series that have relations from any active M3U account
+    # We query episodes directly to avoid duplicates when multiple relations exist
+    # (e.g., same episode in different languages/qualities)
+    from apps.vod.models import Episode
+    episodes = Episode.objects.filter(
+        series=series,
+        m3u_relations__m3u_account__is_active=True
+    ).distinct().order_by('season_number', 'episode_number')

     # Group episodes by season
     seasons = {}
-    for relation in episode_relations:
-        episode = relation.episode
+    for episode in episodes:
         season_num = episode.season_number or 1
         if season_num not in seasons:
             seasons[season_num] = []

-        # Try to get the highest priority related M3UEpisodeRelation for this episode (for video/audio/bitrate)
+        # Get the highest priority relation for this episode (for container_extension, video/audio/bitrate)
         from apps.vod.models import M3UEpisodeRelation
-        first_relation = M3UEpisodeRelation.objects.filter(
-            episode=episode
+        best_relation = M3UEpisodeRelation.objects.filter(
+            episode=episode,
+            m3u_account__is_active=True
         ).select_related('m3u_account').order_by('-m3u_account__priority', 'id').first()

         video = audio = bitrate = None
-        if first_relation and first_relation.custom_properties:
-            info = first_relation.custom_properties.get('info')
-            if info and isinstance(info, dict):
-                info_info = info.get('info')
-                if info_info and isinstance(info_info, dict):
-                    video = info_info.get('video', {})
-                    audio = info_info.get('audio', {})
-                    bitrate = info_info.get('bitrate', 0)
+        container_extension = "mp4"
+        added_timestamp = str(int(episode.created_at.timestamp()))
+
+        if best_relation:
+            container_extension = best_relation.container_extension or "mp4"
+            added_timestamp = str(int(best_relation.created_at.timestamp()))
+            if best_relation.custom_properties:
+                info = best_relation.custom_properties.get('info')
+                if info and isinstance(info, dict):
+                    info_info = info.get('info')
+                    if info_info and isinstance(info_info, dict):
+                        video = info_info.get('video', {})
+                        audio = info_info.get('audio', {})
+                        bitrate = info_info.get('bitrate', 0)

         if video is None:
             video = episode.custom_properties.get('video', {}) if episode.custom_properties else {}
         if audio is None:
@@ -2561,8 +2582,8 @@ def xc_get_series_info(request, user, series_id):
                 "season": season_num,
                 "episode_num": episode.episode_number or 0,
                 "title": episode.name,
-                "container_extension": relation.container_extension or "mp4",
-                "added": str(int(relation.created_at.timestamp())),
+                "container_extension": container_extension,
+                "added": added_timestamp,
                 "custom_sid": None,
                 "direct_source": "",
                 "info": {
@@ -2878,7 +2899,7 @@ def xc_series_stream(request, username, password, stream_id, extension):
     filters = {"episode_id": stream_id, "m3u_account__is_active": True}

     try:
-        episode_relation = M3UEpisodeRelation.objects.select_related('episode').get(**filters)
+        episode_relation = M3UEpisodeRelation.objects.select_related('episode').filter(**filters).order_by('-m3u_account__priority', 'id').first()
     except M3UEpisodeRelation.DoesNotExist:
         return JsonResponse({"error": "Episode not found"}, status=404)
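
Several hunks in this commit switch from `.get()` to a priority-ordered `.filter(...).first()` so that, when more than one active M3U account carries the same episode, the highest-priority account wins. A minimal Django ORM sketch of that lookup pattern (the helper name is hypothetical; model names come from the diff, and the explicit None check is illustrative since `.first()` does not raise):

# Sketch only: pick the relation from the highest-priority active account.
from apps.vod.models import M3UEpisodeRelation

def best_episode_relation(episode_id):
    relation = (
        M3UEpisodeRelation.objects
        .filter(episode_id=episode_id, m3u_account__is_active=True)
        .select_related('episode', 'm3u_account')
        .order_by('-m3u_account__priority', 'id')   # highest priority first, stable tie-break
        .first()
    )
    return relation  # may be None if no active account offers this episode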
@@ -48,9 +48,11 @@ class ClientManager:
             # Import here to avoid potential import issues
             from apps.proxy.ts_proxy.channel_status import ChannelStatus
             import redis
+            from django.conf import settings

-            # Get all channels from Redis
-            redis_client = redis.Redis.from_url('redis://localhost:6379', decode_responses=True)
+            # Get all channels from Redis using settings
+            redis_url = getattr(settings, 'REDIS_URL', 'redis://localhost:6379/0')
+            redis_client = redis.Redis.from_url(redis_url, decode_responses=True)
             all_channels = []
             cursor = 0
@@ -471,7 +471,7 @@ def validate_stream_url(url, user_agent=None, timeout=(5, 5)):
         # If HEAD not supported, server will return 405 or other error
         if 200 <= head_response.status_code < 300:
             # HEAD request successful
-            return True, head_response.url, head_response.status_code, "Valid (HEAD request)"
+            return True, url, head_response.status_code, "Valid (HEAD request)"

         # Try a GET request with stream=True to avoid downloading all content
         get_response = session.get(
@@ -484,7 +484,7 @@ def validate_stream_url(url, user_agent=None, timeout=(5, 5)):
         # IMPORTANT: Check status code first before checking content
         if not (200 <= get_response.status_code < 300):
             logger.warning(f"Stream validation failed with HTTP status {get_response.status_code}")
-            return False, get_response.url, get_response.status_code, f"Invalid HTTP status: {get_response.status_code}"
+            return False, url, get_response.status_code, f"Invalid HTTP status: {get_response.status_code}"

         # Only check content if status code is valid
         try:
@@ -538,7 +538,7 @@ def validate_stream_url(url, user_agent=None, timeout=(5, 5)):
         get_response.close()

         # If we have content, consider it valid even with unrecognized content type
-        return is_valid, get_response.url, get_response.status_code, message
+        return is_valid, url, get_response.status_code, message

     except requests.exceptions.Timeout:
         return False, url, 0, "Timeout connecting to stream"
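
The three hunks above all return the URL the caller asked to validate rather than the response object's final, post-redirect URL. A tiny requests-based illustration of the difference (example.com is a placeholder):

# Sketch only: response.url reflects redirects; the original url does not.
import requests

url = "http://example.com/stream.ts"          # hypothetical stream URL
resp = requests.get(url, stream=True, allow_redirects=True, timeout=(5, 5))

print(url)        # what the caller asked to validate
print(resp.url)   # where the request ended up after any redirects
resp.close()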
@@ -97,7 +97,11 @@ class PersistentVODConnection:
         # First check if we have a pre-stored content length from HEAD request
         try:
             import redis
-            r = redis.StrictRedis(host='localhost', port=6379, db=0, decode_responses=True)
+            from django.conf import settings
+            redis_host = getattr(settings, 'REDIS_HOST', 'localhost')
+            redis_port = int(getattr(settings, 'REDIS_PORT', 6379))
+            redis_db = int(getattr(settings, 'REDIS_DB', 0))
+            r = redis.StrictRedis(host=redis_host, port=redis_port, db=redis_db, decode_responses=True)
             content_length_key = f"vod_content_length:{self.session_id}"
             stored_length = r.get(content_length_key)
             if stored_length:
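
The hard-coded localhost client is replaced by one built from Django settings with safe fallbacks. A small helper version of the same pattern, assuming the REDIS_HOST/REDIS_PORT/REDIS_DB setting names used in this diff:

# Sketch only: build a Redis client from Django settings, falling back to local defaults.
import redis
from django.conf import settings

def redis_from_settings() -> redis.StrictRedis:
    host = getattr(settings, 'REDIS_HOST', 'localhost')
    port = int(getattr(settings, 'REDIS_PORT', 6379))
    db = int(getattr(settings, 'REDIS_DB', 0))
    return redis.StrictRedis(host=host, port=port, db=db, decode_responses=True)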
@@ -24,6 +24,11 @@ from apps.m3u.models import M3UAccountProfile
 logger = logging.getLogger("vod_proxy")


+def get_vod_client_stop_key(client_id):
+    """Get the Redis key for signaling a VOD client to stop"""
+    return f"vod_proxy:client:{client_id}:stop"
+
+
 def infer_content_type_from_url(url: str) -> Optional[str]:
     """
     Infer MIME type from file extension in URL
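
This helper names the per-client stop key that the new control endpoint writes and the streaming worker polls. A sketch of the key's lifecycle with a plain redis client (the client_id is hypothetical):

# Sketch only: lifecycle of the per-client stop key.
import redis

def get_vod_client_stop_key(client_id):
    return f"vod_proxy:client:{client_id}:stop"

r = redis.Redis(decode_responses=True)
stop_key = get_vod_client_stop_key("abc123")

r.setex(stop_key, 60, "true")      # control side: request a stop, auto-expires after 60s
if r.exists(stop_key):             # streaming side: polled every N chunks
    r.delete(stop_key)             # acknowledge and clean up before breaking the loop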
@@ -832,6 +837,7 @@ class MultiWorkerVODConnectionManager:
             # Create streaming generator
             def stream_generator():
                 decremented = False
+                stop_signal_detected = False
                 try:
                     logger.info(f"[{client_id}] Worker {self.worker_id} - Starting Redis-backed stream")
@@ -846,14 +852,25 @@
                     bytes_sent = 0
                     chunk_count = 0

+                    # Get the stop signal key for this client
+                    stop_key = get_vod_client_stop_key(client_id)
+
                     for chunk in upstream_response.iter_content(chunk_size=8192):
                         if chunk:
                             yield chunk
                             bytes_sent += len(chunk)
                             chunk_count += 1

-                            # Update activity every 100 chunks in consolidated connection state
+                            # Check for stop signal every 100 chunks
                             if chunk_count % 100 == 0:
+                                # Check if stop signal has been set
+                                if self.redis_client and self.redis_client.exists(stop_key):
+                                    logger.info(f"[{client_id}] Worker {self.worker_id} - Stop signal detected, terminating stream")
+                                    # Delete the stop key
+                                    self.redis_client.delete(stop_key)
+                                    stop_signal_detected = True
+                                    break
+
+                                # Update the connection state
                                 logger.debug(f"Client: [{client_id}] Worker: {self.worker_id} sent {chunk_count} chunks for VOD: {content_name}")
                                 if redis_connection._acquire_lock():
@@ -867,7 +884,10 @@
                                     finally:
                                         redis_connection._release_lock()

-                    logger.info(f"[{client_id}] Worker {self.worker_id} - Redis-backed stream completed: {bytes_sent} bytes sent")
+                    if stop_signal_detected:
+                        logger.info(f"[{client_id}] Worker {self.worker_id} - Stream stopped by signal: {bytes_sent} bytes sent")
+                    else:
+                        logger.info(f"[{client_id}] Worker {self.worker_id} - Redis-backed stream completed: {bytes_sent} bytes sent")
                     redis_connection.decrement_active_streams()
                     decremented = True
@@ -21,4 +21,7 @@ urlpatterns = [

     # VOD Stats
     path('stats/', views.VODStatsView.as_view(), name='vod_stats'),
+
+    # Stop VOD client connection
+    path('stop_client/', views.stop_vod_client, name='stop_vod_client'),
 ]
@@ -15,7 +15,7 @@ from django.views import View
 from apps.vod.models import Movie, Series, Episode
 from apps.m3u.models import M3UAccount, M3UAccountProfile
 from apps.proxy.vod_proxy.connection_manager import VODConnectionManager
-from apps.proxy.vod_proxy.multi_worker_connection_manager import MultiWorkerVODConnectionManager, infer_content_type_from_url
+from apps.proxy.vod_proxy.multi_worker_connection_manager import MultiWorkerVODConnectionManager, infer_content_type_from_url, get_vod_client_stop_key
 from .utils import get_client_info, create_vod_response

 logger = logging.getLogger(__name__)
@@ -329,7 +329,11 @@ class VODStreamView(View):
             # Store the total content length in Redis for the persistent connection to use
             try:
                 import redis
-                r = redis.StrictRedis(host='localhost', port=6379, db=0, decode_responses=True)
+                from django.conf import settings
+                redis_host = getattr(settings, 'REDIS_HOST', 'localhost')
+                redis_port = int(getattr(settings, 'REDIS_PORT', 6379))
+                redis_db = int(getattr(settings, 'REDIS_DB', 0))
+                r = redis.StrictRedis(host=redis_host, port=redis_port, db=redis_db, decode_responses=True)
                 content_length_key = f"vod_content_length:{session_id}"
                 r.set(content_length_key, total_size, ex=1800)  # Store for 30 minutes
                 logger.info(f"[VOD-HEAD] Stored total content length {total_size} for session {session_id}")
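
The HEAD handler stores the total size under a session-scoped key with a 30-minute expiry, and the persistent connection shown in an earlier hunk reads the same key back. A minimal write/read sketch of that handshake (the session id and size are hypothetical):

# Sketch only: store the content length on the HEAD path, read it back on the stream path.
import redis

r = redis.StrictRedis(decode_responses=True)
session_id = "sess-123"                           # hypothetical session
content_length_key = f"vod_content_length:{session_id}"

r.set(content_length_key, 734003200, ex=1800)     # writer: keep for 30 minutes

stored = r.get(content_length_key)                # reader: may be None after expiry
total_size = int(stored) if stored else None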
@@ -1011,3 +1015,59 @@ class VODStatsView(View):
         except Exception as e:
             logger.error(f"Error getting VOD stats: {e}")
             return JsonResponse({'error': str(e)}, status=500)
+
+
+from rest_framework.decorators import api_view, permission_classes
+from apps.accounts.permissions import IsAdmin
+
+
+@csrf_exempt
+@api_view(["POST"])
+@permission_classes([IsAdmin])
+def stop_vod_client(request):
+    """Stop a specific VOD client connection using stop signal mechanism"""
+    try:
+        # Parse request body
+        import json
+        try:
+            data = json.loads(request.body)
+        except json.JSONDecodeError:
+            return JsonResponse({'error': 'Invalid JSON'}, status=400)
+
+        client_id = data.get('client_id')
+        if not client_id:
+            return JsonResponse({'error': 'No client_id provided'}, status=400)
+
+        logger.info(f"Request to stop VOD client: {client_id}")
+
+        # Get Redis client
+        connection_manager = MultiWorkerVODConnectionManager.get_instance()
+        redis_client = connection_manager.redis_client
+
+        if not redis_client:
+            return JsonResponse({'error': 'Redis not available'}, status=500)
+
+        # Check if connection exists
+        connection_key = f"vod_persistent_connection:{client_id}"
+        connection_data = redis_client.hgetall(connection_key)
+        if not connection_data:
+            logger.warning(f"VOD connection not found: {client_id}")
+            return JsonResponse({'error': 'Connection not found'}, status=404)
+
+        # Set a stop signal key that the worker will check
+        stop_key = get_vod_client_stop_key(client_id)
+        redis_client.setex(stop_key, 60, "true")  # 60 second TTL
+
+        logger.info(f"Set stop signal for VOD client: {client_id}")
+
+        return JsonResponse({
+            'message': 'VOD client stop signal sent',
+            'client_id': client_id,
+            'stop_key': stop_key
+        })
+
+    except Exception as e:
+        logger.error(f"Error stopping VOD client: {e}", exc_info=True)
+        return JsonResponse({'error': str(e)}, status=500)
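
With the route from the earlier urls.py hunk wired up, an admin client can trigger the stop signal with a simple POST. A hedged usage sketch; the host, the /proxy/vod/ prefix, and the token auth are assumptions to adapt to the actual deployment:

# Sketch only: asking the proxy to stop one VOD client (URL prefix and auth are assumed).
import requests

resp = requests.post(
    "http://dispatcharr.local/proxy/vod/stop_client/",
    json={"client_id": "abc123"},                       # hypothetical client id
    headers={"Authorization": "Bearer <admin token>"},  # admin-only endpoint
    timeout=5,
)
print(resp.status_code, resp.json())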
@@ -1232,7 +1232,13 @@ def refresh_series_episodes(account, series, external_series_id, episodes_data=N


 def batch_process_episodes(account, series, episodes_data, scan_start_time=None):
-    """Process episodes in batches for better performance"""
+    """Process episodes in batches for better performance.
+
+    Note: Multiple streams can represent the same episode (e.g., different languages
+    or qualities). Each stream has a unique stream_id, but they share the same
+    season/episode number. We create one Episode record per (series, season, episode)
+    and multiple M3UEpisodeRelation records pointing to it.
+    """
     if not episodes_data:
         return
@@ -1249,12 +1255,13 @@ def batch_process_episodes(account, series, episodes_data, scan_start_time=None)
     logger.info(f"Batch processing {len(all_episodes_data)} episodes for series {series.name}")

     # Extract episode identifiers
-    episode_keys = []
+    # Note: episode_keys may have duplicates when multiple streams represent same episode
+    episode_keys = set()  # Use set to track unique episode keys
     episode_ids = []
     for episode_data in all_episodes_data:
         season_num = episode_data['_season_number']
         episode_num = episode_data.get('episode_num', 0)
-        episode_keys.append((series.id, season_num, episode_num))
+        episode_keys.add((series.id, season_num, episode_num))
         episode_ids.append(str(episode_data.get('id')))

     # Pre-fetch existing episodes
@@ -1277,6 +1284,10 @@ def batch_process_episodes(account, series, episodes_data, scan_start_time=None)
     relations_to_create = []
     relations_to_update = []

+    # Track episodes we're creating in this batch to avoid duplicates
+    # Key: (series_id, season_number, episode_number) -> Episode object
+    episodes_pending_creation = {}
+
     for episode_data in all_episodes_data:
         try:
             episode_id = str(episode_data.get('id'))
@@ -1306,10 +1317,15 @@ def batch_process_episodes(account, series, episodes_data, scan_start_time=None)
             if backdrop:
                 custom_props['backdrop_path'] = [backdrop]

-            # Find existing episode
+            # Find existing episode - check DB first, then pending creations
             episode_key = (series.id, season_number, episode_number)
             episode = existing_episodes.get(episode_key)

+            # Check if we already have this episode pending creation (multiple streams for same episode)
+            if not episode and episode_key in episodes_pending_creation:
+                episode = episodes_pending_creation[episode_key]
+                logger.debug(f"Reusing pending episode for S{season_number:02d}E{episode_number:02d} (stream_id: {episode_id})")
+
             if episode:
                 # Update existing episode
                 updated = False
@@ -1338,7 +1354,9 @@ def batch_process_episodes(account, series, episodes_data, scan_start_time=None)
                     episode.custom_properties = custom_props if custom_props else None
                     updated = True

-                if updated:
+                # Only add to update list if episode has a PK (exists in DB) and isn't already in list
+                # Episodes pending creation don't have PKs yet and will be created via bulk_create
+                if updated and episode.pk and episode not in episodes_to_update:
                     episodes_to_update.append(episode)
             else:
                 # Create new episode
@@ -1356,6 +1374,8 @@ def batch_process_episodes(account, series, episodes_data, scan_start_time=None)
                     custom_properties=custom_props if custom_props else None
                 )
                 episodes_to_create.append(episode)
+                # Track this episode so subsequent streams with same season/episode can reuse it
+                episodes_pending_creation[episode_key] = episode

             # Handle episode relation
             if episode_id in existing_relations:
@@ -1389,9 +1409,28 @@ def batch_process_episodes(account, series, episodes_data, scan_start_time=None)

     # Execute batch operations
     with transaction.atomic():
-        # Create new episodes
+        # Create new episodes - use ignore_conflicts in case of race conditions
         if episodes_to_create:
-            Episode.objects.bulk_create(episodes_to_create)
+            Episode.objects.bulk_create(episodes_to_create, ignore_conflicts=True)
+
+            # Re-fetch the created episodes to get their PKs
+            # We need to do this because bulk_create with ignore_conflicts doesn't set PKs
+            created_episode_keys = [
+                (ep.series_id, ep.season_number, ep.episode_number)
+                for ep in episodes_to_create
+            ]
+            db_episodes = Episode.objects.filter(series=series)
+            episode_pk_map = {
+                (ep.series_id, ep.season_number, ep.episode_number): ep
+                for ep in db_episodes
+            }
+
+            # Update relations to point to the actual DB episodes with PKs
+            for relation in relations_to_create:
+                ep = relation.episode
+                key = (ep.series_id, ep.season_number, ep.episode_number)
+                if key in episode_pk_map:
+                    relation.episode = episode_pk_map[key]

         # Update existing episodes
         if episodes_to_update:
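
The re-fetch in the hunk above is needed because, on most database backends, bulk_create with ignore_conflicts does not populate primary keys on the in-memory objects. A compact sketch of that behavior and the remap step; it assumes an existing Series instance named series and uses the project's Episode model:

# Sketch only: after bulk_create(ignore_conflicts=True), re-read rows by natural key
# before wiring up foreign keys.
from apps.vod.models import Episode

new_eps = [Episode(series=series, season_number=1, episode_number=n) for n in (1, 2)]
Episode.objects.bulk_create(new_eps, ignore_conflicts=True)
print(new_eps[0].pk)   # typically None on most backends

pk_map = {
    (ep.series_id, ep.season_number, ep.episode_number): ep
    for ep in Episode.objects.filter(series=series)
}
episode_with_pk = pk_map[(series.id, 1, 1)]   # use this object when creating relations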
@@ -1400,9 +1439,9 @@ def batch_process_episodes(account, series, episodes_data, scan_start_time=None)
             'tmdb_id', 'imdb_id', 'custom_properties'
         ])

-        # Create new episode relations
+        # Create new episode relations - use ignore_conflicts for stream_id duplicates
         if relations_to_create:
-            M3UEpisodeRelation.objects.bulk_create(relations_to_create)
+            M3UEpisodeRelation.objects.bulk_create(relations_to_create, ignore_conflicts=True)

         # Update existing episode relations
         if relations_to_update: