Dispatcharr 2025-10-09 13:45:34 -05:00
parent dc50079b17
commit f7e6d39ac7
19 changed files with 1586 additions and 62 deletions

View file

@ -350,7 +350,32 @@ class MediaItemViewSet(viewsets.ModelViewSet):
status=status.HTTP_404_NOT_FOUND,
)
start_ms = 0
start_ms_param = request.query_params.get("start_ms")
if start_ms_param not in (None, "", "0"):
try:
start_ms = max(0, int(start_ms_param))
except (TypeError, ValueError):
raise ValidationError({"start_ms": "Start offset must be an integer number of milliseconds."})
applied_start_ms = 0
should_embed_start = False
if start_ms > 0 and file.requires_transcode:
cached_ready = (
file.transcode_status == models.MediaFile.TRANSCODE_STATUS_READY
and file.transcoded_path
and os.path.exists(file.transcoded_path)
)
if not cached_ready:
applied_start_ms = start_ms
should_embed_start = True
duration_ms = file.effective_duration_ms or item.runtime_ms or 0
payload = {"file_id": file.id, "user_id": request.user.id}
if should_embed_start:
payload["start_ms"] = applied_start_ms
token = self._stream_signer.sign_object(payload)
stream_url = request.build_absolute_uri(
reverse("api:media:stream-file", args=[token])
@ -362,9 +387,12 @@ class MediaItemViewSet(viewsets.ModelViewSet):
"file_id": file.id,
"expires_in": ttl,
"type": "direct",
"duration_ms": file.duration_ms,
"duration_ms": duration_ms,
"bit_rate": file.bit_rate,
"container": file.container,
"requires_transcode": file.requires_transcode,
"transcode_status": file.transcode_status,
"start_offset_ms": applied_start_ms,
}
)
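A minimal sketch (not part of this commit) of how the signed stream token round-trips, assuming the viewset's _stream_signer shares the salt used by the stream view further down; the ids and offset are invented:

from django.core.signing import SignatureExpired, TimestampSigner

signer = TimestampSigner(salt="media-library-stream")
# What the viewset embeds when a resume offset needs a server-side seek.
token = signer.sign_object({"file_id": 42, "user_id": 7, "start_ms": 90_000})
try:
    payload = signer.unsign_object(token, max_age=3600)  # TOKEN_TTL default
except SignatureExpired:
    payload = None  # the stream view rejects expired tokens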

View file

@ -0,0 +1,43 @@
# Generated by Django 5.0.14 on 2025-10-09 16:00
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('media_library', '0002_library_use_as_vod_source'),
]
operations = [
migrations.AddField(
model_name='mediafile',
name='requires_transcode',
field=models.BooleanField(default=False),
),
migrations.AddField(
model_name='mediafile',
name='transcode_error',
field=models.TextField(blank=True),
),
migrations.AddField(
model_name='mediafile',
name='transcode_status',
field=models.CharField(choices=[('not_required', 'Not Required'), ('pending', 'Pending'), ('processing', 'Processing'), ('ready', 'Ready'), ('failed', 'Failed')], default='not_required', max_length=20),
),
migrations.AddField(
model_name='mediafile',
name='transcoded_at',
field=models.DateTimeField(blank=True, null=True),
),
migrations.AddField(
model_name='mediafile',
name='transcoded_mime_type',
field=models.CharField(blank=True, max_length=128),
),
migrations.AddField(
model_name='mediafile',
name='transcoded_path',
field=models.CharField(blank=True, max_length=4096),
),
]

View file

@ -408,6 +408,24 @@ class ArtworkAsset(models.Model):
class MediaFile(models.Model):
"""Physical file on disk that is associated with a media item."""
TRANSCODE_STATUS_NOT_REQUIRED = "not_required"
TRANSCODE_STATUS_PENDING = "pending"
TRANSCODE_STATUS_PROCESSING = "processing"
TRANSCODE_STATUS_READY = "ready"
TRANSCODE_STATUS_FAILED = "failed"
TRANSCODE_STATUS_CHOICES = [
(TRANSCODE_STATUS_NOT_REQUIRED, "Not Required"),
(TRANSCODE_STATUS_PENDING, "Pending"),
(TRANSCODE_STATUS_PROCESSING, "Processing"),
(TRANSCODE_STATUS_READY, "Ready"),
(TRANSCODE_STATUS_FAILED, "Failed"),
]
BROWSER_SAFE_CONTAINERS = {"mp4", "m4v"}
BROWSER_SAFE_VIDEO_CODECS = {"h264", "avc1"}
BROWSER_SAFE_AUDIO_CODECS = {"aac", "mp3", "mp4a", "libmp3lame"}
library = models.ForeignKey(
Library,
on_delete=models.CASCADE,
@ -446,6 +464,16 @@ class MediaFile(models.Model):
checksum = models.CharField(max_length=64, blank=True, db_index=True)
fingerprint = models.CharField(max_length=64, blank=True, db_index=True)
last_modified_at = models.DateTimeField(blank=True, null=True)
requires_transcode = models.BooleanField(default=False)
transcode_status = models.CharField(
max_length=20,
choices=TRANSCODE_STATUS_CHOICES,
default=TRANSCODE_STATUS_NOT_REQUIRED,
)
transcoded_path = models.CharField(max_length=4096, blank=True)
transcoded_mime_type = models.CharField(max_length=128, blank=True)
transcode_error = models.TextField(blank=True)
transcoded_at = models.DateTimeField(blank=True, null=True)
last_seen_at = models.DateTimeField(blank=True, null=True)
missing_since = models.DateTimeField(blank=True, null=True)
notes = models.TextField(blank=True)
@ -468,6 +496,75 @@ class MediaFile(models.Model):
def extension(self):
return os.path.splitext(self.file_name)[1].lower()
def _normalized_container(self) -> str:
container = (self.container or "").split(",")[0].strip().lower()
if container:
return container
ext = self.extension
return ext[1:] if ext.startswith(".") else ext
@staticmethod
def _normalized_codec(codec_value: str) -> str:
return (codec_value or "").split(".")[0].strip().lower()
def is_browser_playable(self) -> bool:
container = self._normalized_container()
if container not in self.BROWSER_SAFE_CONTAINERS:
return False
video_codec = self._normalized_codec(self.video_codec)
if video_codec and video_codec not in self.BROWSER_SAFE_VIDEO_CODECS:
return False
audio_codec = self._normalized_codec(self.audio_codec)
if audio_codec and audio_codec not in self.BROWSER_SAFE_AUDIO_CODECS:
return False
return True
@property
def effective_duration_ms(self) -> int | None:
"""
Return the best-known duration (ms). Falls back to probe metadata and
associated media item runtime when the direct field is missing.
"""
if self.duration_ms:
try:
return int(self.duration_ms)
except (TypeError, ValueError):
pass
extra = self.extra_streams or {}
format_info = extra.get("format") or {}
candidates: list[tuple[object, float]] = []
if "duration_ms" in format_info:
candidates.append((format_info.get("duration_ms"), 1.0))
if "duration" in format_info:
# ffprobe reports seconds in this field.
candidates.append((format_info.get("duration"), 1000.0))
for value, multiplier in candidates:
if value in (None, "", 0):
continue
try:
numeric = float(value)
except (TypeError, ValueError):
continue
if numeric <= 0:
continue
return int(numeric * multiplier)
if self.media_item_id:
runtime_ms = self.media_item.runtime_ms
if runtime_ms:
try:
return int(runtime_ms)
except (TypeError, ValueError):
return None
return None
def calculate_checksum(self, chunk_size: int = 1024 * 1024) -> str:
"""Calculate a SHA1 checksum for the file."""
sha1 = hashlib.sha1()
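A minimal sketch (not from the commit) exercising the new MediaFile helpers on an unsaved instance; the field values are invented for illustration:

from apps.media_library.models import MediaFile

mf = MediaFile(
    file_name="movie.mkv",
    container="matroska,webm",  # ffprobe-style comma-separated list
    video_codec="hevc",
    audio_codec="aac",
    extra_streams={"format": {"duration": "5400.5"}},  # seconds, per ffprobe
)
assert mf._normalized_container() == "matroska"
assert mf.is_browser_playable() is False       # container is not mp4/m4v
assert mf.effective_duration_ms == 5_400_500   # falls back to format.duration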

View file

@ -207,6 +207,12 @@ class MediaFileSerializer(serializers.ModelSerializer):
"checksum",
"fingerprint",
"last_modified_at",
"requires_transcode",
"transcode_status",
"transcoded_path",
"transcoded_mime_type",
"transcode_error",
"transcoded_at",
"last_seen_at",
"missing_since",
"created_at",
@ -215,6 +221,12 @@ class MediaFileSerializer(serializers.ModelSerializer):
read_only_fields = [
"id",
"last_modified_at",
"requires_transcode",
"transcode_status",
"transcoded_path",
"transcoded_mime_type",
"transcode_error",
"transcoded_at",
"last_seen_at",
"missing_since",
"created_at",

View file

@ -20,6 +20,7 @@ from apps.media_library.utils import (
probe_media_file,
resolve_media_item,
)
from apps.media_library.transcode import ensure_browser_ready_source
from apps.media_library.vod_sync import (
sync_library_to_vod,
sync_media_item_to_vod,
@ -467,6 +468,16 @@ def _probe_media_file(*, file_id: int) -> None:
file_record.checksum = checksum
file_record.save(update_fields=["checksum", "updated_at"])
if file_record.requires_transcode and (
file_record.transcode_status
in (
MediaFile.TRANSCODE_STATUS_PENDING,
MediaFile.TRANSCODE_STATUS_FAILED,
)
or not file_record.transcoded_path
):
transcode_media_file_task.delay(file_record.id)
@shared_task(name="media_library.probe_media")
def probe_media_task(file_id: int):
@ -510,6 +521,23 @@ def prune_stale_scans(max_age_hours: int = 72):
logger.info("Pruned %s stale library scan records", deleted)
@shared_task(name="media_library.transcode_media_file")
def transcode_media_file_task(file_id: int, force: bool = False):
try:
media_file = MediaFile.objects.get(pk=file_id)
except MediaFile.DoesNotExist:
logger.warning("Media file %s not found for transcoding", file_id)
return
try:
ensure_browser_ready_source(media_file, force=force)
except FileNotFoundError:
logger.warning("Source file missing for media file %s", file_id)
except Exception as exc: # noqa: BLE001
logger.exception("Transcode failed for media file %s: %s", file_id, exc)
raise
@shared_task(name="media_library.schedule_auto_scans")
def schedule_auto_scans():
now = timezone.now()
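A minimal sketch of queuing the new task from a Django shell, assuming Celery workers are running; the id is hypothetical:

from apps.media_library.tasks import transcode_media_file_task

transcode_media_file_task.delay(123)              # reuses a READY cached output
transcode_media_file_task.delay(123, force=True)  # rebuilds the artifact regardless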

View file

@ -0,0 +1,471 @@
import contextlib
import hashlib
import logging
import mimetypes
import os
import shutil
import subprocess
import tempfile
import threading
from collections import deque
from pathlib import Path
from typing import Iterable, Tuple
from django.conf import settings
from django.utils import timezone
from .models import MediaFile
logger = logging.getLogger(__name__)
CHUNK_SIZE = 128 * 1024 # 128KB chunks for streaming
def _as_path(value) -> Path:
if isinstance(value, Path):
return value
return Path(str(value))
def _int_setting(name: str, default: int) -> int:
value = getattr(settings, name, default)
try:
return int(value)
except (TypeError, ValueError):
return default
TRANSCODE_ROOT = _as_path(
getattr(settings, "MEDIA_LIBRARY_TRANSCODE_DIR", settings.MEDIA_ROOT / "transcoded")
)
TRANSCODE_ROOT.mkdir(parents=True, exist_ok=True)
FFMPEG_PATH = getattr(settings, "MEDIA_LIBRARY_FFMPEG_PATH", "ffmpeg")
VIDEO_BITRATE = _int_setting("MEDIA_LIBRARY_TRANSCODE_VIDEO_BITRATE", 4500)
AUDIO_BITRATE = _int_setting("MEDIA_LIBRARY_TRANSCODE_AUDIO_BITRATE", 192)
PRESET = getattr(settings, "MEDIA_LIBRARY_TRANSCODE_PRESET", "veryfast")
TARGET_VIDEO_CODEC = getattr(settings, "MEDIA_LIBRARY_TRANSCODE_VIDEO_CODEC", "libx264")
TARGET_AUDIO_CODEC = getattr(settings, "MEDIA_LIBRARY_TRANSCODE_AUDIO_CODEC", "aac")
def _build_target_path(media_file: MediaFile) -> Path:
identifier = media_file.checksum or f"{media_file.absolute_path}:{media_file.size_bytes}"
digest = hashlib.sha256(identifier.encode("utf-8")).hexdigest()[:16]
filename = f"{media_file.id}_{digest}.mp4"
return TRANSCODE_ROOT / filename
def _normalize_mime(path: Path) -> str:
mime, _ = mimetypes.guess_type(path.name)
return mime or "video/mp4"
def _build_ffmpeg_command(
source_path: Path, *, output: str, fragmented: bool, start_seconds: float = 0.0
) -> list[str]:
command = [FFMPEG_PATH, "-y"]
if start_seconds and start_seconds > 0:
command.extend(["-ss", f"{start_seconds:.3f}"])
command.extend(
[
"-i",
str(source_path),
]
)
command.extend(
[
"-map",
"0:v:0",
"-map",
"0:a:0?",
"-c:v",
TARGET_VIDEO_CODEC,
"-preset",
PRESET,
"-profile:v",
"high",
"-level",
"4.0",
"-pix_fmt",
"yuv420p",
"-max_muxing_queue_size",
"1024",
"-c:a",
TARGET_AUDIO_CODEC,
"-b:a",
f"{AUDIO_BITRATE}k",
"-sn",
]
)
if VIDEO_BITRATE > 0:
command.extend(["-b:v", f"{VIDEO_BITRATE}k"])
if fragmented:
command.extend(["-movflags", "frag_keyframe+empty_moov+faststart", "-f", "mp4", output])
else:
command.extend(["-movflags", "+faststart", output])
return command
def ensure_browser_ready_source(
media_file: MediaFile, *, force: bool = False
) -> Tuple[str, str]:
"""
Ensure the provided media file is playable by major browsers (Chromium, Firefox, Safari).
Returns a tuple of (absolute_path, mime_type) pointing at either the original file (if compatible)
or a transcoded MP4 fallback.
"""
if media_file.is_browser_playable() and not force:
path = _as_path(media_file.absolute_path)
mime_type, _ = mimetypes.guess_type(path.name)
return str(path), mime_type or "video/mp4"
target_path = _ensure_transcode_to_file(media_file, force=force)
return str(target_path), "video/mp4"
def _ensure_transcode_to_file(media_file: MediaFile, *, force: bool = False) -> Path:
source_path = _as_path(media_file.absolute_path)
if not source_path.exists():
raise FileNotFoundError(f"Media source missing at {source_path}")
target_path = _build_target_path(media_file)
# Re-use existing artifact when it is up-to-date unless force=True.
if (
not force
and media_file.transcode_status == MediaFile.TRANSCODE_STATUS_READY
and media_file.transcoded_path
):
cached_path = Path(media_file.transcoded_path)
if cached_path.exists():
source_mtime = source_path.stat().st_mtime
if cached_path.stat().st_mtime >= source_mtime:
return cached_path
# Cached metadata is stale; clear below.
# Remove stale artifact if a new digest produced a different path.
if media_file.transcoded_path and media_file.transcoded_path != str(target_path):
old_path = Path(media_file.transcoded_path)
if old_path.exists():
try:
old_path.unlink()
except OSError:
logger.debug("Unable to remove old transcode %s", old_path)
target_path.parent.mkdir(parents=True, exist_ok=True)
media_file.transcode_status = MediaFile.TRANSCODE_STATUS_PROCESSING
media_file.transcode_error = ""
media_file.requires_transcode = True
media_file.transcoded_path = ""
media_file.transcoded_mime_type = ""
media_file.transcoded_at = None
media_file.save(
update_fields=[
"transcode_status",
"transcode_error",
"requires_transcode",
"transcoded_path",
"transcoded_mime_type",
"transcoded_at",
"updated_at",
]
)
fd, temp_path = tempfile.mkstemp(dir=str(target_path.parent), suffix=".mp4")
os.close(fd)
command = _build_ffmpeg_command(source_path, output=str(temp_path), fragmented=False)
logger.info(
"Transcoding media file %s (%s) to %s for browser playback",
media_file.id,
source_path,
target_path,
)
try:
result = subprocess.run(
command,
capture_output=True,
text=True,
check=False,
)
if result.returncode != 0:
stderr_tail = (result.stderr or "").strip()[-4000:]
raise RuntimeError(
f"ffmpeg exited with status {result.returncode}: {stderr_tail}"
)
shutil.move(temp_path, target_path)
except Exception as exc: # noqa: BLE001
if os.path.exists(temp_path):
try:
os.remove(temp_path)
except OSError:
pass
media_file.transcode_status = MediaFile.TRANSCODE_STATUS_FAILED
media_file.transcode_error = str(exc)
media_file.save(
update_fields=["transcode_status", "transcode_error", "updated_at"]
)
logger.error("Transcoding failed for %s: %s", media_file.id, exc)
raise
media_file.transcode_status = MediaFile.TRANSCODE_STATUS_READY
media_file.transcoded_path = str(target_path)
media_file.transcoded_mime_type = _normalize_mime(target_path)
media_file.transcoded_at = timezone.now()
media_file.transcode_error = ""
media_file.requires_transcode = True
media_file.save(
update_fields=[
"transcode_status",
"transcoded_path",
"transcoded_mime_type",
"transcoded_at",
"transcode_error",
"requires_transcode",
"updated_at",
]
)
logger.info("Finished transcoding media file %s", media_file.id)
return target_path
class LiveTranscodeSession:
"""Manage a live ffmpeg transcoding process and stream the output while writing to disk."""
mime_type = "video/mp4"
def __init__(self, media_file: MediaFile, *, start_seconds: float = 0.0):
self.media_file = media_file
self.source_path = _as_path(media_file.absolute_path)
if not self.source_path.exists():
raise FileNotFoundError(f"Media source missing at {self.source_path}")
self.target_path = _build_target_path(media_file)
self.target_path.parent.mkdir(parents=True, exist_ok=True)
self.start_seconds = max(0.0, float(start_seconds))
self.cache_enabled = self.start_seconds == 0.0
if self.cache_enabled:
fd, temp_path = tempfile.mkstemp(dir=str(self.target_path.parent), suffix=".mp4")
os.close(fd)
self.temp_path = Path(temp_path)
else:
self.temp_path = None
self.process: subprocess.Popen | None = None
self._stderr_thread: threading.Thread | None = None
self._stderr_lines: deque[str] = deque(maxlen=200)
self._aborted = False
self._finalized = False
# Prepare media file state
media_file.requires_transcode = True
media_file.transcode_status = MediaFile.TRANSCODE_STATUS_PROCESSING
media_file.transcode_error = ""
update_fields = [
"requires_transcode",
"transcode_status",
"transcode_error",
"updated_at",
]
if self.cache_enabled:
media_file.transcoded_path = ""
media_file.transcoded_mime_type = ""
media_file.transcoded_at = None
update_fields.extend(
[
"transcoded_path",
"transcoded_mime_type",
"transcoded_at",
]
)
media_file.save(update_fields=update_fields)
def start(self) -> "LiveTranscodeSession":
command = _build_ffmpeg_command(
self.source_path,
output="pipe:1",
fragmented=True,
start_seconds=self.start_seconds,
)
logger.info(
"Starting live transcode for media file %s (%s) [start=%.3fs]",
self.media_file.id,
self.source_path,
self.start_seconds,
)
self.process = subprocess.Popen(
command,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
bufsize=CHUNK_SIZE,
)
if self.process.stdout is None or self.process.stderr is None:
raise RuntimeError("Failed to capture ffmpeg output streams")
self._stderr_thread = threading.Thread(
target=self._drain_stderr, name=f"ffmpeg-stderr-{self.media_file.id}", daemon=True
)
self._stderr_thread.start()
return self
def stream(self) -> Iterable[bytes]:
if not self.process or not self.process.stdout:
raise RuntimeError("Live transcode session is not started")
try:
cache_ctx = (
open(self.temp_path, "wb")
if self.cache_enabled and self.temp_path
else contextlib.nullcontext()
)
with cache_ctx as cache_fp:
while True:
chunk = self.process.stdout.read(CHUNK_SIZE)
if not chunk:
break
if cache_fp:
cache_fp.write(chunk)
cache_fp.flush()
yield chunk
except GeneratorExit:
self._aborted = True
logger.debug("Live transcode aborted by client for media file %s", self.media_file.id)
self._terminate_process()
raise
except Exception as exc: # noqa: BLE001
self._aborted = True
logger.warning(
"Live transcode streaming error for media file %s: %s",
self.media_file.id,
exc,
)
self._terminate_process()
raise
finally:
self._finalize()
def _terminate_process(self):
if self.process and self.process.poll() is None:
try:
self.process.terminate()
self.process.wait(timeout=5)
except subprocess.TimeoutExpired:
self.process.kill()
except Exception: # noqa: BLE001
self.process.kill()
def _drain_stderr(self):
assert self.process and self.process.stderr
for line in iter(self.process.stderr.readline, b""):
text = line.decode("utf-8", errors="ignore").strip()
if text:
self._stderr_lines.append(text)
logger.debug("ffmpeg[%s]: %s", self.media_file.id, text)
def _stderr_tail(self) -> str:
return "\n".join(self._stderr_lines)
def _finalize(self):
if self._finalized:
return
self._finalized = True
return_code = None
if self.process:
try:
return_code = self.process.poll()
if return_code is None:
return_code = self.process.wait()
except Exception: # noqa: BLE001
return_code = -1
if self._stderr_thread and self._stderr_thread.is_alive():
self._stderr_thread.join(timeout=2)
if (
return_code == 0
and not self._aborted
and self.cache_enabled
and self.temp_path
and os.path.exists(self.temp_path)
):
try:
shutil.move(self.temp_path, self.target_path)
except Exception as exc: # noqa: BLE001
logger.error(
"Failed to finalize live transcode output for media file %s: %s",
self.media_file.id,
exc,
)
return_code = -1
if return_code == 0 and not self._aborted:
if not self.cache_enabled:
self.media_file.transcode_status = MediaFile.TRANSCODE_STATUS_PENDING
self.media_file.save(update_fields=["transcode_status", "updated_at"])
return
logger.info("Finished live transcode for media file %s", self.media_file.id)
self.media_file.transcode_status = MediaFile.TRANSCODE_STATUS_READY
self.media_file.transcoded_path = str(self.target_path)
self.media_file.transcoded_mime_type = self.mime_type
self.media_file.transcoded_at = timezone.now()
self.media_file.transcode_error = ""
self.media_file.requires_transcode = True
self.media_file.save(
update_fields=[
"transcode_status",
"transcoded_path",
"transcoded_mime_type",
"transcoded_at",
"transcode_error",
"requires_transcode",
"updated_at",
]
)
else:
if self.temp_path and os.path.exists(self.temp_path):
try:
os.remove(self.temp_path)
except OSError:
pass
if self._aborted:
# Mark as pending so a future request can retry.
self.media_file.transcode_status = MediaFile.TRANSCODE_STATUS_PENDING
self.media_file.save(update_fields=["transcode_status", "updated_at"])
else:
msg = self._stderr_tail() or "Unknown ffmpeg failure"
self.media_file.transcode_status = MediaFile.TRANSCODE_STATUS_FAILED
self.media_file.transcode_error = msg[-4000:]
self.media_file.save(
update_fields=["transcode_status", "transcode_error", "updated_at"]
)
logger.error(
"Live transcode failed for media file %s: %s",
self.media_file.id,
self.media_file.transcode_error,
)
def start_streaming_transcode(
media_file: MediaFile, *, start_seconds: float = 0.0
) -> LiveTranscodeSession:
"""Start a live transcoding session for the given media file."""
session = LiveTranscodeSession(media_file, start_seconds=start_seconds)
return session.start()
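A minimal usage sketch (not from the commit) of the module's two entry points; the primary key is hypothetical:

from apps.media_library.models import MediaFile
from apps.media_library.transcode import (
    ensure_browser_ready_source,
    start_streaming_transcode,
)

media_file = MediaFile.objects.get(pk=123)

# Blocking path (used by the Celery task): returns the original file when it is
# already browser-safe, otherwise transcodes to a cached MP4 and returns that.
path, mime = ensure_browser_ready_source(media_file)

# Streaming path (used by the HTTP view): fragmented MP4 piped straight from
# ffmpeg, optionally starting mid-file; output is cached only when start_seconds == 0.
session = start_streaming_transcode(media_file, start_seconds=90.0)
for chunk in session.stream():
    pass  # write chunk to the response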

View file

@ -560,12 +560,26 @@ def apply_probe_metadata(file_record: MediaFile, probe_data: dict) -> None:
format_info = probe_data.get("format", {})
streams = probe_data.get("streams", [])
duration_ms: int | None = None
duration = format_info.get("duration")
try:
if duration:
file_record.duration_ms = int(float(duration) * 1000)
except (TypeError, ValueError): # noqa: PERF203
logger.debug("Unable to parse duration '%s' for %s", duration, file_record)
if duration not in (None, "", 0):
try:
duration_ms = int(float(duration) * 1000)
file_record.duration_ms = duration_ms
except (TypeError, ValueError): # noqa: PERF203
logger.debug("Unable to parse duration '%s' for %s", duration, file_record)
if duration_ms is None:
raw_duration_ms = format_info.get("duration_ms")
if raw_duration_ms not in (None, "", 0):
try:
duration_ms = int(float(raw_duration_ms))
file_record.duration_ms = duration_ms
except (TypeError, ValueError): # noqa: PERF203
logger.debug(
"Unable to parse duration_ms '%s' for %s", raw_duration_ms, file_record
)
bit_rate = format_info.get("bit_rate")
try:
@ -606,7 +620,10 @@ def apply_probe_metadata(file_record: MediaFile, probe_data: dict) -> None:
"format": format_info,
"streams": streams,
}
file_record.save(update_fields=[
needs_transcode = not file_record.is_browser_playable()
file_record.requires_transcode = needs_transcode
update_fields = [
"duration_ms",
"bit_rate",
"container",
@ -619,8 +636,86 @@ def apply_probe_metadata(file_record: MediaFile, probe_data: dict) -> None:
"has_subtitles",
"subtitle_languages",
"extra_streams",
"requires_transcode",
"updated_at",
])
]
if not needs_transcode:
file_record.transcode_status = MediaFile.TRANSCODE_STATUS_NOT_REQUIRED
file_record.transcoded_path = ""
file_record.transcoded_mime_type = ""
file_record.transcode_error = ""
file_record.transcoded_at = None
update_fields.extend(
[
"transcode_status",
"transcoded_path",
"transcoded_mime_type",
"transcode_error",
"transcoded_at",
]
)
else:
if file_record.transcode_status in (
MediaFile.TRANSCODE_STATUS_NOT_REQUIRED,
"",
):
file_record.transcode_status = MediaFile.TRANSCODE_STATUS_PENDING
update_fields.append("transcode_status")
if (
file_record.transcode_status == MediaFile.TRANSCODE_STATUS_READY
and file_record.transcoded_path
and not os.path.exists(file_record.transcoded_path)
):
file_record.transcode_status = MediaFile.TRANSCODE_STATUS_PENDING
file_record.transcoded_path = ""
file_record.transcoded_mime_type = ""
file_record.transcoded_at = None
update_fields.extend(
[
"transcode_status",
"transcoded_path",
"transcoded_mime_type",
"transcoded_at",
]
)
file_record.save(update_fields=update_fields)
if file_record.media_item_id:
candidate_duration_ms = duration_ms or file_record.duration_ms
try:
media_item = file_record.media_item
except MediaFile.media_item.RelatedObjectDoesNotExist: # type: ignore[attr-defined]
media_item = None
if not candidate_duration_ms:
extra = file_record.extra_streams or {}
format_info = extra.get("format") or {}
fallback_candidates: list[tuple[object, float]] = []
if "duration_ms" in format_info:
fallback_candidates.append((format_info.get("duration_ms"), 1.0))
if "duration" in format_info:
fallback_candidates.append((format_info.get("duration"), 1000.0))
for value, multiplier in fallback_candidates:
if value in (None, "", 0):
continue
try:
numeric = float(value)
except (TypeError, ValueError): # noqa: PERF203
continue
if numeric <= 0:
continue
candidate_duration_ms = int(numeric * multiplier)
break
if candidate_duration_ms and media_item and (
not media_item.runtime_ms or media_item.runtime_ms < candidate_duration_ms
):
media_item.runtime_ms = int(candidate_duration_ms)
media_item.save(update_fields=["runtime_ms", "updated_at"])
def _safe_frame_rate(stream: dict) -> Optional[float]:
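A distilled restatement (not code from the commit) of the duration fallback order this hunk implements, on plain dicts:

def resolve_duration_ms(format_info: dict) -> int | None:
    duration = format_info.get("duration")  # ffprobe reports seconds here
    if duration not in (None, "", 0):
        try:
            return int(float(duration) * 1000)
        except (TypeError, ValueError):
            pass
    raw_ms = format_info.get("duration_ms")  # some sources report milliseconds directly
    if raw_ms not in (None, "", 0):
        try:
            return int(float(raw_ms))
        except (TypeError, ValueError):
            pass
    return None

assert resolve_duration_ms({"duration": "5400.5"}) == 5_400_500
assert resolve_duration_ms({"duration_ms": 5400500}) == 5_400_500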

View file

@ -1,3 +1,4 @@
import logging
import mimetypes
import os
import re
@ -14,6 +15,9 @@ from django.http import (
from django.views.decorators.http import require_GET
from apps.media_library.models import MediaFile
from apps.media_library.transcode import start_streaming_transcode
logger = logging.getLogger(__name__)
STREAM_SIGNER = TimestampSigner(salt="media-library-stream")
TOKEN_TTL = getattr(settings, "MEDIA_LIBRARY_STREAM_TOKEN_TTL", 3600)
@ -36,6 +40,13 @@ def _iter_file(file_obj, offset=0, length=None, chunk_size=8192):
yield data
def _guess_mime(path: str | None) -> str:
if not path:
return "application/octet-stream"
mime, _ = mimetypes.guess_type(path)
return mime or "application/octet-stream"
@require_GET
def stream_media_file(request, token: str):
try:
@ -51,18 +62,103 @@ def stream_media_file(request, token: str):
if request.user.is_authenticated and request.user.id != user_id:
return HttpResponseForbidden("Stream token not issued for this user")
start_ms = payload.get("start_ms", 0)
try:
start_ms = int(start_ms)
except (TypeError, ValueError):
start_ms = 0
start_seconds = max(0.0, start_ms / 1000.0)
try:
media_file = MediaFile.objects.get(pk=file_id)
except MediaFile.DoesNotExist:
raise Http404("Media file not found")
path = media_file.absolute_path
if not path or not os.path.exists(path):
duration_seconds: float | None = None
duration_ms = media_file.effective_duration_ms
if duration_ms:
try:
duration_seconds = float(duration_ms) / 1000.0
except (TypeError, ValueError):
duration_seconds = None
original_path = media_file.absolute_path or ""
cached_path = media_file.transcoded_path or ""
playback_path = ""
mime_type = "application/octet-stream"
download_name = media_file.file_name or f"{media_file.id}.mp4"
if cached_path and os.path.exists(cached_path):
playback_path = cached_path
mime_type = media_file.transcoded_mime_type or "video/mp4"
base_name, _ = os.path.splitext(media_file.file_name or "")
download_name = f"{base_name or media_file.id}.mp4"
else:
if cached_path and media_file.transcode_status == MediaFile.TRANSCODE_STATUS_READY:
# Cached entry is missing on disk; reset state so we regenerate.
media_file.transcode_status = MediaFile.TRANSCODE_STATUS_PENDING
media_file.transcoded_path = ""
media_file.transcoded_mime_type = ""
media_file.transcoded_at = None
media_file.save(
update_fields=[
"transcode_status",
"transcoded_path",
"transcoded_mime_type",
"transcoded_at",
"updated_at",
]
)
if original_path and os.path.exists(original_path) and media_file.is_browser_playable():
playback_path = original_path
mime_type = _guess_mime(original_path)
download_name = media_file.file_name or os.path.basename(original_path)
elif original_path and os.path.exists(original_path):
# Start live transcode and stream output directly.
try:
session = start_streaming_transcode(
media_file,
start_seconds=start_seconds,
)
except FileNotFoundError:
raise Http404("Media file not found")
except Exception as exc: # noqa: BLE001
logger.error(
"Unable to start live transcode for media file %s: %s",
media_file.id,
exc,
exc_info=True,
)
return HttpResponse(
"Unable to prepare video for playback. Please try again later.",
status=500,
)
base_name, _ = os.path.splitext(media_file.file_name or "")
download_name = f"{base_name or media_file.id}.mp4"
response = StreamingHttpResponse(
session.stream(),
content_type=session.mime_type,
)
response["Content-Disposition"] = f'inline; filename="{download_name}"'
response["Cache-Control"] = "no-store"
response["Accept-Ranges"] = "none"
if duration_seconds:
formatted_duration = f"{duration_seconds:.3f}"
response["X-Content-Duration"] = formatted_duration
response["Content-Duration"] = formatted_duration
return response
else:
raise Http404("Media file not found")
if not playback_path or not os.path.exists(playback_path):
raise Http404("Media file not found")
mime_type, _ = mimetypes.guess_type(path)
mime_type = mime_type or "application/octet-stream"
file_size = os.path.getsize(path)
file_size = os.path.getsize(playback_path)
mime_type = mime_type or _guess_mime(playback_path)
range_header = request.headers.get("Range")
if range_header:
@ -78,7 +174,7 @@ def stream_media_file(request, token: str):
end = min(end, file_size - 1)
length = end - start + 1
file_handle = open(path, "rb")
file_handle = open(playback_path, "rb")
def closing_iterator():
try:
@ -93,12 +189,22 @@ def stream_media_file(request, token: str):
response["Content-Range"] = f"bytes {start}-{end}/{file_size}"
response["Accept-Ranges"] = "bytes"
response["Content-Disposition"] = (
f"inline; filename=\"{os.path.basename(path)}\""
f'inline; filename="{download_name}"'
)
if duration_seconds:
formatted_duration = f"{duration_seconds:.3f}"
response["X-Content-Duration"] = formatted_duration
response["Content-Duration"] = formatted_duration
return response
response = FileResponse(open(path, "rb"), content_type=mime_type)
response = FileResponse(open(playback_path, "rb"), content_type=mime_type)
response["Accept-Ranges"] = "bytes"
response["Content-Length"] = str(file_size)
response["Content-Disposition"] = f"inline; filename=\"{os.path.basename(path)}\""
response["Content-Disposition"] = (
f'inline; filename="{download_name}"'
)
if duration_seconds:
formatted_duration = f"{duration_seconds:.3f}"
response["X-Content-Duration"] = formatted_duration
response["Content-Duration"] = formatted_duration
return response
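A minimal sketch of probing the two playback paths the view exposes; the host and token are hypothetical, only the header names come from the diff:

import requests

url = "http://dispatcharr.local/api/media/stream/<signed-token>/"

# Cached or browser-safe file: byte ranges are honoured (expect HTTP 206).
r = requests.get(url, headers={"Range": "bytes=0-1023"}, stream=True)
print(r.status_code, r.headers.get("Content-Range"), r.headers.get("X-Content-Duration"))

# Live transcode: ranges are disabled and fragmented MP4 is streamed as it is encoded.
r = requests.get(url, stream=True)
print(r.headers.get("Accept-Ranges"), r.headers.get("Content-Duration"))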

View file

@ -247,12 +247,17 @@ def _update_movie_from_media_item(movie: Movie, media_item: MediaItem) -> Movie:
poster_source_url = (
metadata_updates.get("vod_poster_source_url") if metadata_updates else poster_source
)
backdrop_entries: list[str] = []
if media_item.backdrop_url:
backdrop_entries.append(media_item.backdrop_url)
custom_updates = {
"source": "library",
"library_id": media_item.library_id,
"library_item_id": media_item.id,
"poster_url": poster_media_url or poster_source_url or media_item.poster_url,
"backdrop_url": media_item.backdrop_url,
"backdrop_path": backdrop_entries,
"quality": quality_info,
}
merged_custom = _merge_custom_properties(movie.custom_properties, custom_updates)
@ -305,12 +310,17 @@ def _update_series_from_media_item(series: Series, media_item: MediaItem) -> Ser
metadata_updates.get("vod_poster_source_url") if metadata_updates else poster_source
)
backdrop_entries: list[str] = []
if media_item.backdrop_url:
backdrop_entries.append(media_item.backdrop_url)
custom_updates = {
"source": "library",
"library_id": media_item.library_id,
"library_item_id": media_item.id,
"poster_url": poster_media_url or poster_source_url or media_item.poster_url,
"backdrop_url": media_item.backdrop_url,
"backdrop_path": backdrop_entries,
}
merged_custom = _merge_custom_properties(series.custom_properties, custom_updates)
if merged_custom != series.custom_properties:

View file

@ -1,3 +1,4 @@
import json
from rest_framework import viewsets, status
from rest_framework.response import Response
from rest_framework.decorators import action
@ -177,7 +178,11 @@ class MovieViewSet(viewsets.ReadOnlyModelViewSet):
'imdb_id': movie.imdb_id,
'duration_secs': movie.duration_secs,
'movie_image': logo_cache_url or (movie.logo.url if movie.logo else custom_props.get('poster_url')),
'backdrop_path': [custom_props.get('backdrop_url')] if custom_props.get('backdrop_url') else [],
'backdrop_path': (
custom_props.get('backdrop_path')
if isinstance(custom_props.get('backdrop_path'), list)
else ([custom_props.get('backdrop_url')] if custom_props.get('backdrop_url') else [])
),
'video': (custom_props.get('quality') or {}).get('video'),
'audio': (custom_props.get('quality') or {}).get('audio'),
'bitrate': (custom_props.get('quality') or {}).get('bitrate'),
@ -884,10 +889,19 @@ class UnifiedContentViewSet(viewsets.ReadOnlyModelViewSet):
for row in cursor.fetchall():
item_dict = dict(zip(columns, row))
custom_props = item_dict.get('custom_properties') or {}
if isinstance(custom_props, str):
try:
custom_props = json.loads(custom_props)
except json.JSONDecodeError:
custom_props = {}
# Build logo object in the format expected by frontend
logo_data = None
poster_candidate = None
if item_dict['logo_id']:
cache_url = build_logo_cache_url(request, item_dict['logo_id'])
poster_candidate = cache_url or item_dict.get('logo_url')
logo_data = {
'id': item_dict['logo_id'],
'name': item_dict['logo_name'],
@ -897,6 +911,14 @@ class UnifiedContentViewSet(viewsets.ReadOnlyModelViewSet):
'is_used': True,
'channel_names': []
}
if not poster_candidate:
poster_candidate = custom_props.get('poster_url') or custom_props.get('cover')
backdrop_values = []
if isinstance(custom_props.get('backdrop_path'), list):
backdrop_values = custom_props['backdrop_path']
elif custom_props.get('backdrop_url'):
backdrop_values = [custom_props['backdrop_url']]
rating_value = item_dict['rating']
try:
@ -914,10 +936,14 @@ class UnifiedContentViewSet(viewsets.ReadOnlyModelViewSet):
'rating': rating_parsed,
'genre': item_dict['genre'] or '',
'duration': item_dict['duration'],
'duration_secs': item_dict['duration'],
'created_at': item_dict['created_at'].isoformat() if item_dict['created_at'] else None,
'updated_at': item_dict['updated_at'].isoformat() if item_dict['updated_at'] else None,
'custom_properties': item_dict['custom_properties'] or {},
'custom_properties': custom_props,
'logo': logo_data,
'movie_image': poster_candidate if item_dict['content_type'] == 'movie' else None,
'series_image': poster_candidate if item_dict['content_type'] == 'series' else None,
'backdrop_path': backdrop_values,
'content_type': item_dict['content_type']
}
results.append(formatted_item)
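A distilled restatement (not code from the commit) of the custom_properties normalisation and backdrop fallback applied in this hunk:

import json

def normalise_custom_props(raw):
    props = raw or {}
    if isinstance(props, str):
        try:
            props = json.loads(props)
        except json.JSONDecodeError:
            props = {}
    return props

def backdrops(props):
    if isinstance(props.get("backdrop_path"), list):
        return props["backdrop_path"]
    if props.get("backdrop_url"):
        return [props["backdrop_url"]]
    return []

assert backdrops(normalise_custom_props('{"backdrop_url": "http://img/b.jpg"}')) == ["http://img/b.jpg"]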

View file

@ -1,3 +1,4 @@
from django.urls import reverse
from rest_framework import serializers
from .models import (
Series, VODCategory, Movie, Episode,
@ -30,10 +31,21 @@ class VODCategorySerializer(serializers.ModelSerializer):
"m3u_accounts",
]
def _build_logo_cache_url(request, logo_id):
if not logo_id:
return None
cache_path = reverse("api:channels:logo-cache", args=[logo_id])
if request:
return request.build_absolute_uri(cache_path)
return cache_path
class SeriesSerializer(serializers.ModelSerializer):
logo = LogoSerializer(read_only=True)
episode_count = serializers.SerializerMethodField()
library_sources = serializers.SerializerMethodField()
series_image = serializers.SerializerMethodField()
backdrop_path = serializers.SerializerMethodField()
class Meta:
model = Series
@ -52,13 +64,33 @@ class SeriesSerializer(serializers.ModelSerializer):
"library_name": library.name,
"media_item_id": item.id,
}
)
)
return sources
def get_series_image(self, obj):
request = self.context.get("request")
if obj.logo_id:
cache_url = _build_logo_cache_url(request, obj.logo_id)
return cache_url or (obj.logo.url if obj.logo else None)
custom = obj.custom_properties or {}
return custom.get("poster_url") or custom.get("cover")
def get_backdrop_path(self, obj):
custom = obj.custom_properties or {}
if "backdrop_path" in custom and isinstance(custom["backdrop_path"], list):
return custom["backdrop_path"]
backdrop_url = custom.get("backdrop_url")
if backdrop_url:
return [backdrop_url]
return []
class MovieSerializer(serializers.ModelSerializer):
logo = LogoSerializer(read_only=True)
library_sources = serializers.SerializerMethodField()
movie_image = serializers.SerializerMethodField()
backdrop_path = serializers.SerializerMethodField()
class Meta:
model = Movie
@ -74,13 +106,33 @@ class MovieSerializer(serializers.ModelSerializer):
"library_name": library.name,
"media_item_id": item.id,
}
)
)
return sources
def get_movie_image(self, obj):
request = self.context.get("request")
if obj.logo_id:
cache_url = _build_logo_cache_url(request, obj.logo_id)
return cache_url or (obj.logo.url if obj.logo else None)
custom = obj.custom_properties or {}
return custom.get("poster_url") or custom.get("cover")
def get_backdrop_path(self, obj):
custom = obj.custom_properties or {}
if "backdrop_path" in custom and isinstance(custom["backdrop_path"], list):
return custom["backdrop_path"]
backdrop_url = custom.get("backdrop_url")
if backdrop_url:
return [backdrop_url]
return []
class EpisodeSerializer(serializers.ModelSerializer):
series = SeriesSerializer(read_only=True)
library_sources = serializers.SerializerMethodField()
movie_image = serializers.SerializerMethodField()
backdrop_path = serializers.SerializerMethodField()
class Meta:
model = Episode
@ -96,9 +148,35 @@ class EpisodeSerializer(serializers.ModelSerializer):
"library_name": library.name,
"media_item_id": item.id,
}
)
)
return sources
def get_movie_image(self, obj):
custom = obj.custom_properties or {}
if custom.get("poster_url"):
return custom["poster_url"]
if obj.series_id and obj.series and obj.series.logo_id:
request = self.context.get("request")
return _build_logo_cache_url(request, obj.series.logo_id) or (
obj.series.logo.url if obj.series.logo else None
)
return None
def get_backdrop_path(self, obj):
custom = obj.custom_properties or {}
if isinstance(custom.get("backdrop_path"), list):
return custom["backdrop_path"]
backdrop_url = custom.get("backdrop_url")
if backdrop_url:
return [backdrop_url]
if obj.series_id and obj.series:
series_custom = obj.series.custom_properties or {}
if isinstance(series_custom.get("backdrop_path"), list):
return series_custom["backdrop_path"]
if series_custom.get("backdrop_url"):
return [series_custom["backdrop_url"]]
return []
class M3USeriesRelationSerializer(serializers.ModelSerializer):
series = SeriesSerializer(read_only=True)

View file

@ -4,6 +4,16 @@ from datetime import timedelta
BASE_DIR = Path(__file__).resolve().parent.parent
def _env_int(key: str, default: int) -> int:
value = os.environ.get(key)
if value is None or value == "":
return default
try:
return int(value)
except (TypeError, ValueError):
return default
SECRET_KEY = "REPLACE_ME_WITH_A_REAL_SECRET"
REDIS_HOST = os.environ.get("REDIS_HOST", "localhost")
REDIS_DB = os.environ.get("REDIS_DB", "0")
@ -233,6 +243,25 @@ CELERY_BEAT_SCHEDULE = {
MEDIA_ROOT = BASE_DIR / "media"
MEDIA_URL = "/media/"
MEDIA_LIBRARY_TRANSCODE_DIR = Path(
os.environ.get("MEDIA_LIBRARY_TRANSCODE_DIR", MEDIA_ROOT / "transcoded")
)
MEDIA_LIBRARY_FFMPEG_PATH = os.environ.get("MEDIA_LIBRARY_FFMPEG_PATH", "ffmpeg")
MEDIA_LIBRARY_TRANSCODE_PRESET = os.environ.get(
"MEDIA_LIBRARY_TRANSCODE_PRESET", "veryfast"
)
MEDIA_LIBRARY_TRANSCODE_VIDEO_BITRATE = _env_int(
"MEDIA_LIBRARY_TRANSCODE_VIDEO_BITRATE", 4500
)
MEDIA_LIBRARY_TRANSCODE_AUDIO_BITRATE = _env_int(
"MEDIA_LIBRARY_TRANSCODE_AUDIO_BITRATE", 192
)
MEDIA_LIBRARY_TRANSCODE_VIDEO_CODEC = os.environ.get(
"MEDIA_LIBRARY_TRANSCODE_VIDEO_CODEC", "libx264"
)
MEDIA_LIBRARY_TRANSCODE_AUDIO_CODEC = os.environ.get(
"MEDIA_LIBRARY_TRANSCODE_AUDIO_CODEC", "aac"
)
SERVER_IP = "127.0.0.1"
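A quick reference sketch for the new knobs (defaults as set above); the final two lines show the _env_int fallback for a malformed value:

# MEDIA_LIBRARY_TRANSCODE_DIR            -> MEDIA_ROOT / "transcoded"
# MEDIA_LIBRARY_FFMPEG_PATH              -> "ffmpeg"
# MEDIA_LIBRARY_TRANSCODE_PRESET         -> "veryfast"
# MEDIA_LIBRARY_TRANSCODE_VIDEO_BITRATE  -> 4500 kbit/s (0 drops the -b:v flag)
# MEDIA_LIBRARY_TRANSCODE_AUDIO_BITRATE  -> 192 kbit/s
# MEDIA_LIBRARY_TRANSCODE_VIDEO_CODEC    -> "libx264"
# MEDIA_LIBRARY_TRANSCODE_AUDIO_CODEC    -> "aac"
import os
os.environ["MEDIA_LIBRARY_TRANSCODE_VIDEO_BITRATE"] = "not-a-number"
assert _env_int("MEDIA_LIBRARY_TRANSCODE_VIDEO_BITRATE", 4500) == 4500  # falls back to default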

View file

@ -2703,6 +2703,18 @@ export default class API {
if (options.fileId) {
params.append('file', options.fileId);
}
const startCandidate =
options.startMs ??
options.resumeMs ??
(typeof options.startSeconds === 'number'
? Math.round(options.startSeconds * 1000)
: null);
if (startCandidate != null) {
const normalized = Math.floor(Number(startCandidate));
if (!Number.isNaN(normalized) && normalized > 0) {
params.append('start_ms', String(normalized));
}
}
const query = params.toString();
const response = await request(
`${host}/api/media/items/${id}/stream/${query ? `?${query}` : ''}`

View file

@ -3,11 +3,38 @@ import React, { useEffect, useRef, useState } from 'react';
import Draggable from 'react-draggable';
import useVideoStore from '../store/useVideoStore';
import mpegts from 'mpegts.js';
import { CloseButton, Flex, Loader, Text, Box, Button, Progress, Group } from '@mantine/core';
import { Play } from 'lucide-react';
import {
CloseButton,
Flex,
Loader,
Text,
Box,
Button,
Progress,
Group,
Slider,
ActionIcon,
} from '@mantine/core';
import { Play, Pause } from 'lucide-react';
import API from '../api';
import useAuthStore from '../store/auth';
const CONTROL_HIDE_DELAY = 2500;
const formatTime = (value) => {
if (!Number.isFinite(value)) {
return '0:00';
}
const totalSeconds = Math.max(0, Math.floor(value));
const hours = Math.floor(totalSeconds / 3600);
const minutes = Math.floor((totalSeconds % 3600) / 60);
const seconds = totalSeconds % 60;
if (hours > 0) {
return `${hours}:${String(minutes).padStart(2, '0')}:${String(seconds).padStart(2, '0')}`;
}
return `${minutes}:${String(seconds).padStart(2, '0')}`;
};
export default function FloatingVideo() {
const isVisible = useVideoStore((s) => s.isVisible);
const streamUrl = useVideoStore((s) => s.streamUrl);
@ -28,15 +55,222 @@ export default function FloatingVideo() {
const countdownIntervalRef = useRef(null);
const AUTOPLAY_SECONDS = 10;
const authUser = useAuthStore((s) => s.user);
const [isPlaying, setIsPlaying] = useState(false);
const [currentTimeSeconds, setCurrentTimeSeconds] = useState(
(metadata?.startOffsetMs ?? 0) / 1000
);
const [durationSeconds, setDurationSeconds] = useState(
metadata?.durationMs ? metadata.durationMs / 1000 : 0
);
const [isScrubbing, setIsScrubbing] = useState(false);
const [scrubValueSeconds, setScrubValueSeconds] = useState(
(metadata?.startOffsetMs ?? 0) / 1000
);
const [showControls, setShowControls] = useState(true);
const controlsTimeoutRef = useRef(null);
const serverSeekInProgressRef = useRef(false);
const lastServerSeekAbsoluteRef = useRef((metadata?.startOffsetMs ?? 0) / 1000);
const wasPlayingBeforeScrubRef = useRef(false);
const clearControlsTimeout = () => {
if (controlsTimeoutRef.current) {
clearTimeout(controlsTimeoutRef.current);
controlsTimeoutRef.current = null;
}
};
const handlePointerActivity = () => {
setShowControls(true);
clearControlsTimeout();
controlsTimeoutRef.current = setTimeout(() => {
if (!isScrubbing && !serverSeekInProgressRef.current) {
setShowControls(false);
}
}, CONTROL_HIDE_DELAY);
};
const togglePlayback = () => {
const video = videoRef.current;
if (!video || serverSeekInProgressRef.current || isLoading) return;
handlePointerActivity();
if (video.paused) {
video.play().catch(() => {});
} else {
video.pause();
}
};
const performSeek = (targetSeconds) => {
if (!Number.isFinite(targetSeconds)) {
return;
}
const sanitized = Math.max(0, targetSeconds);
const startOffsetSeconds = (metadata?.startOffsetMs ?? 0) / 1000;
const video = videoRef.current;
const isLocalSeek =
!metadata?.requiresTranscode || metadata?.transcodeStatus === 'ready' || !metadata?.mediaItemId;
if (isLocalSeek) {
if (video) {
const shouldResume = wasPlayingBeforeScrubRef.current || !video.paused;
try {
video.currentTime = Math.max(0, sanitized - startOffsetSeconds);
} catch (err) {
console.debug('Failed to adjust local playback position', err);
}
if (shouldResume) {
video.play().catch(() => {});
}
setIsPlaying(!video.paused);
}
setCurrentTimeSeconds(sanitized);
setScrubValueSeconds(sanitized);
lastServerSeekAbsoluteRef.current = sanitized;
serverSeekInProgressRef.current = false;
setIsLoading(false);
handlePointerActivity();
wasPlayingBeforeScrubRef.current = false;
return;
}
if (!metadata?.fileId) {
console.debug('Seek requested without file identifier; aborting');
serverSeekInProgressRef.current = false;
setIsLoading(false);
return;
}
serverSeekInProgressRef.current = true;
lastServerSeekAbsoluteRef.current = sanitized;
setIsLoading(true);
setLoadError(null);
setShowControls(true);
clearControlsTimeout();
if (video) {
try {
video.pause();
} catch (pauseError) {
console.debug('Failed to pause prior to server seek', pauseError);
}
}
setIsPlaying(false);
const startMs = Math.round(sanitized * 1000);
API.streamMediaItem(metadata.mediaItemId, {
fileId: metadata.fileId,
startMs,
})
.then((streamInfo) => {
const playbackUrl = streamInfo?.url || streamInfo?.stream_url;
if (!playbackUrl) {
throw new Error('Streaming endpoint did not return a playable URL.');
}
const startOffsetMs = streamInfo?.start_offset_ms ?? 0;
const resumeHandledByServer = startOffsetMs > 0;
const requiresTranscode = Boolean(streamInfo?.requires_transcode);
const transcodeStatus = streamInfo?.transcode_status ?? metadata?.transcodeStatus ?? null;
const derivedDurationMs =
streamInfo?.duration_ms ??
metadata?.durationMs ??
(videoRef.current?.duration
? Math.round(
((metadata?.startOffsetMs ?? 0) / 1000 + videoRef.current.duration) * 1000
)
: undefined);
const nextMetadata = {
...metadata,
resumePositionMs: startMs,
resumeHandledByServer,
startOffsetMs:
startOffsetMs || (resumeHandledByServer ? startMs : metadata?.startOffsetMs ?? 0),
requiresTranscode,
transcodeStatus,
durationMs: derivedDurationMs,
};
wasPlayingBeforeScrubRef.current = false;
useVideoStore.getState().showVideo(playbackUrl, 'library', nextMetadata);
})
.catch((err) => {
console.error('Failed to perform server-side seek', err);
setLoadError('Unable to seek in this stream.');
setIsLoading(false);
serverSeekInProgressRef.current = false;
if (video) {
video.play().catch((playErr) => {
console.debug('Failed to resume after seek failure', playErr);
});
}
wasPlayingBeforeScrubRef.current = false;
});
};
const handleScrubChange = (value) => {
const video = videoRef.current;
if (!isScrubbing) {
setIsScrubbing(true);
wasPlayingBeforeScrubRef.current = video ? !video.paused : false;
}
if (video) {
try {
video.pause();
} catch (pauseError) {
console.debug('Failed to pause video while scrubbing', pauseError);
}
setIsPlaying(false);
}
setScrubValueSeconds(value);
handlePointerActivity();
};
const handleScrubEnd = (value) => {
setIsScrubbing(false);
performSeek(value);
};
const sendLibraryProgress = (positionSeconds, durationSeconds, completed = false) => {
if (contentType !== 'library') return;
if (!metadata?.mediaItemId || !authUser?.id) return;
const startOffsetMs = metadata?.startOffsetMs ?? 0;
const relativePosition = Number.isFinite(positionSeconds) ? positionSeconds : 0;
const absolutePositionSeconds = Math.max(0, startOffsetMs / 1000 + relativePosition);
let totalDurationMs;
if (metadata?.durationMs) {
totalDurationMs = metadata.durationMs;
} else {
const relativeDuration = Number.isFinite(durationSeconds) ? durationSeconds : 0;
const fallbackDurationSeconds =
relativeDuration > 0
? relativeDuration
: videoRef.current?.duration
? videoRef.current.duration
: 0;
totalDurationMs = Math.round(
Math.max(absolutePositionSeconds, startOffsetMs / 1000 + fallbackDurationSeconds) * 1000
);
}
let positionMs = Math.round(absolutePositionSeconds * 1000);
if (completed) {
positionMs = totalDurationMs;
} else {
positionMs = Math.min(positionMs, totalDurationMs);
}
const payload = {
user: authUser.id,
media_item: metadata.mediaItemId,
position_ms: Math.max(0, Math.floor(positionSeconds * 1000)),
duration_ms: Math.max(0, Math.floor(durationSeconds * 1000)),
position_ms: Math.max(0, positionMs),
duration_ms: Math.max(0, totalDurationMs),
completed,
};
API.setMediaWatchProgress(payload).catch((error) => {
@ -44,6 +278,26 @@ export default function FloatingVideo() {
});
};
useEffect(() => {
const start = (metadata?.startOffsetMs ?? 0) / 1000;
setCurrentTimeSeconds(start);
setScrubValueSeconds(start);
if (metadata?.durationMs) {
setDurationSeconds(metadata.durationMs / 1000);
}
wasPlayingBeforeScrubRef.current = false;
}, [metadata?.mediaItemId, metadata?.startOffsetMs, metadata?.durationMs]);
useEffect(() => {
if (isScrubbing) {
setShowControls(true);
clearControlsTimeout();
}
}, [isScrubbing]);
useEffect(() => () => clearControlsTimeout(), []);
const clearAutoPlayTimers = () => {
if (autoPlayTimerRef.current) {
clearTimeout(autoPlayTimerRef.current);
@ -63,6 +317,9 @@ export default function FloatingVideo() {
if (playerRef.current) {
setIsLoading(false);
setLoadError(null);
setIsPlaying(false);
clearControlsTimeout();
setShowControls(true);
if (videoRef.current) {
videoRef.current.removeAttribute('src');
@ -131,8 +388,21 @@ export default function FloatingVideo() {
setLoadError('Next episode is missing media files.');
return;
}
const summary = episodeDetail.watch_summary;
const resumePositionMs =
summary?.status === 'in_progress'
? summary.position_ms || 0
: episodeDetail.watch_progress?.position_ms || 0;
const initialDurationMs =
summary?.duration_ms ??
episodeDetail.watch_progress?.duration_ms ??
episodeDetail.runtime_ms ??
episodeDetail.files?.[0]?.duration_ms ??
null;
const streamInfo = await API.streamMediaItem(episodeDetail.id, {
fileId,
startMs: resumePositionMs,
});
const playbackUrl = streamInfo?.url || streamInfo?.stream_url;
if (!playbackUrl) {
@ -140,13 +410,12 @@ export default function FloatingVideo() {
return;
}
const summary = episodeDetail.watch_summary;
const resumePositionMs =
summary?.status === 'in_progress'
? summary.position_ms || 0
: episodeDetail.watch_progress?.position_ms || 0;
const startOffsetMs = streamInfo?.start_offset_ms ?? 0;
const resumeHandledByServer = startOffsetMs > 0;
const requiresTranscode = Boolean(streamInfo?.requires_transcode);
const transcodeStatus = streamInfo?.transcode_status ?? null;
const durationMs =
summary?.duration_ms || episodeDetail.watch_progress?.duration_ms || episodeDetail.runtime_ms;
streamInfo?.duration_ms ?? initialDurationMs;
const playbackSequence = {
episodeIds,
@ -166,6 +435,10 @@ export default function FloatingVideo() {
: metadata?.logo || (metadata?.showPoster ? { url: metadata.showPoster } : undefined),
progressId: episodeDetail.watch_progress?.id,
resumePositionMs,
resumeHandledByServer,
startOffsetMs,
requiresTranscode,
transcodeStatus,
durationMs,
fileId,
playbackSequence,
@ -230,8 +503,28 @@ export default function FloatingVideo() {
video.crossOrigin = 'anonymous';
// Set up event listeners
const handleLoadStart = () => setIsLoading(true);
const handleLoadedData = () => setIsLoading(false);
const handleLoadStart = () => {
setIsLoading(true);
handlePointerActivity();
};
const handleLoadedData = () => {
setIsLoading(false);
handlePointerActivity();
};
const handleLoadedMetadata = () => {
const startOffsetSeconds = (metadata?.startOffsetMs ?? 0) / 1000;
const videoDuration = Number.isFinite(video.duration) ? video.duration : 0;
const resolvedDuration = metadata?.durationMs
? Math.max(videoDuration + startOffsetSeconds, metadata.durationMs / 1000)
: videoDuration + startOffsetSeconds;
if (resolvedDuration > 0) {
setDurationSeconds(resolvedDuration);
}
const absolutePosition = startOffsetSeconds + video.currentTime;
setCurrentTimeSeconds(absolutePosition);
setScrubValueSeconds(absolutePosition);
};
const handleCanPlay = () => {
setIsLoading(false);
// Auto-play for VOD content
@ -242,6 +535,7 @@ export default function FloatingVideo() {
if (
contentType === 'library' &&
metadata?.resumePositionMs &&
!metadata?.resumeHandledByServer &&
!resumeApplied
) {
try {
@ -251,9 +545,25 @@ export default function FloatingVideo() {
console.debug('Failed to set resume position', error);
}
}
const startOffsetSeconds = (metadata?.startOffsetMs ?? 0) / 1000;
const absolutePosition = startOffsetSeconds + video.currentTime;
setCurrentTimeSeconds(absolutePosition);
setScrubValueSeconds(absolutePosition);
setIsPlaying(!video.paused);
handlePointerActivity();
// Start overlay timer when video is ready
startOverlayTimer();
};
const handlePlay = () => {
setIsPlaying(true);
handlePointerActivity();
};
const handlePause = () => {
setIsPlaying(false);
handlePointerActivity();
};
const handleError = (e) => {
setIsLoading(false);
const error = e.target.error;
@ -295,7 +605,25 @@ export default function FloatingVideo() {
const handleTimeUpdate = () => {
if (contentType !== 'library') return;
if (!video.duration || Number.isNaN(video.duration)) return;
const startOffsetSeconds = (metadata?.startOffsetMs ?? 0) / 1000;
const relativePosition = Number.isFinite(video.currentTime) ? video.currentTime : 0;
const absolutePosition = startOffsetSeconds + relativePosition;
setCurrentTimeSeconds(absolutePosition);
if (!isScrubbing) {
setScrubValueSeconds(absolutePosition);
}
if (Number.isFinite(video.duration) && video.duration > 0) {
const potentialDuration = Math.max(
durationSeconds,
startOffsetSeconds + video.duration,
metadata?.durationMs ? metadata.durationMs / 1000 : 0
);
if (potentialDuration > durationSeconds) {
setDurationSeconds(potentialDuration);
}
}
const now = Date.now();
if (now - lastProgressSentRef.current < 5000) {
return;
@ -307,6 +635,10 @@ export default function FloatingVideo() {
const handleEnded = () => {
if (contentType !== 'library') return;
if (!video.duration || Number.isNaN(video.duration)) return;
const startOffsetSeconds = (metadata?.startOffsetMs ?? 0) / 1000;
const totalSeconds = Math.max(durationSeconds, startOffsetSeconds + video.duration);
setCurrentTimeSeconds(totalSeconds);
setScrubValueSeconds(totalSeconds);
sendLibraryProgress(video.duration, video.duration, true);
const sequence = metadata?.playbackSequence;
if (sequence?.episodeIds?.length) {
@ -319,8 +651,11 @@ export default function FloatingVideo() {
// Add event listeners
video.addEventListener('loadstart', handleLoadStart);
video.addEventListener('loadedmetadata', handleLoadedMetadata);
video.addEventListener('loadeddata', handleLoadedData);
video.addEventListener('canplay', handleCanPlay);
video.addEventListener('play', handlePlay);
video.addEventListener('pause', handlePause);
video.addEventListener('error', handleError);
video.addEventListener('progress', handleProgress);
video.addEventListener('timeupdate', handleTimeUpdate);
@ -334,8 +669,11 @@ export default function FloatingVideo() {
playerRef.current = {
destroy: () => {
video.removeEventListener('loadstart', handleLoadStart);
video.removeEventListener('loadedmetadata', handleLoadedMetadata);
video.removeEventListener('loadeddata', handleLoadedData);
video.removeEventListener('canplay', handleCanPlay);
video.removeEventListener('play', handlePlay);
video.removeEventListener('pause', handlePause);
video.removeEventListener('error', handleError);
video.removeEventListener('progress', handleProgress);
video.removeEventListener('timeupdate', handleTimeUpdate);
@ -484,6 +822,23 @@ export default function FloatingVideo() {
};
}, [isVisible, streamUrl, contentType]);
useEffect(() => {
if (isVisible) {
setShowControls(true);
handlePointerActivity();
} else {
clearControlsTimeout();
}
}, [isVisible]);
useEffect(() => {
serverSeekInProgressRef.current = false;
}, [streamUrl]);
useEffect(() => {
lastServerSeekAbsoluteRef.current = (metadata?.startOffsetMs ?? 0) / 1000;
}, [metadata?.startOffsetMs]);
useEffect(() => {
clearAutoPlayTimers();
}, [metadata?.mediaItemId]);
@ -505,6 +860,26 @@ export default function FloatingVideo() {
return null;
}
const baseDurationSeconds =
Number.isFinite(durationSeconds) && durationSeconds > 0
? durationSeconds
: metadata?.durationMs
? metadata.durationMs / 1000
: 0;
const sliderMaxValue = Number.isFinite(baseDurationSeconds) && baseDurationSeconds > 0
? baseDurationSeconds
: Math.max(scrubValueSeconds, currentTimeSeconds + 1);
const sliderValue = Math.min(
Math.max(isScrubbing ? scrubValueSeconds : currentTimeSeconds, 0),
Math.max(sliderMaxValue, 1)
);
const formattedDurationLabel =
Number.isFinite(baseDurationSeconds) && baseDurationSeconds > 0
? formatTime(sliderMaxValue)
: '--:--';
const formattedCurrentTime = sliderMaxValue > 0 ? formatTime(sliderValue) : '--:--';
const showPlaybackControls = contentType !== 'live';
return (
<Draggable nodeRef={videoContainerRef}>
<div
@ -545,6 +920,8 @@ export default function FloatingVideo() {
{/* Video container with relative positioning for the overlay */}
<Box
style={{ position: 'relative' }}
onMouseMove={handlePointerActivity}
onTouchStart={handlePointerActivity}
onMouseEnter={() => {
if (contentType !== 'live' && !isLoading) {
setShowOverlay(true);
@ -562,17 +939,15 @@ export default function FloatingVideo() {
{/* Enhanced video element with better controls for VOD */}
<video
ref={videoRef}
controls
style={{
width: '100%',
height: '180px',
backgroundColor: '#000',
// Better controls styling for VOD
...(contentType !== 'live' && {
controlsList: 'nodownload',
playsInline: true,
}),
}}
playsInline
controls={contentType === 'live'}
controlsList={contentType === 'live' ? undefined : 'nodownload'}
onClick={togglePlayback}
// Add poster for VOD if available
{...(contentType !== 'live' && {
poster: metadata?.logo?.url, // Use poster if available
@ -638,11 +1013,65 @@ export default function FloatingVideo() {
</Box>
)}
{showPlaybackControls && (
<Box
style={{
position: 'absolute',
bottom: 0,
left: 0,
right: 0,
padding: '10px 14px',
background: 'linear-gradient(transparent, rgba(0,0,0,0.85))',
transition: 'opacity 0.2s ease-in-out',
opacity: showControls ? 1 : 0,
pointerEvents:
showControls && !serverSeekInProgressRef.current && !isLoading
? 'auto'
: 'none',
zIndex: 4,
}}
>
<Group gap="sm" align="center" justify="space-between">
<ActionIcon
variant="filled"
color="gray"
radius="xl"
size="lg"
onClick={togglePlayback}
aria-label={isPlaying ? 'Pause' : 'Play'}
disabled={serverSeekInProgressRef.current || isLoading}
>
{isPlaying ? <Pause size={16} /> : <Play size={16} />}
</ActionIcon>
<Text size="xs" c="gray.1" style={{ width: 44 }}>{formattedCurrentTime}</Text>
<Slider
style={{ flexGrow: 1 }}
min={0}
max={Math.max(sliderMaxValue, 1)}
step={0.1}
value={Math.min(Math.max(sliderValue, 0), Math.max(sliderMaxValue, 1))}
onChange={handleScrubChange}
onChangeEnd={handleScrubEnd}
size="sm"
label={null}
aria-label="Seek"
disabled={sliderMaxValue <= 0 || serverSeekInProgressRef.current || isLoading}
styles={{
track: { backgroundColor: 'rgba(255,255,255,0.2)' },
bar: { backgroundColor: '#1abc9c' },
thumb: { borderColor: '#1abc9c', backgroundColor: '#1abc9c' },
}}
/>
<Text size="xs" c="gray.1" style={{ width: 44, textAlign: 'right' }}>{formattedDurationLabel}</Text>
</Group>
</Box>
)}
{nextAutoplay && autoplayCountdown !== null && (
<Box
style={{
position: 'absolute',
bottom: 12,
bottom: showPlaybackControls ? 70 : 12,
left: 12,
backgroundColor: 'rgba(15, 23, 42, 0.85)',
padding: '10px 12px',

View file

@ -294,6 +294,18 @@ const SeriesModal = ({ series, opened, onClose }) => {
...episode,
provider_type: 'library',
library_media_item_id: mediaItemId,
mediaItemId,
fileId: streamInfo?.file_id,
resumePositionMs: 0,
resumeHandledByServer: Boolean(streamInfo?.start_offset_ms),
startOffsetMs: streamInfo?.start_offset_ms ?? 0,
requiresTranscode: Boolean(streamInfo?.requires_transcode),
transcodeStatus: streamInfo?.transcode_status ?? null,
durationMs:
streamInfo?.duration_ms ??
episode?.runtime_ms ??
episode?.files?.[0]?.duration_ms ??
null,
};
showVideo(streamInfo.url, 'library', playbackMeta);
} catch (error) {

View file

@ -279,7 +279,7 @@ const Sidebar = ({ collapsed, toggleDrawer, drawerWidth, miniDrawerWidth }) => {
return (
<Stack key={item.label} spacing={4}>
<UnstyledButton
className={`navlink ${expanded ? 'navlink-active' : ''} ${collapsed ? 'navlink-collapsed' : ''}`}
className={`navlink ${childActive ? 'navlink-active' : ''} ${collapsed ? 'navlink-collapsed' : ''}`}
onClick={() => setLibraryExpanded((prev) => !prev)}
>
{item.icon}

View file

@ -278,6 +278,18 @@ const VODModal = ({ vod, opened, onClose }) => {
...vodToPlay,
provider_type: 'library',
library_media_item_id: mediaItemId,
mediaItemId,
fileId: streamInfo?.file_id,
resumePositionMs: 0,
resumeHandledByServer: Boolean(streamInfo?.start_offset_ms),
startOffsetMs: streamInfo?.start_offset_ms ?? 0,
requiresTranscode: Boolean(streamInfo?.requires_transcode),
transcodeStatus: streamInfo?.transcode_status ?? null,
durationMs:
streamInfo?.duration_ms ??
vodToPlay?.runtime_ms ??
vodToPlay?.files?.[0]?.duration_ms ??
null,
};
showVideo(streamInfo.url, 'library', playbackMeta);
} catch (error) {

View file

@ -246,7 +246,15 @@ const MediaDetailModal = ({ opened, onClose }) => {
setResumeMode(mode);
setStartingPlayback(true);
try {
const streamInfo = await API.streamMediaItem(activeItem.id, { fileId });
const resumePositionMs =
mode === 'resume'
? resumePrompt?.position_ms || activeProgress?.position_ms || 0
: 0;
const streamInfo = await API.streamMediaItem(activeItem.id, {
fileId,
startMs: resumePositionMs,
});
const playbackUrl = streamInfo?.url || streamInfo?.stream_url;
if (!playbackUrl) {
notifications.show({
@ -257,10 +265,11 @@ const MediaDetailModal = ({ opened, onClose }) => {
return;
}
const resumePositionMs =
mode === 'resume'
? resumePrompt?.position_ms || activeProgress?.position_ms || 0
: 0;
const startOffsetMs = streamInfo?.start_offset_ms ?? 0;
const resumeHandledByServer = startOffsetMs > 0;
const primaryFile = activeItem.files?.[0];
const requiresTranscode = Boolean(streamInfo?.requires_transcode);
const transcodeStatus = streamInfo?.transcode_status ?? null;
showVideo(playbackUrl, 'library', {
mediaItemId: activeItem.id,
@ -270,8 +279,17 @@ const MediaDetailModal = ({ opened, onClose }) => {
logo: activeItem.poster_url ? { url: activeItem.poster_url } : undefined,
progressId: activeProgress?.id,
resumePositionMs,
resumeHandledByServer,
startOffsetMs,
requiresTranscode,
transcodeStatus,
durationMs:
resumePrompt?.duration_ms || activeProgress?.duration_ms || activeItem.runtime_ms,
streamInfo?.duration_ms ??
resumePrompt?.duration_ms ??
activeProgress?.duration_ms ??
activeItem.runtime_ms ??
primaryFile?.duration_ms ??
null,
fileId,
});
@ -344,17 +362,6 @@ const MediaDetailModal = ({ opened, onClose }) => {
return;
}
const streamInfo = await API.streamMediaItem(episodeDetail.id, { fileId: episodeFileId });
const playbackUrl = streamInfo?.url || streamInfo?.stream_url;
if (!playbackUrl) {
notifications.show({
title: 'Playback error',
message: 'Streaming endpoint did not return a playable URL.',
color: 'red',
});
return;
}
const resolvedSequence = Array.isArray(sequence) ? sequence : [];
const episodeIds = resolvedSequence.length
? resolvedSequence.map((ep) => ep.id)
@ -372,8 +379,29 @@ const MediaDetailModal = ({ opened, onClose }) => {
? episodeSummary.position_ms || 0
: episodeProgress?.position_ms || 0;
const streamInfo = await API.streamMediaItem(episodeDetail.id, {
fileId: episodeFileId,
startMs: resumePositionMs,
});
const playbackUrl = streamInfo?.url || streamInfo?.stream_url;
if (!playbackUrl) {
notifications.show({
title: 'Playback error',
message: 'Streaming endpoint did not return a playable URL.',
color: 'red',
});
return;
}
const resumeHandledByServer = Boolean(streamInfo?.start_offset_ms);
const durationMs =
episodeSummary?.duration_ms || episodeProgress?.duration_ms || episodeDetail.runtime_ms;
streamInfo?.duration_ms ??
episodeSummary?.duration_ms ??
episodeProgress?.duration_ms ??
episodeDetail.runtime_ms ??
episodeDetail.files?.[0]?.duration_ms ??
null;
showVideo(playbackUrl, 'library', {
mediaItemId: episodeDetail.id,
@ -391,6 +419,7 @@ const MediaDetailModal = ({ opened, onClose }) => {
showPoster: activeItem?.poster_url,
progressId: episodeProgress?.id,
resumePositionMs,
resumeHandledByServer,
durationMs,
fileId: episodeFileId,
playbackSequence,

View file

@ -15,7 +15,14 @@ const useVideoStore = create((set) => ({
isVisible: true,
streamUrl: url,
contentType: type,
metadata: metadata,
metadata: metadata
? {
startOffsetMs: 0,
requiresTranscode: false,
transcodeStatus: null,
...metadata,
}
: null,
}),
hideVideo: () =>