Mirror of https://github.com/Dispatcharr/Dispatcharr.git (synced 2026-01-23 02:35:14 +00:00)

Commit ed7e16483b: Merge branch 'dev' of https://github.com/Dispatcharr/Dispatcharr into pr/maluueu/166

238 changed files with 54792 additions and 7565 deletions

@@ -11,6 +11,10 @@
**/.toolstarget
**/.vs
**/.vscode
**/.history
**/media
**/models
**/static
**/*.*proj.user
**/*.dbmdl
**/*.jfm

@@ -26,3 +30,4 @@
**/values.dev.yaml
LICENSE
README.md
data/

.github/ISSUE_TEMPLATE/bug_report.yml (2 changes, vendored)

@@ -1,7 +1,7 @@
name: Bug Report
description: I have an issue with Dispatcharr
title: "[Bug]: "
-labels: ["Bug", "Triage"]
+labels: ["Triage"]
+type: "Bug"
projects: []
assignees: []

.github/ISSUE_TEMPLATE/feature_request.yml (2 changes, vendored)

@@ -1,7 +1,7 @@
name: Feature request
description: I want to suggest a new feature for Dispatcharr
title: "[Feature]: "
-labels: ["Feature Request"]
+labels: ["Triage"]
+type: "Feature"
projects: []
assignees: []

.github/workflows/base-image.yml (135 changes, vendored)

@@ -2,42 +2,37 @@ name: Base Image Build

on:
  push:
-    branches: [ main, dev ]
+    branches: [main, dev]
    paths:
      - 'docker/DispatcharrBase'
      - '.github/workflows/base-image.yml'
      - 'requirements.txt'
  pull_request:
-    branches: [ main, dev ]
+    branches: [main, dev]
    paths:
      - 'docker/DispatcharrBase'
      - '.github/workflows/base-image.yml'
      - 'requirements.txt'
  workflow_dispatch: # Allow manual triggering

permissions:
  contents: write # For managing releases and pushing tags
  packages: write # For publishing to GitHub Container Registry

jobs:
-  build-base-image:
-    runs-on: ubuntu-latest
+  prepare:
+    runs-on: ubuntu-24.04
+    outputs:
+      repo_owner: ${{ steps.meta.outputs.repo_owner }}
+      repo_name: ${{ steps.meta.outputs.repo_name }}
+      branch_tag: ${{ steps.meta.outputs.branch_tag }}
+      timestamp: ${{ steps.timestamp.outputs.timestamp }}
    steps:
      - uses: actions/checkout@v3
        with:
          fetch-depth: 0
          token: ${{ secrets.GITHUB_TOKEN }}

      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v2

      - name: Login to GitHub Container Registry
        uses: docker/login-action@v2
        with:
          registry: ghcr.io
          username: ${{ github.actor }}
          password: ${{ secrets.GITHUB_TOKEN }}

      - name: Generate timestamp for build
        id: timestamp
        run: |

@@ -66,13 +61,111 @@ jobs:
            echo "branch_tag=base-${BRANCH}" >> $GITHUB_OUTPUT
          fi

  docker:
    needs: [prepare]
    strategy:
      fail-fast: false
      matrix:
        platform: [amd64, arm64]
        include:
          - platform: amd64
            runner: ubuntu-24.04
          - platform: arm64
            runner: ubuntu-24.04-arm
    runs-on: ${{ matrix.runner }}
    steps:
      - uses: actions/checkout@v3
        with:
          fetch-depth: 0
          token: ${{ secrets.GITHUB_TOKEN }}

      - name: Configure Git
        run: |
          git config user.name "GitHub Actions"
          git config user.email "actions@github.com"

      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v2

      - name: Login to GitHub Container Registry
        uses: docker/login-action@v2
        with:
          registry: ghcr.io
          username: ${{ github.actor }}
          password: ${{ secrets.GITHUB_TOKEN }}

      - name: Login to Docker Hub
        uses: docker/login-action@v2
        with:
          registry: docker.io
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}

      - name: Build and push Docker base image
        uses: docker/build-push-action@v4
        with:
          context: .
          file: ./docker/DispatcharrBase
-         push: true
-         platforms: linux/amd64,linux/arm64
+         push: ${{ github.event_name != 'pull_request' }}
+         platforms: linux/${{ matrix.platform }}
          tags: |
-           ghcr.io/${{ steps.meta.outputs.repo_owner }}/${{ steps.meta.outputs.repo_name }}:base
-           ghcr.io/${{ steps.meta.outputs.repo_owner }}/${{ steps.meta.outputs.repo_name }}:base-${{ steps.timestamp.outputs.timestamp }}
+           ghcr.io/${{ needs.prepare.outputs.repo_owner }}/${{ needs.prepare.outputs.repo_name }}:${{ needs.prepare.outputs.branch_tag }}-${{ matrix.platform }}
+           ghcr.io/${{ needs.prepare.outputs.repo_owner }}/${{ needs.prepare.outputs.repo_name }}:${{ needs.prepare.outputs.branch_tag }}-${{ needs.prepare.outputs.timestamp }}-${{ matrix.platform }}
+           docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${{ needs.prepare.outputs.repo_name }}:${{ needs.prepare.outputs.branch_tag }}-${{ matrix.platform }}
+           docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${{ needs.prepare.outputs.repo_name }}:${{ needs.prepare.outputs.branch_tag }}-${{ needs.prepare.outputs.timestamp }}-${{ matrix.platform }}
          build-args: |
            REPO_OWNER=${{ needs.prepare.outputs.repo_owner }}
            REPO_NAME=${{ needs.prepare.outputs.repo_name }}
            BRANCH=${{ github.ref_name }}
            REPO_URL=https://github.com/${{ github.repository }}
            TIMESTAMP=${{ needs.prepare.outputs.timestamp }}

  create-manifest:
    needs: [prepare, docker]
    runs-on: ubuntu-24.04
    if: ${{ github.event_name != 'pull_request' }}
    steps:
      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v2

      - name: Login to GitHub Container Registry
        uses: docker/login-action@v2
        with:
          registry: ghcr.io
          username: ${{ github.actor }}
          password: ${{ secrets.GITHUB_TOKEN }}

      - name: Login to Docker Hub
        uses: docker/login-action@v2
        with:
          registry: docker.io
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}

      - name: Create multi-arch manifest tags
        run: |
          set -euo pipefail
          OWNER=${{ needs.prepare.outputs.repo_owner }}
          REPO=${{ needs.prepare.outputs.repo_name }}
          BRANCH_TAG=${{ needs.prepare.outputs.branch_tag }}
          TIMESTAMP=${{ needs.prepare.outputs.timestamp }}

          echo "Creating multi-arch manifest for ${OWNER}/${REPO}"

          # GitHub Container Registry manifests
          # branch tag (e.g. base or base-dev)
          docker buildx imagetools create --tag ghcr.io/${OWNER}/${REPO}:${BRANCH_TAG} \
            ghcr.io/${OWNER}/${REPO}:${BRANCH_TAG}-amd64 ghcr.io/${OWNER}/${REPO}:${BRANCH_TAG}-arm64

          # branch + timestamp tag
          docker buildx imagetools create --tag ghcr.io/${OWNER}/${REPO}:${BRANCH_TAG}-${TIMESTAMP} \
            ghcr.io/${OWNER}/${REPO}:${BRANCH_TAG}-${TIMESTAMP}-amd64 ghcr.io/${OWNER}/${REPO}:${BRANCH_TAG}-${TIMESTAMP}-arm64

          # Docker Hub manifests
          # branch tag (e.g. base or base-dev)
          docker buildx imagetools create --tag docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${REPO}:${BRANCH_TAG} \
            docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${REPO}:${BRANCH_TAG}-amd64 docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${REPO}:${BRANCH_TAG}-arm64

          # branch + timestamp tag
          docker buildx imagetools create --tag docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${REPO}:${BRANCH_TAG}-${TIMESTAMP} \
            docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${REPO}:${BRANCH_TAG}-${TIMESTAMP}-amd64 docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${REPO}:${BRANCH_TAG}-${TIMESTAMP}-arm64

.github/workflows/ci.yml (200 changes, vendored)

@@ -2,19 +2,84 @@ name: CI Pipeline

on:
  push:
-    branches: [ dev ]
+    branches: [dev]
  pull_request:
-    branches: [ dev ]
-  workflow_dispatch: # Allow manual triggering
+    branches: [dev]
+  workflow_dispatch:

# Add explicit permissions for the workflow
permissions:
-  contents: write # For managing releases and pushing tags
-  packages: write # For publishing to GitHub Container Registry
+  contents: write
+  packages: write

jobs:
-  build:
-    runs-on: ubuntu-latest
+  prepare:
+    runs-on: ubuntu-24.04
+    # compute a single timestamp, version, and repo metadata for the entire workflow
+    outputs:
+      repo_owner: ${{ steps.meta.outputs.repo_owner }}
+      repo_name: ${{ steps.meta.outputs.repo_name }}
+      branch_tag: ${{ steps.meta.outputs.branch_tag }}
+      version: ${{ steps.version.outputs.version }}
+      timestamp: ${{ steps.timestamp.outputs.timestamp }}

    steps:
      - uses: actions/checkout@v3
        with:
          fetch-depth: 0
          token: ${{ secrets.GITHUB_TOKEN }}

      - name: Generate timestamp for build
        id: timestamp
        run: |
          TIMESTAMP=$(date -u +'%Y%m%d%H%M%S')
          echo "timestamp=${TIMESTAMP}" >> $GITHUB_OUTPUT

      - name: Extract version info
        id: version
        run: |
          VERSION=$(python -c "import version; print(version.__version__)")
          echo "version=${VERSION}" >> $GITHUB_OUTPUT

      - name: Set repository and image metadata
        id: meta
        run: |
          REPO_OWNER=$(echo "${{ github.repository_owner }}" | tr '[:upper:]' '[:lower:]')
          echo "repo_owner=${REPO_OWNER}" >> $GITHUB_OUTPUT

          REPO_NAME=$(echo "${{ github.repository }}" | cut -d '/' -f 2 | tr '[:upper:]' '[:lower:]')
          echo "repo_name=${REPO_NAME}" >> $GITHUB_OUTPUT

          if [[ "${{ github.ref }}" == "refs/heads/main" ]]; then
            echo "branch_tag=latest" >> $GITHUB_OUTPUT
            echo "is_main=true" >> $GITHUB_OUTPUT
          elif [[ "${{ github.ref }}" == "refs/heads/dev" ]]; then
            echo "branch_tag=dev" >> $GITHUB_OUTPUT
            echo "is_main=false" >> $GITHUB_OUTPUT
          else
            BRANCH=$(echo "${{ github.ref }}" | sed 's/refs\/heads\///' | sed 's/[^a-zA-Z0-9]/-/g')
            echo "branch_tag=${BRANCH}" >> $GITHUB_OUTPUT
            echo "is_main=false" >> $GITHUB_OUTPUT
          fi

          if [[ "${{ github.event.pull_request.head.repo.fork }}" == "true" ]]; then
            echo "is_fork=true" >> $GITHUB_OUTPUT
          else
            echo "is_fork=false" >> $GITHUB_OUTPUT
          fi

  docker:
    needs: [prepare]
    strategy:
      fail-fast: false
      matrix:
        platform: [amd64, arm64]
        include:
          - platform: amd64
            runner: ubuntu-24.04
          - platform: arm64
            runner: ubuntu-24.04-arm
    runs-on: ${{ matrix.runner }}
    # no per-job outputs here; shared metadata comes from the `prepare` job
    steps:
      - uses: actions/checkout@v3
        with:

@@ -45,66 +110,85 @@ jobs:
          username: ${{ github.actor }}
          password: ${{ secrets.GITHUB_TOKEN }}

-     - name: Generate timestamp for build
-       id: timestamp
-       run: |
-         TIMESTAMP=$(date -u +'%Y%m%d%H%M%S')
-         echo "timestamp=${TIMESTAMP}" >> $GITHUB_OUTPUT
+     - name: Login to Docker Hub
+       uses: docker/login-action@v2
+       with:
+         registry: docker.io
+         username: ${{ secrets.DOCKERHUB_USERNAME }}
+         password: ${{ secrets.DOCKERHUB_TOKEN }}

-     - name: Extract version info
-       id: version
-       run: |
-         VERSION=$(python -c "import version; print(version.__version__)")
-         echo "version=${VERSION}" >> $GITHUB_OUTPUT
-         echo "sha_short=${GITHUB_SHA::7}" >> $GITHUB_OUTPUT

-     - name: Set repository and image metadata
-       id: meta
-       run: |
-         # Get lowercase repository owner
-         REPO_OWNER=$(echo "${{ github.repository_owner }}" | tr '[:upper:]' '[:lower:]')
-         echo "repo_owner=${REPO_OWNER}" >> $GITHUB_OUTPUT

-         # Get repository name
-         REPO_NAME=$(echo "${{ github.repository }}" | cut -d '/' -f 2 | tr '[:upper:]' '[:lower:]')
-         echo "repo_name=${REPO_NAME}" >> $GITHUB_OUTPUT

-         # Determine branch name
-         if [[ "${{ github.ref }}" == "refs/heads/main" ]]; then
-           echo "branch_tag=latest" >> $GITHUB_OUTPUT
-           echo "is_main=true" >> $GITHUB_OUTPUT
-         elif [[ "${{ github.ref }}" == "refs/heads/dev" ]]; then
-           echo "branch_tag=dev" >> $GITHUB_OUTPUT
-           echo "is_main=false" >> $GITHUB_OUTPUT
-         else
-           # For other branches, use the branch name
-           BRANCH=$(echo "${{ github.ref }}" | sed 's/refs\/heads\///' | sed 's/[^a-zA-Z0-9]/-/g')
-           echo "branch_tag=${BRANCH}" >> $GITHUB_OUTPUT
-           echo "is_main=false" >> $GITHUB_OUTPUT
-         fi

-         # Determine if this is from a fork
-         if [[ "${{ github.event.pull_request.head.repo.fork }}" == "true" ]]; then
-           echo "is_fork=true" >> $GITHUB_OUTPUT
-         else
-           echo "is_fork=false" >> $GITHUB_OUTPUT
-         fi
+     # use metadata from the prepare job

      - name: Build and push Docker image
        uses: docker/build-push-action@v4
        with:
          context: .
          push: ${{ github.event_name != 'pull_request' }}
-         platforms: linux/amd64,linux/arm64
+         # Build only the platform for this matrix job to avoid running amd64
+         # stages under qemu on an arm64 runner (and vice-versa). This makes
+         # the matrix runner's platform the one built by buildx.
+         platforms: linux/${{ matrix.platform }}
+         # push arch-specific tags from each matrix job (they will be combined
+         # into a multi-arch manifest in a follow-up job)
          tags: |
-           ghcr.io/${{ steps.meta.outputs.repo_owner }}/${{ steps.meta.outputs.repo_name }}:${{ steps.meta.outputs.branch_tag }}
-           ghcr.io/${{ steps.meta.outputs.repo_owner }}/${{ steps.meta.outputs.repo_name }}:${{ steps.version.outputs.version }}-${{ steps.timestamp.outputs.timestamp }}
-           ghcr.io/${{ steps.meta.outputs.repo_owner }}/${{ steps.meta.outputs.repo_name }}:${{ steps.version.outputs.sha_short }}
+           ghcr.io/${{ needs.prepare.outputs.repo_owner }}/${{ needs.prepare.outputs.repo_name }}:${{ needs.prepare.outputs.branch_tag }}-${{ matrix.platform }}
+           ghcr.io/${{ needs.prepare.outputs.repo_owner }}/${{ needs.prepare.outputs.repo_name }}:${{ needs.prepare.outputs.version }}-${{ needs.prepare.outputs.timestamp }}-${{ matrix.platform }}
+           docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${{ needs.prepare.outputs.repo_name }}:${{ needs.prepare.outputs.branch_tag }}-${{ matrix.platform }}
+           docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${{ needs.prepare.outputs.repo_name }}:${{ needs.prepare.outputs.version }}-${{ needs.prepare.outputs.timestamp }}-${{ matrix.platform }}
          build-args: |
-           REPO_OWNER=${{ steps.meta.outputs.repo_owner }}
-           REPO_NAME=${{ steps.meta.outputs.repo_name }}
+           REPO_OWNER=${{ needs.prepare.outputs.repo_owner }}
+           REPO_NAME=${{ needs.prepare.outputs.repo_name }}
            BASE_TAG=base
            BRANCH=${{ github.ref_name }}
            REPO_URL=https://github.com/${{ github.repository }}
-           TIMESTAMP=${{ steps.timestamp.outputs.timestamp }}
+           TIMESTAMP=${{ needs.prepare.outputs.timestamp }}
          file: ./docker/Dockerfile

  create-manifest:
    # wait for prepare and all matrix builds to finish
    needs: [prepare, docker]
    runs-on: ubuntu-24.04
    if: ${{ github.event_name != 'pull_request' }}
    steps:
      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v2

      - name: Login to GitHub Container Registry
        uses: docker/login-action@v2
        with:
          registry: ghcr.io
          username: ${{ github.actor }}
          password: ${{ secrets.GITHUB_TOKEN }}

      - name: Login to Docker Hub
        uses: docker/login-action@v2
        with:
          registry: docker.io
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}

      - name: Create multi-arch manifest tags
        run: |
          set -euo pipefail
          OWNER=${{ needs.prepare.outputs.repo_owner }}
          REPO=${{ needs.prepare.outputs.repo_name }}
          BRANCH_TAG=${{ needs.prepare.outputs.branch_tag }}
          VERSION=${{ needs.prepare.outputs.version }}
          TIMESTAMP=${{ needs.prepare.outputs.timestamp }}

          echo "Creating multi-arch manifest for ${OWNER}/${REPO}"

          # branch tag (e.g. latest or dev)
          docker buildx imagetools create --tag ghcr.io/${OWNER}/${REPO}:${BRANCH_TAG} \
            ghcr.io/${OWNER}/${REPO}:${BRANCH_TAG}-amd64 ghcr.io/${OWNER}/${REPO}:${BRANCH_TAG}-arm64

          # version + timestamp tag
          docker buildx imagetools create --tag ghcr.io/${OWNER}/${REPO}:${VERSION}-${TIMESTAMP} \
            ghcr.io/${OWNER}/${REPO}:${VERSION}-${TIMESTAMP}-amd64 ghcr.io/${OWNER}/${REPO}:${VERSION}-${TIMESTAMP}-arm64

          # also create Docker Hub manifests using the same username
          docker buildx imagetools create --tag docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${REPO}:${BRANCH_TAG} \
            docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${REPO}:${BRANCH_TAG}-amd64 docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${REPO}:${BRANCH_TAG}-arm64

          docker buildx imagetools create --tag docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${REPO}:${VERSION}-${TIMESTAMP} \
            docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${REPO}:${VERSION}-${TIMESTAMP}-amd64 docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${REPO}:${VERSION}-${TIMESTAMP}-arm64

.github/workflows/release.yml (138 changes, vendored)

@@ -15,16 +15,21 @@ on:

# Add explicit permissions for the workflow
permissions:
  contents: write # For managing releases and pushing tags
  packages: write # For publishing to GitHub Container Registry

jobs:
-  release:
-    runs-on: ubuntu-latest
+  prepare:
+    runs-on: ubuntu-24.04
+    outputs:
+      new_version: ${{ steps.update_version.outputs.new_version }}
+      repo_owner: ${{ steps.meta.outputs.repo_owner }}
+      repo_name: ${{ steps.meta.outputs.repo_name }}
    steps:
      - uses: actions/checkout@v3
        with:
          fetch-depth: 0
          token: ${{ secrets.GITHUB_TOKEN }}

      - name: Configure Git
        run: |

@@ -38,14 +43,45 @@ jobs:
          NEW_VERSION=$(python -c "import version; print(f'{version.__version__}')")
          echo "new_version=${NEW_VERSION}" >> $GITHUB_OUTPUT

-     - name: Set lowercase repo owner
-       id: repo_owner
+     - name: Set repository metadata
+       id: meta
        run: |
          REPO_OWNER=$(echo "${{ github.repository_owner }}" | tr '[:upper:]' '[:lower:]')
-         echo "lowercase=${REPO_OWNER}" >> $GITHUB_OUTPUT
+         echo "repo_owner=${REPO_OWNER}" >> $GITHUB_OUTPUT

-     - name: Set up QEMU
-       uses: docker/setup-qemu-action@v2
+         REPO_NAME=$(echo "${{ github.repository }}" | cut -d '/' -f 2 | tr '[:upper:]' '[:lower:]')
+         echo "repo_name=${REPO_NAME}" >> $GITHUB_OUTPUT

+     - name: Commit and Tag
+       run: |
+         git add version.py
+         git commit -m "Release v${{ steps.update_version.outputs.new_version }}"
+         git tag -a "v${{ steps.update_version.outputs.new_version }}" -m "Release v${{ steps.update_version.outputs.new_version }}"
+         git push origin main --tags

  docker:
    needs: [prepare]
    strategy:
      fail-fast: false
      matrix:
        platform: [amd64, arm64]
        include:
          - platform: amd64
            runner: ubuntu-24.04
          - platform: arm64
            runner: ubuntu-24.04-arm
    runs-on: ${{ matrix.runner }}
    steps:
      - uses: actions/checkout@v3
        with:
          fetch-depth: 0
          token: ${{ secrets.GITHUB_TOKEN }}
          ref: main

      - name: Configure Git
        run: |
          git config user.name "GitHub Actions"
          git config user.email "actions@github.com"

      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v2

@@ -57,36 +93,88 @@ jobs:
          username: ${{ github.actor }}
          password: ${{ secrets.GITHUB_TOKEN }}

-     - name: Commit and Tag
-       run: |
-         git add version.py
-         git commit -m "Release v${{ steps.update_version.outputs.new_version }}"
-         git tag -a "v${{ steps.update_version.outputs.new_version }}" -m "Release v${{ steps.update_version.outputs.new_version }}"
-         git push origin main --tags
+     - name: Login to Docker Hub
+       uses: docker/login-action@v2
+       with:
+         registry: docker.io
+         username: ${{ secrets.DOCKERHUB_USERNAME }}
+         password: ${{ secrets.DOCKERHUB_TOKEN }}

-     - name: Build and Push Release Image
+     - name: Build and push Docker image
        uses: docker/build-push-action@v4
        with:
          context: .
          push: true
-         platforms: linux/amd64,linux/arm64, #linux/arm/v7 # Multi-arch support for releases
+         platforms: linux/${{ matrix.platform }}
          tags: |
-           ghcr.io/${{ steps.repo_owner.outputs.lowercase }}/dispatcharr:latest
-           ghcr.io/${{ steps.repo_owner.outputs.lowercase }}/dispatcharr:${{ steps.update_version.outputs.new_version }}
-           ghcr.io/${{ steps.repo_owner.outputs.lowercase }}/dispatcharr:latest-amd64
-           ghcr.io/${{ steps.repo_owner.outputs.lowercase }}/dispatcharr:latest-arm64
-           ghcr.io/${{ steps.repo_owner.outputs.lowercase }}/dispatcharr:${{ steps.update_version.outputs.new_version }}-amd64
-           ghcr.io/${{ steps.repo_owner.outputs.lowercase }}/dispatcharr:${{ steps.update_version.outputs.new_version }}-arm64
+           ghcr.io/${{ needs.prepare.outputs.repo_owner }}/${{ needs.prepare.outputs.repo_name }}:latest-${{ matrix.platform }}
+           ghcr.io/${{ needs.prepare.outputs.repo_owner }}/${{ needs.prepare.outputs.repo_name }}:${{ needs.prepare.outputs.new_version }}-${{ matrix.platform }}
+           docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${{ needs.prepare.outputs.repo_name }}:latest-${{ matrix.platform }}
+           docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${{ needs.prepare.outputs.repo_name }}:${{ needs.prepare.outputs.new_version }}-${{ matrix.platform }}
+         build-args: |
+           REPO_OWNER=${{ needs.prepare.outputs.repo_owner }}
+           REPO_NAME=${{ needs.prepare.outputs.repo_name }}
+           BRANCH=${{ github.ref_name }}
+           REPO_URL=https://github.com/${{ github.repository }}
          file: ./docker/Dockerfile

  create-manifest:
    needs: [prepare, docker]
    runs-on: ubuntu-24.04
    steps:
      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v2

      - name: Login to GitHub Container Registry
        uses: docker/login-action@v2
        with:
          registry: ghcr.io
          username: ${{ github.actor }}
          password: ${{ secrets.GITHUB_TOKEN }}

      - name: Login to Docker Hub
        uses: docker/login-action@v2
        with:
          registry: docker.io
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}

      - name: Create multi-arch manifest tags
        run: |
          set -euo pipefail
          OWNER=${{ needs.prepare.outputs.repo_owner }}
          REPO=${{ needs.prepare.outputs.repo_name }}
          VERSION=${{ needs.prepare.outputs.new_version }}

          echo "Creating multi-arch manifest for ${OWNER}/${REPO}"

          # GitHub Container Registry manifests
          # latest tag
          docker buildx imagetools create --tag ghcr.io/${OWNER}/${REPO}:latest \
            ghcr.io/${OWNER}/${REPO}:latest-amd64 ghcr.io/${OWNER}/${REPO}:latest-arm64

          # version tag
          docker buildx imagetools create --tag ghcr.io/${OWNER}/${REPO}:${VERSION} \
            ghcr.io/${OWNER}/${REPO}:${VERSION}-amd64 ghcr.io/${OWNER}/${REPO}:${VERSION}-arm64

          # Docker Hub manifests
          # latest tag
          docker buildx imagetools create --tag docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${REPO}:latest \
            docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${REPO}:latest-amd64 docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${REPO}:latest-arm64

          # version tag
          docker buildx imagetools create --tag docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${REPO}:${VERSION} \
            docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${REPO}:${VERSION}-amd64 docker.io/${{ secrets.DOCKERHUB_ORGANIZATION }}/${REPO}:${VERSION}-arm64

  create-release:
    needs: [prepare, create-manifest]
    runs-on: ubuntu-24.04
    steps:
      - name: Create GitHub Release
        uses: softprops/action-gh-release@v1
        with:
-         tag_name: v${{ steps.update_version.outputs.new_version }}
-         name: Release v${{ steps.update_version.outputs.new_version }}
+         tag_name: v${{ needs.prepare.outputs.new_version }}
+         name: Release v${{ needs.prepare.outputs.new_version }}
          draft: false
          prerelease: false
          token: ${{ secrets.GITHUB_TOKEN }}

Plugins.md (new file, 286 lines)

@@ -0,0 +1,286 @@
# Dispatcharr Plugins

This document explains how to build, install, and use Python plugins in Dispatcharr. It covers discovery, the plugin interface, settings, actions, how to access application APIs, and examples.

---

## Quick Start

1) Create a folder under `/app/data/plugins/my_plugin/` (host path `data/plugins/my_plugin/` in the repo).

2) Add a `plugin.py` file exporting a `Plugin` class:

```
# /app/data/plugins/my_plugin/plugin.py
class Plugin:
    name = "My Plugin"
    version = "0.1.0"
    description = "Does something useful"

    # Settings fields rendered by the UI and persisted by the backend
    fields = [
        {"id": "enabled", "label": "Enabled", "type": "boolean", "default": True},
        {"id": "limit", "label": "Item limit", "type": "number", "default": 5},
        {"id": "mode", "label": "Mode", "type": "select", "default": "safe",
         "options": [
             {"value": "safe", "label": "Safe"},
             {"value": "fast", "label": "Fast"},
         ]},
        {"id": "note", "label": "Note", "type": "string", "default": ""},
    ]

    # Actions appear as buttons. Clicking one calls run(action, params, context)
    actions = [
        {"id": "do_work", "label": "Do Work", "description": "Process items"},
    ]

    def run(self, action: str, params: dict, context: dict):
        settings = context.get("settings", {})
        logger = context.get("logger")

        if action == "do_work":
            limit = int(settings.get("limit", 5))
            mode = settings.get("mode", "safe")
            logger.info(f"My Plugin running with limit={limit}, mode={mode}")
            # Do a small amount of work here. Schedule Celery tasks for heavy work.
            return {"status": "ok", "processed": limit, "mode": mode}

        return {"status": "error", "message": f"Unknown action {action}"}
```

3) Open the Plugins page in the UI, click the refresh icon to reload discovery, then configure and run your plugin.

---

## Where Plugins Live

- Default directory: `/app/data/plugins` inside the container.
- Override with env var: `DISPATCHARR_PLUGINS_DIR`.
- Each plugin is a directory containing either:
  - `plugin.py` exporting a `Plugin` class, or
  - a Python package (`__init__.py`) exporting a `Plugin` class.

The directory name (lowercased, spaces as `_`) is used as the registry key and module import path (e.g. `my_plugin.plugin`).
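
As a sketch, a plugins directory holding both forms (folder names here are illustrative) would look like:

```
/app/data/plugins/
  my_plugin/
    plugin.py      # module form; imported as my_plugin.plugin
  other_plugin/
    __init__.py    # package form; the package itself exports Plugin
```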

---

## Discovery & Lifecycle

- Discovery runs at server startup and on-demand when:
  - Fetching the plugins list from the UI
  - Hitting `POST /api/plugins/plugins/reload/`
- The loader imports each plugin module and instantiates `Plugin()`.
- Metadata (name, version, description) and a per-plugin settings JSON are stored in the DB.

Backend code:
- Loader: `apps/plugins/loader.py`
- API Views: `apps/plugins/api_views.py`
- API URLs: `apps/plugins/api_urls.py`
- Model: `apps/plugins/models.py` (stores `enabled` flag and `settings` per plugin)

---

## Plugin Interface

Export a `Plugin` class. Supported attributes and behavior:

- `name` (str): Human-readable name.
- `version` (str): Semantic version string.
- `description` (str): Short description.
- `fields` (list): Settings schema used by the UI to render controls.
- `actions` (list): Available actions; the UI renders a Run button for each.
- `run(action, params, context)` (callable): Invoked when a user clicks an action.

### Settings Schema

Supported field `type`s:
- `boolean`
- `number`
- `string`
- `select` (requires `options`: `[{"value": ..., "label": ...}, ...]`)

Common field keys:
- `id` (str): Settings key.
- `label` (str): Label shown in the UI.
- `type` (str): One of the above.
- `default` (any): Default value used until saved.
- `help_text` (str, optional): Shown under the control.
- `options` (list, for select): List of `{value, label}`.

The UI automatically renders settings and persists them. The backend stores settings in `PluginConfig.settings`.

Read settings in `run` via `context["settings"]`.
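
A minimal sketch, reusing the field ids from the Quick Start schema (unsaved fields fall back to the schema defaults):

```
def run(self, action, params, context):
    settings = context.get("settings", {})
    # settings arrive as the saved JSON; coerce types and supply defaults
    enabled = bool(settings.get("enabled", True))
    limit = int(settings.get("limit", 5))
    mode = settings.get("mode", "safe")
    if not enabled:
        return {"status": "ok", "message": "Plugin is disabled via settings"}
    ...
```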

### Actions

Each action is a dict:
- `id` (str): Unique action id.
- `label` (str): Button label.
- `description` (str, optional): Helper text.

Clicking an action calls your plugin's `run(action, params, context)` and shows a notification with the result or error.

### Action Confirmation (Modal)

Developers can request a confirmation modal per action using the `confirm` key on the action. Options:

- Boolean: `confirm: true` will show a default confirmation modal.
- Object: `confirm: { required: true, title: '...', message: '...' }` to customize the modal title and message.

Example:
```
actions = [
    {
        "id": "danger_run",
        "label": "Do Something Risky",
        "description": "Runs a job that affects many records.",
        "confirm": {"required": True, "title": "Proceed?", "message": "This will modify many records."},
    }
]
```

---

## Accessing Dispatcharr APIs from Plugins

Plugins are server-side Python code running within the Django application. You can:

- Import models and run queries/updates:
```
from apps.m3u.models import M3UAccount
from apps.epg.models import EPGSource
from apps.channels.models import Channel
from core.models import CoreSettings
```

- Dispatch Celery tasks for heavy work (recommended):
```
from apps.m3u.tasks import refresh_m3u_accounts  # apps/m3u/tasks.py
from apps.epg.tasks import refresh_all_epg_data  # apps/epg/tasks.py

refresh_m3u_accounts.delay()
refresh_all_epg_data.delay()
```

- Send WebSocket updates:
```
from core.utils import send_websocket_update

send_websocket_update('updates', 'update', {"type": "plugin", "plugin": "my_plugin", "message": "Done"})
```

- Use transactions:
```
from django.db import transaction

with transaction.atomic():
    # bulk updates here
    ...
```

- Log via provided context or standard logging:
```
def run(self, action, params, context):
    logger = context.get("logger")  # already configured
    logger.info("running action %s", action)
```

Prefer Celery tasks (`.delay()`) to keep `run` fast and non-blocking.

---

## REST Endpoints (for UI and tooling)

- List plugins: `GET /api/plugins/plugins/`
  - Response: `{ "plugins": [{ key, name, version, description, enabled, fields, settings, actions }, ...] }`
- Reload discovery: `POST /api/plugins/plugins/reload/`
- Import plugin: `POST /api/plugins/plugins/import/` with form-data file field `file`
- Update settings: `POST /api/plugins/plugins/<key>/settings/` with `{"settings": {...}}`
- Run action: `POST /api/plugins/plugins/<key>/run/` with `{"action": "id", "params": {...}}`
- Enable/disable: `POST /api/plugins/plugins/<key>/enabled/` with `{"enabled": true|false}` (see the client sketch below)
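
For tooling outside the UI, a hedged sketch using Python's `requests` (the base URL, port, and bearer-token auth are assumptions here; adjust them to your deployment and however you obtain a JWT):

```
import requests

BASE = "http://localhost:9191"  # assumed host/port of your Dispatcharr instance
HEADERS = {"Authorization": "Bearer <access-token>"}  # assumed JWT auth

# list plugins with their settings and actions
plugins = requests.get(f"{BASE}/api/plugins/plugins/", headers=HEADERS).json()["plugins"]

# enable a plugin by key (the plugin's folder name, e.g. "my_plugin")
requests.post(f"{BASE}/api/plugins/plugins/my_plugin/enabled/",
              json={"enabled": True}, headers=HEADERS)

# run an action and inspect the result
resp = requests.post(f"{BASE}/api/plugins/plugins/my_plugin/run/",
                     json={"action": "do_work", "params": {}}, headers=HEADERS)
print(resp.status_code, resp.json())
```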

Notes:
- When disabled, a plugin cannot run actions; the backend returns HTTP 403.

---

## Importing Plugins

- In the UI, click the Import button on the Plugins page and upload a `.zip` containing a plugin folder.
- The archive should contain either `plugin.py` or a Python package (`__init__.py`).
- On success, the UI shows the plugin name/description and lets you enable it immediately (plugins are disabled by default).

---

## Enabling / Disabling Plugins

- Each plugin has a persisted `enabled` flag (default: disabled) and an `ever_enabled` flag in the DB (`apps/plugins/models.py`).
- New plugins are disabled by default and require an explicit enable.
- The first time a plugin is enabled, the UI shows a trust warning modal explaining that plugins can run arbitrary server-side code.
- The Plugins page shows a toggle in the card header. Turning it off dims the card and disables the Run button.
- Backend enforcement: attempts to run an action for a disabled plugin return HTTP 403.

---

## Example: Refresh All Sources Plugin

Path: `data/plugins/refresh_all/plugin.py`

```
class Plugin:
    name = "Refresh All Sources"
    version = "1.0.0"
    description = "Force refresh all M3U accounts and EPG sources."

    fields = [
        {"id": "confirm", "label": "Require confirmation", "type": "boolean", "default": True,
         "help_text": "If enabled, the UI should ask before running."}
    ]

    actions = [
        {"id": "refresh_all", "label": "Refresh All M3Us and EPGs",
         "description": "Queues background refresh for all active M3U accounts and EPG sources."}
    ]

    def run(self, action: str, params: dict, context: dict):
        if action == "refresh_all":
            from apps.m3u.tasks import refresh_m3u_accounts
            from apps.epg.tasks import refresh_all_epg_data

            refresh_m3u_accounts.delay()
            refresh_all_epg_data.delay()
            return {"status": "queued", "message": "Refresh jobs queued"}
        return {"status": "error", "message": f"Unknown action: {action}"}
```

---

## Best Practices

- Keep `run` short and schedule heavy operations via Celery tasks.
- Validate and sanitize `params` received from the UI (see the sketch after this list).
- Use database transactions for bulk or related updates.
- Log actionable messages for troubleshooting.
- Only write files under `/data` or `/app/data` paths.
- Treat plugins as trusted code: they run with full app permissions.
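
A hedged sketch of the first two practices combined (the `batch` parameter is illustrative; the task import mirrors the refresh example above):

```
def run(self, action, params, context):
    logger = context.get("logger")
    if action != "refresh_all":
        return {"status": "error", "message": f"Unknown action: {action}"}

    # validate UI-supplied params before acting on them
    try:
        batch = int(params.get("batch", 1))
    except (TypeError, ValueError):
        return {"status": "error", "message": "batch must be an integer"}
    if not 1 <= batch <= 100:
        return {"status": "error", "message": "batch must be between 1 and 100"}

    # keep run fast: queue the heavy work on Celery instead of doing it inline
    from apps.m3u.tasks import refresh_m3u_accounts
    refresh_m3u_accounts.delay()
    logger.info("queued refresh (batch=%s)", batch)
    return {"status": "queued", "batch": batch}
```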

---

## Troubleshooting

- Plugin not listed: ensure the folder exists and contains `plugin.py` with a `Plugin` class.
- Import errors: the folder name is the import name; avoid spaces or exotic characters.
- No confirmation: include a boolean field with `id: "confirm"` and set it to true or default true.
- HTTP 403 on run: the plugin is disabled; enable it from the toggle or via the `enabled/` endpoint.

---

## Contributing

- Keep dependencies minimal. Vendoring small helpers into the plugin folder is acceptable.
- Use the existing task and model APIs where possible; propose extensions if you need new capabilities.

---

## Internals Reference

- Loader: `apps/plugins/loader.py`
- API Views: `apps/plugins/api_views.py`
- API URLs: `apps/plugins/api_urls.py`
- Model: `apps/plugins/models.py`
- Frontend page: `frontend/src/pages/Plugins.jsx`
- Sidebar entry: `frontend/src/components/Sidebar.jsx`

@@ -22,6 +22,7 @@ Dispatcharr has officially entered **BETA**, bringing powerful new features and
📊 **Real-Time Stats Dashboard** — Live insights into stream health and client activity\
🧠 **EPG Auto-Match** — Match program data to channels automatically\
⚙️ **Streamlink + FFmpeg Support** — Flexible backend options for streaming and recording\
+🎬 **VOD Management** — Full Video on Demand support with movies and TV series\
🧼 **UI & UX Enhancements** — Smoother, faster, more responsive interface\
🛁 **Output Compatibility** — HDHomeRun, M3U, and XMLTV EPG support for Plex, Jellyfin, and more

@@ -31,6 +32,7 @@ Dispatcharr has officially entered **BETA**, bringing powerful new features and
✅ **Full IPTV Control** — Import, organize, proxy, and monitor IPTV streams on your own terms\
✅ **Smart Playlist Handling** — M3U import, filtering, grouping, and failover support\
+✅ **VOD Content Management** — Organize movies and TV series with metadata and streaming\
✅ **Reliable EPG Integration** — Match and manage TV guide data with ease\
✅ **Clean & Responsive Interface** — Modern design that gets out of your way\
✅ **Fully Self-Hosted** — Total control, zero reliance on third-party services

@@ -1,41 +1,39 @@
from django.urls import path, include
from rest_framework.routers import DefaultRouter
from .api_views import (
-   AuthViewSet, UserViewSet, GroupViewSet,
-   list_permissions, initialize_superuser
+   AuthViewSet,
+   UserViewSet,
+   GroupViewSet,
+   TokenObtainPairView,
+   TokenRefreshView,
+   list_permissions,
+   initialize_superuser,
)
-from rest_framework_simplejwt import views as jwt_views

-app_name = 'accounts'
+app_name = "accounts"

# 🔹 Register ViewSets with a Router
router = DefaultRouter()
-router.register(r'users', UserViewSet, basename='user')
-router.register(r'groups', GroupViewSet, basename='group')
+router.register(r"users", UserViewSet, basename="user")
+router.register(r"groups", GroupViewSet, basename="group")

# 🔹 Custom Authentication Endpoints
-auth_view = AuthViewSet.as_view({
-   'post': 'login'
-})
+auth_view = AuthViewSet.as_view({"post": "login"})

-logout_view = AuthViewSet.as_view({
-   'post': 'logout'
-})
+logout_view = AuthViewSet.as_view({"post": "logout"})

# 🔹 Define API URL patterns
urlpatterns = [
    # Authentication
-   path('auth/login/', auth_view, name='user-login'),
-   path('auth/logout/', logout_view, name='user-logout'),
+   path("auth/login/", auth_view, name="user-login"),
+   path("auth/logout/", logout_view, name="user-logout"),
    # Superuser API
-   path('initialize-superuser/', initialize_superuser, name='initialize_superuser'),
+   path("initialize-superuser/", initialize_superuser, name="initialize_superuser"),
    # Permissions API
-   path('permissions/', list_permissions, name='list-permissions'),
-   path('token/', jwt_views.TokenObtainPairView.as_view(), name='token_obtain_pair'),
-   path('token/refresh/', jwt_views.TokenRefreshView.as_view(), name='token_refresh'),
+   path("permissions/", list_permissions, name="list-permissions"),
+   path("token/", TokenObtainPairView.as_view(), name="token_obtain_pair"),
+   path("token/refresh/", TokenRefreshView.as_view(), name="token_refresh"),
]

# 🔹 Include ViewSet routes

@@ -2,16 +2,52 @@ from django.contrib.auth import authenticate, login, logout
from django.contrib.auth.models import Group, Permission
from django.http import JsonResponse, HttpResponse
from django.views.decorators.csrf import csrf_exempt
-from rest_framework.decorators import api_view, permission_classes
-from rest_framework.permissions import IsAuthenticated, AllowAny
+from rest_framework.decorators import api_view, permission_classes, action
from rest_framework.response import Response
-from rest_framework import viewsets
+from rest_framework import viewsets, status
from drf_yasg.utils import swagger_auto_schema
from drf_yasg import openapi
import json
+from .permissions import IsAdmin, Authenticated
+from dispatcharr.utils import network_access_allowed

from .models import User
from .serializers import UserSerializer, GroupSerializer, PermissionSerializer
+from rest_framework_simplejwt.views import TokenObtainPairView, TokenRefreshView


+class TokenObtainPairView(TokenObtainPairView):
+    def post(self, request, *args, **kwargs):
+        # Custom logic here
+        if not network_access_allowed(request, "UI"):
+            return Response({"error": "Forbidden"}, status=status.HTTP_403_FORBIDDEN)
+
+        # Get the response from the parent class first
+        response = super().post(request, *args, **kwargs)
+
+        # If login was successful, update last_login
+        if response.status_code == 200:
+            username = request.data.get("username")
+            if username:
+                from django.utils import timezone
+                try:
+                    user = User.objects.get(username=username)
+                    user.last_login = timezone.now()
+                    user.save(update_fields=['last_login'])
+                except User.DoesNotExist:
+                    pass  # User doesn't exist, but login somehow succeeded
+
+        return response
+
+
+class TokenRefreshView(TokenRefreshView):
+    def post(self, request, *args, **kwargs):
+        # Custom logic here
+        if not network_access_allowed(request, "UI"):
+            return Response({"error": "Unauthorized"}, status=status.HTTP_403_FORBIDDEN)
+
+        return super().post(request, *args, **kwargs)


@csrf_exempt  # In production, consider CSRF protection strategies or ensure this endpoint is only accessible when no superuser exists.
def initialize_superuser(request):

@@ -26,15 +62,20 @@ def initialize_superuser(request):
        password = data.get("password")
        email = data.get("email", "")
        if not username or not password:
-           return JsonResponse({"error": "Username and password are required."}, status=400)
+           return JsonResponse(
+               {"error": "Username and password are required."}, status=400
+           )
        # Create the superuser
-       User.objects.create_superuser(username=username, password=password, email=email)
+       User.objects.create_superuser(
+           username=username, password=password, email=email, user_level=10
+       )
        return JsonResponse({"superuser_exists": True})
    except Exception as e:
        return JsonResponse({"error": str(e)}, status=500)
    # For GET requests, indicate no superuser exists
    return JsonResponse({"superuser_exists": False})


# 🔹 1) Authentication APIs
class AuthViewSet(viewsets.ViewSet):
    """Handles user login and logout"""

@@ -43,36 +84,45 @@ class AuthViewSet(viewsets.ViewSet):
        operation_description="Authenticate and log in a user",
        request_body=openapi.Schema(
            type=openapi.TYPE_OBJECT,
-           required=['username', 'password'],
+           required=["username", "password"],
            properties={
-               'username': openapi.Schema(type=openapi.TYPE_STRING),
-               'password': openapi.Schema(type=openapi.TYPE_STRING, format=openapi.FORMAT_PASSWORD)
+               "username": openapi.Schema(type=openapi.TYPE_STRING),
+               "password": openapi.Schema(
+                   type=openapi.TYPE_STRING, format=openapi.FORMAT_PASSWORD
+               ),
            },
        ),
        responses={200: "Login successful", 400: "Invalid credentials"},
    )
    def login(self, request):
        """Logs in a user and returns user details"""
-       username = request.data.get('username')
-       password = request.data.get('password')
+       username = request.data.get("username")
+       password = request.data.get("password")
        user = authenticate(request, username=username, password=password)

        if user:
            login(request, user)
-           return Response({
-               "message": "Login successful",
-               "user": {
-                   "id": user.id,
-                   "username": user.username,
-                   "email": user.email,
-                   "groups": list(user.groups.values_list('name', flat=True))
-               }
-           })
+           # Update last_login timestamp
+           from django.utils import timezone
+           user.last_login = timezone.now()
+           user.save(update_fields=['last_login'])
+
+           return Response(
+               {
+                   "message": "Login successful",
+                   "user": {
+                       "id": user.id,
+                       "username": user.username,
+                       "email": user.email,
+                       "groups": list(user.groups.values_list("name", flat=True)),
+                   },
+               }
+           )
        return Response({"error": "Invalid credentials"}, status=400)

    @swagger_auto_schema(
        operation_description="Log out the current user",
-       responses={200: "Logout successful"}
+       responses={200: "Logout successful"},
    )
    def logout(self, request):
        """Logs out the authenticated user"""

@@ -83,13 +133,19 @@ class AuthViewSet(viewsets.ViewSet):
# 🔹 2) User Management APIs
class UserViewSet(viewsets.ModelViewSet):
    """Handles CRUD operations for Users"""

-   queryset = User.objects.all()
+   queryset = User.objects.all().prefetch_related('channel_profiles')
    serializer_class = UserSerializer
-   permission_classes = [IsAuthenticated]
+
+   def get_permissions(self):
+       if self.action == "me":
+           return [Authenticated()]
+
+       return [IsAdmin()]

    @swagger_auto_schema(
        operation_description="Retrieve a list of users",
-       responses={200: UserSerializer(many=True)}
+       responses={200: UserSerializer(many=True)},
    )
    def list(self, request, *args, **kwargs):
        return super().list(request, *args, **kwargs)

@@ -110,17 +166,28 @@ class UserViewSet(viewsets.ModelViewSet):
    def destroy(self, request, *args, **kwargs):
        return super().destroy(request, *args, **kwargs)

+   @swagger_auto_schema(
+       method="get",
+       operation_description="Get active user information",
+   )
+   @action(detail=False, methods=["get"], url_path="me")
+   def me(self, request):
+       user = request.user
+       serializer = UserSerializer(user)
+       return Response(serializer.data)


# 🔹 3) Group Management APIs
class GroupViewSet(viewsets.ModelViewSet):
    """Handles CRUD operations for Groups"""

    queryset = Group.objects.all()
    serializer_class = GroupSerializer
-   permission_classes = [IsAuthenticated]
+   permission_classes = [Authenticated]

    @swagger_auto_schema(
        operation_description="Retrieve a list of groups",
-       responses={200: GroupSerializer(many=True)}
+       responses={200: GroupSerializer(many=True)},
    )
    def list(self, request, *args, **kwargs):
        return super().list(request, *args, **kwargs)

@@ -144,12 +211,12 @@ class GroupViewSet(viewsets.ModelViewSet):
# 🔹 4) Permissions List API
@swagger_auto_schema(
-   method='get',
+   method="get",
    operation_description="Retrieve a list of all permissions",
-   responses={200: PermissionSerializer(many=True)}
+   responses={200: PermissionSerializer(many=True)},
)
-@api_view(['GET'])
-@permission_classes([IsAuthenticated])
+@api_view(["GET"])
+@permission_classes([Authenticated])
def list_permissions(request):
    """Returns a list of all available permissions"""
    permissions = Permission.objects.all()

@@ -1,6 +1,7 @@
from django.apps import AppConfig


class AccountsConfig(AppConfig):
-   default_auto_field = 'django.db.models.BigAutoField'
-   name = 'apps.accounts'
+   default_auto_field = "django.db.models.BigAutoField"
+   name = "apps.accounts"
+   verbose_name = "Accounts & Authentication"

@@ -0,0 +1,43 @@
# Generated by Django 5.1.6 on 2025-05-18 15:47

from django.db import migrations, models


def set_user_level_to_10(apps, schema_editor):
    User = apps.get_model("accounts", "User")
    User.objects.update(user_level=10)


class Migration(migrations.Migration):

    dependencies = [
        ("accounts", "0001_initial"),
        ("dispatcharr_channels", "0021_channel_user_level"),
    ]

    operations = [
        migrations.RemoveField(
            model_name="user",
            name="channel_groups",
        ),
        migrations.AddField(
            model_name="user",
            name="channel_profiles",
            field=models.ManyToManyField(
                blank=True,
                related_name="users",
                to="dispatcharr_channels.channelprofile",
            ),
        ),
        migrations.AddField(
            model_name="user",
            name="user_level",
            field=models.IntegerField(default=0),
        ),
        migrations.AddField(
            model_name="user",
            name="custom_properties",
            field=models.TextField(blank=True, null=True),
        ),
        migrations.RunPython(set_user_level_to_10),
    ]

@@ -0,0 +1,18 @@
# Generated by Django 5.2.4 on 2025-09-02 14:30

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('accounts', '0002_remove_user_channel_groups_user_channel_profiles_and_more'),
    ]

    operations = [
        migrations.AlterField(
            model_name='user',
            name='custom_properties',
            field=models.JSONField(blank=True, default=dict, null=True),
        ),
    ]

@@ -2,17 +2,26 @@
from django.db import models
from django.contrib.auth.models import AbstractUser, Permission


class User(AbstractUser):
    """
    Custom user model for Dispatcharr.
    Inherits from Django's AbstractUser to add additional fields if needed.
    """

    class UserLevel(models.IntegerChoices):
        STREAMER = 0, "Streamer"
        STANDARD = 1, "Standard User"
        ADMIN = 10, "Admin"

    avatar_config = models.JSONField(default=dict, blank=True, null=True)
-   channel_groups = models.ManyToManyField(
-       'dispatcharr_channels.ChannelGroup',  # Updated reference to renamed model
+   channel_profiles = models.ManyToManyField(
+       "dispatcharr_channels.ChannelProfile",
        blank=True,
-       related_name="users"
+       related_name="users",
    )
    user_level = models.IntegerField(default=UserLevel.STREAMER)
    custom_properties = models.JSONField(default=dict, blank=True, null=True)

    def __str__(self):
        return self.username

apps/accounts/permissions.py (new file, 56 lines)

@@ -0,0 +1,56 @@
from rest_framework.permissions import IsAuthenticated
from .models import User
from dispatcharr.utils import network_access_allowed


class Authenticated(IsAuthenticated):
    def has_permission(self, request, view):
        is_authenticated = super().has_permission(request, view)
        network_allowed = network_access_allowed(request, "UI")

        return is_authenticated and network_allowed


class IsStandardUser(Authenticated):
    def has_permission(self, request, view):
        if not super().has_permission(request, view):
            return False

        return request.user and request.user.user_level >= User.UserLevel.STANDARD


class IsAdmin(Authenticated):
    def has_permission(self, request, view):
        if not super().has_permission(request, view):
            return False

        return request.user.user_level >= 10


class IsOwnerOfObject(Authenticated):
    def has_object_permission(self, request, view, obj):
        if not super().has_permission(request, view):
            return False

        is_admin = IsAdmin().has_permission(request, view)
        is_owner = request.user in obj.users.all()

        return is_admin or is_owner


permission_classes_by_action = {
    "list": [IsStandardUser],
    "create": [IsAdmin],
    "retrieve": [IsStandardUser],
    "update": [IsAdmin],
    "partial_update": [IsAdmin],
    "destroy": [IsAdmin],
}

permission_classes_by_method = {
    "GET": [IsStandardUser],
    "POST": [IsAdmin],
    "PATCH": [IsAdmin],
    "PUT": [IsAdmin],
    "DELETE": [IsAdmin],
}

@@ -1,13 +1,14 @@
from rest_framework import serializers
from django.contrib.auth.models import Group, Permission
from .models import User
+from apps.channels.models import ChannelProfile


# 🔹 Fix for Permission serialization
class PermissionSerializer(serializers.ModelSerializer):
    class Meta:
        model = Permission
-       fields = ['id', 'name', 'codename']
+       fields = ["id", "name", "codename"]


# 🔹 Fix for Group serialization

@@ -18,15 +19,61 @@ class GroupSerializer(serializers.ModelSerializer):

    class Meta:
        model = Group
-       fields = ['id', 'name', 'permissions']
+       fields = ["id", "name", "permissions"]


# 🔹 Fix for User serialization
class UserSerializer(serializers.ModelSerializer):
    groups = serializers.SlugRelatedField(
        many=True, queryset=Group.objects.all(), slug_field="name"
    )  # ✅ Fix ManyToMany `_meta` error
    password = serializers.CharField(write_only=True)
    channel_profiles = serializers.PrimaryKeyRelatedField(
        queryset=ChannelProfile.objects.all(), many=True, required=False
    )

    class Meta:
        model = User
-       fields = ['id', 'username', 'email', 'groups']
+       fields = [
+           "id",
+           "username",
+           "email",
+           "user_level",
+           "password",
+           "channel_profiles",
+           "custom_properties",
+           "avatar_config",
+           "is_active",
+           "is_staff",
+           "is_superuser",
+           "last_login",
+           "date_joined",
+           "first_name",
+           "last_name",
+       ]

    def create(self, validated_data):
        channel_profiles = validated_data.pop("channel_profiles", [])

        user = User(**validated_data)
        user.set_password(validated_data["password"])
        user.is_active = True
        user.save()

        user.channel_profiles.set(channel_profiles)

        return user

    def update(self, instance, validated_data):
        password = validated_data.pop("password", None)
        channel_profiles = validated_data.pop("channel_profiles", None)

        for attr, value in validated_data.items():
            setattr(instance, attr, value)

        if password:
            instance.set_password(password)

        instance.save()

        if channel_profiles is not None:
            instance.channel_profiles.set(channel_profiles)

        return instance

@@ -5,6 +5,7 @@ from django.db.models.signals import post_save
from django.dispatch import receiver
from .models import User


@receiver(post_save, sender=User)
def handle_new_user(sender, instance, created, **kwargs):
    if created:
@@ -1,11 +1,10 @@
from django.urls import path, include
from django.urls import path, include, re_path
from drf_yasg.views import get_schema_view
from drf_yasg import openapi
from rest_framework.permissions import AllowAny

app_name = 'api'

# Configure Swagger Schema
schema_view = get_schema_view(
    openapi.Info(
        title="Dispatcharr API",

@@ -26,6 +25,8 @@ urlpatterns = [
    path('hdhr/', include(('apps.hdhr.api_urls', 'hdhr'), namespace='hdhr')),
    path('m3u/', include(('apps.m3u.api_urls', 'm3u'), namespace='m3u')),
    path('core/', include(('core.api_urls', 'core'), namespace='core')),
    path('plugins/', include(('apps.plugins.api_urls', 'plugins'), namespace='plugins')),
    path('vod/', include(('apps.vod.api_urls', 'vod'), namespace='vod')),
    # path('output/', include(('apps.output.api_urls', 'output'), namespace='output')),
    #path('player/', include(('apps.player.api_urls', 'player'), namespace='player')),
    #path('settings/', include(('apps.settings.api_urls', 'settings'), namespace='settings')),

@@ -34,7 +35,7 @@ urlpatterns = [

    # Swagger Documentation api_urls
    path('swagger/', schema_view.with_ui('swagger', cache_timeout=0), name='schema-swagger-ui'),
    re_path(r'^swagger/?$', schema_view.with_ui('swagger', cache_timeout=0), name='schema-swagger-ui'),
    path('redoc/', schema_view.with_ui('redoc', cache_timeout=0), name='schema-redoc'),
    path('swagger.json', schema_view.without_ui(cache_timeout=0), name='schema-json'),
]
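Reviewer note: the switch from path('swagger/') to re_path(r'^swagger/?$') makes the trailing slash optional, so both /api/swagger and /api/swagger/ resolve to the UI. A standalone check of the regex:

import re

assert re.match(r'^swagger/?$', 'swagger')    # no trailing slash
assert re.match(r'^swagger/?$', 'swagger/')   # with trailing slash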
@@ -6,12 +6,21 @@ from .api_views import (
    ChannelGroupViewSet,
    BulkDeleteStreamsAPIView,
    BulkDeleteChannelsAPIView,
    BulkDeleteLogosAPIView,
    CleanupUnusedLogosAPIView,
    LogoViewSet,
    ChannelProfileViewSet,
    UpdateChannelMembershipAPIView,
    BulkUpdateChannelMembershipAPIView,
    RecordingViewSet,
    RecurringRecordingRuleViewSet,
    GetChannelStreamsAPIView,
    SeriesRulesAPIView,
    DeleteSeriesRuleAPIView,
    EvaluateSeriesRulesAPIView,
    BulkRemoveSeriesRecordingsAPIView,
    BulkDeleteUpcomingRecordingsAPIView,
    ComskipConfigAPIView,
)

app_name = 'channels'  # for DRF routing

@@ -23,14 +32,24 @@ router.register(r'channels', ChannelViewSet, basename='channel')
router.register(r'logos', LogoViewSet, basename='logo')
router.register(r'profiles', ChannelProfileViewSet, basename='profile')
router.register(r'recordings', RecordingViewSet, basename='recording')
router.register(r'recurring-rules', RecurringRecordingRuleViewSet, basename='recurring-rule')

urlpatterns = [
    # Bulk delete is a single APIView, not a ViewSet
    path('streams/bulk-delete/', BulkDeleteStreamsAPIView.as_view(), name='bulk_delete_streams'),
    path('channels/bulk-delete/', BulkDeleteChannelsAPIView.as_view(), name='bulk_delete_channels'),
    path('logos/bulk-delete/', BulkDeleteLogosAPIView.as_view(), name='bulk_delete_logos'),
    path('logos/cleanup/', CleanupUnusedLogosAPIView.as_view(), name='cleanup_unused_logos'),
    path('channels/<int:channel_id>/streams/', GetChannelStreamsAPIView.as_view(), name='get_channel_streams'),
    path('profiles/<int:profile_id>/channels/<int:channel_id>/', UpdateChannelMembershipAPIView.as_view(), name='update_channel_membership'),
    path('profiles/<int:profile_id>/channels/bulk-update/', BulkUpdateChannelMembershipAPIView.as_view(), name='bulk_update_channel_membership'),
    # DVR series rules (order matters: specific routes before catch-all slug)
    path('series-rules/', SeriesRulesAPIView.as_view(), name='series_rules'),
    path('series-rules/evaluate/', EvaluateSeriesRulesAPIView.as_view(), name='evaluate_series_rules'),
    path('series-rules/bulk-remove/', BulkRemoveSeriesRecordingsAPIView.as_view(), name='bulk_remove_series_recordings'),
    path('series-rules/<str:tvg_id>/', DeleteSeriesRuleAPIView.as_view(), name='delete_series_rule'),
    path('recordings/bulk-delete-upcoming/', BulkDeleteUpcomingRecordingsAPIView.as_view(), name='bulk_delete_upcoming_recordings'),
    path('dvr/comskip-config/', ComskipConfigAPIView.as_view(), name='comskip_config'),
]

urlpatterns += router.urls
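Reviewer note: the ordering comment above matters because Django resolves URL patterns top-down. A sketch of the failure mode if the catch-all slug were listed first (view names reused from this file):

# Illustrative only: do NOT register in this order.
from django.urls import path

shadowed = [
    path('series-rules/<str:tvg_id>/', DeleteSeriesRuleAPIView.as_view()),
    # /series-rules/evaluate/ would now resolve with tvg_id='evaluate',
    # so the evaluate endpoint below could never be reached.
    path('series-rules/evaluate/', EvaluateSeriesRulesAPIView.as_view()),
]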
File diff suppressed because it is too large.

apps/channels/migrations/0021_channel_user_level.py (new file, 18 lines)
@@ -0,0 +1,18 @@
# Generated by Django 5.1.6 on 2025-05-18 14:31

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('dispatcharr_channels', '0020_alter_channel_channel_number'),
    ]

    operations = [
        migrations.AddField(
            model_name='channel',
            name='user_level',
            field=models.IntegerField(default=0),
        ),
    ]
@@ -0,0 +1,35 @@
# Generated by Django 5.1.6 on 2025-07-13 23:08

import django.db.models.deletion
from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('dispatcharr_channels', '0021_channel_user_level'),
        ('m3u', '0012_alter_m3uaccount_refresh_interval'),
    ]

    operations = [
        migrations.AddField(
            model_name='channel',
            name='auto_created',
            field=models.BooleanField(default=False, help_text='Whether this channel was automatically created via M3U auto channel sync'),
        ),
        migrations.AddField(
            model_name='channel',
            name='auto_created_by',
            field=models.ForeignKey(blank=True, help_text='The M3U account that auto-created this channel', null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='auto_created_channels', to='m3u.m3uaccount'),
        ),
        migrations.AddField(
            model_name='channelgroupm3uaccount',
            name='auto_channel_sync',
            field=models.BooleanField(default=False, help_text='Automatically create/delete channels to match streams in this group'),
        ),
        migrations.AddField(
            model_name='channelgroupm3uaccount',
            name='auto_sync_channel_start',
            field=models.FloatField(blank=True, help_text='Starting channel number for auto-created channels in this group', null=True),
        ),
    ]
@@ -0,0 +1,23 @@
# Generated by Django 5.1.6 on 2025-07-29 02:39

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('dispatcharr_channels', '0022_channel_auto_created_channel_auto_created_by_and_more'),
    ]

    operations = [
        migrations.AddField(
            model_name='stream',
            name='stream_stats',
            field=models.JSONField(blank=True, help_text='JSON object containing stream statistics like video codec, resolution, etc.', null=True),
        ),
        migrations.AddField(
            model_name='stream',
            name='stream_stats_updated_at',
            field=models.DateTimeField(blank=True, db_index=True, help_text='When stream statistics were last updated', null=True),
        ),
    ]
@@ -0,0 +1,19 @@
# Generated by Django 5.2.4 on 2025-08-22 20:14

import django.db.models.deletion
from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('dispatcharr_channels', '0023_stream_stream_stats_stream_stream_stats_updated_at'),
    ]

    operations = [
        migrations.AlterField(
            model_name='channelgroupm3uaccount',
            name='channel_group',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='m3u_accounts', to='dispatcharr_channels.channelgroup'),
        ),
    ]
@@ -0,0 +1,28 @@
# Generated by Django 5.2.4 on 2025-09-02 14:30

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('dispatcharr_channels', '0024_alter_channelgroupm3uaccount_channel_group'),
    ]

    operations = [
        migrations.AlterField(
            model_name='channelgroupm3uaccount',
            name='custom_properties',
            field=models.JSONField(blank=True, default=dict, null=True),
        ),
        migrations.AlterField(
            model_name='recording',
            name='custom_properties',
            field=models.JSONField(blank=True, default=dict, null=True),
        ),
        migrations.AlterField(
            model_name='stream',
            name='custom_properties',
            field=models.JSONField(blank=True, default=dict, null=True),
        ),
    ]
apps/channels/migrations/0026_recurringrecordingrule.py (new file, 31 lines)
@@ -0,0 +1,31 @@
# Generated by Django 5.0.14 on 2025-09-18 14:56

import django.db.models.deletion
from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('dispatcharr_channels', '0025_alter_channelgroupm3uaccount_custom_properties_and_more'),
    ]

    operations = [
        migrations.CreateModel(
            name='RecurringRecordingRule',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('days_of_week', models.JSONField(default=list)),
                ('start_time', models.TimeField()),
                ('end_time', models.TimeField()),
                ('enabled', models.BooleanField(default=True)),
                ('name', models.CharField(blank=True, max_length=255)),
                ('created_at', models.DateTimeField(auto_now_add=True)),
                ('updated_at', models.DateTimeField(auto_now=True)),
                ('channel', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='recurring_rules', to='dispatcharr_channels.channel')),
            ],
            options={
                'ordering': ['channel', 'start_time'],
            },
        ),
    ]
@@ -0,0 +1,23 @@
# Generated by Django 5.2.4 on 2025-10-05 20:50

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('dispatcharr_channels', '0026_recurringrecordingrule'),
    ]

    operations = [
        migrations.AddField(
            model_name='recurringrecordingrule',
            name='end_date',
            field=models.DateField(blank=True, null=True),
        ),
        migrations.AddField(
            model_name='recurringrecordingrule',
            name='start_date',
            field=models.DateField(blank=True, null=True),
        ),
    ]
@@ -0,0 +1,25 @@
# Generated by Django 5.2.4 on 2025-10-06 22:55

import django.utils.timezone
from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('dispatcharr_channels', '0027_recurringrecordingrule_end_date_and_more'),
    ]

    operations = [
        migrations.AddField(
            model_name='channel',
            name='created_at',
            field=models.DateTimeField(auto_now_add=True, default=django.utils.timezone.now, help_text='Timestamp when this channel was created'),
            preserve_default=False,
        ),
        migrations.AddField(
            model_name='channel',
            name='updated_at',
            field=models.DateTimeField(auto_now=True, help_text='Timestamp when this channel was last updated'),
        ),
    ]
@@ -0,0 +1,54 @@
# Generated migration to backfill stream_hash for existing custom streams

from django.db import migrations
import hashlib


def backfill_custom_stream_hashes(apps, schema_editor):
    """
    Generate stream_hash for all custom streams that don't have one.
    Uses stream ID to create a stable hash that won't change when name/url is edited.
    """
    Stream = apps.get_model('dispatcharr_channels', 'Stream')

    custom_streams_without_hash = Stream.objects.filter(
        is_custom=True,
        stream_hash__isnull=True
    )

    updated_count = 0
    for stream in custom_streams_without_hash:
        # Generate a stable hash using the stream's ID
        # This ensures the hash never changes even if name/url is edited
        unique_string = f"custom_stream_{stream.id}"
        stream.stream_hash = hashlib.sha256(unique_string.encode()).hexdigest()
        stream.save(update_fields=['stream_hash'])
        updated_count += 1

    if updated_count > 0:
        print(f"Backfilled stream_hash for {updated_count} custom streams")
    else:
        print("No custom streams needed stream_hash backfill")


def reverse_backfill(apps, schema_editor):
    """
    Reverse migration - clear stream_hash for custom streams.
    Note: This will break preview functionality for custom streams.
    """
    Stream = apps.get_model('dispatcharr_channels', 'Stream')

    custom_streams = Stream.objects.filter(is_custom=True)
    count = custom_streams.update(stream_hash=None)
    print(f"Cleared stream_hash for {count} custom streams")


class Migration(migrations.Migration):

    dependencies = [
        ('dispatcharr_channels', '0028_channel_created_at_channel_updated_at'),
    ]

    operations = [
        migrations.RunPython(backfill_custom_stream_hashes, reverse_backfill),
    ]
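Reviewer note: because the backfilled hash depends only on the primary key, it is deterministic and survives renames. A standalone check of the scheme used by this migration:

import hashlib

def custom_stream_hash(stream_id: int) -> str:
    # Same scheme as the migration: stable across name/url edits.
    return hashlib.sha256(f"custom_stream_{stream_id}".encode()).hexdigest()

assert custom_stream_hash(42) == custom_stream_hash(42)  # deterministic
assert custom_stream_hash(42) != custom_stream_hash(43)  # unique per stream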
apps/channels/migrations/0030_alter_stream_url.py (new file, 18 lines)
@@ -0,0 +1,18 @@
# Generated by Django 5.2.4 on 2025-10-28 20:00

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('dispatcharr_channels', '0029_backfill_custom_stream_hashes'),
    ]

    operations = [
        migrations.AlterField(
            model_name='stream',
            name='url',
            field=models.URLField(blank=True, max_length=4096, null=True),
        ),
    ]
@@ -9,12 +9,14 @@ from datetime import datetime
import hashlib
import json
from apps.epg.models import EPGData
from apps.accounts.models import User

logger = logging.getLogger(__name__)

# If you have an M3UAccount model in apps.m3u, you can still import it:
from apps.m3u.models import M3UAccount


# Add fallback functions if Redis isn't available
def get_total_viewers(channel_id):
    """Get viewer count from Redis or return 0 if Redis isn't available"""

@@ -25,6 +27,7 @@ def get_total_viewers(channel_id):
    except Exception:
        return 0


class ChannelGroup(models.Model):
    name = models.TextField(unique=True, db_index=True)

@@ -45,12 +48,14 @@ class ChannelGroup(models.Model):

        return created_objects


class Stream(models.Model):
    """
    Represents a single stream (e.g. from an M3U source or custom URL).
    """

    name = models.CharField(max_length=255, default="Default Stream")
    url = models.URLField(max_length=2000, blank=True, null=True)
    url = models.URLField(max_length=4096, blank=True, null=True)
    m3u_account = models.ForeignKey(
        M3UAccount,
        on_delete=models.CASCADE,

@@ -60,7 +65,7 @@ class Stream(models.Model):
    )
    logo_url = models.TextField(blank=True, null=True)
    tvg_id = models.CharField(max_length=255, blank=True, null=True)
    local_file = models.FileField(upload_to='uploads/', blank=True, null=True)
    local_file = models.FileField(upload_to="uploads/", blank=True, null=True)
    current_viewers = models.PositiveIntegerField(default=0)
    updated_at = models.DateTimeField(auto_now=True)
    channel_group = models.ForeignKey(
@@ -68,18 +73,18 @@ class Stream(models.Model):
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        related_name='streams'
        related_name="streams",
    )
    stream_profile = models.ForeignKey(
        StreamProfile,
        null=True,
        blank=True,
        on_delete=models.SET_NULL,
        related_name='streams'
        related_name="streams",
    )
    is_custom = models.BooleanField(
        default=False,
        help_text="Whether this is a user-created stream or from an M3U account"
        help_text="Whether this is a user-created stream or from an M3U account",
    )
    stream_hash = models.CharField(
        max_length=255,

@@ -89,30 +94,43 @@ class Stream(models.Model):
        db_index=True,
    )
    last_seen = models.DateTimeField(db_index=True, default=datetime.now)
    custom_properties = models.TextField(null=True, blank=True)
    custom_properties = models.JSONField(default=dict, blank=True, null=True)

    # Stream statistics fields
    stream_stats = models.JSONField(
        null=True,
        blank=True,
        help_text="JSON object containing stream statistics like video codec, resolution, etc."
    )
    stream_stats_updated_at = models.DateTimeField(
        null=True,
        blank=True,
        help_text="When stream statistics were last updated",
        db_index=True
    )

    class Meta:
        # If you use m3u_account, you might do unique_together = ('name','url','m3u_account')
        verbose_name = "Stream"
        verbose_name_plural = "Streams"
        ordering = ['-updated_at']
        ordering = ["-updated_at"]

    def __str__(self):
        return self.name or self.url or f"Stream ID {self.id}"

    @classmethod
    def generate_hash_key(cls, name, url, tvg_id, keys=None):
    def generate_hash_key(cls, name, url, tvg_id, keys=None, m3u_id=None):
        if keys is None:
            keys = CoreSettings.get_m3u_hash_key().split(",")

        stream_parts = {
            "name": name, "url": url, "tvg_id": tvg_id
        }
        stream_parts = {"name": name, "url": url, "tvg_id": tvg_id, "m3u_id": m3u_id}

        hash_parts = {key: stream_parts[key] for key in keys if key in stream_parts}

        # Serialize and hash the dictionary
        serialized_obj = json.dumps(hash_parts, sort_keys=True)  # sort_keys ensures consistent ordering
        serialized_obj = json.dumps(
            hash_parts, sort_keys=True
        )  # sort_keys ensures consistent ordering
        hash_object = hashlib.sha256(serialized_obj.encode())
        return hash_object.hexdigest()
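Reviewer note: the new m3u_id parameter lets the configured hash key include the source account, so identical stream entries from different M3U accounts hash differently. A standalone sketch of the scheme (the keys list stands in for CoreSettings.get_m3u_hash_key()):

import hashlib, json

def hash_key(name, url, tvg_id, keys, m3u_id=None):
    stream_parts = {"name": name, "url": url, "tvg_id": tvg_id, "m3u_id": m3u_id}
    hash_parts = {k: stream_parts[k] for k in keys if k in stream_parts}
    # sort_keys keeps the serialization, and therefore the hash, stable
    return hashlib.sha256(json.dumps(hash_parts, sort_keys=True).encode()).hexdigest()

a = hash_key("News", "http://x/1", "news.1", ["name", "url", "m3u_id"], m3u_id=1)
b = hash_key("News", "http://x/1", "news.1", ["name", "url", "m3u_id"], m3u_id=2)
assert a != b  # same stream data, different accounts -> distinct hashes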
@@ -128,13 +146,23 @@ class Stream(models.Model):
            return stream, False  # False means it was updated, not created
        except cls.DoesNotExist:
            # If it doesn't exist, create a new object with the given hash
            fields_to_update['stream_hash'] = hash_value  # Make sure the hash field is set
            fields_to_update["stream_hash"] = (
                hash_value  # Make sure the hash field is set
            )
            stream = cls.objects.create(**fields_to_update)
            return stream, True  # True means it was created

    # @TODO: honor stream's stream profile
    def get_stream_profile(self):
        stream_profile = StreamProfile.objects.get(id=CoreSettings.get_default_stream_profile_id())
        """
        Get the stream profile for this stream.
        Uses the stream's own profile if set, otherwise returns the default.
        """
        if self.stream_profile:
            return self.stream_profile

        stream_profile = StreamProfile.objects.get(
            id=CoreSettings.get_default_stream_profile_id()
        )

        return stream_profile

@@ -152,7 +180,9 @@ class Stream(models.Model):
        m3u_account = self.m3u_account
        m3u_profiles = m3u_account.profiles.all()
        default_profile = next((obj for obj in m3u_profiles if obj.is_default), None)
        profiles = [default_profile] + [obj for obj in m3u_profiles if not obj.is_default]
        profiles = [default_profile] + [
            obj for obj in m3u_profiles if not obj.is_default
        ]

        for profile in profiles:
            logger.info(profile)

@@ -167,13 +197,19 @@ class Stream(models.Model):
            if profile.max_streams == 0 or current_connections < profile.max_streams:
                # Start a new stream
                redis_client.set(f"channel_stream:{self.id}", self.id)
                redis_client.set(f"stream_profile:{self.id}", profile.id)  # Store only the matched profile
                redis_client.set(
                    f"stream_profile:{self.id}", profile.id
                )  # Store only the matched profile

                # Increment connection count for profiles with limits
                if profile.max_streams > 0:
                    redis_client.incr(profile_connections_key)

                return self.id, profile.id, None  # Return newly assigned stream and matched profile
                return (
                    self.id,
                    profile.id,
                    None,
                )  # Return newly assigned stream and matched profile

        # 4. No available streams
        return None, None, None
@@ -194,7 +230,9 @@ class Stream(models.Model):
        redis_client.delete(f"stream_profile:{stream_id}")  # Remove profile association

        profile_id = int(profile_id)
        logger.debug(f"Found profile ID {profile_id} associated with stream {stream_id}")
        logger.debug(
            f"Found profile ID {profile_id} associated with stream {stream_id}"
        )

        profile_connections_key = f"profile_connections:{profile_id}"

@@ -203,6 +241,7 @@ class Stream(models.Model):
        if current_count > 0:
            redis_client.decr(profile_connections_key)


class ChannelManager(models.Manager):
    def active(self):
        return self.all()
@@ -212,38 +251,35 @@ class Channel(models.Model):
    channel_number = models.FloatField(db_index=True)
    name = models.CharField(max_length=255)
    logo = models.ForeignKey(
        'Logo',
        "Logo",
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        related_name='channels',
        related_name="channels",
    )

    # M2M to Stream now in the same file
    streams = models.ManyToManyField(
        Stream,
        blank=True,
        through='ChannelStream',
        related_name='channels'
        Stream, blank=True, through="ChannelStream", related_name="channels"
    )

    channel_group = models.ForeignKey(
        'ChannelGroup',
        "ChannelGroup",
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        related_name='channels',
        help_text="Channel group this channel belongs to."
        related_name="channels",
        help_text="Channel group this channel belongs to.",
    )
    tvg_id = models.CharField(max_length=255, blank=True, null=True)
    tvc_guide_stationid = models.CharField(max_length=255, blank=True, null=True)


    epg_data = models.ForeignKey(
        EPGData,
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        related_name='channels'
        related_name="channels",
    )

    stream_profile = models.ForeignKey(

@@ -251,16 +287,41 @@ class Channel(models.Model):
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        related_name='channels'
        related_name="channels",
    )

    uuid = models.UUIDField(default=uuid.uuid4, editable=False, unique=True, db_index=True)
    uuid = models.UUIDField(
        default=uuid.uuid4, editable=False, unique=True, db_index=True
    )

    user_level = models.IntegerField(default=0)

    auto_created = models.BooleanField(
        default=False,
        help_text="Whether this channel was automatically created via M3U auto channel sync"
    )
    auto_created_by = models.ForeignKey(
        "m3u.M3UAccount",
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        related_name="auto_created_channels",
        help_text="The M3U account that auto-created this channel"
    )

    created_at = models.DateTimeField(
        auto_now_add=True,
        help_text="Timestamp when this channel was created"
    )
    updated_at = models.DateTimeField(
        auto_now=True,
        help_text="Timestamp when this channel was last updated"
    )

    def clean(self):
        # Enforce unique channel_number within a given group
        existing = Channel.objects.filter(
            channel_number=self.channel_number,
            channel_group=self.channel_group
            channel_number=self.channel_number, channel_group=self.channel_group
        ).exclude(id=self.id)
        if existing.exists():
            raise ValidationError(

@@ -272,7 +333,7 @@ class Channel(models.Model):

    @classmethod
    def get_next_available_channel_number(cls, starting_from=1):
        used_numbers = set(cls.objects.all().values_list('channel_number', flat=True))
        used_numbers = set(cls.objects.all().values_list("channel_number", flat=True))
        n = starting_from
        while n in used_numbers:
            n += 1

@@ -282,7 +343,9 @@ class Channel(models.Model):
    def get_stream_profile(self):
        stream_profile = self.stream_profile
        if not stream_profile:
            stream_profile = StreamProfile.objects.get(id=CoreSettings.get_default_stream_profile_id())
            stream_profile = StreamProfile.objects.get(
                id=CoreSettings.get_default_stream_profile_id()
            )

        return stream_profile
@@ -312,44 +375,55 @@ class Channel(models.Model):
                    profile_id = int(profile_id_bytes)
                    return stream_id, profile_id, None
                except (ValueError, TypeError):
                    logger.debug(f"Invalid profile ID retrieved from Redis: {profile_id_bytes}")
                    logger.debug(
                        f"Invalid profile ID retrieved from Redis: {profile_id_bytes}"
                    )
            except (ValueError, TypeError):
                logger.debug(f"Invalid stream ID retrieved from Redis: {stream_id_bytes}")
                logger.debug(
                    f"Invalid stream ID retrieved from Redis: {stream_id_bytes}"
                )

        # No existing active stream, attempt to assign a new one
        has_streams_but_maxed_out = False
        has_active_profiles = False

        # Iterate through channel streams and their profiles
        for stream in self.streams.all().order_by('channelstream__order'):
        for stream in self.streams.all().order_by("channelstream__order"):
            # Retrieve the M3U account associated with the stream.
            m3u_account = stream.m3u_account
            if not m3u_account:
                logger.debug(f"Stream {stream.id} has no M3U account")
                continue

            m3u_profiles = m3u_account.profiles.all()
            default_profile = next((obj for obj in m3u_profiles if obj.is_default), None)

            if not default_profile:
                logger.debug(f"M3U account {m3u_account.id} has no default profile")
            if m3u_account.is_active == False:
                logger.debug(f"M3U account {m3u_account.id} is inactive, skipping.")
                continue

            profiles = [default_profile] + [obj for obj in m3u_profiles if not obj.is_default]
            m3u_profiles = m3u_account.profiles.filter(is_active=True)
            default_profile = next(
                (obj for obj in m3u_profiles if obj.is_default), None
            )

            if not default_profile:
                logger.debug(f"M3U account {m3u_account.id} has no active default profile")
                continue

            profiles = [default_profile] + [
                obj for obj in m3u_profiles if not obj.is_default
            ]

            for profile in profiles:
                # Skip inactive profiles
                if not profile.is_active:
                    logger.debug(f"Skipping inactive profile {profile.id}")
                    continue

                has_active_profiles = True

                profile_connections_key = f"profile_connections:{profile.id}"
                current_connections = int(redis_client.get(profile_connections_key) or 0)
                current_connections = int(
                    redis_client.get(profile_connections_key) or 0
                )

                # Check if profile has available slots (or unlimited connections)
                if profile.max_streams == 0 or current_connections < profile.max_streams:
                if (
                    profile.max_streams == 0
                    or current_connections < profile.max_streams
                ):
                    # Start a new stream
                    redis_client.set(f"channel_stream:{self.id}", stream.id)
                    redis_client.set(f"stream_profile:{stream.id}", profile.id)

@@ -358,17 +432,23 @@ class Channel(models.Model):
                    if profile.max_streams > 0:
                        redis_client.incr(profile_connections_key)

                    return stream.id, profile.id, None  # Return newly assigned stream and matched profile
                    return (
                        stream.id,
                        profile.id,
                        None,
                    )  # Return newly assigned stream and matched profile
                else:
                    # This profile is at max connections
                    has_streams_but_maxed_out = True
                    logger.debug(f"Profile {profile.id} at max connections: {current_connections}/{profile.max_streams}")
                    logger.debug(
                        f"Profile {profile.id} at max connections: {current_connections}/{profile.max_streams}"
                    )

        # No available streams - determine specific reason
        if has_streams_but_maxed_out:
            error_reason = "All M3U profiles have reached maximum connection limits"
            error_reason = "All active M3U profiles have reached maximum connection limits"
        elif has_active_profiles:
            error_reason = "No compatible profile found for any assigned stream"
            error_reason = "No compatible active profile found for any assigned stream"
        else:
            error_reason = "No active profiles found for any assigned stream"

@@ -388,7 +468,9 @@ class Channel(models.Model):
        redis_client.delete(f"channel_stream:{self.id}")  # Remove active stream

        stream_id = int(stream_id)
        logger.debug(f"Found stream ID {stream_id} associated with channel stream {self.id}")
        logger.debug(
            f"Found stream ID {stream_id} associated with channel stream {self.id}"
        )

        # Get the matched profile for cleanup
        profile_id = redis_client.get(f"stream_profile:{stream_id}")

@@ -399,7 +481,9 @@ class Channel(models.Model):
        redis_client.delete(f"stream_profile:{stream_id}")  # Remove profile association

        profile_id = int(profile_id)
        logger.debug(f"Found profile ID {profile_id} associated with stream {stream_id}")
        logger.debug(
            f"Found profile ID {profile_id} associated with stream {stream_id}"
        )

        profile_connections_key = f"profile_connections:{profile_id}"
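Reviewer note: a distilled sketch of the Redis connection-accounting convention these methods share (assumes a reachable Redis instance; max_streams == 0 means unlimited):

import redis

r = redis.Redis()  # assumed local instance; adjust for your deployment

def try_acquire(profile_id: int, max_streams: int) -> bool:
    key = f"profile_connections:{profile_id}"
    current = int(r.get(key) or 0)
    if max_streams == 0 or current < max_streams:
        if max_streams > 0:
            r.incr(key)  # only limited profiles are counted
        return True
    return False

def release(profile_id: int, max_streams: int) -> None:
    key = f"profile_connections:{profile_id}"
    if max_streams > 0 and int(r.get(key) or 0) > 0:
        r.decr(key)  # mirrors the decrement in release_stream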
@@ -452,20 +536,26 @@ class Channel(models.Model):
        # Increment connection count for new profile
        new_profile_connections_key = f"profile_connections:{new_profile_id}"
        redis_client.incr(new_profile_connections_key)
        logger.info(f"Updated stream {stream_id} profile from {current_profile_id} to {new_profile_id}")
        logger.info(
            f"Updated stream {stream_id} profile from {current_profile_id} to {new_profile_id}"
        )
        return True


class ChannelProfile(models.Model):
    name = models.CharField(max_length=100, unique=True)


class ChannelProfileMembership(models.Model):
    channel_profile = models.ForeignKey(ChannelProfile, on_delete=models.CASCADE)
    channel = models.ForeignKey(Channel, on_delete=models.CASCADE)
    enabled = models.BooleanField(default=True)  # Track if the channel is enabled for this group
    enabled = models.BooleanField(
        default=True
    )  # Track if the channel is enabled for this group

    class Meta:
        unique_together = ('channel_profile', 'channel')
        unique_together = ("channel_profile", "channel")


class ChannelStream(models.Model):
    channel = models.ForeignKey(Channel, on_delete=models.CASCADE)

@@ -473,27 +563,35 @@ class ChannelStream(models.Model):
    order = models.PositiveIntegerField(default=0)  # Ordering field

    class Meta:
        ordering = ['order']  # Ensure streams are retrieved in order
        ordering = ["order"]  # Ensure streams are retrieved in order
        constraints = [
            models.UniqueConstraint(fields=['channel', 'stream'], name='unique_channel_stream')
            models.UniqueConstraint(
                fields=["channel", "stream"], name="unique_channel_stream"
            )
        ]


class ChannelGroupM3UAccount(models.Model):
    channel_group = models.ForeignKey(
        ChannelGroup,
        on_delete=models.CASCADE,
        related_name='m3u_account'
        ChannelGroup, on_delete=models.CASCADE, related_name="m3u_accounts"
    )
    m3u_account = models.ForeignKey(
        M3UAccount,
        on_delete=models.CASCADE,
        related_name='channel_group'
        M3UAccount, on_delete=models.CASCADE, related_name="channel_group"
    )
    custom_properties = models.TextField(null=True, blank=True)
    custom_properties = models.JSONField(default=dict, blank=True, null=True)
    enabled = models.BooleanField(default=True)
    auto_channel_sync = models.BooleanField(
        default=False,
        help_text='Automatically create/delete channels to match streams in this group'
    )
    auto_sync_channel_start = models.FloatField(
        null=True,
        blank=True,
        help_text='Starting channel number for auto-created channels in this group'
    )

    class Meta:
        unique_together = ('channel_group', 'm3u_account')
        unique_together = ("channel_group", "m3u_account")

    def __str__(self):
        return f"{self.channel_group.name} - {self.m3u_account.name} (Enabled: {self.enabled})"

@@ -506,12 +604,47 @@ class Logo(models.Model):
    def __str__(self):
        return self.name


class Recording(models.Model):
    channel = models.ForeignKey("Channel", on_delete=models.CASCADE, related_name="recordings")
    channel = models.ForeignKey(
        "Channel", on_delete=models.CASCADE, related_name="recordings"
    )
    start_time = models.DateTimeField()
    end_time = models.DateTimeField()
    task_id = models.CharField(max_length=255, null=True, blank=True)
    custom_properties = models.TextField(null=True, blank=True)
    custom_properties = models.JSONField(default=dict, blank=True, null=True)

    def __str__(self):
        return f"{self.channel.name} - {self.start_time} to {self.end_time}"


class RecurringRecordingRule(models.Model):
    """Rule describing a recurring manual DVR schedule."""

    channel = models.ForeignKey(
        "Channel",
        on_delete=models.CASCADE,
        related_name="recurring_rules",
    )
    days_of_week = models.JSONField(default=list)
    start_time = models.TimeField()
    end_time = models.TimeField()
    enabled = models.BooleanField(default=True)
    name = models.CharField(max_length=255, blank=True)
    start_date = models.DateField(null=True, blank=True)
    end_date = models.DateField(null=True, blank=True)
    created_at = models.DateTimeField(auto_now_add=True)
    updated_at = models.DateTimeField(auto_now=True)

    class Meta:
        ordering = ["channel", "start_time"]

    def __str__(self):
        channel_name = getattr(self.channel, "name", str(self.channel_id))
        return f"Recurring rule for {channel_name}"

    def cleaned_days(self):
        try:
            return sorted({int(d) for d in (self.days_of_week or []) if 0 <= int(d) <= 6})
        except Exception:
            return []
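Reviewer note: cleaned_days() defensively normalizes whatever JSON landed in days_of_week (0 = Monday through 6 = Sunday, matching the serializer validation later in this diff). A standalone illustration of the same logic:

def cleaned_days(days_of_week):
    # Same normalization as the model method: coerce, bound to 0-6, dedupe, sort.
    try:
        return sorted({int(d) for d in (days_of_week or []) if 0 <= int(d) <= 6})
    except Exception:
        return []

assert cleaned_days(["5", 1, 1, 9]) == [1, 5]  # coerced, deduped, 9 dropped
assert cleaned_days(["mon"]) == []             # unparseable input collapses to []
assert cleaned_days(None) == []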
@@ -1,108 +1,225 @@
import json
from datetime import datetime

from rest_framework import serializers
from .models import Stream, Channel, ChannelGroup, ChannelStream, ChannelGroupM3UAccount, Logo, ChannelProfile, ChannelProfileMembership, Recording
from .models import (
    Stream,
    Channel,
    ChannelGroup,
    ChannelStream,
    ChannelGroupM3UAccount,
    Logo,
    ChannelProfile,
    ChannelProfileMembership,
    Recording,
    RecurringRecordingRule,
)
from apps.epg.serializers import EPGDataSerializer
from core.models import StreamProfile
from apps.epg.models import EPGData
from django.urls import reverse
from rest_framework import serializers
from django.utils import timezone
from core.utils import validate_flexible_url


class LogoSerializer(serializers.ModelSerializer):
    cache_url = serializers.SerializerMethodField()
    channel_count = serializers.SerializerMethodField()
    is_used = serializers.SerializerMethodField()
    channel_names = serializers.SerializerMethodField()

    class Meta:
        model = Logo
        fields = ['id', 'name', 'url', 'cache_url']
        fields = ["id", "name", "url", "cache_url", "channel_count", "is_used", "channel_names"]

    def validate_url(self, value):
        """Validate that the URL is unique for creation or update"""
        if self.instance and self.instance.url == value:
            return value

        if Logo.objects.filter(url=value).exists():
            raise serializers.ValidationError("A logo with this URL already exists.")

        return value

    def create(self, validated_data):
        """Handle logo creation with proper URL validation"""
        return Logo.objects.create(**validated_data)

    def update(self, instance, validated_data):
        """Handle logo updates"""
        for attr, value in validated_data.items():
            setattr(instance, attr, value)
        instance.save()
        return instance

    def get_cache_url(self, obj):
        # return f"/api/channels/logos/{obj.id}/cache/"
        request = self.context.get('request')
        request = self.context.get("request")
        if request:
            return request.build_absolute_uri(reverse('api:channels:logo-cache', args=[obj.id]))
        return reverse('api:channels:logo-cache', args=[obj.id])
            return request.build_absolute_uri(
                reverse("api:channels:logo-cache", args=[obj.id])
            )
        return reverse("api:channels:logo-cache", args=[obj.id])

    def get_channel_count(self, obj):
        """Get the number of channels using this logo"""
        return obj.channels.count()

    def get_is_used(self, obj):
        """Check if this logo is used by any channels"""
        return obj.channels.exists()

    def get_channel_names(self, obj):
        """Get the names of channels using this logo (limited to first 5)"""
        names = []

        # Get channel names
        channels = obj.channels.all()[:5]
        for channel in channels:
            names.append(f"Channel: {channel.name}")

        # Calculate total count for "more" message
        total_count = self.get_channel_count(obj)
        if total_count > 5:
            names.append(f"...and {total_count - 5} more")

        return names


#
# Stream
#
class StreamSerializer(serializers.ModelSerializer):
    url = serializers.CharField(
        required=False,
        allow_blank=True,
        allow_null=True,
        validators=[validate_flexible_url]
    )
    stream_profile_id = serializers.PrimaryKeyRelatedField(
        queryset=StreamProfile.objects.all(),
        source='stream_profile',
        source="stream_profile",
        allow_null=True,
        required=False
        required=False,
    )
    read_only_fields = ['is_custom', 'm3u_account', 'stream_hash']
    read_only_fields = ["is_custom", "m3u_account", "stream_hash"]

    class Meta:
        model = Stream
        fields = [
            'id',
            'name',
            'url',
            'm3u_account',  # Uncomment if using M3U fields
            'logo_url',
            'tvg_id',
            'local_file',
            'current_viewers',
            'updated_at',
            'last_seen',
            'stream_profile_id',
            'is_custom',
            'channel_group',
            'stream_hash',
            "id",
            "name",
            "url",
            "m3u_account",  # Uncomment if using M3U fields
            "logo_url",
            "tvg_id",
            "local_file",
            "current_viewers",
            "updated_at",
            "last_seen",
            "stream_profile_id",
            "is_custom",
            "channel_group",
            "stream_hash",
            "stream_stats",
            "stream_stats_updated_at",
        ]

    def get_fields(self):
        fields = super().get_fields()

        # Unable to edit specific properties if this stream was created from an M3U account
        if self.instance and getattr(self.instance, 'm3u_account', None) and not self.instance.is_custom:
            fields['id'].read_only = True
            fields['name'].read_only = True
            fields['url'].read_only = True
            fields['m3u_account'].read_only = True
            fields['tvg_id'].read_only = True
            fields['channel_group'].read_only = True

        if (
            self.instance
            and getattr(self.instance, "m3u_account", None)
            and not self.instance.is_custom
        ):
            fields["id"].read_only = True
            fields["name"].read_only = True
            fields["url"].read_only = True
            fields["m3u_account"].read_only = True
            fields["tvg_id"].read_only = True
            fields["channel_group"].read_only = True

        return fields


class ChannelGroupM3UAccountSerializer(serializers.ModelSerializer):
    m3u_accounts = serializers.IntegerField(source="m3u_accounts.id", read_only=True)
    enabled = serializers.BooleanField()
    auto_channel_sync = serializers.BooleanField(default=False)
    auto_sync_channel_start = serializers.FloatField(allow_null=True, required=False)
    custom_properties = serializers.JSONField(required=False)

    class Meta:
        model = ChannelGroupM3UAccount
        fields = ["m3u_accounts", "channel_group", "enabled", "auto_channel_sync", "auto_sync_channel_start", "custom_properties"]

    def to_representation(self, instance):
        data = super().to_representation(instance)

        custom_props = instance.custom_properties or {}

        return data

    def to_internal_value(self, data):
        # Accept both dict and JSON string for custom_properties (for backward compatibility)
        val = data.get("custom_properties")
        if isinstance(val, str):
            try:
                data["custom_properties"] = json.loads(val)
            except Exception:
                pass

        return super().to_internal_value(data)

#
# Channel Group
#
class ChannelGroupSerializer(serializers.ModelSerializer):
    channel_count = serializers.IntegerField(read_only=True)
    m3u_account_count = serializers.IntegerField(read_only=True)
    m3u_accounts = ChannelGroupM3UAccountSerializer(
        many=True,
        read_only=True
    )

    class Meta:
        model = ChannelGroup
        fields = ['id', 'name']
        fields = ["id", "name", "channel_count", "m3u_account_count", "m3u_accounts"]


class ChannelProfileSerializer(serializers.ModelSerializer):
    channels = serializers.SerializerMethodField()

    class Meta:
        model = ChannelProfile
        fields = ['id', 'name', 'channels']
        fields = ["id", "name", "channels"]

    def get_channels(self, obj):
        memberships = ChannelProfileMembership.objects.filter(channel_profile=obj, enabled=True)
        return [
            membership.channel.id
            for membership in memberships
        ]
        memberships = ChannelProfileMembership.objects.filter(
            channel_profile=obj, enabled=True
        )
        return [membership.channel.id for membership in memberships]


class ChannelProfileMembershipSerializer(serializers.ModelSerializer):
    class Meta:
        model = ChannelProfileMembership
        fields = ['channel', 'enabled']
        fields = ["channel", "enabled"]


class ChanneProfilelMembershipUpdateSerializer(serializers.Serializer):
    channel_id = serializers.IntegerField()  # Ensure channel_id is an integer
    enabled = serializers.BooleanField()


class BulkChannelProfileMembershipSerializer(serializers.Serializer):
    channels = serializers.ListField(
        child=ChanneProfilelMembershipUpdateSerializer(),  # Use the nested serializer
        allow_empty=False
        allow_empty=False,
    )

    def validate_channels(self, value):
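Reviewer note: to_internal_value above keeps older clients working by coercing a JSON-string custom_properties into a dict before field validation. The coercion in isolation:

import json

def coerce_custom_properties(data: dict) -> dict:
    # Same idea as to_internal_value: accept legacy JSON strings.
    val = data.get("custom_properties")
    if isinstance(val, str):
        try:
            data["custom_properties"] = json.loads(val)
        except Exception:
            pass  # leave as-is; field validation will reject it
    return data

legacy = coerce_custom_properties({"custom_properties": '{"xui": true}'})
assert legacy["custom_properties"] == {"xui": True}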
@@ -110,6 +227,7 @@ class BulkChannelProfileMembershipSerializer(serializers.Serializer):
            raise serializers.ValidationError("At least one channel must be provided.")
        return value


#
# Channel
#

@@ -119,14 +237,10 @@ class ChannelSerializer(serializers.ModelSerializer):
    channel_number = serializers.FloatField(
        allow_null=True,
        required=False,
        error_messages={
            'invalid': 'Channel number must be a valid decimal number.'
        }
        error_messages={"invalid": "Channel number must be a valid decimal number."},
    )
    channel_group_id = serializers.PrimaryKeyRelatedField(
        queryset=ChannelGroup.objects.all(),
        source="channel_group",
        required=False
        queryset=ChannelGroup.objects.all(), source="channel_group", required=False
    )
    epg_data_id = serializers.PrimaryKeyRelatedField(
        queryset=EPGData.objects.all(),
@@ -137,41 +251,49 @@ class ChannelSerializer(serializers.ModelSerializer):

    stream_profile_id = serializers.PrimaryKeyRelatedField(
        queryset=StreamProfile.objects.all(),
        source='stream_profile',
        allow_null=True,
        required=False
    )

    streams = serializers.PrimaryKeyRelatedField(queryset=Stream.objects.all(), many=True, required=False)

    logo_id = serializers.PrimaryKeyRelatedField(
        queryset=Logo.objects.all(),
        source='logo',
        source="stream_profile",
        allow_null=True,
        required=False,
    )

    streams = serializers.PrimaryKeyRelatedField(
        queryset=Stream.objects.all(), many=True, required=False
    )

    logo_id = serializers.PrimaryKeyRelatedField(
        queryset=Logo.objects.all(),
        source="logo",
        allow_null=True,
        required=False,
    )

    auto_created_by_name = serializers.SerializerMethodField()

    class Meta:
        model = Channel
        fields = [
            'id',
            'channel_number',
            'name',
            'channel_group_id',
            'tvg_id',
            'tvc_guide_stationid',
            'epg_data_id',
            'streams',
            'stream_profile_id',
            'uuid',
            'logo_id',
            "id",
            "channel_number",
            "name",
            "channel_group_id",
            "tvg_id",
            "tvc_guide_stationid",
            "epg_data_id",
            "streams",
            "stream_profile_id",
            "uuid",
            "logo_id",
            "user_level",
            "auto_created",
            "auto_created_by",
            "auto_created_by_name",
        ]

    def to_representation(self, instance):
        include_streams = self.context.get('include_streams', False)
        include_streams = self.context.get("include_streams", False)

        if include_streams:
            self.fields['streams'] = serializers.SerializerMethodField()
            self.fields["streams"] = serializers.SerializerMethodField()

        return super().to_representation(instance)

@@ -180,22 +302,28 @@ class ChannelSerializer(serializers.ModelSerializer):

    def get_streams(self, obj):
        """Retrieve ordered stream IDs for GET requests."""
        return StreamSerializer(obj.streams.all().order_by('channelstream__order'), many=True).data
        return StreamSerializer(
            obj.streams.all().order_by("channelstream__order"), many=True
        ).data

    def create(self, validated_data):
        streams = validated_data.pop('streams', [])
        channel_number = validated_data.pop('channel_number', Channel.get_next_available_channel_number())
        streams = validated_data.pop("streams", [])
        channel_number = validated_data.pop(
            "channel_number", Channel.get_next_available_channel_number()
        )
        validated_data["channel_number"] = channel_number
        channel = Channel.objects.create(**validated_data)

        # Add streams in the specified order
        for index, stream in enumerate(streams):
            ChannelStream.objects.create(channel=channel, stream_id=stream.id, order=index)
            ChannelStream.objects.create(
                channel=channel, stream_id=stream.id, order=index
            )

        return channel

    def update(self, instance, validated_data):
        streams = validated_data.pop('streams', None)
        streams = validated_data.pop("streams", None)

        # Update standard fields
        for attr, value in validated_data.items():
@@ -206,8 +334,7 @@ class ChannelSerializer(serializers.ModelSerializer):
        if streams is not None:
            # Normalize stream IDs
            normalized_ids = [
                stream.id if hasattr(stream, "id") else stream
                for stream in streams
                stream.id if hasattr(stream, "id") else stream for stream in streams
            ]
            print(normalized_ids)

@@ -234,9 +361,7 @@ class ChannelSerializer(serializers.ModelSerializer):
                cs.save(update_fields=["order"])
            else:
                ChannelStream.objects.create(
                    channel=instance,
                    stream_id=stream_id,
                    order=order
                    channel=instance, stream_id=stream_id, order=order
                )

        return instance
@@ -250,34 +375,71 @@ class ChannelSerializer(serializers.ModelSerializer):
            # Ensure it's processed as a float
            return float(value)
        except (ValueError, TypeError):
            raise serializers.ValidationError("Channel number must be a valid decimal number.")
            raise serializers.ValidationError(
                "Channel number must be a valid decimal number."
            )

    def validate_stream_profile(self, value):
        """Handle special case where empty/0 values mean 'use default' (null)"""
        if value == '0' or value == 0 or value == '' or value is None:
        if value == "0" or value == 0 or value == "" or value is None:
            return None
        return value  # PrimaryKeyRelatedField will handle the conversion to object

class ChannelGroupM3UAccountSerializer(serializers.ModelSerializer):
    enabled = serializers.BooleanField()

    class Meta:
        model = ChannelGroupM3UAccount
        fields = ['id', 'channel_group', 'enabled']

    # Optionally, if you only need the id of the ChannelGroup, you can customize it like this:
    # channel_group = serializers.PrimaryKeyRelatedField(queryset=ChannelGroup.objects.all())
    def get_auto_created_by_name(self, obj):
        """Get the name of the M3U account that auto-created this channel."""
        if obj.auto_created_by:
            return obj.auto_created_by.name
        return None


class RecordingSerializer(serializers.ModelSerializer):
    class Meta:
        model = Recording
        fields = '__all__'
        read_only_fields = ['task_id']
        fields = "__all__"
        read_only_fields = ["task_id"]

    def validate(self, data):
        start_time = data.get('start_time')
        end_time = data.get('end_time')
        from core.models import CoreSettings
        start_time = data.get("start_time")
        end_time = data.get("end_time")

        if start_time and timezone.is_naive(start_time):
            start_time = timezone.make_aware(start_time, timezone.get_current_timezone())
            data["start_time"] = start_time
        if end_time and timezone.is_naive(end_time):
            end_time = timezone.make_aware(end_time, timezone.get_current_timezone())
            data["end_time"] = end_time

        # If this is an EPG-based recording (program provided), apply global pre/post offsets
        try:
            cp = data.get("custom_properties") or {}
            is_epg_based = isinstance(cp, dict) and isinstance(cp.get("program"), (dict,))
        except Exception:
            is_epg_based = False

        if is_epg_based and start_time and end_time:
            try:
                pre_min = int(CoreSettings.get_dvr_pre_offset_minutes())
            except Exception:
                pre_min = 0
            try:
                post_min = int(CoreSettings.get_dvr_post_offset_minutes())
            except Exception:
                post_min = 0
            from datetime import timedelta
            try:
                if pre_min and pre_min > 0:
                    start_time = start_time - timedelta(minutes=pre_min)
            except Exception:
                pass
            try:
                if post_min and post_min > 0:
                    end_time = end_time + timedelta(minutes=post_min)
            except Exception:
                pass
            # write back adjusted times so scheduling uses them
            data["start_time"] = start_time
            data["end_time"] = end_time

        now = timezone.now()  # timezone-aware current time
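Reviewer note: the pre/post offsets simply widen the recording window around the EPG times. A standalone sketch of the arithmetic (the minute values stand in for the CoreSettings DVR offsets):

from datetime import datetime, timedelta

start = datetime(2025, 10, 6, 20, 0)
end = datetime(2025, 10, 6, 21, 0)
pre_min, post_min = 2, 5  # illustrative offsets

if pre_min > 0:
    start -= timedelta(minutes=pre_min)
if post_min > 0:
    end += timedelta(minutes=post_min)

assert (start.hour, start.minute) == (19, 58)
assert (end.hour, end.minute) == (21, 5)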
@ -286,8 +448,61 @@ class RecordingSerializer(serializers.ModelSerializer):
|
|||
|
||||
if start_time < now:
|
||||
# Optional: Adjust start_time if it's in the past but end_time is in the future
|
||||
data['start_time'] = now # or: timezone.now() + timedelta(seconds=1)
|
||||
if end_time <= data['start_time']:
|
||||
data["start_time"] = now # or: timezone.now() + timedelta(seconds=1)
|
||||
if end_time <= data["start_time"]:
|
||||
raise serializers.ValidationError("End time must be after start time.")
|
||||
|
||||
return data
|
||||
|
||||
|
||||
class RecurringRecordingRuleSerializer(serializers.ModelSerializer):
    class Meta:
        model = RecurringRecordingRule
        fields = "__all__"
        read_only_fields = ["created_at", "updated_at"]

    def validate_days_of_week(self, value):
        if not value:
            raise serializers.ValidationError("Select at least one day of the week")
        cleaned = []
        for entry in value:
            try:
                iv = int(entry)
            except (TypeError, ValueError):
                raise serializers.ValidationError("Days of week must be integers 0-6")
            if iv < 0 or iv > 6:
                raise serializers.ValidationError("Days of week must be between 0 (Monday) and 6 (Sunday)")
            cleaned.append(iv)
        return sorted(set(cleaned))

    def validate(self, attrs):
        start = attrs.get("start_time") or getattr(self.instance, "start_time", None)
        end = attrs.get("end_time") or getattr(self.instance, "end_time", None)
        start_date = attrs.get("start_date") if "start_date" in attrs else getattr(self.instance, "start_date", None)
        end_date = attrs.get("end_date") if "end_date" in attrs else getattr(self.instance, "end_date", None)
        if start_date is None:
            existing_start = getattr(self.instance, "start_date", None)
            if existing_start is None:
                raise serializers.ValidationError("Start date is required")
        if start_date and end_date and end_date < start_date:
            raise serializers.ValidationError("End date must be on or after start date")
        if end_date is None:
            existing_end = getattr(self.instance, "end_date", None)
            if existing_end is None:
                raise serializers.ValidationError("End date is required")
        if start and end and start_date and end_date:
            start_dt = datetime.combine(start_date, start)
            end_dt = datetime.combine(end_date, end)
            if end_dt <= start_dt:
                raise serializers.ValidationError("End datetime must be after start datetime")
        elif start and end and end == start:
            raise serializers.ValidationError("End time must be different from start time")
        # Normalize empty strings to None for dates
        if attrs.get("end_date") == "":
            attrs["end_date"] = None
        if attrs.get("start_date") == "":
            attrs["start_date"] = None
        return super().validate(attrs)

    def create(self, validated_data):
        return super().create(validated_data)
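As a rough illustration of what a sync task like sync_recurring_rule_impl has to do with these validated fields, here is a sketch (not the project's implementation) that expands a rule's days_of_week and start/end times into concrete occurrences over a horizon:

from datetime import date, datetime, time, timedelta

def expand_rule(days_of_week, start_t, end_t, start_date, horizon_days=7):
    """Yield (start_dt, end_dt) pairs for each matching weekday in the horizon."""
    for offset in range(horizon_days):
        day = start_date + timedelta(days=offset)
        if day.weekday() in days_of_week:  # 0 = Monday ... 6 = Sunday
            start_dt = datetime.combine(day, start_t)
            end_dt = datetime.combine(day, end_t)
            if end_dt <= start_dt:  # overnight rule: end on the next day
                end_dt += timedelta(days=1)
            yield start_dt, end_dt

for s, e in expand_rule({0, 2}, time(20, 0), time(21, 0), date(2025, 1, 6), 7):
    print(s, "->", e)  # Monday and Wednesday occurrences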
@@ -8,7 +8,7 @@ from .models import Channel, Stream, ChannelProfile, ChannelProfileMembership, R
from apps.m3u.models import M3UAccount
from apps.epg.tasks import parse_programs_for_tvg_id
import logging, requests, time
from .tasks import run_recording
from .tasks import run_recording, prefetch_recording_artwork
from django.utils.timezone import now, is_aware, make_aware
from datetime import timedelta
@@ -45,6 +45,20 @@ def set_default_m3u_account(sender, instance, **kwargs):
    else:
        raise ValueError("No default M3UAccount found.")

@receiver(post_save, sender=Stream)
def generate_custom_stream_hash(sender, instance, created, **kwargs):
    """
    Generate a stable stream_hash for custom streams after creation.
    Uses the stream's ID to ensure the hash never changes even if name/url is edited.
    """
    if instance.is_custom and not instance.stream_hash and created:
        import hashlib
        # Use stream ID for a stable, unique hash that never changes
        unique_string = f"custom_stream_{instance.id}"
        instance.stream_hash = hashlib.sha256(unique_string.encode()).hexdigest()
        # Use update to avoid triggering signals again
        Stream.objects.filter(id=instance.id).update(stream_hash=instance.stream_hash)

@receiver(post_save, sender=Channel)
def refresh_epg_programs(sender, instance, created, **kwargs):
    """
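The hash stays stable because its input is the immutable primary key rather than any editable field. A quick standalone demonstration of the idea:

import hashlib

def stream_hash(stream_id: int) -> str:
    return hashlib.sha256(f"custom_stream_{stream_id}".encode()).hexdigest()

h1 = stream_hash(42)
# ...name or URL edits later change nothing the hash depends on...
h2 = stream_hash(42)
assert h1 == h2  # same ID, same hash, regardless of other edits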
@@ -62,15 +76,6 @@ def refresh_epg_programs(sender, instance, created, **kwargs):
        logger.info(f"New channel {instance.id} ({instance.name}) created with EPG data, refreshing program data")
        parse_programs_for_tvg_id.delay(instance.epg_data.id)

@receiver(post_save, sender=Channel)
def add_new_channel_to_groups(sender, instance, created, **kwargs):
    if created:
        profiles = ChannelProfile.objects.all()
        ChannelProfileMembership.objects.bulk_create([
            ChannelProfileMembership(channel_profile=profile, channel=instance)
            for profile in profiles
        ])

@receiver(post_save, sender=ChannelProfile)
def create_profile_memberships(sender, instance, created, **kwargs):
    if created:
@@ -82,8 +87,9 @@ def create_profile_memberships(sender, instance, created, **kwargs):

def schedule_recording_task(instance):
    eta = instance.start_time
    # Pass recording_id first so task can persist metadata to the correct row
    task = run_recording.apply_async(
        args=[instance.channel_id, str(instance.start_time), str(instance.end_time)],
        args=[instance.id, instance.channel_id, str(instance.start_time), str(instance.end_time)],
        eta=eta
    )
    return task.id
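Scheduling with apply_async(eta=...) hands the delay to the broker and worker, and persisting task.id (as above) is what later makes cancellation possible. A generic, hedged sketch of the companion operation:

from celery.result import AsyncResult

def cancel_scheduled(task_id: str):
    """Revoke a not-yet-started task by the ID persisted on the model."""
    AsyncResult(task_id).revoke()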
@@ -132,6 +138,11 @@ def schedule_task_on_save(sender, instance, created, **kwargs):
            instance.save(update_fields=['task_id'])
        else:
            print("Start time is in the past. Not scheduling.")
        # Kick off poster/artwork prefetch to enrich Upcoming cards
        try:
            prefetch_recording_artwork.apply_async(args=[instance.id], countdown=1)
        except Exception as e:
            print("Error scheduling artwork prefetch:", e)
    except Exception as e:
        import traceback
        print("Error in post_save signal:", e)
File diff suppressed because it is too large (load diff to view)

apps/channels/tests/__init__.py (new file, 0 lines)
apps/channels/tests/test_recurring_rules.py (new file, 40 lines)
@@ -0,0 +1,40 @@
from datetime import datetime, timedelta
from django.test import TestCase
from django.utils import timezone

from apps.channels.models import Channel, RecurringRecordingRule, Recording
from apps.channels.tasks import sync_recurring_rule_impl, purge_recurring_rule_impl


class RecurringRecordingRuleTasksTests(TestCase):
    def test_sync_recurring_rule_creates_and_purges_recordings(self):
        now = timezone.now()
        channel = Channel.objects.create(channel_number=1, name='Test Channel')

        start_time = (now + timedelta(minutes=15)).time().replace(second=0, microsecond=0)
        end_time = (now + timedelta(minutes=75)).time().replace(second=0, microsecond=0)

        rule = RecurringRecordingRule.objects.create(
            channel=channel,
            days_of_week=[now.weekday()],
            start_time=start_time,
            end_time=end_time,
        )

        created = sync_recurring_rule_impl(rule.id, drop_existing=True, horizon_days=1)
        self.assertEqual(created, 1)

        recording = Recording.objects.filter(custom_properties__rule__id=rule.id).first()
        self.assertIsNotNone(recording)
        self.assertEqual(recording.channel, channel)
        self.assertEqual(recording.custom_properties.get('rule', {}).get('id'), rule.id)

        expected_start = timezone.make_aware(
            datetime.combine(recording.start_time.date(), start_time),
            timezone.get_current_timezone(),
        )
        self.assertLess(abs((recording.start_time - expected_start).total_seconds()), 60)

        removed = purge_recurring_rule_impl(rule.id)
        self.assertEqual(removed, 1)
        self.assertFalse(Recording.objects.filter(custom_properties__rule__id=rule.id).exists())
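This test module can be run with Django's standard test runner:

python manage.py test apps.channels.tests.test_recurring_rules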
@@ -2,18 +2,27 @@ import logging, os
from rest_framework import viewsets, status
from rest_framework.response import Response
from rest_framework.views import APIView
from rest_framework.permissions import IsAuthenticated
from rest_framework.decorators import action
from drf_yasg.utils import swagger_auto_schema
from drf_yasg import openapi
from django.utils import timezone
from datetime import timedelta
from .models import EPGSource, ProgramData, EPGData  # Added ProgramData
from .serializers import ProgramDataSerializer, EPGSourceSerializer, EPGDataSerializer  # Updated serializer
from .serializers import (
    ProgramDataSerializer,
    EPGSourceSerializer,
    EPGDataSerializer,
)  # Updated serializer
from .tasks import refresh_epg_data
from apps.accounts.permissions import (
    Authenticated,
    permission_classes_by_action,
    permission_classes_by_method,
)

logger = logging.getLogger(__name__)


# ─────────────────────────────
# 1) EPG Source API (CRUD)
# ─────────────────────────────
@@ -21,30 +30,38 @@ class EPGSourceViewSet(viewsets.ModelViewSet):
    """
    API endpoint that allows EPG sources to be viewed or edited.
    """

    queryset = EPGSource.objects.all()
    serializer_class = EPGSourceSerializer
    permission_classes = [IsAuthenticated]

    def get_permissions(self):
        try:
            return [perm() for perm in permission_classes_by_action[self.action]]
        except KeyError:
            return [Authenticated()]

    def list(self, request, *args, **kwargs):
        logger.debug("Listing all EPG sources.")
        return super().list(request, *args, **kwargs)

    @action(detail=False, methods=['post'])
    @action(detail=False, methods=["post"])
    def upload(self, request):
        if 'file' not in request.FILES:
            return Response({'error': 'No file uploaded'}, status=status.HTTP_400_BAD_REQUEST)
        if "file" not in request.FILES:
            return Response(
                {"error": "No file uploaded"}, status=status.HTTP_400_BAD_REQUEST
            )

        file = request.FILES['file']
        file = request.FILES["file"]
        file_name = file.name
        file_path = os.path.join('/data/uploads/epgs', file_name)
        file_path = os.path.join("/data/uploads/epgs", file_name)

        os.makedirs(os.path.dirname(file_path), exist_ok=True)
        with open(file_path, 'wb+') as destination:
        with open(file_path, "wb+") as destination:
            for chunk in file.chunks():
                destination.write(chunk)

        new_obj_data = request.data.copy()
        new_obj_data['file_path'] = file_path
        new_obj_data["file_path"] = file_path

        serializer = self.get_serializer(data=new_obj_data)
        serializer.is_valid(raise_exception=True)
@@ -57,70 +74,111 @@ class EPGSourceViewSet(viewsets.ModelViewSet):
        instance = self.get_object()

        # Check if we're toggling is_active
        if 'is_active' in request.data and instance.is_active != request.data['is_active']:
        if (
            "is_active" in request.data
            and instance.is_active != request.data["is_active"]
        ):
            # Set appropriate status based on new is_active value
            if request.data['is_active']:
                request.data['status'] = 'idle'
            if request.data["is_active"]:
                request.data["status"] = "idle"
            else:
                request.data['status'] = 'disabled'
                request.data["status"] = "disabled"

        # Continue with regular partial update
        return super().partial_update(request, *args, **kwargs)


# ─────────────────────────────
# 2) Program API (CRUD)
# ─────────────────────────────
class ProgramViewSet(viewsets.ModelViewSet):
    """Handles CRUD operations for EPG programs"""

    queryset = ProgramData.objects.all()
    serializer_class = ProgramDataSerializer
    permission_classes = [IsAuthenticated]

    def get_permissions(self):
        try:
            return [perm() for perm in permission_classes_by_action[self.action]]
        except KeyError:
            return [Authenticated()]

    def list(self, request, *args, **kwargs):
        logger.debug("Listing all EPG programs.")
        return super().list(request, *args, **kwargs)


# ─────────────────────────────
# 3) EPG Grid View
# ─────────────────────────────
class EPGGridAPIView(APIView):
    """Returns all programs airing in the next 24 hours including currently running ones and recent ones"""

    def get_permissions(self):
        try:
            return [
                perm() for perm in permission_classes_by_method[self.request.method]
            ]
        except KeyError:
            return [Authenticated()]

    @swagger_auto_schema(
        operation_description="Retrieve programs from the previous hour, currently running and upcoming for the next 24 hours",
        responses={200: ProgramDataSerializer(many=True)}
        responses={200: ProgramDataSerializer(many=True)},
    )
    def get(self, request, format=None):
        # Use current time instead of midnight
        now = timezone.now()
        one_hour_ago = now - timedelta(hours=1)
        twenty_four_hours_later = now + timedelta(hours=24)
        logger.debug(f"EPGGridAPIView: Querying programs between {one_hour_ago} and {twenty_four_hours_later}.")
        logger.debug(
            f"EPGGridAPIView: Querying programs between {one_hour_ago} and {twenty_four_hours_later}."
        )

        # Use select_related to prefetch EPGData and include programs from the last hour
        programs = ProgramData.objects.select_related('epg').filter(
        programs = ProgramData.objects.select_related("epg").filter(
            # Programs that end after one hour ago (includes recently ended programs)
            end_time__gt=one_hour_ago,
            # AND start before the end time window
            start_time__lt=twenty_four_hours_later
            start_time__lt=twenty_four_hours_later,
        )
        count = programs.count()
        logger.debug(f"EPGGridAPIView: Found {count} program(s), including recently ended, currently running, and upcoming shows.")
        logger.debug(
            f"EPGGridAPIView: Found {count} program(s), including recently ended, currently running, and upcoming shows."
        )

        # Generate dummy programs for channels that have no EPG data
        # Generate dummy programs for channels that have no EPG data OR dummy EPG sources
        from apps.channels.models import Channel
        from apps.epg.models import EPGSource
        from django.db.models import Q

        # Get channels with no EPG data
        # Get channels with no EPG data at all (standard dummy)
        channels_without_epg = Channel.objects.filter(Q(epg_data__isnull=True))
        channels_count = channels_without_epg.count()

        # Log more detailed information about channels missing EPG data
        if channels_count > 0:
        # Get channels with custom dummy EPG sources (generate on-demand with patterns)
        channels_with_custom_dummy = Channel.objects.filter(
            epg_data__epg_source__source_type='dummy'
        ).distinct()

        # Log what we found
        without_count = channels_without_epg.count()
        custom_count = channels_with_custom_dummy.count()

        if without_count > 0:
            channel_names = [f"{ch.name} (ID: {ch.id})" for ch in channels_without_epg]
            logger.warning(f"EPGGridAPIView: Missing EPG data for these channels: {', '.join(channel_names)}")
            logger.debug(
                f"EPGGridAPIView: Channels needing standard dummy EPG: {', '.join(channel_names)}"
            )

        logger.debug(f"EPGGridAPIView: Found {channels_count} channels with no EPG data.")
        if custom_count > 0:
            channel_names = [f"{ch.name} (ID: {ch.id})" for ch in channels_with_custom_dummy]
            logger.debug(
                f"EPGGridAPIView: Channels needing custom dummy EPG: {', '.join(channel_names)}"
            )

        logger.debug(
            f"EPGGridAPIView: Found {without_count} channels needing standard dummy, {custom_count} needing custom dummy EPG."
        )

        # Serialize the regular programs
        serialized_programs = ProgramDataSerializer(programs, many=True).data
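The window filter above is the standard interval-overlap predicate: a program overlaps the grid window exactly when it ends after the window starts and starts before the window ends. In ORM-agnostic terms:

def overlaps(prog_start, prog_end, win_start, win_end):
    """True when [prog_start, prog_end) intersects [win_start, win_end)."""
    return prog_end > win_start and prog_start < win_end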
@@ -130,43 +188,122 @@ class EPGGridAPIView(APIView):
            (0, 4): [
                "Late Night with {channel} - Where insomniacs unite!",
                "The 'Why Am I Still Awake?' Show on {channel}",
                "Counting Sheep - A {channel} production for the sleepless"
                "Counting Sheep - A {channel} production for the sleepless",
            ],
            (4, 8): [
                "Dawn Patrol - Rise and shine with {channel}!",
                "Early Bird Special - Coffee not included",
                "Morning Zombies - Before coffee viewing on {channel}"
                "Morning Zombies - Before coffee viewing on {channel}",
            ],
            (8, 12): [
                "Mid-Morning Meetings - Pretend you're paying attention while watching {channel}",
                "The 'I Should Be Working' Hour on {channel}",
                "Productivity Killer - {channel}'s daytime programming"
                "Productivity Killer - {channel}'s daytime programming",
            ],
            (12, 16): [
                "Lunchtime Laziness with {channel}",
                "The Afternoon Slump - Brought to you by {channel}",
                "Post-Lunch Food Coma Theater on {channel}"
                "Post-Lunch Food Coma Theater on {channel}",
            ],
            (16, 20): [
                "Rush Hour - {channel}'s alternative to traffic",
                "The 'What's For Dinner?' Debate on {channel}",
                "Evening Escapism - {channel}'s remedy for reality"
                "Evening Escapism - {channel}'s remedy for reality",
            ],
            (20, 24): [
                "Prime Time Placeholder - {channel}'s finest not-programming",
                "The 'Netflix Was Too Complicated' Show on {channel}",
                "Family Argument Avoider - Courtesy of {channel}"
            ]
                "Family Argument Avoider - Courtesy of {channel}",
            ],
        }

        # Generate and append dummy programs
        dummy_programs = []
        for channel in channels_without_epg:
            # Use the channel UUID as tvg_id for dummy programs to match in the guide

        # Import the function from output.views
        from apps.output.views import generate_dummy_programs as gen_dummy_progs

        # Handle channels with CUSTOM dummy EPG sources (with patterns)
        for channel in channels_with_custom_dummy:
            # For dummy EPGs, ALWAYS use channel UUID to ensure unique programs per channel
            # This prevents multiple channels assigned to the same dummy EPG from showing identical data
            # Each channel gets its own unique program data even if they share the same EPG source
            dummy_tvg_id = str(channel.uuid)

            try:
                # Create programs every 4 hours for the next 24 hours
                # Get the custom dummy EPG source
                epg_source = channel.epg_data.epg_source if channel.epg_data else None

                logger.debug(f"Generating custom dummy programs for channel: {channel.name} (ID: {channel.id})")

                # Determine which name to parse based on custom properties
                name_to_parse = channel.name
                if epg_source and epg_source.custom_properties:
                    custom_props = epg_source.custom_properties
                    name_source = custom_props.get('name_source')

                    if name_source == 'stream':
                        # Get the stream index (1-based from user, convert to 0-based)
                        stream_index = custom_props.get('stream_index', 1) - 1

                        # Get streams ordered by channelstream order
                        channel_streams = channel.streams.all().order_by('channelstream__order')

                        if channel_streams.exists() and 0 <= stream_index < channel_streams.count():
                            stream = list(channel_streams)[stream_index]
                            name_to_parse = stream.name
                            logger.debug(f"Using stream name for parsing: {name_to_parse} (stream index: {stream_index})")
                        else:
                            logger.warning(f"Stream index {stream_index} not found for channel {channel.name}, falling back to channel name")
                    elif name_source == 'channel':
                        logger.debug(f"Using channel name for parsing: {name_to_parse}")

                # Generate programs using custom patterns from the dummy EPG source
                # Use the same tvg_id that will be set in the program data
                generated = gen_dummy_progs(
                    channel_id=dummy_tvg_id,
                    channel_name=name_to_parse,
                    num_days=1,
                    program_length_hours=4,
                    epg_source=epg_source
                )

                # Custom dummy should always return data (either from patterns or fallback)
                if generated:
                    logger.debug(f"Generated {len(generated)} custom dummy programs for {channel.name}")
                    # Convert generated programs to API format
                    for program in generated:
                        dummy_program = {
                            "id": f"dummy-custom-{channel.id}-{program['start_time'].hour}",
                            "epg": {"tvg_id": dummy_tvg_id, "name": channel.name},
                            "start_time": program['start_time'].isoformat(),
                            "end_time": program['end_time'].isoformat(),
                            "title": program['title'],
                            "description": program['description'],
                            "tvg_id": dummy_tvg_id,
                            "sub_title": None,
                            "custom_properties": None,
                        }
                        dummy_programs.append(dummy_program)
                else:
                    logger.warning(f"No programs generated for custom dummy EPG channel: {channel.name}")

            except Exception as e:
                logger.error(
                    f"Error creating custom dummy programs for channel {channel.name} (ID: {channel.id}): {str(e)}"
                )

        # Handle channels with NO EPG data (standard dummy with humorous descriptions)
        for channel in channels_without_epg:
            # For channels with no EPG, use UUID to ensure uniqueness (matches frontend logic)
            # The frontend uses: tvgRecord?.tvg_id ?? channel.uuid
            # Since there's no EPG data, it will fall back to UUID
            dummy_tvg_id = str(channel.uuid)

            try:
                logger.debug(f"Generating standard dummy programs for channel: {channel.name} (ID: {channel.id})")

                # Create programs every 4 hours for the next 24 hours with humorous descriptions
                for hour_offset in range(0, 24, 4):
                    # Use timedelta for time arithmetic instead of replace() to avoid hour overflow
                    start_time = now + timedelta(hours=hour_offset)
@@ -184,7 +321,9 @@ class EPGGridAPIView(APIView):
                        if start_range <= hour < end_range:
                            # Pick a description using the sum of the hour and day as seed
                            # This makes it somewhat random but consistent for the same timeslot
                            description = descriptions[(hour + day) % len(descriptions)].format(channel=channel.name)
                            description = descriptions[
                                (hour + day) % len(descriptions)
                            ].format(channel=channel.name)
                            break
                    else:
                        # Fallback description if somehow no range matches
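The (hour + day) modulo index is a cheap deterministic seed: the same timeslot always yields the same blurb without storing anything. For example:

descriptions = ["A", "B", "C"]
hour, day = 20, 3
print(descriptions[(hour + day) % len(descriptions)])  # always "C" for 20:00 on day 3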
@@ -192,29 +331,31 @@ class EPGGridAPIView(APIView):

                    # Create a dummy program in the same format as regular programs
                    dummy_program = {
                        'id': f"dummy-{channel.id}-{hour_offset}",  # Create a unique ID
                        'epg': {
                            'tvg_id': dummy_tvg_id,
                            'name': channel.name
                        },
                        'start_time': start_time.isoformat(),
                        'end_time': end_time.isoformat(),
                        'title': f"{channel.name}",
                        'description': description,
                        'tvg_id': dummy_tvg_id,
                        'sub_title': None,
                        'custom_properties': None
                        "id": f"dummy-standard-{channel.id}-{hour_offset}",
                        "epg": {"tvg_id": dummy_tvg_id, "name": channel.name},
                        "start_time": start_time.isoformat(),
                        "end_time": end_time.isoformat(),
                        "title": f"{channel.name}",
                        "description": description,
                        "tvg_id": dummy_tvg_id,
                        "sub_title": None,
                        "custom_properties": None,
                    }
                    dummy_programs.append(dummy_program)

            except Exception as e:
                logger.error(f"Error creating dummy programs for channel {channel.name} (ID: {channel.id}): {str(e)}")
                logger.error(
                    f"Error creating standard dummy programs for channel {channel.name} (ID: {channel.id}): {str(e)}"
                )

        # Combine regular and dummy programs
        all_programs = list(serialized_programs) + dummy_programs
        logger.debug(f"EPGGridAPIView: Returning {len(all_programs)} total programs (including {len(dummy_programs)} dummy programs).")
        logger.debug(
            f"EPGGridAPIView: Returning {len(all_programs)} total programs (including {len(dummy_programs)} dummy programs)."
        )

        return Response({"data": all_programs}, status=status.HTTP_200_OK)

        return Response({'data': all_programs}, status=status.HTTP_200_OK)

# ─────────────────────────────
# 4) EPG Import View
@@ -222,15 +363,41 @@ class EPGGridAPIView(APIView):
class EPGImportAPIView(APIView):
    """Triggers an EPG data refresh"""

    def get_permissions(self):
        try:
            return [
                perm() for perm in permission_classes_by_method[self.request.method]
            ]
        except KeyError:
            return [Authenticated()]

    @swagger_auto_schema(
        operation_description="Triggers an EPG data import",
        responses={202: "EPG data import initiated"}
        responses={202: "EPG data import initiated"},
    )
    def post(self, request, format=None):
        logger.info("EPGImportAPIView: Received request to import EPG data.")
        refresh_epg_data.delay(request.data.get('id', None))  # Trigger Celery task
        epg_id = request.data.get("id", None)

        # Check if this is a dummy EPG source
        try:
            from .models import EPGSource
            epg_source = EPGSource.objects.get(id=epg_id)
            if epg_source.source_type == 'dummy':
                logger.info(f"EPGImportAPIView: Skipping refresh for dummy EPG source {epg_id}")
                return Response(
                    {"success": False, "message": "Dummy EPG sources do not require refreshing."},
                    status=status.HTTP_400_BAD_REQUEST,
                )
        except EPGSource.DoesNotExist:
            pass  # Let the task handle the missing source

        refresh_epg_data.delay(epg_id)  # Trigger Celery task
        logger.info("EPGImportAPIView: Task dispatched to refresh EPG data.")
        return Response({'success': True, 'message': 'EPG data import initiated.'}, status=status.HTTP_202_ACCEPTED)
        return Response(
            {"success": True, "message": "EPG data import initiated."},
            status=status.HTTP_202_ACCEPTED,
        )


# ─────────────────────────────
@@ -240,6 +407,13 @@ class EPGDataViewSet(viewsets.ReadOnlyModelViewSet):
    """
    API endpoint that allows EPGData objects to be viewed.
    """

    queryset = EPGData.objects.all()
    serializer_class = EPGDataSerializer
    permission_classes = [IsAuthenticated]

    def get_permissions(self):
        try:
            return [perm() for perm in permission_classes_by_action[self.action]]
        except KeyError:
            return [Authenticated()]
@@ -0,0 +1,18 @@
# Generated by Django 5.2.4 on 2025-09-02 14:30

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('epg', '0014_epgsource_extracted_file_path'),
    ]

    operations = [
        migrations.AlterField(
            model_name='programdata',
            name='custom_properties',
            field=models.JSONField(blank=True, default=dict, null=True),
        ),
    ]

apps/epg/migrations/0016_epgdata_icon_url.py (new file, 18 lines)
@@ -0,0 +1,18 @@
# Generated by Django 5.2.4 on 2025-09-16 22:01

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('epg', '0015_alter_programdata_custom_properties'),
    ]

    operations = [
        migrations.AddField(
            model_name='epgdata',
            name='icon_url',
            field=models.URLField(blank=True, max_length=500, null=True),
        ),
    ]

apps/epg/migrations/0017_alter_epgsource_url.py (new file, 18 lines)
@@ -0,0 +1,18 @@
# Generated by Django 5.2.4 on 2025-09-24 21:07

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('epg', '0016_epgdata_icon_url'),
    ]

    operations = [
        migrations.AlterField(
            model_name='epgsource',
            name='url',
            field=models.URLField(blank=True, max_length=1000, null=True),
        ),
    ]
@@ -0,0 +1,23 @@
# Generated by Django 5.2.4 on 2025-10-17 17:02

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('epg', '0017_alter_epgsource_url'),
    ]

    operations = [
        migrations.AddField(
            model_name='epgsource',
            name='custom_properties',
            field=models.JSONField(blank=True, default=dict, help_text='Custom properties for dummy EPG configuration (regex patterns, timezone, duration, etc.)', null=True),
        ),
        migrations.AlterField(
            model_name='epgsource',
            name='source_type',
            field=models.CharField(choices=[('xmltv', 'XMLTV URL'), ('schedules_direct', 'Schedules Direct API'), ('dummy', 'Custom Dummy EPG')], max_length=20),
        ),
    ]

apps/epg/migrations/0019_alter_programdata_sub_title.py (new file, 18 lines)
@@ -0,0 +1,18 @@
# Generated by Django 5.2.4 on 2025-10-22 21:59

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('epg', '0018_epgsource_custom_properties_and_more'),
    ]

    operations = [
        migrations.AlterField(
            model_name='programdata',
            name='sub_title',
            field=models.TextField(blank=True, null=True),
        ),
    ]
@@ -0,0 +1,119 @@
# Generated migration to replace {time} placeholders with {starttime}

import re
from django.db import migrations


def migrate_time_placeholders(apps, schema_editor):
    """
    Replace {time} with {starttime} and {time24} with {starttime24}
    in all dummy EPG source custom_properties templates.
    """
    EPGSource = apps.get_model('epg', 'EPGSource')

    # Fields that contain templates with placeholders
    template_fields = [
        'title_template',
        'description_template',
        'upcoming_title_template',
        'upcoming_description_template',
        'ended_title_template',
        'ended_description_template',
        'channel_logo_url',
        'program_poster_url',
    ]

    # Get all dummy EPG sources
    dummy_sources = EPGSource.objects.filter(source_type='dummy')

    updated_count = 0
    for source in dummy_sources:
        if not source.custom_properties:
            continue

        modified = False
        custom_props = source.custom_properties.copy()

        for field in template_fields:
            if field in custom_props and custom_props[field]:
                original_value = custom_props[field]

                # Replace {time24} first (before {time}) to avoid double replacement
                # e.g., {time24} shouldn't become {starttime24} via {time} -> {starttime}
                new_value = original_value
                new_value = re.sub(r'\{time24\}', '{starttime24}', new_value)
                new_value = re.sub(r'\{time\}', '{starttime}', new_value)

                if new_value != original_value:
                    custom_props[field] = new_value
                    modified = True

        if modified:
            source.custom_properties = custom_props
            source.save(update_fields=['custom_properties'])
            updated_count += 1

    if updated_count > 0:
        print(f"Migration complete: Updated {updated_count} dummy EPG source(s) with new placeholder names.")
    else:
        print("No dummy EPG sources needed placeholder updates.")


def reverse_migration(apps, schema_editor):
    """
    Reverse the migration by replacing {starttime} back to {time}.
    """
    EPGSource = apps.get_model('epg', 'EPGSource')

    template_fields = [
        'title_template',
        'description_template',
        'upcoming_title_template',
        'upcoming_description_template',
        'ended_title_template',
        'ended_description_template',
        'channel_logo_url',
        'program_poster_url',
    ]

    dummy_sources = EPGSource.objects.filter(source_type='dummy')

    updated_count = 0
    for source in dummy_sources:
        if not source.custom_properties:
            continue

        modified = False
        custom_props = source.custom_properties.copy()

        for field in template_fields:
            if field in custom_props and custom_props[field]:
                original_value = custom_props[field]

                # Reverse the replacements
                new_value = original_value
                new_value = re.sub(r'\{starttime24\}', '{time24}', new_value)
                new_value = re.sub(r'\{starttime\}', '{time}', new_value)

                if new_value != original_value:
                    custom_props[field] = new_value
                    modified = True

        if modified:
            source.custom_properties = custom_props
            source.save(update_fields=['custom_properties'])
            updated_count += 1

    if updated_count > 0:
        print(f"Reverse migration complete: Reverted {updated_count} dummy EPG source(s) to old placeholder names.")


class Migration(migrations.Migration):

    dependencies = [
        ('epg', '0019_alter_programdata_sub_title'),
    ]

    operations = [
        migrations.RunPython(migrate_time_placeholders, reverse_migration),
    ]
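To see the data migration's effect on a sample template (a standalone check, not project code):

import re

template = "{channel} news at {time} ({time24})"
out = re.sub(r'\{time24\}', '{starttime24}', template)
out = re.sub(r'\{time\}', '{starttime}', out)
print(out)  # {channel} news at {starttime} ({starttime24})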
@@ -8,6 +8,7 @@ class EPGSource(models.Model):
    SOURCE_TYPE_CHOICES = [
        ('xmltv', 'XMLTV URL'),
        ('schedules_direct', 'Schedules Direct API'),
        ('dummy', 'Custom Dummy EPG'),
    ]

    STATUS_IDLE = 'idle'
@@ -28,7 +29,7 @@ class EPGSource(models.Model):

    name = models.CharField(max_length=255, unique=True)
    source_type = models.CharField(max_length=20, choices=SOURCE_TYPE_CHOICES)
    url = models.URLField(blank=True, null=True)  # For XMLTV
    url = models.URLField(max_length=1000, blank=True, null=True)  # For XMLTV
    api_key = models.CharField(max_length=255, blank=True, null=True)  # For Schedules Direct
    is_active = models.BooleanField(default=True)
    file_path = models.CharField(max_length=1024, blank=True, null=True)
@@ -38,6 +39,12 @@ class EPGSource(models.Model):
    refresh_task = models.ForeignKey(
        PeriodicTask, on_delete=models.SET_NULL, null=True, blank=True
    )
    custom_properties = models.JSONField(
        default=dict,
        blank=True,
        null=True,
        help_text="Custom properties for dummy EPG configuration (regex patterns, timezone, duration, etc.)"
    )
    status = models.CharField(
        max_length=20,
        choices=STATUS_CHOICES,
@@ -127,6 +134,7 @@ class EPGData(models.Model):
    # and a name (which might simply be the tvg_id if no real channel exists).
    tvg_id = models.CharField(max_length=255, null=True, blank=True, db_index=True)
    name = models.CharField(max_length=255)
    icon_url = models.URLField(max_length=500, null=True, blank=True)
    epg_source = models.ForeignKey(
        EPGSource,
        on_delete=models.CASCADE,
@@ -147,10 +155,10 @@ class ProgramData(models.Model):
    start_time = models.DateTimeField()
    end_time = models.DateTimeField()
    title = models.CharField(max_length=255)
    sub_title = models.CharField(max_length=255, blank=True, null=True)
    sub_title = models.TextField(blank=True, null=True)
    description = models.TextField(blank=True, null=True)
    tvg_id = models.CharField(max_length=255, null=True, blank=True)
    custom_properties = models.TextField(null=True, blank=True)
    custom_properties = models.JSONField(default=dict, blank=True, null=True)

    def __str__(self):
        return f"{self.title} ({self.start_time} - {self.end_time})"
@@ -1,10 +1,17 @@
from core.utils import validate_flexible_url
from rest_framework import serializers
from .models import EPGSource, EPGData, ProgramData
from apps.channels.models import Channel

class EPGSourceSerializer(serializers.ModelSerializer):
    epg_data_ids = serializers.SerializerMethodField()
    epg_data_count = serializers.SerializerMethodField()
    read_only_fields = ['created_at', 'updated_at']
    url = serializers.CharField(
        required=False,
        allow_blank=True,
        allow_null=True,
        validators=[validate_flexible_url]
    )

    class Meta:
        model = EPGSource
@@ -21,11 +28,13 @@ class EPGSourceSerializer(serializers.ModelSerializer):
            'last_message',
            'created_at',
            'updated_at',
            'epg_data_ids'
            'custom_properties',
            'epg_data_count'
        ]

    def get_epg_data_ids(self, obj):
        return list(obj.epgs.values_list('id', flat=True))
    def get_epg_data_count(self, obj):
        """Return the count of EPG data entries instead of all IDs to prevent large payloads"""
        return obj.epgs.count()

class ProgramDataSerializer(serializers.ModelSerializer):
    class Meta:
@@ -45,5 +54,6 @@ class EPGDataSerializer(serializers.ModelSerializer):
            'id',
            'tvg_id',
            'name',
            'icon_url',
            'epg_source',
        ]
@@ -1,9 +1,9 @@
from django.db.models.signals import post_save, post_delete, pre_save
from django.dispatch import receiver
from .models import EPGSource
from .models import EPGSource, EPGData
from .tasks import refresh_epg_data, delete_epg_refresh_task_by_id
from django_celery_beat.models import PeriodicTask, IntervalSchedule
from core.utils import is_protected_path
from core.utils import is_protected_path, send_websocket_update
import json
import logging
import os
@@ -12,15 +12,77 @@ logger = logging.getLogger(__name__)

@receiver(post_save, sender=EPGSource)
def trigger_refresh_on_new_epg_source(sender, instance, created, **kwargs):
    # Trigger refresh only if the source is newly created and active
    if created and instance.is_active:
    # Trigger refresh only if the source is newly created, active, and not a dummy EPG
    if created and instance.is_active and instance.source_type != 'dummy':
        refresh_epg_data.delay(instance.id)

@receiver(post_save, sender=EPGSource)
def create_dummy_epg_data(sender, instance, created, **kwargs):
    """
    Automatically create EPGData for dummy EPG sources when they are created.
    This allows channels to be assigned to dummy EPGs immediately without
    requiring a refresh first.
    """
    if instance.source_type == 'dummy':
        # Ensure dummy EPGs always have idle status and no status message
        if instance.status != EPGSource.STATUS_IDLE or instance.last_message:
            instance.status = EPGSource.STATUS_IDLE
            instance.last_message = None
            instance.save(update_fields=['status', 'last_message'])

        # Create a URL-friendly tvg_id from the dummy EPG name
        # Replace spaces and special characters with underscores
        friendly_tvg_id = instance.name.replace(' ', '_').replace('-', '_')
        # Remove any characters that aren't alphanumeric or underscores
        friendly_tvg_id = ''.join(c for c in friendly_tvg_id if c.isalnum() or c == '_')
        # Convert to lowercase for consistency
        friendly_tvg_id = friendly_tvg_id.lower()
        # Prefix with 'dummy_' to make it clear this is a dummy EPG
        friendly_tvg_id = f"dummy_{friendly_tvg_id}"

        # Create or update the EPGData record
        epg_data, data_created = EPGData.objects.get_or_create(
            tvg_id=friendly_tvg_id,
            epg_source=instance,
            defaults={
                'name': instance.name,
                'icon_url': None
            }
        )

        # Update name if it changed and record already existed
        if not data_created and epg_data.name != instance.name:
            epg_data.name = instance.name
            epg_data.save(update_fields=['name'])

        if data_created:
            logger.info(f"Auto-created EPGData for dummy EPG source: {instance.name} (ID: {instance.id})")

            # Send websocket update to notify frontend that EPG data has been created
            # This allows the channel form to immediately show the new dummy EPG without refreshing
            send_websocket_update('updates', 'update', {
                'type': 'epg_data_created',
                'source_id': instance.id,
                'source_name': instance.name,
                'epg_data_id': epg_data.id
            })
        else:
            logger.debug(f"EPGData already exists for dummy EPG source: {instance.name} (ID: {instance.id})")

@receiver(post_save, sender=EPGSource)
def create_or_update_refresh_task(sender, instance, **kwargs):
    """
    Create or update a Celery Beat periodic task when an EPGSource is created/updated.
    Skip creating tasks for dummy EPG sources as they don't need refreshing.
    """
    # Skip task creation for dummy EPGs
    if instance.source_type == 'dummy':
        # If there's an existing task, disable it
        if instance.refresh_task:
            instance.refresh_task.enabled = False
            instance.refresh_task.save(update_fields=['enabled'])
        return

    task_name = f"epg_source-refresh-{instance.id}"
    interval, _ = IntervalSchedule.objects.get_or_create(
        every=int(instance.refresh_interval),
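The tvg_id derivation above is a simple slug pipeline; a standalone rendering of the same steps shows its behavior on a sample name:

def friendly_tvg_id(name: str) -> str:
    # spaces and hyphens become underscores, everything else non-alphanumeric drops
    s = name.replace(' ', '_').replace('-', '_')
    s = ''.join(c for c in s if c.isalnum() or c == '_')
    return f"dummy_{s.lower()}"

print(friendly_tvg_id("My Dummy-EPG #1"))  # dummy_my_dummy_epg_1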
@@ -80,7 +142,14 @@ def delete_refresh_task(sender, instance, **kwargs):
def update_status_on_active_change(sender, instance, **kwargs):
    """
    When an EPGSource's is_active field changes, update the status accordingly.
    For dummy EPGs, always ensure status is idle and no status message.
    """
    # Dummy EPGs should always be idle with no status message
    if instance.source_type == 'dummy':
        instance.status = EPGSource.STATUS_IDLE
        instance.last_message = None
        return

    if instance.pk:  # Only for existing records, not new ones
        try:
            # Get the current record from the database
@@ -29,6 +29,25 @@ from core.utils import acquire_task_lock, release_task_lock, send_websocket_upda
logger = logging.getLogger(__name__)


def validate_icon_url_fast(icon_url, max_length=None):
    """
    Fast validation for icon URLs during parsing.
    Returns None if URL is too long, original URL otherwise.
    If max_length is None, gets it dynamically from the EPGData model field.
    """
    if max_length is None:
        # Get max_length dynamically from the model field
        max_length = EPGData._meta.get_field('icon_url').max_length

    if icon_url and len(icon_url) > max_length:
        logger.warning(f"Icon URL too long ({len(icon_url)} > {max_length}), skipping: {icon_url[:100]}...")
        return None
    return icon_url


MAX_EXTRACT_CHUNK_SIZE = 65536  # 64kb (base2)


def send_epg_update(source_id, action, progress, **kwargs):
    """Send WebSocket update about EPG download/parsing progress"""
    # Start with the base data dictionary
@@ -114,8 +133,9 @@ def delete_epg_refresh_task_by_id(epg_id):
@shared_task
def refresh_all_epg_data():
    logger.info("Starting refresh_epg_data task.")
    active_sources = EPGSource.objects.filter(is_active=True)
    logger.debug(f"Found {active_sources.count()} active EPGSource(s).")
    # Exclude dummy EPG sources from refresh - they don't need refreshing
    active_sources = EPGSource.objects.filter(is_active=True).exclude(source_type='dummy')
    logger.debug(f"Found {active_sources.count()} active EPGSource(s) (excluding dummy EPGs).")

    for source in active_sources:
        refresh_epg_data(source.id)
@@ -161,6 +181,13 @@ def refresh_epg_data(source_id):
            gc.collect()
            return

        # Skip refresh for dummy EPG sources - they don't need refreshing
        if source.source_type == 'dummy':
            logger.info(f"Skipping refresh for dummy EPG source {source.name} (ID: {source_id})")
            release_task_lock('refresh_epg_data', source_id)
            gc.collect()
            return

        # Continue with the normal processing...
        logger.info(f"Processing EPGSource: {source.name} (type: {source.source_type})")
        if source.source_type == 'xmltv':
@@ -186,6 +213,12 @@ def refresh_epg_data(source_id):
            fetch_schedules_direct(source)

        source.save(update_fields=['updated_at'])
        # After successful EPG refresh, evaluate DVR series rules to schedule new episodes
        try:
            from apps.channels.tasks import evaluate_series_rules
            evaluate_series_rules.delay()
        except Exception:
            pass
    except Exception as e:
        logger.error(f"Error in refresh_epg_data for source {source_id}: {e}", exc_info=True)
        try:
@@ -641,7 +674,11 @@ def extract_compressed_file(file_path, output_path=None, delete_original=False):
                # Reset file pointer and extract the content
                gz_file.seek(0)
                with open(extracted_path, 'wb') as out_file:
                    out_file.write(gz_file.read())
                    while True:
                        chunk = gz_file.read(MAX_EXTRACT_CHUNK_SIZE)
                        if not chunk or len(chunk) == 0:
                            break
                        out_file.write(chunk)
        except Exception as e:
            logger.error(f"Error extracting GZIP file: {e}", exc_info=True)
            return None
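The chunked loop bounds memory for arbitrarily large archives, replacing the old whole-file read. The stdlib's shutil.copyfileobj implements the same loop, so an equivalent formulation (a sketch, not the project's code) is:

import gzip
import shutil

def extract_gz(src_path: str, dst_path: str, chunk=65536):
    """Stream-decompress src_path to dst_path without loading it all into RAM."""
    with gzip.open(src_path, 'rb') as gz, open(dst_path, 'wb') as out:
        shutil.copyfileobj(gz, out, length=chunk)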
@@ -685,9 +722,13 @@ def extract_compressed_file(file_path, output_path=None, delete_original=False):
                return None

            # Extract the first XML file
            xml_content = zip_file.read(xml_files[0])
            with open(extracted_path, 'wb') as out_file:
                out_file.write(xml_content)
                with zip_file.open(xml_files[0], "r") as xml_file:
                    while True:
                        chunk = xml_file.read(MAX_EXTRACT_CHUNK_SIZE)
                        if not chunk or len(chunk) == 0:
                            break
                        out_file.write(chunk)

            logger.info(f"Successfully extracted zip file to: {extracted_path}")
@@ -815,6 +856,7 @@ def parse_channels_only(source):
    processed_channels = 0
    batch_size = 500  # Process in batches to limit memory usage
    progress = 0  # Initialize progress variable here
    icon_url_max_length = EPGData._meta.get_field('icon_url').max_length  # Get max length for icon_url field

    # Track memory at key points
    if process:
@@ -843,7 +885,7 @@ def parse_channels_only(source):

        # Change iterparse to look for both channel and programme elements
        logger.debug(f"Creating iterparse context for channels and programmes")
        channel_parser = etree.iterparse(source_file, events=('end',), tag=('channel', 'programme'), remove_blank_text=True)
        channel_parser = etree.iterparse(source_file, events=('end',), tag=('channel', 'programme'), remove_blank_text=True, recover=True)
        if process:
            logger.debug(f"[parse_channels_only] Memory after creating iterparse: {process.memory_info().rss / 1024 / 1024:.2f} MB")
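recover=True is an lxml-specific option that lets the parser tolerate malformed XML instead of aborting; combined with clearing elements after use, it is the usual pattern for bounded-memory streaming. A minimal sketch of that pattern:

from lxml import etree

def iter_channel_ids(path):
    """Stream channel IDs from a large XMLTV file without building the whole tree."""
    for _, elem in etree.iterparse(path, events=('end',), tag='channel', recover=True):
        yield elem.get('id')
        elem.clear()  # free the element's subtree
        # also drop preceding siblings the parser keeps around
        while elem.getprevious() is not None:
            del elem.getparent()[0]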
@@ -857,10 +899,15 @@ def parse_channels_only(source):
                tvg_id = elem.get('id', '').strip()
                if tvg_id:
                    display_name = None
                    icon_url = None
                    for child in elem:
                        if child.tag == 'display-name' and child.text:
                        if display_name is None and child.tag == 'display-name' and child.text:
                            display_name = child.text.strip()
                            break
                        elif child.tag == 'icon':
                            raw_icon_url = child.get('src', '').strip()
                            icon_url = validate_icon_url_fast(raw_icon_url, icon_url_max_length)
                        if display_name and icon_url:
                            break  # No need to continue if we have both

                    if not display_name:
                        display_name = tvg_id
@@ -878,17 +925,24 @@ def parse_channels_only(source):
                        epgs_to_create.append(EPGData(
                            tvg_id=tvg_id,
                            name=display_name,
                            icon_url=icon_url,
                            epg_source=source,
                        ))
                        logger.debug(f"[parse_channels_only] Added new channel to epgs_to_create 1: {tvg_id} - {display_name}")
                        processed_channels += 1
                        continue

                    # We use the cached object to check if the name has changed
                    # We use the cached object to check if the name or icon_url has changed
                    epg_obj = existing_epgs[tvg_id]
                    needs_update = False
                    if epg_obj.name != display_name:
                        # Only update if the name actually changed
                        epg_obj.name = display_name
                        needs_update = True
                    if epg_obj.icon_url != icon_url:
                        epg_obj.icon_url = icon_url
                        needs_update = True

                    if needs_update:
                        epgs_to_update.append(epg_obj)
                        logger.debug(f"[parse_channels_only] Added channel to update to epgs_to_update: {tvg_id} - {display_name}")
                    else:
@@ -899,6 +953,7 @@ def parse_channels_only(source):
                        epgs_to_create.append(EPGData(
                            tvg_id=tvg_id,
                            name=display_name,
                            icon_url=icon_url,
                            epg_source=source,
                        ))
                        logger.debug(f"[parse_channels_only] Added new channel to epgs_to_create 2: {tvg_id} - {display_name}")
@@ -921,7 +976,7 @@ def parse_channels_only(source):
                        logger.info(f"[parse_channels_only] Bulk updating {len(epgs_to_update)} EPG entries")
                        if process:
                            logger.info(f"[parse_channels_only] Memory before bulk_update: {process.memory_info().rss / 1024 / 1024:.2f} MB")
                        EPGData.objects.bulk_update(epgs_to_update, ["name"])
                        EPGData.objects.bulk_update(epgs_to_update, ["name", "icon_url"])
                        if process:
                            logger.info(f"[parse_channels_only] Memory after bulk_update: {process.memory_info().rss / 1024 / 1024:.2f} MB")
                        epgs_to_update = []
@@ -988,7 +1043,7 @@ def parse_channels_only(source):
            logger.debug(f"[parse_channels_only] Created final batch of {len(epgs_to_create)} EPG entries")

        if epgs_to_update:
            EPGData.objects.bulk_update(epgs_to_update, ["name"])
            EPGData.objects.bulk_update(epgs_to_update, ["name", "icon_url"])
            logger.debug(f"[parse_channels_only] Updated final batch of {len(epgs_to_update)} EPG entries")
        if process:
            logger.debug(f"[parse_channels_only] Memory after final batch creation: {process.memory_info().rss / 1024 / 1024:.2f} MB")
@@ -1102,6 +1157,12 @@ def parse_programs_for_tvg_id(epg_id):
    epg = EPGData.objects.get(id=epg_id)
    epg_source = epg.epg_source

    # Skip program parsing for dummy EPG sources - they don't have program data files
    if epg_source.source_type == 'dummy':
        logger.info(f"Skipping program parsing for dummy EPG source {epg_source.name} (ID: {epg_id})")
        release_task_lock('parse_epg_programs', epg_id)
        return

    if not Channel.objects.filter(epg_data=epg).exists():
        logger.info(f"No channels matched to EPG {epg.tvg_id}")
        release_task_lock('parse_epg_programs', epg_id)
@@ -1195,7 +1256,7 @@ def parse_programs_for_tvg_id(epg_id):
        source_file = open(file_path, 'rb')

        # Stream parse the file using lxml's iterparse
        program_parser = etree.iterparse(source_file, events=('end',), tag='programme', remove_blank_text=True)
        program_parser = etree.iterparse(source_file, events=('end',), tag='programme', remove_blank_text=True, recover=True)

        for _, elem in program_parser:
            if elem.get('channel') == epg.tvg_id:
@@ -1224,10 +1285,7 @@ def parse_programs_for_tvg_id(epg_id):

                if custom_props:
                    logger.trace(f"Number of custom properties: {len(custom_props)}")
                    try:
                        custom_properties_json = json.dumps(custom_props)
                    except Exception as e:
                        logger.error(f"Error serializing custom properties to JSON: {e}", exc_info=True)
                        custom_properties_json = custom_props

                programs_to_create.append(ProgramData(
                    epg=epg,
@@ -1612,6 +1670,11 @@ def extract_custom_properties(prog):
    if categories:
        custom_props['categories'] = categories

    # Extract keywords (new)
    keywords = [kw.text.strip() for kw in prog.findall('keyword') if kw.text and kw.text.strip()]
    if keywords:
        custom_props['keywords'] = keywords

    # Extract episode numbers
    for ep_num in prog.findall('episode-num'):
        system = ep_num.get('system', '')
@@ -1637,6 +1700,9 @@ def extract_custom_properties(prog):
        elif system == 'dd_progid' and ep_num.text:
            # Store the dd_progid format
            custom_props['dd_progid'] = ep_num.text.strip()
        # Add support for other systems like thetvdb.com, themoviedb.org, imdb.com
        elif system in ['thetvdb.com', 'themoviedb.org', 'imdb.com'] and ep_num.text:
            custom_props[f'{system}_id'] = ep_num.text.strip()

    # Extract ratings more efficiently
    rating_elem = prog.find('rating')
@@ -1647,37 +1713,172 @@ def extract_custom_properties(prog):
        if rating_elem.get('system'):
            custom_props['rating_system'] = rating_elem.get('system')

    # Extract star ratings (new)
    star_ratings = []
    for star_rating in prog.findall('star-rating'):
        value_elem = star_rating.find('value')
        if value_elem is not None and value_elem.text:
            rating_data = {'value': value_elem.text.strip()}
            if star_rating.get('system'):
                rating_data['system'] = star_rating.get('system')
            star_ratings.append(rating_data)
    if star_ratings:
        custom_props['star_ratings'] = star_ratings

    # Extract credits more efficiently
    credits_elem = prog.find('credits')
    if credits_elem is not None:
        credits = {}
        for credit_type in ['director', 'actor', 'writer', 'presenter', 'producer']:
            names = [e.text.strip() for e in credits_elem.findall(credit_type) if e.text and e.text.strip()]
            if names:
                credits[credit_type] = names
        for credit_type in ['director', 'actor', 'writer', 'adapter', 'producer', 'composer', 'editor', 'presenter', 'commentator', 'guest']:
            if credit_type == 'actor':
                # Handle actors with roles and guest status
                actors = []
                for actor_elem in credits_elem.findall('actor'):
                    if actor_elem.text and actor_elem.text.strip():
                        actor_data = {'name': actor_elem.text.strip()}
                        if actor_elem.get('role'):
                            actor_data['role'] = actor_elem.get('role')
                        if actor_elem.get('guest') == 'yes':
                            actor_data['guest'] = True
                        actors.append(actor_data)
                if actors:
                    credits['actor'] = actors
            else:
                names = [e.text.strip() for e in credits_elem.findall(credit_type) if e.text and e.text.strip()]
                if names:
                    credits[credit_type] = names
        if credits:
            custom_props['credits'] = credits
    # Extract other common program metadata
    date_elem = prog.find('date')
    if date_elem is not None and date_elem.text:
        custom_props['year'] = date_elem.text.strip()[:4]  # Just the year part
        custom_props['date'] = date_elem.text.strip()

    country_elem = prog.find('country')
    if country_elem is not None and country_elem.text:
        custom_props['country'] = country_elem.text.strip()

    # Extract language information (new)
    language_elem = prog.find('language')
    if language_elem is not None and language_elem.text:
        custom_props['language'] = language_elem.text.strip()

    orig_language_elem = prog.find('orig-language')
    if orig_language_elem is not None and orig_language_elem.text:
        custom_props['original_language'] = orig_language_elem.text.strip()

    # Extract length (new)
    length_elem = prog.find('length')
    if length_elem is not None and length_elem.text:
        try:
            length_value = int(length_elem.text.strip())
            length_units = length_elem.get('units', 'minutes')
            custom_props['length'] = {'value': length_value, 'units': length_units}
        except ValueError:
            pass

    # Extract video information (new)
    video_elem = prog.find('video')
    if video_elem is not None:
        video_info = {}
        for video_attr in ['present', 'colour', 'aspect', 'quality']:
            attr_elem = video_elem.find(video_attr)
            if attr_elem is not None and attr_elem.text:
                video_info[video_attr] = attr_elem.text.strip()
        if video_info:
            custom_props['video'] = video_info

    # Extract audio information (new)
    audio_elem = prog.find('audio')
    if audio_elem is not None:
        audio_info = {}
        for audio_attr in ['present', 'stereo']:
            attr_elem = audio_elem.find(audio_attr)
            if attr_elem is not None and attr_elem.text:
                audio_info[audio_attr] = attr_elem.text.strip()
        if audio_info:
            custom_props['audio'] = audio_info

    # Extract subtitles information (new)
    subtitles = []
    for subtitle_elem in prog.findall('subtitles'):
        subtitle_data = {}
        if subtitle_elem.get('type'):
            subtitle_data['type'] = subtitle_elem.get('type')
        lang_elem = subtitle_elem.find('language')
        if lang_elem is not None and lang_elem.text:
            subtitle_data['language'] = lang_elem.text.strip()
        if subtitle_data:
            subtitles.append(subtitle_data)

    if subtitles:
        custom_props['subtitles'] = subtitles
    # Extract reviews (new)
    reviews = []
    for review_elem in prog.findall('review'):
        if review_elem.text and review_elem.text.strip():
            review_data = {'content': review_elem.text.strip()}
            if review_elem.get('type'):
                review_data['type'] = review_elem.get('type')
            if review_elem.get('source'):
                review_data['source'] = review_elem.get('source')
            if review_elem.get('reviewer'):
                review_data['reviewer'] = review_elem.get('reviewer')
            reviews.append(review_data)
    if reviews:
        custom_props['reviews'] = reviews

    # Extract images (new)
    images = []
    for image_elem in prog.findall('image'):
        if image_elem.text and image_elem.text.strip():
            image_data = {'url': image_elem.text.strip()}
            for attr in ['type', 'size', 'orient', 'system']:
                if image_elem.get(attr):
                    image_data[attr] = image_elem.get(attr)
            images.append(image_data)
    if images:
        custom_props['images'] = images

    icon_elem = prog.find('icon')
    if icon_elem is not None and icon_elem.get('src'):
        custom_props['icon'] = icon_elem.get('src')

    # Simpler approach for boolean flags
    for kw in ['previously-shown', 'premiere', 'new', 'live']:
    # Simpler approach for boolean flags - expanded list
    for kw in ['previously-shown', 'premiere', 'new', 'live', 'last-chance']:
        if prog.find(kw) is not None:
            custom_props[kw.replace('-', '_')] = True
# Extract premiere and last-chance text content if available
|
||||
premiere_elem = prog.find('premiere')
|
||||
if premiere_elem is not None:
|
||||
custom_props['premiere'] = True
|
||||
if premiere_elem.text and premiere_elem.text.strip():
|
||||
custom_props['premiere_text'] = premiere_elem.text.strip()
|
||||
|
||||
last_chance_elem = prog.find('last-chance')
|
||||
if last_chance_elem is not None:
|
||||
custom_props['last_chance'] = True
|
||||
if last_chance_elem.text and last_chance_elem.text.strip():
|
||||
custom_props['last_chance_text'] = last_chance_elem.text.strip()
|
||||
|
||||
# Extract previously-shown details
|
||||
prev_shown_elem = prog.find('previously-shown')
|
||||
if prev_shown_elem is not None:
|
||||
custom_props['previously_shown'] = True
|
||||
prev_shown_data = {}
|
||||
if prev_shown_elem.get('start'):
|
||||
prev_shown_data['start'] = prev_shown_elem.get('start')
|
||||
if prev_shown_elem.get('channel'):
|
||||
prev_shown_data['channel'] = prev_shown_elem.get('channel')
|
||||
if prev_shown_data:
|
||||
custom_props['previously_shown_details'] = prev_shown_data
|
||||
|
||||
return custom_props
|
||||
|
||||
|
||||
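Note: for orientation, a rough sketch (not repository code, values invented) of the kind of custom_props dictionary the extraction above yields for a typical XMLTV <programme>; which keys appear depends entirely on which child elements are present:

custom_props = {
    'credits': {'director': ['Jane Doe'], 'actor': [{'name': 'John Roe', 'role': 'Detective'}]},
    'year': '2024',
    'date': '20240115',
    'country': 'US',
    'language': 'en',
    'length': {'value': 60, 'units': 'minutes'},
    'video': {'aspect': '16:9', 'quality': 'HDTV'},
    'audio': {'stereo': 'stereo'},
    'previously_shown': True,
    'icon': 'https://example.com/poster.jpg',  # hypothetical URL
}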


def clear_element(elem):
    """Clear an XML element and its parent to free memory."""
    try:
@@ -1756,3 +1957,20 @@ def detect_file_format(file_path=None, content=None):
    # If we reach here, we couldn't reliably determine the format
    return format_type, is_compressed, file_extension


def generate_dummy_epg(source):
    """
    DEPRECATED: This function is no longer used.

    Dummy EPG programs are now generated on-demand when they are requested
    (during XMLTV export or EPG grid display), rather than being pre-generated
    and stored in the database.

    See: apps/output/views.py - generate_custom_dummy_programs()

    This function remains for backward compatibility but should not be called.
    """
    logger.warning(f"generate_dummy_epg() called for {source.name} but this function is deprecated. "
                   f"Dummy EPG programs are now generated on-demand.")
    return True
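Note: a minimal usage sketch for the detection helper above (hypothetical caller; the concrete format strings it returns are not shown in this diff, so only the tuple shape is assumed):

format_type, is_compressed, file_extension = detect_file_format(file_path="/data/epgs/guide.xml.gz")
if is_compressed:
    # Decompress before parsing; file_extension hints at gzip vs. zip handling.
    pass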
@@ -1,7 +1,7 @@
from rest_framework import viewsets, status
from rest_framework.response import Response
from rest_framework.views import APIView
from rest_framework.permissions import IsAuthenticated
from apps.accounts.permissions import Authenticated, permission_classes_by_action
from django.http import JsonResponse, HttpResponseForbidden, HttpResponse
import logging
from drf_yasg.utils import swagger_auto_schema
@@ -17,22 +17,30 @@ from django.views import View
from django.utils.decorators import method_decorator
from django.contrib.auth.decorators import login_required
from django.views.decorators.csrf import csrf_exempt
from apps.m3u.models import M3UAccountProfile

# Configure logger
logger = logging.getLogger(__name__)


@login_required
def hdhr_dashboard_view(request):
    """Render the HDHR management page."""
    hdhr_devices = HDHRDevice.objects.all()
    return render(request, "hdhr/hdhr.html", {"hdhr_devices": hdhr_devices})


# 🔹 1) HDHomeRun Device API
class HDHRDeviceViewSet(viewsets.ModelViewSet):
    """Handles CRUD operations for HDHomeRun devices"""

    queryset = HDHRDevice.objects.all()
    serializer_class = HDHRDeviceSerializer
    permission_classes = [IsAuthenticated]

    def get_permissions(self):
        try:
            return [perm() for perm in permission_classes_by_action[self.action]]
        except KeyError:
            return [Authenticated()]


# 🔹 2) Discover API
@@ -41,48 +49,19 @@ class DiscoverAPIView(APIView):
    @swagger_auto_schema(
        operation_description="Retrieve HDHomeRun device discovery information",
        responses={200: openapi.Response("HDHR Discovery JSON")}
        responses={200: openapi.Response("HDHR Discovery JSON")},
    )
    def get(self, request, profile=None):
        uri_parts = ["hdhr"]
        if profile is not None:
            uri_parts.append(profile)

        base_url = request.build_absolute_uri(f'/{"/".join(uri_parts)}/').rstrip('/')
        base_url = request.build_absolute_uri(f'/{"/".join(uri_parts)}/').rstrip("/")
        device = HDHRDevice.objects.first()

        # Calculate tuner count from active profiles from active M3U accounts (excluding default "custom Default" profile)
        profiles = M3UAccountProfile.objects.filter(
            is_active=True,
            m3u_account__is_active=True  # Only include profiles from enabled M3U accounts
        ).exclude(id=1)

        # 1. Check if any profile has unlimited streams (max_streams=0)
        has_unlimited = profiles.filter(max_streams=0).exists()

        # 2. Calculate tuner count from limited profiles
        limited_tuners = 0
        if not has_unlimited:
            limited_tuners = profiles.filter(max_streams__gt=0).aggregate(
                total=models.Sum('max_streams')
            ).get('total', 0) or 0

        # 3. Add custom stream count to tuner count
        custom_stream_count = Stream.objects.filter(is_custom=True).count()
        logger.debug(f"Found {custom_stream_count} custom streams")

        # 4. Calculate final tuner count
        if has_unlimited:
            # If there are unlimited profiles, start with 10 plus custom streams
            tuner_count = 10 + custom_stream_count
        else:
            # Otherwise use the limited profile sum plus custom streams
            tuner_count = limited_tuners + custom_stream_count

        # 5. Ensure a minimum of 1 tuner
        tuner_count = max(1, tuner_count)

        logger.debug(f"Calculated tuner count: {tuner_count} (limited profiles: {limited_tuners}, custom streams: {custom_stream_count}, unlimited: {has_unlimited})")
        # Calculate tuner count using centralized function
        from apps.m3u.utils import calculate_tuner_count

        tuner_count = calculate_tuner_count(minimum=1, unlimited_default=10)

        # Create a unique DeviceID for the HDHomeRun device based on profile ID or a default value
        device_ID = "12345678"  # Default DeviceID
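Note: calculate_tuner_count itself is not shown in this diff; based on the inline logic it replaces above, a sketch of how the centralized helper presumably behaves (assuming it keeps the same queryset filters and unlimited/custom-stream handling):

def calculate_tuner_count(minimum=1, unlimited_default=10):
    # Active profiles on active M3U accounts, excluding the built-in default profile.
    profiles = M3UAccountProfile.objects.filter(
        is_active=True, m3u_account__is_active=True
    ).exclude(id=1)
    custom_stream_count = Stream.objects.filter(is_custom=True).count()
    if profiles.filter(max_streams=0).exists():
        # Any unlimited profile: start from a fixed base plus custom streams.
        tuner_count = unlimited_default + custom_stream_count
    else:
        limited = profiles.filter(max_streams__gt=0).aggregate(
            total=models.Sum("max_streams")
        ).get("total") or 0
        tuner_count = limited + custom_stream_count
    return max(minimum, tuner_count)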
@@ -123,17 +102,17 @@ class LineupAPIView(APIView):
    @swagger_auto_schema(
        operation_description="Retrieve the available channel lineup",
        responses={200: openapi.Response("Channel Lineup JSON")}
        responses={200: openapi.Response("Channel Lineup JSON")},
    )
    def get(self, request, profile=None):
        if profile is not None:
            channel_profile = ChannelProfile.objects.get(name=profile)
            channels = Channel.objects.filter(
                channelprofilemembership__channel_profile=channel_profile,
                channelprofilemembership__enabled=True
            ).order_by('channel_number')
                channelprofilemembership__enabled=True,
            ).order_by("channel_number")
        else:
            channels = Channel.objects.all().order_by('channel_number')
            channels = Channel.objects.all().order_by("channel_number")

        lineup = []
        for ch in channels:
@@ -146,13 +125,15 @@ class LineupAPIView(APIView):
        else:
            formatted_channel_number = ""

        lineup.append({
            "GuideNumber": formatted_channel_number,
            "GuideName": ch.name,
            "URL": request.build_absolute_uri(f"/proxy/ts/stream/{ch.uuid}"),
            "Guide_ID": formatted_channel_number,
            "Station": formatted_channel_number,
        })
        lineup.append(
            {
                "GuideNumber": formatted_channel_number,
                "GuideName": ch.name,
                "URL": request.build_absolute_uri(f"/proxy/ts/stream/{ch.uuid}"),
                "Guide_ID": formatted_channel_number,
                "Station": formatted_channel_number,
            }
        )
        return JsonResponse(lineup, safe=False)
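Note: with the reshaped append above, each lineup entry serialises to JSON along these lines (illustrative host and values, not from the repository):

{
    "GuideNumber": "5.1",
    "GuideName": "Example Channel",
    "URL": "http://dispatcharr.local/proxy/ts/stream/<channel-uuid>",
    "Guide_ID": "5.1",
    "Station": "5.1"
}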
@@ -162,14 +143,14 @@ class LineupStatusAPIView(APIView):
    @swagger_auto_schema(
        operation_description="Retrieve the HDHomeRun lineup status",
        responses={200: openapi.Response("Lineup Status JSON")}
        responses={200: openapi.Response("Lineup Status JSON")},
    )
    def get(self, request, profile=None):
        data = {
            "ScanInProgress": 0,
            "ScanPossible": 0,
            "Source": "Cable",
            "SourceList": ["Cable"]
            "SourceList": ["Cable"],
        }
        return JsonResponse(data)
@@ -180,10 +161,10 @@ class HDHRDeviceXMLAPIView(APIView):
    @swagger_auto_schema(
        operation_description="Retrieve the HDHomeRun device XML configuration",
        responses={200: openapi.Response("HDHR Device XML")}
        responses={200: openapi.Response("HDHR Device XML")},
    )
    def get(self, request):
        base_url = request.build_absolute_uri('/hdhr/').rstrip('/')
        base_url = request.build_absolute_uri("/hdhr/").rstrip("/")

        xml_response = f"""<?xml version="1.0" encoding="utf-8"?>
<root>
@@ -1,7 +1,7 @@
from rest_framework import viewsets, status
from rest_framework.response import Response
from rest_framework.views import APIView
from rest_framework.permissions import IsAuthenticated
from apps.accounts.permissions import Authenticated, permission_classes_by_action
from django.http import JsonResponse, HttpResponseForbidden, HttpResponse
from drf_yasg.utils import swagger_auto_schema
from drf_yasg import openapi
@@ -16,18 +16,26 @@ from django.utils.decorators import method_decorator
from django.contrib.auth.decorators import login_required
from django.views.decorators.csrf import csrf_exempt


@login_required
def hdhr_dashboard_view(request):
    """Render the HDHR management page."""
    hdhr_devices = HDHRDevice.objects.all()
    return render(request, "hdhr/hdhr.html", {"hdhr_devices": hdhr_devices})


# 🔹 1) HDHomeRun Device API
class HDHRDeviceViewSet(viewsets.ModelViewSet):
    """Handles CRUD operations for HDHomeRun devices"""

    queryset = HDHRDevice.objects.all()
    serializer_class = HDHRDeviceSerializer
    permission_classes = [IsAuthenticated]

    def get_permissions(self):
        try:
            return [perm() for perm in permission_classes_by_action[self.action]]
        except KeyError:
            return [Authenticated()]


# 🔹 2) Discover API
@@ -36,10 +44,10 @@ class DiscoverAPIView(APIView):
    @swagger_auto_schema(
        operation_description="Retrieve HDHomeRun device discovery information",
        responses={200: openapi.Response("HDHR Discovery JSON")}
        responses={200: openapi.Response("HDHR Discovery JSON")},
    )
    def get(self, request):
        base_url = request.build_absolute_uri('/hdhr/').rstrip('/')
        base_url = request.build_absolute_uri("/hdhr/").rstrip("/")
        device = HDHRDevice.objects.first()

        if not device:
@@ -75,15 +83,15 @@ class LineupAPIView(APIView):
    @swagger_auto_schema(
        operation_description="Retrieve the available channel lineup",
        responses={200: openapi.Response("Channel Lineup JSON")}
        responses={200: openapi.Response("Channel Lineup JSON")},
    )
    def get(self, request):
        channels = Channel.objects.all().order_by('channel_number')
        channels = Channel.objects.all().order_by("channel_number")
        lineup = [
            {
                "GuideNumber": str(ch.channel_number),
                "GuideName": ch.name,
                "URL": request.build_absolute_uri(f"/proxy/ts/stream/{ch.uuid}")
                "URL": request.build_absolute_uri(f"/proxy/ts/stream/{ch.uuid}"),
            }
            for ch in channels
        ]
@@ -96,14 +104,14 @@ class LineupStatusAPIView(APIView):
    @swagger_auto_schema(
        operation_description="Retrieve the HDHomeRun lineup status",
        responses={200: openapi.Response("Lineup Status JSON")}
        responses={200: openapi.Response("Lineup Status JSON")},
    )
    def get(self, request):
        data = {
            "ScanInProgress": 0,
            "ScanPossible": 0,
            "Source": "Cable",
            "SourceList": ["Cable"]
            "SourceList": ["Cable"],
        }
        return JsonResponse(data)
@@ -114,10 +122,10 @@ class HDHRDeviceXMLAPIView(APIView):
    @swagger_auto_schema(
        operation_description="Retrieve the HDHomeRun device XML configuration",
        responses={200: openapi.Response("HDHR Device XML")}
        responses={200: openapi.Response("HDHR Device XML")},
    )
    def get(self, request):
        base_url = request.build_absolute_uri('/hdhr/').rstrip('/')
        base_url = request.build_absolute_uri("/hdhr/").rstrip("/")

        xml_response = f"""<?xml version="1.0" encoding="utf-8"?>
<root>
@@ -1,6 +1,8 @@
from django.contrib import admin
from django.utils.html import format_html
from .models import M3UAccount, M3UFilter, ServerGroup, UserAgent
from .models import M3UAccount, M3UFilter, ServerGroup, UserAgent, M3UAccountProfile
import json


class M3UFilterInline(admin.TabularInline):
    model = M3UFilter
@@ -8,50 +10,181 @@ class M3UFilterInline(admin.TabularInline):
    verbose_name = "M3U Filter"
    verbose_name_plural = "M3U Filters"


@admin.register(M3UAccount)
class M3UAccountAdmin(admin.ModelAdmin):
    list_display = ('name', 'server_url', 'server_group', 'max_streams', 'is_active', 'user_agent_display', 'uploaded_file_link', 'created_at', 'updated_at')
    list_filter = ('is_active', 'server_group')
    search_fields = ('name', 'server_url', 'server_group__name')
    list_display = (
        "name",
        "server_url",
        "server_group",
        "max_streams",
        "priority",
        "is_active",
        "user_agent_display",
        "uploaded_file_link",
        "created_at",
        "updated_at",
    )
    list_filter = ("is_active", "server_group")
    search_fields = ("name", "server_url", "server_group__name")
    inlines = [M3UFilterInline]
    actions = ['activate_accounts', 'deactivate_accounts']
    actions = ["activate_accounts", "deactivate_accounts"]

    # Handle both ForeignKey and ManyToManyField cases for UserAgent
    def user_agent_display(self, obj):
        if hasattr(obj, 'user_agent'):  # ForeignKey case
        if hasattr(obj, "user_agent"):  # ForeignKey case
            return obj.user_agent.user_agent if obj.user_agent else "None"
        elif hasattr(obj, 'user_agents'):  # ManyToManyField case
        elif hasattr(obj, "user_agents"):  # ManyToManyField case
            return ", ".join([ua.user_agent for ua in obj.user_agents.all()]) or "None"
        return "None"

    user_agent_display.short_description = "User Agent(s)"

    def vod_enabled_display(self, obj):
        """Display whether VOD is enabled for this account"""
        if obj.custom_properties:
            custom_props = obj.custom_properties or {}
            return "Yes" if custom_props.get('enable_vod', False) else "No"
        return "No"

    vod_enabled_display.short_description = "VOD Enabled"
    vod_enabled_display.boolean = True

    def uploaded_file_link(self, obj):
        if obj.uploaded_file:
            return format_html("<a href='{}' target='_blank'>Download M3U</a>", obj.uploaded_file.url)
            return format_html(
                "<a href='{}' target='_blank'>Download M3U</a>", obj.uploaded_file.url
            )
        return "No file uploaded"

    uploaded_file_link.short_description = "Uploaded File"

    @admin.action(description='Activate selected accounts')
    @admin.action(description="Activate selected accounts")
    def activate_accounts(self, request, queryset):
        queryset.update(is_active=True)

    @admin.action(description='Deactivate selected accounts')
    @admin.action(description="Deactivate selected accounts")
    def deactivate_accounts(self, request, queryset):
        queryset.update(is_active=False)

    # Add ManyToManyField for Django Admin (if applicable)
    if hasattr(M3UAccount, 'user_agents'):
        filter_horizontal = ('user_agents',)  # Only for ManyToManyField
    if hasattr(M3UAccount, "user_agents"):
        filter_horizontal = ("user_agents",)  # Only for ManyToManyField


@admin.register(M3UFilter)
class M3UFilterAdmin(admin.ModelAdmin):
    list_display = ('m3u_account', 'filter_type', 'regex_pattern', 'exclude')
    list_filter = ('filter_type', 'exclude')
    search_fields = ('regex_pattern',)
    ordering = ('m3u_account',)
    list_display = ("m3u_account", "filter_type", "regex_pattern", "exclude")
    list_filter = ("filter_type", "exclude")
    search_fields = ("regex_pattern",)
    ordering = ("m3u_account",)


@admin.register(ServerGroup)
class ServerGroupAdmin(admin.ModelAdmin):
    list_display = ('name',)
    search_fields = ('name',)
    list_display = ("name",)
    search_fields = ("name",)


@admin.register(M3UAccountProfile)
class M3UAccountProfileAdmin(admin.ModelAdmin):
    list_display = (
        "name",
        "m3u_account",
        "is_default",
        "is_active",
        "max_streams",
        "current_viewers",
        "account_status_display",
        "account_expiration_display",
        "last_refresh_display",
    )
    list_filter = ("is_active", "is_default", "m3u_account__account_type")
    search_fields = ("name", "m3u_account__name")
    readonly_fields = ("account_info_display",)

    def account_status_display(self, obj):
        """Display account status from custom properties"""
        status = obj.get_account_status()
        if status:
            # Create colored status display
            color_map = {
                'Active': 'green',
                'Expired': 'red',
                'Disabled': 'red',
                'Banned': 'red',
            }
            color = color_map.get(status, 'black')
            return format_html(
                '<span style="color: {};">{}</span>',
                color,
                status
            )
        return "Unknown"

    account_status_display.short_description = "Account Status"

    def account_expiration_display(self, obj):
        """Display account expiration from custom properties"""
        expiration = obj.get_account_expiration()
        if expiration:
            from datetime import datetime

            if expiration < datetime.now():
                return format_html(
                    '<span style="color: red;">{}</span>',
                    expiration.strftime('%Y-%m-%d %H:%M')
                )
            else:
                return format_html(
                    '<span style="color: green;">{}</span>',
                    expiration.strftime('%Y-%m-%d %H:%M')
                )
        return "Unknown"

    account_expiration_display.short_description = "Expires"

    def last_refresh_display(self, obj):
        """Display last refresh time from custom properties"""
        last_refresh = obj.get_last_refresh()
        if last_refresh:
            return last_refresh.strftime('%Y-%m-%d %H:%M:%S')
        return "Never"

    last_refresh_display.short_description = "Last Refresh"

    def account_info_display(self, obj):
        """Display formatted account information from custom properties"""
        if not obj.custom_properties:
            return "No account information available"

        html_parts = []

        # User Info
        user_info = obj.custom_properties.get('user_info', {})
        if user_info:
            html_parts.append("<h3>User Information:</h3>")
            html_parts.append("<ul>")
            for key, value in user_info.items():
                if key == 'exp_date' and value:
                    try:
                        from datetime import datetime

                        exp_date = datetime.fromtimestamp(float(value))
                        value = exp_date.strftime('%Y-%m-%d %H:%M:%S')
                    except (ValueError, TypeError):
                        pass
                html_parts.append(f"<li><strong>{key}:</strong> {value}</li>")
            html_parts.append("</ul>")

        # Server Info
        server_info = obj.custom_properties.get('server_info', {})
        if server_info:
            html_parts.append("<h3>Server Information:</h3>")
            html_parts.append("<ul>")
            for key, value in server_info.items():
                html_parts.append(f"<li><strong>{key}:</strong> {value}</li>")
            html_parts.append("</ul>")

        # Last Refresh
        last_refresh = obj.custom_properties.get('last_refresh')
        if last_refresh:
            html_parts.append(f"<p><strong>Last Refresh:</strong> {last_refresh}</p>")

        return format_html(''.join(html_parts)) if html_parts else "No account information available"

    account_info_display.short_description = "Account Information"
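Note: the admin displays above read the profile's custom_properties JSON; an illustrative shape (the user_info/server_info key names follow the XtreamCodes player API convention; the values here are made up):

profile.custom_properties = {
    'user_info': {'username': 'demo', 'status': 'Active', 'exp_date': '1767225600', 'max_connections': '2'},
    'server_info': {'url': 'provider.example.com', 'port': '8080', 'server_protocol': 'http'},
    'last_refresh': '2025-09-10 12:00:00',
}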
@@ -1,18 +1,44 @@
from django.urls import path, include
from rest_framework.routers import DefaultRouter
from .api_views import M3UAccountViewSet, M3UFilterViewSet, ServerGroupViewSet, RefreshM3UAPIView, RefreshSingleM3UAPIView, UserAgentViewSet, M3UAccountProfileViewSet
from .api_views import (
    M3UAccountViewSet,
    M3UFilterViewSet,
    ServerGroupViewSet,
    RefreshM3UAPIView,
    RefreshSingleM3UAPIView,
    RefreshAccountInfoAPIView,
    UserAgentViewSet,
    M3UAccountProfileViewSet,
)

app_name = 'm3u'
app_name = "m3u"

router = DefaultRouter()
router.register(r'accounts', M3UAccountViewSet, basename='m3u-account')
router.register(r'accounts\/(?P<account_id>\d+)\/profiles', M3UAccountProfileViewSet, basename='m3u-account-profiles')
router.register(r'filters', M3UFilterViewSet, basename='m3u-filter')
router.register(r'server-groups', ServerGroupViewSet, basename='server-group')
router.register(r"accounts", M3UAccountViewSet, basename="m3u-account")
router.register(
    r"accounts\/(?P<account_id>\d+)\/profiles",
    M3UAccountProfileViewSet,
    basename="m3u-account-profiles",
)
router.register(
    r"accounts\/(?P<account_id>\d+)\/filters",
    M3UFilterViewSet,
    basename="m3u-filters",
)
router.register(r"server-groups", ServerGroupViewSet, basename="server-group")

urlpatterns = [
    path('refresh/', RefreshM3UAPIView.as_view(), name='m3u_refresh'),
    path('refresh/<int:account_id>/', RefreshSingleM3UAPIView.as_view(), name='m3u_refresh_single'),
    path("refresh/", RefreshM3UAPIView.as_view(), name="m3u_refresh"),
    path(
        "refresh/<int:account_id>/",
        RefreshSingleM3UAPIView.as_view(),
        name="m3u_refresh_single",
    ),
    path(
        "refresh-account-info/<int:profile_id>/",
        RefreshAccountInfoAPIView.as_view(),
        name="m3u_refresh_account_info",
    ),
]

urlpatterns += router.urls
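Note: after these registrations, the router and explicit paths expose roughly the following routes, relative to wherever the m3u app is mounted (the mount prefix is not shown in this diff):

# accounts/                            -> M3UAccountViewSet (list/create)
# accounts/<pk>/                       -> M3UAccountViewSet (retrieve/update/delete)
# accounts/<account_id>/profiles/      -> M3UAccountProfileViewSet
# accounts/<account_id>/filters/       -> M3UFilterViewSet (newly nested)
# server-groups/                       -> ServerGroupViewSet
# refresh/                             -> RefreshM3UAPIView
# refresh/<account_id>/                -> RefreshSingleM3UAPIView
# refresh-account-info/<profile_id>/   -> RefreshAccountInfoAPIView (new)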
@@ -1,7 +1,11 @@
from rest_framework import viewsets, status
from rest_framework.response import Response
from rest_framework.views import APIView
from rest_framework.permissions import IsAuthenticated
from apps.accounts.permissions import (
    Authenticated,
    permission_classes_by_action,
    permission_classes_by_method,
)
from drf_yasg.utils import swagger_auto_schema
from drf_yasg import openapi
from django.shortcuts import get_object_or_404
@@ -11,13 +15,14 @@ import os
from rest_framework.decorators import action
from django.conf import settings
from .tasks import refresh_m3u_groups
import json

# Import all models, including UserAgent.
from .models import M3UAccount, M3UFilter, ServerGroup, M3UAccountProfile
from core.models import UserAgent
from apps.channels.models import ChannelGroupM3UAccount
from core.serializers import UserAgentSerializer

# Import all serializers, including the UserAgentSerializer.
from apps.vod.models import M3UVODCategoryRelation

from .serializers import (
    M3UAccountSerializer,
    M3UFilterSerializer,
@@ -25,80 +30,112 @@ from .serializers import (
    M3UAccountProfileSerializer,
)

from .tasks import refresh_single_m3u_account, refresh_m3u_accounts
from django.core.files.storage import default_storage
from django.core.files.base import ContentFile
from .tasks import refresh_single_m3u_account, refresh_m3u_accounts, refresh_account_info
import json


class M3UAccountViewSet(viewsets.ModelViewSet):
    """Handles CRUD operations for M3U accounts"""
    queryset = M3UAccount.objects.prefetch_related('channel_group')

    queryset = M3UAccount.objects.prefetch_related("channel_group")
    serializer_class = M3UAccountSerializer
    permission_classes = [IsAuthenticated]

    def get_permissions(self):
        try:
            return [perm() for perm in permission_classes_by_action[self.action]]
        except KeyError:
            return [Authenticated()]

    def create(self, request, *args, **kwargs):
        # Handle file upload first, if any
        file_path = None
        if 'file' in request.FILES:
            file = request.FILES['file']
        if "file" in request.FILES:
            file = request.FILES["file"]
            file_name = file.name
            file_path = os.path.join('/data/uploads/m3us', file_name)
            file_path = os.path.join("/data/uploads/m3us", file_name)

            os.makedirs(os.path.dirname(file_path), exist_ok=True)
            with open(file_path, 'wb+') as destination:
            with open(file_path, "wb+") as destination:
                for chunk in file.chunks():
                    destination.write(chunk)

        # Add file_path to the request data so it's available during creation
        request.data._mutable = True  # Allow modification of the request data
        request.data['file_path'] = file_path  # Include the file path if a file was uploaded

        request.data["file_path"] = (
            file_path  # Include the file path if a file was uploaded
        )

        # Handle the user_agent field - convert "null" string to None
        if 'user_agent' in request.data and request.data['user_agent'] == 'null':
            request.data['user_agent'] = None

        if "user_agent" in request.data and request.data["user_agent"] == "null":
            request.data["user_agent"] = None

        # Handle server_url appropriately
        if 'server_url' in request.data and not request.data['server_url']:
            request.data.pop('server_url')

        if "server_url" in request.data and not request.data["server_url"]:
            request.data.pop("server_url")

        request.data._mutable = False  # Make the request data immutable again

        # Now call super().create() to create the instance
        response = super().create(request, *args, **kwargs)

        print(response.data.get('account_type'))
        if response.data.get('account_type') == M3UAccount.Types.XC:
            refresh_m3u_groups(response.data.get('id'))
        account_type = response.data.get("account_type")
        account_id = response.data.get("id")

        # Notify frontend that a new playlist was created
        from core.utils import send_websocket_update

        send_websocket_update('updates', 'update', {
            'type': 'playlist_created',
            'playlist_id': account_id
        })

        if account_type == M3UAccount.Types.XC:
            refresh_m3u_groups(account_id)

            # Check if VOD is enabled
            enable_vod = request.data.get("enable_vod", False)
            if enable_vod:
                from apps.vod.tasks import refresh_categories

                refresh_categories(account_id)

        # After the instance is created, return the response
        return response

    def update(self, request, *args, **kwargs):
        instance = self.get_object()
        old_vod_enabled = False

        # Check current VOD setting
        if instance.custom_properties:
            custom_props = instance.custom_properties or {}
            old_vod_enabled = custom_props.get("enable_vod", False)

        # Handle file upload first, if any
        file_path = None
        if 'file' in request.FILES:
            file = request.FILES['file']
        if "file" in request.FILES:
            file = request.FILES["file"]
            file_name = file.name
            file_path = os.path.join('/data/uploads/m3us', file_name)
            file_path = os.path.join("/data/uploads/m3us", file_name)

            os.makedirs(os.path.dirname(file_path), exist_ok=True)
            with open(file_path, 'wb+') as destination:
            with open(file_path, "wb+") as destination:
                for chunk in file.chunks():
                    destination.write(chunk)

        # Add file_path to the request data so it's available during creation
        request.data._mutable = True  # Allow modification of the request data
        request.data['file_path'] = file_path  # Include the file path if a file was uploaded

        request.data["file_path"] = (
            file_path  # Include the file path if a file was uploaded
        )

        # Handle the user_agent field - convert "null" string to None
        if 'user_agent' in request.data and request.data['user_agent'] == 'null':
            request.data['user_agent'] = None

        if "user_agent" in request.data and request.data["user_agent"] == "null":
            request.data["user_agent"] = None

        # Handle server_url appropriately
        if 'server_url' in request.data and not request.data['server_url']:
            request.data.pop('server_url')

        if "server_url" in request.data and not request.data["server_url"]:
            request.data.pop("server_url")

        request.data._mutable = False  # Make the request data immutable again

        if instance.file_path and os.path.exists(instance.file_path):
@@ -107,6 +144,18 @@ class M3UAccountViewSet(viewsets.ModelViewSet):
        # Now call super().update() to update the instance
        response = super().update(request, *args, **kwargs)

        # Check if VOD setting changed and trigger refresh if needed
        new_vod_enabled = request.data.get("enable_vod", old_vod_enabled)

        if (
            instance.account_type == M3UAccount.Types.XC
            and not old_vod_enabled
            and new_vod_enabled
        ):
            from apps.vod.tasks import refresh_vod_content

            refresh_vod_content.delay(instance.id)

        # After the instance is updated, return the response
        return response
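Note: the VOD trigger added above only fires on a False-to-True transition for XC accounts; a compact restatement of the condition as written (no repository code, just the truth table it implies):

# old_vod_enabled  new_vod_enabled  refresh_vod_content queued?
# False            False            no
# False            True             yes (XC accounts only)
# True             True             no (already enabled)
# True             False            no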
|
@ -115,75 +164,281 @@ class M3UAccountViewSet(viewsets.ModelViewSet):
|
|||
instance = self.get_object()
|
||||
|
||||
# Check if we're toggling is_active
|
||||
if 'is_active' in request.data and instance.is_active != request.data['is_active']:
|
||||
if (
|
||||
"is_active" in request.data
|
||||
and instance.is_active != request.data["is_active"]
|
||||
):
|
||||
# Set appropriate status based on new is_active value
|
||||
if request.data['is_active']:
|
||||
request.data['status'] = M3UAccount.Status.IDLE
|
||||
if request.data["is_active"]:
|
||||
request.data["status"] = M3UAccount.Status.IDLE
|
||||
else:
|
||||
request.data['status'] = M3UAccount.Status.DISABLED
|
||||
request.data["status"] = M3UAccount.Status.DISABLED
|
||||
|
||||
# Continue with regular partial update
|
||||
return super().partial_update(request, *args, **kwargs)
|
||||
|
||||
@action(detail=True, methods=["post"], url_path="refresh-vod")
|
||||
def refresh_vod(self, request, pk=None):
|
||||
"""Trigger VOD content refresh for XtreamCodes accounts"""
|
||||
account = self.get_object()
|
||||
|
||||
if account.account_type != M3UAccount.Types.XC:
|
||||
return Response(
|
||||
{"error": "VOD refresh is only available for XtreamCodes accounts"},
|
||||
status=status.HTTP_400_BAD_REQUEST,
|
||||
)
|
||||
|
||||
# Check if VOD is enabled
|
||||
vod_enabled = False
|
||||
if account.custom_properties:
|
||||
custom_props = account.custom_properties or {}
|
||||
vod_enabled = custom_props.get("enable_vod", False)
|
||||
|
||||
if not vod_enabled:
|
||||
return Response(
|
||||
{"error": "VOD is not enabled for this account"},
|
||||
status=status.HTTP_400_BAD_REQUEST,
|
||||
)
|
||||
|
||||
try:
|
||||
from apps.vod.tasks import refresh_vod_content
|
||||
|
||||
refresh_vod_content.delay(account.id)
|
||||
return Response(
|
||||
{"message": f"VOD refresh initiated for account {account.name}"},
|
||||
status=status.HTTP_202_ACCEPTED,
|
||||
)
|
||||
except Exception as e:
|
||||
return Response(
|
||||
{"error": f"Failed to initiate VOD refresh: {str(e)}"},
|
||||
status=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
||||
)
|
||||
|
||||
@action(detail=True, methods=["patch"], url_path="group-settings")
|
||||
def update_group_settings(self, request, pk=None):
|
||||
"""Update auto channel sync settings for M3U account groups"""
|
||||
account = self.get_object()
|
||||
group_settings = request.data.get("group_settings", [])
|
||||
category_settings = request.data.get("category_settings", [])
|
||||
|
||||
try:
|
||||
for setting in group_settings:
|
||||
group_id = setting.get("channel_group")
|
||||
enabled = setting.get("enabled", True)
|
||||
auto_sync = setting.get("auto_channel_sync", False)
|
||||
sync_start = setting.get("auto_sync_channel_start")
|
||||
custom_properties = setting.get("custom_properties", {})
|
||||
|
||||
if group_id:
|
||||
ChannelGroupM3UAccount.objects.update_or_create(
|
||||
channel_group_id=group_id,
|
||||
m3u_account=account,
|
||||
defaults={
|
||||
"enabled": enabled,
|
||||
"auto_channel_sync": auto_sync,
|
||||
"auto_sync_channel_start": sync_start,
|
||||
"custom_properties": custom_properties,
|
||||
},
|
||||
)
|
||||
|
||||
for setting in category_settings:
|
||||
category_id = setting.get("id")
|
||||
enabled = setting.get("enabled", True)
|
||||
custom_properties = setting.get("custom_properties", {})
|
||||
|
||||
if category_id:
|
||||
M3UVODCategoryRelation.objects.update_or_create(
|
||||
category_id=category_id,
|
||||
m3u_account=account,
|
||||
defaults={
|
||||
"enabled": enabled,
|
||||
"custom_properties": custom_properties,
|
||||
},
|
||||
)
|
||||
|
||||
return Response({"message": "Group settings updated successfully"})
|
||||
|
||||
except Exception as e:
|
||||
return Response(
|
||||
{"error": f"Failed to update group settings: {str(e)}"},
|
||||
status=status.HTTP_400_BAD_REQUEST,
|
||||
)
|
||||
|
||||
|
||||
class M3UFilterViewSet(viewsets.ModelViewSet):
|
||||
"""Handles CRUD operations for M3U filters"""
|
||||
queryset = M3UFilter.objects.all()
|
||||
serializer_class = M3UFilterSerializer
|
||||
permission_classes = [IsAuthenticated]
|
||||
|
||||
def get_permissions(self):
|
||||
try:
|
||||
return [perm() for perm in permission_classes_by_action[self.action]]
|
||||
except KeyError:
|
||||
return [Authenticated()]
|
||||
|
||||
def get_queryset(self):
|
||||
m3u_account_id = self.kwargs["account_id"]
|
||||
return M3UFilter.objects.filter(m3u_account_id=m3u_account_id)
|
||||
|
||||
def perform_create(self, serializer):
|
||||
# Get the account ID from the URL
|
||||
account_id = self.kwargs["account_id"]
|
||||
|
||||
# # Get the M3UAccount instance for the account_id
|
||||
# m3u_account = M3UAccount.objects.get(id=account_id)
|
||||
|
||||
# Save the 'm3u_account' in the serializer context
|
||||
serializer.context["m3u_account"] = account_id
|
||||
|
||||
# Perform the actual save
|
||||
serializer.save(m3u_account_id=account_id)
|
||||
|
||||
|
||||
class ServerGroupViewSet(viewsets.ModelViewSet):
|
||||
"""Handles CRUD operations for Server Groups"""
|
||||
|
||||
queryset = ServerGroup.objects.all()
|
||||
serializer_class = ServerGroupSerializer
|
||||
permission_classes = [IsAuthenticated]
|
||||
|
||||
def get_permissions(self):
|
||||
try:
|
||||
return [perm() for perm in permission_classes_by_action[self.action]]
|
||||
except KeyError:
|
||||
return [Authenticated()]
|
||||
|
||||
|
||||
class RefreshM3UAPIView(APIView):
|
||||
"""Triggers refresh for all active M3U accounts"""
|
||||
|
||||
def get_permissions(self):
|
||||
try:
|
||||
return [
|
||||
perm() for perm in permission_classes_by_method[self.request.method]
|
||||
]
|
||||
except KeyError:
|
||||
return [Authenticated()]
|
||||
|
||||
@swagger_auto_schema(
|
||||
operation_description="Triggers a refresh of all active M3U accounts",
|
||||
responses={202: "M3U refresh initiated"}
|
||||
responses={202: "M3U refresh initiated"},
|
||||
)
|
||||
def post(self, request, format=None):
|
||||
refresh_m3u_accounts.delay()
|
||||
return Response({'success': True, 'message': 'M3U refresh initiated.'}, status=status.HTTP_202_ACCEPTED)
|
||||
return Response(
|
||||
{"success": True, "message": "M3U refresh initiated."},
|
||||
status=status.HTTP_202_ACCEPTED,
|
||||
)
|
||||
|
||||
|
||||
class RefreshSingleM3UAPIView(APIView):
|
||||
"""Triggers refresh for a single M3U account"""
|
||||
|
||||
def get_permissions(self):
|
||||
try:
|
||||
return [
|
||||
perm() for perm in permission_classes_by_method[self.request.method]
|
||||
]
|
||||
except KeyError:
|
||||
return [Authenticated()]
|
||||
|
||||
@swagger_auto_schema(
|
||||
operation_description="Triggers a refresh of a single M3U account",
|
||||
responses={202: "M3U account refresh initiated"}
|
||||
responses={202: "M3U account refresh initiated"},
|
||||
)
|
||||
def post(self, request, account_id, format=None):
|
||||
refresh_single_m3u_account.delay(account_id)
|
||||
return Response({'success': True, 'message': f'M3U account {account_id} refresh initiated.'},
|
||||
status=status.HTTP_202_ACCEPTED)
|
||||
return Response(
|
||||
{
|
||||
"success": True,
|
||||
"message": f"M3U account {account_id} refresh initiated.",
|
||||
},
|
||||
status=status.HTTP_202_ACCEPTED,
|
||||
)
|
||||
|
||||
|
||||
class RefreshAccountInfoAPIView(APIView):
|
||||
"""Triggers account info refresh for a single M3U account"""
|
||||
|
||||
def get_permissions(self):
|
||||
try:
|
||||
return [
|
||||
perm() for perm in permission_classes_by_method[self.request.method]
|
||||
]
|
||||
except KeyError:
|
||||
return [Authenticated()]
|
||||
|
||||
@swagger_auto_schema(
|
||||
operation_description="Triggers a refresh of account information for a specific M3U profile",
|
||||
responses={202: "Account info refresh initiated", 400: "Profile not found or not XtreamCodes"},
|
||||
)
|
||||
def post(self, request, profile_id, format=None):
|
||||
try:
|
||||
from .models import M3UAccountProfile
|
||||
profile = M3UAccountProfile.objects.get(id=profile_id)
|
||||
account = profile.m3u_account
|
||||
|
||||
if account.account_type != M3UAccount.Types.XC:
|
||||
return Response(
|
||||
{
|
||||
"success": False,
|
||||
"error": "Account info refresh is only available for XtreamCodes accounts",
|
||||
},
|
||||
status=status.HTTP_400_BAD_REQUEST,
|
||||
)
|
||||
|
||||
refresh_account_info.delay(profile_id)
|
||||
return Response(
|
||||
{
|
||||
"success": True,
|
||||
"message": f"Account info refresh initiated for profile {profile.name}.",
|
||||
},
|
||||
status=status.HTTP_202_ACCEPTED,
|
||||
)
|
||||
except M3UAccountProfile.DoesNotExist:
|
||||
return Response(
|
||||
{
|
||||
"success": False,
|
||||
"error": "Profile not found",
|
||||
},
|
||||
status=status.HTTP_404_NOT_FOUND,
|
||||
)
|
||||
|
||||
|
||||
class UserAgentViewSet(viewsets.ModelViewSet):
|
||||
"""Handles CRUD operations for User Agents"""
|
||||
|
||||
queryset = UserAgent.objects.all()
|
||||
serializer_class = UserAgentSerializer
|
||||
permission_classes = [IsAuthenticated]
|
||||
|
||||
def get_permissions(self):
|
||||
try:
|
||||
return [perm() for perm in permission_classes_by_action[self.action]]
|
||||
except KeyError:
|
||||
return [Authenticated()]
|
||||
|
||||
|
||||
class M3UAccountProfileViewSet(viewsets.ModelViewSet):
|
||||
queryset = M3UAccountProfile.objects.all()
|
||||
serializer_class = M3UAccountProfileSerializer
|
||||
permission_classes = [IsAuthenticated]
|
||||
|
||||
def get_permissions(self):
|
||||
try:
|
||||
return [perm() for perm in permission_classes_by_action[self.action]]
|
||||
except KeyError:
|
||||
return [Authenticated()]
|
||||
|
||||
def get_queryset(self):
|
||||
m3u_account_id = self.kwargs['account_id']
|
||||
m3u_account_id = self.kwargs["account_id"]
|
||||
return M3UAccountProfile.objects.filter(m3u_account_id=m3u_account_id)
|
||||
|
||||
def perform_create(self, serializer):
|
||||
# Get the account ID from the URL
|
||||
account_id = self.kwargs['account_id']
|
||||
account_id = self.kwargs["account_id"]
|
||||
|
||||
# Get the M3UAccount instance for the account_id
|
||||
m3u_account = M3UAccount.objects.get(id=account_id)
|
||||
|
||||
# Save the 'm3u_account' in the serializer context
|
||||
serializer.context['m3u_account'] = m3u_account
|
||||
serializer.context["m3u_account"] = m3u_account
|
||||
|
||||
# Perform the actual save
|
||||
serializer.save(m3u_account_id=m3u_account)
|
||||
|
|
|
|||
|
|
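Note: an illustrative PATCH body for the group-settings action above, matching only the keys the handler actually reads (IDs and values are examples, not real data):

payload = {
    "group_settings": [
        {"channel_group": 12, "enabled": True, "auto_channel_sync": True,
         "auto_sync_channel_start": 100, "custom_properties": {}},
    ],
    "category_settings": [
        {"id": 3, "enabled": False, "custom_properties": {}},
    ],
}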
@@ -4,6 +4,13 @@ from .models import M3UAccount, M3UFilter
import re


class M3UAccountForm(forms.ModelForm):
    enable_vod = forms.BooleanField(
        required=False,
        initial=False,
        label="Enable VOD Content",
        help_text="Parse and import VOD (movies/series) content for XtreamCodes accounts"
    )

    class Meta:
        model = M3UAccount
        fields = [
@@ -13,8 +20,34 @@ class M3UAccountForm(forms.ModelForm):
            'server_group',
            'max_streams',
            'is_active',
            'enable_vod',
        ]

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)

        # Set initial value for enable_vod from custom_properties
        if self.instance and self.instance.custom_properties:
            custom_props = self.instance.custom_properties or {}
            self.fields['enable_vod'].initial = custom_props.get('enable_vod', False)

    def save(self, commit=True):
        instance = super().save(commit=False)

        # Handle enable_vod field
        enable_vod = self.cleaned_data.get('enable_vod', False)

        # Parse existing custom_properties
        custom_props = instance.custom_properties or {}

        # Update VOD preference
        custom_props['enable_vod'] = enable_vod
        instance.custom_properties = custom_props

        if commit:
            instance.save()
        return instance

    def clean_uploaded_file(self):
        uploaded_file = self.cleaned_data.get('uploaded_file')
        if uploaded_file:
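Note: the save() override above persists the checkbox into the JSON blob rather than a dedicated model column; the round-trip, as a sketch (not repository code):

form = M3UAccountForm(instance=account, data=request.POST)
if form.is_valid():
    account = form.save()
    # The flag now lives in the JSON blob and is read back by __init__ on the next edit:
    account.custom_properties.get('enable_vod')  # -> True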
@@ -3,6 +3,7 @@
from django.db import migrations
from core.models import CoreSettings


def create_custom_account(apps, schema_editor):
    default_user_agent_id = CoreSettings.get_default_user_agent_id()
@@ -18,7 +19,7 @@ def create_custom_account(apps, schema_editor):
    M3UAccountProfile = apps.get_model("m3u", "M3UAccountProfile")
    M3UAccountProfile.objects.create(
        m3u_account=m3u_account,
        name=f'{m3u_account.name} Default',
        name=f"{m3u_account.name} Default",
        max_streams=m3u_account.max_streams,
        is_default=True,
        is_active=True,
@@ -26,10 +27,12 @@ def create_custom_account(apps, schema_editor):
        replace_pattern="$1",
    )


class Migration(migrations.Migration):

    dependencies = [
        ('m3u', '0002_m3uaccount_locked'),
        ("m3u", "0002_m3uaccount_locked"),
        ("core", "0004_preload_core_settings"),
    ]

    operations = [
@@ -7,24 +7,29 @@ from django.db import migrations, models
class Migration(migrations.Migration):

    dependencies = [
        ('django_celery_beat', '0019_alter_periodictasks_options'),
        ('m3u', '0004_m3uaccount_stream_profile'),
        ("django_celery_beat", "0019_alter_periodictasks_options"),
        ("m3u", "0004_m3uaccount_stream_profile"),
    ]

    operations = [
        migrations.AddField(
            model_name='m3uaccount',
            name='custom_properties',
            model_name="m3uaccount",
            name="custom_properties",
            field=models.TextField(blank=True, null=True),
        ),
        migrations.AddField(
            model_name='m3uaccount',
            name='refresh_interval',
            model_name="m3uaccount",
            name="refresh_interval",
            field=models.IntegerField(default=24),
        ),
        migrations.AddField(
            model_name='m3uaccount',
            name='refresh_task',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to='django_celery_beat.periodictask'),
            model_name="m3uaccount",
            name="refresh_task",
            field=models.ForeignKey(
                blank=True,
                null=True,
                on_delete=django.db.models.deletion.SET_NULL,
                to="django_celery_beat.periodictask",
            ),
        ),
    ]
18
apps/m3u/migrations/0013_alter_m3ufilter_filter_type.py
Normal file
@@ -0,0 +1,18 @@
# Generated by Django 5.1.6 on 2025-07-22 21:16

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('m3u', '0012_alter_m3uaccount_refresh_interval'),
    ]

    operations = [
        migrations.AlterField(
            model_name='m3ufilter',
            name='filter_type',
            field=models.CharField(choices=[('group', 'Group'), ('name', 'Stream Name'), ('url', 'Stream URL')], default='group', help_text='Filter based on either group title or stream name.', max_length=50),
        ),
    ]
@@ -0,0 +1,22 @@
# Generated by Django 5.1.6 on 2025-07-31 17:14

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('m3u', '0013_alter_m3ufilter_filter_type'),
    ]

    operations = [
        migrations.AlterModelOptions(
            name='m3ufilter',
            options={'ordering': ['order']},
        ),
        migrations.AddField(
            model_name='m3ufilter',
            name='order',
            field=models.PositiveIntegerField(default=0),
        ),
    ]
@@ -0,0 +1,22 @@
# Generated by Django 5.2.4 on 2025-08-02 16:06

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('m3u', '0014_alter_m3ufilter_options_m3ufilter_order'),
    ]

    operations = [
        migrations.AlterModelOptions(
            name='m3ufilter',
            options={},
        ),
        migrations.AddField(
            model_name='m3ufilter',
            name='custom_properties',
            field=models.TextField(blank=True, null=True),
        ),
    ]
18
apps/m3u/migrations/0016_m3uaccount_priority.py
Normal file
@@ -0,0 +1,18 @@
# Generated by Django 5.2.4 on 2025-08-20 22:35

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('m3u', '0015_alter_m3ufilter_options_m3ufilter_custom_properties'),
    ]

    operations = [
        migrations.AddField(
            model_name='m3uaccount',
            name='priority',
            field=models.PositiveIntegerField(default=0, help_text='Priority for VOD provider selection (higher numbers = higher priority). Used when multiple providers offer the same content.'),
        ),
    ]
@@ -0,0 +1,28 @@
# Generated by Django 5.2.4 on 2025-09-02 15:19

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('m3u', '0016_m3uaccount_priority'),
    ]

    operations = [
        migrations.AlterField(
            model_name='m3uaccount',
            name='custom_properties',
            field=models.JSONField(blank=True, default=dict, null=True),
        ),
        migrations.AlterField(
            model_name='m3uaccount',
            name='server_url',
            field=models.URLField(blank=True, help_text='The base URL of the M3U server (optional if a file is uploaded)', max_length=1000, null=True),
        ),
        migrations.AlterField(
            model_name='m3ufilter',
            name='custom_properties',
            field=models.JSONField(blank=True, default=dict, null=True),
        ),
    ]
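Note: the migration above converts custom_properties from TextField to JSONField; the practical effect, as a sketch consistent with the dict-style reads elsewhere in this commit (assumed intent, not a statement from the authors), is that call sites can drop manual (de)serialisation:

# Before (TextField): custom_props = json.loads(account.custom_properties or "{}")
# After (JSONField), the ORM handles serialisation:
custom_props = account.custom_properties or {}
custom_props["enable_vod"] = True
account.custom_properties = custom_props
account.save()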
18
apps/m3u/migrations/0018_add_profile_custom_properties.py
Normal file
@@ -0,0 +1,18 @@
# Generated by Django 5.2.4 on 2025-09-09 20:57

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('m3u', '0017_alter_m3uaccount_custom_properties_and_more'),
    ]

    operations = [
        migrations.AddField(
            model_name='m3uaccountprofile',
            name='custom_properties',
            field=models.JSONField(blank=True, default=dict, help_text='Custom properties for storing account information from provider (e.g., XC account details, expiration dates)', null=True),
        ),
    ]
@@ -7,7 +7,8 @@ from apps.channels.models import StreamProfile
from django_celery_beat.models import PeriodicTask
from core.models import CoreSettings, UserAgent

CUSTOM_M3U_ACCOUNT_NAME="custom"
CUSTOM_M3U_ACCOUNT_NAME = "custom"


class M3UAccount(models.Model):
    class Types(models.TextChoices):
@@ -25,84 +26,78 @@ class M3UAccount(models.Model):
    """Represents an M3U Account for IPTV streams."""

    name = models.CharField(
        max_length=255,
        unique=True,
        help_text="Unique name for this M3U account"
        max_length=255, unique=True, help_text="Unique name for this M3U account"
    )
    server_url = models.URLField(
        max_length=1000,
        blank=True,
        null=True,
        help_text="The base URL of the M3U server (optional if a file is uploaded)"
    )
    file_path = models.CharField(
        max_length=255,
        blank=True,
        null=True
        help_text="The base URL of the M3U server (optional if a file is uploaded)",
    )
    file_path = models.CharField(max_length=255, blank=True, null=True)
    server_group = models.ForeignKey(
        'ServerGroup',
        "ServerGroup",
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        related_name='m3u_accounts',
        help_text="The server group this M3U account belongs to"
        related_name="m3u_accounts",
        help_text="The server group this M3U account belongs to",
    )
    max_streams = models.PositiveIntegerField(
        default=0,
        help_text="Maximum number of concurrent streams (0 for unlimited)"
        default=0, help_text="Maximum number of concurrent streams (0 for unlimited)"
    )
    is_active = models.BooleanField(
        default=True,
        help_text="Set to false to deactivate this M3U account"
        default=True, help_text="Set to false to deactivate this M3U account"
    )
    created_at = models.DateTimeField(
        auto_now_add=True,
        help_text="Time when this account was created"
        auto_now_add=True, help_text="Time when this account was created"
    )
    updated_at = models.DateTimeField(
        null=True, blank=True,
        help_text="Time when this account was last successfully refreshed"
        null=True,
        blank=True,
        help_text="Time when this account was last successfully refreshed",
    )
    status = models.CharField(
        max_length=20,
        choices=Status.choices,
        default=Status.IDLE
        max_length=20, choices=Status.choices, default=Status.IDLE
    )
    last_message = models.TextField(
        null=True,
        blank=True,
        help_text="Last status message, including success results or error information"
        help_text="Last status message, including success results or error information",
    )
    user_agent = models.ForeignKey(
        'core.UserAgent',
        "core.UserAgent",
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        related_name='m3u_accounts',
        help_text="The User-Agent associated with this M3U account."
        related_name="m3u_accounts",
        help_text="The User-Agent associated with this M3U account.",
    )
    locked = models.BooleanField(
        default=False,
        help_text="Protected - can't be deleted or modified"
        default=False, help_text="Protected - can't be deleted or modified"
    )
    stream_profile = models.ForeignKey(
        StreamProfile,
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        related_name='m3u_accounts'
        related_name="m3u_accounts",
    )
    account_type = models.CharField(choices=Types.choices, default=Types.STADNARD)
    username = models.CharField(max_length=255, null=True, blank=True)
    password = models.CharField(max_length=255, null=True, blank=True)
    custom_properties = models.TextField(null=True, blank=True)
    custom_properties = models.JSONField(default=dict, blank=True, null=True)
    refresh_interval = models.IntegerField(default=0)
    refresh_task = models.ForeignKey(
        PeriodicTask, on_delete=models.SET_NULL, null=True, blank=True
    )
    stale_stream_days = models.PositiveIntegerField(
        default=7,
        help_text="Number of days after which a stream will be removed if not seen in the M3U source."
        help_text="Number of days after which a stream will be removed if not seen in the M3U source.",
    )
    priority = models.PositiveIntegerField(
        default=0,
        help_text="Priority for VOD provider selection (higher numbers = higher priority). Used when multiple providers offer the same content.",
    )

    def __str__(self):
@@ -134,17 +129,19 @@ class M3UAccount(models.Model):
    def get_user_agent(self):
        user_agent = self.user_agent
        if not user_agent:
            user_agent = UserAgent.objects.get(id=CoreSettings.get_default_user_agent_id())
            user_agent = UserAgent.objects.get(
                id=CoreSettings.get_default_user_agent_id()
            )

        return user_agent

    def save(self, *args, **kwargs):
        # Prevent auto_now behavior by handling updated_at manually
        if 'update_fields' in kwargs and 'updated_at' not in kwargs['update_fields']:
        if "update_fields" in kwargs and "updated_at" not in kwargs["update_fields"]:
            # Don't modify updated_at for regular updates
            kwargs.setdefault('update_fields', [])
            if 'updated_at' in kwargs['update_fields']:
                kwargs['update_fields'].remove('updated_at')
            kwargs.setdefault("update_fields", [])
            if "updated_at" in kwargs["update_fields"]:
                kwargs["update_fields"].remove("updated_at")
        super().save(*args, **kwargs)

    # def get_channel_groups(self):
@@ -158,35 +155,40 @@ class M3UAccount(models.Model):
    # """Return all streams linked to this account with enabled ChannelGroups."""
    # return self.streams.filter(channel_group__in=ChannelGroup.objects.filter(m3u_account__enabled=True))


class M3UFilter(models.Model):
    """Defines filters for M3U accounts based on stream name or group title."""

    FILTER_TYPE_CHOICES = (
        ('group', 'Group Title'),
        ('name', 'Stream Name'),
        ("group", "Group"),
        ("name", "Stream Name"),
        ("url", "Stream URL"),
    )

    m3u_account = models.ForeignKey(
        M3UAccount,
        on_delete=models.CASCADE,
        related_name='filters',
        help_text="The M3U account this filter is applied to."
        related_name="filters",
        help_text="The M3U account this filter is applied to.",
    )
    filter_type = models.CharField(
        max_length=50,
        choices=FILTER_TYPE_CHOICES,
        default='group',
        help_text="Filter based on either group title or stream name."
        default="group",
        help_text="Filter based on either group title or stream name.",
    )
    regex_pattern = models.CharField(
        max_length=200,
        help_text="A regex pattern to match streams or groups."
        max_length=200, help_text="A regex pattern to match streams or groups."
    )
    exclude = models.BooleanField(
        default=True,
        help_text="If True, matching items are excluded; if False, only matches are included."
        help_text="If True, matching items are excluded; if False, only matches are included.",
    )
    order = models.PositiveIntegerField(default=0)
    custom_properties = models.JSONField(default=dict, blank=True, null=True)

    def applies_to(self, stream_name, group_name):
        target = group_name if self.filter_type == 'group' else stream_name
        target = group_name if self.filter_type == "group" else stream_name
        return bool(re.search(self.regex_pattern, target, re.IGNORECASE))

    def clean(self):
@ -196,7 +198,9 @@ class M3UFilter(models.Model):
|
|||
raise ValidationError(f"Invalid regex pattern: {self.regex_pattern}")
|
||||
|
||||
def __str__(self):
|
||||
filter_type_display = dict(self.FILTER_TYPE_CHOICES).get(self.filter_type, 'Unknown')
|
||||
filter_type_display = dict(self.FILTER_TYPE_CHOICES).get(
|
||||
self.filter_type, "Unknown"
|
||||
)
|
||||
exclude_status = "Exclude" if self.exclude else "Include"
|
||||
return f"[{self.m3u_account.name}] {filter_type_display}: {self.regex_pattern} ({exclude_status})"
|
||||
|
||||
|
|
@ -222,40 +226,35 @@ class M3UFilter(models.Model):
|
|||
|
||||
class ServerGroup(models.Model):
|
||||
"""Represents a logical grouping of servers or channels."""
|
||||
|
||||
name = models.CharField(
|
||||
max_length=100,
|
||||
unique=True,
|
||||
help_text="Unique name for this server group."
|
||||
max_length=100, unique=True, help_text="Unique name for this server group."
|
||||
)
|
||||
|
||||
def __str__(self):
|
||||
return self.name
|
||||
|
||||
from django.db import models
|
||||
|
||||
class M3UAccountProfile(models.Model):
|
||||
"""Represents a profile associated with an M3U Account."""
|
||||
|
||||
m3u_account = models.ForeignKey(
|
||||
'M3UAccount',
|
||||
"M3UAccount",
|
||||
on_delete=models.CASCADE,
|
||||
related_name='profiles',
|
||||
help_text="The M3U account this profile belongs to."
|
||||
related_name="profiles",
|
||||
help_text="The M3U account this profile belongs to.",
|
||||
)
|
||||
name = models.CharField(
|
||||
max_length=255,
|
||||
help_text="Name for the M3U account profile"
|
||||
max_length=255, help_text="Name for the M3U account profile"
|
||||
)
|
||||
is_default = models.BooleanField(
|
||||
default=False,
|
||||
help_text="Set to false to deactivate this profile"
|
||||
default=False, help_text="Set to false to deactivate this profile"
|
||||
)
|
||||
max_streams = models.PositiveIntegerField(
|
||||
default=0,
|
||||
help_text="Maximum number of concurrent streams (0 for unlimited)"
|
||||
default=0, help_text="Maximum number of concurrent streams (0 for unlimited)"
|
||||
)
|
||||
is_active = models.BooleanField(
|
||||
default=True,
|
||||
help_text="Set to false to deactivate this profile"
|
||||
default=True, help_text="Set to false to deactivate this profile"
|
||||
)
|
||||
search_pattern = models.CharField(
|
||||
max_length=255,
|
||||
|
|
@ -264,22 +263,95 @@ class M3UAccountProfile(models.Model):
|
|||
max_length=255,
|
||||
)
|
||||
current_viewers = models.PositiveIntegerField(default=0)
|
||||
custom_properties = models.JSONField(
|
||||
default=dict,
|
||||
blank=True,
|
||||
null=True,
|
||||
help_text="Custom properties for storing account information from provider (e.g., XC account details, expiration dates)"
|
||||
)
|
||||
|
||||
class Meta:
|
||||
constraints = [
|
||||
models.UniqueConstraint(fields=['m3u_account', 'name'], name='unique_account_name')
|
||||
models.UniqueConstraint(
|
||||
fields=["m3u_account", "name"], name="unique_account_name"
|
||||
)
|
||||
]
|
||||
|
||||
def __str__(self):
|
||||
return f"{self.name} ({self.m3u_account.name})"
|
||||
|
||||
def get_account_expiration(self):
|
||||
"""Get account expiration date from custom properties if available"""
|
||||
if not self.custom_properties:
|
||||
return None
|
||||
|
||||
user_info = self.custom_properties.get('user_info', {})
|
||||
exp_date = user_info.get('exp_date')
|
||||
|
||||
if exp_date:
|
||||
try:
|
||||
from datetime import datetime
|
||||
# XC exp_date is typically a Unix timestamp
|
||||
if isinstance(exp_date, (int, float)):
|
||||
return datetime.fromtimestamp(exp_date)
|
||||
elif isinstance(exp_date, str):
|
||||
# Try to parse as timestamp first, then as ISO date
|
||||
try:
|
||||
return datetime.fromtimestamp(float(exp_date))
|
||||
except ValueError:
|
||||
return datetime.fromisoformat(exp_date)
|
||||
except (ValueError, TypeError):
|
||||
pass
|
||||
|
||||
return None
|
||||
|
||||
def get_account_status(self):
|
||||
"""Get account status from custom properties if available"""
|
||||
if not self.custom_properties:
|
||||
return None
|
||||
|
||||
user_info = self.custom_properties.get('user_info', {})
|
||||
return user_info.get('status')
|
||||
|
||||
def get_max_connections(self):
|
||||
"""Get maximum connections from custom properties if available"""
|
||||
if not self.custom_properties:
|
||||
return None
|
||||
|
||||
user_info = self.custom_properties.get('user_info', {})
|
||||
return user_info.get('max_connections')
|
||||
|
||||
def get_active_connections(self):
|
||||
"""Get active connections from custom properties if available"""
|
||||
if not self.custom_properties:
|
||||
return None
|
||||
|
||||
user_info = self.custom_properties.get('user_info', {})
|
||||
return user_info.get('active_cons')
|
||||
|
||||
def get_last_refresh(self):
|
||||
"""Get last refresh timestamp from custom properties if available"""
|
||||
if not self.custom_properties:
|
||||
return None
|
||||
|
||||
last_refresh = self.custom_properties.get('last_refresh')
|
||||
if last_refresh:
|
||||
try:
|
||||
from datetime import datetime
|
||||
return datetime.fromisoformat(last_refresh)
|
||||
except (ValueError, TypeError):
|
||||
pass
|
||||
|
||||
return None
|
||||
|
||||
|
||||
@receiver(models.signals.post_save, sender=M3UAccount)
|
||||
def create_profile_for_m3u_account(sender, instance, created, **kwargs):
|
||||
"""Automatically create an M3UAccountProfile when M3UAccount is created."""
|
||||
if created:
|
||||
M3UAccountProfile.objects.create(
|
||||
m3u_account=instance,
|
||||
name=f'{instance.name} Default',
|
||||
name=f"{instance.name} Default",
|
||||
max_streams=instance.max_streams,
|
||||
is_default=True,
|
||||
is_active=True,
|
||||
|
|
@ -292,6 +364,5 @@ def create_profile_for_m3u_account(sender, instance, created, **kwargs):
|
|||
is_default=True,
|
||||
)
|
||||
|
||||
|
||||
profile.max_streams = instance.max_streams
|
||||
profile.save()
|
||||
|
|
|
|||
|
|
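A usage sketch for the helper methods added above, not part of the commit itself: it assumes an XC-type account whose refresh has populated custom_properties["user_info"] (the account_type value "XC" matches the check used in the serializers later in this commit).

# Hypothetical usage of M3UAccountProfile's new custom_properties helpers.
from apps.m3u.models import M3UAccountProfile

profile = M3UAccountProfile.objects.filter(m3u_account__account_type="XC").first()
if profile:
    print(profile.get_account_status())       # user_info["status"], e.g. "Active"
    print(profile.get_max_connections())      # user_info["max_connections"]
    print(profile.get_account_expiration())   # datetime parsed from exp_date, or None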
@@ -1,41 +1,106 @@
-from rest_framework import serializers
+from core.utils import validate_flexible_url
+from rest_framework import serializers, status
+from rest_framework.response import Response
 from .models import M3UAccount, M3UFilter, ServerGroup, M3UAccountProfile
 from core.models import UserAgent
 from apps.channels.models import ChannelGroup, ChannelGroupM3UAccount
-from apps.channels.serializers import ChannelGroupM3UAccountSerializer, ChannelGroupSerializer
+from apps.channels.serializers import (
+    ChannelGroupM3UAccountSerializer,
+)
 import logging
 import json

 logger = logging.getLogger(__name__)


 class M3UFilterSerializer(serializers.ModelSerializer):
     """Serializer for M3U Filters"""
-    channel_groups = ChannelGroupM3UAccountSerializer(source='m3u_account', many=True)

     class Meta:
         model = M3UFilter
-        fields = ['id', 'filter_type', 'regex_pattern', 'exclude', 'channel_groups']
+        fields = [
+            "id",
+            "filter_type",
+            "regex_pattern",
+            "exclude",
+            "order",
+            "custom_properties",
+        ]

 from rest_framework import serializers
 from .models import M3UAccountProfile

 class M3UAccountProfileSerializer(serializers.ModelSerializer):
+    account = serializers.SerializerMethodField()
+
+    def get_account(self, obj):
+        """Include basic account information for frontend use"""
+        return {
+            'id': obj.m3u_account.id,
+            'name': obj.m3u_account.name,
+            'account_type': obj.m3u_account.account_type,
+            'is_xtream_codes': obj.m3u_account.account_type == 'XC'
+        }
+
     class Meta:
         model = M3UAccountProfile
-        fields = ['id', 'name', 'max_streams', 'is_active', 'is_default', 'current_viewers', 'search_pattern', 'replace_pattern']
-        read_only_fields = ['id']
+        fields = [
+            "id",
+            "name",
+            "max_streams",
+            "is_active",
+            "is_default",
+            "current_viewers",
+            "search_pattern",
+            "replace_pattern",
+            "custom_properties",
+            "account",
+        ]
+        read_only_fields = ["id", "account"]
         extra_kwargs = {
             'search_pattern': {'required': False, 'allow_blank': True},
             'replace_pattern': {'required': False, 'allow_blank': True},
         }

     def create(self, validated_data):
-        m3u_account = self.context.get('m3u_account')
+        m3u_account = self.context.get("m3u_account")

         # Use the m3u_account when creating the profile
-        validated_data['m3u_account_id'] = m3u_account.id
+        validated_data["m3u_account_id"] = m3u_account.id

         return super().create(validated_data)

+    def validate(self, data):
+        """Custom validation to handle default profiles"""
+        # For updates to existing instances
+        if self.instance and self.instance.is_default:
+            # For default profiles, search_pattern and replace_pattern are not required
+            # and we don't want to validate them since they shouldn't be changed
+            return data
+
+        # For non-default profiles or new profiles, ensure required fields are present
+        if not data.get('search_pattern'):
+            raise serializers.ValidationError({
+                'search_pattern': ['This field is required for non-default profiles.']
+            })
+        if not data.get('replace_pattern'):
+            raise serializers.ValidationError({
+                'replace_pattern': ['This field is required for non-default profiles.']
+            })
+
+        return data
+
     def update(self, instance, validated_data):
         if instance.is_default:
-            raise serializers.ValidationError("Default profiles cannot be modified.")
+            # For default profiles, only allow updating name and custom_properties (for notes)
+            allowed_fields = {'name', 'custom_properties'}
+
+            # Remove any fields that aren't allowed for default profiles
+            disallowed_fields = set(validated_data.keys()) - allowed_fields
+            if disallowed_fields:
+                raise serializers.ValidationError(
+                    f"Default profiles can only modify name and notes. "
+                    f"Cannot modify: {', '.join(disallowed_fields)}"
+                )

         return super().update(instance, validated_data)

     def destroy(self, request, *args, **kwargs):

@@ -43,13 +108,15 @@ class M3UAccountProfileSerializer(serializers.ModelSerializer):
         if instance.is_default:
             return Response(
                 {"error": "Default profiles cannot be deleted."},
-                status=status.HTTP_400_BAD_REQUEST
+                status=status.HTTP_400_BAD_REQUEST,
             )
         return super().destroy(request, *args, **kwargs)


 class M3UAccountSerializer(serializers.ModelSerializer):
     """Serializer for M3U Account"""
-    filters = M3UFilterSerializer(many=True, read_only=True)
+
+    filters = serializers.SerializerMethodField()
     # Include user_agent as a mandatory field using its primary key.
     user_agent = serializers.PrimaryKeyRelatedField(
         queryset=UserAgent.objects.all(),

@@ -57,28 +124,96 @@ class M3UAccountSerializer(serializers.ModelSerializer):
         allow_null=True,
     )
     profiles = M3UAccountProfileSerializer(many=True, read_only=True)
-    read_only_fields = ['locked', 'created_at', 'updated_at']
+    read_only_fields = ["locked", "created_at", "updated_at"]
     # channel_groups = serializers.SerializerMethodField()
-    channel_groups = ChannelGroupM3UAccountSerializer(source='channel_group', many=True, required=False)
+    channel_groups = ChannelGroupM3UAccountSerializer(
+        source="channel_group", many=True, required=False
+    )
+    server_url = serializers.CharField(
+        required=False,
+        allow_blank=True,
+        allow_null=True,
+        validators=[validate_flexible_url],
+    )
+    enable_vod = serializers.BooleanField(required=False, write_only=True)
+    auto_enable_new_groups_live = serializers.BooleanField(required=False, write_only=True)
+    auto_enable_new_groups_vod = serializers.BooleanField(required=False, write_only=True)
+    auto_enable_new_groups_series = serializers.BooleanField(required=False, write_only=True)

     class Meta:
         model = M3UAccount
         fields = [
-            'id', 'name', 'server_url', 'file_path', 'server_group',
-            'max_streams', 'is_active', 'created_at', 'updated_at', 'filters', 'user_agent', 'profiles', 'locked',
-            'channel_groups', 'refresh_interval', 'custom_properties', 'account_type', 'username', 'password', 'stale_stream_days',
-            'status', 'last_message',
+            "id",
+            "name",
+            "server_url",
+            "file_path",
+            "server_group",
+            "max_streams",
+            "is_active",
+            "created_at",
+            "updated_at",
+            "filters",
+            "user_agent",
+            "profiles",
+            "locked",
+            "channel_groups",
+            "refresh_interval",
+            "custom_properties",
+            "account_type",
+            "username",
+            "password",
+            "stale_stream_days",
+            "priority",
+            "status",
+            "last_message",
+            "enable_vod",
+            "auto_enable_new_groups_live",
+            "auto_enable_new_groups_vod",
+            "auto_enable_new_groups_series",
         ]
         extra_kwargs = {
-            'password': {
-                'required': False,
-                'allow_blank': True,
+            "password": {
+                "required": False,
+                "allow_blank": True,
             },
         }

+    def to_representation(self, instance):
+        data = super().to_representation(instance)
+
+        # Parse custom_properties to get VOD preference and auto_enable_new_groups settings
+        custom_props = instance.custom_properties or {}
+
+        data["enable_vod"] = custom_props.get("enable_vod", False)
+        data["auto_enable_new_groups_live"] = custom_props.get("auto_enable_new_groups_live", True)
+        data["auto_enable_new_groups_vod"] = custom_props.get("auto_enable_new_groups_vod", True)
+        data["auto_enable_new_groups_series"] = custom_props.get("auto_enable_new_groups_series", True)
+        return data
+
     def update(self, instance, validated_data):
+        # Handle enable_vod preference and auto_enable_new_groups settings
+        enable_vod = validated_data.pop("enable_vod", None)
+        auto_enable_new_groups_live = validated_data.pop("auto_enable_new_groups_live", None)
+        auto_enable_new_groups_vod = validated_data.pop("auto_enable_new_groups_vod", None)
+        auto_enable_new_groups_series = validated_data.pop("auto_enable_new_groups_series", None)
+
+        # Get existing custom_properties
+        custom_props = instance.custom_properties or {}
+
+        # Update preferences
+        if enable_vod is not None:
+            custom_props["enable_vod"] = enable_vod
+        if auto_enable_new_groups_live is not None:
+            custom_props["auto_enable_new_groups_live"] = auto_enable_new_groups_live
+        if auto_enable_new_groups_vod is not None:
+            custom_props["auto_enable_new_groups_vod"] = auto_enable_new_groups_vod
+        if auto_enable_new_groups_series is not None:
+            custom_props["auto_enable_new_groups_series"] = auto_enable_new_groups_series
+
+        validated_data["custom_properties"] = custom_props
+
         # Pop out channel group memberships so we can handle them manually
-        channel_group_data = validated_data.pop('channel_group', [])
+        channel_group_data = validated_data.pop("channel_group", [])

         # First, update the M3UAccount itself
         for attr, value in validated_data.items():

@@ -88,13 +223,12 @@ class M3UAccountSerializer(serializers.ModelSerializer):
         # Prepare a list of memberships to update
         memberships_to_update = []
         for group_data in channel_group_data:
-            group = group_data.get('channel_group')
-            enabled = group_data.get('enabled')
+            group = group_data.get("channel_group")
+            enabled = group_data.get("enabled")

             try:
                 membership = ChannelGroupM3UAccount.objects.get(
-                    m3u_account=instance,
-                    channel_group=group
+                    m3u_account=instance, channel_group=group
                 )
                 membership.enabled = enabled
                 memberships_to_update.append(membership)

@@ -103,13 +237,39 @@ class M3UAccountSerializer(serializers.ModelSerializer):
         # Perform the bulk update
         if memberships_to_update:
-            ChannelGroupM3UAccount.objects.bulk_update(memberships_to_update, ['enabled'])
+            ChannelGroupM3UAccount.objects.bulk_update(
+                memberships_to_update, ["enabled"]
+            )

         return instance

+    def create(self, validated_data):
+        # Handle enable_vod preference and auto_enable_new_groups settings during creation
+        enable_vod = validated_data.pop("enable_vod", False)
+        auto_enable_new_groups_live = validated_data.pop("auto_enable_new_groups_live", True)
+        auto_enable_new_groups_vod = validated_data.pop("auto_enable_new_groups_vod", True)
+        auto_enable_new_groups_series = validated_data.pop("auto_enable_new_groups_series", True)
+
+        # Parse existing custom_properties or create new
+        custom_props = validated_data.get("custom_properties", {})
+
+        # Set preferences (default to True for auto_enable_new_groups)
+        custom_props["enable_vod"] = enable_vod
+        custom_props["auto_enable_new_groups_live"] = auto_enable_new_groups_live
+        custom_props["auto_enable_new_groups_vod"] = auto_enable_new_groups_vod
+        custom_props["auto_enable_new_groups_series"] = auto_enable_new_groups_series
+        validated_data["custom_properties"] = custom_props
+
+        return super().create(validated_data)
+
+    def get_filters(self, obj):
+        filters = obj.filters.order_by("order")
+        return M3UFilterSerializer(filters, many=True).data
+

 class ServerGroupSerializer(serializers.ModelSerializer):
     """Serializer for Server Group"""

     class Meta:
         model = ServerGroup
-        fields = ['id', 'name']
+        fields = ["id", "name"]
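The enable_vod and auto_enable_new_groups_* fields above are write-only: they never become model columns. create()/update() fold them into the account's custom_properties JSON, and to_representation() surfaces them back to the client. A hypothetical request body illustrating the round trip (values are placeholders, not part of the diff):

# Sketch: a PATCH body for an M3U account; the serializer pops these booleans
# into custom_properties, and the same keys reappear in the response.
payload = {
    "name": "My provider",
    "enable_vod": True,
    "auto_enable_new_groups_live": True,
    "auto_enable_new_groups_vod": False,
    "auto_enable_new_groups_series": False,
}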
2670  apps/m3u/tasks.py: file diff suppressed because it is too large
@@ -1,9 +1,40 @@
 # apps/m3u/utils.py
 import threading
+import logging
+from django.db import models

 lock = threading.Lock()
 # Dictionary to track usage: {m3u_account_id: current_usage}
 active_streams_map = {}
+logger = logging.getLogger(__name__)
+
+
+def normalize_stream_url(url):
+    """
+    Normalize stream URLs for compatibility with FFmpeg.
+
+    Handles VLC-specific syntax like udp://@239.0.0.1:1234 by removing the @ symbol.
+    FFmpeg doesn't recognize the @ prefix for multicast addresses.
+
+    Args:
+        url (str): The stream URL to normalize
+
+    Returns:
+        str: The normalized URL
+    """
+    if not url:
+        return url
+
+    # Handle VLC-style UDP multicast URLs: udp://@239.0.0.1:1234 -> udp://239.0.0.1:1234
+    # The @ symbol in VLC means "listen on all interfaces" but FFmpeg doesn't use this syntax
+    if url.startswith('udp://@'):
+        normalized = url.replace('udp://@', 'udp://', 1)
+        logger.debug(f"Normalized VLC-style UDP URL: {url} -> {normalized}")
+        return normalized
+
+    # Could add other normalizations here in the future (rtp://@, etc.)
+    return url


 def increment_stream_count(account):
     with lock:

@@ -24,3 +55,64 @@ def decrement_stream_count(account):
         active_streams_map[account.id] = current_usage
         account.active_streams = current_usage
         account.save(update_fields=['active_streams'])
+
+
+def calculate_tuner_count(minimum=1, unlimited_default=10):
+    """
+    Calculate tuner/connection count from active M3U profiles and custom streams.
+    This is the centralized function used by both HDHR and XtreamCodes APIs.
+
+    Args:
+        minimum (int): Minimum number to return (default: 1)
+        unlimited_default (int): Default value when unlimited profiles exist (default: 10)
+
+    Returns:
+        int: Calculated tuner/connection count
+    """
+    try:
+        from apps.m3u.models import M3UAccountProfile
+        from apps.channels.models import Stream
+
+        # Calculate tuner count from active profiles from active M3U accounts (excluding default "custom Default" profile)
+        profiles = M3UAccountProfile.objects.filter(
+            is_active=True,
+            m3u_account__is_active=True,  # Only include profiles from enabled M3U accounts
+        ).exclude(id=1)
+
+        # 1. Check if any profile has unlimited streams (max_streams=0)
+        has_unlimited = profiles.filter(max_streams=0).exists()
+
+        # 2. Calculate tuner count from limited profiles
+        limited_tuners = 0
+        if not has_unlimited:
+            limited_tuners = (
+                profiles.filter(max_streams__gt=0)
+                .aggregate(total=models.Sum("max_streams"))
+                .get("total", 0)
+                or 0
+            )
+
+        # 3. Add custom stream count to tuner count
+        custom_stream_count = Stream.objects.filter(is_custom=True).count()
+        logger.debug(f"Found {custom_stream_count} custom streams")
+
+        # 4. Calculate final tuner count
+        if has_unlimited:
+            # If there are unlimited profiles, start with unlimited_default plus custom streams
+            tuner_count = unlimited_default + custom_stream_count
+        else:
+            # Otherwise use the limited profile sum plus custom streams
+            tuner_count = limited_tuners + custom_stream_count
+
+        # 5. Ensure minimum number
+        tuner_count = max(minimum, tuner_count)
+
+        logger.debug(
+            f"Calculated tuner count: {tuner_count} (limited profiles: {limited_tuners}, custom streams: {custom_stream_count}, unlimited: {has_unlimited})"
+        )
+
+        return tuner_count
+
+    except Exception as e:
+        logger.error(f"Error calculating tuner count: {e}")
+        return minimum  # Fallback to minimum value
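Since normalize_stream_url is a pure string transform, its behavior can be pinned down with a couple of asserts. A small sketch, not part of the commit:

from apps.m3u.utils import normalize_stream_url

# VLC's "listen on all interfaces" marker is stripped for FFmpeg...
assert normalize_stream_url("udp://@239.0.0.1:1234") == "udp://239.0.0.1:1234"
# ...while other schemes and empty values pass through untouched.
assert normalize_stream_url("http://example.com/live.ts") == "http://example.com/live.ts"
assert normalize_stream_url("") == ""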
@@ -3,6 +3,7 @@ from django.views import View
 from django.utils.decorators import method_decorator
 from django.contrib.auth.decorators import login_required
+from django.views.decorators.csrf import csrf_exempt
 from django.http import JsonResponse
 from apps.m3u.models import M3UAccount
 import json
@@ -1,16 +1,14 @@
 from django.urls import path, re_path, include
-from .views import generate_m3u, generate_epg
+from .views import m3u_endpoint, epg_endpoint, xc_get, xc_movie_stream, xc_series_stream
 from core.views import stream_view

-app_name = 'output'
+app_name = "output"

 urlpatterns = [
     # Allow `/m3u`, `/m3u/`, `/m3u/profile_name`, and `/m3u/profile_name/`
-    re_path(r'^m3u(?:/(?P<profile_name>[^/]+))?/?$', generate_m3u, name='generate_m3u'),
-
+    re_path(r"^m3u(?:/(?P<profile_name>[^/]+))?/?$", m3u_endpoint, name="m3u_endpoint"),
     # Allow `/epg`, `/epg/`, `/epg/profile_name`, and `/epg/profile_name/`
-    re_path(r'^epg(?:/(?P<profile_name>[^/]+))?/?$', generate_epg, name='generate_epg'),
-
+    re_path(r"^epg(?:/(?P<profile_name>[^/]+))?/?$", epg_endpoint, name="epg_endpoint"),
     # Allow both `/stream/<int:stream_id>` and `/stream/<int:stream_id>/`
-    re_path(r'^stream/(?P<channel_uuid>[0-9a-fA-F\-]+)/?$', stream_view, name='stream'),
+    re_path(r"^stream/(?P<channel_uuid>[0-9a-fA-F\-]+)/?$", stream_view, name="stream"),
 ]
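The optional-segment regex above accepts all four shapes per endpoint. A quick check of the pattern itself with plain `re`, independent of Django ("sports" is a placeholder profile name):

import re

pattern = re.compile(r"^m3u(?:/(?P<profile_name>[^/]+))?/?$")
for candidate in ("m3u", "m3u/", "m3u/sports", "m3u/sports/"):
    match = pattern.match(candidate)
    assert match is not None
    print(candidate, "->", match.group("profile_name"))  # None for the first two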
2751  apps/output/views.py: file diff suppressed because it is too large

2  apps/plugins/__init__.py (new file)
@@ -0,0 +1,2 @@
default_app_config = "apps.plugins.apps.PluginsConfig"

22  apps/plugins/api_urls.py (new file)
@@ -0,0 +1,22 @@
from django.urls import path
from .api_views import (
    PluginsListAPIView,
    PluginReloadAPIView,
    PluginSettingsAPIView,
    PluginRunAPIView,
    PluginEnabledAPIView,
    PluginImportAPIView,
    PluginDeleteAPIView,
)

app_name = "plugins"

urlpatterns = [
    path("plugins/", PluginsListAPIView.as_view(), name="list"),
    path("plugins/reload/", PluginReloadAPIView.as_view(), name="reload"),
    path("plugins/import/", PluginImportAPIView.as_view(), name="import"),
    path("plugins/<str:key>/delete/", PluginDeleteAPIView.as_view(), name="delete"),
    path("plugins/<str:key>/settings/", PluginSettingsAPIView.as_view(), name="settings"),
    path("plugins/<str:key>/run/", PluginRunAPIView.as_view(), name="run"),
    path("plugins/<str:key>/enabled/", PluginEnabledAPIView.as_view(), name="enabled"),
]
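A sketch of driving these routes from a client. The host, auth header, and the mount prefix of this urlconf are assumptions (the include() point is not shown in this commit); the relative "plugins/..." paths and request bodies follow the views below.

import requests

base = "http://dispatcharr.local/api"            # placeholder host and prefix
headers = {"Authorization": "Bearer <token>"}    # placeholder credentials

# Enable a plugin, then trigger one of its actions.
requests.post(f"{base}/plugins/my_plugin/enabled/", json={"enabled": True}, headers=headers)
requests.post(f"{base}/plugins/my_plugin/run/", json={"action": "sync", "params": {}}, headers=headers)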
306  apps/plugins/api_views.py (new file)

@@ -0,0 +1,306 @@
import logging
from rest_framework.views import APIView
from rest_framework.response import Response
from rest_framework import status
from rest_framework.decorators import api_view
from django.conf import settings
from django.core.files.uploadedfile import UploadedFile
import io
import os
import zipfile
import shutil
import tempfile
from apps.accounts.permissions import (
    Authenticated,
    permission_classes_by_method,
)

from .loader import PluginManager
from .models import PluginConfig

logger = logging.getLogger(__name__)


class PluginsListAPIView(APIView):
    def get_permissions(self):
        try:
            return [
                perm() for perm in permission_classes_by_method[self.request.method]
            ]
        except KeyError:
            return [Authenticated()]

    def get(self, request):
        pm = PluginManager.get()
        # Ensure registry is up-to-date on each request
        pm.discover_plugins()
        return Response({"plugins": pm.list_plugins()})


class PluginReloadAPIView(APIView):
    def get_permissions(self):
        try:
            return [
                perm() for perm in permission_classes_by_method[self.request.method]
            ]
        except KeyError:
            return [Authenticated()]

    def post(self, request):
        pm = PluginManager.get()
        pm.discover_plugins()
        return Response({"success": True, "count": len(pm._registry)})


class PluginImportAPIView(APIView):
    def get_permissions(self):
        try:
            return [
                perm() for perm in permission_classes_by_method[self.request.method]
            ]
        except KeyError:
            return [Authenticated()]

    def post(self, request):
        file: UploadedFile = request.FILES.get("file")
        if not file:
            return Response({"success": False, "error": "Missing 'file' upload"}, status=status.HTTP_400_BAD_REQUEST)

        pm = PluginManager.get()
        plugins_dir = pm.plugins_dir

        try:
            zf = zipfile.ZipFile(file)
        except zipfile.BadZipFile:
            return Response({"success": False, "error": "Invalid zip file"}, status=status.HTTP_400_BAD_REQUEST)

        # Extract to a temporary directory first to avoid server reload thrash
        tmp_root = tempfile.mkdtemp(prefix="plugin_import_")
        try:
            file_members = [m for m in zf.infolist() if not m.is_dir()]
            if not file_members:
                shutil.rmtree(tmp_root, ignore_errors=True)
                return Response({"success": False, "error": "Archive is empty"}, status=status.HTTP_400_BAD_REQUEST)

            for member in file_members:
                name = member.filename
                if not name or name.endswith("/"):
                    continue
                # Normalize and prevent path traversal
                norm = os.path.normpath(name)
                if norm.startswith("..") or os.path.isabs(norm):
                    shutil.rmtree(tmp_root, ignore_errors=True)
                    return Response({"success": False, "error": "Unsafe path in archive"}, status=status.HTTP_400_BAD_REQUEST)
                dest_path = os.path.join(tmp_root, norm)
                os.makedirs(os.path.dirname(dest_path), exist_ok=True)
                with zf.open(member, 'r') as src, open(dest_path, 'wb') as dst:
                    shutil.copyfileobj(src, dst)

            # Find candidate directory containing plugin.py or __init__.py
            candidates = []
            for dirpath, dirnames, filenames in os.walk(tmp_root):
                has_pluginpy = "plugin.py" in filenames
                has_init = "__init__.py" in filenames
                if has_pluginpy or has_init:
                    depth = len(os.path.relpath(dirpath, tmp_root).split(os.sep))
                    candidates.append((0 if has_pluginpy else 1, depth, dirpath))
            if not candidates:
                shutil.rmtree(tmp_root, ignore_errors=True)
                return Response({"success": False, "error": "Invalid plugin: missing plugin.py or package __init__.py"}, status=status.HTTP_400_BAD_REQUEST)

            candidates.sort()
            chosen = candidates[0][2]
            # Determine plugin key: prefer chosen folder name; if chosen is tmp_root, use zip base name
            base_name = os.path.splitext(getattr(file, "name", "plugin"))[0]
            plugin_key = os.path.basename(chosen.rstrip(os.sep))
            if chosen.rstrip(os.sep) == tmp_root.rstrip(os.sep):
                plugin_key = base_name
            plugin_key = plugin_key.replace(" ", "_").lower()

            final_dir = os.path.join(plugins_dir, plugin_key)
            if os.path.exists(final_dir):
                # If final dir exists but contains a valid plugin, refuse; otherwise clear it
                if os.path.exists(os.path.join(final_dir, "plugin.py")) or os.path.exists(os.path.join(final_dir, "__init__.py")):
                    shutil.rmtree(tmp_root, ignore_errors=True)
                    return Response({"success": False, "error": f"Plugin '{plugin_key}' already exists"}, status=status.HTTP_400_BAD_REQUEST)
                try:
                    shutil.rmtree(final_dir)
                except Exception:
                    pass

            # Move chosen directory into final location
            if chosen.rstrip(os.sep) == tmp_root.rstrip(os.sep):
                # Move all contents into final_dir
                os.makedirs(final_dir, exist_ok=True)
                for item in os.listdir(tmp_root):
                    shutil.move(os.path.join(tmp_root, item), os.path.join(final_dir, item))
            else:
                shutil.move(chosen, final_dir)
            # Cleanup temp
            shutil.rmtree(tmp_root, ignore_errors=True)
            target_dir = final_dir
        finally:
            try:
                shutil.rmtree(tmp_root, ignore_errors=True)
            except Exception:
                pass

        # Reload discovery and validate plugin entry
        pm.discover_plugins()
        plugin = pm._registry.get(plugin_key)
        if not plugin:
            # Cleanup the copied folder to avoid leaving invalid plugin behind
            try:
                shutil.rmtree(target_dir, ignore_errors=True)
            except Exception:
                pass
            return Response({"success": False, "error": "Invalid plugin: missing Plugin class in plugin.py or __init__.py"}, status=status.HTTP_400_BAD_REQUEST)

        # Extra validation: ensure Plugin.run exists
        instance = getattr(plugin, "instance", None)
        run_method = getattr(instance, "run", None)
        if not callable(run_method):
            try:
                shutil.rmtree(target_dir, ignore_errors=True)
            except Exception:
                pass
            return Response({"success": False, "error": "Invalid plugin: Plugin class must define a callable run(action, params, context)"}, status=status.HTTP_400_BAD_REQUEST)

        # Find DB config to return enabled/ever_enabled
        try:
            cfg = PluginConfig.objects.get(key=plugin_key)
            enabled = cfg.enabled
            ever_enabled = getattr(cfg, "ever_enabled", False)
        except PluginConfig.DoesNotExist:
            enabled = False
            ever_enabled = False

        return Response({
            "success": True,
            "plugin": {
                "key": plugin.key,
                "name": plugin.name,
                "version": plugin.version,
                "description": plugin.description,
                "enabled": enabled,
                "ever_enabled": ever_enabled,
                "fields": plugin.fields or [],
                "actions": plugin.actions or [],
            }
        })


class PluginSettingsAPIView(APIView):
    def get_permissions(self):
        try:
            return [
                perm() for perm in permission_classes_by_method[self.request.method]
            ]
        except KeyError:
            return [Authenticated()]

    def post(self, request, key):
        pm = PluginManager.get()
        data = request.data or {}
        settings = data.get("settings", {})
        try:
            updated = pm.update_settings(key, settings)
            return Response({"success": True, "settings": updated})
        except Exception as e:
            return Response({"success": False, "error": str(e)}, status=status.HTTP_400_BAD_REQUEST)


class PluginRunAPIView(APIView):
    def get_permissions(self):
        try:
            return [
                perm() for perm in permission_classes_by_method[self.request.method]
            ]
        except KeyError:
            return [Authenticated()]

    def post(self, request, key):
        pm = PluginManager.get()
        action = request.data.get("action")
        params = request.data.get("params", {})
        if not action:
            return Response({"success": False, "error": "Missing 'action'"}, status=status.HTTP_400_BAD_REQUEST)

        # Respect plugin enabled flag
        try:
            cfg = PluginConfig.objects.get(key=key)
            if not cfg.enabled:
                return Response({"success": False, "error": "Plugin is disabled"}, status=status.HTTP_403_FORBIDDEN)
        except PluginConfig.DoesNotExist:
            return Response({"success": False, "error": "Plugin not found"}, status=status.HTTP_404_NOT_FOUND)

        try:
            result = pm.run_action(key, action, params)
            return Response({"success": True, "result": result})
        except PermissionError as e:
            return Response({"success": False, "error": str(e)}, status=status.HTTP_403_FORBIDDEN)
        except Exception as e:
            logger.exception("Plugin action failed")
            return Response({"success": False, "error": str(e)}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)


class PluginEnabledAPIView(APIView):
    def get_permissions(self):
        try:
            return [
                perm() for perm in permission_classes_by_method[self.request.method]
            ]
        except KeyError:
            return [Authenticated()]

    def post(self, request, key):
        enabled = request.data.get("enabled")
        if enabled is None:
            return Response({"success": False, "error": "Missing 'enabled' boolean"}, status=status.HTTP_400_BAD_REQUEST)
        try:
            cfg = PluginConfig.objects.get(key=key)
            cfg.enabled = bool(enabled)
            # Mark that this plugin has been enabled at least once
            if cfg.enabled and not cfg.ever_enabled:
                cfg.ever_enabled = True
            cfg.save(update_fields=["enabled", "ever_enabled", "updated_at"])
            return Response({"success": True, "enabled": cfg.enabled, "ever_enabled": cfg.ever_enabled})
        except PluginConfig.DoesNotExist:
            return Response({"success": False, "error": "Plugin not found"}, status=status.HTTP_404_NOT_FOUND)


class PluginDeleteAPIView(APIView):
    def get_permissions(self):
        try:
            return [
                perm() for perm in permission_classes_by_method[self.request.method]
            ]
        except KeyError:
            return [Authenticated()]

    def delete(self, request, key):
        pm = PluginManager.get()
        plugins_dir = pm.plugins_dir
        target_dir = os.path.join(plugins_dir, key)
        # Safety: ensure path inside plugins_dir
        abs_plugins = os.path.abspath(plugins_dir) + os.sep
        abs_target = os.path.abspath(target_dir)
        if not abs_target.startswith(abs_plugins):
            return Response({"success": False, "error": "Invalid plugin path"}, status=status.HTTP_400_BAD_REQUEST)

        # Remove files
        if os.path.isdir(target_dir):
            try:
                shutil.rmtree(target_dir)
            except Exception as e:
                return Response({"success": False, "error": f"Failed to delete plugin files: {e}"}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)

        # Remove DB record
        try:
            PluginConfig.objects.filter(key=key).delete()
        except Exception:
            pass

        # Reload registry
        pm.discover_plugins()
        return Response({"success": True})
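The import view accepts a zip whose tree contains a folder with plugin.py (or a package __init__.py); that folder's name becomes the plugin key. A hypothetical packaging sketch, with placeholder names:

import zipfile

# Package ./hello/plugin.py into hello.zip; on upload the view extracts it,
# picks the directory containing plugin.py, and registers it under key "hello".
with zipfile.ZipFile("hello.zip", "w") as zf:
    zf.write("hello/plugin.py")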
54  apps/plugins/apps.py (new file)

@@ -0,0 +1,54 @@
from django.apps import AppConfig
import os
import sys
from django.db.models.signals import post_migrate


class PluginsConfig(AppConfig):
    name = "apps.plugins"
    verbose_name = "Plugins"

    def ready(self):
        """Wire up plugin discovery without hitting the DB during app init.

        - Skip during common management commands that don't need discovery.
        - Register post_migrate handler to sync plugin registry to DB after migrations.
        - Do an in-memory discovery (no DB) so registry is available early.
        """
        try:
            # Allow explicit opt-out via env var
            if os.environ.get("DISPATCHARR_SKIP_PLUGIN_AUTODISCOVERY", "").lower() in ("1", "true", "yes"):
                return

            argv = sys.argv[1:] if len(sys.argv) > 1 else []
            mgmt_cmds_to_skip = {
                # Skip immediate discovery for these commands
                "makemigrations", "collectstatic", "check", "test", "shell", "showmigrations",
            }
            if argv and argv[0] in mgmt_cmds_to_skip:
                return

            # Run discovery with DB sync after the plugins app has been migrated
            def _post_migrate_discover(sender=None, app_config=None, **kwargs):
                try:
                    if app_config and getattr(app_config, 'label', None) != 'plugins':
                        return
                    from .loader import PluginManager
                    PluginManager.get().discover_plugins(sync_db=True)
                except Exception:
                    import logging
                    logging.getLogger(__name__).exception("Plugin discovery failed in post_migrate")

            post_migrate.connect(
                _post_migrate_discover,
                dispatch_uid="apps.plugins.post_migrate_discover",
            )

            # Perform non-DB discovery now to populate in-memory registry.
            from .loader import PluginManager
            PluginManager.get().discover_plugins(sync_db=False)
        except Exception:
            # Avoid breaking startup due to plugin errors
            import logging

            logging.getLogger(__name__).exception("Plugin discovery wiring failed during app ready")
254  apps/plugins/loader.py (new file)

@@ -0,0 +1,254 @@
import importlib
import json
import logging
import os
import sys
from dataclasses import dataclass, field
from typing import Any, Dict, List, Optional

from django.db import transaction

from .models import PluginConfig

logger = logging.getLogger(__name__)


@dataclass
class LoadedPlugin:
    key: str
    name: str
    version: str = ""
    description: str = ""
    module: Any = None
    instance: Any = None
    fields: List[Dict[str, Any]] = field(default_factory=list)
    actions: List[Dict[str, Any]] = field(default_factory=list)


class PluginManager:
    """Singleton manager that discovers and runs plugins from /data/plugins."""

    _instance: Optional["PluginManager"] = None

    @classmethod
    def get(cls) -> "PluginManager":
        if not cls._instance:
            cls._instance = PluginManager()
        return cls._instance

    def __init__(self) -> None:
        self.plugins_dir = os.environ.get("DISPATCHARR_PLUGINS_DIR", "/data/plugins")
        self._registry: Dict[str, LoadedPlugin] = {}

        # Ensure plugins directory exists
        os.makedirs(self.plugins_dir, exist_ok=True)
        if self.plugins_dir not in sys.path:
            sys.path.append(self.plugins_dir)

    def discover_plugins(self, *, sync_db: bool = True) -> Dict[str, LoadedPlugin]:
        if sync_db:
            logger.info(f"Discovering plugins in {self.plugins_dir}")
        else:
            logger.debug(f"Discovering plugins (no DB sync) in {self.plugins_dir}")
        self._registry.clear()

        try:
            for entry in sorted(os.listdir(self.plugins_dir)):
                path = os.path.join(self.plugins_dir, entry)
                if not os.path.isdir(path):
                    continue

                plugin_key = entry.replace(" ", "_").lower()

                try:
                    self._load_plugin(plugin_key, path)
                except Exception:
                    logger.exception(f"Failed to load plugin '{plugin_key}' from {path}")

            logger.info(f"Discovered {len(self._registry)} plugin(s)")
        except FileNotFoundError:
            logger.warning(f"Plugins directory not found: {self.plugins_dir}")

        # Sync DB records (optional)
        if sync_db:
            try:
                self._sync_db_with_registry()
            except Exception:
                # Defer sync if database is not ready (e.g., first startup before migrate)
                logger.exception("Deferring plugin DB sync; database not ready yet")
        return self._registry

    def _load_plugin(self, key: str, path: str):
        # Plugin can be a package and/or contain plugin.py. Prefer plugin.py when present.
        has_pkg = os.path.exists(os.path.join(path, "__init__.py"))
        has_pluginpy = os.path.exists(os.path.join(path, "plugin.py"))
        if not (has_pkg or has_pluginpy):
            logger.debug(f"Skipping {path}: no plugin.py or package")
            return

        candidate_modules = []
        if has_pluginpy:
            candidate_modules.append(f"{key}.plugin")
        if has_pkg:
            candidate_modules.append(key)

        module = None
        plugin_cls = None
        last_error = None
        for module_name in candidate_modules:
            try:
                logger.debug(f"Importing plugin module {module_name}")
                module = importlib.import_module(module_name)
                plugin_cls = getattr(module, "Plugin", None)
                if plugin_cls is not None:
                    break
                else:
                    logger.warning(f"Module {module_name} has no Plugin class")
            except Exception as e:
                last_error = e
                logger.exception(f"Error importing module {module_name}")

        if plugin_cls is None:
            if last_error:
                raise last_error
            else:
                logger.warning(f"No Plugin class found for {key}; skipping")
                return

        instance = plugin_cls()

        name = getattr(instance, "name", key)
        version = getattr(instance, "version", "")
        description = getattr(instance, "description", "")
        fields = getattr(instance, "fields", [])
        actions = getattr(instance, "actions", [])

        self._registry[key] = LoadedPlugin(
            key=key,
            name=name,
            version=version,
            description=description,
            module=module,
            instance=instance,
            fields=fields,
            actions=actions,
        )

    def _sync_db_with_registry(self):
        with transaction.atomic():
            for key, lp in self._registry.items():
                obj, _ = PluginConfig.objects.get_or_create(
                    key=key,
                    defaults={
                        "name": lp.name,
                        "version": lp.version,
                        "description": lp.description,
                        "settings": {},
                    },
                )
                # Update meta if changed
                changed = False
                if obj.name != lp.name:
                    obj.name = lp.name
                    changed = True
                if obj.version != lp.version:
                    obj.version = lp.version
                    changed = True
                if obj.description != lp.description:
                    obj.description = lp.description
                    changed = True
                if changed:
                    obj.save()

    def list_plugins(self) -> List[Dict[str, Any]]:
        from .models import PluginConfig

        plugins: List[Dict[str, Any]] = []
        try:
            configs = {c.key: c for c in PluginConfig.objects.all()}
        except Exception as e:
            # Database might not be migrated yet; fall back to registry only
            logger.warning("PluginConfig table unavailable; listing registry only: %s", e)
            configs = {}

        # First, include all discovered plugins
        for key, lp in self._registry.items():
            conf = configs.get(key)
            plugins.append(
                {
                    "key": key,
                    "name": lp.name,
                    "version": lp.version,
                    "description": lp.description,
                    "enabled": conf.enabled if conf else False,
                    "ever_enabled": getattr(conf, "ever_enabled", False) if conf else False,
                    "fields": lp.fields or [],
                    "settings": (conf.settings if conf else {}),
                    "actions": lp.actions or [],
                    "missing": False,
                }
            )

        # Then, include any DB-only configs (files missing or failed to load)
        discovered_keys = set(self._registry.keys())
        for key, conf in configs.items():
            if key in discovered_keys:
                continue
            plugins.append(
                {
                    "key": key,
                    "name": conf.name,
                    "version": conf.version,
                    "description": conf.description,
                    "enabled": conf.enabled,
                    "ever_enabled": getattr(conf, "ever_enabled", False),
                    "fields": [],
                    "settings": conf.settings or {},
                    "actions": [],
                    "missing": True,
                }
            )

        return plugins

    def get_plugin(self, key: str) -> Optional[LoadedPlugin]:
        return self._registry.get(key)

    def update_settings(self, key: str, settings: Dict[str, Any]) -> Dict[str, Any]:
        cfg = PluginConfig.objects.get(key=key)
        cfg.settings = settings or {}
        cfg.save(update_fields=["settings", "updated_at"])
        return cfg.settings

    def run_action(self, key: str, action_id: str, params: Optional[Dict[str, Any]] = None) -> Dict[str, Any]:
        lp = self.get_plugin(key)
        if not lp or not lp.instance:
            raise ValueError(f"Plugin '{key}' not found")

        cfg = PluginConfig.objects.get(key=key)
        if not cfg.enabled:
            raise PermissionError(f"Plugin '{key}' is disabled")
        params = params or {}

        # Provide a context object to the plugin
        context = {
            "settings": cfg.settings or {},
            "logger": logger,
            "actions": {a.get("id"): a for a in (lp.actions or [])},
        }

        # Run either via Celery if plugin provides a delayed method, or inline
        run_method = getattr(lp.instance, "run", None)
        if not callable(run_method):
            raise ValueError(f"Plugin '{key}' has no runnable 'run' method")

        try:
            result = run_method(action_id, params, context)
        except Exception:
            logger.exception(f"Plugin '{key}' action '{action_id}' failed")
            raise

        # Normalize return
        if isinstance(result, dict):
            return result
        return {"status": "ok", "result": result}
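A minimal sketch of the contract the loader expects, saved as /data/plugins/hello/plugin.py (the directory name becomes the plugin key; "hello" and the field/action values are illustrative, the attribute names and run() signature come from _load_plugin and run_action above):

# /data/plugins/hello/plugin.py (hypothetical example plugin)
class Plugin:
    name = "Hello"
    version = "1.0.0"
    description = "Minimal example plugin"
    fields = [
        {"id": "greeting", "label": "Greeting", "type": "string", "default": "hi"},
    ]
    actions = [
        {"id": "say_hello", "label": "Say hello"},
    ]

    def run(self, action, params, context):
        # context carries the persisted settings, a logger, and the action map
        if action == "say_hello":
            return {"message": context["settings"].get("greeting", "hi")}
        raise ValueError(f"Unknown action: {action}")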
29  apps/plugins/migrations/0001_initial.py (new file)

@@ -0,0 +1,29 @@
# Generated by Django 5.2.4 on 2025-09-13 13:51

from django.db import migrations, models


class Migration(migrations.Migration):

    initial = True

    dependencies = [
    ]

    operations = [
        migrations.CreateModel(
            name='PluginConfig',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('key', models.CharField(max_length=128, unique=True)),
                ('name', models.CharField(max_length=255)),
                ('version', models.CharField(blank=True, default='', max_length=64)),
                ('description', models.TextField(blank=True, default='')),
                ('enabled', models.BooleanField(default=False)),
                ('ever_enabled', models.BooleanField(default=False)),
                ('settings', models.JSONField(blank=True, default=dict)),
                ('created_at', models.DateTimeField(auto_now_add=True)),
                ('updated_at', models.DateTimeField(auto_now=True)),
            ],
        ),
    ]
1  apps/plugins/migrations/__init__.py (new file)

@@ -0,0 +1 @@
# This file marks the migrations package for the plugins app.
19  apps/plugins/models.py (new file)

@@ -0,0 +1,19 @@
from django.db import models


class PluginConfig(models.Model):
    """Stores discovered plugins and their persisted settings."""

    key = models.CharField(max_length=128, unique=True)
    name = models.CharField(max_length=255)
    version = models.CharField(max_length=64, blank=True, default="")
    description = models.TextField(blank=True, default="")
    enabled = models.BooleanField(default=False)
    # Tracks whether this plugin has ever been enabled at least once
    ever_enabled = models.BooleanField(default=False)
    settings = models.JSONField(default=dict, blank=True)
    created_at = models.DateTimeField(auto_now_add=True)
    updated_at = models.DateTimeField(auto_now=True)

    def __str__(self) -> str:
        return f"{self.name} ({self.key})"
28  apps/plugins/serializers.py (new file)

@@ -0,0 +1,28 @@
from rest_framework import serializers


class PluginActionSerializer(serializers.Serializer):
    id = serializers.CharField()
    label = serializers.CharField()
    description = serializers.CharField(required=False, allow_blank=True)


class PluginFieldSerializer(serializers.Serializer):
    id = serializers.CharField()
    label = serializers.CharField()
    type = serializers.ChoiceField(choices=["string", "number", "boolean", "select"])  # simple types
    default = serializers.JSONField(required=False)
    help_text = serializers.CharField(required=False, allow_blank=True)
    options = serializers.ListField(child=serializers.DictField(), required=False)


class PluginSerializer(serializers.Serializer):
    key = serializers.CharField()
    name = serializers.CharField()
    version = serializers.CharField(allow_blank=True)
    description = serializers.CharField(allow_blank=True)
    enabled = serializers.BooleanField()
    fields = PluginFieldSerializer(many=True)
    settings = serializers.JSONField()
    actions = PluginActionSerializer(many=True)
@ -1,4 +1,6 @@
|
|||
"""Shared configuration between proxy types"""
|
||||
import time
|
||||
from django.db import connection
|
||||
|
||||
class BaseConfig:
|
||||
DEFAULT_USER_AGENT = 'VLC/3.0.20 LibVLC/3.0.20' # Will only be used if connection to settings fail
|
||||
|
|
@@ -9,8 +11,56 @@ class BaseConfig:
    CONNECTION_TIMEOUT = 10  # seconds to wait for initial connection
    MAX_STREAM_SWITCHES = 10  # Maximum number of stream switch attempts before giving up
    BUFFER_CHUNK_SIZE = 188 * 1361  # ~256KB

    # Redis settings
    REDIS_CHUNK_TTL = 60  # Number in seconds - chunks expire after 1 minute
    BUFFERING_TIMEOUT = 15  # Seconds to wait for buffering before switching streams
    BUFFER_SPEED = 1  # What speed to consider the stream buffering, 1x is normal speed, 2x is double speed, etc.

    # Cache for proxy settings (class-level, shared across all instances)
    _proxy_settings_cache = None
    _proxy_settings_cache_time = 0
    _proxy_settings_cache_ttl = 10  # Cache for 10 seconds

    @classmethod
    def get_proxy_settings(cls):
        """Get proxy settings from CoreSettings JSON data with fallback to defaults (cached)"""
        # Check if cache is still valid
        now = time.time()
        if cls._proxy_settings_cache is not None and (now - cls._proxy_settings_cache_time) < cls._proxy_settings_cache_ttl:
            return cls._proxy_settings_cache

        # Cache miss or expired - fetch from database
        try:
            from core.models import CoreSettings
            settings = CoreSettings.get_proxy_settings()
            cls._proxy_settings_cache = settings
            cls._proxy_settings_cache_time = now
            return settings

        except Exception:
            # Return defaults if database query fails
            return {
                "buffering_timeout": 15,
                "buffering_speed": 1.0,
                "redis_chunk_ttl": 60,
                "channel_shutdown_delay": 0,
                "channel_init_grace_period": 5,
            }

        finally:
            # Always close the connection after reading settings
            try:
                connection.close()
            except Exception:
                pass

    @classmethod
    def get_redis_chunk_ttl(cls):
        """Get Redis chunk TTL from database or default"""
        settings = cls.get_proxy_settings()
        return settings.get("redis_chunk_ttl", 60)

    @property
    def REDIS_CHUNK_TTL(self):
        return self.get_redis_chunk_ttl()

class HLSConfig(BaseConfig):
    MIN_SEGMENTS = 12

@@ -32,6 +82,8 @@ class TSConfig(BaseConfig):
    INITIAL_BEHIND_CHUNKS = 4  # How many chunks behind to start a client (4 chunks = ~1MB)
    CHUNK_BATCH_SIZE = 5  # How many chunks to fetch in one batch
    KEEPALIVE_INTERVAL = 0.5  # Seconds between keepalive packets when at buffer head
    # Chunk read timeout
    CHUNK_TIMEOUT = 5  # Seconds to wait for each chunk read

    # Streaming settings
    TARGET_BITRATE = 8000000  # Target bitrate (8 Mbps)

@@ -40,21 +92,14 @@ class TSConfig(BaseConfig):

    # Resource management
    CLEANUP_INTERVAL = 60  # Check for inactive channels every 60 seconds
    CHANNEL_SHUTDOWN_DELAY = 0  # How long to wait after last client before shutdown (seconds)

    # Client tracking settings
    CLIENT_RECORD_TTL = 5  # How long client records persist in Redis (seconds). Client will be considered MIA after this time.
    CLIENT_RECORD_TTL = 60  # How long client records persist in Redis (seconds). Client will be considered MIA after this time.
    CLEANUP_CHECK_INTERVAL = 1  # How often to check for disconnected clients (seconds)
    CHANNEL_INIT_GRACE_PERIOD = 5  # How long to wait for first client after initialization (seconds)
    CLIENT_HEARTBEAT_INTERVAL = 1  # How often to send client heartbeats (seconds)
    GHOST_CLIENT_MULTIPLIER = 5.0  # How many heartbeat intervals before client considered ghost (5 would mean 5 seconds if heartbeat interval is 1)
    CLIENT_HEARTBEAT_INTERVAL = 5  # How often to send client heartbeats (seconds)
    GHOST_CLIENT_MULTIPLIER = 6.0  # How many heartbeat intervals before client considered ghost (6 would mean 30 seconds if heartbeat interval is 5)
    CLIENT_WAIT_TIMEOUT = 30  # Seconds to wait for client to connect

    # TS packets are 188 bytes
    # Make chunk size a multiple of TS packet size for perfect alignment
    # ~1MB is ideal for streaming (matches typical media buffer sizes)

    # Stream health and recovery settings
    MAX_HEALTH_RECOVERY_ATTEMPTS = 2  # Maximum times to attempt recovery for a single stream
    MAX_RECONNECT_ATTEMPTS = 3  # Maximum reconnects to try before switching streams

@@ -64,3 +109,47 @@ class TSConfig(BaseConfig):

    # Database-dependent settings with fallbacks
    @classmethod
    def get_channel_shutdown_delay(cls):
        """Get channel shutdown delay from database or default"""
        settings = cls.get_proxy_settings()
        return settings.get("channel_shutdown_delay", 0)

    @classmethod
    def get_buffering_timeout(cls):
        """Get buffering timeout from database or default"""
        settings = cls.get_proxy_settings()
        return settings.get("buffering_timeout", 15)

    @classmethod
    def get_buffering_speed(cls):
        """Get buffering speed threshold from database or default"""
        settings = cls.get_proxy_settings()
        return settings.get("buffering_speed", 1.0)

    @classmethod
    def get_channel_init_grace_period(cls):
        """Get channel init grace period from database or default"""
        settings = cls.get_proxy_settings()
        return settings.get("channel_init_grace_period", 5)

    # Dynamic property access for these settings
    @property
    def CHANNEL_SHUTDOWN_DELAY(self):
        return self.get_channel_shutdown_delay()

    @property
    def BUFFERING_TIMEOUT(self):
        return self.get_buffering_timeout()

    @property
    def BUFFERING_SPEED(self):
        return self.get_buffering_speed()

    @property
    def CHANNEL_INIT_GRACE_PERIOD(self):
        return self.get_channel_init_grace_period()

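For reference, the settings cache added above reduces to a plain class-level TTL memoization. A minimal standalone sketch of the pattern (the `load()` hook here is only a stand-in for the CoreSettings query):

    import time

    class CachedSettings:
        """Class-level cache: one fetch shared by every instance, refreshed after TTL."""

        _cache = None
        _cache_time = 0.0
        _cache_ttl = 10  # seconds

        @classmethod
        def load(cls) -> dict:
            # Placeholder for the real (slow) lookup, e.g. a database query
            return {"buffering_timeout": 15, "redis_chunk_ttl": 60}

        @classmethod
        def get(cls) -> dict:
            now = time.time()
            if cls._cache is not None and (now - cls._cache_time) < cls._cache_ttl:
                return cls._cache          # hit: no query
            cls._cache = cls.load()        # miss or expired: refetch and stamp
            cls._cache_time = now
            return cls._cache
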
@@ -10,6 +10,7 @@ import gc  # Add import for garbage collection
from core.utils import RedisClient
from apps.proxy.ts_proxy.channel_status import ChannelStatus
from core.utils import send_websocket_update
from apps.proxy.vod_proxy.connection_manager import get_connection_manager

logger = logging.getLogger(__name__)

@@ -59,3 +60,13 @@ def fetch_channel_stats():
    # Explicitly clean up large data structures
    all_channels = None
    gc.collect()

@shared_task
def cleanup_vod_connections():
    """Clean up stale VOD connections"""
    try:
        connection_manager = get_connection_manager()
        connection_manager.cleanup_stale_connections(max_age_seconds=3600)  # 1 hour
        logger.info("VOD connection cleanup completed")
    except Exception as e:
        logger.error(f"Error in VOD connection cleanup: {e}", exc_info=True)

@@ -264,6 +264,63 @@ class ChannelStatus:
                'last_data_age': time.time() - manager.last_data_time
            }

        # Add FFmpeg stream information
        video_codec = metadata.get(ChannelMetadataField.VIDEO_CODEC.encode('utf-8'))
        if video_codec:
            info['video_codec'] = video_codec.decode('utf-8')

        resolution = metadata.get(ChannelMetadataField.RESOLUTION.encode('utf-8'))
        if resolution:
            info['resolution'] = resolution.decode('utf-8')

        source_fps = metadata.get(ChannelMetadataField.SOURCE_FPS.encode('utf-8'))
        if source_fps:
            info['source_fps'] = float(source_fps.decode('utf-8'))

        pixel_format = metadata.get(ChannelMetadataField.PIXEL_FORMAT.encode('utf-8'))
        if pixel_format:
            info['pixel_format'] = pixel_format.decode('utf-8')

        source_bitrate = metadata.get(ChannelMetadataField.SOURCE_BITRATE.encode('utf-8'))
        if source_bitrate:
            info['source_bitrate'] = float(source_bitrate.decode('utf-8'))

        audio_codec = metadata.get(ChannelMetadataField.AUDIO_CODEC.encode('utf-8'))
        if audio_codec:
            info['audio_codec'] = audio_codec.decode('utf-8')

        sample_rate = metadata.get(ChannelMetadataField.SAMPLE_RATE.encode('utf-8'))
        if sample_rate:
            info['sample_rate'] = int(sample_rate.decode('utf-8'))

        audio_channels = metadata.get(ChannelMetadataField.AUDIO_CHANNELS.encode('utf-8'))
        if audio_channels:
            info['audio_channels'] = audio_channels.decode('utf-8')

        audio_bitrate = metadata.get(ChannelMetadataField.AUDIO_BITRATE.encode('utf-8'))
        if audio_bitrate:
            info['audio_bitrate'] = float(audio_bitrate.decode('utf-8'))

        # Add FFmpeg performance stats
        ffmpeg_speed = metadata.get(ChannelMetadataField.FFMPEG_SPEED.encode('utf-8'))
        if ffmpeg_speed:
            info['ffmpeg_speed'] = float(ffmpeg_speed.decode('utf-8'))

        ffmpeg_fps = metadata.get(ChannelMetadataField.FFMPEG_FPS.encode('utf-8'))
        if ffmpeg_fps:
            info['ffmpeg_fps'] = float(ffmpeg_fps.decode('utf-8'))

        actual_fps = metadata.get(ChannelMetadataField.ACTUAL_FPS.encode('utf-8'))
        if actual_fps:
            info['actual_fps'] = float(actual_fps.decode('utf-8'))

        ffmpeg_bitrate = metadata.get(ChannelMetadataField.FFMPEG_BITRATE.encode('utf-8'))
        if ffmpeg_bitrate:
            info['ffmpeg_bitrate'] = float(ffmpeg_bitrate.decode('utf-8'))

        stream_type = metadata.get(ChannelMetadataField.STREAM_TYPE.encode('utf-8'))
        if stream_type:
            info['stream_type'] = stream_type.decode('utf-8')

        return info

    @staticmethod

@@ -422,6 +479,31 @@ class ChannelStatus:
            except ValueError:
                logger.warning(f"Invalid m3u_profile_id format in Redis: {m3u_profile_id_bytes}")

        # Add stream info to basic info as well
        video_codec = metadata.get(ChannelMetadataField.VIDEO_CODEC.encode('utf-8'))
        if video_codec:
            info['video_codec'] = video_codec.decode('utf-8')

        resolution = metadata.get(ChannelMetadataField.RESOLUTION.encode('utf-8'))
        if resolution:
            info['resolution'] = resolution.decode('utf-8')

        source_fps = metadata.get(ChannelMetadataField.SOURCE_FPS.encode('utf-8'))
        if source_fps:
            info['source_fps'] = float(source_fps.decode('utf-8'))

        ffmpeg_speed = metadata.get(ChannelMetadataField.FFMPEG_SPEED.encode('utf-8'))
        if ffmpeg_speed:
            info['ffmpeg_speed'] = float(ffmpeg_speed.decode('utf-8'))

        audio_codec = metadata.get(ChannelMetadataField.AUDIO_CODEC.encode('utf-8'))
        if audio_codec:
            info['audio_codec'] = audio_codec.decode('utf-8')

        audio_channels = metadata.get(ChannelMetadataField.AUDIO_CHANNELS.encode('utf-8'))
        if audio_channels:
            info['audio_channels'] = audio_channels.decode('utf-8')

        stream_type = metadata.get(ChannelMetadataField.STREAM_TYPE.encode('utf-8'))
        if stream_type:
            info['stream_type'] = stream_type.decode('utf-8')

        return info
    except Exception as e:
        logger.error(f"Error getting channel info: {e}", exc_info=True)  # Added exc_info for better debugging

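The repeated decode-and-cast blocks above all follow one shape; for illustration only (this helper is not part of the commit, its names are hypothetical), the pattern could be expressed as:

    def copy_metadata_fields(metadata: dict, info: dict, fields: dict) -> None:
        """Copy Redis hash values (bytes) into info, casting per field.

        `fields` maps a metadata key (str) to a cast such as str, int, or float.
        Missing or empty values are skipped, mirroring the `if value:` guards above.
        """
        for key, cast in fields.items():
            raw = metadata.get(key.encode('utf-8'))
            if raw:
                info[key] = cast(raw.decode('utf-8'))

    # Usage sketch:
    # copy_metadata_fields(metadata, info, {
    #     'video_codec': str, 'source_fps': float, 'sample_rate': int,
    # })
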
@@ -4,13 +4,15 @@ import threading
import logging
import time
import json
import gevent
from typing import Set, Optional
from apps.proxy.config import TSConfig as Config
from redis.exceptions import ConnectionError, TimeoutError
from .constants import EventType
from .constants import EventType, ChannelState, ChannelMetadataField
from .config_helper import ConfigHelper
from .redis_keys import RedisKeys
from .utils import get_logger
from core.utils import send_websocket_update

logger = get_logger()

@@ -24,6 +26,7 @@ class ClientManager:
        self.lock = threading.Lock()
        self.last_active_time = time.time()
        self.worker_id = worker_id  # Store worker ID as instance variable
        self._heartbeat_running = True  # Flag to control heartbeat thread

        # STANDARDIZED KEYS: Move client set under channel namespace
        self.client_set_key = RedisKeys.clients(channel_id)

@@ -35,35 +38,68 @@ class ClientManager:
        self._start_heartbeat_thread()
        self._registered_clients = set()  # Track already registered client IDs

    def _start_heartbeat_thread(self):
        """Start thread to regularly refresh client presence in Redis"""
        def heartbeat_task():
            no_clients_count = 0  # Track consecutive empty cycles
            max_empty_cycles = 3  # Exit after this many consecutive empty checks
    def _trigger_stats_update(self):
        """Trigger a channel stats update via WebSocket"""
        try:
            # Import here to avoid potential import issues
            from apps.proxy.ts_proxy.channel_status import ChannelStatus
            import redis

            logger.debug(f"Started heartbeat thread for channel {self.channel_id} (interval: {self.heartbeat_interval}s)")
            # Get all channels from Redis
            redis_client = redis.Redis.from_url('redis://localhost:6379', decode_responses=True)
            all_channels = []
            cursor = 0

            while True:
                cursor, keys = redis_client.scan(cursor, match="ts_proxy:channel:*:clients", count=100)
                for key in keys:
                    # Extract channel ID from key
                    parts = key.split(':')
                    if len(parts) >= 4:
                        ch_id = parts[2]
                        channel_info = ChannelStatus.get_basic_channel_info(ch_id)
                        if channel_info:
                            all_channels.append(channel_info)

                if cursor == 0:
                    break

            # Send WebSocket update using existing infrastructure
            send_websocket_update(
                "updates",
                "update",
                {
                    "success": True,
                    "type": "channel_stats",
                    "stats": json.dumps({'channels': all_channels, 'count': len(all_channels)})
                }
            )
        except Exception as e:
            logger.debug(f"Failed to trigger stats update: {e}")

    def _start_heartbeat_thread(self):
        """Start thread to regularly refresh client presence in Redis for local clients"""
        def heartbeat_task():
            logger.debug(f"Started heartbeat thread for channel {self.channel_id} (interval: {self.heartbeat_interval}s)")

            while self._heartbeat_running:
                try:
                    # Wait for the interval
                    time.sleep(self.heartbeat_interval)
                    # Wait for the interval, but check stop flag frequently for quick shutdown
                    # Sleep in 1-second increments to allow faster response to stop signal
                    for _ in range(int(self.heartbeat_interval)):
                        if not self._heartbeat_running:
                            break
                        time.sleep(1)

                    # Final check before doing work
                    if not self._heartbeat_running:
                        break

                    # Send heartbeat for all local clients
                    with self.lock:
                        if not self.clients or not self.redis_client:
                            # No clients left, increment our counter
                            no_clients_count += 1

                            # If we've seen no clients for several consecutive checks, exit the thread
                            if no_clients_count >= max_empty_cycles:
                                logger.info(f"No clients for channel {self.channel_id} after {no_clients_count} consecutive checks, exiting heartbeat thread")
                                return  # This exits the thread

                            # Skip this cycle if we have no clients
                            # Skip this cycle if we have no local clients
                            if not self.clients:
                                continue
                        else:
                            # Reset counter when we see clients
                            no_clients_count = 0

                    # IMPROVED GHOST DETECTION: Check for stale clients before sending heartbeats
                    current_time = time.time()

@@ -134,11 +170,20 @@ class ClientManager:
                except Exception as e:
                    logger.error(f"Error in client heartbeat thread: {e}")

            logger.debug(f"Heartbeat thread exiting for channel {self.channel_id}")

        thread = threading.Thread(target=heartbeat_task, daemon=True)
        thread.name = f"client-heartbeat-{self.channel_id}"
        thread.start()
        logger.debug(f"Started client heartbeat thread for channel {self.channel_id} (interval: {self.heartbeat_interval}s)")

    def stop(self):
        """Stop the heartbeat thread and cleanup"""
        logger.debug(f"Stopping ClientManager for channel {self.channel_id}")
        self._heartbeat_running = False
        # Give the thread a moment to exit gracefully
        # Note: We don't join() here because it's a daemon thread and will exit on its own

    def _execute_redis_command(self, command_func):
        """Execute Redis command with error handling"""
        if not self.redis_client:

@@ -237,6 +282,9 @@ class ClientManager:
                json.dumps(event_data)
            )

        # Trigger channel stats update via WebSocket
        self._trigger_stats_update()

        # Get total clients across all workers
        total_clients = self.get_total_client_count()
        logger.info(f"New client connected: {client_id} (local: {len(self.clients)}, total: {total_clients})")

@@ -251,6 +299,8 @@ class ClientManager:

    def remove_client(self, client_id):
        """Remove a client from this channel and Redis"""
        client_ip = None

        with self.lock:
            if client_id in self.clients:
                self.clients.remove(client_id)

@@ -261,6 +311,14 @@ class ClientManager:
            self.last_active_time = time.time()

        if self.redis_client:
            # Get client IP before removing the data
            client_key = f"ts_proxy:channel:{self.channel_id}:clients:{client_id}"
            client_data = self.redis_client.hgetall(client_key)
            if client_data and b'ip_address' in client_data:
                client_ip = client_data[b'ip_address'].decode('utf-8')
            elif client_data and 'ip_address' in client_data:
                client_ip = client_data['ip_address']

            # Remove from channel's client set
            self.redis_client.srem(self.client_set_key, client_id)

@@ -290,6 +348,9 @@ class ClientManager:
            })
            self.redis_client.publish(RedisKeys.events_channel(self.channel_id), event_data)

        # Trigger channel stats update via WebSocket
        self._trigger_stats_update()

        total_clients = self.get_total_client_count()
        logger.info(f"Client disconnected: {client_id} (local: {len(self.clients)}, total: {total_clients})")

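The heartbeat rewrite above is the standard stoppable-worker pattern: sleep in 1-second slices so `stop()` takes effect within about a second instead of a full interval. A distilled standalone sketch (class and callback names here are illustrative, not from the commit):

    import threading
    import time

    class Heartbeat:
        """Periodic worker that can be stopped within ~1 second."""

        def __init__(self, interval: int, beat):
            self.interval = interval
            self.beat = beat                 # callable invoked every interval
            self._running = True
            self._thread = threading.Thread(target=self._run, daemon=True)
            self._thread.start()

        def _run(self):
            while self._running:
                # Sleep in 1-second slices so a stop request is noticed quickly
                for _ in range(self.interval):
                    if not self._running:
                        return
                    time.sleep(1)
                if self._running:
                    self.beat()

        def stop(self):
            self._running = False            # daemon thread exits on its own

    # hb = Heartbeat(5, lambda: print("refresh client TTLs"))
    # ...later: hb.stop()
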
@@ -34,7 +34,7 @@ class ConfigHelper:
    @staticmethod
    def channel_shutdown_delay():
        """Get channel shutdown delay in seconds"""
        return ConfigHelper.get('CHANNEL_SHUTDOWN_DELAY', 0)
        return Config.get_channel_shutdown_delay()

    @staticmethod
    def initial_behind_chunks():

@@ -54,7 +54,7 @@ class ConfigHelper:
    @staticmethod
    def redis_chunk_ttl():
        """Get Redis chunk TTL in seconds"""
        return ConfigHelper.get('REDIS_CHUNK_TTL', 60)
        return Config.get_redis_chunk_ttl()

    @staticmethod
    def chunk_size():

@@ -85,3 +85,27 @@ class ConfigHelper:
    def failover_grace_period():
        """Get extra time (in seconds) to allow for stream switching before disconnecting clients"""
        return ConfigHelper.get('FAILOVER_GRACE_PERIOD', 20)  # Default to 20 seconds

    @staticmethod
    def buffering_timeout():
        """Get buffering timeout in seconds"""
        return Config.get_buffering_timeout()

    @staticmethod
    def buffering_speed():
        """Get buffering speed threshold"""
        return Config.get_buffering_speed()

    @staticmethod
    def channel_init_grace_period():
        """Get channel initialization grace period in seconds"""
        return Config.get_channel_init_grace_period()

    @staticmethod
    def chunk_timeout():
        """
        Get chunk timeout in seconds (used for both socket and HTTP read timeouts).
        This controls how long we wait for each chunk before timing out.
        Set this higher (e.g., 30s) for slow providers that may have intermittent delays.
        """
        return ConfigHelper.get('CHUNK_TIMEOUT', 5)  # Default 5 seconds

@@ -18,6 +18,7 @@ class ChannelState:
    ERROR = "error"
    STOPPING = "stopping"
    STOPPED = "stopped"
    BUFFERING = "buffering"

# Event types
class EventType:

@@ -32,6 +33,8 @@ class EventType:
# Stream types
class StreamType:
    HLS = "hls"
    RTSP = "rtsp"
    UDP = "udp"
    TS = "ts"
    UNKNOWN = "unknown"

@@ -63,6 +66,33 @@ class ChannelMetadataField:
    STREAM_SWITCH_TIME = "stream_switch_time"
    STREAM_SWITCH_REASON = "stream_switch_reason"

    # FFmpeg performance metrics
    FFMPEG_SPEED = "ffmpeg_speed"
    FFMPEG_FPS = "ffmpeg_fps"
    ACTUAL_FPS = "actual_fps"
    FFMPEG_OUTPUT_BITRATE = "ffmpeg_output_bitrate"
    FFMPEG_STATS_UPDATED = "ffmpeg_stats_updated"

    # Video stream info
    VIDEO_CODEC = "video_codec"
    RESOLUTION = "resolution"
    WIDTH = "width"
    HEIGHT = "height"
    SOURCE_FPS = "source_fps"
    PIXEL_FORMAT = "pixel_format"
    VIDEO_BITRATE = "video_bitrate"

    # Audio stream info
    AUDIO_CODEC = "audio_codec"
    SAMPLE_RATE = "sample_rate"
    AUDIO_CHANNELS = "audio_channels"
    AUDIO_BITRATE = "audio_bitrate"

    # Stream format info
    STREAM_TYPE = "stream_type"
    # Stream info timestamp
    STREAM_INFO_UPDATED = "stream_info_updated"

    # Client metadata fields
    CONNECTED_AT = "connected_at"
    LAST_ACTIVE = "last_active"

apps/proxy/ts_proxy/http_streamer.py (new file, 138 lines)

@@ -0,0 +1,138 @@
"""
HTTP Stream Reader - Thread-based HTTP stream reader that writes to a pipe.
This allows us to use the same fetch_chunk() path for both transcode and HTTP streams.
"""

import threading
import os
import requests
from requests.adapters import HTTPAdapter
from .utils import get_logger

logger = get_logger()


class HTTPStreamReader:
    """Thread-based HTTP stream reader that writes to a pipe"""

    def __init__(self, url, user_agent=None, chunk_size=8192):
        self.url = url
        self.user_agent = user_agent
        self.chunk_size = chunk_size
        self.session = None
        self.response = None
        self.thread = None
        self.pipe_read = None
        self.pipe_write = None
        self.running = False

    def start(self):
        """Start the HTTP stream reader thread"""
        # Create a pipe (works on Windows and Unix)
        self.pipe_read, self.pipe_write = os.pipe()

        # Start the reader thread
        self.running = True
        self.thread = threading.Thread(target=self._read_stream, daemon=True)
        self.thread.start()

        logger.info(f"Started HTTP stream reader thread for {self.url}")
        return self.pipe_read

    def _read_stream(self):
        """Thread worker that reads HTTP stream and writes to pipe"""
        try:
            # Build headers
            headers = {}
            if self.user_agent:
                headers['User-Agent'] = self.user_agent

            logger.info(f"HTTP reader connecting to {self.url}")

            # Create session
            self.session = requests.Session()

            # Disable retries for faster failure detection
            adapter = HTTPAdapter(max_retries=0, pool_connections=1, pool_maxsize=1)
            self.session.mount('http://', adapter)
            self.session.mount('https://', adapter)

            # Stream the URL
            self.response = self.session.get(
                self.url,
                headers=headers,
                stream=True,
                timeout=(5, 30)  # 5s connect, 30s read
            )

            if self.response.status_code != 200:
                logger.error(f"HTTP {self.response.status_code} from {self.url}")
                return

            logger.info(f"HTTP reader connected successfully, streaming data...")

            # Stream chunks to pipe
            chunk_count = 0
            for chunk in self.response.iter_content(chunk_size=self.chunk_size):
                if not self.running:
                    break

                if chunk:
                    try:
                        # Write binary data to pipe
                        os.write(self.pipe_write, chunk)
                        chunk_count += 1

                        # Log progress periodically
                        if chunk_count % 1000 == 0:
                            logger.debug(f"HTTP reader streamed {chunk_count} chunks")
                    except OSError as e:
                        logger.error(f"Pipe write error: {e}")
                        break

            logger.info("HTTP stream ended")

        except requests.exceptions.RequestException as e:
            logger.error(f"HTTP reader request error: {e}")
        except Exception as e:
            logger.error(f"HTTP reader unexpected error: {e}", exc_info=True)
        finally:
            self.running = False
            # Close write end of pipe to signal EOF
            try:
                if self.pipe_write is not None:
                    os.close(self.pipe_write)
                    self.pipe_write = None
            except:
                pass

    def stop(self):
        """Stop the HTTP stream reader"""
        logger.info("Stopping HTTP stream reader")
        self.running = False

        # Close response
        if self.response:
            try:
                self.response.close()
            except:
                pass

        # Close session
        if self.session:
            try:
                self.session.close()
            except:
                pass

        # Close write end of pipe
        if self.pipe_write is not None:
            try:
                os.close(self.pipe_write)
                self.pipe_write = None
            except:
                pass

        # Wait for thread
        if self.thread and self.thread.is_alive():
            self.thread.join(timeout=2.0)

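A hedged usage sketch for the new reader (the URL and user agent below are placeholders; the real integration hands the pipe to the shared fetch_chunk() path): `start()` returns the read end of the pipe, which behaves like any file descriptor, so the caller reads it exactly as it would read transcoder output, and an empty read signals EOF once the writer side closes.

    import os
    from apps.proxy.ts_proxy.http_streamer import HTTPStreamReader

    reader = HTTPStreamReader("http://example.com/stream.ts", user_agent="VLC/3.0")
    fd = reader.start()                      # read end of the pipe
    try:
        while True:
            data = os.read(fd, 188 * 1361)   # same chunking as the TS buffer
            if not data:                     # EOF: writer closed its end
                break
            # ... hand data to the shared chunk-ingest path ...
    finally:
        reader.stop()
        os.close(fd)
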
@@ -131,6 +131,8 @@ class ProxyServer:
        max_retries = 10
        base_retry_delay = 1  # Start with 1 second delay
        max_retry_delay = 30  # Cap at 30 seconds
        pubsub_client = None
        pubsub = None

        while True:
            try:

@@ -206,7 +208,7 @@ class ProxyServer:
                self.redis_client.setex(disconnect_key, 60, str(time.time()))

                # Get configured shutdown delay or default
                shutdown_delay = getattr(Config, 'CHANNEL_SHUTDOWN_DELAY', 0)
                shutdown_delay = ConfigHelper.channel_shutdown_delay()

                if shutdown_delay > 0:
                    logger.info(f"Waiting {shutdown_delay}s before stopping channel...")

@@ -339,20 +341,27 @@ class ProxyServer:
                    logger.error(f"Error in event listener: {e}. Retrying in {final_delay:.1f}s (attempt {retry_count})")
                    gevent.sleep(final_delay)  # REPLACE: time.sleep(final_delay)

                    # Try to clean up the old connection
                    try:
                        if 'pubsub' in locals():
                            pubsub.close()
                        if 'pubsub_client' in locals():
                            pubsub_client.close()
                    except:
                        pass

                except Exception as e:
                    logger.error(f"Error in event listener: {e}")
                    # Add a short delay to prevent rapid retries on persistent errors
                    gevent.sleep(5)  # REPLACE: time.sleep(5)

                finally:
                    # Always clean up PubSub connections in all error paths
                    try:
                        if pubsub:
                            pubsub.close()
                            pubsub = None
                    except Exception as e:
                        logger.debug(f"Error closing pubsub: {e}")

                    try:
                        if pubsub_client:
                            pubsub_client.close()
                            pubsub_client = None
                    except Exception as e:
                        logger.debug(f"Error closing pubsub_client: {e}")

        thread = threading.Thread(target=event_listener, daemon=True)
        thread.name = "redis-event-listener"
        thread.start()

@@ -472,7 +481,7 @@ class ProxyServer:
            if b'state' in metadata:
                state = metadata[b'state'].decode('utf-8')
                active_states = [ChannelState.INITIALIZING, ChannelState.CONNECTING,
                                 ChannelState.WAITING_FOR_CLIENTS, ChannelState.ACTIVE]
                                 ChannelState.WAITING_FOR_CLIENTS, ChannelState.ACTIVE, ChannelState.BUFFERING]
                if state in active_states:
                    logger.info(f"Channel {channel_id} already being initialized with state {state}")
                    # Create buffer and client manager only if we don't have them

@@ -486,17 +495,18 @@ class ProxyServer:
            )
            return True

        # Create buffer and client manager instances
        buffer = StreamBuffer(channel_id, redis_client=self.redis_client)
        client_manager = ClientManager(
            channel_id,
            redis_client=self.redis_client,
            worker_id=self.worker_id
        )
        # Create buffer and client manager instances (or reuse if they exist)
        if channel_id not in self.stream_buffers:
            buffer = StreamBuffer(channel_id, redis_client=self.redis_client)
            self.stream_buffers[channel_id] = buffer

        # Store in local tracking
        self.stream_buffers[channel_id] = buffer
        self.client_managers[channel_id] = client_manager
        if channel_id not in self.client_managers:
            client_manager = ClientManager(
                channel_id,
                redis_client=self.redis_client,
                worker_id=self.worker_id
            )
            self.client_managers[channel_id] = client_manager

        # IMPROVED: Set initializing state in Redis BEFORE any other operations
        if self.redis_client:

@@ -550,13 +560,15 @@ class ProxyServer:
                logger.info(f"Channel {channel_id} already owned by worker {current_owner}")
                logger.info(f"This worker ({self.worker_id}) will read from Redis buffer only")

                # Create buffer but not stream manager
                buffer = StreamBuffer(channel_id=channel_id, redis_client=self.redis_client)
                self.stream_buffers[channel_id] = buffer
                # Create buffer but not stream manager (only if not already exists)
                if channel_id not in self.stream_buffers:
                    buffer = StreamBuffer(channel_id=channel_id, redis_client=self.redis_client)
                    self.stream_buffers[channel_id] = buffer

                # Create client manager with channel_id and redis_client
                client_manager = ClientManager(channel_id=channel_id, redis_client=self.redis_client, worker_id=self.worker_id)
                self.client_managers[channel_id] = client_manager
                # Create client manager with channel_id and redis_client (only if not already exists)
                if channel_id not in self.client_managers:
                    client_manager = ClientManager(channel_id=channel_id, redis_client=self.redis_client, worker_id=self.worker_id)
                    self.client_managers[channel_id] = client_manager

                return True

@@ -571,13 +583,15 @@ class ProxyServer:
                    # Another worker just acquired ownership
                    logger.info(f"Another worker just acquired ownership of channel {channel_id}")

                    # Create buffer but not stream manager
                    buffer = StreamBuffer(channel_id=channel_id, redis_client=self.redis_client)
                    self.stream_buffers[channel_id] = buffer
                    # Create buffer but not stream manager (only if not already exists)
                    if channel_id not in self.stream_buffers:
                        buffer = StreamBuffer(channel_id=channel_id, redis_client=self.redis_client)
                        self.stream_buffers[channel_id] = buffer

                    # Create client manager with channel_id and redis_client
                    client_manager = ClientManager(channel_id=channel_id, redis_client=self.redis_client, worker_id=self.worker_id)
                    self.client_managers[channel_id] = client_manager
                    # Create client manager with channel_id and redis_client (only if not already exists)
                    if channel_id not in self.client_managers:
                        client_manager = ClientManager(channel_id=channel_id, redis_client=self.redis_client, worker_id=self.worker_id)
                        self.client_managers[channel_id] = client_manager

                    return True

@@ -596,7 +610,7 @@ class ProxyServer:
            if channel_user_agent:
                metadata["user_agent"] = channel_user_agent

            # CRITICAL FIX: Make sure stream_id is always set in metadata and properly logged
            # Make sure stream_id is always set in metadata and properly logged
            if channel_stream_id:
                metadata["stream_id"] = str(channel_stream_id)
                logger.info(f"Storing stream_id {channel_stream_id} in metadata for channel {channel_id}")

@@ -632,13 +646,14 @@ class ProxyServer:
            logger.info(f"Created StreamManager for channel {channel_id} with stream ID {channel_stream_id}")
            self.stream_managers[channel_id] = stream_manager

            # Create client manager with channel_id, redis_client AND worker_id
            client_manager = ClientManager(
                channel_id=channel_id,
                redis_client=self.redis_client,
                worker_id=self.worker_id
            )
            self.client_managers[channel_id] = client_manager
            # Create client manager with channel_id, redis_client AND worker_id (only if not already exists)
            if channel_id not in self.client_managers:
                client_manager = ClientManager(
                    channel_id=channel_id,
                    redis_client=self.redis_client,
                    worker_id=self.worker_id
                )
                self.client_managers[channel_id] = client_manager

            # Start stream manager thread only for the owner
            thread = threading.Thread(target=stream_manager.run, daemon=True)

@@ -689,7 +704,8 @@ class ProxyServer:
            owner = metadata.get(b'owner', b'').decode('utf-8')

            # States that indicate the channel is running properly
            valid_states = [ChannelState.ACTIVE, ChannelState.WAITING_FOR_CLIENTS, ChannelState.CONNECTING]
            valid_states = [ChannelState.ACTIVE, ChannelState.WAITING_FOR_CLIENTS,
                            ChannelState.CONNECTING, ChannelState.BUFFERING, ChannelState.INITIALIZING]

            # If the channel is in a valid state, check if the owner is still active
            if state in valid_states:

@@ -707,7 +723,7 @@ class ProxyServer:
            elif state in [ChannelState.STOPPING, ChannelState.STOPPED, ChannelState.ERROR]:
                # These states indicate the channel should be reinitialized
                logger.info(f"Channel {channel_id} exists but in terminal state: {state}")
                return False
                return True
            else:
                # Unknown or initializing state, check how long it's been in this state
                if b'state_changed_at' in metadata:

@@ -845,6 +861,10 @@ class ProxyServer:
        # Clean up client manager - SAFE CHECK HERE TOO
        if channel_id in self.client_managers:
            try:
                client_manager = self.client_managers[channel_id]
                # Stop the heartbeat thread before deleting
                if hasattr(client_manager, 'stop'):
                    client_manager.stop()
                del self.client_managers[channel_id]
                logger.info(f"Removed client manager for channel {channel_id}")
            except KeyError:

@@ -941,7 +961,7 @@ class ProxyServer:

        # If waiting for clients, check grace period
        if connection_ready_time:
            grace_period = ConfigHelper.get('CHANNEL_INIT_GRACE_PERIOD', 20)
            grace_period = ConfigHelper.channel_init_grace_period()
            time_since_ready = time.time() - connection_ready_time

            # Add this debug log

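The event listener's retry policy above (1s base delay, 30s cap) is a capped exponential backoff. A minimal standalone sketch of that policy, under the assumption that `final_delay` is derived this way; the jitter term is an addition of this sketch, not something the diff shows, and `connect()` is a hypothetical operation:

    import random
    import time

    def backoff_delay(attempt: int, base: float = 1.0, cap: float = 30.0) -> float:
        """Delay before retry `attempt` (0-based): exponential growth, capped,
        plus a little jitter so workers do not retry in lockstep."""
        delay = min(cap, base * (2 ** attempt))
        return delay + random.uniform(0, delay * 0.1)

    # for attempt in range(10):
    #     try:
    #         connect()          # hypothetical operation that may fail
    #         break
    #     except ConnectionError:
    #         time.sleep(backoff_delay(attempt))
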
@@ -6,6 +6,7 @@ This separates business logic from HTTP handling in views.
import logging
import time
import json
import re
from django.shortcuts import get_object_or_404
from apps.channels.models import Channel, Stream
from apps.proxy.config import TSConfig as Config

@@ -181,7 +182,7 @@ class ChannelService:
            old_url = manager.url

            # Update the stream
            success = manager.update_url(new_url, stream_id)
            success = manager.update_url(new_url, stream_id, m3u_profile_id)
            logger.info(f"Stream URL changed from {old_url} to {new_url}, result: {success}")

            result.update({

@@ -415,6 +416,222 @@ class ChannelService:
            logger.error(f"Error validating channel state: {e}", exc_info=True)
            return False, None, None, {"error": f"Exception: {str(e)}"}

    @staticmethod
    def parse_and_store_stream_info(channel_id, stream_info_line, stream_type="video", stream_id=None):
        """Parse FFmpeg stream info line and store in Redis metadata and database"""
        try:
            if stream_type == "input":
                # Example lines:
                # Input #0, mpegts, from 'http://example.com/stream.ts':
                # Input #0, hls, from 'http://example.com/stream.m3u8':

                # Extract input format (e.g., "mpegts", "hls", "flv", etc.)
                input_match = re.search(r'Input #\d+,\s*([^,]+)', stream_info_line)
                input_format = input_match.group(1).strip() if input_match else None

                # Store in Redis if we have valid data
                if input_format:
                    ChannelService._update_stream_info_in_redis(channel_id, None, None, None, None, None, None, None, None, None, None, None, input_format)
                    # Save to database if stream_id is provided
                    if stream_id:
                        ChannelService._update_stream_stats_in_db(stream_id, stream_type=input_format)

                    logger.debug(f"Input format info - Format: {input_format} for channel {channel_id}")

            elif stream_type == "video":
                # Example line:
                # Stream #0:0: Video: h264 (Main), yuv420p(tv, progressive), 1280x720 [SAR 1:1 DAR 16:9], q=2-31, 2000 kb/s, 29.97 fps, 90k tbn

                # Extract video codec (e.g., "h264", "mpeg2video", etc.)
                codec_match = re.search(r'Video:\s*([a-zA-Z0-9_]+)', stream_info_line)
                video_codec = codec_match.group(1) if codec_match else None

                # Extract resolution (e.g., "1280x720") - be more specific to avoid hex values
                # Look for resolution patterns that are realistic video dimensions
                resolution_match = re.search(r'\b(\d{3,5})x(\d{3,5})\b', stream_info_line)
                if resolution_match:
                    width = int(resolution_match.group(1))
                    height = int(resolution_match.group(2))
                    # Validate that these look like reasonable video dimensions
                    if 100 <= width <= 10000 and 100 <= height <= 10000:
                        resolution = f"{width}x{height}"
                    else:
                        width = height = resolution = None
                else:
                    width = height = resolution = None

                # Extract source FPS (e.g., "29.97 fps")
                fps_match = re.search(r'(\d+(?:\.\d+)?)\s*fps', stream_info_line)
                source_fps = float(fps_match.group(1)) if fps_match else None

                # Extract pixel format (e.g., "yuv420p")
                pixel_format_match = re.search(r'Video:\s*[^,]+,\s*([^,(]+)', stream_info_line)
                pixel_format = None
                if pixel_format_match:
                    pf = pixel_format_match.group(1).strip()
                    # Clean up pixel format (remove extra info in parentheses)
                    if '(' in pf:
                        pf = pf.split('(')[0].strip()
                    pixel_format = pf

                # Extract bitrate if present (e.g., "2000 kb/s")
                video_bitrate = None
                bitrate_match = re.search(r'(\d+(?:\.\d+)?)\s*kb/s', stream_info_line)
                if bitrate_match:
                    video_bitrate = float(bitrate_match.group(1))

                # Store in Redis if we have valid data
                if any(x is not None for x in [video_codec, resolution, source_fps, pixel_format, video_bitrate]):
                    ChannelService._update_stream_info_in_redis(channel_id, video_codec, resolution, width, height, source_fps, pixel_format, video_bitrate, None, None, None, None, None)
                    # Save to database if stream_id is provided
                    if stream_id:
                        ChannelService._update_stream_stats_in_db(
                            stream_id,
                            video_codec=video_codec,
                            resolution=resolution,
                            source_fps=source_fps,
                            pixel_format=pixel_format,
                            video_bitrate=video_bitrate
                        )

                    logger.info(f"Video stream info - Codec: {video_codec}, Resolution: {resolution}, "
                                f"Source FPS: {source_fps}, Pixel Format: {pixel_format}, "
                                f"Video Bitrate: {video_bitrate} kb/s")

            elif stream_type == "audio":
                # Example line:
                # Stream #0:1[0x101]: Audio: aac (LC) ([15][0][0][0] / 0x000F), 48000 Hz, stereo, fltp, 64 kb/s

                # Extract audio codec (e.g., "aac", "mp3", etc.)
                codec_match = re.search(r'Audio:\s*([a-zA-Z0-9_]+)', stream_info_line)
                audio_codec = codec_match.group(1) if codec_match else None

                # Extract sample rate (e.g., "48000 Hz")
                sample_rate_match = re.search(r'(\d+)\s*Hz', stream_info_line)
                sample_rate = int(sample_rate_match.group(1)) if sample_rate_match else None

                # Extract channel layout (e.g., "stereo", "5.1", "mono")
                # Look for common channel layouts
                channel_match = re.search(r'\b(mono|stereo|5\.1|7\.1|quad|2\.1)\b', stream_info_line, re.IGNORECASE)
                channels = channel_match.group(1) if channel_match else None

                # Extract audio bitrate if present (e.g., "64 kb/s")
                audio_bitrate = None
                bitrate_match = re.search(r'(\d+(?:\.\d+)?)\s*kb/s', stream_info_line)
                if bitrate_match:
                    audio_bitrate = float(bitrate_match.group(1))

                # Store in Redis if we have valid data
                if any(x is not None for x in [audio_codec, sample_rate, channels, audio_bitrate]):
                    ChannelService._update_stream_info_in_redis(channel_id, None, None, None, None, None, None, None, audio_codec, sample_rate, channels, audio_bitrate, None)
                    # Save to database if stream_id is provided
                    if stream_id:
                        ChannelService._update_stream_stats_in_db(
                            stream_id,
                            audio_codec=audio_codec,
                            sample_rate=sample_rate,
                            audio_channels=channels,
                            audio_bitrate=audio_bitrate
                        )

        except Exception as e:
            logger.debug(f"Error parsing FFmpeg {stream_type} stream info: {e}")

    @staticmethod
    def _update_stream_info_in_redis(channel_id, codec, resolution, width, height, fps, pixel_format, video_bitrate, audio_codec=None, sample_rate=None, channels=None, audio_bitrate=None, input_format=None):
        """Update stream info in Redis metadata"""
        try:
            proxy_server = ProxyServer.get_instance()
            if not proxy_server.redis_client:
                return False

            metadata_key = RedisKeys.channel_metadata(channel_id)
            update_data = {
                ChannelMetadataField.STREAM_INFO_UPDATED: str(time.time())
            }

            # Video info
            if codec is not None:
                update_data[ChannelMetadataField.VIDEO_CODEC] = str(codec)

            if resolution is not None:
                update_data[ChannelMetadataField.RESOLUTION] = str(resolution)

            if width is not None:
                update_data[ChannelMetadataField.WIDTH] = str(width)

            if height is not None:
                update_data[ChannelMetadataField.HEIGHT] = str(height)

            if fps is not None:
                update_data[ChannelMetadataField.SOURCE_FPS] = str(round(fps, 2))

            if pixel_format is not None:
                update_data[ChannelMetadataField.PIXEL_FORMAT] = str(pixel_format)

            if video_bitrate is not None:
                update_data[ChannelMetadataField.VIDEO_BITRATE] = str(round(video_bitrate, 1))

            # Audio info
            if audio_codec is not None:
                update_data[ChannelMetadataField.AUDIO_CODEC] = str(audio_codec)

            if sample_rate is not None:
                update_data[ChannelMetadataField.SAMPLE_RATE] = str(sample_rate)

            if channels is not None:
                update_data[ChannelMetadataField.AUDIO_CHANNELS] = str(channels)

            if audio_bitrate is not None:
                update_data[ChannelMetadataField.AUDIO_BITRATE] = str(round(audio_bitrate, 1))
            if input_format is not None:
                update_data[ChannelMetadataField.STREAM_TYPE] = str(input_format)

            proxy_server.redis_client.hset(metadata_key, mapping=update_data)
            return True

        except Exception as e:
            logger.error(f"Error updating stream info in Redis: {e}")
            return False

    @staticmethod
    def _update_stream_stats_in_db(stream_id, **stats):
        """Update stream stats in database"""
        from django.db import connection

        try:
            from apps.channels.models import Stream
            from django.utils import timezone

            stream = Stream.objects.get(id=stream_id)

            # Get existing stats or create new dict
            current_stats = stream.stream_stats or {}

            # Update with new stats
            for key, value in stats.items():
                if value is not None:
                    current_stats[key] = value

            # Save updated stats and timestamp
            stream.stream_stats = current_stats
            stream.stream_stats_updated_at = timezone.now()
            stream.save(update_fields=['stream_stats', 'stream_stats_updated_at'])

            logger.debug(f"Updated stream stats in database for stream {stream_id}: {stats}")
            return True

        except Exception as e:
            logger.error(f"Error updating stream stats in database for stream {stream_id}: {e}")
            return False

        finally:
            # Always close database connection after update
            try:
                connection.close()
            except Exception:
                pass

    # Helper methods for Redis operations

    @staticmethod

@@ -470,7 +687,7 @@ class ChannelService:

        switch_request = {
            "event": EventType.STREAM_SWITCH,
            "channel_id": channel_id,
            "channel_id": str(channel_id),
            "url": new_url,
            "user_agent": user_agent,
            "stream_id": stream_id,

@@ -495,7 +712,7 @@ class ChannelService:

        stop_request = {
            "event": EventType.CHANNEL_STOP,
            "channel_id": channel_id,
            "channel_id": str(channel_id),
            "requester_worker_id": proxy_server.worker_id,
            "timestamp": time.time()
        }

@@ -518,7 +735,7 @@ class ChannelService:

        stop_request = {
            "event": EventType.CLIENT_STOP,
            "channel_id": channel_id,
            "channel_id": str(channel_id),
            "client_id": client_id,
            "requester_worker_id": proxy_server.worker_id,
            "timestamp": time.time()

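A quick, self-contained check of the video-line regexes above against the example FFmpeg output quoted in the code's own comment (the sample line comes straight from that comment; the results shown are what the patterns yield):

    import re

    line = ("Stream #0:0: Video: h264 (Main), yuv420p(tv, progressive), "
            "1280x720 [SAR 1:1 DAR 16:9], q=2-31, 2000 kb/s, 29.97 fps, 90k tbn")

    codec = re.search(r'Video:\s*([a-zA-Z0-9_]+)', line).group(1)          # 'h264'
    res = re.search(r'\b(\d{3,5})x(\d{3,5})\b', line).groups()             # ('1280', '720')
    fps = float(re.search(r'(\d+(?:\.\d+)?)\s*fps', line).group(1))        # 29.97
    bitrate = float(re.search(r'(\d+(?:\.\d+)?)\s*kb/s', line).group(1))   # 2000.0
    print(codec, res, fps, bitrate)
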
@@ -303,6 +303,14 @@ class StreamBuffer:
        # Retrieve chunks
        chunks = self.get_chunks_exact(client_index, chunk_count)

        # Check if we got significantly fewer chunks than expected (likely due to expiration)
        # Only check if we expected multiple chunks and got none or very few
        if chunk_count > 3 and len(chunks) == 0 and chunks_behind > 10:
            # Chunks are missing - likely expired from Redis
            # Return empty list to signal client should skip forward
            logger.debug(f"Chunks missing for client at index {client_index}, buffer at {self.index} ({chunks_behind} behind)")
            return [], client_index

        # Check total size
        total_size = sum(len(c) for c in chunks)

@@ -316,7 +324,7 @@ class StreamBuffer:
                additional_size = sum(len(c) for c in more_chunks)
                if total_size + additional_size <= MAX_SIZE:
                    chunks.extend(more_chunks)
                    chunk_count += additional
                    chunk_count += len(more_chunks)  # Fixed: count actual additional chunks retrieved

        return chunks, client_index + chunk_count

@@ -52,6 +52,10 @@ class StreamGenerator:
        self.last_stats_bytes = 0
        self.current_rate = 0.0

        # TTL refresh tracking
        self.last_ttl_refresh = time.time()
        self.ttl_refresh_interval = 3  # Refresh TTL every 3 seconds of active streaming

    def generate(self):
        """
        Generator function that produces the stream content for the client.

@@ -204,6 +208,18 @@ class StreamGenerator:
                    self.empty_reads += 1
                    self.consecutive_empty += 1

                    # Check if we're too far behind (chunks expired from Redis)
                    chunks_behind = self.buffer.index - self.local_index
                    if chunks_behind > 50:  # If more than 50 chunks behind, jump forward
                        # Calculate new position: stay a few chunks behind current buffer
                        initial_behind = ConfigHelper.initial_behind_chunks()
                        new_index = max(self.local_index, self.buffer.index - initial_behind)

                        logger.warning(f"[{self.client_id}] Client too far behind ({chunks_behind} chunks), jumping from {self.local_index} to {new_index}")
                        self.local_index = new_index
                        self.consecutive_empty = 0  # Reset since we're repositioning
                        continue  # Try again immediately with new position

                    if self._should_send_keepalive(self.local_index):
                        keepalive_packet = create_ts_packet('keepalive')
                        logger.debug(f"[{self.client_id}] Sending keepalive packet while waiting at buffer head")

@@ -324,7 +340,20 @@ class StreamGenerator:
                        ChannelMetadataField.STATS_UPDATED_AT: str(current_time)
                    }
                    proxy_server.redis_client.hset(client_key, mapping=stats)
                    # No need to set expiration as client heartbeat will refresh this key

                    # Refresh TTL periodically while actively streaming
                    # This provides proof-of-life independent of heartbeat thread
                    if current_time - self.last_ttl_refresh > self.ttl_refresh_interval:
                        try:
                            # Refresh TTL on client key
                            proxy_server.redis_client.expire(client_key, Config.CLIENT_RECORD_TTL)
                            # Also refresh the client set TTL
                            client_set_key = f"ts_proxy:channel:{self.channel_id}:clients"
                            proxy_server.redis_client.expire(client_set_key, Config.CLIENT_RECORD_TTL)
                            self.last_ttl_refresh = current_time
                            logger.debug(f"[{self.client_id}] Refreshed client TTL (active streaming)")
                        except Exception as ttl_error:
                            logger.debug(f"[{self.client_id}] Failed to refresh TTL: {ttl_error}")
                except Exception as e:
                    logger.warning(f"[{self.client_id}] Failed to store stats in Redis: {e}")

(File diff suppressed because it is too large)

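The TTL refresh above is a time-gated side effect inside a hot loop: cheap no-op on most iterations, one expire() roughly every three seconds. A minimal standalone sketch of the pattern (the class, client, and key names are placeholders, not from the commit):

    import time

    class TtlRefresher:
        """Re-arms a Redis key's TTL at most once per `interval` seconds."""

        def __init__(self, redis_client, key, ttl=60, interval=3):
            self.redis = redis_client
            self.key = key
            self.ttl = ttl
            self.interval = interval
            self._last = 0.0

        def tick(self):
            now = time.time()
            if now - self._last > self.interval:
                self.redis.expire(self.key, self.ttl)  # proof-of-life refresh
                self._last = now

    # In a streaming loop:
    #   refresher.tick()   # no-op most iterations, expire() every ~3s
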
@@ -8,7 +8,7 @@ from typing import Optional, Tuple, List
from django.shortcuts import get_object_or_404
from apps.channels.models import Channel, Stream
from apps.m3u.models import M3UAccount, M3UAccountProfile
from core.models import UserAgent, CoreSettings
from core.models import UserAgent, CoreSettings, StreamProfile
from .utils import get_logger
from uuid import UUID
import requests

@@ -17,7 +17,6 @@ logger = get_logger()

def get_stream_object(id: str):
    try:
        uuid_obj = UUID(id, version=4)
        logger.info(f"Fetching channel ID {id}")
        return get_object_or_404(Channel, uuid=id)
    except:

@@ -27,16 +26,67 @@ def get_stream_object(id: str):

def generate_stream_url(channel_id: str) -> Tuple[str, str, bool, Optional[int]]:
    """
    Generate the appropriate stream URL for a channel based on its profile settings.
    Generate the appropriate stream URL for a channel or stream based on its profile settings.

    Args:
        channel_id: The UUID of the channel
        channel_id: The UUID of the channel or stream hash

    Returns:
        Tuple[str, str, bool, Optional[int]]: (stream_url, user_agent, transcode_flag, profile_id)
    """
    try:
        channel = get_stream_object(channel_id)
        channel_or_stream = get_stream_object(channel_id)

        # Handle direct stream preview (custom streams)
        if isinstance(channel_or_stream, Stream):
            stream = channel_or_stream
            logger.info(f"Previewing stream directly: {stream.id} ({stream.name})")

            # For custom streams, we need to get the M3U account and profile
            m3u_account = stream.m3u_account
            if not m3u_account:
                logger.error(f"Stream {stream.id} has no M3U account")
                return None, None, False, None

            # Get the default profile for this M3U account (custom streams use default)
            m3u_profiles = m3u_account.profiles.all()
            profile = next((obj for obj in m3u_profiles if obj.is_default), None)

            if not profile:
                logger.error(f"No default profile found for M3U account {m3u_account.id}")
                return None, None, False, None

            # Get the appropriate user agent
            stream_user_agent = m3u_account.get_user_agent().user_agent
            if stream_user_agent is None:
                stream_user_agent = UserAgent.objects.get(id=CoreSettings.get_default_user_agent_id())
                logger.debug(f"No user agent found for account, using default: {stream_user_agent}")

            # Get stream URL (no transformation for custom streams)
            stream_url = stream.url

            # Check if the stream has its own stream_profile set, otherwise use default
            if stream.stream_profile:
                stream_profile = stream.stream_profile
                logger.debug(f"Using stream's own stream profile: {stream_profile.name}")
            else:
                stream_profile = StreamProfile.objects.get(
                    id=CoreSettings.get_default_stream_profile_id()
                )
                logger.debug(f"Using default stream profile: {stream_profile.name}")

            # Check if transcoding is needed
            if stream_profile.is_proxy() or stream_profile is None:
                transcode = False
            else:
                transcode = True

            stream_profile_id = stream_profile.id

            return stream_url, stream_user_agent, transcode, stream_profile_id

        # Handle channel preview (existing logic)
        channel = channel_or_stream

        # Get stream and profile for this channel
        # Note: get_stream now returns 3 values (stream_id, profile_id, error_reason)

@@ -126,7 +176,10 @@ def get_stream_info_for_switch(channel_id: str, target_stream_id: Optional[int]):
        dict: Stream information including URL, user agent and transcode flag
    """
    try:
        from core.utils import RedisClient

        channel = get_object_or_404(Channel, uuid=channel_id)
        redis_client = RedisClient.get_client()

        # Use the target stream if specified, otherwise use current stream
        if target_stream_id:

@@ -135,24 +188,58 @@ def get_stream_info_for_switch(channel_id: str, target_stream_id: Optional[int]):
            # Get the stream object
            stream = get_object_or_404(Stream, pk=stream_id)

            # Find compatible profile for this stream
            profiles = M3UAccountProfile.objects.filter(m3u_account=stream.m3u_account)
            # Find compatible profile for this stream with connection availability check
            m3u_account = stream.m3u_account
            if not m3u_account:
                return {'error': 'Stream has no M3U account'}

            if not profiles.exists():
                # Try to get default profile
                default_profile = M3UAccountProfile.objects.filter(
                    m3u_account=stream.m3u_account,
                    is_default=True
                ).first()
            m3u_profiles = m3u_account.profiles.filter(is_active=True)
            default_profile = next((obj for obj in m3u_profiles if obj.is_default), None)

            if default_profile:
                m3u_profile_id = default_profile.id
            if not default_profile:
                return {'error': 'M3U account has no default profile'}

            # Check profiles in order: default first, then others
            profiles = [default_profile] + [obj for obj in m3u_profiles if not obj.is_default]

            selected_profile = None
            for profile in profiles:

                # Check connection availability
                if redis_client:
                    profile_connections_key = f"profile_connections:{profile.id}"
                    current_connections = int(redis_client.get(profile_connections_key) or 0)

                    # Check if this channel is already using this profile
                    channel_using_profile = False
                    existing_stream_id = redis_client.get(f"channel_stream:{channel.id}")
                    if existing_stream_id:
                        # Decode bytes to string/int for proper Redis key lookup
                        existing_stream_id = existing_stream_id.decode('utf-8')
                        existing_profile_id = redis_client.get(f"stream_profile:{existing_stream_id}")
                        if existing_profile_id and int(existing_profile_id.decode('utf-8')) == profile.id:
                            channel_using_profile = True
                            logger.debug(f"Channel {channel.id} already using profile {profile.id}")

                    # Calculate effective connections (subtract 1 if channel already using this profile)
                    effective_connections = current_connections - (1 if channel_using_profile else 0)

                    # Check if profile has available slots
                    if profile.max_streams == 0 or effective_connections < profile.max_streams:
                        selected_profile = profile
                        logger.debug(f"Selected profile {profile.id} with {effective_connections}/{profile.max_streams} effective connections (current: {current_connections}, already using: {channel_using_profile})")
                        break
                    else:
                        logger.debug(f"Profile {profile.id} at max connections: {effective_connections}/{profile.max_streams} (current: {current_connections}, already using: {channel_using_profile})")
                else:
                    logger.error(f"No profile found for stream {stream_id}")
                    return {'error': 'No profile found for stream'}
                else:
                    # Use first available profile
                    m3u_profile_id = profiles.first().id
                    # No Redis available, assume first active profile is okay
                    selected_profile = profile
                    break

            if not selected_profile:
                return {'error': 'No profiles available with connection capacity'}

            m3u_profile_id = selected_profile.id
        else:
            stream_id, m3u_profile_id, error_reason = channel.get_stream()
            if stream_id is None or m3u_profile_id is None:

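The profile-selection loop above reduces to one capacity rule. A distilled sketch, assuming the Redis keys shown in the diff; the function name is illustrative and not part of the commit:

    def has_capacity(redis_client, profile_id: int, max_streams: int,
                     already_using: bool = False) -> bool:
        """True if the M3U profile can accept one more connection.

        max_streams == 0 means unlimited; a channel already counted against
        this profile frees one effective slot, since it would be reusing it.
        """
        current = int(redis_client.get(f"profile_connections:{profile_id}") or 0)
        effective = current - (1 if already_using else 0)
        return max_streams == 0 or effective < max_streams
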
|
@ -162,8 +249,15 @@ def get_stream_info_for_switch(channel_id: str, target_stream_id: Optional[int]
|
|||
stream = get_object_or_404(Stream, pk=stream_id)
|
||||
profile = get_object_or_404(M3UAccountProfile, pk=m3u_profile_id)
|
||||
|
||||
# Get the user agent from the M3U account
|
||||
# Check connections left
|
||||
m3u_account = M3UAccount.objects.get(id=profile.m3u_account.id)
|
||||
#connections_left = get_connections_left(m3u_profile_id)
|
||||
|
||||
#if connections_left <= 0:
|
||||
#logger.warning(f"No connections left for M3U account {m3u_account.id}")
|
||||
#return {'error': 'No connections left'}
|
||||
|
||||
# Get the user agent from the M3U account
|
||||
user_agent = m3u_account.get_user_agent().user_agent
|
||||
|
||||
# Generate URL using the transform function directly
|
||||
|
|
@ -172,7 +266,7 @@ def get_stream_info_for_switch(channel_id: str, target_stream_id: Optional[int]
|
|||
# Get transcode info from the channel's stream profile
|
||||
stream_profile = channel.get_stream_profile()
|
||||
transcode = not (stream_profile.is_proxy() or stream_profile is None)
|
||||
profile_value = str(stream_profile)
|
||||
profile_value = stream_profile.id
|
||||
|
||||
return {
|
||||
'url': stream_url,
|
||||
|
|
@@ -198,15 +292,18 @@ def get_alternate_streams(channel_id: str, current_stream_id: Optional[int] = No
        List[dict]: List of stream information dictionaries with stream_id and profile_id
    """
    try:
        from core.utils import RedisClient

        # Get channel object
        channel = get_stream_object(channel_id)
        if isinstance(channel, Stream):
            logger.error(f"Stream is not a channel")
            return []

        redis_client = RedisClient.get_client()
        logger.debug(f"Looking for alternate streams for channel {channel_id}, current stream ID: {current_stream_id}")

        # Get all assigned streams for this channel using the correct ordering from the channelstream table
        # Get all assigned streams for this channel using the correct ordering
        streams = channel.streams.all().order_by('channelstream__order')
        logger.debug(f"Channel {channel_id} has {streams.count()} total assigned streams")

@@ -218,7 +315,6 @@ def get_alternate_streams(channel_id: str, current_stream_id: Optional[int] = No

        # Process each stream in the user-defined order
        for stream in streams:
            # Log each stream we're checking
            logger.debug(f"Checking stream ID {stream.id} ({stream.name}) for channel {channel_id}")

            # Skip the current failing stream

@@ -226,44 +322,76 @@ def get_alternate_streams(channel_id: str, current_stream_id: Optional[int] = No
                logger.debug(f"Skipping current stream ID {current_stream_id}")
                continue

            # Find compatible profiles for this stream
            # Find compatible profiles for this stream with connection checking
            try:
                # Check if we can find profiles via m3u_account
                profiles = M3UAccountProfile.objects.filter(m3u_account=stream.m3u_account)
                if not profiles.exists():
                    logger.debug(f"No profiles found via m3u_account for stream {stream.id}")
                    # Fallback to the default profile of the account
                    default_profile = M3UAccountProfile.objects.filter(
                        m3u_account=stream.m3u_account,
                        is_default=True
                    ).first()
                    if default_profile:
                        profiles = [default_profile]
                m3u_account = stream.m3u_account
                if not m3u_account:
                    logger.debug(f"Stream {stream.id} has no M3U account")
                    continue
                if m3u_account.is_active == False:
                    logger.debug(f"M3U account {m3u_account.id} is inactive, skipping.")
                    continue
                m3u_profiles = m3u_account.profiles.filter(is_active=True)
                default_profile = next((obj for obj in m3u_profiles if obj.is_default), None)

                if not default_profile:
                    logger.debug(f"M3U account {m3u_account.id} has no default profile")
                    continue

                # Check profiles in order with connection availability
                profiles = [default_profile] + [obj for obj in m3u_profiles if not obj.is_default]

                selected_profile = None
                for profile in profiles:
                    # Check connection availability
                    if redis_client:
                        profile_connections_key = f"profile_connections:{profile.id}"
                        current_connections = int(redis_client.get(profile_connections_key) or 0)

                        # Check if this channel is already using this profile
                        channel_using_profile = False
                        existing_stream_id = redis_client.get(f"channel_stream:{channel.id}")
                        if existing_stream_id:
                            # Decode bytes to string/int for proper Redis key lookup
                            existing_stream_id = existing_stream_id.decode('utf-8')
                            existing_profile_id = redis_client.get(f"stream_profile:{existing_stream_id}")
                            if existing_profile_id and int(existing_profile_id.decode('utf-8')) == profile.id:
                                channel_using_profile = True
                                logger.debug(f"Channel {channel.id} already using profile {profile.id}")

                        # Calculate effective connections (subtract 1 if channel already using this profile)
                        effective_connections = current_connections - (1 if channel_using_profile else 0)

                        # Check if profile has available slots
                        if profile.max_streams == 0 or effective_connections < profile.max_streams:
                            selected_profile = profile
                            logger.debug(f"Found available profile {profile.id} for stream {stream.id}: {effective_connections}/{profile.max_streams} effective (current: {current_connections}, already using: {channel_using_profile})")
                            break
                        else:
                            logger.debug(f"Profile {profile.id} at max connections: {effective_connections}/{profile.max_streams} (current: {current_connections}, already using: {channel_using_profile})")
                    else:
                        logger.warning(f"No default profile found for m3u_account {stream.m3u_account.id}")
                        continue

                # Get first compatible profile
                profile = profiles.first()
                if profile:
                    logger.debug(f"Found compatible profile ID {profile.id} for stream ID {stream.id}")
                    # No Redis available, assume first active profile is okay
                    selected_profile = profile
                    break

                if selected_profile:
                    alternate_streams.append({
                        'stream_id': stream.id,
                        'profile_id': profile.id,
                        'profile_id': selected_profile.id,
                        'name': stream.name
                    })
                else:
                    logger.debug(f"No compatible profile found for stream ID {stream.id}")
                    logger.debug(f"No available profiles for stream ID {stream.id}")

            except Exception as inner_e:
                logger.error(f"Error finding profiles for stream {stream.id}: {inner_e}")
                continue

        if alternate_streams:
            stream_ids = ', '.join([str(s['stream_id']) for s in alternate_streams])
            logger.info(f"Found {len(alternate_streams)} alternate streams for channel {channel_id}: [{stream_ids}]")
            logger.info(f"Found {len(alternate_streams)} alternate streams with available connections for channel {channel_id}: [{stream_ids}]")
        else:
            logger.warning(f"No alternate streams found for channel {channel_id}")
            logger.warning(f"No alternate streams with available connections found for channel {channel_id}")

        return alternate_streams
    except Exception as e:
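Since get_alternate_streams returns dicts with stream_id, profile_id, and name in user-defined order, a failover loop falls out naturally. A sketch — channel_id and failed_id are assumed to be in scope:

for candidate in get_alternate_streams(channel_id, current_stream_id=failed_id):
    info = get_stream_info_for_switch(channel_id, candidate['stream_id'])
    if 'error' not in info:
        logger.info(f"Failing over to {candidate['name']} (stream {candidate['stream_id']})")
        break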
@@ -274,6 +402,9 @@ def validate_stream_url(url, user_agent=None, timeout=(5, 5)):
    """
    Validate if a stream URL is accessible without downloading the full content.

    Note: UDP/RTP/RTSP streams are automatically considered valid as they cannot
    be validated via HTTP methods.

    Args:
        url (str): The URL to validate
        user_agent (str): User agent to use for the request

@@ -282,6 +413,12 @@ def validate_stream_url(url, user_agent=None, timeout=(5, 5)):
    Returns:
        tuple: (is_valid, final_url, status_code, message)
    """
    # Check if URL uses non-HTTP protocols (UDP/RTP/RTSP)
    # These cannot be validated via HTTP methods, so we skip validation
    if url.startswith(('udp://', 'rtp://', 'rtsp://')):
        logger.info(f"Skipping HTTP validation for non-HTTP protocol: {url}")
        return True, url, 200, "Non-HTTP protocol (UDP/RTP/RTSP) - validation skipped"

    try:
        # Create session with proper headers
        session = requests.Session()
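The early return means callers get a uniform 4-tuple for every scheme; only http(s) URLs reach the requests-based probe. A usage sketch with an illustrative URL and user agent:

is_valid, final_url, status, message = validate_stream_url(
    "rtsp://camera.local/stream", user_agent="VLC/3.0.20"
)
# Non-HTTP schemes short-circuit to (True, url, 200, "... validation skipped")
if not is_valid:
    logger.warning(f"Stream validation failed ({status}): {message}")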
@@ -381,3 +518,47 @@ def validate_stream_url(url, user_agent=None, timeout=(5, 5)):
    finally:
        if 'session' in locals():
            session.close()

def get_connections_left(m3u_profile_id: int) -> int:
    """
    Get the number of available connections left for an M3U profile.

    Args:
        m3u_profile_id: The ID of the M3U profile

    Returns:
        int: Number of connections available (0 if none available)
    """
    try:
        from core.utils import RedisClient

        # Get the M3U profile
        m3u_profile = M3UAccountProfile.objects.get(id=m3u_profile_id)

        # If max_streams is 0, it means unlimited
        if m3u_profile.max_streams == 0:
            return 999999  # Return a large number to indicate unlimited

        # Get Redis client
        redis_client = RedisClient.get_client()
        if not redis_client:
            logger.warning("Redis not available, assuming connections available")
            return max(0, m3u_profile.max_streams - 1)  # Conservative estimate

        # Check current connections for this specific profile
        profile_connections_key = f"profile_connections:{m3u_profile_id}"
        current_connections = int(redis_client.get(profile_connections_key) or 0)

        # Calculate available connections
        connections_left = max(0, m3u_profile.max_streams - current_connections)

        logger.debug(f"M3U profile {m3u_profile_id}: {current_connections}/{m3u_profile.max_streams} used, {connections_left} available")

        return connections_left

    except M3UAccountProfile.DoesNotExist:
        logger.error(f"M3U profile {m3u_profile_id} not found")
        return 0
    except Exception as e:
        logger.error(f"Error getting connections left for M3U profile {m3u_profile_id}: {e}")
        return 0
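The function reduces to max(0, max_streams - current_connections), with two sentinels: 999999 for unlimited profiles and max_streams - 1 when Redis is unreachable. A worked example with assumed values:

# Assumed values: max_streams = 3, Redis reports profile_connections:<id> == 2
# -> connections_left = max(0, 3 - 2) = 1
if get_connections_left(profile.id) <= 0:  # profile assumed in scope
    logger.warning("Profile exhausted; try the next candidate")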
@@ -7,19 +7,27 @@ logger = logging.getLogger("ts_proxy")

def detect_stream_type(url):
    """
    Detect if stream URL is HLS or TS format.
    Detect if stream URL is HLS, RTSP/RTP, UDP, or TS format.

    Args:
        url (str): The stream URL to analyze

    Returns:
        str: 'hls' or 'ts' depending on detected format
        str: 'hls', 'rtsp', 'udp', or 'ts' depending on detected format
    """
    if not url:
        return 'unknown'

    url_lower = url.lower()

    # Check for UDP streams (requires FFmpeg)
    if url_lower.startswith('udp://'):
        return 'udp'

    # Check for RTSP/RTP streams (requires FFmpeg)
    if url_lower.startswith('rtsp://') or url_lower.startswith('rtp://'):
        return 'rtsp'

    # Look for common HLS indicators
    if (url_lower.endswith('.m3u8') or
        '.m3u8?' in url_lower or
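The branch order matters: UDP and RTSP/RTP are classified before any HLS heuristics run. Expected classifications as assertions (the plain-TS fallback for other http URLs is implied by the docstring but truncated here):

assert detect_stream_type('udp://239.0.0.1:1234') == 'udp'
assert detect_stream_type('rtsp://cam.local/live') == 'rtsp'
assert detect_stream_type('rtp://10.0.0.5:5004') == 'rtsp'
assert detect_stream_type('http://host/playlist.m3u8') == 'hls'
assert detect_stream_type('') == 'unknown'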
File diff suppressed because it is too large
@@ -5,4 +5,5 @@ app_name = 'proxy'
urlpatterns = [
    path('ts/', include('apps.proxy.ts_proxy.urls')),
    path('hls/', include('apps.proxy.hls_proxy.urls')),
    path('vod/', include('apps.proxy.vod_proxy.urls')),
]
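With this include, every route in apps.proxy.vod_proxy.urls is served under the proxy app's vod/ prefix, next to the existing ts/ and hls/ trees. The vod_proxy namespace comes from app_name in the included urls.py shown further down; the /proxy/ site-level prefix below is an assumption, as this diff does not show where the proxy app itself is mounted:

from django.urls import reverse

reverse('vod_proxy:vod_stats')  # e.g. '/proxy/vod/stats/' if mounted at /proxy/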
0 apps/proxy/vod_proxy/__init__.py Normal file
1445 apps/proxy/vod_proxy/connection_manager.py Normal file
File diff suppressed because it is too large
1370 apps/proxy/vod_proxy/multi_worker_connection_manager.py Normal file
File diff suppressed because it is too large
24 apps/proxy/vod_proxy/urls.py Normal file
@@ -0,0 +1,24 @@
from django.urls import path
from . import views

app_name = 'vod_proxy'

urlpatterns = [
    # Generic VOD streaming with session ID in path (for compatibility)
    path('<str:content_type>/<uuid:content_id>/<str:session_id>', views.VODStreamView.as_view(), name='vod_stream_with_session'),
    path('<str:content_type>/<uuid:content_id>/<str:session_id>/<int:profile_id>/', views.VODStreamView.as_view(), name='vod_stream_with_session_and_profile'),

    # Generic VOD streaming (supports movies, episodes, series) - legacy patterns
    path('<str:content_type>/<uuid:content_id>', views.VODStreamView.as_view(), name='vod_stream'),
    path('<str:content_type>/<uuid:content_id>/<int:profile_id>/', views.VODStreamView.as_view(), name='vod_stream_with_profile'),

    # VOD playlist generation
    path('playlist/', views.VODPlaylistView.as_view(), name='vod_playlist'),
    path('playlist/<int:profile_id>/', views.VODPlaylistView.as_view(), name='vod_playlist_with_profile'),

    # Position tracking
    path('position/<uuid:content_id>/', views.VODPositionView.as_view(), name='vod_position'),

    # VOD Stats
    path('stats/', views.VODStatsView.as_view(), name='vod_stats'),
]
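Because the patterns are named and typed, URLs can be built with reverse() rather than string formatting. A sketch with hypothetical IDs:

from django.urls import reverse
import uuid

movie_id = uuid.uuid4()  # hypothetical content id
url = reverse(
    'vod_proxy:vod_stream_with_profile',
    kwargs={'content_type': 'movie', 'content_id': movie_id, 'profile_id': 2},
)
# -> 'movie/<uuid>/2/' under whatever prefix the proxy app is mounted at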
58 apps/proxy/vod_proxy/utils.py Normal file
@@ -0,0 +1,58 @@
"""
Utility functions for VOD proxy operations.
"""

import logging
from django.http import HttpResponse

logger = logging.getLogger(__name__)


def get_client_info(request):
    """
    Extract client IP and User-Agent from request.

    Args:
        request: Django HttpRequest object

    Returns:
        tuple: (client_ip, user_agent)
    """
    # Get client IP, checking for proxy headers
    client_ip = request.META.get('HTTP_X_FORWARDED_FOR')
    if client_ip:
        # Take the first IP if there are multiple (comma-separated)
        client_ip = client_ip.split(',')[0].strip()
    else:
        client_ip = request.META.get('HTTP_X_REAL_IP') or request.META.get('REMOTE_ADDR', 'unknown')

    # Get User-Agent
    user_agent = request.META.get('HTTP_USER_AGENT', 'unknown')

    return client_ip, user_agent


def create_vod_response(content, content_type='video/mp4', filename=None):
    """
    Create a streaming HTTP response for VOD content.

    Args:
        content: Content to stream (file-like object or bytes)
        content_type: MIME type of the content
        filename: Optional filename for Content-Disposition header

    Returns:
        HttpResponse: Configured HTTP response for streaming
    """
    response = HttpResponse(content, content_type=content_type)

    if filename:
        response['Content-Disposition'] = f'attachment; filename="{filename}"'

    # Add headers for streaming
    response['Accept-Ranges'] = 'bytes'
    response['Cache-Control'] = 'no-cache, no-store, must-revalidate'
    response['Pragma'] = 'no-cache'
    response['Expires'] = '0'

    return response
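get_client_info prefers X-Forwarded-For (first hop) over X-Real-IP and REMOTE_ADDR, which is easy to verify with Django's RequestFactory; the header values below are illustrative:

from django.test import RequestFactory

factory = RequestFactory()
request = factory.get(
    '/',
    HTTP_X_FORWARDED_FOR='203.0.113.9, 10.0.0.2',
    HTTP_USER_AGENT='VLC/3.0.20',
)
assert get_client_info(request) == ('203.0.113.9', 'VLC/3.0.20')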
1013 apps/proxy/vod_proxy/views.py Normal file
File diff suppressed because it is too large
0 apps/vod/__init__.py Normal file
Some files were not shown because too many files have changed in this diff